2 * Copyright © 2009 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
12 * The above copyright notice and this permission notice (including the
13 * next paragraph) shall be included in all copies or substantial portions
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 * Xiang Haihao <haihao.xiang@intel.com>
26 * Zou Nan hai <nanhai.zou@intel.com>
35 #include "va/x11/va_dricommon.h"
37 #include "intel_driver.h"
38 #include "intel_memman.h"
39 #include "intel_batchbuffer.h"
40 #include "i965_defines.h"
41 #include "i965_drv_video.h"
/* Per-object-type ID offsets: each VA object kind (config, context,
 * surface, buffer, image, subpicture) lives in a disjoint ID range so
 * handles from different heaps can never collide. */
43 #define CONFIG_ID_OFFSET 0x01000000
44 #define CONTEXT_ID_OFFSET 0x02000000
45 #define SURFACE_ID_OFFSET 0x04000000
46 #define BUFFER_ID_OFFSET 0x08000000
47 #define IMAGE_ID_OFFSET 0x0a000000
48 #define SUBPIC_ID_OFFSET 0x10000000
/* Capability checks keyed on the PCI device id.
 * MPEG-2 decode on Gen6/Gen7 additionally requires the BSD ring
 * (intel.has_bsd); tiled surfaces require interleaved-UV (NV12)
 * render state on Gen6/Gen7. */
50 #define HAS_MPEG2(ctx) (IS_G4X((ctx)->intel.device_id) || \
51 IS_IRONLAKE((ctx)->intel.device_id) || \
52 ((IS_GEN6((ctx)->intel.device_id) || \
53 IS_GEN7((ctx)->intel.device_id)) && \
54 (ctx)->intel.has_bsd))
56 #define HAS_H264(ctx) ((IS_GEN7((ctx)->intel.device_id) || \
57 IS_GEN6((ctx)->intel.device_id) || \
58 IS_IRONLAKE((ctx)->intel.device_id)) && \
61 #define HAS_VC1(ctx) ((IS_GEN7((ctx)->intel.device_id) || \
62 IS_GEN6((ctx)->intel.device_id)) && \
65 #define HAS_TILED_SURFACE(ctx) ((IS_GEN7((ctx)->intel.device_id) || \
66 IS_GEN6((ctx)->intel.device_id)) && \
67 (ctx)->render_state.interleaved_uv)
69 #define HAS_ENCODER(ctx) ((IS_GEN7((ctx)->intel.device_id) || \
70 IS_GEN6((ctx)->intel.device_id)) && \
/* Surface type tags used by the image/subpicture format tables below
 * (the enum opening and the YUV member are not visible in this chunk). */
74 I965_SURFACETYPE_RGBA = 1,
76 I965_SURFACETYPE_INDEXED
79 /* List of supported image formats */
82 VAImageFormat va_format;
83 } i965_image_format_map_t;

/* YUV 4:2:0 planar/semi-planar formats the driver can export via
 * vaQueryImageFormats; the array is sized with one extra slot so the
 * table is terminated by a zero-fourcc sentinel entry. */
85 static const i965_image_format_map_t
86 i965_image_formats_map[I965_MAX_IMAGE_FORMATS + 1] = {
87 { I965_SURFACETYPE_YUV,
88 { VA_FOURCC('Y','V','1','2'), VA_LSB_FIRST, 12, } },
89 { I965_SURFACETYPE_YUV,
90 { VA_FOURCC('I','4','2','0'), VA_LSB_FIRST, 12, } },
91 { I965_SURFACETYPE_YUV,
92 { VA_FOURCC('N','V','1','2'), VA_LSB_FIRST, 12, } },
95 /* List of supported subpicture formats */
99 VAImageFormat va_format;
100 unsigned int va_flags;
101 } i965_subpic_format_map_t;

/* Subpicture formats: two 8-bit palettized (indexed) formats and two
 * 32-bit RGBA orderings, each mapped to the matching hardware sampler
 * surface format. Terminated by a zero-fourcc sentinel entry. */
103 static const i965_subpic_format_map_t
104 i965_subpic_formats_map[I965_MAX_SUBPIC_FORMATS + 1] = {
105 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_P4A4_UNORM,
106 { VA_FOURCC('I','A','4','4'), VA_MSB_FIRST, 8, },
108 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_A4P4_UNORM,
109 { VA_FOURCC('A','I','4','4'), VA_MSB_FIRST, 8, },
111 { I965_SURFACETYPE_RGBA, I965_SURFACEFORMAT_B8G8R8A8_UNORM,
112 { VA_FOURCC('B','G','R','A'), VA_LSB_FIRST, 32,
113 32, 0x00ff0000, 0x0000ff00, 0x000000ff, 0xff000000 },
115 { I965_SURFACETYPE_RGBA, I965_SURFACEFORMAT_R8G8B8A8_UNORM,
116 { VA_FOURCC('R','G','B','A'), VA_LSB_FIRST, 32,
117 32, 0x000000ff, 0x0000ff00, 0x00ff0000, 0xff000000 },
/* Find the subpicture-format table entry matching va_format.
 * Indexed formats match on fourcc alone; RGBA formats must also agree
 * on byte order and every channel mask. Iteration stops at the
 * zero-type sentinel entry of the table. */
121 static const i965_subpic_format_map_t *
122 get_subpic_format(const VAImageFormat *va_format)
125 for (i = 0; i965_subpic_formats_map[i].type != 0; i++) {
126 const i965_subpic_format_map_t * const m = &i965_subpic_formats_map[i];
127 if (m->va_format.fourcc == va_format->fourcc &&
128 (m->type == I965_SURFACETYPE_RGBA ?
129 (m->va_format.byte_order == va_format->byte_order &&
130 m->va_format.red_mask == va_format->red_mask &&
131 m->va_format.green_mask == va_format->green_mask &&
132 m->va_format.blue_mask == va_format->blue_mask &&
133 m->va_format.alpha_mask == va_format->alpha_mask) : 1))
/* Per-generation codec entry-point tables. Decoders are generation
 * specific; note that gen7_hw_codec_info reuses the Gen6 encoder init
 * (enc_hw_context_init = gen6_enc_hw_context_init). Pre-Gen6 parts
 * have no encoder (NULL). */
139 extern struct hw_context *g4x_dec_hw_context_init(VADriverContextP, VAProfile);
140 static struct hw_codec_info g4x_hw_codec_info = {
141 .dec_hw_context_init = g4x_dec_hw_context_init,
142 .enc_hw_context_init = NULL,
145 extern struct hw_context *ironlake_dec_hw_context_init(VADriverContextP, VAProfile);
146 static struct hw_codec_info ironlake_hw_codec_info = {
147 .dec_hw_context_init = ironlake_dec_hw_context_init,
148 .enc_hw_context_init = NULL,
151 extern struct hw_context *gen6_dec_hw_context_init(VADriverContextP, VAProfile);
152 extern struct hw_context *gen6_enc_hw_context_init(VADriverContextP, VAProfile);
153 static struct hw_codec_info gen6_hw_codec_info = {
154 .dec_hw_context_init = gen6_dec_hw_context_init,
155 .enc_hw_context_init = gen6_enc_hw_context_init,
158 extern struct hw_context *gen7_dec_hw_context_init(VADriverContextP, VAProfile);
159 static struct hw_codec_info gen7_hw_codec_info = {
160 .dec_hw_context_init = gen7_dec_hw_context_init,
161 .enc_hw_context_init = gen6_enc_hw_context_init,
/* vaQueryConfigProfiles: fill profile_list with the profiles supported
 * on this device, gated on the HAS_* capability macros.
 * NOTE(review): the HAS_VC1 guard for the VC-1 entries is not visible
 * in this chunk — presumably present on the missing lines; confirm. */
165 i965_QueryConfigProfiles(VADriverContextP ctx,
166 VAProfile *profile_list, /* out */
167 int *num_profiles) /* out */
169 struct i965_driver_data * const i965 = i965_driver_data(ctx);
172 if (HAS_MPEG2(i965)) {
173 profile_list[i++] = VAProfileMPEG2Simple;
174 profile_list[i++] = VAProfileMPEG2Main;
177 if (HAS_H264(i965)) {
178 profile_list[i++] = VAProfileH264Baseline;
179 profile_list[i++] = VAProfileH264Main;
180 profile_list[i++] = VAProfileH264High;
184 profile_list[i++] = VAProfileVC1Simple;
185 profile_list[i++] = VAProfileVC1Main;
186 profile_list[i++] = VAProfileVC1Advanced;
189 /* If the assert fails then I965_MAX_PROFILES needs to be bigger */
190 assert(i <= I965_MAX_PROFILES);
193 return VA_STATUS_SUCCESS;
/* vaQueryConfigEntrypoints: list entrypoints for the given profile.
 * Decoding (VLD) is offered for all supported codecs; H.264 adds
 * EncSlice when the device has an encoder. Returns
 * VA_STATUS_ERROR_UNSUPPORTED_PROFILE when no entrypoint matched. */
197 i965_QueryConfigEntrypoints(VADriverContextP ctx,
199 VAEntrypoint *entrypoint_list, /* out */
200 int *num_entrypoints) /* out */
202 struct i965_driver_data * const i965 = i965_driver_data(ctx);
206 case VAProfileMPEG2Simple:
207 case VAProfileMPEG2Main:
209 entrypoint_list[n++] = VAEntrypointVLD;
212 case VAProfileH264Baseline:
213 case VAProfileH264Main:
214 case VAProfileH264High:
216 entrypoint_list[n++] = VAEntrypointVLD;
218 if (HAS_ENCODER(i965))
219 entrypoint_list[n++] = VAEntrypointEncSlice;
223 case VAProfileVC1Simple:
224 case VAProfileVC1Main:
225 case VAProfileVC1Advanced:
227 entrypoint_list[n++] = VAEntrypointVLD;
234 /* If the assert fails then I965_MAX_ENTRYPOINTS needs to be bigger */
235 assert(n <= I965_MAX_ENTRYPOINTS);
236 *num_entrypoints = n;
237 return n > 0 ? VA_STATUS_SUCCESS : VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
/* vaGetConfigAttributes: report attribute values for a profile and
 * entrypoint. Only RTFormat (YUV420) and RateControl (VBR) are filled;
 * anything else is marked VA_ATTRIB_NOT_SUPPORTED. */
241 i965_GetConfigAttributes(VADriverContextP ctx,
243 VAEntrypoint entrypoint,
244 VAConfigAttrib *attrib_list, /* in/out */
249 /* Other attributes don't seem to be defined */
250 /* What to do if we don't know the attribute? */
251 for (i = 0; i < num_attribs; i++) {
252 switch (attrib_list[i].type) {
253 case VAConfigAttribRTFormat:
254 attrib_list[i].value = VA_RT_FORMAT_YUV420;
257 case VAConfigAttribRateControl:
258 attrib_list[i].value = VA_RC_VBR;
263 attrib_list[i].value = VA_ATTRIB_NOT_SUPPORTED;
268 return VA_STATUS_SUCCESS;
/* Release a config object back to its heap; configs own no other
 * resources, so freeing the heap entry is sufficient. */
272 i965_destroy_config(struct object_heap *heap, struct object_base *obj)
274 object_heap_free(heap, obj);
278 i965_update_attribute(struct object_config *obj_config, VAConfigAttrib *attrib)
282 /* Check existing attrbiutes */
283 for (i = 0; obj_config->num_attribs < i; i++) {
284 if (obj_config->attrib_list[i].type == attrib->type) {
285 /* Update existing attribute */
286 obj_config->attrib_list[i].value = attrib->value;
287 return VA_STATUS_SUCCESS;
291 if (obj_config->num_attribs < I965_MAX_CONFIG_ATTRIBUTES) {
292 i = obj_config->num_attribs;
293 obj_config->attrib_list[i].type = attrib->type;
294 obj_config->attrib_list[i].value = attrib->value;
295 obj_config->num_attribs++;
296 return VA_STATUS_SUCCESS;
299 return VA_STATUS_ERROR_MAX_NUM_EXCEEDED;
/* vaCreateConfig: validate the (profile, entrypoint) pair against the
 * device capabilities, allocate a config object, seed it with the
 * default RTFormat=YUV420 attribute, then merge the caller-supplied
 * attributes via i965_update_attribute. On any failure after
 * allocation the config object is destroyed again. */
303 i965_CreateConfig(VADriverContextP ctx,
305 VAEntrypoint entrypoint,
306 VAConfigAttrib *attrib_list,
308 VAConfigID *config_id) /* out */
310 struct i965_driver_data * const i965 = i965_driver_data(ctx);
311 struct object_config *obj_config;
316 /* Validate profile & entrypoint */
318 case VAProfileMPEG2Simple:
319 case VAProfileMPEG2Main:
320 if (HAS_MPEG2(i965) && VAEntrypointVLD == entrypoint) {
321 vaStatus = VA_STATUS_SUCCESS;
323 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
327 case VAProfileH264Baseline:
328 case VAProfileH264Main:
329 case VAProfileH264High:
330 if ((HAS_H264(i965) && VAEntrypointVLD == entrypoint) ||
331 (HAS_ENCODER(i965) && VAEntrypointEncSlice == entrypoint)) {
332 vaStatus = VA_STATUS_SUCCESS;
334 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
339 case VAProfileVC1Simple:
340 case VAProfileVC1Main:
341 case VAProfileVC1Advanced:
342 if (HAS_VC1(i965) && VAEntrypointVLD == entrypoint) {
343 vaStatus = VA_STATUS_SUCCESS;
345 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
351 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
355 if (VA_STATUS_SUCCESS != vaStatus) {
359 configID = NEW_CONFIG_ID();
360 obj_config = CONFIG(configID);
362 if (NULL == obj_config) {
363 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
367 obj_config->profile = profile;
368 obj_config->entrypoint = entrypoint;
369 obj_config->attrib_list[0].type = VAConfigAttribRTFormat;
370 obj_config->attrib_list[0].value = VA_RT_FORMAT_YUV420;
371 obj_config->num_attribs = 1;
373 for(i = 0; i < num_attribs; i++) {
374 vaStatus = i965_update_attribute(obj_config, &(attrib_list[i]));
376 if (VA_STATUS_SUCCESS != vaStatus) {
382 if (VA_STATUS_SUCCESS != vaStatus) {
383 i965_destroy_config(&i965->config_heap, (struct object_base *)obj_config);
385 *config_id = configID;
/* vaDestroyConfig: look up the config by ID and release it; an unknown
 * ID yields VA_STATUS_ERROR_INVALID_CONFIG. */
392 i965_DestroyConfig(VADriverContextP ctx, VAConfigID config_id)
394 struct i965_driver_data *i965 = i965_driver_data(ctx);
395 struct object_config *obj_config = CONFIG(config_id);
398 if (NULL == obj_config) {
399 vaStatus = VA_STATUS_ERROR_INVALID_CONFIG;
403 i965_destroy_config(&i965->config_heap, (struct object_base *)obj_config);
404 return VA_STATUS_SUCCESS;
/* vaQueryConfigAttributes: copy a config's profile, entrypoint and its
 * stored attribute list back to the caller. */
407 VAStatus i965_QueryConfigAttributes(VADriverContextP ctx,
408 VAConfigID config_id,
409 VAProfile *profile, /* out */
410 VAEntrypoint *entrypoint, /* out */
411 VAConfigAttrib *attrib_list, /* out */
412 int *num_attribs) /* out */
414 struct i965_driver_data *i965 = i965_driver_data(ctx);
415 struct object_config *obj_config = CONFIG(config_id);
416 VAStatus vaStatus = VA_STATUS_SUCCESS;
420 *profile = obj_config->profile;
421 *entrypoint = obj_config->entrypoint;
422 *num_attribs = obj_config->num_attribs;
424 for(i = 0; i < obj_config->num_attribs; i++) {
425 attrib_list[i] = obj_config->attrib_list[i];
/* Tear down a surface object: drop its GEM buffer references, invoke
 * the codec-private destructor if one was registered, then return the
 * object to the heap. dri_bo_unreference(NULL) is a safe no-op. */
432 i965_destroy_surface(struct object_heap *heap, struct object_base *obj)
434 struct object_surface *obj_surface = (struct object_surface *)obj;
436 dri_bo_unreference(obj_surface->bo);
437 obj_surface->bo = NULL;
438 dri_bo_unreference(obj_surface->pp_out_bo);
439 obj_surface->pp_out_bo = NULL;
441 if (obj_surface->free_private_data != NULL) {
442 obj_surface->free_private_data(&obj_surface->private_data);
443 obj_surface->private_data = NULL;
446 object_heap_free(heap, obj);
/* vaCreateSurfaces: allocate num_surfaces YUV420 surface objects.
 * Dimensions are aligned per generation (128x32 on Gen6/Gen7 for tiling,
 * 16x16 otherwise); the backing bo is allocated lazily (bo = NULL here).
 * On partial failure, every surface allocated so far is destroyed and
 * its slot set to VA_INVALID_SURFACE. */
450 i965_CreateSurfaces(VADriverContextP ctx,
455 VASurfaceID *surfaces) /* out */
457 struct i965_driver_data *i965 = i965_driver_data(ctx);
459 VAStatus vaStatus = VA_STATUS_SUCCESS;
461 /* We only support one format */
462 if (VA_RT_FORMAT_YUV420 != format) {
463 return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
466 for (i = 0; i < num_surfaces; i++) {
467 int surfaceID = NEW_SURFACE_ID();
468 struct object_surface *obj_surface = SURFACE(surfaceID);
470 if (NULL == obj_surface) {
471 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
475 surfaces[i] = surfaceID;
476 obj_surface->status = VASurfaceReady;
477 obj_surface->subpic = VA_INVALID_ID;
478 obj_surface->orig_width = width;
479 obj_surface->orig_height = height;
481 if (IS_GEN6(i965->intel.device_id) ||
482 IS_GEN7(i965->intel.device_id)) {
483 obj_surface->width = ALIGN(obj_surface->orig_width, 128);
484 obj_surface->height = ALIGN(obj_surface->orig_height, 32);
486 obj_surface->width = ALIGN(obj_surface->orig_width, 16);
487 obj_surface->height = ALIGN(obj_surface->orig_height, 16);
490 obj_surface->size = SIZE_YUV420(obj_surface->width, obj_surface->height);
491 obj_surface->flags = SURFACE_REFERENCED;
492 obj_surface->fourcc = 0;
493 obj_surface->bo = NULL;
494 obj_surface->pp_out_bo = NULL;
495 obj_surface->locked_image_id = VA_INVALID_ID;
496 obj_surface->private_data = NULL;
497 obj_surface->free_private_data = NULL;
501 if (VA_STATUS_SUCCESS != vaStatus) {
502 /* surfaces[i-1] was the last successful allocation */
504 struct object_surface *obj_surface = SURFACE(surfaces[i]);
506 surfaces[i] = VA_INVALID_SURFACE;
508 i965_destroy_surface(&i965->surface_heap, (struct object_base *)obj_surface);
/* vaDestroySurfaces: destroy the listed surfaces. Iterates from the
 * end of the list down to index 0. */
516 i965_DestroySurfaces(VADriverContextP ctx,
517 VASurfaceID *surface_list,
520 struct i965_driver_data *i965 = i965_driver_data(ctx);
523 for (i = num_surfaces; i--; ) {
524 struct object_surface *obj_surface = SURFACE(surface_list[i]);
527 i965_destroy_surface(&i965->surface_heap, (struct object_base *)obj_surface);
530 return VA_STATUS_SUCCESS;
/* vaQueryImageFormats: copy every entry of i965_image_formats_map
 * (up to the zero-fourcc sentinel) into format_list. */
534 i965_QueryImageFormats(VADriverContextP ctx,
535 VAImageFormat *format_list, /* out */
536 int *num_formats) /* out */
540 for (n = 0; i965_image_formats_map[n].va_format.fourcc != 0; n++) {
541 const i965_image_format_map_t * const m = &i965_image_formats_map[n];
543 format_list[n] = m->va_format;
549 return VA_STATUS_SUCCESS;
/* vaPutImage: not implemented — this stub silently succeeds without
 * copying any pixel data. */
553 i965_PutImage(VADriverContextP ctx,
558 unsigned int src_width,
559 unsigned int src_height,
562 unsigned int dest_width,
563 unsigned int dest_height)
565 return VA_STATUS_SUCCESS;
/* vaQuerySubpictureFormats: copy the subpicture format table (format +
 * per-format flags) out to the caller, stopping at the sentinel. */
569 i965_QuerySubpictureFormats(VADriverContextP ctx,
570 VAImageFormat *format_list, /* out */
571 unsigned int *flags, /* out */
572 unsigned int *num_formats) /* out */
576 for (n = 0; i965_subpic_formats_map[n].va_format.fourcc != 0; n++) {
577 const i965_subpic_format_map_t * const m = &i965_subpic_formats_map[n];
579 format_list[n] = m->va_format;
581 flags[n] = m->va_flags;
587 return VA_STATUS_SUCCESS;
/* Release a subpicture object; the image bo it points at is owned by
 * the image object, so nothing else needs freeing here. */
591 i965_destroy_subpic(struct object_heap *heap, struct object_base *obj)
593 // struct object_subpic *obj_subpic = (struct object_subpic *)obj;
595 object_heap_free(heap, obj);
/* vaCreateSubpicture: allocate a subpicture object backed by an
 * existing image. Fails when the image ID is invalid or the image's
 * format has no entry in i965_subpic_formats_map. The subpicture
 * borrows the image's bo and pitch rather than copying pixels. */
599 i965_CreateSubpicture(VADriverContextP ctx,
601 VASubpictureID *subpicture) /* out */
603 struct i965_driver_data *i965 = i965_driver_data(ctx);
604 VASubpictureID subpicID = NEW_SUBPIC_ID(); /* was missing the ';' */
605 struct object_subpic *obj_subpic = SUBPIC(subpicID);
608 return VA_STATUS_ERROR_ALLOCATION_FAILED;
610 struct object_image *obj_image = IMAGE(image);
612 return VA_STATUS_ERROR_INVALID_IMAGE;
614 const i965_subpic_format_map_t * const m = get_subpic_format(&obj_image->image.format);
616 return VA_STATUS_ERROR_UNKNOWN; /* XXX: VA_STATUS_ERROR_UNSUPPORTED_FORMAT? */
618 *subpicture = subpicID;
619 obj_subpic->image = image;
620 obj_subpic->format = m->format;
621 obj_subpic->width = obj_image->image.width;
622 obj_subpic->height = obj_image->image.height;
623 obj_subpic->pitch = obj_image->image.pitches[0];
624 obj_subpic->bo = obj_image->bo;
625 return VA_STATUS_SUCCESS;
/* vaDestroySubpicture: free the subpicture object.
 * NOTE(review): obj_subpic is not NULL-checked before the destroy call
 * on the visible lines — a check may exist on lines missing from this
 * chunk; confirm. */
629 i965_DestroySubpicture(VADriverContextP ctx,
630 VASubpictureID subpicture)
632 struct i965_driver_data *i965 = i965_driver_data(ctx);
633 struct object_subpic *obj_subpic = SUBPIC(subpicture);
634 i965_destroy_subpic(&i965->subpic_heap, (struct object_base *)obj_subpic);
635 return VA_STATUS_SUCCESS;
/* vaSetSubpictureImage: not implemented. */
639 i965_SetSubpictureImage(VADriverContextP ctx,
640 VASubpictureID subpicture,
644 return VA_STATUS_ERROR_UNIMPLEMENTED;
/* vaSetSubpictureChromakey: not implemented. */
648 i965_SetSubpictureChromakey(VADriverContextP ctx,
649 VASubpictureID subpicture,
650 unsigned int chromakey_min,
651 unsigned int chromakey_max,
652 unsigned int chromakey_mask)
655 return VA_STATUS_ERROR_UNIMPLEMENTED;
/* vaSetSubpictureGlobalAlpha: not implemented. */
659 i965_SetSubpictureGlobalAlpha(VADriverContextP ctx,
660 VASubpictureID subpicture,
664 return VA_STATUS_ERROR_UNIMPLEMENTED;
/* vaAssociateSubpicture: record the src/dst rectangles on the
 * subpicture object and attach the subpicture ID to each target
 * surface. Only one subpicture per surface is tracked (surface->subpic
 * is a single ID, so a later association overwrites an earlier one). */
668 i965_AssociateSubpicture(VADriverContextP ctx,
669 VASubpictureID subpicture,
670 VASurfaceID *target_surfaces,
672 short src_x, /* upper left offset in subpicture */
674 unsigned short src_width,
675 unsigned short src_height,
676 short dest_x, /* upper left offset in surface */
678 unsigned short dest_width,
679 unsigned short dest_height,
681 * whether to enable chroma-keying or global-alpha
682 * see VA_SUBPICTURE_XXX values
686 struct i965_driver_data *i965 = i965_driver_data(ctx);
687 struct object_subpic *obj_subpic = SUBPIC(subpicture);
690 obj_subpic->src_rect.x = src_x;
691 obj_subpic->src_rect.y = src_y;
692 obj_subpic->src_rect.width = src_width;
693 obj_subpic->src_rect.height = src_height;
694 obj_subpic->dst_rect.x = dest_x;
695 obj_subpic->dst_rect.y = dest_y;
696 obj_subpic->dst_rect.width = dest_width;
697 obj_subpic->dst_rect.height = dest_height;
699 for (i = 0; i < num_surfaces; i++) {
700 struct object_surface *obj_surface = SURFACE(target_surfaces[i]);
702 return VA_STATUS_ERROR_INVALID_SURFACE;
703 obj_surface->subpic = subpicture;
705 return VA_STATUS_SUCCESS;
/* vaDeassociateSubpicture: clear the subpicture link from each target
 * surface, but only when that surface currently points at the given
 * subpicture ID. */
710 i965_DeassociateSubpicture(VADriverContextP ctx,
711 VASubpictureID subpicture,
712 VASurfaceID *target_surfaces,
715 struct i965_driver_data *i965 = i965_driver_data(ctx);
718 for (i = 0; i < num_surfaces; i++) {
719 struct object_surface *obj_surface = SURFACE(target_surfaces[i]);
721 return VA_STATUS_ERROR_INVALID_SURFACE;
722 if (obj_surface->subpic == subpicture)
723 obj_surface->subpic = VA_INVALID_ID;
725 return VA_STATUS_SUCCESS;
/* Take a reference on buffer_store and stash it in *ptr, which must be
 * empty (asserted) — callers release any previous store first. */
729 i965_reference_buffer_store(struct buffer_store **ptr,
730 struct buffer_store *buffer_store)
732 assert(*ptr == NULL);
735 buffer_store->ref_count++;
/* Drop one reference on *ptr's buffer store (no-op on NULL). A store
 * holds exactly one backing — either a GEM bo or a malloc'd buffer,
 * never both (asserted) — and the backing is freed when the refcount
 * reaches zero. free(NULL)/dri_bo_unreference(NULL) are safe. */
741 i965_release_buffer_store(struct buffer_store **ptr)
743 struct buffer_store *buffer_store = *ptr;
745 if (buffer_store == NULL)
748 assert(buffer_store->bo || buffer_store->buffer);
749 assert(!(buffer_store->bo && buffer_store->buffer));
750 buffer_store->ref_count--;
752 if (buffer_store->ref_count == 0) {
753 dri_bo_unreference(buffer_store->bo);
754 free(buffer_store->buffer);
755 buffer_store->bo = NULL;
756 buffer_store->buffer = NULL;
/* Tear down a context: destroy the codec hw_context, release every
 * buffer store held in the encoder or decoder codec_state (pic/seq/
 * iq/bitplane params plus all slice param/data stores), free the
 * slice arrays and render-target list, then free the heap object. */
764 i965_destroy_context(struct object_heap *heap, struct object_base *obj)
766 struct object_context *obj_context = (struct object_context *)obj;
769 if (obj_context->hw_context) {
770 obj_context->hw_context->destroy(obj_context->hw_context);
771 obj_context->hw_context = NULL;
774 if (obj_context->codec_type == CODEC_ENC) {
775 assert(obj_context->codec_state.enc.num_slice_params <= obj_context->codec_state.enc.max_slice_params);
776 i965_release_buffer_store(&obj_context->codec_state.enc.pic_param);
777 i965_release_buffer_store(&obj_context->codec_state.enc.seq_param);
779 assert(obj_context->codec_state.dec.num_slice_params <= obj_context->codec_state.dec.max_slice_params);
780 assert(obj_context->codec_state.dec.num_slice_datas <= obj_context->codec_state.dec.max_slice_datas);
782 i965_release_buffer_store(&obj_context->codec_state.dec.pic_param);
783 i965_release_buffer_store(&obj_context->codec_state.dec.iq_matrix);
784 i965_release_buffer_store(&obj_context->codec_state.dec.bit_plane);
786 for (i = 0; i < obj_context->codec_state.dec.num_slice_params; i++)
787 i965_release_buffer_store(&obj_context->codec_state.dec.slice_params[i]);
789 for (i = 0; i < obj_context->codec_state.dec.num_slice_datas; i++)
790 i965_release_buffer_store(&obj_context->codec_state.dec.slice_datas[i]);
792 free(obj_context->codec_state.dec.slice_params);
793 free(obj_context->codec_state.dec.slice_datas);
796 free(obj_context->render_targets);
797 object_heap_free(heap, obj);
/* vaCreateContext: allocate a context object, configure render state
 * (H.264 forces interleaved-UV/NV12; other codecs enable it only on
 * Gen6/Gen7), copy the validated render-target list, then initialize
 * the encoder or decoder codec_state and hw_context according to the
 * config's entrypoint. On any failure the context is destroyed.
 * NOTE(review): the calloc results for render_targets and the slice
 * param/data arrays are not NULL-checked on the visible lines — an
 * OOM here would crash later; confirm against the full file. */
801 i965_CreateContext(VADriverContextP ctx,
802 VAConfigID config_id,
806 VASurfaceID *render_targets,
807 int num_render_targets,
808 VAContextID *context) /* out */
810 struct i965_driver_data *i965 = i965_driver_data(ctx);
811 struct i965_render_state *render_state = &i965->render_state;
812 struct object_config *obj_config = CONFIG(config_id);
813 struct object_context *obj_context = NULL;
814 VAStatus vaStatus = VA_STATUS_SUCCESS;
818 if (NULL == obj_config) {
819 vaStatus = VA_STATUS_ERROR_INVALID_CONFIG;
824 /* Validate picture dimensions */
825 contextID = NEW_CONTEXT_ID();
826 obj_context = CONTEXT(contextID);
828 if (NULL == obj_context) {
829 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
833 render_state->inited = 1;
835 switch (obj_config->profile) {
836 case VAProfileH264Baseline:
837 case VAProfileH264Main:
838 case VAProfileH264High:
840 return VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
841 render_state->interleaved_uv = 1;
844 render_state->interleaved_uv = !!(IS_GEN6(i965->intel.device_id) || IS_GEN7(i965->intel.device_id));
848 *context = contextID;
849 obj_context->flags = flag;
850 obj_context->context_id = contextID;
851 obj_context->config_id = config_id;
852 obj_context->picture_width = picture_width;
853 obj_context->picture_height = picture_height;
854 obj_context->num_render_targets = num_render_targets;
855 obj_context->render_targets =
856 (VASurfaceID *)calloc(num_render_targets, sizeof(VASurfaceID));
857 obj_context->hw_context = NULL;
859 for(i = 0; i < num_render_targets; i++) {
860 if (NULL == SURFACE(render_targets[i])) {
861 vaStatus = VA_STATUS_ERROR_INVALID_SURFACE;
865 obj_context->render_targets[i] = render_targets[i];
868 if (VA_STATUS_SUCCESS == vaStatus) {
869 if (VAEntrypointEncSlice == obj_config->entrypoint ) { /*encode routin only*/
870 obj_context->codec_type = CODEC_ENC;
871 memset(&obj_context->codec_state.enc, 0, sizeof(obj_context->codec_state.enc));
872 obj_context->codec_state.enc.current_render_target = VA_INVALID_ID;
873 obj_context->codec_state.enc.max_slice_params = NUM_SLICES;
874 obj_context->codec_state.enc.slice_params = calloc(obj_context->codec_state.enc.max_slice_params,
875 sizeof(*obj_context->codec_state.enc.slice_params));
876 assert(i965->codec_info->enc_hw_context_init);
877 obj_context->hw_context = i965->codec_info->enc_hw_context_init(ctx, obj_config->profile);
879 obj_context->codec_type = CODEC_DEC;
880 memset(&obj_context->codec_state.dec, 0, sizeof(obj_context->codec_state.dec));
881 obj_context->codec_state.dec.current_render_target = -1;
882 obj_context->codec_state.dec.max_slice_params = NUM_SLICES;
883 obj_context->codec_state.dec.max_slice_datas = NUM_SLICES;
884 obj_context->codec_state.dec.slice_params = calloc(obj_context->codec_state.dec.max_slice_params,
885 sizeof(*obj_context->codec_state.dec.slice_params));
886 obj_context->codec_state.dec.slice_datas = calloc(obj_context->codec_state.dec.max_slice_datas,
887 sizeof(*obj_context->codec_state.dec.slice_datas));
889 assert(i965->codec_info->dec_hw_context_init);
890 obj_context->hw_context = i965->codec_info->dec_hw_context_init(ctx, obj_config->profile);
895 if (VA_STATUS_SUCCESS != vaStatus) {
896 i965_destroy_context(&i965->context_heap, (struct object_base *)obj_context);
/* vaDestroyContext: look up the context by ID and destroy it. */
903 i965_DestroyContext(VADriverContextP ctx, VAContextID context)
905 struct i965_driver_data *i965 = i965_driver_data(ctx);
906 struct object_context *obj_context = CONTEXT(context);
909 i965_destroy_context(&i965->context_heap, (struct object_base *)obj_context);
911 return VA_STATUS_SUCCESS;
/* Tear down a buffer object: drop its reference on the shared buffer
 * store, then return the object to the heap. */
915 i965_destroy_buffer(struct object_heap *heap, struct object_base *obj)
917 struct object_buffer *obj_buffer = (struct object_buffer *)obj;
919 assert(obj_buffer->buffer_store);
920 i965_release_buffer_store(&obj_buffer->buffer_store);
921 object_heap_free(heap, obj);
/* Common buffer creation for i965_CreateBuffer and internal callers.
 * Backing selection: reuse store_bo if supplied; allocate a GEM bo for
 * slice-data, image, and coded buffers (large, GPU-visible data);
 * otherwise use a plain malloc'd buffer. For VAEncCodedBufferType the
 * bo is enlarged by an aligned VACodedBufferSegment header which is
 * initialized in place via a map/unmap. The new store starts with
 * ref_count 1 and ownership is transferred to the buffer object
 * (reference then release the local pointer). */
925 i965_create_buffer_internal(VADriverContextP ctx,
929 unsigned int num_elements,
934 struct i965_driver_data *i965 = i965_driver_data(ctx);
935 struct object_buffer *obj_buffer = NULL;
936 struct buffer_store *buffer_store = NULL;
941 case VAPictureParameterBufferType:
942 case VAIQMatrixBufferType:
943 case VABitPlaneBufferType:
944 case VASliceGroupMapBufferType:
945 case VASliceParameterBufferType:
946 case VASliceDataBufferType:
947 case VAMacroblockParameterBufferType:
948 case VAResidualDataBufferType:
949 case VADeblockingParameterBufferType:
950 case VAImageBufferType:
951 case VAEncCodedBufferType:
952 case VAEncSequenceParameterBufferType:
953 case VAEncPictureParameterBufferType:
954 case VAEncSliceParameterBufferType:
959 return VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
962 bufferID = NEW_BUFFER_ID();
963 obj_buffer = BUFFER(bufferID);
965 if (NULL == obj_buffer) {
966 return VA_STATUS_ERROR_ALLOCATION_FAILED;
969 if (type == VAEncCodedBufferType) {
970 size += ALIGN(sizeof(VACodedBufferSegment), 64);
973 obj_buffer->max_num_elements = num_elements;
974 obj_buffer->num_elements = num_elements;
975 obj_buffer->size_element = size;
976 obj_buffer->type = type;
977 obj_buffer->buffer_store = NULL;
978 buffer_store = calloc(1, sizeof(struct buffer_store));
979 assert(buffer_store);
980 buffer_store->ref_count = 1;
982 if (store_bo != NULL) {
983 buffer_store->bo = store_bo;
984 dri_bo_reference(buffer_store->bo);
987 dri_bo_subdata(buffer_store->bo, 0, size * num_elements, data);
988 } else if (type == VASliceDataBufferType || type == VAImageBufferType || type == VAEncCodedBufferType) {
989 buffer_store->bo = dri_bo_alloc(i965->intel.bufmgr,
991 size * num_elements, 64);
992 assert(buffer_store->bo);
994 if (type == VAEncCodedBufferType) {
995 VACodedBufferSegment *coded_buffer_segment;
996 dri_bo_map(buffer_store->bo, 1);
997 coded_buffer_segment = (VACodedBufferSegment *)buffer_store->bo->virtual;
998 coded_buffer_segment->size = size - ALIGN(sizeof(VACodedBufferSegment), 64);
999 coded_buffer_segment->bit_offset = 0;
1000 coded_buffer_segment->status = 0;
1001 coded_buffer_segment->buf = NULL;
1002 coded_buffer_segment->next = NULL;
1003 dri_bo_unmap(buffer_store->bo);
1005 dri_bo_subdata(buffer_store->bo, 0, size * num_elements, data);
1009 buffer_store->buffer = malloc(size * num_elements);
1010 assert(buffer_store->buffer);
1013 memcpy(buffer_store->buffer, data, size * num_elements);
1016 buffer_store->num_elements = obj_buffer->num_elements;
1017 i965_reference_buffer_store(&obj_buffer->buffer_store, buffer_store);
1018 i965_release_buffer_store(&buffer_store);
1021 return VA_STATUS_SUCCESS;
/* vaCreateBuffer: thin wrapper over i965_create_buffer_internal with
 * no pre-existing bo to wrap (store_bo = NULL). */
1025 i965_CreateBuffer(VADriverContextP ctx,
1026 VAContextID context, /* in */
1027 VABufferType type, /* in */
1028 unsigned int size, /* in */
1029 unsigned int num_elements, /* in */
1030 void *data, /* in */
1031 VABufferID *buf_id) /* out */
1033 return i965_create_buffer_internal(ctx, context, type, size, num_elements, data, NULL, buf_id);
/* vaBufferSetNumElements: shrink/adjust the element count of a buffer,
 * bounded by the count it was created with; the change is mirrored
 * into the shared buffer store when one is attached.
 * Note: num_elements is unsigned, so the original "(num_elements < 0)"
 * test was always false and has been removed (no behavior change). */
1038 i965_BufferSetNumElements(VADriverContextP ctx,
1039 VABufferID buf_id, /* in */
1040 unsigned int num_elements) /* in */
1042 struct i965_driver_data *i965 = i965_driver_data(ctx);
1043 struct object_buffer *obj_buffer = BUFFER(buf_id);
1044 VAStatus vaStatus = VA_STATUS_SUCCESS;
1048 if (num_elements > obj_buffer->max_num_elements) {
1050 vaStatus = VA_STATUS_ERROR_UNKNOWN;
1052 obj_buffer->num_elements = num_elements;
1053 if (obj_buffer->buffer_store != NULL) {
1054 obj_buffer->buffer_store->num_elements = num_elements;
/* vaMapBuffer: expose the buffer's backing to the CPU. GEM-backed
 * stores are mapped writable — through the GTT when the bo is tiled,
 * with a regular map otherwise; malloc-backed stores just return the
 * pointer. For coded buffers the segment's buf pointer is fixed up to
 * point past the aligned VACodedBufferSegment header. */
1062 i965_MapBuffer(VADriverContextP ctx,
1063 VABufferID buf_id, /* in */
1064 void **pbuf) /* out */
1066 struct i965_driver_data *i965 = i965_driver_data(ctx);
1067 struct object_buffer *obj_buffer = BUFFER(buf_id);
1068 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
1070 assert(obj_buffer && obj_buffer->buffer_store);
1071 assert(obj_buffer->buffer_store->bo || obj_buffer->buffer_store->buffer);
1072 assert(!(obj_buffer->buffer_store->bo && obj_buffer->buffer_store->buffer));
1074 if (NULL != obj_buffer->buffer_store->bo) {
1075 unsigned int tiling, swizzle;
1077 dri_bo_get_tiling(obj_buffer->buffer_store->bo, &tiling, &swizzle);
1079 if (tiling != I915_TILING_NONE)
1080 drm_intel_gem_bo_map_gtt(obj_buffer->buffer_store->bo);
1082 dri_bo_map(obj_buffer->buffer_store->bo, 1);
1084 assert(obj_buffer->buffer_store->bo->virtual);
1085 *pbuf = obj_buffer->buffer_store->bo->virtual;
1087 if (obj_buffer->type == VAEncCodedBufferType) {
1088 VACodedBufferSegment *coded_buffer_segment = (VACodedBufferSegment *)(obj_buffer->buffer_store->bo->virtual);
1089 coded_buffer_segment->buf = (unsigned char *)(obj_buffer->buffer_store->bo->virtual) + ALIGN(sizeof(VACodedBufferSegment), 64);
1092 vaStatus = VA_STATUS_SUCCESS;
1093 } else if (NULL != obj_buffer->buffer_store->buffer) {
1094 *pbuf = obj_buffer->buffer_store->buffer;
1095 vaStatus = VA_STATUS_SUCCESS;
/* vaUnmapBuffer: mirror of i965_MapBuffer — unmap via GTT for tiled
 * bos, regular unmap otherwise; malloc-backed stores need no action. */
1102 i965_UnmapBuffer(VADriverContextP ctx, VABufferID buf_id)
1104 struct i965_driver_data *i965 = i965_driver_data(ctx);
1105 struct object_buffer *obj_buffer = BUFFER(buf_id);
1106 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
1108 assert(obj_buffer && obj_buffer->buffer_store);
1109 assert(obj_buffer->buffer_store->bo || obj_buffer->buffer_store->buffer);
1110 assert(!(obj_buffer->buffer_store->bo && obj_buffer->buffer_store->buffer));
1112 if (NULL != obj_buffer->buffer_store->bo) {
1113 unsigned int tiling, swizzle;
1115 dri_bo_get_tiling(obj_buffer->buffer_store->bo, &tiling, &swizzle);
1117 if (tiling != I915_TILING_NONE)
1118 drm_intel_gem_bo_unmap_gtt(obj_buffer->buffer_store->bo);
1120 dri_bo_unmap(obj_buffer->buffer_store->bo);
1122 vaStatus = VA_STATUS_SUCCESS;
1123 } else if (NULL != obj_buffer->buffer_store->buffer) {
1125 vaStatus = VA_STATUS_SUCCESS;
/* vaDestroyBuffer: look up the buffer by ID and destroy it. */
1132 i965_DestroyBuffer(VADriverContextP ctx, VABufferID buffer_id)
1134 struct i965_driver_data *i965 = i965_driver_data(ctx);
1135 struct object_buffer *obj_buffer = BUFFER(buffer_id);
1138 i965_destroy_buffer(&i965->buffer_heap, (struct object_base *)obj_buffer);
1140 return VA_STATUS_SUCCESS;
/* vaBeginPicture: validate the context's profile, then record
 * render_target as the current frame in the encoder or decoder
 * codec_state for the subsequent RenderPicture/EndPicture calls. */
1144 i965_BeginPicture(VADriverContextP ctx,
1145 VAContextID context,
1146 VASurfaceID render_target)
1148 struct i965_driver_data *i965 = i965_driver_data(ctx);
1149 struct object_context *obj_context = CONTEXT(context);
1150 struct object_surface *obj_surface = SURFACE(render_target);
1151 struct object_config *obj_config;
1155 assert(obj_context);
1156 assert(obj_surface);
1158 config = obj_context->config_id;
1159 obj_config = CONFIG(config);
1162 switch (obj_config->profile) {
1163 case VAProfileMPEG2Simple:
1164 case VAProfileMPEG2Main:
1165 vaStatus = VA_STATUS_SUCCESS;
1168 case VAProfileH264Baseline:
1169 case VAProfileH264Main:
1170 case VAProfileH264High:
1171 vaStatus = VA_STATUS_SUCCESS;
1174 case VAProfileVC1Simple:
1175 case VAProfileVC1Main:
1176 case VAProfileVC1Advanced:
1177 vaStatus = VA_STATUS_SUCCESS;
1182 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
1186 if (obj_context->codec_type == CODEC_ENC)
1187 obj_context->codec_state.enc.current_render_target = render_target; /*This is input new frame*/
1189 obj_context->codec_state.dec.current_render_target = render_target;
/* Attach a picture-parameter buffer to the decoder state: drop the
 * previous pic_param store (if any) and take a reference on the new
 * one. Picture parameters are always CPU-side (bo must be NULL). */
1195 i965_render_picture_parameter_buffer(VADriverContextP ctx,
1196 struct object_context *obj_context,
1197 struct object_buffer *obj_buffer)
1199 assert(obj_buffer->buffer_store->bo == NULL);
1200 assert(obj_buffer->buffer_store->buffer);
1201 i965_release_buffer_store(&obj_context->codec_state.dec.pic_param);
1202 i965_reference_buffer_store(&obj_context->codec_state.dec.pic_param,
1203 obj_buffer->buffer_store);
1205 return VA_STATUS_SUCCESS;
/* Attach an IQ-matrix buffer to the decoder state (release previous,
 * reference new); IQ matrices are CPU-side only (bo must be NULL). */
1209 i965_render_iq_matrix_buffer(VADriverContextP ctx,
1210 struct object_context *obj_context,
1211 struct object_buffer *obj_buffer)
1213 assert(obj_buffer->buffer_store->bo == NULL);
1214 assert(obj_buffer->buffer_store->buffer);
1215 i965_release_buffer_store(&obj_context->codec_state.dec.iq_matrix);
1216 i965_reference_buffer_store(&obj_context->codec_state.dec.iq_matrix,
1217 obj_buffer->buffer_store);
1219 return VA_STATUS_SUCCESS;
/* Attach a VC-1 bit-plane buffer to the decoder state (release
 * previous, reference new); bit planes are CPU-side only. */
1223 i965_render_bit_plane_buffer(VADriverContextP ctx,
1224 struct object_context *obj_context,
1225 struct object_buffer *obj_buffer)
1227 assert(obj_buffer->buffer_store->bo == NULL);
1228 assert(obj_buffer->buffer_store->buffer);
1229 i965_release_buffer_store(&obj_context->codec_state.dec.bit_plane);
1230 i965_reference_buffer_store(&obj_context->codec_state.dec.bit_plane,
1231 obj_buffer->buffer_store);
1233 return VA_STATUS_SUCCESS;
1237 i965_render_slice_parameter_buffer(VADriverContextP ctx,
1238 struct object_context *obj_context,
1239 struct object_buffer *obj_buffer)
1241 assert(obj_buffer->buffer_store->bo == NULL);
1242 assert(obj_buffer->buffer_store->buffer);
1244 if (obj_context->codec_state.dec.num_slice_params == obj_context->codec_state.dec.max_slice_params) {
1245 obj_context->codec_state.dec.slice_params = realloc(obj_context->codec_state.dec.slice_params,
1246 (obj_context->codec_state.dec.max_slice_params + NUM_SLICES) * sizeof(*obj_context->codec_state.dec.slice_params));
1247 memset(obj_context->codec_state.dec.slice_params + obj_context->codec_state.dec.max_slice_params, 0, NUM_SLICES * sizeof(*obj_context->codec_state.dec.slice_params));
1248 obj_context->codec_state.dec.max_slice_params += NUM_SLICES;
1251 i965_release_buffer_store(&obj_context->codec_state.dec.slice_params[obj_context->codec_state.dec.num_slice_params]);
1252 i965_reference_buffer_store(&obj_context->codec_state.dec.slice_params[obj_context->codec_state.dec.num_slice_params],
1253 obj_buffer->buffer_store);
1254 obj_context->codec_state.dec.num_slice_params++;
1256 return VA_STATUS_SUCCESS;
1260 i965_render_slice_data_buffer(VADriverContextP ctx,
1261 struct object_context *obj_context,
1262 struct object_buffer *obj_buffer)
1264 assert(obj_buffer->buffer_store->buffer == NULL);
1265 assert(obj_buffer->buffer_store->bo);
1267 if (obj_context->codec_state.dec.num_slice_datas == obj_context->codec_state.dec.max_slice_datas) {
1268 obj_context->codec_state.dec.slice_datas = realloc(obj_context->codec_state.dec.slice_datas,
1269 (obj_context->codec_state.dec.max_slice_datas + NUM_SLICES) * sizeof(*obj_context->codec_state.dec.slice_datas));
1270 memset(obj_context->codec_state.dec.slice_datas + obj_context->codec_state.dec.max_slice_datas, 0, NUM_SLICES * sizeof(*obj_context->codec_state.dec.slice_datas));
1271 obj_context->codec_state.dec.max_slice_datas += NUM_SLICES;
1274 i965_release_buffer_store(&obj_context->codec_state.dec.slice_datas[obj_context->codec_state.dec.num_slice_datas]);
1275 i965_reference_buffer_store(&obj_context->codec_state.dec.slice_datas[obj_context->codec_state.dec.num_slice_datas],
1276 obj_buffer->buffer_store);
1277 obj_context->codec_state.dec.num_slice_datas++;
1279 return VA_STATUS_SUCCESS;
1283 i965_decoder_render_picture(VADriverContextP ctx,
1284 VAContextID context,
1285 VABufferID *buffers,
1288 struct i965_driver_data *i965 = i965_driver_data(ctx);
1289 struct object_context *obj_context = CONTEXT(context);
1293 for (i = 0; i < num_buffers; i++) {
1294 struct object_buffer *obj_buffer = BUFFER(buffers[i]);
1297 switch (obj_buffer->type) {
1298 case VAPictureParameterBufferType:
1299 vaStatus = i965_render_picture_parameter_buffer(ctx, obj_context, obj_buffer);
1302 case VAIQMatrixBufferType:
1303 vaStatus = i965_render_iq_matrix_buffer(ctx, obj_context, obj_buffer);
1306 case VABitPlaneBufferType:
1307 vaStatus = i965_render_bit_plane_buffer(ctx, obj_context, obj_buffer);
1310 case VASliceParameterBufferType:
1311 vaStatus = i965_render_slice_parameter_buffer(ctx, obj_context, obj_buffer);
1314 case VASliceDataBufferType:
1315 vaStatus = i965_render_slice_data_buffer(ctx, obj_context, obj_buffer);
1319 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
1328 i965_encoder_render_squence_parameter_buffer(VADriverContextP ctx,
1329 struct object_context *obj_context,
1330 struct object_buffer *obj_buffer)
1332 assert(obj_buffer->buffer_store->bo == NULL);
1333 assert(obj_buffer->buffer_store->buffer);
1334 i965_release_buffer_store(&obj_context->codec_state.enc.seq_param);
1335 i965_reference_buffer_store(&obj_context->codec_state.enc.seq_param,
1336 obj_buffer->buffer_store);
1338 return VA_STATUS_SUCCESS;
1343 i965_encoder_render_picture_parameter_buffer(VADriverContextP ctx,
1344 struct object_context *obj_context,
1345 struct object_buffer *obj_buffer)
1347 assert(obj_buffer->buffer_store->bo == NULL);
1348 assert(obj_buffer->buffer_store->buffer);
1349 i965_release_buffer_store(&obj_context->codec_state.enc.pic_param);
1350 i965_reference_buffer_store(&obj_context->codec_state.enc.pic_param,
1351 obj_buffer->buffer_store);
1353 return VA_STATUS_SUCCESS;
1357 i965_encoder_render_slice_parameter_buffer(VADriverContextP ctx,
1358 struct object_context *obj_context,
1359 struct object_buffer *obj_buffer)
1361 if (obj_context->codec_state.enc.num_slice_params == obj_context->codec_state.enc.max_slice_params) {
1362 obj_context->codec_state.enc.slice_params = realloc(obj_context->codec_state.enc.slice_params,
1363 (obj_context->codec_state.enc.max_slice_params + NUM_SLICES) * sizeof(*obj_context->codec_state.enc.slice_params));
1364 memset(obj_context->codec_state.enc.slice_params + obj_context->codec_state.enc.max_slice_params, 0, NUM_SLICES * sizeof(*obj_context->codec_state.enc.slice_params));
1365 obj_context->codec_state.enc.max_slice_params += NUM_SLICES;
1368 i965_release_buffer_store(&obj_context->codec_state.enc.slice_params[obj_context->codec_state.enc.num_slice_params]);
1369 i965_reference_buffer_store(&obj_context->codec_state.enc.slice_params[obj_context->codec_state.enc.num_slice_params],
1370 obj_buffer->buffer_store);
1371 obj_context->codec_state.enc.num_slice_params++;
1373 return VA_STATUS_SUCCESS;
1377 i965_encoder_render_picture_control_buffer(VADriverContextP ctx,
1378 struct object_context *obj_context,
1379 struct object_buffer *obj_buffer)
1381 assert(obj_buffer->buffer_store->bo == NULL);
1382 assert(obj_buffer->buffer_store->buffer);
1383 i965_release_buffer_store(&obj_context->codec_state.enc.pic_control);
1384 i965_reference_buffer_store(&obj_context->codec_state.enc.pic_control,
1385 obj_buffer->buffer_store);
1387 return VA_STATUS_SUCCESS;
1391 i965_encoder_render_qmatrix_buffer(VADriverContextP ctx,
1392 struct object_context *obj_context,
1393 struct object_buffer *obj_buffer)
1395 assert(obj_buffer->buffer_store->bo == NULL);
1396 assert(obj_buffer->buffer_store->buffer);
1397 i965_release_buffer_store(&obj_context->codec_state.enc.q_matrix);
1398 i965_reference_buffer_store(&obj_context->codec_state.enc.iq_matrix,
1399 obj_buffer->buffer_store);
1401 return VA_STATUS_SUCCESS;
1405 i965_encoder_render_iqmatrix_buffer(VADriverContextP ctx,
1406 struct object_context *obj_context,
1407 struct object_buffer *obj_buffer)
1409 assert(obj_buffer->buffer_store->bo == NULL);
1410 assert(obj_buffer->buffer_store->buffer);
1411 i965_release_buffer_store(&obj_context->codec_state.enc.iq_matrix);
1412 i965_reference_buffer_store(&obj_context->codec_state.enc.iq_matrix,
1413 obj_buffer->buffer_store);
1415 return VA_STATUS_SUCCESS;
1419 i965_encoder_render_picture(VADriverContextP ctx,
1420 VAContextID context,
1421 VABufferID *buffers,
1424 struct i965_driver_data *i965 = i965_driver_data(ctx);
1425 struct object_context *obj_context = CONTEXT(context);
1429 for (i = 0; i < num_buffers; i++) {
1430 struct object_buffer *obj_buffer = BUFFER(buffers[i]);
1433 switch (obj_buffer->type) {
1434 case VAEncSequenceParameterBufferType:
1435 vaStatus = i965_encoder_render_squence_parameter_buffer(ctx, obj_context, obj_buffer);
1438 case VAEncPictureParameterBufferType:
1439 vaStatus = i965_encoder_render_picture_parameter_buffer(ctx, obj_context, obj_buffer);
1442 case VAEncSliceParameterBufferType:
1443 vaStatus = i965_encoder_render_slice_parameter_buffer(ctx, obj_context, obj_buffer);
1446 case VAPictureParameterBufferType:
1447 vaStatus = i965_encoder_render_picture_control_buffer(ctx, obj_context, obj_buffer);
1450 case VAQMatrixBufferType:
1451 vaStatus = i965_encoder_render_qmatrix_buffer(ctx, obj_context, obj_buffer);
1454 case VAIQMatrixBufferType:
1455 vaStatus = i965_encoder_render_iqmatrix_buffer(ctx, obj_context, obj_buffer);
1459 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
1468 i965_RenderPicture(VADriverContextP ctx,
1469 VAContextID context,
1470 VABufferID *buffers,
1473 struct i965_driver_data *i965 = i965_driver_data(ctx);
1474 struct object_context *obj_context;
1475 struct object_config *obj_config;
1477 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
1479 obj_context = CONTEXT(context);
1480 assert(obj_context);
1482 config = obj_context->config_id;
1483 obj_config = CONFIG(config);
1486 if (VAEntrypointEncSlice == obj_config->entrypoint ){
1487 vaStatus = i965_encoder_render_picture(ctx, context, buffers, num_buffers);
1489 vaStatus = i965_decoder_render_picture(ctx, context, buffers, num_buffers);
1496 i965_EndPicture(VADriverContextP ctx, VAContextID context)
1498 struct i965_driver_data *i965 = i965_driver_data(ctx);
1499 struct object_context *obj_context = CONTEXT(context);
1500 struct object_config *obj_config;
1504 assert(obj_context);
1505 config = obj_context->config_id;
1506 obj_config = CONFIG(config);
1509 if (obj_context->codec_type == CODEC_ENC) {
1510 assert(VAEntrypointEncSlice == obj_config->entrypoint);
1512 assert(obj_context->codec_state.enc.pic_param);
1513 assert(obj_context->codec_state.enc.seq_param);
1514 assert(obj_context->codec_state.enc.num_slice_params >= 1);
1516 assert(obj_context->codec_state.dec.pic_param);
1517 assert(obj_context->codec_state.dec.num_slice_params >= 1);
1518 assert(obj_context->codec_state.dec.num_slice_datas >= 1);
1519 assert(obj_context->codec_state.dec.num_slice_params == obj_context->codec_state.dec.num_slice_datas);
1522 assert(obj_context->hw_context->run);
1523 obj_context->hw_context->run(ctx, obj_config->profile, &obj_context->codec_state, obj_context->hw_context);
1525 if (obj_context->codec_type == CODEC_ENC) {
1526 obj_context->codec_state.enc.current_render_target = VA_INVALID_SURFACE;
1527 obj_context->codec_state.enc.num_slice_params = 0;
1528 i965_release_buffer_store(&obj_context->codec_state.enc.pic_param);
1529 i965_release_buffer_store(&obj_context->codec_state.enc.seq_param);
1531 for (i = 0; i < obj_context->codec_state.enc.num_slice_params; i++) {
1532 i965_release_buffer_store(&obj_context->codec_state.enc.slice_params[i]);
1535 obj_context->codec_state.dec.current_render_target = -1;
1536 obj_context->codec_state.dec.num_slice_params = 0;
1537 obj_context->codec_state.dec.num_slice_datas = 0;
1538 i965_release_buffer_store(&obj_context->codec_state.dec.pic_param);
1539 i965_release_buffer_store(&obj_context->codec_state.dec.iq_matrix);
1540 i965_release_buffer_store(&obj_context->codec_state.dec.bit_plane);
1542 for (i = 0; i < obj_context->codec_state.dec.num_slice_params; i++) {
1543 i965_release_buffer_store(&obj_context->codec_state.dec.slice_params[i]);
1544 i965_release_buffer_store(&obj_context->codec_state.dec.slice_datas[i]);
1548 return VA_STATUS_SUCCESS;
1552 i965_SyncSurface(VADriverContextP ctx,
1553 VASurfaceID render_target)
1555 struct i965_driver_data *i965 = i965_driver_data(ctx);
1556 struct object_surface *obj_surface = SURFACE(render_target);
1558 assert(obj_surface);
1560 return VA_STATUS_SUCCESS;
1564 i965_QuerySurfaceStatus(VADriverContextP ctx,
1565 VASurfaceID render_target,
1566 VASurfaceStatus *status) /* out */
1568 struct i965_driver_data *i965 = i965_driver_data(ctx);
1569 struct object_surface *obj_surface = SURFACE(render_target);
1571 assert(obj_surface);
1573 /* Usually GEM will handle synchronization with the graphics hardware */
1575 if (obj_surface->bo) {
1576 dri_bo_map(obj_surface->bo, 0);
1577 dri_bo_unmap(obj_surface->bo);
1581 *status = obj_surface->status;
1583 return VA_STATUS_SUCCESS;
/*
 * Query display attributes.
 * The caller must provide an "attr_list" array that can hold at
 * least vaMaxNumDisplayAttributes() entries. The actual number of attributes
 * returned in "attr_list" is returned in "num_attributes".
 */
1594 i965_QueryDisplayAttributes(VADriverContextP ctx,
1595 VADisplayAttribute *attr_list, /* out */
1596 int *num_attributes) /* out */
1599 *num_attributes = 0;
1601 return VA_STATUS_SUCCESS;
/*
 * Get display attributes.
 * This function returns the current attribute values in "attr_list".
 * Only attributes returned with VA_DISPLAY_ATTRIB_GETTABLE set in the "flags" field
 * from vaQueryDisplayAttributes() can have their values retrieved.
 */
1611 i965_GetDisplayAttributes(VADriverContextP ctx,
1612 VADisplayAttribute *attr_list, /* in/out */
1616 return VA_STATUS_ERROR_UNIMPLEMENTED;
/*
 * Set display attributes.
 * Only attributes returned with VA_DISPLAY_ATTRIB_SETTABLE set in the "flags" field
 * from vaQueryDisplayAttributes() can be set. If the attribute is not settable or
 * the value is out of range, the function returns VA_STATUS_ERROR_ATTR_NOT_SUPPORTED.
 */
1626 i965_SetDisplayAttributes(VADriverContextP ctx,
1627 VADisplayAttribute *attr_list,
1631 return VA_STATUS_ERROR_UNIMPLEMENTED;
1635 i965_DbgCopySurfaceToBuffer(VADriverContextP ctx,
1636 VASurfaceID surface,
1637 void **buffer, /* out */
1638 unsigned int *stride) /* out */
1641 return VA_STATUS_ERROR_UNIMPLEMENTED;
1645 i965_Init(VADriverContextP ctx)
1647 struct i965_driver_data *i965 = i965_driver_data(ctx);
1649 if (intel_driver_init(ctx) == False)
1650 return VA_STATUS_ERROR_UNKNOWN;
1652 if (IS_G4X(i965->intel.device_id))
1653 i965->codec_info = &g4x_hw_codec_info;
1654 else if (IS_IRONLAKE(i965->intel.device_id))
1655 i965->codec_info = &ironlake_hw_codec_info;
1656 else if (IS_GEN6(i965->intel.device_id))
1657 i965->codec_info = &gen6_hw_codec_info;
1658 else if (IS_GEN7(i965->intel.device_id))
1659 i965->codec_info = &gen7_hw_codec_info;
1661 return VA_STATUS_ERROR_UNKNOWN;
1663 if (i965_post_processing_init(ctx) == False)
1664 return VA_STATUS_ERROR_UNKNOWN;
1666 if (i965_render_init(ctx) == False)
1667 return VA_STATUS_ERROR_UNKNOWN;
1669 _i965InitMutex(&i965->render_mutex);
1670 i965->batch = intel_batchbuffer_new(&i965->intel, I915_EXEC_RENDER);
1672 return VA_STATUS_SUCCESS;
1676 i965_destroy_heap(struct object_heap *heap,
1677 void (*func)(struct object_heap *heap, struct object_base *object))
1679 struct object_base *object;
1680 object_heap_iterator iter;
1682 object = object_heap_first(heap, &iter);
1688 object = object_heap_next(heap, &iter);
1691 object_heap_destroy(heap);
1696 i965_DestroyImage(VADriverContextP ctx, VAImageID image);
/*
 * vaCreateImage: allocate a client-visible VAImage of the requested fourcc
 * and size, plus the backing VA buffer.
 * NOTE(review): this chunk is line-elided — the 'width'/'height' parameters,
 * braces, switch 'break's and the error label are not visible.  Code left
 * byte-identical; comments only.
 */
1699 i965_CreateImage(VADriverContextP ctx,
1700                  VAImageFormat *format,
1703                  VAImage *out_image)        /* out */
1705     struct i965_driver_data *i965 = i965_driver_data(ctx);
1706     struct object_image *obj_image;
1707     VAStatus va_status = VA_STATUS_ERROR_OPERATION_FAILED;
1709     unsigned int width2, height2, size2, size;
/* Report invalid IDs to the caller until everything below succeeds. */
1711     out_image->image_id = VA_INVALID_ID;
1712     out_image->buf = VA_INVALID_ID;
1714     image_id = NEW_IMAGE_ID();
1715     if (image_id == VA_INVALID_ID)
1716         return VA_STATUS_ERROR_ALLOCATION_FAILED;
1718     obj_image = IMAGE(image_id);
1720         return VA_STATUS_ERROR_ALLOCATION_FAILED;
1721     obj_image->bo = NULL;
1722     obj_image->palette = NULL;
1723     obj_image->derived_surface = VA_INVALID_ID;
1725     VAImage * const image = &obj_image->image;
1726     image->image_id = image_id;
1727     image->buf = VA_INVALID_ID;
/* Luma plane size and half-resolution chroma plane size (rounded up). */
1729     size    = width * height;
1730     width2  = (width  + 1) / 2;
1731     height2 = (height + 1) / 2;
1732     size2   = width2 * height2;
1734     image->num_palette_entries = 0;
1735     image->entry_bytes         = 0;
1736     memset(image->component_order, 0, sizeof(image->component_order));
/* Per-format plane layout. */
1738     switch (format->fourcc) {
/* Paletted 4-bit index + 4-bit alpha formats: one plane, 16-entry RGB palette. */
1739     case VA_FOURCC('I','A','4','4'):
1740     case VA_FOURCC('A','I','4','4'):
1741         image->num_planes = 1;
1742         image->pitches[0] = width;
1743         image->offsets[0] = 0;
1744         image->data_size  = image->offsets[0] + image->pitches[0] * height;
1745         image->num_palette_entries = 16;
1746         image->entry_bytes         = 3;
1747         image->component_order[0]  = 'R';
1748         image->component_order[1]  = 'G';
1749         image->component_order[2]  = 'B';
/* 32-bit packed RGB variants: single plane, 4 bytes per pixel. */
1751     case VA_FOURCC('A','R','G','B'):
1752     case VA_FOURCC('A','B','G','R'):
1753     case VA_FOURCC('B','G','R','A'):
1754     case VA_FOURCC('R','G','B','A'):
1755         image->num_planes = 1;
1756         image->pitches[0] = width * 4;
1757         image->offsets[0] = 0;
1758         image->data_size  = image->offsets[0] + image->pitches[0] * height;
/* Planar YUV 4:2:0, V plane before U plane. */
1760     case VA_FOURCC('Y','V','1','2'):
1761         image->num_planes = 3;
1762         image->pitches[0] = width;
1763         image->offsets[0] = 0;
1764         image->pitches[1] = width2;
1765         image->offsets[1] = size + size2;
1766         image->pitches[2] = width2;
1767         image->offsets[2] = size;
1768         image->data_size  = size + 2 * size2;
/* Planar YUV 4:2:0, U plane before V plane. */
1770     case VA_FOURCC('I','4','2','0'):
1771         image->num_planes = 3;
1772         image->pitches[0] = width;
1773         image->offsets[0] = 0;
1774         image->pitches[1] = width2;
1775         image->offsets[1] = size;
1776         image->pitches[2] = width2;
1777         image->offsets[2] = size + size2;
1778         image->data_size  = size + 2 * size2;
/* Semi-planar YUV 4:2:0: luma plane + interleaved UV plane. */
1780     case VA_FOURCC('N','V','1','2'):
1781         image->num_planes = 2;
1782         image->pitches[0] = width;
1783         image->offsets[0] = 0;
1784         image->pitches[1] = width;
1785         image->offsets[1] = size;
1786         image->data_size  = size + 2 * size2;
/* Allocate the VA buffer that backs the image data. */
1792     va_status = i965_CreateBuffer(ctx, 0, VAImageBufferType,
1793                                   image->data_size, 1, NULL, &image->buf);
1794     if (va_status != VA_STATUS_SUCCESS)
/* Keep our own reference on the buffer's bo for the image lifetime. */
1797     obj_image->bo = BUFFER(image->buf)->buffer_store->bo;
1798     dri_bo_reference(obj_image->bo);
1800     if (image->num_palette_entries > 0 && image->entry_bytes > 0) {
/* NOTE(review): sizeof(obj_image->palette) is the size of the POINTER, not
 * of an element — over-allocates on LP64 but is otherwise harmless; should
 * be sizeof(*obj_image->palette). */
1801         obj_image->palette = malloc(image->num_palette_entries * sizeof(obj_image->palette));
1802         if (!obj_image->palette)
1806     image->image_id = image_id;
1807     image->format   = *format;
1808     image->width    = width;
1809     image->height   = height;
1811     *out_image = *image;
1812     return VA_STATUS_SUCCESS;
/* Error path: tear down the partially constructed image. */
1815     i965_DestroyImage(ctx, image_id);
/*
 * Lazily allocate the GEM buffer object backing a surface and record the
 * surface fourcc.  No-op if the bo already exists.
 * NOTE(review): this chunk is line-elided — the tiled/linear branch
 * structure and several call arguments are not visible.  Code left
 * byte-identical; comments only.
 */
1820 i965_check_alloc_surface_bo(VADriverContextP ctx,
1821                             struct object_surface *obj_surface,
1823                             unsigned int fourcc)
1825     struct i965_driver_data *i965 = i965_driver_data(ctx);
/* Already backed by a bo: nothing to allocate. */
1827     if (obj_surface->bo)
/* Tiled path: libdrm chooses the pitch for the Y-tiled layout; the height
 * covers the luma plane plus half-height chroma (4:2:0). */
1831         uint32_t tiling_mode = I915_TILING_Y; /* always uses Y-tiled format */
1832         unsigned long pitch;
1834         obj_surface->bo = drm_intel_bo_alloc_tiled(i965->intel.bufmgr,
1837                                                    obj_surface->height + obj_surface->height / 2,
1842         assert(tiling_mode == I915_TILING_Y);
1843         assert(pitch == obj_surface->width);
/* Linear path: plain bo allocation. */
1845         obj_surface->bo = dri_bo_alloc(i965->intel.bufmgr,
1851     obj_surface->fourcc = fourcc;
1852     assert(obj_surface->bo);
/*
 * vaDeriveImage: expose a surface's backing bo directly as a VAImage so
 * clients can map it without a copy.  The image layout mirrors the
 * surface's native layout (YV12 before the render state is initialized,
 * then NV12 or I420 depending on interleaved_uv).
 * NOTE(review): this chunk is line-elided (braces, some guards and the
 * error label are not visible).  Code left byte-identical; comments only.
 */
1855 VAStatus i965_DeriveImage(VADriverContextP ctx,
1856                           VASurfaceID surface,
1857                           VAImage *out_image)        /* out */
1859     struct i965_driver_data *i965 = i965_driver_data(ctx);
1860     struct i965_render_state *render_state = &i965->render_state;
1861     struct object_image *obj_image;
1862     struct object_surface *obj_surface;
1864     unsigned int w_pitch, h_pitch;
1865     unsigned int data_size;
1868     out_image->image_id = VA_INVALID_ID;
1869     obj_surface = SURFACE(surface);
1872         return VA_STATUS_ERROR_INVALID_SURFACE;
/* Padded pitches come from the surface; data size from the visible area
 * (luma + two half-resolution chroma planes, 4:2:0). */
1874     w_pitch = obj_surface->width;
1875     h_pitch = obj_surface->height;
1876     data_size = obj_surface->orig_width * obj_surface->orig_height +
1877         2 * (((obj_surface->orig_width + 1) / 2) * ((obj_surface->orig_height + 1) / 2));
1879     image_id = NEW_IMAGE_ID();
1881     if (image_id == VA_INVALID_ID)
1882         return VA_STATUS_ERROR_ALLOCATION_FAILED;
1884     obj_image = IMAGE(image_id);
1887         return VA_STATUS_ERROR_ALLOCATION_FAILED;
1889     obj_image->bo = NULL;
1890     obj_image->palette = NULL;
1891     obj_image->derived_surface = VA_INVALID_ID;
1893     VAImage * const image = &obj_image->image;
1895     memset(image, 0, sizeof(*image));
1896     image->image_id = image_id;
1897     image->buf = VA_INVALID_ID;
1898     image->num_palette_entries = 0;
1899     image->entry_bytes = 0;
1900     image->width = obj_surface->orig_width;
1901     image->height = obj_surface->orig_height;
1902     image->data_size = data_size;
/* Render state not initialized yet: surfaces are laid out as YV12. */
1904     if (!render_state->inited) {
1905         image->format.fourcc = VA_FOURCC('Y','V','1','2');
1906         image->format.byte_order = VA_LSB_FIRST;
1907         image->format.bits_per_pixel = 12;
1908         image->num_planes = 3;
1909         image->pitches[0] = w_pitch;
1910         image->offsets[0] = 0;
1911         image->pitches[1] = w_pitch / 2;
1912         image->offsets[1] = w_pitch * h_pitch;
1913         image->pitches[2] = w_pitch / 2;
1914         image->offsets[2] = w_pitch * h_pitch + (w_pitch / 2) * (h_pitch / 2);
/* Interleaved UV: NV12 semi-planar layout. */
1916         if (render_state->interleaved_uv) {
1917             image->format.fourcc = VA_FOURCC('N','V','1','2');
1918             image->format.byte_order = VA_LSB_FIRST;
1919             image->format.bits_per_pixel = 12;
1920             image->num_planes = 2;
1921             image->pitches[0] = w_pitch;
1922             image->offsets[0] = 0;
1923             image->pitches[1] = w_pitch;
1924             image->offsets[1] = w_pitch * h_pitch;
/* Otherwise: fully planar I420 layout. */
1926             image->format.fourcc = VA_FOURCC('I','4','2','0');
1927             image->format.byte_order = VA_LSB_FIRST;
1928             image->format.bits_per_pixel = 12;
1929             image->num_planes = 3;
1930             image->pitches[0] = w_pitch;
1931             image->offsets[0] = 0;
1932             image->pitches[1] = w_pitch / 2;
1933             image->offsets[1] = w_pitch * h_pitch;
1934             image->pitches[2] = w_pitch / 2;
1935             image->offsets[2] = w_pitch * h_pitch + (w_pitch / 2) * (h_pitch / 2);
/* Ensure the surface bo exists, then wrap it in a VA buffer (no copy). */
1939     i965_check_alloc_surface_bo(ctx, obj_surface, HAS_TILED_SURFACE(i965), image->format.fourcc);
1940     va_status = i965_create_buffer_internal(ctx, 0, VAImageBufferType,
1941                                             obj_surface->size, 1, NULL, obj_surface->bo, &image->buf);
1942     if (va_status != VA_STATUS_SUCCESS)
1945     obj_image->bo = BUFFER(image->buf)->buffer_store->bo;
1946     dri_bo_reference(obj_image->bo);
1948     if (image->num_palette_entries > 0 && image->entry_bytes > 0) {
/* NOTE(review): sizeof(obj_image->palette) is pointer size, not element
 * size — over-allocates on LP64; should be sizeof(*obj_image->palette). */
1949         obj_image->palette = malloc(image->num_palette_entries * sizeof(obj_image->palette));
1950         if (!obj_image->palette) {
1951             va_status = VA_STATUS_ERROR_ALLOCATION_FAILED;
1956     *out_image = *image;
/* Mark the surface so its bo is not freed while the image aliases it. */
1957     obj_surface->flags |= SURFACE_DERIVED;
1958     obj_image->derived_surface = surface;
1960     return VA_STATUS_SUCCESS;
/* Error path: tear down the partially constructed image. */
1963     i965_DestroyImage(ctx, image_id);
/* Heap destructor callback for image objects: return the slot to the heap. */
static void
i965_destroy_image(struct object_heap *heap, struct object_base *obj)
{
    object_heap_free(heap, obj);
}
1975 i965_DestroyImage(VADriverContextP ctx, VAImageID image)
1977 struct i965_driver_data *i965 = i965_driver_data(ctx);
1978 struct object_image *obj_image = IMAGE(image);
1979 struct object_surface *obj_surface;
1982 return VA_STATUS_SUCCESS;
1984 dri_bo_unreference(obj_image->bo);
1985 obj_image->bo = NULL;
1987 if (obj_image->image.buf != VA_INVALID_ID) {
1988 i965_DestroyBuffer(ctx, obj_image->image.buf);
1989 obj_image->image.buf = VA_INVALID_ID;
1992 if (obj_image->palette) {
1993 free(obj_image->palette);
1994 obj_image->palette = NULL;
1997 obj_surface = SURFACE(obj_image->derived_surface);
2000 obj_surface->flags &= ~SURFACE_DERIVED;
2003 i965_destroy_image(&i965->image_heap, (struct object_base *)obj_image);
2005 return VA_STATUS_SUCCESS;
/*
 * Set the image palette. "palette" is a pointer to an array holding the
 * palette data. The size of the array is num_palette_entries * entry_bytes
 * in size. The order of the components in the palette is described by the
 * component_order in the VASubpicture struct.
 */
2014 i965_SetImagePalette(VADriverContextP ctx,
2016 unsigned char *palette)
2018 struct i965_driver_data *i965 = i965_driver_data(ctx);
2021 struct object_image *obj_image = IMAGE(image);
2023 return VA_STATUS_ERROR_INVALID_IMAGE;
2025 if (!obj_image->palette)
2026 return VA_STATUS_ERROR_ALLOCATION_FAILED; /* XXX: unpaletted/error */
2028 for (i = 0; i < obj_image->image.num_palette_entries; i++)
2029 obj_image->palette[i] = (((unsigned int)palette[3*i + 0] << 16) |
2030 ((unsigned int)palette[3*i + 1] << 8) |
2031 (unsigned int)palette[3*i + 2]);
2032 return VA_STATUS_SUCCESS;
/*
 * Copy a rectangular picture region row by row.
 *
 * dst, dst_stride: destination base pointer and bytes between rows.
 * src, src_stride: source base pointer and bytes between rows.
 * len:             bytes to copy per row.
 * height:          number of rows.
 *
 * Fix: the visible loop never advanced dst/src by their strides, so each
 * iteration re-copied the same source row over the same destination row.
 */
static void
memcpy_pic(uint8_t *dst, unsigned int dst_stride,
           const uint8_t *src, unsigned int src_stride,
           unsigned int len, unsigned int height)
{
    unsigned int i;

    for (i = 0; i < height; i++) {
        memcpy(dst, src, len);
        dst += dst_stride;
        src += src_stride;
    }
}
/*
 * Copy the requested rectangle of an I420 surface into a planar (I420 or
 * YV12) destination image, swapping the U/V plane order when the image
 * fourcc differs from the surface fourcc.
 * NOTE(review): this chunk is line-elided (braces, 'Y' index constant and
 * early-return bodies are not visible).  Code left byte-identical;
 * comments only.
 */
2050 get_image_i420(struct object_image *obj_image, uint8_t *image_data,
2051                struct object_surface *obj_surface,
2052                const VARectangle *rect)
2054     uint8_t *dst[3], *src[3];
/* Same fourcc: planes map 1:1; different fourcc (I420 vs YV12): swap U/V. */
2056     const int U = obj_image->image.format.fourcc == obj_surface->fourcc ? 1 : 2;
2057     const int V = obj_image->image.format.fourcc == obj_surface->fourcc ? 2 : 1;
2058     unsigned int tiling, swizzle;
2060     if (!obj_surface->bo)
2063     assert(obj_surface->fourcc);
2064     dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
/* Tiled bos must go through the GTT so the CPU sees a linear view. */
2066     if (tiling != I915_TILING_NONE)
2067         drm_intel_gem_bo_map_gtt(obj_surface->bo);
2069         dri_bo_map(obj_surface->bo, 0);
2071     if (!obj_surface->bo->virtual)
2074     /* Dest VA image has either I420 or YV12 format.
2075        Source VA surface always has I420 format */
2076     dst[Y] = image_data + obj_image->image.offsets[Y];
2077     src[0] = (uint8_t *)obj_surface->bo->virtual;
2078     dst[U] = image_data + obj_image->image.offsets[U];
2079     src[1] = src[0] + obj_surface->width * obj_surface->height;
2080     dst[V] = image_data + obj_image->image.offsets[V];
2081     src[2] = src[1] + (obj_surface->width / 2) * (obj_surface->height / 2);
/* Luma: full-resolution rectangle. */
2084     dst[Y] += rect->y * obj_image->image.pitches[Y] + rect->x;
2085     src[0] += rect->y * obj_surface->width + rect->x;
2086     memcpy_pic(dst[Y], obj_image->image.pitches[Y],
2087                src[0], obj_surface->width,
2088                rect->width, rect->height);
/* Chroma U: half resolution in both dimensions. */
2091     dst[U] += (rect->y / 2) * obj_image->image.pitches[U] + rect->x / 2;
2092     src[1] += (rect->y / 2) * obj_surface->width / 2 + rect->x / 2;
2093     memcpy_pic(dst[U], obj_image->image.pitches[U],
2094                src[1], obj_surface->width / 2,
2095                rect->width / 2, rect->height / 2);
/* Chroma V: half resolution in both dimensions. */
2098     dst[V] += (rect->y / 2) * obj_image->image.pitches[V] + rect->x / 2;
2099     src[2] += (rect->y / 2) * obj_surface->width / 2 + rect->x / 2;
2100     memcpy_pic(dst[V], obj_image->image.pitches[V],
2101                src[2], obj_surface->width / 2,
2102                rect->width / 2, rect->height / 2);
/* Balance the map done above with the matching unmap. */
2104     if (tiling != I915_TILING_NONE)
2105         drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
2107         dri_bo_unmap(obj_surface->bo);
/*
 * Copy the requested rectangle of an NV12 surface into an NV12 destination
 * image (luma plane plus interleaved UV plane).
 * NOTE(review): this chunk is line-elided (braces and early-return bodies
 * are not visible).  Code left byte-identical; comments only.
 */
2111 get_image_nv12(struct object_image *obj_image, uint8_t *image_data,
2112                struct object_surface *obj_surface,
2113                const VARectangle *rect)
2115     uint8_t *dst[2], *src[2];
2116     unsigned int tiling, swizzle;
2118     if (!obj_surface->bo)
2121     assert(obj_surface->fourcc);
2122     dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
/* Tiled bos must go through the GTT so the CPU sees a linear view. */
2124     if (tiling != I915_TILING_NONE)
2125         drm_intel_gem_bo_map_gtt(obj_surface->bo);
2127         dri_bo_map(obj_surface->bo, 0);
2129     if (!obj_surface->bo->virtual)
2132     /* Both dest VA image and source surface have NV12 format */
2133     dst[0] = image_data + obj_image->image.offsets[0];
2134     src[0] = (uint8_t *)obj_surface->bo->virtual;
2135     dst[1] = image_data + obj_image->image.offsets[1];
2136     src[1] = src[0] + obj_surface->width * obj_surface->height;
/* Luma: full-resolution rectangle. */
2139     dst[0] += rect->y * obj_image->image.pitches[0] + rect->x;
2140     src[0] += rect->y * obj_surface->width + rect->x;
2141     memcpy_pic(dst[0], obj_image->image.pitches[0],
2142                src[0], obj_surface->width,
2143                rect->width, rect->height);
/* Interleaved UV: half height, full pitch; x is rounded down to an even
 * byte offset so U/V pairs stay aligned. */
2146     dst[1] += (rect->y / 2) * obj_image->image.pitches[1] + (rect->x & -2);
2147     src[1] += (rect->y / 2) * obj_surface->width + (rect->x & -2);
2148     memcpy_pic(dst[1], obj_image->image.pitches[1],
2149                src[1], obj_surface->width,
2150                rect->width, rect->height / 2);
/* Balance the map done above with the matching unmap. */
2152     if (tiling != I915_TILING_NONE)
2153         drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
2155         dri_bo_unmap(obj_surface->bo);
/*
 * vaGetImage: validate the surface/image pair and the requested rectangle,
 * map the image buffer, and copy the region in the surface's native layout.
 * NOTE(review): this chunk is line-elided — the 'y'/'image' parameters,
 * braces, the fourcc-match check, rect x/y/width assignments, switch
 * 'break's and the 'operation_failed' label are not visible.  Code left
 * byte-identical; comments only.
 */
2159 i965_GetImage(VADriverContextP ctx,
2160               VASurfaceID surface,
2161               int x,   /* coordinates of the upper left source pixel */
2163               unsigned int width,      /* width and height of the region */
2164               unsigned int height,
2167     struct i965_driver_data *i965 = i965_driver_data(ctx);
2168     struct i965_render_state *render_state = &i965->render_state;
2170     struct object_surface *obj_surface = SURFACE(surface);
2172         return VA_STATUS_ERROR_INVALID_SURFACE;
2174     struct object_image *obj_image = IMAGE(image);
2176         return VA_STATUS_ERROR_INVALID_IMAGE;
2179         return VA_STATUS_ERROR_INVALID_PARAMETER;
/* The rectangle must fit inside both the surface and the image. */
2180     if (x + width > obj_surface->orig_width ||
2181         y + height > obj_surface->orig_height)
2182         return VA_STATUS_ERROR_INVALID_PARAMETER;
2183     if (x + width > obj_image->image.width ||
2184         y + height > obj_image->image.height)
2185         return VA_STATUS_ERROR_INVALID_PARAMETER;
2188     void *image_data = NULL;
2190     va_status = i965_MapBuffer(ctx, obj_image->image.buf, &image_data);
2191     if (va_status != VA_STATUS_SUCCESS)
2198     rect.height = height;
/* Copy path selected by the image fourcc; the surface's native layout
 * (interleaved_uv) must agree with it. */
2200     switch (obj_image->image.format.fourcc) {
2201     case VA_FOURCC('Y','V','1','2'):
2202     case VA_FOURCC('I','4','2','0'):
2203         /* I420 is native format for MPEG-2 decoded surfaces */
2204         if (render_state->interleaved_uv)
2205             goto operation_failed;
2206         get_image_i420(obj_image, image_data, obj_surface, &rect);
2208     case VA_FOURCC('N','V','1','2'):
2209         /* NV12 is native format for H.264 decoded surfaces */
2210         if (!render_state->interleaved_uv)
2211             goto operation_failed;
2212         get_image_nv12(obj_image, image_data, obj_surface, &rect);
2216         va_status = VA_STATUS_ERROR_OPERATION_FAILED;
2220     i965_UnmapBuffer(ctx, obj_image->image.buf);
/*
 * vaPutSurface: blit the decoded surface (and any subpicture) onto an X
 * drawable through DRI2, then swap buffers.  Only DRI2 is supported.
 * NOTE(review): this chunk is line-elided — several parameters (srcx/srcy,
 * destx/desty), braces, locals (i, ret, pp_flag) and some rect assignments
 * are not visible.  Code left byte-identical; comments only.
 */
2225 i965_PutSurface(VADriverContextP ctx,
2226                 VASurfaceID surface,
2227                 void *draw, /* X Drawable */
2230                 unsigned short srcw,
2231                 unsigned short srch,
2234                 unsigned short destw,
2235                 unsigned short desth,
2236                 VARectangle *cliprects, /* client supplied clip list */
2237                 unsigned int number_cliprects, /* number of clip rects in the clip list */
2238                 unsigned int flags) /* de-interlacing flags */
2240     struct i965_driver_data *i965 = i965_driver_data(ctx);
2241     struct dri_state *dri_state = (struct dri_state *)ctx->dri_state;
2242     struct i965_render_state *render_state = &i965->render_state;
2243     struct dri_drawable *dri_drawable;
2244     union dri_buffer *buffer;
2245     struct intel_region *dest_region;
2246     struct object_surface *obj_surface;
2247     VARectangle src_rect, dst_rect;
2250     Bool new_region = False;
2253     /* Currently don't support DRI1 */
2254     if (dri_state->driConnectedFlag != VA_DRI2)
2255         return VA_STATUS_ERROR_UNKNOWN;
2257     /* Some broken sources such as H.264 conformance case FM2_SVA_C
/* ...may render into surfaces that were never decoded; silently succeed. */
2260     obj_surface = SURFACE(surface);
2261     if (!obj_surface || !obj_surface->bo)
2262         return VA_STATUS_SUCCESS;
/* Rendering and the shared draw region are protected by the render mutex. */
2264     _i965LockMutex(&i965->render_mutex);
2266     dri_drawable = dri_get_drawable(ctx, (Drawable)draw);
2267     assert(dri_drawable);
2269     buffer = dri_get_rendering_buffer(ctx, dri_drawable);
2272     dest_region = render_state->draw_region;
/* If the cached draw region no longer matches the DRI2 back buffer
 * (flink name changed), rebuild it from the current buffer. */
2275         assert(dest_region->bo);
2276         dri_bo_flink(dest_region->bo, &name);
2278         if (buffer->dri2.name != name) {
2280             dri_bo_unreference(dest_region->bo);
2283         dest_region = (struct intel_region *)calloc(1, sizeof(*dest_region));
2284         assert(dest_region);
2285         render_state->draw_region = dest_region;
2290         dest_region->x = dri_drawable->x;
2291         dest_region->y = dri_drawable->y;
2292         dest_region->width = dri_drawable->width;
2293         dest_region->height = dri_drawable->height;
2294         dest_region->cpp = buffer->dri2.cpp;
2295         dest_region->pitch = buffer->dri2.pitch;
2297         dest_region->bo = intel_bo_gem_create_from_name(i965->intel.bufmgr, "rendering buffer", buffer->dri2.name);
2298         assert(dest_region->bo);
2300         ret = dri_bo_get_tiling(dest_region->bo, &(dest_region->tiling), &(dest_region->swizzle));
/* Select post-processing: AVS scaling and/or deinterlacing. */
2304     if ((flags & VA_FILTER_SCALING_MASK) == VA_FILTER_SCALING_NL_ANAMORPHIC)
2305         pp_flag |= I965_PP_FLAG_AVS;
2307     if (flags & (VA_BOTTOM_FIELD | VA_TOP_FIELD))
2308         pp_flag |= I965_PP_FLAG_DEINTERLACING;
2312     src_rect.width = srcw;
2313     src_rect.height = srch;
2317     dst_rect.width = destw;
2318     dst_rect.height = desth;
2320     intel_render_put_surface(ctx, surface, &src_rect, &dst_rect, pp_flag);
2322     if(obj_surface->subpic != VA_INVALID_ID) {
2323         intel_render_put_subpicture(ctx, surface, &src_rect, &dst_rect);
2326     dri_swap_buffer(ctx, dri_drawable);
2327     obj_surface->flags |= SURFACE_DISPLAYED;
/* Once displayed and no longer referenced by the decoder, the bo can be
 * released early to save memory. */
2329     if ((obj_surface->flags & SURFACE_ALL_MASK) == SURFACE_DISPLAYED) {
2330         dri_bo_unreference(obj_surface->bo);
2331         obj_surface->bo = NULL;
2332         obj_surface->flags &= ~SURFACE_REF_DIS_MASK;
2334         if (obj_surface->free_private_data)
2335             obj_surface->free_private_data(&obj_surface->private_data);
2338     _i965UnlockMutex(&i965->render_mutex);
2340     return VA_STATUS_SUCCESS;
/*
 * i965_Terminate: vaTerminate hook -- full driver teardown.
 * Frees the batch buffer and render mutex, shuts down the render,
 * post-processing and intel driver layers, destroys every object heap,
 * then releases the per-context driver data.
 *
 * NOTE(review): this extraction carries baked-in line numbers and has
 * lost lines (the return-type line and the function braces are not
 * visible) -- compare against upstream i965_drv_video.c before editing.
 */
2344 i965_Terminate(VADriverContextP ctx)
2346 struct i965_driver_data *i965 = i965_driver_data(ctx);
/* Release the command batch first; nothing below may submit GPU work. */
2349 intel_batchbuffer_free(i965->batch);
2351 _i965DestroyMutex(&i965->render_mutex);
/* Each subsystem returns False on failure. NOTE(review): bailing out
 * here leaves the object heaps and ctx->pDriverData allocated (leak on
 * the error path) -- confirm whether partial teardown is intentional. */
2353 if (i965_render_terminate(ctx) == False)
2354 return VA_STATUS_ERROR_UNKNOWN;
2356 if (i965_post_processing_terminate(ctx) == False)
2357 return VA_STATUS_ERROR_UNKNOWN;
2359 if (intel_driver_terminate(ctx) == False)
2360 return VA_STATUS_ERROR_UNKNOWN;
/* Destroy heaps child-objects-first (buffers/images/subpics before
 * surfaces, contexts before configs), each with its destructor. */
2362 i965_destroy_heap(&i965->buffer_heap, i965_destroy_buffer);
2363 i965_destroy_heap(&i965->image_heap, i965_destroy_image);
2364 i965_destroy_heap(&i965->subpic_heap, i965_destroy_subpic);
2365 i965_destroy_heap(&i965->surface_heap, i965_destroy_surface);
2366 i965_destroy_heap(&i965->context_heap, i965_destroy_context);
2367 i965_destroy_heap(&i965->config_heap, i965_destroy_config);
/* Driver data was calloc'ed in VA_DRIVER_INIT_FUNC; clear the pointer
 * so a stale ctx cannot reach freed memory. */
2369 free(ctx->pDriverData);
2370 ctx->pDriverData = NULL;
2372 return VA_STATUS_SUCCESS;
/*
 * i965_BufferInfo: vaBufferInfo hook -- reports the type, per-element
 * size and element count of an existing buffer object.
 * (The "VAStatus i965_BufferInfo(" signature line is missing from this
 * extraction; only the parameter list onward is visible.)
 */
2377 VADriverContextP ctx, /* in */
2378 VABufferID buf_id, /* in */
2379 VABufferType *type, /* out */
2380 unsigned int *size, /* out */
2381 unsigned int *num_elements /* out */
2384 struct i965_driver_data *i965 = NULL;
2385 struct object_buffer *obj_buffer = NULL;
2387 i965 = i965_driver_data(ctx);
/* BUFFER() looks buf_id up in i965->buffer_heap. NOTE(review):
 * obj_buffer is dereferenced below without a NULL check, so an invalid
 * buf_id crashes instead of returning an error status -- confirm and
 * consider validating before the dereferences. */
2388 obj_buffer = BUFFER(buf_id);
2390 *type = obj_buffer->type;
2391 *size = obj_buffer->size_element;
2392 *num_elements = obj_buffer->num_elements;
2394 return VA_STATUS_SUCCESS;
/*
 * i965_LockSurface: vaLockSurface hook -- exposes a surface's layout
 * (fourcc, per-plane strides/offsets) and its backing buffer by
 * deriving a VAImage from the surface and mapping its buffer.
 * On success the surface records the derived image id in
 * locked_image_id so i965_UnlockSurface() can undo the operation.
 *
 * NOTE(review): this extraction is missing lines (the signature line,
 * the tmpImage declaration, closing braces and the error-cleanup tail
 * after line 2471 are not visible) -- compare against upstream before
 * editing.
 */
2399 VADriverContextP ctx, /* in */
2400 VASurfaceID surface, /* in */
2401 unsigned int *fourcc, /* out */
2402 unsigned int *luma_stride, /* out */
2403 unsigned int *chroma_u_stride, /* out */
2404 unsigned int *chroma_v_stride, /* out */
2405 unsigned int *luma_offset, /* out */
2406 unsigned int *chroma_u_offset, /* out */
2407 unsigned int *chroma_v_offset, /* out */
2408 unsigned int *buffer_name, /* out */
2409 void **buffer /* out */
2412 VAStatus vaStatus = VA_STATUS_SUCCESS;
2413 struct i965_driver_data *i965 = i965_driver_data(ctx);
2414 struct object_surface *obj_surface = NULL;
/* NOTE(review): fourcc and buffer are written later but, unlike the
 * other out-params, are not asserted here -- confirm whether that
 * asymmetry is deliberate. */
2418 assert(luma_stride);
2419 assert(chroma_u_stride);
2420 assert(chroma_v_stride);
2421 assert(luma_offset);
2422 assert(chroma_u_offset);
2423 assert(chroma_v_offset);
2424 assert(buffer_name);
/* Sentinel so the (not visible here) error path can tell whether a
 * derived image must be destroyed. */
2427 tmpImage.image_id = VA_INVALID_ID;
2429 obj_surface = SURFACE(surface);
2430 if (obj_surface == NULL) {
2431 // Surface is absent.
2432 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
2436 // Lock functionality is absent now.
2437 if (obj_surface->locked_image_id != VA_INVALID_ID) {
2438 // Surface is locked already.
2439 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
/* Derive a VAImage aliasing the surface's storage (argument lines not
 * visible in this extraction). */
2443 vaStatus = i965_DeriveImage(
2447 if (vaStatus != VA_STATUS_SUCCESS) {
/* Mark the surface locked by remembering which image backs the lock. */
2451 obj_surface->locked_image_id = tmpImage.image_id;
/* Map the derived image's buffer to obtain the CPU pointer for *buffer
 * (argument lines not visible in this extraction). */
2453 vaStatus = i965_MapBuffer(
2457 if (vaStatus != VA_STATUS_SUCCESS) {
/* Report the derived image's layout through the out-params.
 * Plane order: 0 = luma, 1 = chroma U, 2 = chroma V. */
2461 *fourcc = tmpImage.format.fourcc;
2462 *luma_offset = tmpImage.offsets[0];
2463 *luma_stride = tmpImage.pitches[0];
2464 *chroma_u_offset = tmpImage.offsets[1];
2465 *chroma_u_stride = tmpImage.pitches[1];
2466 *chroma_v_offset = tmpImage.offsets[2];
2467 *chroma_v_stride = tmpImage.pitches[2];
/* NOTE(review): stores a VABufferID into an unsigned int out-param --
 * confirm the widths match on all targets. */
2468 *buffer_name = tmpImage.buf;
/* Error-cleanup tail (destroying the derived image on failure) is not
 * visible past this point in the extraction. */
2471 if (vaStatus != VA_STATUS_SUCCESS) {
/*
 * i965_UnlockSurface: vaUnlockSurface hook -- undoes i965_LockSurface()
 * by unmapping and destroying the derived image recorded in
 * locked_image_id, then clearing the lock marker.
 *
 * NOTE(review): the signature line, closing braces and the code that
 * resets obj_surface->locked_image_id are not visible in this
 * extraction -- compare against upstream before editing.
 */
2480 VADriverContextP ctx, /* in */
2481 VASurfaceID surface /* in */
2484 VAStatus vaStatus = VA_STATUS_SUCCESS;
2485 struct i965_driver_data *i965 = i965_driver_data(ctx);
2486 struct object_image *locked_img = NULL;
2487 struct object_surface *obj_surface = NULL;
2489 obj_surface = SURFACE(surface);
/* Validate: surface must exist and must currently be locked. */
2491 if (obj_surface == NULL) {
2492 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER; // Surface is absent
2495 if (obj_surface->locked_image_id == VA_INVALID_ID) {
2496 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER; // Surface is not locked
/* The lock's backing image may have been destroyed behind our back;
 * treat that as an invalid-parameter error rather than crashing. */
2500 locked_img = IMAGE(obj_surface->locked_image_id);
2501 if (locked_img == NULL || (locked_img->image.image_id == VA_INVALID_ID)) {
2502 // Work image was deallocated before i965_UnlockSurface()
2503 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
/* Reverse order of LockSurface: unmap the buffer first, then destroy
 * the derived image. */
2507 vaStatus = i965_UnmapBuffer(
2509 locked_img->image.buf);
2510 if (vaStatus != VA_STATUS_SUCCESS) {
2514 vaStatus = i965_DestroyImage(
2516 locked_img->image.image_id);
2517 if (vaStatus != VA_STATUS_SUCCESS) {
/* Invalidate the cached id so a stale locked_img cannot be reused. */
2521 locked_img->image.image_id = VA_INVALID_ID;
/*
 * VA_DRIVER_INIT_FUNC: the libva driver entry point. Advertises the
 * driver's limits, fills the VADriverVTable dispatch table with the
 * i965_* implementations, allocates the per-context driver data,
 * initializes one object heap per VA object class, and hands off to
 * i965_Init() for hardware setup.
 *
 * NOTE(review): this extraction is missing lines (function braces, the
 * declaration of 'result', and the heap id-offset arguments to
 * object_heap_init are not visible) -- compare against upstream
 * i965_drv_video.c before editing.
 */
2528 VA_DRIVER_INIT_FUNC( VADriverContextP ctx )
2530 struct VADriverVTable * const vtable = ctx->vtable;
2531 struct i965_driver_data *i965;
/* Advertise version and capability limits to libva. */
2534 ctx->version_major = VA_MAJOR_VERSION;
2535 ctx->version_minor = VA_MINOR_VERSION;
2536 ctx->max_profiles = I965_MAX_PROFILES;
2537 ctx->max_entrypoints = I965_MAX_ENTRYPOINTS;
2538 ctx->max_attributes = I965_MAX_CONFIG_ATTRIBUTES;
2539 ctx->max_image_formats = I965_MAX_IMAGE_FORMATS;
2540 ctx->max_subpic_formats = I965_MAX_SUBPIC_FORMATS;
2541 ctx->max_display_attributes = I965_MAX_DISPLAY_ATTRIBUTES;
2542 ctx->str_vendor = I965_STR_VENDOR;
/* Populate the VA-API dispatch table. */
2544 vtable->vaTerminate = i965_Terminate;
2545 vtable->vaQueryConfigEntrypoints = i965_QueryConfigEntrypoints;
2546 vtable->vaQueryConfigProfiles = i965_QueryConfigProfiles;
/* NOTE(review): duplicate assignment -- vaQueryConfigEntrypoints was
 * already set two lines above. Harmless (same value) but the repeated
 * line should be removed. */
2547 vtable->vaQueryConfigEntrypoints = i965_QueryConfigEntrypoints;
2548 vtable->vaQueryConfigAttributes = i965_QueryConfigAttributes;
2549 vtable->vaCreateConfig = i965_CreateConfig;
2550 vtable->vaDestroyConfig = i965_DestroyConfig;
2551 vtable->vaGetConfigAttributes = i965_GetConfigAttributes;
2552 vtable->vaCreateSurfaces = i965_CreateSurfaces;
2553 vtable->vaDestroySurfaces = i965_DestroySurfaces;
2554 vtable->vaCreateContext = i965_CreateContext;
2555 vtable->vaDestroyContext = i965_DestroyContext;
2556 vtable->vaCreateBuffer = i965_CreateBuffer;
2557 vtable->vaBufferSetNumElements = i965_BufferSetNumElements;
2558 vtable->vaMapBuffer = i965_MapBuffer;
2559 vtable->vaUnmapBuffer = i965_UnmapBuffer;
2560 vtable->vaDestroyBuffer = i965_DestroyBuffer;
2561 vtable->vaBeginPicture = i965_BeginPicture;
2562 vtable->vaRenderPicture = i965_RenderPicture;
2563 vtable->vaEndPicture = i965_EndPicture;
2564 vtable->vaSyncSurface = i965_SyncSurface;
2565 vtable->vaQuerySurfaceStatus = i965_QuerySurfaceStatus;
2566 vtable->vaPutSurface = i965_PutSurface;
2567 vtable->vaQueryImageFormats = i965_QueryImageFormats;
2568 vtable->vaCreateImage = i965_CreateImage;
2569 vtable->vaDeriveImage = i965_DeriveImage;
2570 vtable->vaDestroyImage = i965_DestroyImage;
2571 vtable->vaSetImagePalette = i965_SetImagePalette;
2572 vtable->vaGetImage = i965_GetImage;
2573 vtable->vaPutImage = i965_PutImage;
2574 vtable->vaQuerySubpictureFormats = i965_QuerySubpictureFormats;
2575 vtable->vaCreateSubpicture = i965_CreateSubpicture;
2576 vtable->vaDestroySubpicture = i965_DestroySubpicture;
2577 vtable->vaSetSubpictureImage = i965_SetSubpictureImage;
2578 vtable->vaSetSubpictureChromakey = i965_SetSubpictureChromakey;
2579 vtable->vaSetSubpictureGlobalAlpha = i965_SetSubpictureGlobalAlpha;
2580 vtable->vaAssociateSubpicture = i965_AssociateSubpicture;
2581 vtable->vaDeassociateSubpicture = i965_DeassociateSubpicture;
2582 vtable->vaQueryDisplayAttributes = i965_QueryDisplayAttributes;
2583 vtable->vaGetDisplayAttributes = i965_GetDisplayAttributes;
2584 vtable->vaSetDisplayAttributes = i965_SetDisplayAttributes;
2585 vtable->vaBufferInfo = i965_BufferInfo;
2586 vtable->vaLockSurface = i965_LockSurface;
2587 vtable->vaUnlockSurface = i965_UnlockSurface;
2588 // vtable->vaDbgCopySurfaceToBuffer = i965_DbgCopySurfaceToBuffer;
/* Allocate zeroed per-context driver data; freed in i965_Terminate().
 * NOTE(review): the calloc result is used unchecked below -- any
 * NULL check that may exist between these lines is not visible in
 * this extraction; confirm the OOM policy. */
2590 i965 = (struct i965_driver_data *)calloc(1, sizeof(*i965));
2592 ctx->pDriverData = (void *)i965;
/* One object heap per VA object class; heap ids are offset by the
 * *_ID_OFFSET constants defined at the top of the file (the id-offset
 * argument lines are not visible in this extraction). */
2594 result = object_heap_init(&i965->config_heap,
2595 sizeof(struct object_config),
2597 assert(result == 0);
2599 result = object_heap_init(&i965->context_heap,
2600 sizeof(struct object_context),
2602 assert(result == 0);
2604 result = object_heap_init(&i965->surface_heap,
2605 sizeof(struct object_surface),
2607 assert(result == 0);
2609 result = object_heap_init(&i965->buffer_heap,
2610 sizeof(struct object_buffer),
2612 assert(result == 0);
2614 result = object_heap_init(&i965->image_heap,
2615 sizeof(struct object_image),
2617 assert(result == 0);
2619 result = object_heap_init(&i965->subpic_heap,
2620 sizeof(struct object_subpic),
2622 assert(result == 0);
/* Hardware/driver-state initialization happens in i965_Init(). */
2624 return i965_Init(ctx);