2 * Copyright © 2009 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
12 * The above copyright notice and this permission notice (including the
13 * next paragraph) shall be included in all copies or substantial portions
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 * Xiang Haihao <haihao.xiang@intel.com>
26 * Zou Nan hai <nanhai.zou@intel.com>
36 #include <va/va_dricommon.h>
38 #include "intel_driver.h"
39 #include "intel_memman.h"
40 #include "intel_batchbuffer.h"
41 #include "i965_defines.h"
42 #include "i965_drv_video.h"
/* Object-ID namespaces: each VA object type is allocated IDs in a distinct
 * numeric range so a raw ID can be attributed to its type when debugging. */
#define CONFIG_ID_OFFSET                0x01000000
#define CONTEXT_ID_OFFSET               0x02000000
#define SURFACE_ID_OFFSET               0x04000000
#define BUFFER_ID_OFFSET                0x08000000
#define IMAGE_ID_OFFSET                 0x0a000000
#define SUBPIC_ID_OFFSET                0x10000000
/* Per-generation capability predicates, evaluated against the PCI device id
 * and probed features stored in the driver data.  NOTE(review): the listing
 * these were recovered from dropped the closing lines of several macros; the
 * `(ctx)->intel.has_bsd` tails below follow the pattern of the surviving
 * HAS_MPEG2 definition — confirm against the driver headers. */
#define HAS_MPEG2(ctx)  (IS_G4X((ctx)->intel.device_id) ||      \
                         IS_IRONLAKE((ctx)->intel.device_id) || \
                         ((IS_GEN6((ctx)->intel.device_id) ||   \
                           IS_GEN7((ctx)->intel.device_id)) &&  \
                          (ctx)->intel.has_bsd))

#define HAS_H264(ctx)   ((IS_GEN7((ctx)->intel.device_id) ||    \
                          IS_GEN6((ctx)->intel.device_id) ||    \
                          IS_IRONLAKE((ctx)->intel.device_id)) && \
                         (ctx)->intel.has_bsd)

#define HAS_VC1(ctx)    ((IS_GEN7((ctx)->intel.device_id) ||    \
                          IS_GEN6((ctx)->intel.device_id)) &&   \
                         (ctx)->intel.has_bsd)

/* Tiled (NV12, interleaved-UV) surfaces are only used on GEN6/GEN7. */
#define HAS_TILED_SURFACE(ctx) ((IS_GEN7((ctx)->intel.device_id) ||  \
                                 IS_GEN6((ctx)->intel.device_id)) && \
                                (ctx)->render_state.interleaved_uv)

#define HAS_ENCODER(ctx)        ((IS_GEN7((ctx)->intel.device_id) ||  \
                                  IS_GEN6((ctx)->intel.device_id)) && \
                                 (ctx)->intel.has_bsd)

#define HAS_VPP(ctx)    (IS_IRONLAKE((ctx)->intel.device_id) || \
                         IS_GEN6((ctx)->intel.device_id) ||     \
                         IS_GEN7((ctx)->intel.device_id))

#define HAS_JPEG(ctx)   (IS_GEN7((ctx)->intel.device_id) &&     \
                         (ctx)->intel.has_bsd)

#define HAS_ACCELERATED_GETIMAGE(ctx)   (IS_GEN6((ctx)->intel.device_id) || \
                                         IS_GEN7((ctx)->intel.device_id))
86 I965_SURFACETYPE_RGBA = 1,
88 I965_SURFACETYPE_INDEXED
91 /* List of supported image formats */
94 VAImageFormat va_format;
95 } i965_image_format_map_t;
97 static const i965_image_format_map_t
98 i965_image_formats_map[I965_MAX_IMAGE_FORMATS + 1] = {
99 { I965_SURFACETYPE_YUV,
100 { VA_FOURCC('Y','V','1','2'), VA_LSB_FIRST, 12, } },
101 { I965_SURFACETYPE_YUV,
102 { VA_FOURCC('I','4','2','0'), VA_LSB_FIRST, 12, } },
103 { I965_SURFACETYPE_YUV,
104 { VA_FOURCC('N','V','1','2'), VA_LSB_FIRST, 12, } },
107 /* List of supported subpicture formats */
111 VAImageFormat va_format;
112 unsigned int va_flags;
113 } i965_subpic_format_map_t;
115 static const i965_subpic_format_map_t
116 i965_subpic_formats_map[I965_MAX_SUBPIC_FORMATS + 1] = {
117 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_P4A4_UNORM,
118 { VA_FOURCC('I','A','4','4'), VA_MSB_FIRST, 8, },
119 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
120 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_A4P4_UNORM,
121 { VA_FOURCC('A','I','4','4'), VA_MSB_FIRST, 8, },
122 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
123 { I965_SURFACETYPE_RGBA, I965_SURFACEFORMAT_B8G8R8A8_UNORM,
124 { VA_FOURCC('B','G','R','A'), VA_LSB_FIRST, 32,
125 32, 0x00ff0000, 0x0000ff00, 0x000000ff, 0xff000000 },
126 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
127 { I965_SURFACETYPE_RGBA, I965_SURFACEFORMAT_R8G8B8A8_UNORM,
128 { VA_FOURCC('R','G','B','A'), VA_LSB_FIRST, 32,
129 32, 0x000000ff, 0x0000ff00, 0x00ff0000, 0xff000000 },
130 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
133 static const i965_subpic_format_map_t *
134 get_subpic_format(const VAImageFormat *va_format)
137 for (i = 0; i965_subpic_formats_map[i].type != 0; i++) {
138 const i965_subpic_format_map_t * const m = &i965_subpic_formats_map[i];
139 if (m->va_format.fourcc == va_format->fourcc &&
140 (m->type == I965_SURFACETYPE_RGBA ?
141 (m->va_format.byte_order == va_format->byte_order &&
142 m->va_format.red_mask == va_format->red_mask &&
143 m->va_format.green_mask == va_format->green_mask &&
144 m->va_format.blue_mask == va_format->blue_mask &&
145 m->va_format.alpha_mask == va_format->alpha_mask) : 1))
151 extern struct hw_context *i965_proc_context_init(VADriverContextP, VAProfile);
152 extern struct hw_context *g4x_dec_hw_context_init(VADriverContextP, VAProfile);
153 static struct hw_codec_info g4x_hw_codec_info = {
154 .dec_hw_context_init = g4x_dec_hw_context_init,
155 .enc_hw_context_init = NULL,
156 .proc_hw_context_init = NULL,
159 extern struct hw_context *ironlake_dec_hw_context_init(VADriverContextP, VAProfile);
160 static struct hw_codec_info ironlake_hw_codec_info = {
161 .dec_hw_context_init = ironlake_dec_hw_context_init,
162 .enc_hw_context_init = NULL,
163 .proc_hw_context_init = i965_proc_context_init,
166 extern struct hw_context *gen6_dec_hw_context_init(VADriverContextP, VAProfile);
167 extern struct hw_context *gen6_enc_hw_context_init(VADriverContextP, VAProfile);
168 static struct hw_codec_info gen6_hw_codec_info = {
169 .dec_hw_context_init = gen6_dec_hw_context_init,
170 .enc_hw_context_init = gen6_enc_hw_context_init,
171 .proc_hw_context_init = i965_proc_context_init,
174 extern struct hw_context *gen7_dec_hw_context_init(VADriverContextP, VAProfile);
175 extern struct hw_context *gen7_enc_hw_context_init(VADriverContextP, VAProfile);
176 static struct hw_codec_info gen7_hw_codec_info = {
177 .dec_hw_context_init = gen7_dec_hw_context_init,
178 .enc_hw_context_init = gen7_enc_hw_context_init,
179 .proc_hw_context_init = i965_proc_context_init,
183 i965_QueryConfigProfiles(VADriverContextP ctx,
184 VAProfile *profile_list, /* out */
185 int *num_profiles) /* out */
187 struct i965_driver_data * const i965 = i965_driver_data(ctx);
190 if (HAS_MPEG2(i965)) {
191 profile_list[i++] = VAProfileMPEG2Simple;
192 profile_list[i++] = VAProfileMPEG2Main;
195 if (HAS_H264(i965)) {
196 profile_list[i++] = VAProfileH264Baseline;
197 profile_list[i++] = VAProfileH264Main;
198 profile_list[i++] = VAProfileH264High;
202 profile_list[i++] = VAProfileVC1Simple;
203 profile_list[i++] = VAProfileVC1Main;
204 profile_list[i++] = VAProfileVC1Advanced;
208 profile_list[i++] = VAProfileNone;
211 if (HAS_JPEG(i965)) {
212 profile_list[i++] = VAProfileJPEGBaseline;
215 /* If the assert fails then I965_MAX_PROFILES needs to be bigger */
216 assert(i <= I965_MAX_PROFILES);
219 return VA_STATUS_SUCCESS;
223 i965_QueryConfigEntrypoints(VADriverContextP ctx,
225 VAEntrypoint *entrypoint_list, /* out */
226 int *num_entrypoints) /* out */
228 struct i965_driver_data * const i965 = i965_driver_data(ctx);
232 case VAProfileMPEG2Simple:
233 case VAProfileMPEG2Main:
235 entrypoint_list[n++] = VAEntrypointVLD;
238 case VAProfileH264Baseline:
239 case VAProfileH264Main:
240 case VAProfileH264High:
242 entrypoint_list[n++] = VAEntrypointVLD;
244 if (HAS_ENCODER(i965))
245 entrypoint_list[n++] = VAEntrypointEncSlice;
249 case VAProfileVC1Simple:
250 case VAProfileVC1Main:
251 case VAProfileVC1Advanced:
253 entrypoint_list[n++] = VAEntrypointVLD;
258 entrypoint_list[n++] = VAEntrypointVideoProc;
261 case VAProfileJPEGBaseline:
263 entrypoint_list[n++] = VAEntrypointVLD;
270 /* If the assert fails then I965_MAX_ENTRYPOINTS needs to be bigger */
271 assert(n <= I965_MAX_ENTRYPOINTS);
272 *num_entrypoints = n;
273 return n > 0 ? VA_STATUS_SUCCESS : VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
277 i965_GetConfigAttributes(VADriverContextP ctx,
279 VAEntrypoint entrypoint,
280 VAConfigAttrib *attrib_list, /* in/out */
285 /* Other attributes don't seem to be defined */
286 /* What to do if we don't know the attribute? */
287 for (i = 0; i < num_attribs; i++) {
288 switch (attrib_list[i].type) {
289 case VAConfigAttribRTFormat:
290 attrib_list[i].value = VA_RT_FORMAT_YUV420;
293 case VAConfigAttribRateControl:
294 attrib_list[i].value = VA_RC_VBR;
297 case VAConfigAttribEncHeaderPacking:
298 if (entrypoint == VAEntrypointEncSlice) {
299 attrib_list[i].value = VA_ENC_HEADER_PACKING_SLICE;
305 attrib_list[i].value = VA_ATTRIB_NOT_SUPPORTED;
310 return VA_STATUS_SUCCESS;
/* Release a config object back to its heap. */
static void
i965_destroy_config(struct object_heap *heap, struct object_base *obj)
{
    object_heap_free(heap, obj);
}
320 i965_update_attribute(struct object_config *obj_config, VAConfigAttrib *attrib)
324 /* Check existing attrbiutes */
325 for (i = 0; obj_config->num_attribs < i; i++) {
326 if (obj_config->attrib_list[i].type == attrib->type) {
327 /* Update existing attribute */
328 obj_config->attrib_list[i].value = attrib->value;
329 return VA_STATUS_SUCCESS;
333 if (obj_config->num_attribs < I965_MAX_CONFIG_ATTRIBUTES) {
334 i = obj_config->num_attribs;
335 obj_config->attrib_list[i].type = attrib->type;
336 obj_config->attrib_list[i].value = attrib->value;
337 obj_config->num_attribs++;
338 return VA_STATUS_SUCCESS;
341 return VA_STATUS_ERROR_MAX_NUM_EXCEEDED;
345 i965_CreateConfig(VADriverContextP ctx,
347 VAEntrypoint entrypoint,
348 VAConfigAttrib *attrib_list,
350 VAConfigID *config_id) /* out */
352 struct i965_driver_data * const i965 = i965_driver_data(ctx);
353 struct object_config *obj_config;
358 /* Validate profile & entrypoint */
360 case VAProfileMPEG2Simple:
361 case VAProfileMPEG2Main:
362 if (HAS_MPEG2(i965) && VAEntrypointVLD == entrypoint) {
363 vaStatus = VA_STATUS_SUCCESS;
365 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
369 case VAProfileH264Baseline:
370 case VAProfileH264Main:
371 case VAProfileH264High:
372 if ((HAS_H264(i965) && VAEntrypointVLD == entrypoint) ||
373 (HAS_ENCODER(i965) && VAEntrypointEncSlice == entrypoint)) {
374 vaStatus = VA_STATUS_SUCCESS;
376 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
381 case VAProfileVC1Simple:
382 case VAProfileVC1Main:
383 case VAProfileVC1Advanced:
384 if (HAS_VC1(i965) && VAEntrypointVLD == entrypoint) {
385 vaStatus = VA_STATUS_SUCCESS;
387 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
393 if (HAS_VPP(i965) && VAEntrypointVideoProc == entrypoint) {
394 vaStatus = VA_STATUS_SUCCESS;
396 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
401 case VAProfileJPEGBaseline:
402 if (HAS_JPEG(i965) && VAEntrypointVLD == entrypoint) {
403 vaStatus = VA_STATUS_SUCCESS;
405 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
410 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
414 if (VA_STATUS_SUCCESS != vaStatus) {
418 configID = NEW_CONFIG_ID();
419 obj_config = CONFIG(configID);
421 if (NULL == obj_config) {
422 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
426 obj_config->profile = profile;
427 obj_config->entrypoint = entrypoint;
428 obj_config->attrib_list[0].type = VAConfigAttribRTFormat;
429 obj_config->attrib_list[0].value = VA_RT_FORMAT_YUV420;
430 obj_config->num_attribs = 1;
432 for(i = 0; i < num_attribs; i++) {
433 vaStatus = i965_update_attribute(obj_config, &(attrib_list[i]));
435 if (VA_STATUS_SUCCESS != vaStatus) {
441 if (VA_STATUS_SUCCESS != vaStatus) {
442 i965_destroy_config(&i965->config_heap, (struct object_base *)obj_config);
444 *config_id = configID;
451 i965_DestroyConfig(VADriverContextP ctx, VAConfigID config_id)
453 struct i965_driver_data *i965 = i965_driver_data(ctx);
454 struct object_config *obj_config = CONFIG(config_id);
457 if (NULL == obj_config) {
458 vaStatus = VA_STATUS_ERROR_INVALID_CONFIG;
462 i965_destroy_config(&i965->config_heap, (struct object_base *)obj_config);
463 return VA_STATUS_SUCCESS;
466 VAStatus i965_QueryConfigAttributes(VADriverContextP ctx,
467 VAConfigID config_id,
468 VAProfile *profile, /* out */
469 VAEntrypoint *entrypoint, /* out */
470 VAConfigAttrib *attrib_list, /* out */
471 int *num_attribs) /* out */
473 struct i965_driver_data *i965 = i965_driver_data(ctx);
474 struct object_config *obj_config = CONFIG(config_id);
475 VAStatus vaStatus = VA_STATUS_SUCCESS;
479 *profile = obj_config->profile;
480 *entrypoint = obj_config->entrypoint;
481 *num_attribs = obj_config->num_attribs;
483 for(i = 0; i < obj_config->num_attribs; i++) {
484 attrib_list[i] = obj_config->attrib_list[i];
491 i965_destroy_surface(struct object_heap *heap, struct object_base *obj)
493 struct object_surface *obj_surface = (struct object_surface *)obj;
495 dri_bo_unreference(obj_surface->bo);
496 obj_surface->bo = NULL;
498 if (obj_surface->free_private_data != NULL) {
499 obj_surface->free_private_data(&obj_surface->private_data);
500 obj_surface->private_data = NULL;
503 object_heap_free(heap, obj);
507 i965_CreateSurfaces(VADriverContextP ctx,
512 VASurfaceID *surfaces) /* out */
514 struct i965_driver_data *i965 = i965_driver_data(ctx);
516 VAStatus vaStatus = VA_STATUS_SUCCESS;
518 /* We only support one format */
519 if (VA_RT_FORMAT_YUV420 != format) {
520 return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
523 for (i = 0; i < num_surfaces; i++) {
524 int surfaceID = NEW_SURFACE_ID();
525 struct object_surface *obj_surface = SURFACE(surfaceID);
527 if (NULL == obj_surface) {
528 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
532 surfaces[i] = surfaceID;
533 obj_surface->status = VASurfaceReady;
534 obj_surface->subpic = VA_INVALID_ID;
535 obj_surface->orig_width = width;
536 obj_surface->orig_height = height;
538 obj_surface->width = ALIGN(width, 16);
539 obj_surface->height = ALIGN(height, 16);
540 obj_surface->flags = SURFACE_REFERENCED;
541 obj_surface->fourcc = 0;
542 obj_surface->bo = NULL;
543 obj_surface->locked_image_id = VA_INVALID_ID;
544 obj_surface->private_data = NULL;
545 obj_surface->free_private_data = NULL;
546 obj_surface->subsampling = SUBSAMPLE_YUV420;
550 if (VA_STATUS_SUCCESS != vaStatus) {
551 /* surfaces[i-1] was the last successful allocation */
553 struct object_surface *obj_surface = SURFACE(surfaces[i]);
555 surfaces[i] = VA_INVALID_SURFACE;
557 i965_destroy_surface(&i965->surface_heap, (struct object_base *)obj_surface);
565 i965_DestroySurfaces(VADriverContextP ctx,
566 VASurfaceID *surface_list,
569 struct i965_driver_data *i965 = i965_driver_data(ctx);
572 for (i = num_surfaces; i--; ) {
573 struct object_surface *obj_surface = SURFACE(surface_list[i]);
576 i965_destroy_surface(&i965->surface_heap, (struct object_base *)obj_surface);
579 return VA_STATUS_SUCCESS;
583 i965_QueryImageFormats(VADriverContextP ctx,
584 VAImageFormat *format_list, /* out */
585 int *num_formats) /* out */
589 for (n = 0; i965_image_formats_map[n].va_format.fourcc != 0; n++) {
590 const i965_image_format_map_t * const m = &i965_image_formats_map[n];
592 format_list[n] = m->va_format;
598 return VA_STATUS_SUCCESS;
602 * Guess the format when the usage of a VA surface is unknown
603 * 1. Without a valid context: YV12
604 * 2. The current context is valid:
605 * a) always NV12 on GEN6 and later
606 * b) I420 for MPEG-2 and NV12 for other codec on GEN4 & GEN5
609 i965_guess_surface_format(VADriverContextP ctx,
611 unsigned int *fourcc,
612 unsigned int *is_tiled)
614 struct i965_driver_data *i965 = i965_driver_data(ctx);
615 struct object_context *obj_context = NULL;
616 struct object_config *obj_config = NULL;
618 *fourcc = VA_FOURCC('Y', 'V', '1', '2');
621 if (i965->current_context_id == VA_INVALID_ID)
624 obj_context = CONTEXT(i965->current_context_id);
626 if (!obj_context || obj_context->config_id == VA_INVALID_ID)
629 obj_config = CONFIG(obj_context->config_id);
634 if (IS_GEN6(i965->intel.device_id) || IS_GEN7(i965->intel.device_id)) {
635 *fourcc = VA_FOURCC('N', 'V', '1', '2');
640 switch (obj_config->profile) {
641 case VAProfileMPEG2Simple:
642 case VAProfileMPEG2Main:
643 *fourcc = VA_FOURCC('I', '4', '2', '0');
648 *fourcc = VA_FOURCC('N', 'V', '1', '2');
655 i965_PutImage(VADriverContextP ctx,
660 unsigned int src_width,
661 unsigned int src_height,
664 unsigned int dest_width,
665 unsigned int dest_height)
667 struct i965_driver_data *i965 = i965_driver_data(ctx);
668 struct object_surface *obj_surface = SURFACE(surface);
669 struct object_image *obj_image = IMAGE(image);
670 struct i965_surface src_surface, dst_surface;
671 VAStatus va_status = VA_STATUS_SUCCESS;
672 VARectangle src_rect, dst_rect;
675 return VA_STATUS_ERROR_INVALID_SURFACE;
677 if (!obj_image || !obj_image->bo)
678 return VA_STATUS_ERROR_INVALID_IMAGE;
682 src_x + src_width > obj_image->image.width ||
683 src_y + src_height > obj_image->image.height)
684 return VA_STATUS_ERROR_INVALID_PARAMETER;
688 dest_x + dest_width > obj_surface->orig_width ||
689 dest_y + dest_height > obj_surface->orig_height)
690 return VA_STATUS_ERROR_INVALID_PARAMETER;
692 i965_check_alloc_surface_bo(ctx, obj_surface, HAS_TILED_SURFACE(i965), VA_FOURCC('N', 'V', '1', '2'), SUBSAMPLE_YUV420);
694 src_surface.id = image;
695 src_surface.type = I965_SURFACE_TYPE_IMAGE;
696 src_surface.flags = I965_SURFACE_FLAG_FRAME;
699 src_rect.width = src_width;
700 src_rect.height = src_height;
702 dst_surface.id = surface;
703 dst_surface.type = I965_SURFACE_TYPE_SURFACE;
704 dst_surface.flags = I965_SURFACE_FLAG_FRAME;
707 dst_rect.width = dest_width;
708 dst_rect.height = dest_height;
710 va_status = i965_image_processing(ctx,
720 i965_QuerySubpictureFormats(VADriverContextP ctx,
721 VAImageFormat *format_list, /* out */
722 unsigned int *flags, /* out */
723 unsigned int *num_formats) /* out */
727 for (n = 0; i965_subpic_formats_map[n].va_format.fourcc != 0; n++) {
728 const i965_subpic_format_map_t * const m = &i965_subpic_formats_map[n];
730 format_list[n] = m->va_format;
732 flags[n] = m->va_flags;
738 return VA_STATUS_SUCCESS;
/* Release a subpicture object back to its heap.  The subpicture does not own
 * its buffer object (it borrows the image's bo), so nothing else to free. */
static void
i965_destroy_subpic(struct object_heap *heap, struct object_base *obj)
{
    object_heap_free(heap, obj);
}
750 i965_CreateSubpicture(VADriverContextP ctx,
752 VASubpictureID *subpicture) /* out */
754 struct i965_driver_data *i965 = i965_driver_data(ctx);
755 VASubpictureID subpicID = NEW_SUBPIC_ID()
756 struct object_subpic *obj_subpic = SUBPIC(subpicID);
759 return VA_STATUS_ERROR_ALLOCATION_FAILED;
761 struct object_image *obj_image = IMAGE(image);
763 return VA_STATUS_ERROR_INVALID_IMAGE;
765 const i965_subpic_format_map_t * const m = get_subpic_format(&obj_image->image.format);
767 return VA_STATUS_ERROR_UNKNOWN; /* XXX: VA_STATUS_ERROR_UNSUPPORTED_FORMAT? */
769 *subpicture = subpicID;
770 obj_subpic->image = image;
771 obj_subpic->format = m->format;
772 obj_subpic->width = obj_image->image.width;
773 obj_subpic->height = obj_image->image.height;
774 obj_subpic->pitch = obj_image->image.pitches[0];
775 obj_subpic->bo = obj_image->bo;
776 return VA_STATUS_SUCCESS;
780 i965_DestroySubpicture(VADriverContextP ctx,
781 VASubpictureID subpicture)
783 struct i965_driver_data *i965 = i965_driver_data(ctx);
784 struct object_subpic *obj_subpic = SUBPIC(subpicture);
785 i965_destroy_subpic(&i965->subpic_heap, (struct object_base *)obj_subpic);
786 return VA_STATUS_SUCCESS;
790 i965_SetSubpictureImage(VADriverContextP ctx,
791 VASubpictureID subpicture,
795 return VA_STATUS_ERROR_UNIMPLEMENTED;
799 i965_SetSubpictureChromakey(VADriverContextP ctx,
800 VASubpictureID subpicture,
801 unsigned int chromakey_min,
802 unsigned int chromakey_max,
803 unsigned int chromakey_mask)
806 return VA_STATUS_ERROR_UNIMPLEMENTED;
810 i965_SetSubpictureGlobalAlpha(VADriverContextP ctx,
811 VASubpictureID subpicture,
815 return VA_STATUS_ERROR_UNIMPLEMENTED;
819 i965_AssociateSubpicture(VADriverContextP ctx,
820 VASubpictureID subpicture,
821 VASurfaceID *target_surfaces,
823 short src_x, /* upper left offset in subpicture */
825 unsigned short src_width,
826 unsigned short src_height,
827 short dest_x, /* upper left offset in surface */
829 unsigned short dest_width,
830 unsigned short dest_height,
832 * whether to enable chroma-keying or global-alpha
833 * see VA_SUBPICTURE_XXX values
837 struct i965_driver_data *i965 = i965_driver_data(ctx);
838 struct object_subpic *obj_subpic = SUBPIC(subpicture);
841 obj_subpic->src_rect.x = src_x;
842 obj_subpic->src_rect.y = src_y;
843 obj_subpic->src_rect.width = src_width;
844 obj_subpic->src_rect.height = src_height;
845 obj_subpic->dst_rect.x = dest_x;
846 obj_subpic->dst_rect.y = dest_y;
847 obj_subpic->dst_rect.width = dest_width;
848 obj_subpic->dst_rect.height = dest_height;
849 obj_subpic->flags = flags;
851 for (i = 0; i < num_surfaces; i++) {
852 struct object_surface *obj_surface = SURFACE(target_surfaces[i]);
854 return VA_STATUS_ERROR_INVALID_SURFACE;
855 obj_surface->subpic = subpicture;
857 return VA_STATUS_SUCCESS;
862 i965_DeassociateSubpicture(VADriverContextP ctx,
863 VASubpictureID subpicture,
864 VASurfaceID *target_surfaces,
867 struct i965_driver_data *i965 = i965_driver_data(ctx);
870 for (i = 0; i < num_surfaces; i++) {
871 struct object_surface *obj_surface = SURFACE(target_surfaces[i]);
873 return VA_STATUS_ERROR_INVALID_SURFACE;
874 if (obj_surface->subpic == subpicture)
875 obj_surface->subpic = VA_INVALID_ID;
877 return VA_STATUS_SUCCESS;
881 i965_reference_buffer_store(struct buffer_store **ptr,
882 struct buffer_store *buffer_store)
884 assert(*ptr == NULL);
887 buffer_store->ref_count++;
893 i965_release_buffer_store(struct buffer_store **ptr)
895 struct buffer_store *buffer_store = *ptr;
897 if (buffer_store == NULL)
900 assert(buffer_store->bo || buffer_store->buffer);
901 assert(!(buffer_store->bo && buffer_store->buffer));
902 buffer_store->ref_count--;
904 if (buffer_store->ref_count == 0) {
905 dri_bo_unreference(buffer_store->bo);
906 free(buffer_store->buffer);
907 buffer_store->bo = NULL;
908 buffer_store->buffer = NULL;
916 i965_destroy_context(struct object_heap *heap, struct object_base *obj)
918 struct object_context *obj_context = (struct object_context *)obj;
921 if (obj_context->hw_context) {
922 obj_context->hw_context->destroy(obj_context->hw_context);
923 obj_context->hw_context = NULL;
926 if (obj_context->codec_type == CODEC_PROC) {
927 i965_release_buffer_store(&obj_context->codec_state.proc.pipeline_param);
928 i965_release_buffer_store(&obj_context->codec_state.proc.input_param);
930 for (i = 0; i < VA_PROC_PIPELINE_MAX_NUM_FILTERS; i++)
931 i965_release_buffer_store(&obj_context->codec_state.proc.filter_param[i]);
932 } else if (obj_context->codec_type == CODEC_ENC) {
933 assert(obj_context->codec_state.encode.num_slice_params <= obj_context->codec_state.encode.max_slice_params);
934 i965_release_buffer_store(&obj_context->codec_state.encode.pic_param);
935 i965_release_buffer_store(&obj_context->codec_state.encode.seq_param);
937 for (i = 0; i < obj_context->codec_state.encode.num_slice_params; i++)
938 i965_release_buffer_store(&obj_context->codec_state.encode.slice_params[i]);
940 free(obj_context->codec_state.encode.slice_params);
942 assert(obj_context->codec_state.encode.num_slice_params_ext <= obj_context->codec_state.encode.max_slice_params_ext);
943 i965_release_buffer_store(&obj_context->codec_state.encode.pic_param_ext);
944 i965_release_buffer_store(&obj_context->codec_state.encode.seq_param_ext);
945 i965_release_buffer_store(&obj_context->codec_state.encode.dec_ref_pic_marking);
947 for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.packed_header_param); i++)
948 i965_release_buffer_store(&obj_context->codec_state.encode.packed_header_param[i]);
950 for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.packed_header_data); i++)
951 i965_release_buffer_store(&obj_context->codec_state.encode.packed_header_data[i]);
953 for (i = 0; i < obj_context->codec_state.encode.num_slice_params_ext; i++)
954 i965_release_buffer_store(&obj_context->codec_state.encode.slice_params_ext[i]);
956 free(obj_context->codec_state.encode.slice_params_ext);
958 assert(obj_context->codec_state.decode.num_slice_params <= obj_context->codec_state.decode.max_slice_params);
959 assert(obj_context->codec_state.decode.num_slice_datas <= obj_context->codec_state.decode.max_slice_datas);
961 i965_release_buffer_store(&obj_context->codec_state.decode.pic_param);
962 i965_release_buffer_store(&obj_context->codec_state.decode.iq_matrix);
963 i965_release_buffer_store(&obj_context->codec_state.decode.bit_plane);
965 for (i = 0; i < obj_context->codec_state.decode.num_slice_params; i++)
966 i965_release_buffer_store(&obj_context->codec_state.decode.slice_params[i]);
968 for (i = 0; i < obj_context->codec_state.decode.num_slice_datas; i++)
969 i965_release_buffer_store(&obj_context->codec_state.decode.slice_datas[i]);
971 free(obj_context->codec_state.decode.slice_params);
972 free(obj_context->codec_state.decode.slice_datas);
975 free(obj_context->render_targets);
976 object_heap_free(heap, obj);
980 i965_CreateContext(VADriverContextP ctx,
981 VAConfigID config_id,
985 VASurfaceID *render_targets,
986 int num_render_targets,
987 VAContextID *context) /* out */
989 struct i965_driver_data *i965 = i965_driver_data(ctx);
990 struct i965_render_state *render_state = &i965->render_state;
991 struct object_config *obj_config = CONFIG(config_id);
992 struct object_context *obj_context = NULL;
993 VAStatus vaStatus = VA_STATUS_SUCCESS;
997 if (NULL == obj_config) {
998 vaStatus = VA_STATUS_ERROR_INVALID_CONFIG;
1003 /* Validate picture dimensions */
1004 contextID = NEW_CONTEXT_ID();
1005 obj_context = CONTEXT(contextID);
1007 if (NULL == obj_context) {
1008 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
1012 render_state->inited = 1;
1014 switch (obj_config->profile) {
1015 case VAProfileH264Baseline:
1016 case VAProfileH264Main:
1017 case VAProfileH264High:
1018 if (!HAS_H264(i965))
1019 return VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
1020 render_state->interleaved_uv = 1;
1023 render_state->interleaved_uv = !!(IS_GEN6(i965->intel.device_id) || IS_GEN7(i965->intel.device_id));
1027 *context = contextID;
1028 obj_context->flags = flag;
1029 obj_context->context_id = contextID;
1030 obj_context->config_id = config_id;
1031 obj_context->picture_width = picture_width;
1032 obj_context->picture_height = picture_height;
1033 obj_context->num_render_targets = num_render_targets;
1034 obj_context->render_targets =
1035 (VASurfaceID *)calloc(num_render_targets, sizeof(VASurfaceID));
1036 obj_context->hw_context = NULL;
1038 for(i = 0; i < num_render_targets; i++) {
1039 if (NULL == SURFACE(render_targets[i])) {
1040 vaStatus = VA_STATUS_ERROR_INVALID_SURFACE;
1044 obj_context->render_targets[i] = render_targets[i];
1047 if (VA_STATUS_SUCCESS == vaStatus) {
1048 if (VAEntrypointVideoProc == obj_config->entrypoint) {
1049 obj_context->codec_type = CODEC_PROC;
1050 memset(&obj_context->codec_state.proc, 0, sizeof(obj_context->codec_state.proc));
1051 obj_context->codec_state.proc.current_render_target = VA_INVALID_ID;
1052 assert(i965->codec_info->proc_hw_context_init);
1053 obj_context->hw_context = i965->codec_info->proc_hw_context_init(ctx, obj_config->profile);
1054 } else if (VAEntrypointEncSlice == obj_config->entrypoint) { /*encode routin only*/
1055 obj_context->codec_type = CODEC_ENC;
1056 memset(&obj_context->codec_state.encode, 0, sizeof(obj_context->codec_state.encode));
1057 obj_context->codec_state.encode.current_render_target = VA_INVALID_ID;
1058 obj_context->codec_state.encode.max_slice_params = NUM_SLICES;
1059 obj_context->codec_state.encode.slice_params = calloc(obj_context->codec_state.encode.max_slice_params,
1060 sizeof(*obj_context->codec_state.encode.slice_params));
1061 assert(i965->codec_info->enc_hw_context_init);
1062 obj_context->hw_context = i965->codec_info->enc_hw_context_init(ctx, obj_config->profile);
1064 obj_context->codec_type = CODEC_DEC;
1065 memset(&obj_context->codec_state.decode, 0, sizeof(obj_context->codec_state.decode));
1066 obj_context->codec_state.decode.current_render_target = -1;
1067 obj_context->codec_state.decode.max_slice_params = NUM_SLICES;
1068 obj_context->codec_state.decode.max_slice_datas = NUM_SLICES;
1069 obj_context->codec_state.decode.slice_params = calloc(obj_context->codec_state.decode.max_slice_params,
1070 sizeof(*obj_context->codec_state.decode.slice_params));
1071 obj_context->codec_state.decode.slice_datas = calloc(obj_context->codec_state.decode.max_slice_datas,
1072 sizeof(*obj_context->codec_state.decode.slice_datas));
1074 assert(i965->codec_info->dec_hw_context_init);
1075 obj_context->hw_context = i965->codec_info->dec_hw_context_init(ctx, obj_config->profile);
1079 /* Error recovery */
1080 if (VA_STATUS_SUCCESS != vaStatus) {
1081 i965_destroy_context(&i965->context_heap, (struct object_base *)obj_context);
1084 i965->current_context_id = contextID;
1090 i965_DestroyContext(VADriverContextP ctx, VAContextID context)
1092 struct i965_driver_data *i965 = i965_driver_data(ctx);
1093 struct object_context *obj_context = CONTEXT(context);
1095 assert(obj_context);
1097 if (i965->current_context_id == context)
1098 i965->current_context_id = VA_INVALID_ID;
1100 i965_destroy_context(&i965->context_heap, (struct object_base *)obj_context);
1102 return VA_STATUS_SUCCESS;
1106 i965_destroy_buffer(struct object_heap *heap, struct object_base *obj)
1108 struct object_buffer *obj_buffer = (struct object_buffer *)obj;
1110 assert(obj_buffer->buffer_store);
1111 i965_release_buffer_store(&obj_buffer->buffer_store);
1112 object_heap_free(heap, obj);
1116 i965_create_buffer_internal(VADriverContextP ctx,
1117 VAContextID context,
1120 unsigned int num_elements,
1125 struct i965_driver_data *i965 = i965_driver_data(ctx);
1126 struct object_buffer *obj_buffer = NULL;
1127 struct buffer_store *buffer_store = NULL;
1132 case VAPictureParameterBufferType:
1133 case VAIQMatrixBufferType:
1134 case VAQMatrixBufferType:
1135 case VABitPlaneBufferType:
1136 case VASliceGroupMapBufferType:
1137 case VASliceParameterBufferType:
1138 case VASliceDataBufferType:
1139 case VAMacroblockParameterBufferType:
1140 case VAResidualDataBufferType:
1141 case VADeblockingParameterBufferType:
1142 case VAImageBufferType:
1143 case VAEncCodedBufferType:
1144 case VAEncSequenceParameterBufferType:
1145 case VAEncPictureParameterBufferType:
1146 case VAEncSliceParameterBufferType:
1147 case VAEncDecRefPicMarkingBufferH264Type:
1148 case VAEncPackedHeaderParameterBufferType:
1149 case VAEncPackedHeaderDataBufferType:
1150 case VAProcPipelineParameterBufferType:
1151 case VAProcInputParameterBufferType:
1152 case VAProcFilterBaseParameterBufferType:
1153 case VAProcFilterDeinterlacingParameterBufferType:
1154 case VAProcFilterProcAmpParameterBufferType:
1155 case VAHuffmanTableBufferType:
1160 return VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
1163 bufferID = NEW_BUFFER_ID();
1164 obj_buffer = BUFFER(bufferID);
1166 if (NULL == obj_buffer) {
1167 return VA_STATUS_ERROR_ALLOCATION_FAILED;
1170 if (type == VAEncCodedBufferType) {
1171 size += ALIGN(sizeof(VACodedBufferSegment), 64);
1172 size += 0x1000; /* for upper bound check */
1175 obj_buffer->max_num_elements = num_elements;
1176 obj_buffer->num_elements = num_elements;
1177 obj_buffer->size_element = size;
1178 obj_buffer->type = type;
1179 obj_buffer->buffer_store = NULL;
1180 buffer_store = calloc(1, sizeof(struct buffer_store));
1181 assert(buffer_store);
1182 buffer_store->ref_count = 1;
1184 if (store_bo != NULL) {
1185 buffer_store->bo = store_bo;
1186 dri_bo_reference(buffer_store->bo);
1189 dri_bo_subdata(buffer_store->bo, 0, size * num_elements, data);
1190 } else if (type == VASliceDataBufferType ||
1191 type == VAImageBufferType ||
1192 type == VAEncCodedBufferType) {
1193 buffer_store->bo = dri_bo_alloc(i965->intel.bufmgr,
1195 size * num_elements, 64);
1196 assert(buffer_store->bo);
1198 if (type == VAEncCodedBufferType) {
1199 VACodedBufferSegment *coded_buffer_segment;
1200 dri_bo_map(buffer_store->bo, 1);
1201 coded_buffer_segment = (VACodedBufferSegment *)buffer_store->bo->virtual;
1202 coded_buffer_segment->size = size - ALIGN(sizeof(VACodedBufferSegment), 64);
1203 coded_buffer_segment->bit_offset = 0;
1204 coded_buffer_segment->status = 0;
1205 coded_buffer_segment->buf = NULL;
1206 coded_buffer_segment->next = NULL;
1207 dri_bo_unmap(buffer_store->bo);
1209 dri_bo_subdata(buffer_store->bo, 0, size * num_elements, data);
1212 } else if (type == VAEncPackedHeaderParameterBufferType) {
1213 VAEncPackedHeaderParameterBuffer *param;
1217 assert(num_elements == 1);
1218 assert(size == sizeof(*param));
1220 param = (VAEncPackedHeaderParameterBuffer *)data;
1221 msize = ALIGN(size, 32) + param->num_headers * sizeof(int) * 2;
1222 buffer_store->buffer = malloc(msize);
1223 assert(buffer_store->buffer);
1225 memcpy(buffer_store->buffer,
1228 memcpy((unsigned char *)buffer_store->buffer + ALIGN(size, 32),
1229 param->length_in_bits,
1230 param->num_headers * sizeof(int));
1231 memcpy((unsigned char *)buffer_store->buffer + ALIGN(size, 32) + param->num_headers * sizeof(int),
1232 param->offset_in_bytes,
1233 param->num_headers * sizeof(int));
1235 param = (VAEncPackedHeaderParameterBuffer *)buffer_store->buffer;
1236 param->length_in_bits = (unsigned int *)((unsigned char *)buffer_store->buffer + ALIGN(size, 32));
1237 param->offset_in_bytes = (unsigned int *)((unsigned char *)buffer_store->buffer + ALIGN(size, 32) + param->num_headers * sizeof(int));
1241 if (type == VAEncPackedHeaderDataBufferType) {
1242 msize = ALIGN(size, 4);
1245 buffer_store->buffer = malloc(msize * num_elements);
1246 assert(buffer_store->buffer);
1249 memcpy(buffer_store->buffer, data, size * num_elements);
1252 buffer_store->num_elements = obj_buffer->num_elements;
1253 i965_reference_buffer_store(&obj_buffer->buffer_store, buffer_store);
1254 i965_release_buffer_store(&buffer_store);
1257 return VA_STATUS_SUCCESS;
/* vaCreateBuffer entry point: thin wrapper that delegates to
 * i965_create_buffer_internal() with a NULL store_bo, so any client
 * data is copied into driver-owned storage rather than wrapped. */
1261 i965_CreateBuffer(VADriverContextP ctx,
1262 VAContextID context, /* in */
1263 VABufferType type, /* in */
1264 unsigned int size, /* in */
1265 unsigned int num_elements, /* in */
1266 void *data, /* in */
1267 VABufferID *buf_id) /* out */
1269 return i965_create_buffer_internal(ctx, context, type, size, num_elements, data, NULL, buf_id);
/* vaBufferSetNumElements: change the live element count of a buffer,
 * bounded above by the max_num_elements recorded at creation time.
 * On success the backing store's element count is kept in sync. */
1274 i965_BufferSetNumElements(VADriverContextP ctx,
1275 VABufferID buf_id, /* in */
1276 unsigned int num_elements) /* in */
1278 struct i965_driver_data *i965 = i965_driver_data(ctx);
1279 struct object_buffer *obj_buffer = BUFFER(buf_id);
1280 VAStatus vaStatus = VA_STATUS_SUCCESS;
/* num_elements is unsigned, so the old "(num_elements < 0)" test was
 * tautologically false (dead code); only the upper bound matters. */
1284 if (num_elements > obj_buffer->max_num_elements) {
1286 vaStatus = VA_STATUS_ERROR_UNKNOWN;
1288 obj_buffer->num_elements = num_elements;
1289 if (obj_buffer->buffer_store != NULL) {
1290 obj_buffer->buffer_store->num_elements = num_elements;
/* vaMapBuffer: expose the buffer's backing storage to the client.
 * A buffer is backed either by a GEM bo or by a malloc'ed CPU buffer,
 * never both (asserted below). */
1298 i965_MapBuffer(VADriverContextP ctx,
1299 VABufferID buf_id, /* in */
1300 void **pbuf) /* out */
1302 struct i965_driver_data *i965 = i965_driver_data(ctx);
1303 struct object_buffer *obj_buffer = BUFFER(buf_id);
1304 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
1306 assert(obj_buffer && obj_buffer->buffer_store);
1307 assert(obj_buffer->buffer_store->bo || obj_buffer->buffer_store->buffer);
1308 assert(!(obj_buffer->buffer_store->bo && obj_buffer->buffer_store->buffer));
1310 if (NULL != obj_buffer->buffer_store->bo) {
1311 unsigned int tiling, swizzle;
1313 dri_bo_get_tiling(obj_buffer->buffer_store->bo, &tiling, &swizzle);
/* Tiled bos must be mapped through the GTT to get a linear CPU view;
 * untiled bos take the plain (writable) CPU map. */
1315 if (tiling != I915_TILING_NONE)
1316 drm_intel_gem_bo_map_gtt(obj_buffer->buffer_store->bo);
1318 dri_bo_map(obj_buffer->buffer_store->bo, 1);
1320 assert(obj_buffer->buffer_store->bo->virtual);
1321 *pbuf = obj_buffer->buffer_store->bo->virtual;
/* Encoder output buffers begin with a VACodedBufferSegment header
 * (64-byte aligned); fix it up in place so segment->buf points at the
 * bitstream payload that follows the header. */
1323 if (obj_buffer->type == VAEncCodedBufferType) {
1325 unsigned char *buffer = NULL;
1326 VACodedBufferSegment *coded_buffer_segment = (VACodedBufferSegment *)(obj_buffer->buffer_store->bo->virtual);
1328 coded_buffer_segment->buf = buffer = (unsigned char *)(obj_buffer->buffer_store->bo->virtual) + ALIGN(sizeof(VACodedBufferSegment), 64);
/* Scan the payload (header and 0x1000-byte guard excluded) to find the
 * real coded size. NOTE(review): loop body not visible here — presumably
 * it searches for the end-of-bitstream marker; confirm against the full
 * source. Reaching the end of the scan range means the slice data
 * overflowed the buffer. */
1330 for (i = 0; i < obj_buffer->size_element - ALIGN(sizeof(VACodedBufferSegment), 64) - 3 - 0x1000; i++) {
1338 if (i == obj_buffer->size_element - ALIGN(sizeof(VACodedBufferSegment), 64) - 3 - 0x1000) {
1339 coded_buffer_segment->status |= VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK;
1342 coded_buffer_segment->size = i;
1345 vaStatus = VA_STATUS_SUCCESS;
1346 } else if (NULL != obj_buffer->buffer_store->buffer) {
/* CPU-backed buffer: hand out the malloc'ed storage directly. */
1347 *pbuf = obj_buffer->buffer_store->buffer;
1348 vaStatus = VA_STATUS_SUCCESS;
/* vaUnmapBuffer: release the CPU mapping created by i965_MapBuffer.
 * Mirrors the map path: GTT-unmap for tiled bos, plain unmap otherwise;
 * CPU-backed buffers need no teardown. */
1355 i965_UnmapBuffer(VADriverContextP ctx, VABufferID buf_id)
1357 struct i965_driver_data *i965 = i965_driver_data(ctx);
1358 struct object_buffer *obj_buffer = BUFFER(buf_id);
1359 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
1361 assert(obj_buffer && obj_buffer->buffer_store);
1362 assert(obj_buffer->buffer_store->bo || obj_buffer->buffer_store->buffer);
1363 assert(!(obj_buffer->buffer_store->bo && obj_buffer->buffer_store->buffer));
1365 if (NULL != obj_buffer->buffer_store->bo) {
1366 unsigned int tiling, swizzle;
1368 dri_bo_get_tiling(obj_buffer->buffer_store->bo, &tiling, &swizzle);
1370 if (tiling != I915_TILING_NONE)
1371 drm_intel_gem_bo_unmap_gtt(obj_buffer->buffer_store->bo);
1373 dri_bo_unmap(obj_buffer->buffer_store->bo);
1375 vaStatus = VA_STATUS_SUCCESS;
1376 } else if (NULL != obj_buffer->buffer_store->buffer) {
1378 vaStatus = VA_STATUS_SUCCESS;
/* vaDestroyBuffer: release the buffer object back to the driver's
 * buffer heap (i965_destroy_buffer drops the store reference). */
1385 i965_DestroyBuffer(VADriverContextP ctx, VABufferID buffer_id)
1387 struct i965_driver_data *i965 = i965_driver_data(ctx);
1388 struct object_buffer *obj_buffer = BUFFER(buffer_id);
1391 i965_destroy_buffer(&i965->buffer_heap, (struct object_base *)obj_buffer);
1393 return VA_STATUS_SUCCESS;
/* vaBeginPicture: start a new frame on `context` targeting
 * `render_target`. Validates the config's profile, then resets the
 * per-picture codec state (releasing parameter stores buffered for the
 * previous frame) and latches the new render target. */
1397 i965_BeginPicture(VADriverContextP ctx,
1398 VAContextID context,
1399 VASurfaceID render_target)
1401 struct i965_driver_data *i965 = i965_driver_data(ctx);
1402 struct object_context *obj_context = CONTEXT(context);
1403 struct object_surface *obj_surface = SURFACE(render_target);
1404 struct object_config *obj_config;
1409 assert(obj_context);
1410 assert(obj_surface);
1412 config = obj_context->config_id;
1413 obj_config = CONFIG(config);
/* Accept only the profiles this driver handles; anything else fails
 * with VA_STATUS_ERROR_UNSUPPORTED_PROFILE. */
1416 switch (obj_config->profile) {
1417 case VAProfileMPEG2Simple:
1418 case VAProfileMPEG2Main:
1419 vaStatus = VA_STATUS_SUCCESS;
1422 case VAProfileH264Baseline:
1423 case VAProfileH264Main:
1424 case VAProfileH264High:
1425 vaStatus = VA_STATUS_SUCCESS;
1428 case VAProfileVC1Simple:
1429 case VAProfileVC1Main:
1430 case VAProfileVC1Advanced:
1431 vaStatus = VA_STATUS_SUCCESS;
1434 case VAProfileJPEGBaseline:
1435 vaStatus = VA_STATUS_SUCCESS;
1439 vaStatus = VA_STATUS_SUCCESS;
1444 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
/* Per-codec-type frame reset. Video processing contexts only need the
 * render target recorded. */
1448 if (obj_context->codec_type == CODEC_PROC) {
1449 obj_context->codec_state.proc.current_render_target = render_target;
1450 } else if (obj_context->codec_type == CODEC_ENC) {
/* Encoder: drop every parameter buffer from the previous frame —
 * legacy params, extended params, packed headers, and slice arrays. */
1451 i965_release_buffer_store(&obj_context->codec_state.encode.pic_param);
1452 i965_release_buffer_store(&obj_context->codec_state.encode.seq_param);
1454 for (i = 0; i < obj_context->codec_state.encode.num_slice_params; i++) {
1455 i965_release_buffer_store(&obj_context->codec_state.encode.slice_params[i]);
1458 obj_context->codec_state.encode.num_slice_params = 0;
1461 i965_release_buffer_store(&obj_context->codec_state.encode.pic_param_ext);
1462 i965_release_buffer_store(&obj_context->codec_state.encode.seq_param_ext);
1463 i965_release_buffer_store(&obj_context->codec_state.encode.dec_ref_pic_marking);
1466 for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.packed_header_param); i++)
1467 i965_release_buffer_store(&obj_context->codec_state.encode.packed_header_param[i]);
1469 for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.packed_header_data); i++)
1470 i965_release_buffer_store(&obj_context->codec_state.encode.packed_header_data[i]);
1472 for (i = 0; i < obj_context->codec_state.encode.num_slice_params_ext; i++)
1473 i965_release_buffer_store(&obj_context->codec_state.encode.slice_params_ext[i]);
1475 obj_context->codec_state.encode.num_slice_params_ext = 0;
1476 obj_context->codec_state.encode.current_render_target = render_target; /*This is input new frame*/
1477 obj_context->codec_state.encode.last_packed_header_type = 0;
/* Decoder: same idea — release picture/IQ/bitplane params plus the
 * slice parameter and slice data arrays. */
1479 obj_context->codec_state.decode.current_render_target = render_target;
1480 i965_release_buffer_store(&obj_context->codec_state.decode.pic_param);
1481 i965_release_buffer_store(&obj_context->codec_state.decode.iq_matrix);
1482 i965_release_buffer_store(&obj_context->codec_state.decode.bit_plane);
1484 for (i = 0; i < obj_context->codec_state.decode.num_slice_params; i++) {
1485 i965_release_buffer_store(&obj_context->codec_state.decode.slice_params[i]);
1486 i965_release_buffer_store(&obj_context->codec_state.decode.slice_datas[i]);
1489 obj_context->codec_state.decode.num_slice_params = 0;
1490 obj_context->codec_state.decode.num_slice_datas = 0;
/* Call the generated per-category render helper for buffer `name` in
 * codec category `category` (decode/encode/proc); expands against the
 * ctx/obj_context/obj_buffer locals of the calling render loop. */
1496 #define I965_RENDER_BUFFER(category, name) i965_render_##category##_##name##_buffer(ctx, obj_context, obj_buffer)
/* Generate a renderer for a single-instance parameter buffer: replaces
 * codec_state.<category>.<member> with a fresh reference to the incoming
 * buffer's store. Only CPU-backed stores are legal here (asserted). */
1498 #define DEF_RENDER_SINGLE_BUFFER_FUNC(category, name, member) \
1500 i965_render_##category##_##name##_buffer(VADriverContextP ctx, \
1501 struct object_context *obj_context, \
1502 struct object_buffer *obj_buffer) \
1504 struct category##_state *category = &obj_context->codec_state.category; \
1505 assert(obj_buffer->buffer_store->bo == NULL); \
1506 assert(obj_buffer->buffer_store->buffer); \
1507 i965_release_buffer_store(&category->member); \
1508 i965_reference_buffer_store(&category->member, obj_buffer->buffer_store); \
1509 return VA_STATUS_SUCCESS; \
/* Generate a renderer for an array-valued buffer (e.g. slice params or
 * slice data): grows the codec_state array in NUM_SLICES-sized chunks,
 * then appends a reference to the incoming buffer's store.
 * NOTE(review): the realloc() result overwrites category->member
 * directly and is unchecked — on allocation failure the old array leaks
 * and the subsequent memset dereferences NULL. Worth fixing upstream. */
1512 #define DEF_RENDER_MULTI_BUFFER_FUNC(category, name, member) \
1514 i965_render_##category##_##name##_buffer(VADriverContextP ctx, \
1515 struct object_context *obj_context, \
1516 struct object_buffer *obj_buffer) \
1518 struct category##_state *category = &obj_context->codec_state.category; \
1519 if (category->num_##member == category->max_##member) { \
1520 category->member = realloc(category->member, (category->max_##member + NUM_SLICES) * sizeof(*category->member)); \
1521 memset(category->member + category->max_##member, 0, NUM_SLICES * sizeof(*category->member)); \
1522 category->max_##member += NUM_SLICES; \
1524 i965_release_buffer_store(&category->member[category->num_##member]); \
1525 i965_reference_buffer_store(&category->member[category->num_##member], obj_buffer->buffer_store); \
1526 category->num_##member++; \
1527 return VA_STATUS_SUCCESS; \
/* Decode-side instantiations of the generic render-buffer helpers. */
1530 #define I965_RENDER_DECODE_BUFFER(name) I965_RENDER_BUFFER(decode, name)
1532 #define DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(name, member) DEF_RENDER_SINGLE_BUFFER_FUNC(decode, name, member)
1533 DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(picture_parameter, pic_param)
1534 DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(iq_matrix, iq_matrix)
1535 DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(bit_plane, bit_plane)
1536 DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(huffman_table, huffman_table)
/* Slice parameters and slice data arrive once per slice, hence arrays. */
1538 #define DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(name, member) DEF_RENDER_MULTI_BUFFER_FUNC(decode, name, member)
1539 DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(slice_parameter, slice_params)
1540 DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(slice_data, slice_datas)
/* Dispatch each decode-path buffer to its codec_state slot. Stops at
 * the first buffer that fails (loop condition checks vaStatus). */
1543 i965_decoder_render_picture(VADriverContextP ctx,
1544 VAContextID context,
1545 VABufferID *buffers,
1548 struct i965_driver_data *i965 = i965_driver_data(ctx);
1549 struct object_context *obj_context = CONTEXT(context);
1550 VAStatus vaStatus = VA_STATUS_SUCCESS;
1553 for (i = 0; i < num_buffers && vaStatus == VA_STATUS_SUCCESS; i++) {
1554 struct object_buffer *obj_buffer = BUFFER(buffers[i]);
1557 switch (obj_buffer->type) {
1558 case VAPictureParameterBufferType:
1559 vaStatus = I965_RENDER_DECODE_BUFFER(picture_parameter);
1562 case VAIQMatrixBufferType:
1563 vaStatus = I965_RENDER_DECODE_BUFFER(iq_matrix);
1566 case VABitPlaneBufferType:
1567 vaStatus = I965_RENDER_DECODE_BUFFER(bit_plane);
1570 case VASliceParameterBufferType:
1571 vaStatus = I965_RENDER_DECODE_BUFFER(slice_parameter);
1574 case VASliceDataBufferType:
1575 vaStatus = I965_RENDER_DECODE_BUFFER(slice_data);
1578 case VAHuffmanTableBufferType:
1579 vaStatus = I965_RENDER_DECODE_BUFFER(huffman_table);
1583 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
/* Encode-side instantiations of the generic render-buffer helpers,
 * covering both the legacy and the "_ext" extended parameter slots. */
1591 #define I965_RENDER_ENCODE_BUFFER(name) I965_RENDER_BUFFER(encode, name)
1593 #define DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(name, member) DEF_RENDER_SINGLE_BUFFER_FUNC(encode, name, member)
1594 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(sequence_parameter, seq_param)
1595 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(picture_parameter, pic_param)
1596 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(picture_control, pic_control)
1597 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(qmatrix, q_matrix)
1598 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(iqmatrix, iq_matrix)
1599 /* extended buffer */
1600 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(sequence_parameter_ext, seq_param_ext)
1601 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(picture_parameter_ext, pic_param_ext)
1602 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(dec_ref_pic_marking, dec_ref_pic_marking)
1604 #define DEF_RENDER_ENCODE_MULTI_BUFFER_FUNC(name, member) DEF_RENDER_MULTI_BUFFER_FUNC(encode, name, member)
1605 DEF_RENDER_ENCODE_MULTI_BUFFER_FUNC(slice_parameter, slice_params)
1606 DEF_RENDER_ENCODE_MULTI_BUFFER_FUNC(slice_parameter_ext, slice_params_ext)
/* Store a packed-header *parameter* buffer in the per-type slot
 * encode->packed_header_param[type], replacing any previous reference.
 * Like the generated single-buffer renderers, only CPU-backed stores
 * are legal here (asserted). */
1609 i965_encoder_render_packed_header_parameter_buffer(VADriverContextP ctx,
1610 struct object_context *obj_context,
1611 struct object_buffer *obj_buffer,
1612 VAEncPackedHeaderType type)
1614 struct encode_state *encode = &obj_context->codec_state.encode;
1616 assert(obj_buffer->buffer_store->bo == NULL);
1617 assert(obj_buffer->buffer_store->buffer);
1618 i965_release_buffer_store(&encode->packed_header_param[type]);
1619 i965_reference_buffer_store(&encode->packed_header_param[type], obj_buffer->buffer_store);
1621 return VA_STATUS_SUCCESS;
/* Store a packed-header *data* buffer in encode->packed_header_data[type].
 * Companion to the parameter-buffer variant above; the caller pairs them
 * via encode->last_packed_header_type. */
1625 i965_encoder_render_packed_header_data_buffer(VADriverContextP ctx,
1626 struct object_context *obj_context,
1627 struct object_buffer *obj_buffer,
1628 VAEncPackedHeaderType type)
1630 struct encode_state *encode = &obj_context->codec_state.encode;
1632 assert(obj_buffer->buffer_store->bo == NULL);
1633 assert(obj_buffer->buffer_store->buffer);
1634 i965_release_buffer_store(&encode->packed_header_data[type]);
1635 i965_reference_buffer_store(&encode->packed_header_data[type], obj_buffer->buffer_store);
1637 return VA_STATUS_SUCCESS;
/* Dispatch each encode-path buffer to its codec_state slot. Packed
 * header parameter buffers record their header type so the following
 * packed header *data* buffer can be filed under the same type.
 * NOTE(review): unlike the decode/proc loops, this loop condition does
 * not check vaStatus, so it keeps consuming buffers after a failure —
 * confirm whether that is intentional. */
1641 i965_encoder_render_picture(VADriverContextP ctx,
1642 VAContextID context,
1643 VABufferID *buffers,
1646 struct i965_driver_data *i965 = i965_driver_data(ctx);
1647 struct object_context *obj_context = CONTEXT(context);
1648 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
1651 for (i = 0; i < num_buffers; i++) {
1652 struct object_buffer *obj_buffer = BUFFER(buffers[i]);
1655 switch (obj_buffer->type) {
1656 case VAQMatrixBufferType:
1657 vaStatus = I965_RENDER_ENCODE_BUFFER(qmatrix);
1660 case VAIQMatrixBufferType:
1661 vaStatus = I965_RENDER_ENCODE_BUFFER(iqmatrix);
1664 case VAEncSequenceParameterBufferType:
1665 vaStatus = I965_RENDER_ENCODE_BUFFER(sequence_parameter_ext);
1668 case VAEncPictureParameterBufferType:
1669 vaStatus = I965_RENDER_ENCODE_BUFFER(picture_parameter_ext);
1672 case VAEncSliceParameterBufferType:
1673 vaStatus = I965_RENDER_ENCODE_BUFFER(slice_parameter_ext);
1676 case VAEncDecRefPicMarkingBufferH264Type:
1677 vaStatus = I965_RENDER_ENCODE_BUFFER(dec_ref_pic_marking);
1680 case VAEncPackedHeaderParameterBufferType:
/* Remember which header type this parameter buffer announced; the
 * next data buffer is filed under the same type. */
1682 struct encode_state *encode = &obj_context->codec_state.encode;
1683 VAEncPackedHeaderParameterBuffer *param = (VAEncPackedHeaderParameterBuffer *)obj_buffer->buffer_store->buffer;
1684 encode->last_packed_header_type = param->type;
1686 vaStatus = i965_encoder_render_packed_header_parameter_buffer(ctx,
1689 encode->last_packed_header_type);
1693 case VAEncPackedHeaderDataBufferType:
1695 struct encode_state *encode = &obj_context->codec_state.encode;
1697 assert(encode->last_packed_header_type == VAEncPackedHeaderSPS ||
1698 encode->last_packed_header_type == VAEncPackedHeaderPPS ||
1699 encode->last_packed_header_type == VAEncPackedHeaderSlice);
1700 vaStatus = i965_encoder_render_packed_header_data_buffer(ctx,
1703 encode->last_packed_header_type);
1708 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
/* Video-processing-side instantiations of the render-buffer helpers. */
1716 #define I965_RENDER_PROC_BUFFER(name) I965_RENDER_BUFFER(proc, name)
1718 #define DEF_RENDER_PROC_SINGLE_BUFFER_FUNC(name, member) DEF_RENDER_SINGLE_BUFFER_FUNC(proc, name, member)
1719 DEF_RENDER_PROC_SINGLE_BUFFER_FUNC(pipeline_parameter, pipeline_param)
1720 DEF_RENDER_PROC_SINGLE_BUFFER_FUNC(input_parameter, input_param)
/* Store a video-processing filter parameter buffer in the per-filter
 * slot proc->filter_param[type], replacing any previous reference.
 * CPU-backed stores only (asserted). */
1723 i965_render_proc_filter_parameter_buffer(VADriverContextP ctx,
1724 struct object_context *obj_context,
1725 struct object_buffer *obj_buffer,
1726 VAProcFilterType type)
1728 struct proc_state *proc = &obj_context->codec_state.proc;
1730 assert(obj_buffer->buffer_store->bo == NULL);
1731 assert(obj_buffer->buffer_store->buffer);
1732 i965_release_buffer_store(&proc->filter_param[type]);
1733 i965_reference_buffer_store(&proc->filter_param[type], obj_buffer->buffer_store);
1735 return VA_STATUS_SUCCESS;
/* Dispatch each video-processing buffer to its proc_state slot.
 * Stops at the first failure (loop condition checks vaStatus). */
1739 i965_proc_render_picture(VADriverContextP ctx,
1740 VAContextID context,
1741 VABufferID *buffers,
1744 struct i965_driver_data *i965 = i965_driver_data(ctx);
1745 struct object_context *obj_context = CONTEXT(context);
1746 VAStatus vaStatus = VA_STATUS_SUCCESS;
1749 for (i = 0; i < num_buffers && vaStatus == VA_STATUS_SUCCESS; i++) {
1750 struct object_buffer *obj_buffer = BUFFER(buffers[i]);
1753 switch (obj_buffer->type) {
1754 case VAProcPipelineParameterBufferType:
1755 vaStatus = I965_RENDER_PROC_BUFFER(pipeline_parameter);
1758 case VAProcInputParameterBufferType:
1759 vaStatus = I965_RENDER_PROC_BUFFER(input_parameter);
1762 case VAProcFilterBaseParameterBufferType:
/* Generic filter buffer carries its own filter type in-band. */
1764 VAProcFilterBaseParameterBuffer *param = (VAProcFilterBaseParameterBuffer *)obj_buffer->buffer_store->buffer;
1765 vaStatus = i965_render_proc_filter_parameter_buffer(ctx, obj_context, obj_buffer, param->filter);
1769 case VAProcFilterDeinterlacingParameterBufferType:
1770 vaStatus = i965_render_proc_filter_parameter_buffer(ctx, obj_context, obj_buffer, VAProcFilterDeinterlacing);
1773 case VAProcFilterProcAmpParameterBufferType:
1774 vaStatus = i965_render_proc_filter_parameter_buffer(ctx, obj_context, obj_buffer, VAProcFilterProcAmp);
1778 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
/* vaRenderPicture: route the buffer list to the proc, encode, or decode
 * renderer based on the context's config entrypoint. */
1787 i965_RenderPicture(VADriverContextP ctx,
1788 VAContextID context,
1789 VABufferID *buffers,
1792 struct i965_driver_data *i965 = i965_driver_data(ctx);
1793 struct object_context *obj_context;
1794 struct object_config *obj_config;
1796 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
1798 obj_context = CONTEXT(context);
1799 assert(obj_context);
1801 config = obj_context->config_id;
1802 obj_config = CONFIG(config);
1805 if (VAEntrypointVideoProc == obj_config->entrypoint) {
1806 vaStatus = i965_proc_render_picture(ctx, context, buffers, num_buffers);
1807 } else if (VAEntrypointEncSlice == obj_config->entrypoint ) {
1808 vaStatus = i965_encoder_render_picture(ctx, context, buffers, num_buffers);
/* Everything else is treated as a decode entrypoint. */
1810 vaStatus = i965_decoder_render_picture(ctx, context, buffers, num_buffers);
/* vaEndPicture: sanity-check that the per-frame state accumulated by
 * RenderPicture is complete for this codec type, then hand the whole
 * codec_state to the backend via hw_context->run(). */
1817 i965_EndPicture(VADriverContextP ctx, VAContextID context)
1819 struct i965_driver_data *i965 = i965_driver_data(ctx);
1820 struct object_context *obj_context = CONTEXT(context);
1821 struct object_config *obj_config;
1824 assert(obj_context);
1825 config = obj_context->config_id;
1826 obj_config = CONFIG(config);
1829 if (obj_context->codec_type == CODEC_PROC) {
1830 assert(VAEntrypointVideoProc == obj_config->entrypoint);
1831 } else if (obj_context->codec_type == CODEC_ENC) {
1832 assert(VAEntrypointEncSlice == obj_config->entrypoint);
/* Encoder needs picture + sequence params (legacy or _ext) and at
 * least one slice parameter buffer. */
1834 assert(obj_context->codec_state.encode.pic_param ||
1835 obj_context->codec_state.encode.pic_param_ext);
1836 assert(obj_context->codec_state.encode.seq_param ||
1837 obj_context->codec_state.encode.seq_param_ext);
1838 assert(obj_context->codec_state.encode.num_slice_params >= 1 ||
1839 obj_context->codec_state.encode.num_slice_params_ext >= 1);
/* Decoder needs a picture param and matched slice param/data pairs. */
1841 assert(obj_context->codec_state.decode.pic_param);
1842 assert(obj_context->codec_state.decode.num_slice_params >= 1);
1843 assert(obj_context->codec_state.decode.num_slice_datas >= 1);
1844 assert(obj_context->codec_state.decode.num_slice_params == obj_context->codec_state.decode.num_slice_datas);
1847 assert(obj_context->hw_context->run);
1848 obj_context->hw_context->run(ctx, obj_config->profile, &obj_context->codec_state, obj_context->hw_context);
1850 return VA_STATUS_SUCCESS;
/* vaSyncSurface: wait until rendering to the surface is finished.
 * NOTE(review): no explicit wait is visible in this view — presumably a
 * bo-wait call sits between the assert and the return, or the driver
 * relies on GEM's implicit synchronization; confirm against the full
 * source before assuming this is a no-op. */
1854 i965_SyncSurface(VADriverContextP ctx,
1855 VASurfaceID render_target)
1857 struct i965_driver_data *i965 = i965_driver_data(ctx);
1858 struct object_surface *obj_surface = SURFACE(render_target);
1860 assert(obj_surface);
1862 return VA_STATUS_SUCCESS;
/* vaQuerySurfaceStatus: report the surface's status field. The
 * map/unmap pair below blocks until the GPU is done with the bo, so by
 * the time we read obj_surface->status the surface is idle. */
1866 i965_QuerySurfaceStatus(VADriverContextP ctx,
1867 VASurfaceID render_target,
1868 VASurfaceStatus *status) /* out */
1870 struct i965_driver_data *i965 = i965_driver_data(ctx);
1871 struct object_surface *obj_surface = SURFACE(render_target);
1873 assert(obj_surface);
1875 /* Usually GEM will handle synchronization with the graphics hardware */
1877 if (obj_surface->bo) {
1878 dri_bo_map(obj_surface->bo, 0);
1879 dri_bo_unmap(obj_surface->bo);
1883 *status = obj_surface->status;
1885 return VA_STATUS_SUCCESS;
1890 * Query display attributes
1891 * The caller must provide an "attr_list" array that can hold at
1892 * least vaMaxNumDisplayAttributes() entries. The actual number of attributes
1893 * returned in "attr_list" is returned in "num_attributes".
1896 i965_QueryDisplayAttributes(VADriverContextP ctx,
1897 VADisplayAttribute *attr_list, /* out */
1898 int *num_attributes) /* out */
/* This driver exposes no display attributes; report zero and succeed. */
1901 *num_attributes = 0;
1903 return VA_STATUS_SUCCESS;
1907 * Get display attributes
1908 * This function returns the current attribute values in "attr_list".
1909 * Only attributes returned with VA_DISPLAY_ATTRIB_GETTABLE set in the "flags" field
1910 * from vaQueryDisplayAttributes() can have their values retrieved.
1913 i965_GetDisplayAttributes(VADriverContextP ctx,
1914 VADisplayAttribute *attr_list, /* in/out */
/* Not implemented: no attributes are queryable (see above). */
1918 return VA_STATUS_ERROR_UNIMPLEMENTED;
1922 * Set display attributes
1923 * Only attributes returned with VA_DISPLAY_ATTRIB_SETTABLE set in the "flags" field
1924 * from vaQueryDisplayAttributes() can be set. If the attribute is not settable or
1925 * the value is out of range, the function returns VA_STATUS_ERROR_ATTR_NOT_SUPPORTED
1928 i965_SetDisplayAttributes(VADriverContextP ctx,
1929 VADisplayAttribute *attr_list,
/* Not implemented: no attributes are settable (see above). */
1933 return VA_STATUS_ERROR_UNIMPLEMENTED;
/* Debug hook from the VA backend vtable; not implemented by this driver. */
1937 i965_DbgCopySurfaceToBuffer(VADriverContextP ctx,
1938 VASurfaceID surface,
1939 void **buffer, /* out */
1940 unsigned int *stride) /* out */
1943 return VA_STATUS_ERROR_UNIMPLEMENTED;
/* Driver bring-up: initialize the Intel DRI layer, pick the per-GPU
 * codec vtable from the PCI device id, create the render batchbuffer,
 * and initialize the post-processing and render subsystems. Any step
 * failing aborts with VA_STATUS_ERROR_UNKNOWN. */
1947 i965_Init(VADriverContextP ctx)
1949 struct i965_driver_data *i965 = i965_driver_data(ctx);
1951 if (intel_driver_init(ctx) == False)
1952 return VA_STATUS_ERROR_UNKNOWN;
/* Select the hardware codec table for this GPU generation; an
 * unrecognized device id falls through to the error return below. */
1954 if (IS_G4X(i965->intel.device_id))
1955 i965->codec_info = &g4x_hw_codec_info;
1956 else if (IS_IRONLAKE(i965->intel.device_id))
1957 i965->codec_info = &ironlake_hw_codec_info;
1958 else if (IS_GEN6(i965->intel.device_id))
1959 i965->codec_info = &gen6_hw_codec_info;
1960 else if (IS_GEN7(i965->intel.device_id))
1961 i965->codec_info = &gen7_hw_codec_info;
1963 return VA_STATUS_ERROR_UNKNOWN;
1965 i965->batch = intel_batchbuffer_new(&i965->intel, I915_EXEC_RENDER);
1967 if (i965_post_processing_init(ctx) == False)
1968 return VA_STATUS_ERROR_UNKNOWN;
1970 if (i965_render_init(ctx) == False)
1971 return VA_STATUS_ERROR_UNKNOWN;
1973 _i965InitMutex(&i965->render_mutex);
1975 return VA_STATUS_SUCCESS;
/* Walk every live object in `heap`, applying the destructor `func` to
 * each (the loop body invoking func is elided in this view), then tear
 * down the heap itself. */
1979 i965_destroy_heap(struct object_heap *heap,
1980 void (*func)(struct object_heap *heap, struct object_base *object))
1982 struct object_base *object;
1983 object_heap_iterator iter;
1985 object = object_heap_first(heap, &iter);
1991 object = object_heap_next(heap, &iter);
1994 object_heap_destroy(heap);
/* Forward declaration: the error path below tears down via DestroyImage. */
1999 i965_DestroyImage(VADriverContextP ctx, VAImageID image);
/* vaCreateImage: allocate an image object, compute the plane layout for
 * the requested fourcc, and back it with a VAImageBufferType buffer.
 * On failure after allocation, the partially-built image is released
 * through i965_DestroyImage(). */
2002 i965_CreateImage(VADriverContextP ctx,
2003 VAImageFormat *format,
2006 VAImage *out_image) /* out */
2008 struct i965_driver_data *i965 = i965_driver_data(ctx);
2009 struct object_image *obj_image;
2010 VAStatus va_status = VA_STATUS_ERROR_OPERATION_FAILED;
2012 unsigned int width2, height2, size2, size;
2014 out_image->image_id = VA_INVALID_ID;
2015 out_image->buf = VA_INVALID_ID;
2017 image_id = NEW_IMAGE_ID();
2018 if (image_id == VA_INVALID_ID)
2019 return VA_STATUS_ERROR_ALLOCATION_FAILED;
2021 obj_image = IMAGE(image_id);
2023 return VA_STATUS_ERROR_ALLOCATION_FAILED;
2024 obj_image->bo = NULL;
2025 obj_image->palette = NULL;
2026 obj_image->derived_surface = VA_INVALID_ID;
2028 VAImage * const image = &obj_image->image;
2029 image->image_id = image_id;
2030 image->buf = VA_INVALID_ID;
/* Luma size plus rounded-up half-resolution chroma dimensions. */
2032 size = width * height;
2033 width2 = (width + 1) / 2;
2034 height2 = (height + 1) / 2;
2035 size2 = width2 * height2;
2037 image->num_palette_entries = 0;
2038 image->entry_bytes = 0;
2039 memset(image->component_order, 0, sizeof(image->component_order));
/* Fill pitches/offsets/data_size per fourcc. */
2041 switch (format->fourcc) {
2042 case VA_FOURCC('I','A','4','4'):
2043 case VA_FOURCC('A','I','4','4'):
2044 image->num_planes = 1;
2045 image->pitches[0] = width;
2046 image->offsets[0] = 0;
2047 image->data_size = image->offsets[0] + image->pitches[0] * height;
2048 image->num_palette_entries = 16;
2049 image->entry_bytes = 3;
2050 image->component_order[0] = 'R';
2051 image->component_order[1] = 'G';
2052 image->component_order[2] = 'B';
2054 case VA_FOURCC('A','R','G','B'):
2055 case VA_FOURCC('A','B','G','R'):
2056 case VA_FOURCC('B','G','R','A'):
2057 case VA_FOURCC('R','G','B','A'):
2058 image->num_planes = 1;
2059 image->pitches[0] = width * 4;
2060 image->offsets[0] = 0;
2061 image->data_size = image->offsets[0] + image->pitches[0] * height;
2063 case VA_FOURCC('Y','V','1','2'):
2064 image->num_planes = 3;
2065 image->pitches[0] = width;
2066 image->offsets[0] = 0;
2067 image->pitches[1] = width2;
2068 image->offsets[1] = size + size2;
2069 image->pitches[2] = width2;
2070 image->offsets[2] = size;
2071 image->data_size = size + 2 * size2;
2073 case VA_FOURCC('I','4','2','0'):
2074 image->num_planes = 3;
2075 image->pitches[0] = width;
2076 image->offsets[0] = 0;
2077 image->pitches[1] = width2;
2078 image->offsets[1] = size;
2079 image->pitches[2] = width2;
2080 image->offsets[2] = size + size2;
2081 image->data_size = size + 2 * size2;
2083 case VA_FOURCC('N','V','1','2'):
2084 image->num_planes = 2;
2085 image->pitches[0] = width;
2086 image->offsets[0] = 0;
2087 image->pitches[1] = width;
2088 image->offsets[1] = size;
2089 image->data_size = size + 2 * size2;
2095 va_status = i965_CreateBuffer(ctx, 0, VAImageBufferType,
2096 image->data_size, 1, NULL, &image->buf);
2097 if (va_status != VA_STATUS_SUCCESS)
2100 obj_image->bo = BUFFER(image->buf)->buffer_store->bo;
2101 dri_bo_reference(obj_image->bo);
2103 if (image->num_palette_entries > 0 && image->entry_bytes > 0) {
/* Fix: allocate per-entry element size, not sizeof the palette
 * *pointer* — sizeof(obj_image->palette) over-allocated on LP64. */
2104 obj_image->palette = malloc(image->num_palette_entries * sizeof(*obj_image->palette));
2105 if (!obj_image->palette)
2109 image->image_id = image_id;
2110 image->format = *format;
2111 image->width = width;
2112 image->height = height;
2114 *out_image = *image;
2115 return VA_STATUS_SUCCESS;
/* Error path: release the half-constructed image and its buffer. */
2118 i965_DestroyImage(ctx, image_id);
/* Allocate (once) the backing bo for a surface and compute its plane
 * layout. If a bo already exists, just assert the requested fourcc and
 * subsampling match what was allocated. Tiled and linear paths compute
 * layout differently: tiled surfaces use a shared full-width chroma
 * pitch and Y-tiling; linear surfaces use per-fourcc packed layouts. */
2123 i965_check_alloc_surface_bo(VADriverContextP ctx,
2124 struct object_surface *obj_surface,
2126 unsigned int fourcc,
2127 unsigned int subsampling)
2129 struct i965_driver_data *i965 = i965_driver_data(ctx);
2130 int region_width, region_height;
2132 if (obj_surface->bo) {
2133 assert(obj_surface->fourcc);
2134 assert(obj_surface->fourcc == fourcc);
2135 assert(obj_surface->subsampling == subsampling);
2139 obj_surface->x_cb_offset = 0; /* X offset is always 0 */
2140 obj_surface->x_cr_offset = 0;
/* Tiled path: only NV12/IMC1/IMC3 are supported; width is padded to
 * 128 and height to 32 to satisfy Y-tiling constraints. */
2143 assert(fourcc == VA_FOURCC('N', 'V', '1', '2') ||
2144 fourcc == VA_FOURCC('I', 'M', 'C', '1') ||
2145 fourcc == VA_FOURCC('I', 'M', 'C', '3'));
2147 obj_surface->width = ALIGN(obj_surface->orig_width, 128);
2148 obj_surface->height = ALIGN(obj_surface->orig_height, 32);
2149 obj_surface->cb_cr_pitch = obj_surface->width;
2150 region_width = obj_surface->width;
2151 region_height = obj_surface->height;
2153 if (fourcc == VA_FOURCC('N', 'V', '1', '2')) {
2154 assert(subsampling == SUBSAMPLE_YUV420);
2155 obj_surface->y_cb_offset = obj_surface->height;
2156 obj_surface->y_cr_offset = obj_surface->height;
2157 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2158 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2159 region_height = obj_surface->height + ALIGN(obj_surface->cb_cr_height, 32);
2160 } else if (fourcc == VA_FOURCC('I', 'M', 'C', '1') ||
2161 fourcc == VA_FOURCC('I', 'M', 'C', '3')) {
/* IMC1/IMC3 (planar, used by JPEG): chroma dimensions depend on the
 * subsampling mode. */
2162 switch (subsampling) {
2163 case SUBSAMPLE_YUV400:
2164 obj_surface->cb_cr_width = 0;
2165 obj_surface->cb_cr_height = 0;
2168 case SUBSAMPLE_YUV420:
2169 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2170 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2173 case SUBSAMPLE_YUV422H:
2174 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2175 obj_surface->cb_cr_height = obj_surface->orig_height;
2178 case SUBSAMPLE_YUV422V:
2179 obj_surface->cb_cr_width = obj_surface->orig_width;
2180 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2183 case SUBSAMPLE_YUV444:
2184 obj_surface->cb_cr_width = obj_surface->orig_width;
2185 obj_surface->cb_cr_height = obj_surface->orig_height;
2188 case SUBSAMPLE_YUV411:
2189 obj_surface->cb_cr_width = obj_surface->orig_width / 4;
2190 obj_surface->cb_cr_height = obj_surface->orig_height;
2198 region_height = obj_surface->height + ALIGN(obj_surface->cb_cr_height, 32) * 2;
/* IMC1 stores Cr before Cb; IMC3 (and default) the reverse. */
2200 if (fourcc == VA_FOURCC('I', 'M', 'C', '1')) {
2201 obj_surface->y_cr_offset = obj_surface->height;
2202 obj_surface->y_cb_offset = obj_surface->y_cr_offset + ALIGN(obj_surface->cb_cr_height, 32);
2204 obj_surface->y_cb_offset = obj_surface->height;
2205 obj_surface->y_cr_offset = obj_surface->y_cb_offset + ALIGN(obj_surface->cb_cr_height, 32);
/* Linear path: IMC1/IMC3 not supported, YUV420 only. */
2209 assert(fourcc != VA_FOURCC('I', 'M', 'C', '1') &&
2210 fourcc != VA_FOURCC('I', 'M', 'C', '3'));
2211 assert(subsampling == SUBSAMPLE_YUV420);
2213 region_width = obj_surface->width;
2214 region_height = obj_surface->height;
2217 case VA_FOURCC('N', 'V', '1', '2'):
2218 obj_surface->y_cb_offset = obj_surface->height;
2219 obj_surface->y_cr_offset = obj_surface->height;
2220 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2221 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2222 obj_surface->cb_cr_pitch = obj_surface->width;
2223 region_height = obj_surface->height + obj_surface->height / 2;
2226 case VA_FOURCC('Y', 'V', '1', '2'):
2227 case VA_FOURCC('I', '4', '2', '0'):
/* YV12 and I420 differ only in U/V plane order. */
2228 if (fourcc == VA_FOURCC('Y', 'V', '1', '2')) {
2229 obj_surface->y_cr_offset = obj_surface->height;
2230 obj_surface->y_cb_offset = obj_surface->height + obj_surface->height / 4;
2232 obj_surface->y_cb_offset = obj_surface->height;
2233 obj_surface->y_cr_offset = obj_surface->height + obj_surface->height / 4;
2236 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2237 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2238 obj_surface->cb_cr_pitch = obj_surface->width / 2;
2239 region_height = obj_surface->height + obj_surface->height / 2;
/* Total bo size, page-aligned. */
2248 obj_surface->size = ALIGN(region_width * region_height, 0x1000);
2251 uint32_t tiling_mode = I915_TILING_Y; /* always uses Y-tiled format */
2252 unsigned long pitch;
2254 obj_surface->bo = drm_intel_bo_alloc_tiled(i965->intel.bufmgr,
2262 assert(tiling_mode == I915_TILING_Y);
2263 assert(pitch == obj_surface->width);
2265 obj_surface->bo = dri_bo_alloc(i965->intel.bufmgr,
/* Record what was allocated so later callers can be validated. */
2271 obj_surface->fourcc = fourcc;
2272 obj_surface->subsampling = subsampling;
2273 assert(obj_surface->bo);
/*
 * i965_DeriveImage — expose a surface's backing buffer object directly as
 * a VAImage (zero-copy access for the client).  If the surface has no bo
 * yet, one is allocated first using a guessed fourcc.  A new image object
 * is created whose VA buffer aliases the surface bo, and per-plane
 * pitches/offsets are filled in from the surface layout.  On success the
 * derived image is returned through out_image and the surface is marked
 * SURFACE_DERIVED; on failure the partially built image is destroyed.
 * NOTE(review): this listing is elided — some braces, NULL checks and the
 * error label between the numbered lines are not shown here.
 */
2276 VAStatus i965_DeriveImage(VADriverContextP ctx,
2277 VASurfaceID surface,
2278 VAImage *out_image) /* out */
2280 struct i965_driver_data *i965 = i965_driver_data(ctx);
2281 struct object_image *obj_image;
2282 struct object_surface *obj_surface;
2284 unsigned int w_pitch, h_pitch;
2285 VAStatus va_status = VA_STATUS_ERROR_OPERATION_FAILED;
2287 out_image->image_id = VA_INVALID_ID;
2288 obj_surface = SURFACE(surface);
2291 return VA_STATUS_ERROR_INVALID_SURFACE;
/* Lazily allocate the surface bo; decoded surfaces normally already have
   one.  YV12 is the fallback guess for an untouched surface. */
2293 if (!obj_surface->bo) {
2294 unsigned int is_tiled = 0;
2295 unsigned int fourcc = VA_FOURCC('Y', 'V', '1', '2');
2296 i965_guess_surface_format(ctx, surface, &fourcc, &is_tiled);
2297 i965_check_alloc_surface_bo(ctx, obj_surface, is_tiled, fourcc, SUBSAMPLE_YUV420);
2300 assert(obj_surface->fourcc);
2302 w_pitch = obj_surface->width;
2303 h_pitch = obj_surface->height;
2305 image_id = NEW_IMAGE_ID();
2307 if (image_id == VA_INVALID_ID)
2308 return VA_STATUS_ERROR_ALLOCATION_FAILED;
2310 obj_image = IMAGE(image_id);
2313 return VA_STATUS_ERROR_ALLOCATION_FAILED;
2315 obj_image->bo = NULL;
2316 obj_image->palette = NULL;
2317 obj_image->derived_surface = VA_INVALID_ID;
2319 VAImage * const image = &obj_image->image;
2321 memset(image, 0, sizeof(*image));
2322 image->image_id = image_id;
2323 image->buf = VA_INVALID_ID;
2324 image->num_palette_entries = 0;
2325 image->entry_bytes = 0;
2326 image->width = obj_surface->orig_width;
2327 image->height = obj_surface->orig_height;
2328 image->data_size = obj_surface->size;
2330 image->format.fourcc = obj_surface->fourcc;
2331 image->format.byte_order = VA_LSB_FIRST;
2332 image->format.bits_per_pixel = 12; /* all formats below are 4:2:0 = 12 bpp */
/* Plane layout mirrors the surface allocation: each chroma offset is the
   luma pitch (w_pitch) times the plane's starting row (y_cb/y_cr_offset). */
2334 switch (image->format.fourcc) {
2335 case VA_FOURCC('Y', 'V', '1', '2'):
2336 image->num_planes = 3;
2337 image->pitches[0] = w_pitch; /* Y */
2338 image->offsets[0] = 0;
2339 image->pitches[1] = obj_surface->cb_cr_pitch; /* V */
2340 image->offsets[1] = w_pitch * obj_surface->y_cr_offset;
2341 image->pitches[2] = obj_surface->cb_cr_pitch; /* U */
2342 image->offsets[2] = w_pitch * obj_surface->y_cb_offset;
2345 case VA_FOURCC('N', 'V', '1', '2'):
2346 image->num_planes = 2;
2347 image->pitches[0] = w_pitch; /* Y */
2348 image->offsets[0] = 0;
2349 image->pitches[1] = obj_surface->cb_cr_pitch; /* UV */
2350 image->offsets[1] = w_pitch * obj_surface->y_cb_offset;
2353 case VA_FOURCC('I', '4', '2', '0'):
2354 image->num_planes = 3;
2355 image->pitches[0] = w_pitch; /* Y */
2356 image->offsets[0] = 0;
2357 image->pitches[1] = obj_surface->cb_cr_pitch; /* U */
2358 image->offsets[1] = w_pitch * obj_surface->y_cb_offset;
2359 image->pitches[2] = obj_surface->cb_cr_pitch; /* V */
2360 image->offsets[2] = w_pitch * obj_surface->y_cr_offset;
/* Wrap the surface bo in a VA buffer so vaMapBuffer() works on the image. */
2367 va_status = i965_create_buffer_internal(ctx, 0, VAImageBufferType,
2368 obj_surface->size, 1, NULL, obj_surface->bo, &image->buf);
2369 if (va_status != VA_STATUS_SUCCESS)
2372 obj_image->bo = BUFFER(image->buf)->buffer_store->bo;
2373 dri_bo_reference(obj_image->bo);
2375 if (image->num_palette_entries > 0 && image->entry_bytes > 0) {
/* NOTE(review): sizeof(obj_image->palette) is the size of the POINTER,
   not of a palette entry; sizeof(*obj_image->palette) was probably
   intended.  On LP64 this over-allocates (8 vs 4 bytes/entry), so it is
   benign but misleading. */
2376 obj_image->palette = malloc(image->num_palette_entries * sizeof(obj_image->palette));
2377 if (!obj_image->palette) {
2378 va_status = VA_STATUS_ERROR_ALLOCATION_FAILED;
2383 *out_image = *image;
2384 obj_surface->flags |= SURFACE_DERIVED;
2385 obj_image->derived_surface = surface;
2387 return VA_STATUS_SUCCESS;
/* Error path: tear down the partially constructed image object. */
2390 i965_DestroyImage(ctx, image_id);
/*
 * i965_destroy_image — object-heap destructor callback for image objects:
 * just releases the heap slot (the bo, VA buffer and palette are freed by
 * i965_DestroyImage before this is called).
 */
2395 i965_destroy_image(struct object_heap *heap, struct object_base *obj)
2397 object_heap_free(heap, obj);
/*
 * i965_DestroyImage — vaDestroyImage entry point: drop the image's bo
 * reference, destroy its VA buffer, free any palette, detach it from a
 * derived surface, and release the heap object.  An unknown image id is
 * treated as success (nothing to destroy).
 */
2402 i965_DestroyImage(VADriverContextP ctx, VAImageID image)
2404 struct i965_driver_data *i965 = i965_driver_data(ctx);
2405 struct object_image *obj_image = IMAGE(image);
2406 struct object_surface *obj_surface;
2409 return VA_STATUS_SUCCESS; /* unknown image id: nothing to do */
2411 dri_bo_unreference(obj_image->bo);
2412 obj_image->bo = NULL;
2414 if (obj_image->image.buf != VA_INVALID_ID) {
2415 i965_DestroyBuffer(ctx, obj_image->image.buf);
2416 obj_image->image.buf = VA_INVALID_ID;
2419 if (obj_image->palette) { /* NOTE: free(NULL) is a no-op; guard is redundant */
2420 free(obj_image->palette);
2421 obj_image->palette = NULL;
/* If this image was derived from a surface, clear the derived flag so the
   surface bo can be recycled again. */
2424 obj_surface = SURFACE(obj_image->derived_surface);
2427 obj_surface->flags &= ~SURFACE_DERIVED;
2430 i965_destroy_image(&i965->image_heap, (struct object_base *)obj_image);
2432 return VA_STATUS_SUCCESS;
2436 * pointer to an array holding the palette data. The size of the array is
2437 * num_palette_entries * entry_bytes in size. The order of the components
2438 * in the palette is described by the component_order in VASubpicture struct
2441 i965_SetImagePalette(VADriverContextP ctx,
2443 unsigned char *palette)
2445 struct i965_driver_data *i965 = i965_driver_data(ctx);
2448 struct object_image *obj_image = IMAGE(image);
2450 return VA_STATUS_ERROR_INVALID_IMAGE;
2452 if (!obj_image->palette)
2453 return VA_STATUS_ERROR_ALLOCATION_FAILED; /* XXX: unpaletted/error */
/* Pack each 3-byte triple into one 0x00RRGGBB-style word.
   NOTE(review): the indexing assumes entry_bytes == 3 — confirm against
   the image formats that carry a palette. */
2455 for (i = 0; i < obj_image->image.num_palette_entries; i++)
2456 obj_image->palette[i] = (((unsigned int)palette[3*i + 0] << 16) |
2457 ((unsigned int)palette[3*i + 1] << 8) |
2458 (unsigned int)palette[3*i + 2]);
2459 return VA_STATUS_SUCCESS;
/*
 * memcpy_pic — copy a rectangular pixel region row by row: `len` bytes per
 * row for `height` rows, from src (stride src_stride) to dst (stride
 * dst_stride).  Strides may differ, which is the whole point vs. a single
 * memcpy.
 * NOTE(review): the per-row pointer advances (dst += dst_stride;
 * src += src_stride;) are elided from this listing.
 */
2463 memcpy_pic(uint8_t *dst, unsigned int dst_stride,
2464 const uint8_t *src, unsigned int src_stride,
2465 unsigned int len, unsigned int height)
2469 for (i = 0; i < height; i++) {
2470 memcpy(dst, src, len);
/*
 * get_image_i420 — copy the `rect` region of a planar 4:2:0 surface into a
 * client I420 or YV12 image.  The destination U/V plane indices are swapped
 * when the image fourcc differs from the surface fourcc (I420 and YV12
 * differ only in chroma plane order), so one copy loop serves both.
 */
2477 get_image_i420(struct object_image *obj_image, uint8_t *image_data,
2478 struct object_surface *obj_surface,
2479 const VARectangle *rect)
2481 uint8_t *dst[3], *src[3];
/* Same fourcc => planes map 1:1; different fourcc => swap U and V. */
2483 const int U = obj_image->image.format.fourcc == obj_surface->fourcc ? 1 : 2;
2484 const int V = obj_image->image.format.fourcc == obj_surface->fourcc ? 2 : 1;
2485 unsigned int tiling, swizzle;
2487 if (!obj_surface->bo)
2490 assert(obj_surface->fourcc);
2491 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
/* Tiled bos must be mapped through the GTT to get a linear CPU view. */
2493 if (tiling != I915_TILING_NONE)
2494 drm_intel_gem_bo_map_gtt(obj_surface->bo);
2496 dri_bo_map(obj_surface->bo, 0);
2498 if (!obj_surface->bo->virtual)
2501 /* Dest VA image has either I420 or YV12 format.
2502 Source VA surface alway has I420 format */
2503 dst[Y] = image_data + obj_image->image.offsets[Y];
2504 src[0] = (uint8_t *)obj_surface->bo->virtual;
2505 dst[U] = image_data + obj_image->image.offsets[U];
2506 src[1] = src[0] + obj_surface->width * obj_surface->height;
2507 dst[V] = image_data + obj_image->image.offsets[V];
2508 src[2] = src[1] + (obj_surface->width / 2) * (obj_surface->height / 2);
/* Y plane: full-resolution copy of the requested rectangle. */
2511 dst[Y] += rect->y * obj_image->image.pitches[Y] + rect->x;
2512 src[0] += rect->y * obj_surface->width + rect->x;
2513 memcpy_pic(dst[Y], obj_image->image.pitches[Y],
2514 src[0], obj_surface->width,
2515 rect->width, rect->height);
/* U plane: half resolution in both dimensions. */
2518 dst[U] += (rect->y / 2) * obj_image->image.pitches[U] + rect->x / 2;
2519 src[1] += (rect->y / 2) * obj_surface->width / 2 + rect->x / 2;
2520 memcpy_pic(dst[U], obj_image->image.pitches[U],
2521 src[1], obj_surface->width / 2,
2522 rect->width / 2, rect->height / 2);
/* V plane: half resolution in both dimensions. */
2525 dst[V] += (rect->y / 2) * obj_image->image.pitches[V] + rect->x / 2;
2526 src[2] += (rect->y / 2) * obj_surface->width / 2 + rect->x / 2;
2527 memcpy_pic(dst[V], obj_image->image.pitches[V],
2528 src[2], obj_surface->width / 2,
2529 rect->width / 2, rect->height / 2);
2531 if (tiling != I915_TILING_NONE)
2532 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
2534 dri_bo_unmap(obj_surface->bo);
/*
 * get_image_nv12 — copy the `rect` region of an NV12 surface (full-res Y
 * plane followed by a half-height interleaved UV plane) into a client
 * NV12 image.
 */
2538 get_image_nv12(struct object_image *obj_image, uint8_t *image_data,
2539 struct object_surface *obj_surface,
2540 const VARectangle *rect)
2542 uint8_t *dst[2], *src[2];
2543 unsigned int tiling, swizzle;
2545 if (!obj_surface->bo)
2548 assert(obj_surface->fourcc);
2549 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
/* Tiled bos must be mapped through the GTT to get a linear CPU view. */
2551 if (tiling != I915_TILING_NONE)
2552 drm_intel_gem_bo_map_gtt(obj_surface->bo);
2554 dri_bo_map(obj_surface->bo, 0);
2556 if (!obj_surface->bo->virtual)
2559 /* Both dest VA image and source surface have NV12 format */
2560 dst[0] = image_data + obj_image->image.offsets[0];
2561 src[0] = (uint8_t *)obj_surface->bo->virtual;
2562 dst[1] = image_data + obj_image->image.offsets[1];
2563 src[1] = src[0] + obj_surface->width * obj_surface->height;
/* Y plane: full-resolution copy of the requested rectangle. */
2566 dst[0] += rect->y * obj_image->image.pitches[0] + rect->x;
2567 src[0] += rect->y * obj_surface->width + rect->x;
2568 memcpy_pic(dst[0], obj_image->image.pitches[0],
2569 src[0], obj_surface->width,
2570 rect->width, rect->height);
/* UV plane: half height, full-width rows; x is rounded DOWN to an even
   offset (rect->x & -2) so interleaved U/V byte pairs stay aligned. */
2573 dst[1] += (rect->y / 2) * obj_image->image.pitches[1] + (rect->x & -2);
2574 src[1] += (rect->y / 2) * obj_surface->width + (rect->x & -2);
2575 memcpy_pic(dst[1], obj_image->image.pitches[1],
2576 src[1], obj_surface->width,
2577 rect->width, rect->height / 2);
2579 if (tiling != I915_TILING_NONE)
2580 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
2582 dri_bo_unmap(obj_surface->bo);
/*
 * i965_sw_getimage — CPU fallback for vaGetImage: validates surface,
 * image and region bounds, maps the image buffer, then copies pixels with
 * the plane copier matching the image fourcc.  The image format must
 * agree with the render state's native chroma layout
 * (interleaved_uv <=> NV12, planar <=> I420/YV12), else the operation
 * fails.
 */
2586 i965_sw_getimage(VADriverContextP ctx,
2587 VASurfaceID surface,
2588 int x, /* coordinates of the upper left source pixel */
2590 unsigned int width, /* width and height of the region */
2591 unsigned int height,
2594 struct i965_driver_data *i965 = i965_driver_data(ctx);
2595 struct i965_render_state *render_state = &i965->render_state;
2597 struct object_surface *obj_surface = SURFACE(surface);
2599 return VA_STATUS_ERROR_INVALID_SURFACE;
2601 struct object_image *obj_image = IMAGE(image);
2603 return VA_STATUS_ERROR_INVALID_IMAGE;
2606 return VA_STATUS_ERROR_INVALID_PARAMETER;
/* The requested region must lie inside both the surface and the image. */
2607 if (x + width > obj_surface->orig_width ||
2608 y + height > obj_surface->orig_height)
2609 return VA_STATUS_ERROR_INVALID_PARAMETER;
2610 if (x + width > obj_image->image.width ||
2611 y + height > obj_image->image.height)
2612 return VA_STATUS_ERROR_INVALID_PARAMETER;
2615 void *image_data = NULL;
2617 va_status = i965_MapBuffer(ctx, obj_image->image.buf, &image_data);
2618 if (va_status != VA_STATUS_SUCCESS)
2625 rect.height = height;
2627 switch (obj_image->image.format.fourcc) {
2628 case VA_FOURCC('Y','V','1','2'):
2629 case VA_FOURCC('I','4','2','0'):
2630 /* I420 is native format for MPEG-2 decoded surfaces */
2631 if (render_state->interleaved_uv)
2632 goto operation_failed;
2633 get_image_i420(obj_image, image_data, obj_surface, &rect);
2635 case VA_FOURCC('N','V','1','2'):
2636 /* NV12 is native format for H.264 decoded surfaces */
2637 if (!render_state->interleaved_uv)
2638 goto operation_failed;
2639 get_image_nv12(obj_image, image_data, obj_surface, &rect);
/* Unsupported fourcc (and the goto target above) end up here. */
2643 va_status = VA_STATUS_ERROR_OPERATION_FAILED;
2647 i965_UnmapBuffer(ctx, obj_image->image.buf);
/*
 * i965_hw_getimage — GPU path for vaGetImage: after the same parameter
 * validation as the software path, the surface-to-image copy (including
 * any format conversion) is delegated to i965_image_processing.  A
 * surface without a bo has no pixels to read and succeeds trivially.
 */
2652 i965_hw_getimage(VADriverContextP ctx,
2653 VASurfaceID surface,
2654 int x, /* coordinates of the upper left source pixel */
2656 unsigned int width, /* width and height of the region */
2657 unsigned int height,
2660 struct i965_driver_data *i965 = i965_driver_data(ctx);
2661 struct i965_surface src_surface;
2662 struct i965_surface dst_surface;
2665 struct object_surface *obj_surface = SURFACE(surface);
2666 struct object_image *obj_image = IMAGE(image);
2669 return VA_STATUS_ERROR_INVALID_SURFACE;
2672 return VA_STATUS_ERROR_INVALID_IMAGE;
2675 return VA_STATUS_ERROR_INVALID_PARAMETER;
/* The requested region must lie inside both the surface and the image. */
2676 if (x + width > obj_surface->orig_width ||
2677 y + height > obj_surface->orig_height)
2678 return VA_STATUS_ERROR_INVALID_PARAMETER;
2679 if (x + width > obj_image->image.width ||
2680 y + height > obj_image->image.height)
2681 return VA_STATUS_ERROR_INVALID_PARAMETER;
2683 if (!obj_surface->bo)
2684 return VA_STATUS_SUCCESS;
2689 rect.height = height;
/* Describe source (surface) and destination (image) for the VPP copy. */
2691 src_surface.id = surface;
2692 src_surface.type = I965_SURFACE_TYPE_SURFACE;
2693 src_surface.flags = I965_SURFACE_FLAG_FRAME;
2695 dst_surface.id = image;
2696 dst_surface.type = I965_SURFACE_TYPE_IMAGE;
2697 dst_surface.flags = I965_SURFACE_FLAG_FRAME;
2699 va_status = i965_image_processing(ctx,
/*
 * i965_GetImage — vaGetImage entry point: dispatch to the GPU copy path
 * when the hardware supports accelerated GetImage, otherwise fall back to
 * the CPU map-and-memcpy implementation.
 */
2710 i965_GetImage(VADriverContextP ctx,
2711 VASurfaceID surface,
2712 int x, /* coordinates of the upper left source pixel */
2714 unsigned int width, /* width and height of the region */
2715 unsigned int height,
2718 struct i965_driver_data * const i965 = i965_driver_data(ctx);
2721 if (HAS_ACCELERATED_GETIMAGE(i965))
2722 va_status = i965_hw_getimage(ctx,
2728 va_status = i965_sw_getimage(ctx,
/*
 * i965_PutSurface — vaPutSurface entry point (DRI2 only): resolve the DRI
 * drawable and its current rendering buffer, rebuild the cached
 * destination region when the underlying DRI2 buffer changed, render the
 * surface (plus any associated subpicture) with the requested scaling and
 * deinterlacing flags, then swap buffers.  A surface that is displayed
 * and no longer referenced gives up its bo afterwards.
 * The clip list (cliprects/number_cliprects) is accepted but not used in
 * the code visible here.
 */
2738 i965_PutSurface(VADriverContextP ctx,
2739 VASurfaceID surface,
2740 void *draw, /* X Drawable */
2743 unsigned short srcw,
2744 unsigned short srch,
2747 unsigned short destw,
2748 unsigned short desth,
2749 VARectangle *cliprects, /* client supplied clip list */
2750 unsigned int number_cliprects, /* number of clip rects in the clip list */
2751 unsigned int flags) /* de-interlacing flags */
2753 struct i965_driver_data *i965 = i965_driver_data(ctx);
2754 struct dri_state *dri_state = (struct dri_state *)ctx->dri_state;
2755 struct i965_render_state *render_state = &i965->render_state;
2756 struct dri_drawable *dri_drawable;
2757 union dri_buffer *buffer;
2758 struct intel_region *dest_region;
2759 struct object_surface *obj_surface;
2760 VARectangle src_rect, dst_rect;
2763 Bool new_region = False;
2766 /* Currently don't support DRI1 */
2767 if (dri_state->driConnectedFlag != VA_DRI2)
2768 return VA_STATUS_ERROR_UNKNOWN;
2770 /* Some broken sources such as H.264 conformance case FM2_SVA_C
2773 obj_surface = SURFACE(surface);
2774 if (!obj_surface || !obj_surface->bo)
2775 return VA_STATUS_SUCCESS;
2777 _i965LockMutex(&i965->render_mutex);
2779 dri_drawable = dri_get_drawable(ctx, (Drawable)draw);
2780 assert(dri_drawable);
2782 buffer = dri_get_rendering_buffer(ctx, dri_drawable);
2785 dest_region = render_state->draw_region;
/* Detect whether the DRI2 buffer was replaced since last frame by
   comparing the flink name of the cached bo with the current buffer. */
2788 assert(dest_region->bo);
2789 dri_bo_flink(dest_region->bo, &name);
2791 if (buffer->dri2.name != name) {
2793 dri_bo_unreference(dest_region->bo);
2796 dest_region = (struct intel_region *)calloc(1, sizeof(*dest_region));
2797 assert(dest_region);
2798 render_state->draw_region = dest_region;
/* (Re)describe the destination region from drawable + DRI2 buffer. */
2803 dest_region->x = dri_drawable->x;
2804 dest_region->y = dri_drawable->y;
2805 dest_region->width = dri_drawable->width;
2806 dest_region->height = dri_drawable->height;
2807 dest_region->cpp = buffer->dri2.cpp;
2808 dest_region->pitch = buffer->dri2.pitch;
2810 dest_region->bo = intel_bo_gem_create_from_name(i965->intel.bufmgr, "rendering buffer", buffer->dri2.name);
2811 assert(dest_region->bo);
2813 ret = dri_bo_get_tiling(dest_region->bo, &(dest_region->tiling), &(dest_region->swizzle));
/* Translate VA scaling / field flags into post-processing flags.
   NOTE(review): I965_PP_FLAG_DEINTERLACING_TOP_FISRT is misspelled
   ("FISRT") at its definition site; renaming must happen there first. */
2817 if ((flags & VA_FILTER_SCALING_MASK) == VA_FILTER_SCALING_NL_ANAMORPHIC)
2818 pp_flag |= I965_PP_FLAG_AVS;
2820 if (flags & VA_TOP_FIELD)
2821 pp_flag |= I965_PP_FLAG_DEINTERLACING_TOP_FISRT;
2822 else if (flags & VA_BOTTOM_FIELD)
2823 pp_flag |= I965_PP_FLAG_DEINTERLACING_BOTTOM_FIRST;
2827 src_rect.width = srcw;
2828 src_rect.height = srch;
2832 dst_rect.width = destw;
2833 dst_rect.height = desth;
2835 intel_render_put_surface(ctx, surface, &src_rect, &dst_rect, pp_flag);
2837 if(obj_surface->subpic != VA_INVALID_ID) {
2838 intel_render_put_subpicture(ctx, surface, &src_rect, &dst_rect);
2841 dri_swap_buffer(ctx, dri_drawable);
2842 obj_surface->flags |= SURFACE_DISPLAYED;
/* A surface that has been displayed and carries no other reference bits
   can release its bo and private data now. */
2844 if ((obj_surface->flags & SURFACE_ALL_MASK) == SURFACE_DISPLAYED) {
2845 dri_bo_unreference(obj_surface->bo);
2846 obj_surface->bo = NULL;
2847 obj_surface->flags &= ~SURFACE_REF_DIS_MASK;
2849 if (obj_surface->free_private_data)
2850 obj_surface->free_private_data(&obj_surface->private_data);
2853 _i965UnlockMutex(&i965->render_mutex);
2855 return VA_STATUS_SUCCESS;
/*
 * i965_Terminate — vaTerminate entry point: free the batchbuffer and
 * render mutex, shut down the render / post-processing / intel driver
 * layers, destroy all object heaps, then free the driver data itself.
 */
2859 i965_Terminate(VADriverContextP ctx)
2861 struct i965_driver_data *i965 = i965_driver_data(ctx);
2864 intel_batchbuffer_free(i965->batch);
2866 _i965DestroyMutex(&i965->render_mutex);
2868 if (i965_render_terminate(ctx) == False)
2869 return VA_STATUS_ERROR_UNKNOWN;
2871 if (i965_post_processing_terminate(ctx) == False)
2872 return VA_STATUS_ERROR_UNKNOWN;
2874 if (intel_driver_terminate(ctx) == False)
2875 return VA_STATUS_ERROR_UNKNOWN;
/* NOTE(review): any early return above skips the heap teardown and the
   free of pDriverData below, leaking them on partial-failure paths. */
2877 i965_destroy_heap(&i965->buffer_heap, i965_destroy_buffer);
2878 i965_destroy_heap(&i965->image_heap, i965_destroy_image);
2879 i965_destroy_heap(&i965->subpic_heap, i965_destroy_subpic);
2880 i965_destroy_heap(&i965->surface_heap, i965_destroy_surface);
2881 i965_destroy_heap(&i965->context_heap, i965_destroy_context);
2882 i965_destroy_heap(&i965->config_heap, i965_destroy_config);
2884 free(ctx->pDriverData);
2885 ctx->pDriverData = NULL;
2887 return VA_STATUS_SUCCESS;
/*
 * i965_BufferInfo (vaBufferInfo) — report a buffer's type, per-element
 * size and element count back to the client.
 */
2892 VADriverContextP ctx, /* in */
2893 VABufferID buf_id, /* in */
2894 VABufferType *type, /* out */
2895 unsigned int *size, /* out */
2896 unsigned int *num_elements /* out */
2899 struct i965_driver_data *i965 = NULL;
2900 struct object_buffer *obj_buffer = NULL;
2902 i965 = i965_driver_data(ctx);
2903 obj_buffer = BUFFER(buf_id);
/* NOTE(review): no NULL check on obj_buffer is visible before these
   dereferences — confirm an invalid buf_id cannot reach this point. */
2905 *type = obj_buffer->type;
2906 *size = obj_buffer->size_element;
2907 *num_elements = obj_buffer->num_elements;
2909 return VA_STATUS_SUCCESS;
/*
 * i965_LockSurface (vaLockSurface) — give the client direct CPU access to
 * a surface: derive a temporary VAImage over the surface bo, map its
 * buffer, and return the fourcc, per-plane strides/offsets, the buffer
 * id and the mapped pointer.  The derived image id is remembered in
 * locked_image_id so i965_UnlockSurface can undo everything; locking an
 * already-locked surface is rejected.
 */
2914 VADriverContextP ctx, /* in */
2915 VASurfaceID surface, /* in */
2916 unsigned int *fourcc, /* out */
2917 unsigned int *luma_stride, /* out */
2918 unsigned int *chroma_u_stride, /* out */
2919 unsigned int *chroma_v_stride, /* out */
2920 unsigned int *luma_offset, /* out */
2921 unsigned int *chroma_u_offset, /* out */
2922 unsigned int *chroma_v_offset, /* out */
2923 unsigned int *buffer_name, /* out */
2924 void **buffer /* out */
2927 VAStatus vaStatus = VA_STATUS_SUCCESS;
2928 struct i965_driver_data *i965 = i965_driver_data(ctx);
2929 struct object_surface *obj_surface = NULL;
/* All out-parameters are mandatory. */
2933 assert(luma_stride);
2934 assert(chroma_u_stride);
2935 assert(chroma_v_stride);
2936 assert(luma_offset);
2937 assert(chroma_u_offset);
2938 assert(chroma_v_offset);
2939 assert(buffer_name);
2942 tmpImage.image_id = VA_INVALID_ID;
2944 obj_surface = SURFACE(surface);
2945 if (obj_surface == NULL) {
2946 // Surface is absent.
2947 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
2951 // Lock functionality is absent now.
2952 if (obj_surface->locked_image_id != VA_INVALID_ID) {
2953 // Surface is locked already.
2954 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
/* Derive a zero-copy image over the surface, then map its buffer. */
2958 vaStatus = i965_DeriveImage(
2962 if (vaStatus != VA_STATUS_SUCCESS) {
2966 obj_surface->locked_image_id = tmpImage.image_id;
2968 vaStatus = i965_MapBuffer(
2972 if (vaStatus != VA_STATUS_SUCCESS) {
/* Publish the derived image's layout to the caller. */
2976 *fourcc = tmpImage.format.fourcc;
2977 *luma_offset = tmpImage.offsets[0];
2978 *luma_stride = tmpImage.pitches[0];
2979 *chroma_u_offset = tmpImage.offsets[1];
2980 *chroma_u_stride = tmpImage.pitches[1];
2981 *chroma_v_offset = tmpImage.offsets[2];
2982 *chroma_v_stride = tmpImage.pitches[2];
2983 *buffer_name = tmpImage.buf;
2986 if (vaStatus != VA_STATUS_SUCCESS) {
/*
 * i965_UnlockSurface (vaUnlockSurface) — undo i965_LockSurface: unmap the
 * derived image's buffer, destroy the image, and clear locked_image_id.
 * Fails if the surface is unknown, not locked, or its work image was
 * already torn down behind our back.
 */
2995 VADriverContextP ctx, /* in */
2996 VASurfaceID surface /* in */
2999 VAStatus vaStatus = VA_STATUS_SUCCESS;
3000 struct i965_driver_data *i965 = i965_driver_data(ctx);
3001 struct object_image *locked_img = NULL;
3002 struct object_surface *obj_surface = NULL;
3004 obj_surface = SURFACE(surface);
3006 if (obj_surface == NULL) {
3007 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER; // Surface is absent
3010 if (obj_surface->locked_image_id == VA_INVALID_ID) {
3011 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER; // Surface is not locked
3015 locked_img = IMAGE(obj_surface->locked_image_id);
3016 if (locked_img == NULL || (locked_img->image.image_id == VA_INVALID_ID)) {
3017 // Work image was deallocated before i965_UnlockSurface()
3018 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
3022 vaStatus = i965_UnmapBuffer(
3024 locked_img->image.buf);
3025 if (vaStatus != VA_STATUS_SUCCESS) {
3029 vaStatus = i965_DestroyImage(
3031 locked_img->image.image_id);
3032 if (vaStatus != VA_STATUS_SUCCESS) {
/* Invalidate the cached id so a future vaLockSurface can succeed. */
3036 locked_img->image.image_id = VA_INVALID_ID;
3043  * Query video processing pipeline
3045 VAStatus i965_QueryVideoProcPipelineCap(
3046 VADriverContextP ctx,
3047 VAContextID context,
3048 VAProcPipelineCap *pipeline_cap /* out */
3051 struct i965_driver_data * const i965 = i965_driver_data(ctx);
/* Advertise the filters the VPP hardware offers (all bypassable), then
   pad the remaining pipeline slots with VAProcFilterNone. */
3054 if (HAS_VPP(i965)) {
3055 pipeline_cap->filter_pipeline[i] = VAProcFilterNoiseReduction;
3056 pipeline_cap->bypass[i++] = 1;
3057 pipeline_cap->filter_pipeline[i] = VAProcFilterDeinterlacing;
3058 pipeline_cap->bypass[i++] = 1;
3061 for (; i < VA_PROC_PIPELINE_MAX_NUM_FILTERS; i++) {
3062 pipeline_cap->filter_pipeline[i] = VAProcFilterNone;
3063 pipeline_cap->bypass[i] = 1;
3066 return VA_STATUS_SUCCESS;
/*
 * i965_QueryVideoProcFilterCap — report the value range for a VPP filter.
 * Only NoiseReduction publishes a range here: [0.0, 1.0], default 0.5,
 * step 1/32.  Other filter types fall through and return success with
 * the cap untouched (in the code visible here).
 */
3069 VAStatus i965_QueryVideoProcFilterCap(
3070 VADriverContextP ctx,
3071 VAContextID context,
3072 VAProcFilterType filter,
3078 if (filter == VAProcFilterNoiseReduction) {
3079 VAProcFilterCapBase *base_cap = cap;
3080 base_cap->range.min = 0.0;
3081 base_cap->range.max = 1.0;
3082 base_cap->range.default_value = 0.5;
3083 base_cap->range.step = 0.03125; /* 1.0 / 32 */
3086 return VA_STATUS_SUCCESS;
/*
 * i965_QueryVideoProcReferenceFramesCap — the VPP pipeline consumes no
 * forward or backward reference frames, so both counts are reported as 0.
 */
3089 VAStatus i965_QueryVideoProcReferenceFramesCap(
3090 VADriverContextP ctx,
3091 VAContextID context,
3092 unsigned int *num_forward_reference, /* out */
3093 unsigned int *num_backward_reference /* out */
3097 *num_forward_reference = 0;
3098 *num_backward_reference = 0;
3100 return VA_STATUS_SUCCESS;
/* Forward declaration of the entry point libva resolves at driver load. */
3104 VA_DRIVER_INIT_FUNC(VADriverContextP ctx);
/*
 * VA_DRIVER_INIT_FUNC (__vaDriverInit_*) — driver entry point: publish
 * driver limits and the dispatch vtable, allocate per-context driver
 * data, initialize the object heaps (each with a distinct id range, see
 * the *_ID_OFFSET constants at the top of the file), build the vendor
 * string, and finish with the hardware-specific i965_Init().
 */
3107 VA_DRIVER_INIT_FUNC( VADriverContextP ctx )
3109 struct VADriverVTable * const vtable = ctx->vtable;
3110 struct i965_driver_data *i965;
3113 ctx->version_major = VA_MAJOR_VERSION;
3114 ctx->version_minor = VA_MINOR_VERSION;
3115 ctx->max_profiles = I965_MAX_PROFILES;
3116 ctx->max_entrypoints = I965_MAX_ENTRYPOINTS;
3117 ctx->max_attributes = I965_MAX_CONFIG_ATTRIBUTES;
3118 ctx->max_image_formats = I965_MAX_IMAGE_FORMATS;
3119 ctx->max_subpic_formats = I965_MAX_SUBPIC_FORMATS;
3120 ctx->max_display_attributes = I965_MAX_DISPLAY_ATTRIBUTES;
3122 vtable->vaTerminate = i965_Terminate;
3123 vtable->vaQueryConfigEntrypoints = i965_QueryConfigEntrypoints;
3124 vtable->vaQueryConfigProfiles = i965_QueryConfigProfiles;
/* NOTE(review): duplicate assignment — vaQueryConfigEntrypoints was
   already set two lines above; this line is redundant and can be removed. */
3125 vtable->vaQueryConfigEntrypoints = i965_QueryConfigEntrypoints;
3126 vtable->vaQueryConfigAttributes = i965_QueryConfigAttributes;
3127 vtable->vaCreateConfig = i965_CreateConfig;
3128 vtable->vaDestroyConfig = i965_DestroyConfig;
3129 vtable->vaGetConfigAttributes = i965_GetConfigAttributes;
3130 vtable->vaCreateSurfaces = i965_CreateSurfaces;
3131 vtable->vaDestroySurfaces = i965_DestroySurfaces;
3132 vtable->vaCreateContext = i965_CreateContext;
3133 vtable->vaDestroyContext = i965_DestroyContext;
3134 vtable->vaCreateBuffer = i965_CreateBuffer;
3135 vtable->vaBufferSetNumElements = i965_BufferSetNumElements;
3136 vtable->vaMapBuffer = i965_MapBuffer;
3137 vtable->vaUnmapBuffer = i965_UnmapBuffer;
3138 vtable->vaDestroyBuffer = i965_DestroyBuffer;
3139 vtable->vaBeginPicture = i965_BeginPicture;
3140 vtable->vaRenderPicture = i965_RenderPicture;
3141 vtable->vaEndPicture = i965_EndPicture;
3142 vtable->vaSyncSurface = i965_SyncSurface;
3143 vtable->vaQuerySurfaceStatus = i965_QuerySurfaceStatus;
3144 vtable->vaPutSurface = i965_PutSurface;
3145 vtable->vaQueryImageFormats = i965_QueryImageFormats;
3146 vtable->vaCreateImage = i965_CreateImage;
3147 vtable->vaDeriveImage = i965_DeriveImage;
3148 vtable->vaDestroyImage = i965_DestroyImage;
3149 vtable->vaSetImagePalette = i965_SetImagePalette;
3150 vtable->vaGetImage = i965_GetImage;
3151 vtable->vaPutImage = i965_PutImage;
3152 vtable->vaQuerySubpictureFormats = i965_QuerySubpictureFormats;
3153 vtable->vaCreateSubpicture = i965_CreateSubpicture;
3154 vtable->vaDestroySubpicture = i965_DestroySubpicture;
3155 vtable->vaSetSubpictureImage = i965_SetSubpictureImage;
3156 vtable->vaSetSubpictureChromakey = i965_SetSubpictureChromakey;
3157 vtable->vaSetSubpictureGlobalAlpha = i965_SetSubpictureGlobalAlpha;
3158 vtable->vaAssociateSubpicture = i965_AssociateSubpicture;
3159 vtable->vaDeassociateSubpicture = i965_DeassociateSubpicture;
3160 vtable->vaQueryDisplayAttributes = i965_QueryDisplayAttributes;
3161 vtable->vaGetDisplayAttributes = i965_GetDisplayAttributes;
3162 vtable->vaSetDisplayAttributes = i965_SetDisplayAttributes;
3163 vtable->vaBufferInfo = i965_BufferInfo;
3164 vtable->vaLockSurface = i965_LockSurface;
3165 vtable->vaUnlockSurface = i965_UnlockSurface;
3166 vtable->vaQueryVideoProcPipelineCap = i965_QueryVideoProcPipelineCap;
3167 vtable->vaQueryVideoProcFilterCap = i965_QueryVideoProcFilterCap;
3168 vtable->vaQueryVideoProcReferenceFramesCap = i965_QueryVideoProcReferenceFramesCap;
3170 // vtable->vaDbgCopySurfaceToBuffer = i965_DbgCopySurfaceToBuffer;
3172 i965 = (struct i965_driver_data *)calloc(1, sizeof(*i965));
3174 ctx->pDriverData = (void *)i965;
/* One heap per object kind; ids are made unique by per-heap offsets. */
3176 result = object_heap_init(&i965->config_heap,
3177 sizeof(struct object_config),
3179 assert(result == 0);
3181 result = object_heap_init(&i965->context_heap,
3182 sizeof(struct object_context),
3184 assert(result == 0);
3186 result = object_heap_init(&i965->surface_heap,
3187 sizeof(struct object_surface),
3189 assert(result == 0);
3191 result = object_heap_init(&i965->buffer_heap,
3192 sizeof(struct object_buffer),
3194 assert(result == 0);
3196 result = object_heap_init(&i965->image_heap,
3197 sizeof(struct object_image),
3199 assert(result == 0);
3201 result = object_heap_init(&i965->subpic_heap,
3202 sizeof(struct object_subpic),
3204 assert(result == 0);
/* Vendor string, e.g. "Intel i965 driver - 1.0.17" (plus ".preN" below). */
3206 sprintf(i965->va_vendor, "%s %s driver - %d.%d.%d",
3207 INTEL_STR_DRIVER_VENDOR,
3208 INTEL_STR_DRIVER_NAME,
3209 INTEL_DRIVER_MAJOR_VERSION,
3210 INTEL_DRIVER_MINOR_VERSION,
3211 INTEL_DRIVER_MICRO_VERSION);
3213 if (INTEL_DRIVER_PRE_VERSION > 0) {
3214 const int len = strlen(i965->va_vendor);
3215 sprintf(&i965->va_vendor[len], ".pre%d", INTEL_DRIVER_PRE_VERSION);
3218 i965->current_context_id = VA_INVALID_ID;
3220 ctx->str_vendor = i965->va_vendor;
3222 return i965_Init(ctx);