2 * Copyright © 2009 Intel Corporation
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
12 * The above copyright notice and this permission notice (including the
13 * next paragraph) shall be included in all copies or substantial portions
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25 * Xiang Haihao <haihao.xiang@intel.com>
26 * Zou Nan hai <nanhai.zou@intel.com>
30 #include "config_android.h"
41 # include "i965_output_dri.h"
44 #ifdef HAVE_VA_WAYLAND
45 # include "i965_output_wayland.h"
48 #include "intel_driver.h"
49 #include "intel_memman.h"
50 #include "intel_batchbuffer.h"
51 #include "i965_defines.h"
52 #include "i965_drv_video.h"
53 #include "i965_decoder.h"
54 #include "i965_encoder.h"
56 #define CONFIG_ID_OFFSET 0x01000000
57 #define CONTEXT_ID_OFFSET 0x02000000
58 #define SURFACE_ID_OFFSET 0x04000000
59 #define BUFFER_ID_OFFSET 0x08000000
60 #define IMAGE_ID_OFFSET 0x0a000000
61 #define SUBPIC_ID_OFFSET 0x10000000
63 #define HAS_MPEG2(ctx) (IS_G4X((ctx)->intel.device_id) || \
64 IS_IRONLAKE((ctx)->intel.device_id) || \
65 ((IS_GEN6((ctx)->intel.device_id) || \
66 IS_GEN7((ctx)->intel.device_id)) && \
67 (ctx)->intel.has_bsd))
69 #define HAS_H264(ctx) ((IS_GEN7((ctx)->intel.device_id) || \
70 IS_GEN6((ctx)->intel.device_id) || \
71 IS_IRONLAKE((ctx)->intel.device_id)) && \
74 #define HAS_VC1(ctx) ((IS_GEN7((ctx)->intel.device_id) || \
75 IS_GEN6((ctx)->intel.device_id)) && \
78 #define HAS_TILED_SURFACE(ctx) ((IS_GEN7((ctx)->intel.device_id) || \
79 IS_GEN6((ctx)->intel.device_id)))
81 #define HAS_ENCODER(ctx) ((IS_GEN7((ctx)->intel.device_id) || \
82 IS_GEN6((ctx)->intel.device_id)) && \
85 #define HAS_VPP(ctx) (IS_IRONLAKE((ctx)->intel.device_id) || \
86 IS_GEN6((ctx)->intel.device_id) || \
87 IS_GEN7((ctx)->intel.device_id))
89 #define HAS_JPEG(ctx) (IS_GEN7((ctx)->intel.device_id) && \
92 #define HAS_ACCELERATED_GETIMAGE(ctx) (IS_GEN6((ctx)->intel.device_id) || \
93 IS_GEN7((ctx)->intel.device_id))
95 #define HAS_ACCELERATED_PUTIMAGE(ctx) HAS_VPP(ctx)
96 static int get_sampling_from_fourcc(unsigned int fourcc);
98 /* Check whether we are rendering to X11 (VA/X11 or VA/GLX API) */
99 #define IS_VA_X11(ctx) \
100 (((ctx)->display_type & VA_DISPLAY_MAJOR_MASK) == VA_DISPLAY_X11)
102 /* Check whether we are rendering to Wayland */
103 #define IS_VA_WAYLAND(ctx) \
104 (((ctx)->display_type & VA_DISPLAY_MAJOR_MASK) == VA_DISPLAY_WAYLAND)
107 I965_SURFACETYPE_RGBA = 1,
108 I965_SURFACETYPE_YUV,
109 I965_SURFACETYPE_INDEXED
112 /* List of supported display attributes */
113 static const VADisplayAttribute i965_display_attributes[] = {
115 VADisplayAttribRotation,
116 0, 3, VA_ROTATION_NONE,
117 VA_DISPLAY_ATTRIB_GETTABLE|VA_DISPLAY_ATTRIB_SETTABLE
121 /* List of supported image formats */
124 VAImageFormat va_format;
125 } i965_image_format_map_t;
127 static const i965_image_format_map_t
128 i965_image_formats_map[I965_MAX_IMAGE_FORMATS + 1] = {
129 { I965_SURFACETYPE_YUV,
130 { VA_FOURCC('Y','V','1','2'), VA_LSB_FIRST, 12, } },
131 { I965_SURFACETYPE_YUV,
132 { VA_FOURCC('I','4','2','0'), VA_LSB_FIRST, 12, } },
133 { I965_SURFACETYPE_YUV,
134 { VA_FOURCC('N','V','1','2'), VA_LSB_FIRST, 12, } },
135 { I965_SURFACETYPE_YUV,
136 { VA_FOURCC('Y','U','Y','2'), VA_LSB_FIRST, 16, } },
137 { I965_SURFACETYPE_YUV,
138 { VA_FOURCC('U','Y','V','Y'), VA_LSB_FIRST, 16, } },
139 { I965_SURFACETYPE_RGBA,
140 { VA_FOURCC('R','G','B','X'), VA_LSB_FIRST, 32, 24, 0x000000ff, 0x0000ff00, 0x00ff0000 } },
141 { I965_SURFACETYPE_RGBA,
142 { VA_FOURCC('B','G','R','X'), VA_LSB_FIRST, 32, 24, 0x00ff0000, 0x0000ff00, 0x000000ff } },
145 /* List of supported subpicture formats */
149 VAImageFormat va_format;
150 unsigned int va_flags;
151 } i965_subpic_format_map_t;
153 static const i965_subpic_format_map_t
154 i965_subpic_formats_map[I965_MAX_SUBPIC_FORMATS + 1] = {
155 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_P4A4_UNORM,
156 { VA_FOURCC('I','A','4','4'), VA_MSB_FIRST, 8, },
157 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
158 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_A4P4_UNORM,
159 { VA_FOURCC('A','I','4','4'), VA_MSB_FIRST, 8, },
160 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
161 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_P8A8_UNORM,
162 { VA_FOURCC('I','A','8','8'), VA_MSB_FIRST, 16, },
163 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
164 { I965_SURFACETYPE_INDEXED, I965_SURFACEFORMAT_A8P8_UNORM,
165 { VA_FOURCC('A','I','8','8'), VA_MSB_FIRST, 16, },
166 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
167 { I965_SURFACETYPE_RGBA, I965_SURFACEFORMAT_B8G8R8A8_UNORM,
168 { VA_FOURCC('B','G','R','A'), VA_LSB_FIRST, 32,
169 32, 0x00ff0000, 0x0000ff00, 0x000000ff, 0xff000000 },
170 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
171 { I965_SURFACETYPE_RGBA, I965_SURFACEFORMAT_R8G8B8A8_UNORM,
172 { VA_FOURCC('R','G','B','A'), VA_LSB_FIRST, 32,
173 32, 0x000000ff, 0x0000ff00, 0x00ff0000, 0xff000000 },
174 VA_SUBPICTURE_DESTINATION_IS_SCREEN_COORD },
177 static const i965_subpic_format_map_t *
178 get_subpic_format(const VAImageFormat *va_format)
181 for (i = 0; i965_subpic_formats_map[i].type != 0; i++) {
182 const i965_subpic_format_map_t * const m = &i965_subpic_formats_map[i];
183 if (m->va_format.fourcc == va_format->fourcc &&
184 (m->type == I965_SURFACETYPE_RGBA ?
185 (m->va_format.byte_order == va_format->byte_order &&
186 m->va_format.red_mask == va_format->red_mask &&
187 m->va_format.green_mask == va_format->green_mask &&
188 m->va_format.blue_mask == va_format->blue_mask &&
189 m->va_format.alpha_mask == va_format->alpha_mask) : 1))
195 extern struct hw_context *i965_proc_context_init(VADriverContextP, struct object_config *);
196 extern struct hw_context *g4x_dec_hw_context_init(VADriverContextP, struct object_config *);
197 static struct hw_codec_info g4x_hw_codec_info = {
198 .dec_hw_context_init = g4x_dec_hw_context_init,
199 .enc_hw_context_init = NULL,
200 .proc_hw_context_init = NULL,
205 extern struct hw_context *ironlake_dec_hw_context_init(VADriverContextP, struct object_config *);
206 static struct hw_codec_info ironlake_hw_codec_info = {
207 .dec_hw_context_init = ironlake_dec_hw_context_init,
208 .enc_hw_context_init = NULL,
209 .proc_hw_context_init = i965_proc_context_init,
214 extern struct hw_context *gen6_dec_hw_context_init(VADriverContextP, struct object_config *);
215 extern struct hw_context *gen6_enc_hw_context_init(VADriverContextP, struct object_config *);
216 static struct hw_codec_info gen6_hw_codec_info = {
217 .dec_hw_context_init = gen6_dec_hw_context_init,
218 .enc_hw_context_init = gen6_enc_hw_context_init,
219 .proc_hw_context_init = i965_proc_context_init,
224 extern struct hw_context *gen7_dec_hw_context_init(VADriverContextP, struct object_config *);
225 extern struct hw_context *gen7_enc_hw_context_init(VADriverContextP, struct object_config *);
226 static struct hw_codec_info gen7_hw_codec_info = {
227 .dec_hw_context_init = gen7_dec_hw_context_init,
228 .enc_hw_context_init = gen7_enc_hw_context_init,
229 .proc_hw_context_init = i965_proc_context_init,
234 extern struct hw_context *gen75_proc_context_init(VADriverContextP, struct object_config *);
235 static struct hw_codec_info gen75_hw_codec_info = {
236 .dec_hw_context_init = gen75_dec_hw_context_init,
237 .enc_hw_context_init = gen75_enc_hw_context_init,
238 .proc_hw_context_init = gen75_proc_context_init,
243 #define I965_PACKED_HEADER_BASE 0
244 #define I965_PACKED_MISC_HEADER_BASE 3
247 va_enc_packed_type_to_idx(int packed_type)
251 if (packed_type & VAEncPackedHeaderMiscMask) {
252 idx = I965_PACKED_MISC_HEADER_BASE;
253 packed_type = (~VAEncPackedHeaderMiscMask & packed_type);
254 assert(packed_type > 0);
255 idx += (packed_type - 1);
257 idx = I965_PACKED_HEADER_BASE;
259 switch (packed_type) {
260 case VAEncPackedHeaderSequence:
261 idx = I965_PACKED_HEADER_BASE + 0;
264 case VAEncPackedHeaderPicture:
265 idx = I965_PACKED_HEADER_BASE + 1;
268 case VAEncPackedHeaderSlice:
269 idx = I965_PACKED_HEADER_BASE + 2;
273 /* Should not get here */
285 i965_QueryConfigProfiles(VADriverContextP ctx,
286 VAProfile *profile_list, /* out */
287 int *num_profiles) /* out */
289 struct i965_driver_data * const i965 = i965_driver_data(ctx);
292 if (HAS_MPEG2(i965)) {
293 profile_list[i++] = VAProfileMPEG2Simple;
294 profile_list[i++] = VAProfileMPEG2Main;
297 if (HAS_H264(i965)) {
298 profile_list[i++] = VAProfileH264Baseline;
299 profile_list[i++] = VAProfileH264Main;
300 profile_list[i++] = VAProfileH264High;
304 profile_list[i++] = VAProfileVC1Simple;
305 profile_list[i++] = VAProfileVC1Main;
306 profile_list[i++] = VAProfileVC1Advanced;
310 profile_list[i++] = VAProfileNone;
313 if (HAS_JPEG(i965)) {
314 profile_list[i++] = VAProfileJPEGBaseline;
317 /* If the assert fails then I965_MAX_PROFILES needs to be bigger */
318 assert(i <= I965_MAX_PROFILES);
321 return VA_STATUS_SUCCESS;
325 i965_QueryConfigEntrypoints(VADriverContextP ctx,
327 VAEntrypoint *entrypoint_list, /* out */
328 int *num_entrypoints) /* out */
330 struct i965_driver_data * const i965 = i965_driver_data(ctx);
334 case VAProfileMPEG2Simple:
335 case VAProfileMPEG2Main:
337 entrypoint_list[n++] = VAEntrypointVLD;
340 case VAProfileH264Baseline:
341 case VAProfileH264Main:
342 case VAProfileH264High:
344 entrypoint_list[n++] = VAEntrypointVLD;
346 if (HAS_ENCODER(i965))
347 entrypoint_list[n++] = VAEntrypointEncSlice;
351 case VAProfileVC1Simple:
352 case VAProfileVC1Main:
353 case VAProfileVC1Advanced:
355 entrypoint_list[n++] = VAEntrypointVLD;
360 entrypoint_list[n++] = VAEntrypointVideoProc;
363 case VAProfileJPEGBaseline:
365 entrypoint_list[n++] = VAEntrypointVLD;
372 /* If the assert fails then I965_MAX_ENTRYPOINTS needs to be bigger */
373 assert(n <= I965_MAX_ENTRYPOINTS);
374 *num_entrypoints = n;
375 return n > 0 ? VA_STATUS_SUCCESS : VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
379 i965_GetConfigAttributes(VADriverContextP ctx,
381 VAEntrypoint entrypoint,
382 VAConfigAttrib *attrib_list, /* in/out */
387 /* Other attributes don't seem to be defined */
388 /* What to do if we don't know the attribute? */
389 for (i = 0; i < num_attribs; i++) {
390 switch (attrib_list[i].type) {
391 case VAConfigAttribRTFormat:
392 attrib_list[i].value = VA_RT_FORMAT_YUV420;
395 case VAConfigAttribRateControl:
396 if (entrypoint == VAEntrypointEncSlice) {
397 attrib_list[i].value = VA_RC_CBR | VA_RC_CQP;
401 case VAConfigAttribEncPackedHeaders:
402 if (entrypoint == VAEntrypointEncSlice) {
403 attrib_list[i].value = VA_ENC_PACKED_HEADER_SEQUENCE | VA_ENC_PACKED_HEADER_PICTURE | VA_ENC_PACKED_HEADER_MISC;
409 attrib_list[i].value = VA_ATTRIB_NOT_SUPPORTED;
414 return VA_STATUS_SUCCESS;
418 i965_destroy_config(struct object_heap *heap, struct object_base *obj)
420 object_heap_free(heap, obj);
424 i965_update_attribute(struct object_config *obj_config, VAConfigAttrib *attrib)
428 /* Check existing attrbiutes */
429 for (i = 0; i < obj_config->num_attribs; i++) {
430 if (obj_config->attrib_list[i].type == attrib->type) {
431 /* Update existing attribute */
432 obj_config->attrib_list[i].value = attrib->value;
433 return VA_STATUS_SUCCESS;
437 if (obj_config->num_attribs < I965_MAX_CONFIG_ATTRIBUTES) {
438 i = obj_config->num_attribs;
439 obj_config->attrib_list[i].type = attrib->type;
440 obj_config->attrib_list[i].value = attrib->value;
441 obj_config->num_attribs++;
442 return VA_STATUS_SUCCESS;
445 return VA_STATUS_ERROR_MAX_NUM_EXCEEDED;
449 i965_CreateConfig(VADriverContextP ctx,
451 VAEntrypoint entrypoint,
452 VAConfigAttrib *attrib_list,
454 VAConfigID *config_id) /* out */
456 struct i965_driver_data * const i965 = i965_driver_data(ctx);
457 struct object_config *obj_config;
462 /* Validate profile & entrypoint */
464 case VAProfileMPEG2Simple:
465 case VAProfileMPEG2Main:
466 if (HAS_MPEG2(i965) && VAEntrypointVLD == entrypoint) {
467 vaStatus = VA_STATUS_SUCCESS;
469 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
473 case VAProfileH264Baseline:
474 case VAProfileH264Main:
475 case VAProfileH264High:
476 if ((HAS_H264(i965) && VAEntrypointVLD == entrypoint) ||
477 (HAS_ENCODER(i965) && VAEntrypointEncSlice == entrypoint)) {
478 vaStatus = VA_STATUS_SUCCESS;
480 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
485 case VAProfileVC1Simple:
486 case VAProfileVC1Main:
487 case VAProfileVC1Advanced:
488 if (HAS_VC1(i965) && VAEntrypointVLD == entrypoint) {
489 vaStatus = VA_STATUS_SUCCESS;
491 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
497 if (HAS_VPP(i965) && VAEntrypointVideoProc == entrypoint) {
498 vaStatus = VA_STATUS_SUCCESS;
500 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
505 case VAProfileJPEGBaseline:
506 if (HAS_JPEG(i965) && VAEntrypointVLD == entrypoint) {
507 vaStatus = VA_STATUS_SUCCESS;
509 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_ENTRYPOINT;
515 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
519 if (VA_STATUS_SUCCESS != vaStatus) {
523 configID = NEW_CONFIG_ID();
524 obj_config = CONFIG(configID);
526 if (NULL == obj_config) {
527 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
531 obj_config->profile = profile;
532 obj_config->entrypoint = entrypoint;
533 obj_config->attrib_list[0].type = VAConfigAttribRTFormat;
534 obj_config->attrib_list[0].value = VA_RT_FORMAT_YUV420;
535 obj_config->num_attribs = 1;
537 for(i = 0; i < num_attribs; i++) {
538 vaStatus = i965_update_attribute(obj_config, &(attrib_list[i]));
540 if (VA_STATUS_SUCCESS != vaStatus) {
546 if (VA_STATUS_SUCCESS != vaStatus) {
547 i965_destroy_config(&i965->config_heap, (struct object_base *)obj_config);
549 *config_id = configID;
556 i965_DestroyConfig(VADriverContextP ctx, VAConfigID config_id)
558 struct i965_driver_data *i965 = i965_driver_data(ctx);
559 struct object_config *obj_config = CONFIG(config_id);
562 if (NULL == obj_config) {
563 vaStatus = VA_STATUS_ERROR_INVALID_CONFIG;
567 i965_destroy_config(&i965->config_heap, (struct object_base *)obj_config);
568 return VA_STATUS_SUCCESS;
571 VAStatus i965_QueryConfigAttributes(VADriverContextP ctx,
572 VAConfigID config_id,
573 VAProfile *profile, /* out */
574 VAEntrypoint *entrypoint, /* out */
575 VAConfigAttrib *attrib_list, /* out */
576 int *num_attribs) /* out */
578 struct i965_driver_data *i965 = i965_driver_data(ctx);
579 struct object_config *obj_config = CONFIG(config_id);
580 VAStatus vaStatus = VA_STATUS_SUCCESS;
584 *profile = obj_config->profile;
585 *entrypoint = obj_config->entrypoint;
586 *num_attribs = obj_config->num_attribs;
588 for(i = 0; i < obj_config->num_attribs; i++) {
589 attrib_list[i] = obj_config->attrib_list[i];
596 i965_destroy_surface(struct object_heap *heap, struct object_base *obj)
598 struct object_surface *obj_surface = (struct object_surface *)obj;
600 dri_bo_unreference(obj_surface->bo);
601 obj_surface->bo = NULL;
603 if (obj_surface->free_private_data != NULL) {
604 obj_surface->free_private_data(&obj_surface->private_data);
605 obj_surface->private_data = NULL;
608 object_heap_free(heap, obj);
612 i965_CreateSurfaces2(
613 VADriverContextP ctx,
617 VASurfaceID *surfaces,
618 unsigned int num_surfaces,
619 VASurfaceAttrib *attrib_list,
620 unsigned int num_attribs
623 struct i965_driver_data *i965 = i965_driver_data(ctx);
625 VAStatus vaStatus = VA_STATUS_SUCCESS;
626 int expected_fourcc = 0;
628 for (i = 0; i < num_attribs && attrib_list; i++) {
629 if ((attrib_list[i].type == VASurfaceAttribPixelFormat) &&
630 (attrib_list[i].flags & VA_SURFACE_ATTRIB_SETTABLE)) {
631 assert(attrib_list[i].value.type == VAGenericValueTypeInteger);
632 expected_fourcc = attrib_list[i].value.value.i;
637 /* support 420 & 422 & RGB32 format, 422 and RGB32 are only used
638 * for post-processing (including color conversion) */
639 if (VA_RT_FORMAT_YUV420 != format &&
640 VA_RT_FORMAT_YUV422 != format &&
641 VA_RT_FORMAT_RGB32 != format) {
642 return VA_STATUS_ERROR_UNSUPPORTED_RT_FORMAT;
645 for (i = 0; i < num_surfaces; i++) {
646 int surfaceID = NEW_SURFACE_ID();
647 struct object_surface *obj_surface = SURFACE(surfaceID);
649 if (NULL == obj_surface) {
650 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
654 surfaces[i] = surfaceID;
655 obj_surface->status = VASurfaceReady;
656 obj_surface->orig_width = width;
657 obj_surface->orig_height = height;
659 obj_surface->subpic_render_idx = 0;
660 for(j = 0; j < I965_MAX_SUBPIC_SUM; j++){
661 obj_surface->subpic[j] = VA_INVALID_ID;
664 obj_surface->width = ALIGN(width, 16);
665 obj_surface->height = ALIGN(height, 16);
666 obj_surface->flags = SURFACE_REFERENCED;
667 obj_surface->fourcc = 0;
668 obj_surface->bo = NULL;
669 obj_surface->locked_image_id = VA_INVALID_ID;
670 obj_surface->private_data = NULL;
671 obj_surface->free_private_data = NULL;
672 obj_surface->subsampling = SUBSAMPLE_YUV420;
674 if (expected_fourcc) {
675 int tiling = HAS_TILED_SURFACE(i965);
677 if (expected_fourcc != VA_FOURCC('N', 'V', '1', '2') &&
678 expected_fourcc != VA_FOURCC('R', 'G', 'B', 'X') &&
679 expected_fourcc != VA_FOURCC('R', 'G', 'B', 'A') )
681 // todo, should we disable tiling for 422 format?
683 if (VA_RT_FORMAT_YUV420 == format) {
684 obj_surface->subsampling = SUBSAMPLE_YUV420;
686 else if (VA_RT_FORMAT_YUV422 == format) {
687 obj_surface->subsampling = SUBSAMPLE_YUV422H;
689 else if (VA_RT_FORMAT_RGB32 == format) {
690 obj_surface->subsampling = SUBSAMPLE_RGBX;
696 i965_check_alloc_surface_bo(ctx, obj_surface, tiling, expected_fourcc, obj_surface->subsampling);
701 if (VA_STATUS_SUCCESS != vaStatus) {
702 /* surfaces[i-1] was the last successful allocation */
704 struct object_surface *obj_surface = SURFACE(surfaces[i]);
706 surfaces[i] = VA_INVALID_SURFACE;
708 i965_destroy_surface(&i965->surface_heap, (struct object_base *)obj_surface);
716 i965_CreateSurfaces(VADriverContextP ctx,
721 VASurfaceID *surfaces) /* out */
723 return i965_CreateSurfaces2(ctx,
734 i965_DestroySurfaces(VADriverContextP ctx,
735 VASurfaceID *surface_list,
738 struct i965_driver_data *i965 = i965_driver_data(ctx);
741 for (i = num_surfaces; i--; ) {
742 struct object_surface *obj_surface = SURFACE(surface_list[i]);
745 i965_destroy_surface(&i965->surface_heap, (struct object_base *)obj_surface);
748 return VA_STATUS_SUCCESS;
752 i965_QueryImageFormats(VADriverContextP ctx,
753 VAImageFormat *format_list, /* out */
754 int *num_formats) /* out */
758 for (n = 0; i965_image_formats_map[n].va_format.fourcc != 0; n++) {
759 const i965_image_format_map_t * const m = &i965_image_formats_map[n];
761 format_list[n] = m->va_format;
767 return VA_STATUS_SUCCESS;
771 * Guess the format when the usage of a VA surface is unknown
772 * 1. Without a valid context: YV12
773 * 2. The current context is valid:
774 * a) always NV12 on GEN6 and later
775 * b) I420 for MPEG-2 and NV12 for other codec on GEN4 & GEN5
778 i965_guess_surface_format(VADriverContextP ctx,
780 unsigned int *fourcc,
781 unsigned int *is_tiled)
783 struct i965_driver_data *i965 = i965_driver_data(ctx);
784 struct object_context *obj_context = NULL;
785 struct object_config *obj_config = NULL;
787 *fourcc = VA_FOURCC('Y', 'V', '1', '2');
790 if (i965->current_context_id == VA_INVALID_ID)
793 obj_context = CONTEXT(i965->current_context_id);
795 if (!obj_context || obj_context->config_id == VA_INVALID_ID)
798 obj_config = CONFIG(obj_context->config_id);
803 if (IS_GEN6(i965->intel.device_id) || IS_GEN7(i965->intel.device_id)) {
804 *fourcc = VA_FOURCC('N', 'V', '1', '2');
809 switch (obj_config->profile) {
810 case VAProfileMPEG2Simple:
811 case VAProfileMPEG2Main:
812 *fourcc = VA_FOURCC('I', '4', '2', '0');
817 *fourcc = VA_FOURCC('N', 'V', '1', '2');
824 i965_QuerySubpictureFormats(VADriverContextP ctx,
825 VAImageFormat *format_list, /* out */
826 unsigned int *flags, /* out */
827 unsigned int *num_formats) /* out */
831 for (n = 0; i965_subpic_formats_map[n].va_format.fourcc != 0; n++) {
832 const i965_subpic_format_map_t * const m = &i965_subpic_formats_map[n];
834 format_list[n] = m->va_format;
836 flags[n] = m->va_flags;
842 return VA_STATUS_SUCCESS;
846 i965_destroy_subpic(struct object_heap *heap, struct object_base *obj)
848 // struct object_subpic *obj_subpic = (struct object_subpic *)obj;
850 object_heap_free(heap, obj);
854 i965_CreateSubpicture(VADriverContextP ctx,
856 VASubpictureID *subpicture) /* out */
858 struct i965_driver_data *i965 = i965_driver_data(ctx);
859 VASubpictureID subpicID = NEW_SUBPIC_ID()
860 struct object_subpic *obj_subpic = SUBPIC(subpicID);
863 return VA_STATUS_ERROR_ALLOCATION_FAILED;
865 struct object_image *obj_image = IMAGE(image);
867 return VA_STATUS_ERROR_INVALID_IMAGE;
869 const i965_subpic_format_map_t * const m = get_subpic_format(&obj_image->image.format);
871 return VA_STATUS_ERROR_UNKNOWN; /* XXX: VA_STATUS_ERROR_UNSUPPORTED_FORMAT? */
873 *subpicture = subpicID;
874 obj_subpic->image = image;
875 obj_subpic->format = m->format;
876 obj_subpic->width = obj_image->image.width;
877 obj_subpic->height = obj_image->image.height;
878 obj_subpic->pitch = obj_image->image.pitches[0];
879 obj_subpic->bo = obj_image->bo;
880 obj_subpic->global_alpha = 1.0;
882 return VA_STATUS_SUCCESS;
886 i965_DestroySubpicture(VADriverContextP ctx,
887 VASubpictureID subpicture)
889 struct i965_driver_data *i965 = i965_driver_data(ctx);
890 struct object_subpic *obj_subpic = SUBPIC(subpicture);
891 i965_destroy_subpic(&i965->subpic_heap, (struct object_base *)obj_subpic);
892 return VA_STATUS_SUCCESS;
896 i965_SetSubpictureImage(VADriverContextP ctx,
897 VASubpictureID subpicture,
901 return VA_STATUS_ERROR_UNIMPLEMENTED;
905 i965_SetSubpictureChromakey(VADriverContextP ctx,
906 VASubpictureID subpicture,
907 unsigned int chromakey_min,
908 unsigned int chromakey_max,
909 unsigned int chromakey_mask)
912 return VA_STATUS_ERROR_UNIMPLEMENTED;
916 i965_SetSubpictureGlobalAlpha(VADriverContextP ctx,
917 VASubpictureID subpicture,
920 struct i965_driver_data *i965 = i965_driver_data(ctx);
921 struct object_subpic *obj_subpic = SUBPIC(subpicture);
923 if(global_alpha > 1.0 || global_alpha < 0.0){
924 return VA_STATUS_ERROR_INVALID_PARAMETER;
926 obj_subpic->global_alpha = global_alpha;
928 return VA_STATUS_SUCCESS;
932 i965_AssociateSubpicture(VADriverContextP ctx,
933 VASubpictureID subpicture,
934 VASurfaceID *target_surfaces,
936 short src_x, /* upper left offset in subpicture */
938 unsigned short src_width,
939 unsigned short src_height,
940 short dest_x, /* upper left offset in surface */
942 unsigned short dest_width,
943 unsigned short dest_height,
945 * whether to enable chroma-keying or global-alpha
946 * see VA_SUBPICTURE_XXX values
950 struct i965_driver_data *i965 = i965_driver_data(ctx);
951 struct object_subpic *obj_subpic = SUBPIC(subpicture);
954 obj_subpic->src_rect.x = src_x;
955 obj_subpic->src_rect.y = src_y;
956 obj_subpic->src_rect.width = src_width;
957 obj_subpic->src_rect.height = src_height;
958 obj_subpic->dst_rect.x = dest_x;
959 obj_subpic->dst_rect.y = dest_y;
960 obj_subpic->dst_rect.width = dest_width;
961 obj_subpic->dst_rect.height = dest_height;
962 obj_subpic->flags = flags;
964 for (i = 0; i < num_surfaces; i++) {
965 struct object_surface *obj_surface = SURFACE(target_surfaces[i]);
967 return VA_STATUS_ERROR_INVALID_SURFACE;
969 for(j = 0; j < I965_MAX_SUBPIC_SUM; j ++){
970 if(obj_surface->subpic[j] == VA_INVALID_ID){
971 obj_surface->subpic[j] = subpicture;
976 if(j == I965_MAX_SUBPIC_SUM){
977 return VA_STATUS_ERROR_MAX_NUM_EXCEEDED;
981 return VA_STATUS_SUCCESS;
986 i965_DeassociateSubpicture(VADriverContextP ctx,
987 VASubpictureID subpicture,
988 VASurfaceID *target_surfaces,
991 struct i965_driver_data *i965 = i965_driver_data(ctx);
994 for (i = 0; i < num_surfaces; i++) {
995 struct object_surface *obj_surface = SURFACE(target_surfaces[i]);
997 return VA_STATUS_ERROR_INVALID_SURFACE;
999 for(j = 0; j < I965_MAX_SUBPIC_SUM; j ++){
1000 if(obj_surface->subpic[j] == subpicture){
1001 obj_surface->subpic[j] = VA_INVALID_ID;
1006 if(j == I965_MAX_SUBPIC_SUM){
1007 return VA_STATUS_ERROR_MAX_NUM_EXCEEDED;
1010 return VA_STATUS_SUCCESS;
1014 i965_reference_buffer_store(struct buffer_store **ptr,
1015 struct buffer_store *buffer_store)
1017 assert(*ptr == NULL);
1020 buffer_store->ref_count++;
1021 *ptr = buffer_store;
1026 i965_release_buffer_store(struct buffer_store **ptr)
1028 struct buffer_store *buffer_store = *ptr;
1030 if (buffer_store == NULL)
1033 assert(buffer_store->bo || buffer_store->buffer);
1034 assert(!(buffer_store->bo && buffer_store->buffer));
1035 buffer_store->ref_count--;
1037 if (buffer_store->ref_count == 0) {
1038 dri_bo_unreference(buffer_store->bo);
1039 free(buffer_store->buffer);
1040 buffer_store->bo = NULL;
1041 buffer_store->buffer = NULL;
1049 i965_destroy_context(struct object_heap *heap, struct object_base *obj)
1051 struct object_context *obj_context = (struct object_context *)obj;
1054 if (obj_context->hw_context) {
1055 obj_context->hw_context->destroy(obj_context->hw_context);
1056 obj_context->hw_context = NULL;
1059 if (obj_context->codec_type == CODEC_PROC) {
1060 i965_release_buffer_store(&obj_context->codec_state.proc.pipeline_param);
1062 } else if (obj_context->codec_type == CODEC_ENC) {
1063 assert(obj_context->codec_state.encode.num_slice_params <= obj_context->codec_state.encode.max_slice_params);
1064 i965_release_buffer_store(&obj_context->codec_state.encode.pic_param);
1065 i965_release_buffer_store(&obj_context->codec_state.encode.seq_param);
1067 for (i = 0; i < obj_context->codec_state.encode.num_slice_params; i++)
1068 i965_release_buffer_store(&obj_context->codec_state.encode.slice_params[i]);
1070 free(obj_context->codec_state.encode.slice_params);
1072 assert(obj_context->codec_state.encode.num_slice_params_ext <= obj_context->codec_state.encode.max_slice_params_ext);
1073 i965_release_buffer_store(&obj_context->codec_state.encode.pic_param_ext);
1074 i965_release_buffer_store(&obj_context->codec_state.encode.seq_param_ext);
1076 for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.packed_header_param); i++)
1077 i965_release_buffer_store(&obj_context->codec_state.encode.packed_header_param[i]);
1079 for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.packed_header_data); i++)
1080 i965_release_buffer_store(&obj_context->codec_state.encode.packed_header_data[i]);
1082 for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.misc_param); i++)
1083 i965_release_buffer_store(&obj_context->codec_state.encode.misc_param[i]);
1085 for (i = 0; i < obj_context->codec_state.encode.num_slice_params_ext; i++)
1086 i965_release_buffer_store(&obj_context->codec_state.encode.slice_params_ext[i]);
1088 free(obj_context->codec_state.encode.slice_params_ext);
1090 assert(obj_context->codec_state.decode.num_slice_params <= obj_context->codec_state.decode.max_slice_params);
1091 assert(obj_context->codec_state.decode.num_slice_datas <= obj_context->codec_state.decode.max_slice_datas);
1093 i965_release_buffer_store(&obj_context->codec_state.decode.pic_param);
1094 i965_release_buffer_store(&obj_context->codec_state.decode.iq_matrix);
1095 i965_release_buffer_store(&obj_context->codec_state.decode.bit_plane);
1097 for (i = 0; i < obj_context->codec_state.decode.num_slice_params; i++)
1098 i965_release_buffer_store(&obj_context->codec_state.decode.slice_params[i]);
1100 for (i = 0; i < obj_context->codec_state.decode.num_slice_datas; i++)
1101 i965_release_buffer_store(&obj_context->codec_state.decode.slice_datas[i]);
1103 free(obj_context->codec_state.decode.slice_params);
1104 free(obj_context->codec_state.decode.slice_datas);
1107 free(obj_context->render_targets);
1108 object_heap_free(heap, obj);
1112 i965_CreateContext(VADriverContextP ctx,
1113 VAConfigID config_id,
1117 VASurfaceID *render_targets,
1118 int num_render_targets,
1119 VAContextID *context) /* out */
1121 struct i965_driver_data *i965 = i965_driver_data(ctx);
1122 struct i965_render_state *render_state = &i965->render_state;
1123 struct object_config *obj_config = CONFIG(config_id);
1124 struct object_context *obj_context = NULL;
1125 VAStatus vaStatus = VA_STATUS_SUCCESS;
1129 if (NULL == obj_config) {
1130 vaStatus = VA_STATUS_ERROR_INVALID_CONFIG;
1134 if (picture_width > i965->codec_info->max_width ||
1135 picture_height > i965->codec_info->max_height) {
1136 vaStatus = VA_STATUS_ERROR_RESOLUTION_NOT_SUPPORTED;
1141 /* Validate picture dimensions */
1142 contextID = NEW_CONTEXT_ID();
1143 obj_context = CONTEXT(contextID);
1145 if (NULL == obj_context) {
1146 vaStatus = VA_STATUS_ERROR_ALLOCATION_FAILED;
1150 render_state->inited = 1;
1152 switch (obj_config->profile) {
1153 case VAProfileH264Baseline:
1154 case VAProfileH264Main:
1155 case VAProfileH264High:
1156 if (!HAS_H264(i965))
1157 return VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
1158 render_state->interleaved_uv = 1;
1161 render_state->interleaved_uv = !!(IS_GEN6(i965->intel.device_id) || IS_GEN7(i965->intel.device_id));
1165 *context = contextID;
1166 obj_context->flags = flag;
1167 obj_context->context_id = contextID;
1168 obj_context->config_id = config_id;
1169 obj_context->picture_width = picture_width;
1170 obj_context->picture_height = picture_height;
1171 obj_context->num_render_targets = num_render_targets;
1172 obj_context->render_targets =
1173 (VASurfaceID *)calloc(num_render_targets, sizeof(VASurfaceID));
1174 obj_context->hw_context = NULL;
1176 for(i = 0; i < num_render_targets; i++) {
1177 if (NULL == SURFACE(render_targets[i])) {
1178 vaStatus = VA_STATUS_ERROR_INVALID_SURFACE;
1182 obj_context->render_targets[i] = render_targets[i];
1185 if (VA_STATUS_SUCCESS == vaStatus) {
1186 if (VAEntrypointVideoProc == obj_config->entrypoint) {
1187 obj_context->codec_type = CODEC_PROC;
1188 memset(&obj_context->codec_state.proc, 0, sizeof(obj_context->codec_state.proc));
1189 obj_context->codec_state.proc.current_render_target = VA_INVALID_ID;
1190 assert(i965->codec_info->proc_hw_context_init);
1191 obj_context->hw_context = i965->codec_info->proc_hw_context_init(ctx, obj_config);
1192 } else if (VAEntrypointEncSlice == obj_config->entrypoint) { /*encode routin only*/
1193 obj_context->codec_type = CODEC_ENC;
1194 memset(&obj_context->codec_state.encode, 0, sizeof(obj_context->codec_state.encode));
1195 obj_context->codec_state.encode.current_render_target = VA_INVALID_ID;
1196 obj_context->codec_state.encode.max_slice_params = NUM_SLICES;
1197 obj_context->codec_state.encode.slice_params = calloc(obj_context->codec_state.encode.max_slice_params,
1198 sizeof(*obj_context->codec_state.encode.slice_params));
1199 assert(i965->codec_info->enc_hw_context_init);
1200 obj_context->hw_context = i965->codec_info->enc_hw_context_init(ctx, obj_config);
1202 obj_context->codec_type = CODEC_DEC;
1203 memset(&obj_context->codec_state.decode, 0, sizeof(obj_context->codec_state.decode));
1204 obj_context->codec_state.decode.current_render_target = -1;
1205 obj_context->codec_state.decode.max_slice_params = NUM_SLICES;
1206 obj_context->codec_state.decode.max_slice_datas = NUM_SLICES;
1207 obj_context->codec_state.decode.slice_params = calloc(obj_context->codec_state.decode.max_slice_params,
1208 sizeof(*obj_context->codec_state.decode.slice_params));
1209 obj_context->codec_state.decode.slice_datas = calloc(obj_context->codec_state.decode.max_slice_datas,
1210 sizeof(*obj_context->codec_state.decode.slice_datas));
1212 assert(i965->codec_info->dec_hw_context_init);
1213 obj_context->hw_context = i965->codec_info->dec_hw_context_init(ctx, obj_config);
1217 /* Error recovery */
1218 if (VA_STATUS_SUCCESS != vaStatus) {
1219 i965_destroy_context(&i965->context_heap, (struct object_base *)obj_context);
1222 i965->current_context_id = contextID;
1228 i965_DestroyContext(VADriverContextP ctx, VAContextID context)
1230 struct i965_driver_data *i965 = i965_driver_data(ctx);
1231 struct object_context *obj_context = CONTEXT(context);
1233 assert(obj_context);
1235 if (i965->current_context_id == context)
1236 i965->current_context_id = VA_INVALID_ID;
1238 i965_destroy_context(&i965->context_heap, (struct object_base *)obj_context);
1240 return VA_STATUS_SUCCESS;
1244 i965_destroy_buffer(struct object_heap *heap, struct object_base *obj)
1246 struct object_buffer *obj_buffer = (struct object_buffer *)obj;
1248 assert(obj_buffer->buffer_store);
1249 i965_release_buffer_store(&obj_buffer->buffer_store);
1250 object_heap_free(heap, obj);
1254 i965_create_buffer_internal(VADriverContextP ctx,
1255 VAContextID context,
1258 unsigned int num_elements,
1263 struct i965_driver_data *i965 = i965_driver_data(ctx);
1264 struct object_buffer *obj_buffer = NULL;
1265 struct buffer_store *buffer_store = NULL;
1270 case VAPictureParameterBufferType:
1271 case VAIQMatrixBufferType:
1272 case VAQMatrixBufferType:
1273 case VABitPlaneBufferType:
1274 case VASliceGroupMapBufferType:
1275 case VASliceParameterBufferType:
1276 case VASliceDataBufferType:
1277 case VAMacroblockParameterBufferType:
1278 case VAResidualDataBufferType:
1279 case VADeblockingParameterBufferType:
1280 case VAImageBufferType:
1281 case VAEncCodedBufferType:
1282 case VAEncSequenceParameterBufferType:
1283 case VAEncPictureParameterBufferType:
1284 case VAEncSliceParameterBufferType:
1285 case VAEncPackedHeaderParameterBufferType:
1286 case VAEncPackedHeaderDataBufferType:
1287 case VAEncMiscParameterBufferType:
1288 case VAProcPipelineParameterBufferType:
1289 case VAProcFilterParameterBufferType:
1290 case VAHuffmanTableBufferType:
1295 return VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
1298 bufferID = NEW_BUFFER_ID();
1299 obj_buffer = BUFFER(bufferID);
1301 if (NULL == obj_buffer) {
1302 return VA_STATUS_ERROR_ALLOCATION_FAILED;
1305 if (type == VAEncCodedBufferType) {
1306 size += I965_CODEDBUFFER_SIZE;
1307 size += 0x1000; /* for upper bound check */
1310 obj_buffer->max_num_elements = num_elements;
1311 obj_buffer->num_elements = num_elements;
1312 obj_buffer->size_element = size;
1313 obj_buffer->type = type;
1314 obj_buffer->buffer_store = NULL;
1315 buffer_store = calloc(1, sizeof(struct buffer_store));
1316 assert(buffer_store);
1317 buffer_store->ref_count = 1;
1319 if (store_bo != NULL) {
1320 buffer_store->bo = store_bo;
1321 dri_bo_reference(buffer_store->bo);
1324 dri_bo_subdata(buffer_store->bo, 0, size * num_elements, data);
1325 } else if (type == VASliceDataBufferType ||
1326 type == VAImageBufferType ||
1327 type == VAEncCodedBufferType) {
1328 buffer_store->bo = dri_bo_alloc(i965->intel.bufmgr,
1330 size * num_elements, 64);
1331 assert(buffer_store->bo);
1333 if (type == VAEncCodedBufferType) {
1334 VACodedBufferSegment *coded_buffer_segment;
1335 unsigned char *flag = NULL;
1336 dri_bo_map(buffer_store->bo, 1);
1337 coded_buffer_segment = (VACodedBufferSegment *)buffer_store->bo->virtual;
1338 coded_buffer_segment->size = size - I965_CODEDBUFFER_SIZE;
1339 coded_buffer_segment->bit_offset = 0;
1340 coded_buffer_segment->status = 0;
1341 coded_buffer_segment->buf = NULL;
1342 coded_buffer_segment->next = NULL;
1343 flag = (unsigned char *)(coded_buffer_segment + 1);
1345 dri_bo_unmap(buffer_store->bo);
1347 dri_bo_subdata(buffer_store->bo, 0, size * num_elements, data);
1353 if (type == VAEncPackedHeaderDataBufferType) {
1354 msize = ALIGN(size, 4);
1357 buffer_store->buffer = malloc(msize * num_elements);
1358 assert(buffer_store->buffer);
1361 memcpy(buffer_store->buffer, data, size * num_elements);
1364 buffer_store->num_elements = obj_buffer->num_elements;
1365 i965_reference_buffer_store(&obj_buffer->buffer_store, buffer_store);
1366 i965_release_buffer_store(&buffer_store);
1369 return VA_STATUS_SUCCESS;
1373 i965_CreateBuffer(VADriverContextP ctx,
1374 VAContextID context, /* in */
1375 VABufferType type, /* in */
1376 unsigned int size, /* in */
1377 unsigned int num_elements, /* in */
1378 void *data, /* in */
1379 VABufferID *buf_id) /* out */
1381 return i965_create_buffer_internal(ctx, context, type, size, num_elements, data, NULL, buf_id);
1386 i965_BufferSetNumElements(VADriverContextP ctx,
1387 VABufferID buf_id, /* in */
1388 unsigned int num_elements) /* in */
1390 struct i965_driver_data *i965 = i965_driver_data(ctx);
1391 struct object_buffer *obj_buffer = BUFFER(buf_id);
1392 VAStatus vaStatus = VA_STATUS_SUCCESS;
1396 if ((num_elements < 0) ||
1397 (num_elements > obj_buffer->max_num_elements)) {
1398 vaStatus = VA_STATUS_ERROR_UNKNOWN;
1400 obj_buffer->num_elements = num_elements;
1401 if (obj_buffer->buffer_store != NULL) {
1402 obj_buffer->buffer_store->num_elements = num_elements;
/* vaMapBuffer implementation: exposes the buffer's backing store to the
 * client.  GEM-backed buffers are mapped (through the GTT when tiled);
 * malloc-backed buffers hand back the pointer directly.  For coded
 * (encoder output) buffers the embedded VACodedBufferSegment header is
 * finalized here by scanning for the end of the bitstream.
 * NOTE(review): several lines of this function are elided in this view;
 * the code lines below are kept byte-identical. */
1410 i965_MapBuffer(VADriverContextP ctx,
1411 VABufferID buf_id, /* in */
1412 void **pbuf) /* out */
1414 struct i965_driver_data *i965 = i965_driver_data(ctx);
1415 struct object_buffer *obj_buffer = BUFFER(buf_id);
1416 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
/* Exactly one of bo (GEM) or buffer (malloc) must back the object. */
1418 assert(obj_buffer && obj_buffer->buffer_store);
1419 assert(obj_buffer->buffer_store->bo || obj_buffer->buffer_store->buffer);
1420 assert(!(obj_buffer->buffer_store->bo && obj_buffer->buffer_store->buffer));
1422 if (NULL != obj_buffer->buffer_store->bo) {
1423 unsigned int tiling, swizzle;
1425 dri_bo_get_tiling(obj_buffer->buffer_store->bo, &tiling, &swizzle);
/* Tiled bos must be mapped through the GTT so the CPU sees linear data. */
1427 if (tiling != I915_TILING_NONE)
1428 drm_intel_gem_bo_map_gtt(obj_buffer->buffer_store->bo);
1430 dri_bo_map(obj_buffer->buffer_store->bo, 1);
1432 assert(obj_buffer->buffer_store->bo->virtual);
1433 *pbuf = obj_buffer->buffer_store->bo->virtual;
/* Encoder output: fill in the VACodedBufferSegment header that
 * precedes the actual bitstream in the bo (layout set up at buffer
 * creation: header, then I965_CODEDBUFFER_SIZE offset to the data). */
1435 if (obj_buffer->type == VAEncCodedBufferType) {
1437 unsigned char *buffer = NULL;
1438 VACodedBufferSegment *coded_buffer_segment = (VACodedBufferSegment *)(obj_buffer->buffer_store->bo->virtual);
1439 unsigned char *flag = (unsigned char *)(coded_buffer_segment + 1);
1442 coded_buffer_segment->buf = buffer = (unsigned char *)(obj_buffer->buffer_store->bo->virtual) + I965_CODEDBUFFER_SIZE;
/* Scan the bitstream for its end; the 0x1000 slack and 3-byte window
 * were reserved when the buffer was created. */
1444 for (i = 0; i < obj_buffer->size_element - I965_CODEDBUFFER_SIZE - 3 - 0x1000; i++) {
/* Scan reached the end of the usable area: the slice overflowed. */
1453 if (i == obj_buffer->size_element - I965_CODEDBUFFER_SIZE - 3 - 0x1000) {
1454 coded_buffer_segment->status |= VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK;
1457 coded_buffer_segment->size = i;
1460 assert(coded_buffer_segment->buf);
1464 vaStatus = VA_STATUS_SUCCESS;
1465 } else if (NULL != obj_buffer->buffer_store->buffer) {
/* malloc-backed store: no mapping needed, return the pointer as-is. */
1466 *pbuf = obj_buffer->buffer_store->buffer;
1467 vaStatus = VA_STATUS_SUCCESS;
1474 i965_UnmapBuffer(VADriverContextP ctx, VABufferID buf_id)
1476 struct i965_driver_data *i965 = i965_driver_data(ctx);
1477 struct object_buffer *obj_buffer = BUFFER(buf_id);
1478 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
1480 assert(obj_buffer && obj_buffer->buffer_store);
1481 assert(obj_buffer->buffer_store->bo || obj_buffer->buffer_store->buffer);
1482 assert(!(obj_buffer->buffer_store->bo && obj_buffer->buffer_store->buffer));
1484 if (NULL != obj_buffer->buffer_store->bo) {
1485 unsigned int tiling, swizzle;
1487 dri_bo_get_tiling(obj_buffer->buffer_store->bo, &tiling, &swizzle);
1489 if (tiling != I915_TILING_NONE)
1490 drm_intel_gem_bo_unmap_gtt(obj_buffer->buffer_store->bo);
1492 dri_bo_unmap(obj_buffer->buffer_store->bo);
1494 vaStatus = VA_STATUS_SUCCESS;
1495 } else if (NULL != obj_buffer->buffer_store->buffer) {
1497 vaStatus = VA_STATUS_SUCCESS;
1504 i965_DestroyBuffer(VADriverContextP ctx, VABufferID buffer_id)
1506 struct i965_driver_data *i965 = i965_driver_data(ctx);
1507 struct object_buffer *obj_buffer = BUFFER(buffer_id);
1510 i965_destroy_buffer(&i965->buffer_heap, (struct object_base *)obj_buffer);
1512 return VA_STATUS_SUCCESS;
/* vaBeginPicture implementation: checks the configured profile, then
 * resets the per-frame codec state (releasing all buffers kept from the
 * previous frame) and records the new render target.
 * NOTE(review): several structural lines (breaks, else branches, at
 * least one case label) are elided in this view; the code lines below
 * are kept byte-identical. */
1516 i965_BeginPicture(VADriverContextP ctx,
1517 VAContextID context,
1518 VASurfaceID render_target)
1520 struct i965_driver_data *i965 = i965_driver_data(ctx);
1521 struct object_context *obj_context = CONTEXT(context);
1522 struct object_surface *obj_surface = SURFACE(render_target);
1523 struct object_config *obj_config;
1528 assert(obj_context);
1529 assert(obj_surface);
1531 config = obj_context->config_id;
1532 obj_config = CONFIG(config);
/* Only profiles with hardware support on some generation are accepted. */
1535 switch (obj_config->profile) {
1536 case VAProfileMPEG2Simple:
1537 case VAProfileMPEG2Main:
1538 vaStatus = VA_STATUS_SUCCESS;
1541 case VAProfileH264Baseline:
1542 case VAProfileH264Main:
1543 case VAProfileH264High:
1544 vaStatus = VA_STATUS_SUCCESS;
1547 case VAProfileVC1Simple:
1548 case VAProfileVC1Main:
1549 case VAProfileVC1Advanced:
1550 vaStatus = VA_STATUS_SUCCESS;
1553 case VAProfileJPEGBaseline:
1554 vaStatus = VA_STATUS_SUCCESS;
1558 vaStatus = VA_STATUS_SUCCESS;
1563 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
/* Reset the per-frame state matching this context's codec type. */
1567 if (obj_context->codec_type == CODEC_PROC) {
1568 obj_context->codec_state.proc.current_render_target = render_target;
1569 } else if (obj_context->codec_type == CODEC_ENC) {
/* Encoder: drop every buffer reference kept from the previous frame. */
1570 i965_release_buffer_store(&obj_context->codec_state.encode.pic_param);
1572 for (i = 0; i < obj_context->codec_state.encode.num_slice_params; i++) {
1573 i965_release_buffer_store(&obj_context->codec_state.encode.slice_params[i]);
1576 obj_context->codec_state.encode.num_slice_params = 0;
/* Extended (current-API) encode buffers. */
1579 i965_release_buffer_store(&obj_context->codec_state.encode.pic_param_ext);
1581 for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.packed_header_param); i++)
1582 i965_release_buffer_store(&obj_context->codec_state.encode.packed_header_param[i]);
1584 for (i = 0; i < ARRAY_ELEMS(obj_context->codec_state.encode.packed_header_data); i++)
1585 i965_release_buffer_store(&obj_context->codec_state.encode.packed_header_data[i]);
1587 for (i = 0; i < obj_context->codec_state.encode.num_slice_params_ext; i++)
1588 i965_release_buffer_store(&obj_context->codec_state.encode.slice_params_ext[i]);
1590 obj_context->codec_state.encode.num_slice_params_ext = 0;
1591 obj_context->codec_state.encode.current_render_target = render_target; /*This is input new frame*/
1592 obj_context->codec_state.encode.last_packed_header_type = 0;
/* Decoder: same reset, for the decode-side state. */
1594 obj_context->codec_state.decode.current_render_target = render_target;
1595 i965_release_buffer_store(&obj_context->codec_state.decode.pic_param);
1596 i965_release_buffer_store(&obj_context->codec_state.decode.iq_matrix);
1597 i965_release_buffer_store(&obj_context->codec_state.decode.bit_plane);
1598 i965_release_buffer_store(&obj_context->codec_state.decode.huffman_table);
1600 for (i = 0; i < obj_context->codec_state.decode.num_slice_params; i++) {
1601 i965_release_buffer_store(&obj_context->codec_state.decode.slice_params[i]);
1602 i965_release_buffer_store(&obj_context->codec_state.decode.slice_datas[i]);
1605 obj_context->codec_state.decode.num_slice_params = 0;
1606 obj_context->codec_state.decode.num_slice_datas = 0;
/* Dispatch helper: expands to a call of the per-category render
 * function generated below, e.g. i965_render_decode_picture_parameter_buffer. */
#define I965_RENDER_BUFFER(category, name) i965_render_##category##_##name##_buffer(ctx, obj_context, obj_buffer)

/* Generates a render function that keeps exactly one buffer of the
 * given kind: the previous store is released and replaced by a new
 * reference to obj_buffer's store.  Only malloc-backed buffers are
 * expected here. */
#define DEF_RENDER_SINGLE_BUFFER_FUNC(category, name, member)           \
    static VAStatus                                                     \
    i965_render_##category##_##name##_buffer(VADriverContextP ctx,      \
                                             struct object_context *obj_context, \
                                             struct object_buffer *obj_buffer) \
    {                                                                   \
        struct category##_state *category = &obj_context->codec_state.category; \
        assert(obj_buffer->buffer_store->bo == NULL);                   \
        assert(obj_buffer->buffer_store->buffer);                       \
        i965_release_buffer_store(&category->member);                   \
        i965_reference_buffer_store(&category->member, obj_buffer->buffer_store); \
        return VA_STATUS_SUCCESS;                                       \
    }

/* Generates a render function that appends to a growable array of
 * buffers (slice parameters/data), growing by NUM_SLICES entries at a
 * time.
 * Fix: the realloc result is taken through a temporary so the original
 * array is neither leaked nor dereferenced as NULL when allocation
 * fails. */
#define DEF_RENDER_MULTI_BUFFER_FUNC(category, name, member)            \
    static VAStatus                                                     \
    i965_render_##category##_##name##_buffer(VADriverContextP ctx,      \
                                             struct object_context *obj_context, \
                                             struct object_buffer *obj_buffer) \
    {                                                                   \
        struct category##_state *category = &obj_context->codec_state.category; \
        if (category->num_##member == category->max_##member) {         \
            void *tmp = realloc(category->member, (category->max_##member + NUM_SLICES) * sizeof(*category->member)); \
            if (!tmp)                                                   \
                return VA_STATUS_ERROR_ALLOCATION_FAILED;               \
            category->member = tmp;                                     \
            memset(category->member + category->max_##member, 0, NUM_SLICES * sizeof(*category->member)); \
            category->max_##member += NUM_SLICES;                       \
        }                                                               \
        i965_release_buffer_store(&category->member[category->num_##member]); \
        i965_reference_buffer_store(&category->member[category->num_##member], obj_buffer->buffer_store); \
        category->num_##member++;                                       \
        return VA_STATUS_SUCCESS;                                       \
    }
/* Instantiate the decode-side render helpers.  Single-buffer kinds keep
 * one store per context; multi-buffer kinds accumulate per-slice stores. */
1646 #define I965_RENDER_DECODE_BUFFER(name) I965_RENDER_BUFFER(decode, name)
1648 #define DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(name, member) DEF_RENDER_SINGLE_BUFFER_FUNC(decode, name, member)
1649 DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(picture_parameter, pic_param)
1650 DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(iq_matrix, iq_matrix)
1651 DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(bit_plane, bit_plane)
1652 DEF_RENDER_DECODE_SINGLE_BUFFER_FUNC(huffman_table, huffman_table)
/* One entry per slice: parameters and raw slice data. */
1654 #define DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(name, member) DEF_RENDER_MULTI_BUFFER_FUNC(decode, name, member)
1655 DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(slice_parameter, slice_params)
1656 DEF_RENDER_DECODE_MULTI_BUFFER_FUNC(slice_data, slice_datas)
1659 i965_decoder_render_picture(VADriverContextP ctx,
1660 VAContextID context,
1661 VABufferID *buffers,
1664 struct i965_driver_data *i965 = i965_driver_data(ctx);
1665 struct object_context *obj_context = CONTEXT(context);
1666 VAStatus vaStatus = VA_STATUS_SUCCESS;
1669 for (i = 0; i < num_buffers && vaStatus == VA_STATUS_SUCCESS; i++) {
1670 struct object_buffer *obj_buffer = BUFFER(buffers[i]);
1673 switch (obj_buffer->type) {
1674 case VAPictureParameterBufferType:
1675 vaStatus = I965_RENDER_DECODE_BUFFER(picture_parameter);
1678 case VAIQMatrixBufferType:
1679 vaStatus = I965_RENDER_DECODE_BUFFER(iq_matrix);
1682 case VABitPlaneBufferType:
1683 vaStatus = I965_RENDER_DECODE_BUFFER(bit_plane);
1686 case VASliceParameterBufferType:
1687 vaStatus = I965_RENDER_DECODE_BUFFER(slice_parameter);
1690 case VASliceDataBufferType:
1691 vaStatus = I965_RENDER_DECODE_BUFFER(slice_data);
1694 case VAHuffmanTableBufferType:
1695 vaStatus = I965_RENDER_DECODE_BUFFER(huffman_table);
1699 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
/* Instantiate the encode-side render helpers.  Both the legacy and the
 * extended ("_ext", current-API) buffer kinds get a storage slot. */
1707 #define I965_RENDER_ENCODE_BUFFER(name) I965_RENDER_BUFFER(encode, name)
1709 #define DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(name, member) DEF_RENDER_SINGLE_BUFFER_FUNC(encode, name, member)
1710 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(sequence_parameter, seq_param)
1711 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(picture_parameter, pic_param)
1712 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(picture_control, pic_control)
1713 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(qmatrix, q_matrix)
1714 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(iqmatrix, iq_matrix)
1715 /* extended buffer */
1716 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(sequence_parameter_ext, seq_param_ext)
1717 DEF_RENDER_ENCODE_SINGLE_BUFFER_FUNC(picture_parameter_ext, pic_param_ext)
/* One entry per slice, legacy and extended variants. */
1719 #define DEF_RENDER_ENCODE_MULTI_BUFFER_FUNC(name, member) DEF_RENDER_MULTI_BUFFER_FUNC(encode, name, member)
1720 DEF_RENDER_ENCODE_MULTI_BUFFER_FUNC(slice_parameter, slice_params)
1721 DEF_RENDER_ENCODE_MULTI_BUFFER_FUNC(slice_parameter_ext, slice_params_ext)
1724 i965_encoder_render_packed_header_parameter_buffer(VADriverContextP ctx,
1725 struct object_context *obj_context,
1726 struct object_buffer *obj_buffer,
1729 struct encode_state *encode = &obj_context->codec_state.encode;
1731 assert(obj_buffer->buffer_store->bo == NULL);
1732 assert(obj_buffer->buffer_store->buffer);
1733 i965_release_buffer_store(&encode->packed_header_param[type_index]);
1734 i965_reference_buffer_store(&encode->packed_header_param[type_index], obj_buffer->buffer_store);
1736 return VA_STATUS_SUCCESS;
1740 i965_encoder_render_packed_header_data_buffer(VADriverContextP ctx,
1741 struct object_context *obj_context,
1742 struct object_buffer *obj_buffer,
1745 struct encode_state *encode = &obj_context->codec_state.encode;
1747 assert(obj_buffer->buffer_store->bo == NULL);
1748 assert(obj_buffer->buffer_store->buffer);
1749 i965_release_buffer_store(&encode->packed_header_data[type_index]);
1750 i965_reference_buffer_store(&encode->packed_header_data[type_index], obj_buffer->buffer_store);
1752 return VA_STATUS_SUCCESS;
1756 i965_encoder_render_misc_parameter_buffer(VADriverContextP ctx,
1757 struct object_context *obj_context,
1758 struct object_buffer *obj_buffer)
1760 struct encode_state *encode = &obj_context->codec_state.encode;
1761 VAEncMiscParameterBuffer *param = NULL;
1763 assert(obj_buffer->buffer_store->bo == NULL);
1764 assert(obj_buffer->buffer_store->buffer);
1766 param = (VAEncMiscParameterBuffer *)obj_buffer->buffer_store->buffer;
1767 i965_release_buffer_store(&encode->misc_param[param->type]);
1768 i965_reference_buffer_store(&encode->misc_param[param->type], obj_buffer->buffer_store);
1770 return VA_STATUS_SUCCESS;
1774 i965_encoder_render_picture(VADriverContextP ctx,
1775 VAContextID context,
1776 VABufferID *buffers,
1779 struct i965_driver_data *i965 = i965_driver_data(ctx);
1780 struct object_context *obj_context = CONTEXT(context);
1781 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
1784 for (i = 0; i < num_buffers; i++) {
1785 struct object_buffer *obj_buffer = BUFFER(buffers[i]);
1788 switch (obj_buffer->type) {
1789 case VAQMatrixBufferType:
1790 vaStatus = I965_RENDER_ENCODE_BUFFER(qmatrix);
1793 case VAIQMatrixBufferType:
1794 vaStatus = I965_RENDER_ENCODE_BUFFER(iqmatrix);
1797 case VAEncSequenceParameterBufferType:
1798 vaStatus = I965_RENDER_ENCODE_BUFFER(sequence_parameter_ext);
1801 case VAEncPictureParameterBufferType:
1802 vaStatus = I965_RENDER_ENCODE_BUFFER(picture_parameter_ext);
1805 case VAEncSliceParameterBufferType:
1806 vaStatus = I965_RENDER_ENCODE_BUFFER(slice_parameter_ext);
1809 case VAEncPackedHeaderParameterBufferType:
1811 struct encode_state *encode = &obj_context->codec_state.encode;
1812 VAEncPackedHeaderParameterBuffer *param = (VAEncPackedHeaderParameterBuffer *)obj_buffer->buffer_store->buffer;
1813 encode->last_packed_header_type = param->type;
1815 vaStatus = i965_encoder_render_packed_header_parameter_buffer(ctx,
1818 va_enc_packed_type_to_idx(encode->last_packed_header_type));
1822 case VAEncPackedHeaderDataBufferType:
1824 struct encode_state *encode = &obj_context->codec_state.encode;
1826 assert(encode->last_packed_header_type == VAEncPackedHeaderSequence ||
1827 encode->last_packed_header_type == VAEncPackedHeaderPicture ||
1828 encode->last_packed_header_type == VAEncPackedHeaderSlice ||
1829 ((encode->last_packed_header_type & VAEncPackedHeaderMiscMask == VAEncPackedHeaderMiscMask) &&
1830 (encode->last_packed_header_type & (~VAEncPackedHeaderMiscMask) != 0)));
1831 vaStatus = i965_encoder_render_packed_header_data_buffer(ctx,
1834 va_enc_packed_type_to_idx(encode->last_packed_header_type));
1838 case VAEncMiscParameterBufferType:
1839 vaStatus = i965_encoder_render_misc_parameter_buffer(ctx,
1845 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
/* Instantiate the video-processing render helper: a single pipeline
 * parameter buffer per context. */
1853 #define I965_RENDER_PROC_BUFFER(name) I965_RENDER_BUFFER(proc, name)
1855 #define DEF_RENDER_PROC_SINGLE_BUFFER_FUNC(name, member) DEF_RENDER_SINGLE_BUFFER_FUNC(proc, name, member)
1856 DEF_RENDER_PROC_SINGLE_BUFFER_FUNC(pipeline_parameter, pipeline_param)
1859 i965_proc_render_picture(VADriverContextP ctx,
1860 VAContextID context,
1861 VABufferID *buffers,
1864 struct i965_driver_data *i965 = i965_driver_data(ctx);
1865 struct object_context *obj_context = CONTEXT(context);
1866 VAStatus vaStatus = VA_STATUS_SUCCESS;
1869 for (i = 0; i < num_buffers && vaStatus == VA_STATUS_SUCCESS; i++) {
1870 struct object_buffer *obj_buffer = BUFFER(buffers[i]);
1873 switch (obj_buffer->type) {
1874 case VAProcPipelineParameterBufferType:
1875 vaStatus = I965_RENDER_PROC_BUFFER(pipeline_parameter);
1879 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_BUFFERTYPE;
1888 i965_RenderPicture(VADriverContextP ctx,
1889 VAContextID context,
1890 VABufferID *buffers,
1893 struct i965_driver_data *i965 = i965_driver_data(ctx);
1894 struct object_context *obj_context;
1895 struct object_config *obj_config;
1897 VAStatus vaStatus = VA_STATUS_ERROR_UNKNOWN;
1899 obj_context = CONTEXT(context);
1900 assert(obj_context);
1902 config = obj_context->config_id;
1903 obj_config = CONFIG(config);
1906 if (VAEntrypointVideoProc == obj_config->entrypoint) {
1907 vaStatus = i965_proc_render_picture(ctx, context, buffers, num_buffers);
1908 } else if (VAEntrypointEncSlice == obj_config->entrypoint ) {
1909 vaStatus = i965_encoder_render_picture(ctx, context, buffers, num_buffers);
1911 vaStatus = i965_decoder_render_picture(ctx, context, buffers, num_buffers);
/* vaEndPicture implementation: sanity-checks that the mandatory buffers
 * for this codec type were submitted, then kicks off the backend via
 * the hw_context's run() hook.
 * NOTE(review): some structural lines (braces, else) are elided in this
 * view; the code lines below are kept byte-identical. */
1918 i965_EndPicture(VADriverContextP ctx, VAContextID context)
1920 struct i965_driver_data *i965 = i965_driver_data(ctx);
1921 struct object_context *obj_context = CONTEXT(context);
1922 struct object_config *obj_config;
1925 assert(obj_context);
1926 config = obj_context->config_id;
1927 obj_config = CONFIG(config);
/* Per-codec-type sanity checks before handing off to the hardware. */
1930 if (obj_context->codec_type == CODEC_PROC) {
1931 assert(VAEntrypointVideoProc == obj_config->entrypoint);
1932 } else if (obj_context->codec_type == CODEC_ENC) {
1933 assert(VAEntrypointEncSlice == obj_config->entrypoint);
/* Encoder needs a picture parameter, a sequence parameter and at
 * least one slice — legacy or extended variant of each. */
1935 assert(obj_context->codec_state.encode.pic_param ||
1936 obj_context->codec_state.encode.pic_param_ext);
1937 assert(obj_context->codec_state.encode.seq_param ||
1938 obj_context->codec_state.encode.seq_param_ext);
1939 assert(obj_context->codec_state.encode.num_slice_params >= 1 ||
1940 obj_context->codec_state.encode.num_slice_params_ext >= 1);
/* Decoder needs a picture parameter and matched slice params/data. */
1942 assert(obj_context->codec_state.decode.pic_param);
1943 assert(obj_context->codec_state.decode.num_slice_params >= 1);
1944 assert(obj_context->codec_state.decode.num_slice_datas >= 1);
1945 assert(obj_context->codec_state.decode.num_slice_params == obj_context->codec_state.decode.num_slice_datas);
/* Hand the accumulated codec state to the per-generation backend. */
1948 assert(obj_context->hw_context->run);
1949 obj_context->hw_context->run(ctx, obj_config->profile, &obj_context->codec_state, obj_context->hw_context);
1951 return VA_STATUS_SUCCESS;
1955 i965_SyncSurface(VADriverContextP ctx,
1956 VASurfaceID render_target)
1958 struct i965_driver_data *i965 = i965_driver_data(ctx);
1959 struct object_surface *obj_surface = SURFACE(render_target);
1961 assert(obj_surface);
1964 drm_intel_bo_wait_rendering(obj_surface->bo);
1966 return VA_STATUS_SUCCESS;
1970 i965_QuerySurfaceStatus(VADriverContextP ctx,
1971 VASurfaceID render_target,
1972 VASurfaceStatus *status) /* out */
1974 struct i965_driver_data *i965 = i965_driver_data(ctx);
1975 struct object_surface *obj_surface = SURFACE(render_target);
1977 assert(obj_surface);
1979 if (obj_surface->bo) {
1980 if (drm_intel_bo_busy(obj_surface->bo)){
1981 *status = VASurfaceRendering;
1984 *status = VASurfaceReady;
1987 *status = VASurfaceReady;
1990 return VA_STATUS_SUCCESS;
1993 static VADisplayAttribute *
1994 get_display_attribute(VADriverContextP ctx, VADisplayAttribType type)
1996 struct i965_driver_data * const i965 = i965_driver_data(ctx);
1999 if (!i965->display_attributes)
2002 for (i = 0; i < i965->num_display_attributes; i++) {
2003 if (i965->display_attributes[i].type == type)
2004 return &i965->display_attributes[i];
2010 i965_display_attributes_init(VADriverContextP ctx)
2012 struct i965_driver_data * const i965 = i965_driver_data(ctx);
2014 i965->num_display_attributes = ARRAY_ELEMS(i965_display_attributes);
2015 i965->display_attributes = malloc(
2016 i965->num_display_attributes * sizeof(i965->display_attributes[0]));
2017 if (!i965->display_attributes)
2021 i965->display_attributes,
2022 i965_display_attributes,
2023 sizeof(i965_display_attributes)
2026 i965->rotation_attrib = get_display_attribute(ctx, VADisplayAttribRotation);
2027 if (!i965->rotation_attrib)
2033 i965_display_attributes_terminate(VADriverContextP ctx)
2035 struct i965_driver_data * const i965 = i965_driver_data(ctx);
2037 if (i965->display_attributes) {
2038 free(i965->display_attributes);
2039 i965->display_attributes = NULL;
2040 i965->num_display_attributes = 0;
2045 * Query display attributes
2046 * The caller must provide a "attr_list" array that can hold at
2047 * least vaMaxNumDisplayAttributes() entries. The actual number of attributes
2048 * returned in "attr_list" is returned in "num_attributes".
2051 i965_QueryDisplayAttributes(
2052 VADriverContextP ctx,
2053 VADisplayAttribute *attribs, /* out */
2054 int *num_attribs_ptr /* out */
2057 const int num_attribs = ARRAY_ELEMS(i965_display_attributes);
2059 if (attribs && num_attribs > 0)
2060 memcpy(attribs, i965_display_attributes, sizeof(i965_display_attributes));
2062 if (num_attribs_ptr)
2063 *num_attribs_ptr = num_attribs;
2065 return VA_STATUS_SUCCESS;
2069 * Get display attributes
2070 * This function returns the current attribute values in "attr_list".
2071 * Only attributes returned with VA_DISPLAY_ATTRIB_GETTABLE set in the "flags" field
2072 * from vaQueryDisplayAttributes() can have their values retrieved.
2075 i965_GetDisplayAttributes(
2076 VADriverContextP ctx,
2077 VADisplayAttribute *attribs, /* inout */
2078 int num_attribs /* in */
2083 for (i = 0; i < num_attribs; i++) {
2084 VADisplayAttribute *src_attrib, * const dst_attrib = &attribs[i];
2086 src_attrib = get_display_attribute(ctx, dst_attrib->type);
2087 if (src_attrib && (src_attrib->flags & VA_DISPLAY_ATTRIB_GETTABLE)) {
2088 dst_attrib->min_value = src_attrib->min_value;
2089 dst_attrib->max_value = src_attrib->max_value;
2090 dst_attrib->value = src_attrib->value;
2093 dst_attrib->flags = VA_DISPLAY_ATTRIB_NOT_SUPPORTED;
2095 return VA_STATUS_SUCCESS;
2099 * Set display attributes
2100 * Only attributes returned with VA_DISPLAY_ATTRIB_SETTABLE set in the "flags" field
2101 * from vaQueryDisplayAttributes() can be set. If the attribute is not settable or
2102 * the value is out of range, the function returns VA_STATUS_ERROR_ATTR_NOT_SUPPORTED
2105 i965_SetDisplayAttributes(
2106 VADriverContextP ctx,
2107 VADisplayAttribute *attribs, /* in */
2108 int num_attribs /* in */
2113 for (i = 0; i < num_attribs; i++) {
2114 VADisplayAttribute *dst_attrib, * const src_attrib = &attribs[i];
2116 dst_attrib = get_display_attribute(ctx, src_attrib->type);
2118 return VA_STATUS_ERROR_ATTR_NOT_SUPPORTED;
2120 if (!(dst_attrib->flags & VA_DISPLAY_ATTRIB_SETTABLE))
2123 if (src_attrib->value < dst_attrib->min_value ||
2124 src_attrib->value > dst_attrib->max_value)
2125 return VA_STATUS_ERROR_INVALID_PARAMETER;
2127 dst_attrib->value = src_attrib->value;
2128 /* XXX: track modified attributes through timestamps */
2130 return VA_STATUS_SUCCESS;
2134 i965_DbgCopySurfaceToBuffer(VADriverContextP ctx,
2135 VASurfaceID surface,
2136 void **buffer, /* out */
2137 unsigned int *stride) /* out */
2140 return VA_STATUS_ERROR_UNIMPLEMENTED;
2144 i965_Init(VADriverContextP ctx)
2146 struct i965_driver_data *i965 = i965_driver_data(ctx);
2148 if (intel_driver_init(ctx) == False)
2149 return VA_STATUS_ERROR_UNKNOWN;
2151 if (IS_HASWELL(i965->intel.device_id))
2152 i965->codec_info = &gen75_hw_codec_info;
2153 else if (IS_G4X(i965->intel.device_id))
2154 i965->codec_info = &g4x_hw_codec_info;
2155 else if (IS_IRONLAKE(i965->intel.device_id))
2156 i965->codec_info = &ironlake_hw_codec_info;
2157 else if (IS_GEN6(i965->intel.device_id))
2158 i965->codec_info = &gen6_hw_codec_info;
2159 else if (IS_GEN7(i965->intel.device_id))
2160 i965->codec_info = &gen7_hw_codec_info;
2162 return VA_STATUS_ERROR_UNKNOWN;
2164 i965->batch = intel_batchbuffer_new(&i965->intel, I915_EXEC_RENDER, 0);
2166 if (!i965_display_attributes_init(ctx))
2167 return VA_STATUS_ERROR_UNKNOWN;
2169 if (i965_post_processing_init(ctx) == False)
2170 return VA_STATUS_ERROR_UNKNOWN;
2172 if (i965_render_init(ctx) == False)
2173 return VA_STATUS_ERROR_UNKNOWN;
2175 #ifdef HAVE_VA_WAYLAND
2176 if (IS_VA_WAYLAND(ctx) && !i965_output_wayland_init(ctx))
2177 return VA_STATUS_ERROR_UNKNOWN;
2181 if (IS_VA_X11(ctx) && !i965_output_dri_init(ctx))
2182 return VA_STATUS_ERROR_UNKNOWN;
2185 _i965InitMutex(&i965->render_mutex);
2186 _i965InitMutex(&i965->pp_mutex);
2188 return VA_STATUS_SUCCESS;
2192 i965_destroy_heap(struct object_heap *heap,
2193 void (*func)(struct object_heap *heap, struct object_base *object))
2195 struct object_base *object;
2196 object_heap_iterator iter;
2198 object = object_heap_first(heap, &iter);
2204 object = object_heap_next(heap, &iter);
2207 object_heap_destroy(heap);
2212 i965_DestroyImage(VADriverContextP ctx, VAImageID image);
2215 i965_CreateImage(VADriverContextP ctx,
2216 VAImageFormat *format,
2219 VAImage *out_image) /* out */
2221 struct i965_driver_data *i965 = i965_driver_data(ctx);
2222 struct object_image *obj_image;
2223 VAStatus va_status = VA_STATUS_ERROR_OPERATION_FAILED;
2225 unsigned int width2, height2, size2, size;
2227 out_image->image_id = VA_INVALID_ID;
2228 out_image->buf = VA_INVALID_ID;
2230 image_id = NEW_IMAGE_ID();
2231 if (image_id == VA_INVALID_ID)
2232 return VA_STATUS_ERROR_ALLOCATION_FAILED;
2234 obj_image = IMAGE(image_id);
2236 return VA_STATUS_ERROR_ALLOCATION_FAILED;
2237 obj_image->bo = NULL;
2238 obj_image->palette = NULL;
2239 obj_image->derived_surface = VA_INVALID_ID;
2241 VAImage * const image = &obj_image->image;
2242 image->image_id = image_id;
2243 image->buf = VA_INVALID_ID;
2245 size = width * height;
2246 width2 = (width + 1) / 2;
2247 height2 = (height + 1) / 2;
2248 size2 = width2 * height2;
2250 image->num_palette_entries = 0;
2251 image->entry_bytes = 0;
2252 memset(image->component_order, 0, sizeof(image->component_order));
2254 switch (format->fourcc) {
2255 case VA_FOURCC('I','A','4','4'):
2256 case VA_FOURCC('A','I','4','4'):
2257 image->num_planes = 1;
2258 image->pitches[0] = width;
2259 image->offsets[0] = 0;
2260 image->data_size = image->offsets[0] + image->pitches[0] * height;
2261 image->num_palette_entries = 16;
2262 image->entry_bytes = 3;
2263 image->component_order[0] = 'R';
2264 image->component_order[1] = 'G';
2265 image->component_order[2] = 'B';
2267 case VA_FOURCC('B','G','R','A'):
2268 case VA_FOURCC('R','G','B','A'):
2269 case VA_FOURCC('B','G','R','X'):
2270 case VA_FOURCC('R','G','B','X'):
2271 image->num_planes = 1;
2272 image->pitches[0] = width * 4;
2273 image->offsets[0] = 0;
2274 image->data_size = image->offsets[0] + image->pitches[0] * height;
2276 case VA_FOURCC('Y','V','1','2'):
2277 image->num_planes = 3;
2278 image->pitches[0] = width;
2279 image->offsets[0] = 0;
2280 image->pitches[1] = width2;
2281 image->offsets[1] = size + size2;
2282 image->pitches[2] = width2;
2283 image->offsets[2] = size;
2284 image->data_size = size + 2 * size2;
2286 case VA_FOURCC('I','4','2','0'):
2287 image->num_planes = 3;
2288 image->pitches[0] = width;
2289 image->offsets[0] = 0;
2290 image->pitches[1] = width2;
2291 image->offsets[1] = size;
2292 image->pitches[2] = width2;
2293 image->offsets[2] = size + size2;
2294 image->data_size = size + 2 * size2;
2296 case VA_FOURCC('N','V','1','2'):
2297 image->num_planes = 2;
2298 image->pitches[0] = width;
2299 image->offsets[0] = 0;
2300 image->pitches[1] = width;
2301 image->offsets[1] = size;
2302 image->data_size = size + 2 * size2;
2304 case VA_FOURCC('Y','U','Y','2'):
2305 case VA_FOURCC('U','Y','V','Y'):
2306 image->num_planes = 1;
2307 image->pitches[0] = width * 2;
2308 image->offsets[0] = 0;
2309 image->data_size = size * 2;
2315 va_status = i965_CreateBuffer(ctx, 0, VAImageBufferType,
2316 image->data_size, 1, NULL, &image->buf);
2317 if (va_status != VA_STATUS_SUCCESS)
2320 obj_image->bo = BUFFER(image->buf)->buffer_store->bo;
2321 dri_bo_reference(obj_image->bo);
2323 if (image->num_palette_entries > 0 && image->entry_bytes > 0) {
2324 obj_image->palette = malloc(image->num_palette_entries * sizeof(obj_image->palette));
2325 if (!obj_image->palette)
2329 image->image_id = image_id;
2330 image->format = *format;
2331 image->width = width;
2332 image->height = height;
2334 *out_image = *image;
2335 return VA_STATUS_SUCCESS;
2338 i965_DestroyImage(ctx, image_id);
/*
 * i965_check_alloc_surface_bo - (re)allocate the buffer object backing a VA
 * surface for the requested fourcc/subsampling: computes the aligned pitch,
 * the per-plane Y/Cb/Cr offsets and the total region size, then allocates
 * either a Y-tiled or a linear BO.
 *
 * NOTE(review): this capture carries stray leading line numbers on every
 * line and is missing some lines (braces, break statements, and apparently
 * a "tiled" parameter), so the comments below describe only what is visible.
 */
2343 i965_check_alloc_surface_bo(VADriverContextP ctx,
2344 struct object_surface *obj_surface,
2346 unsigned int fourcc,
2347 unsigned int subsampling)
2349 struct i965_driver_data *i965 = i965_driver_data(ctx);
2350 int region_width, region_height;
/* A surface that already owns a BO must match the requested layout exactly. */
2352 if (obj_surface->bo) {
2353 assert(obj_surface->fourcc);
2354 assert(obj_surface->fourcc == fourcc);
2355 assert(obj_surface->subsampling == subsampling);
2359 obj_surface->x_cb_offset = 0; /* X offset is always 0 */
2360 obj_surface->x_cr_offset = 0;
/*
 * First branch (presumably the tiled path -- the branch header is not
 * visible in this capture): only this set of fourccs is supported here.
 */
2363 assert(fourcc == VA_FOURCC('N', 'V', '1', '2') ||
2364 fourcc == VA_FOURCC('I', 'M', 'C', '1') ||
2365 fourcc == VA_FOURCC('I', 'M', 'C', '3') ||
2366 fourcc == VA_FOURCC('R', 'G', 'B', 'A') ||
2367 fourcc == VA_FOURCC('R', 'G', 'B', 'X') ||
2368 fourcc == VA_FOURCC('B', 'G', 'R', 'A') ||
2369 fourcc == VA_FOURCC('B', 'G', 'R', 'X') ||
2370 fourcc == VA_FOURCC('Y', 'U', 'Y', '2'));
/* 128x32 alignment matches the Y-tiling requirement noted further below. */
2372 obj_surface->width = ALIGN(obj_surface->orig_width, 128);
2373 obj_surface->height = ALIGN(obj_surface->orig_height, 32);
2374 region_height = obj_surface->height;
/* Bytes-per-pixel of the packed/primary plane decides the region width. */
2376 if (fourcc == VA_FOURCC('N', 'V', '1', '2') ||
2377 fourcc == VA_FOURCC('I', 'M', 'C', '1') ||
2378 fourcc == VA_FOURCC('I', 'M', 'C', '3')) {
2379 obj_surface->cb_cr_pitch = obj_surface->width;
2380 region_width = obj_surface->width;
2382 else if (fourcc == VA_FOURCC('Y','U', 'Y', '2')) {
2383 obj_surface->cb_cr_pitch = obj_surface->width * 2;
2384 region_width = obj_surface->width * 2;
2386 else if (fourcc == VA_FOURCC('R', 'G', 'B', 'A') ||
2387 fourcc == VA_FOURCC('R', 'G', 'B', 'X') ||
2388 fourcc == VA_FOURCC('B', 'G', 'R', 'A') ||
2389 fourcc == VA_FOURCC('B', 'G', 'R', 'X')) {
2390 region_width = obj_surface->width * 4;
/* Per-fourcc chroma geometry and plane offsets (tiled path). */
2397 if (fourcc == VA_FOURCC('N', 'V', '1', '2')) {
2398 assert(subsampling == SUBSAMPLE_YUV420);
/* NV12: a single interleaved CbCr plane directly after the Y plane. */
2399 obj_surface->y_cb_offset = obj_surface->height;
2400 obj_surface->y_cr_offset = obj_surface->height;
2401 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2402 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2403 region_height = obj_surface->height + ALIGN(obj_surface->cb_cr_height, 32);
2404 } else if (fourcc == VA_FOURCC('I', 'M', 'C', '1') ||
2405 fourcc == VA_FOURCC('I', 'M', 'C', '3') ||
2406 fourcc == VA_FOURCC('R', 'G', 'B', 'A') ||
2407 fourcc == VA_FOURCC('R', 'G', 'B', 'X') ||
2408 fourcc == VA_FOURCC('B', 'G', 'R', 'A') ||
2409 fourcc == VA_FOURCC('B', 'G', 'R', 'X') ||
2410 fourcc == VA_FOURCC('Y', 'U', 'Y', '2')) {
/* Chroma plane dimensions as a function of the subsampling mode. */
2411 switch (subsampling) {
2412 case SUBSAMPLE_YUV400:
2413 obj_surface->cb_cr_width = 0;
2414 obj_surface->cb_cr_height = 0;
2417 case SUBSAMPLE_YUV420:
2418 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2419 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2422 case SUBSAMPLE_YUV422H:
2423 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2424 obj_surface->cb_cr_height = obj_surface->orig_height;
2427 case SUBSAMPLE_YUV422V:
2428 obj_surface->cb_cr_width = obj_surface->orig_width;
2429 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2432 case SUBSAMPLE_YUV444:
2433 obj_surface->cb_cr_width = obj_surface->orig_width;
2434 obj_surface->cb_cr_height = obj_surface->orig_height;
2437 case SUBSAMPLE_YUV411:
2438 obj_surface->cb_cr_width = obj_surface->orig_width / 4;
2439 obj_surface->cb_cr_height = obj_surface->orig_height;
2441 case SUBSAMPLE_RGBX:
/* Two separate 32-row-aligned chroma planes follow the Y plane. */
2448 region_height = obj_surface->height + ALIGN(obj_surface->cb_cr_height, 32) * 2;
/* IMC1 stores Cr before Cb; IMC3 stores Cb before Cr. */
2450 if (fourcc == VA_FOURCC('I', 'M', 'C', '1')) {
2451 obj_surface->y_cr_offset = obj_surface->height;
2452 obj_surface->y_cb_offset = obj_surface->y_cr_offset + ALIGN(obj_surface->cb_cr_height, 32);
2453 } else if (fourcc == VA_FOURCC('I', 'M', 'C', '3')){
2454 obj_surface->y_cb_offset = obj_surface->height;
2455 obj_surface->y_cr_offset = obj_surface->y_cb_offset + ALIGN(obj_surface->cb_cr_height, 32);
/* Packed formats: everything lives in the single interleaved plane. */
2457 else if (fourcc == VA_FOURCC('Y','U', 'Y', '2')) {
2458 obj_surface->y_cb_offset = 0;
2459 obj_surface->y_cr_offset = 0;
2460 region_height = obj_surface->height;
2462 else if (fourcc == VA_FOURCC('R', 'G', 'B', 'A') ||
2463 fourcc == VA_FOURCC('R', 'G', 'B', 'X') ||
2464 fourcc == VA_FOURCC('B', 'G', 'R', 'A') ||
2465 fourcc == VA_FOURCC('B', 'G', 'R', 'X')) {
2466 region_height = obj_surface->height;
/*
 * Second branch (presumably the linear/untiled path): IMC1/IMC3 are only
 * ever used with the tiled layout above.
 */
2470 assert(fourcc != VA_FOURCC('I', 'M', 'C', '1') &&
2471 fourcc != VA_FOURCC('I', 'M', 'C', '3'));
2472 assert(subsampling == SUBSAMPLE_YUV420 ||
2473 subsampling == SUBSAMPLE_YUV422H ||
2474 subsampling == SUBSAMPLE_YUV422V ||
2475 subsampling == SUBSAMPLE_RGBX);
2477 region_width = obj_surface->width;
2478 region_height = obj_surface->height;
2481 case VA_FOURCC('N', 'V', '1', '2'):
2482 obj_surface->y_cb_offset = obj_surface->height;
2483 obj_surface->y_cr_offset = obj_surface->height;
2484 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2485 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2486 obj_surface->cb_cr_pitch = obj_surface->width;
2487 region_height = obj_surface->height + obj_surface->height / 2;
/* YV12 swaps the Cb/Cr plane order relative to I420. */
2490 case VA_FOURCC('Y', 'V', '1', '2'):
2491 case VA_FOURCC('I', '4', '2', '0'):
2492 if (fourcc == VA_FOURCC('Y', 'V', '1', '2')) {
2493 obj_surface->y_cr_offset = obj_surface->height;
2494 obj_surface->y_cb_offset = obj_surface->height + obj_surface->height / 4;
2496 obj_surface->y_cb_offset = obj_surface->height;
2497 obj_surface->y_cr_offset = obj_surface->height + obj_surface->height / 4;
2500 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2501 obj_surface->cb_cr_height = obj_surface->orig_height / 2;
2502 obj_surface->cb_cr_pitch = obj_surface->width / 2;
2503 region_height = obj_surface->height + obj_surface->height / 2;
2506 case VA_FOURCC('Y','U', 'Y', '2'):
2507 obj_surface->y_cb_offset = 0;
2508 obj_surface->y_cr_offset = 0;
2509 obj_surface->cb_cr_width = obj_surface->orig_width / 2;
2510 obj_surface->cb_cr_height = obj_surface->orig_height;
2511 obj_surface->cb_cr_pitch = obj_surface->width * 2;
2512 region_width = obj_surface->width * 2;
2513 region_height = obj_surface->height;
2515 case VA_FOURCC('R', 'G', 'B', 'A'):
2516 case VA_FOURCC('R', 'G', 'B', 'X'):
2517 case VA_FOURCC('B', 'G', 'R', 'A'):
2518 case VA_FOURCC('B', 'G', 'R', 'X'):
2519 region_width = obj_surface->width * 4;
2520 region_height = obj_surface->height;
/* Round the allocation up to a whole page (0x1000 bytes). */
2529 obj_surface->size = ALIGN(region_width * region_height, 0x1000);
2532 uint32_t tiling_mode = I915_TILING_Y; /* always uses Y-tiled format */
2533 unsigned long pitch;
2535 obj_surface->bo = drm_intel_bo_alloc_tiled(i965->intel.bufmgr,
/* The kernel must have honored the requested tiling and a sane pitch. */
2543 assert(tiling_mode == I915_TILING_Y);
2544 assert(pitch == obj_surface->width ||
2545 pitch == obj_surface->width * 2 ||
2546 pitch == obj_surface->width * 4) ;
/* Linear fallback allocation for the untiled case. */
2548 obj_surface->bo = dri_bo_alloc(i965->intel.bufmgr,
2554 obj_surface->fourcc = fourcc;
2555 obj_surface->subsampling = subsampling;
2556 assert(obj_surface->bo);
/*
 * vaDeriveImage: create a VAImage that aliases the surface's own buffer
 * object (zero-copy access), filling in per-plane pitches/offsets from the
 * surface layout computed by i965_check_alloc_surface_bo().
 *
 * On failure the partially-built image is torn down via i965_DestroyImage().
 */
2559 VAStatus i965_DeriveImage(VADriverContextP ctx,
2560 VASurfaceID surface,
2561 VAImage *out_image) /* out */
2563 struct i965_driver_data *i965 = i965_driver_data(ctx);
2564 struct object_image *obj_image;
2565 struct object_surface *obj_surface;
2567 unsigned int w_pitch, h_pitch;
2568 VAStatus va_status = VA_STATUS_ERROR_OPERATION_FAILED;
2570 out_image->image_id = VA_INVALID_ID;
2571 obj_surface = SURFACE(surface);
2574 return VA_STATUS_ERROR_INVALID_SURFACE;
/* Surface not yet backed by a BO: guess a format and allocate one now. */
2576 if (!obj_surface->bo) {
2577 unsigned int is_tiled = 0;
2578 unsigned int fourcc = VA_FOURCC('Y', 'V', '1', '2');
2579 i965_guess_surface_format(ctx, surface, &fourcc, &is_tiled);
2580 int sampling = get_sampling_from_fourcc(fourcc);
2581 i965_check_alloc_surface_bo(ctx, obj_surface, is_tiled, fourcc, sampling);
2584 assert(obj_surface->fourcc);
/* Aligned luma pitch/rows of the surface drive all plane offsets below. */
2586 w_pitch = obj_surface->width;
2587 h_pitch = obj_surface->height;
2589 image_id = NEW_IMAGE_ID();
2591 if (image_id == VA_INVALID_ID)
2592 return VA_STATUS_ERROR_ALLOCATION_FAILED;
2594 obj_image = IMAGE(image_id);
2597 return VA_STATUS_ERROR_ALLOCATION_FAILED;
2599 obj_image->bo = NULL;
2600 obj_image->palette = NULL;
2601 obj_image->derived_surface = VA_INVALID_ID;
2603 VAImage * const image = &obj_image->image;
2605 memset(image, 0, sizeof(*image));
2606 image->image_id = image_id;
2607 image->buf = VA_INVALID_ID;
2608 image->num_palette_entries = 0;
2609 image->entry_bytes = 0;
2610 image->width = obj_surface->orig_width;
2611 image->height = obj_surface->orig_height;
2612 image->data_size = obj_surface->size;
2614 image->format.fourcc = obj_surface->fourcc;
2615 image->format.byte_order = VA_LSB_FIRST;
/* NOTE(review): hard-coded 12 bpp (NV12/I420); not adjusted for the
   YUY2/RGB cases handled below -- confirm whether callers rely on it. */
2616 image->format.bits_per_pixel = 12;
2618 switch (image->format.fourcc) {
2619 case VA_FOURCC('Y', 'V', '1', '2'):
2620 image->num_planes = 3;
2621 image->pitches[0] = w_pitch; /* Y */
2622 image->offsets[0] = 0;
2623 image->pitches[1] = obj_surface->cb_cr_pitch; /* V */
2624 image->offsets[1] = w_pitch * obj_surface->y_cr_offset;
2625 image->pitches[2] = obj_surface->cb_cr_pitch; /* U */
2626 image->offsets[2] = w_pitch * obj_surface->y_cb_offset;
2629 case VA_FOURCC('N', 'V', '1', '2'):
2630 image->num_planes = 2;
2631 image->pitches[0] = w_pitch; /* Y */
2632 image->offsets[0] = 0;
2633 image->pitches[1] = obj_surface->cb_cr_pitch; /* UV */
2634 image->offsets[1] = w_pitch * obj_surface->y_cb_offset;
2637 case VA_FOURCC('I', '4', '2', '0'):
2638 image->num_planes = 3;
2639 image->pitches[0] = w_pitch; /* Y */
2640 image->offsets[0] = 0;
2641 image->pitches[1] = obj_surface->cb_cr_pitch; /* U */
2642 image->offsets[1] = w_pitch * obj_surface->y_cb_offset;
2643 image->pitches[2] = obj_surface->cb_cr_pitch; /* V */
2644 image->offsets[2] = w_pitch * obj_surface->y_cr_offset;
/* Packed formats: one plane; the extra pitches/offsets are informational. */
2646 case VA_FOURCC('Y', 'U', 'Y', '2'):
2647 image->num_planes = 1;
2648 image->pitches[0] = obj_surface->width * 2; /* Y, width is aligned already */
2649 image->offsets[0] = 0;
2650 image->pitches[1] = obj_surface->width * 2; /* U */
2651 image->offsets[1] = 0;
2652 image->pitches[2] = obj_surface->width * 2; /* V */
2653 image->offsets[2] = 0;
2655 case VA_FOURCC('R', 'G', 'B', 'A'):
2656 case VA_FOURCC('R', 'G', 'B', 'X'):
2657 case VA_FOURCC('B', 'G', 'R', 'A'):
2658 case VA_FOURCC('B', 'G', 'R', 'X'):
2659 image->num_planes = 1;
2660 image->pitches[0] = obj_surface->width * 4;
/* Wrap the surface BO in a VA buffer instead of allocating fresh storage. */
2666 va_status = i965_create_buffer_internal(ctx, 0, VAImageBufferType,
2667 obj_surface->size, 1, NULL, obj_surface->bo, &image->buf);
2668 if (va_status != VA_STATUS_SUCCESS)
2671 obj_image->bo = BUFFER(image->buf)->buffer_store->bo;
2672 dri_bo_reference(obj_image->bo);
2674 if (image->num_palette_entries > 0 && image->entry_bytes > 0) {
/* NOTE(review): sizeof(obj_image->palette) is the size of the POINTER,
   not of one palette entry -- likely meant sizeof(*obj_image->palette). */
2675 obj_image->palette = malloc(image->num_palette_entries * sizeof(obj_image->palette));
2676 if (!obj_image->palette) {
2677 va_status = VA_STATUS_ERROR_ALLOCATION_FAILED;
/* Success: publish the image and link it to its parent surface. */
2682 *out_image = *image;
2683 obj_surface->flags |= SURFACE_DERIVED;
2684 obj_image->derived_surface = surface;
2686 return VA_STATUS_SUCCESS;
/* Error path: tear down everything allocated above. */
2689 i965_DestroyImage(ctx, image_id);
/* Heap destructor callback for image objects: returns the object to its heap. */
2694 i965_destroy_image(struct object_heap *heap, struct object_base *obj)
2696 object_heap_free(heap, obj);
/*
 * vaDestroyImage: drop the image's BO reference, destroy its VA buffer and
 * palette, detach it from a derived surface (if any), then free the object.
 * Destroying an unknown image id is treated as success.
 */
2701 i965_DestroyImage(VADriverContextP ctx, VAImageID image)
2703 struct i965_driver_data *i965 = i965_driver_data(ctx);
2704 struct object_image *obj_image = IMAGE(image);
2705 struct object_surface *obj_surface;
2708 return VA_STATUS_SUCCESS;
/* dri_bo_unreference(NULL) is harmless, so no guard is needed here. */
2710 dri_bo_unreference(obj_image->bo);
2711 obj_image->bo = NULL;
2713 if (obj_image->image.buf != VA_INVALID_ID) {
2714 i965_DestroyBuffer(ctx, obj_image->image.buf);
2715 obj_image->image.buf = VA_INVALID_ID;
2718 if (obj_image->palette) {
2719 free(obj_image->palette);
2720 obj_image->palette = NULL;
/* If this image was derived from a surface, clear the surface's flag. */
2723 obj_surface = SURFACE(obj_image->derived_surface);
2726 obj_surface->flags &= ~SURFACE_DERIVED;
2729 i965_destroy_image(&i965->image_heap, (struct object_base *)obj_image);
2731 return VA_STATUS_SUCCESS;
2735 * pointer to an array holding the palette data. The size of the array is
2736 * num_palette_entries * entry_bytes in size. The order of the components
2737 * in the palette is described by the component_order in VASubpicture struct
/*
 * vaSetImagePalette: copy the caller's 3-bytes-per-entry palette into the
 * image's internal palette, packing each entry into one unsigned int as
 * (byte0 << 16) | (byte1 << 8) | byte2.
 * NOTE(review): assumes entry_bytes == 3; presumably the byte order follows
 * the image's component_order -- confirm against the format description.
 */
2740 i965_SetImagePalette(VADriverContextP ctx,
2742 unsigned char *palette)
2744 struct i965_driver_data *i965 = i965_driver_data(ctx);
2747 struct object_image *obj_image = IMAGE(image);
2749 return VA_STATUS_ERROR_INVALID_IMAGE;
/* Images without an allocated palette cannot accept palette data. */
2751 if (!obj_image->palette)
2752 return VA_STATUS_ERROR_ALLOCATION_FAILED; /* XXX: unpaletted/error */
2754 for (i = 0; i < obj_image->image.num_palette_entries; i++)
2755 obj_image->palette[i] = (((unsigned int)palette[3*i + 0] << 16) |
2756 ((unsigned int)palette[3*i + 1] << 8) |
2757 (unsigned int)palette[3*i + 2]);
2758 return VA_STATUS_SUCCESS;
/*
 * Map a pixel-format fourcc to its chroma subsampling class
 * (SUBSAMPLE_YUV420 / SUBSAMPLE_YUV422H / SUBSAMPLE_RGBX).
 * Returns -1 for fourccs not listed below.
 * NOTE(review): switch/break lines are missing from this capture.
 */
2762 get_sampling_from_fourcc(unsigned int fourcc)
2764 int surface_sampling = -1;
2766 case VA_FOURCC('N', 'V', '1', '2'):
2767 case VA_FOURCC('Y', 'V', '1', '2'):
2768 case VA_FOURCC('I', '4', '2', '0'):
2769 case VA_FOURCC('I', 'M', 'C', '1'):
2770 case VA_FOURCC('I', 'M', 'C', '3'):
2771 surface_sampling = SUBSAMPLE_YUV420;
2773 case VA_FOURCC('Y', 'U', 'Y', '2'):
2774 surface_sampling = SUBSAMPLE_YUV422H;
2776 case VA_FOURCC('R','G','B','A'):
2777 case VA_FOURCC('R','G','B','X'):
2778 case VA_FOURCC('B','G','R','A'):
2779 case VA_FOURCC('B','G','R','X'):
2780 surface_sampling = SUBSAMPLE_RGBX;
2785 return surface_sampling;
/*
 * memcpy_pic - copy a rectangular pixel region row by row between two
 * buffers with (possibly different) strides.
 *
 * @dst:        destination base pointer (already offset to the first row)
 * @dst_stride: bytes between successive destination rows
 * @src:        source base pointer (already offset to the first row)
 * @src_stride: bytes between successive source rows
 * @len:        number of bytes to copy per row
 * @height:     number of rows
 *
 * Fix: as captured, the loop body never advanced dst/src by their strides
 * (the advance lines were lost), so every row would have been copied onto
 * the first destination row. Restored the canonical strided row copy.
 */
static void
memcpy_pic(uint8_t *dst, unsigned int dst_stride,
           const uint8_t *src, unsigned int src_stride,
           unsigned int len, unsigned int height)
{
    unsigned int i;

    for (i = 0; i < height; i++) {
        memcpy(dst, src, len);
        dst += dst_stride;
        src += src_stride;
    }
}
/*
 * Copy the requested rectangle of an I420-laid-out surface BO into a
 * caller-mapped I420/YV12 VAImage buffer, one plane at a time.
 * U/V index swap handles the I420-vs-YV12 plane-order difference.
 * NOTE(review): Y is presumably a plane-index constant (0) defined
 * elsewhere in this file -- not visible in this capture.
 */
2803 get_image_i420(struct object_image *obj_image, uint8_t *image_data,
2804 struct object_surface *obj_surface,
2805 const VARectangle *rect)
2807 uint8_t *dst[3], *src[3];
/* Same fourcc: image planes in surface order; otherwise swap U and V. */
2809 const int U = obj_image->image.format.fourcc == obj_surface->fourcc ? 1 : 2;
2810 const int V = obj_image->image.format.fourcc == obj_surface->fourcc ? 2 : 1;
2811 unsigned int tiling, swizzle;
2813 if (!obj_surface->bo)
2816 assert(obj_surface->fourcc);
2817 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
/* Tiled BOs must go through the GTT to get linearized CPU access. */
2819 if (tiling != I915_TILING_NONE)
2820 drm_intel_gem_bo_map_gtt(obj_surface->bo);
2822 dri_bo_map(obj_surface->bo, 0);
2824 if (!obj_surface->bo->virtual)
2827 /* Dest VA image has either I420 or YV12 format.
2828 Source VA surface always has I420 format */
2829 dst[Y] = image_data + obj_image->image.offsets[Y];
2830 src[0] = (uint8_t *)obj_surface->bo->virtual;
2831 dst[U] = image_data + obj_image->image.offsets[U];
2832 src[1] = src[0] + obj_surface->width * obj_surface->height;
2833 dst[V] = image_data + obj_image->image.offsets[V];
2834 src[2] = src[1] + (obj_surface->width / 2) * (obj_surface->height / 2);
/* Y plane: full-resolution rectangle. */
2837 dst[Y] += rect->y * obj_image->image.pitches[Y] + rect->x;
2838 src[0] += rect->y * obj_surface->width + rect->x;
2839 memcpy_pic(dst[Y], obj_image->image.pitches[Y],
2840 src[0], obj_surface->width,
2841 rect->width, rect->height);
/* Chroma planes: rectangle halved in both dimensions (4:2:0). */
2844 dst[U] += (rect->y / 2) * obj_image->image.pitches[U] + rect->x / 2;
2845 src[1] += (rect->y / 2) * obj_surface->width / 2 + rect->x / 2;
2846 memcpy_pic(dst[U], obj_image->image.pitches[U],
2847 src[1], obj_surface->width / 2,
2848 rect->width / 2, rect->height / 2);
2851 dst[V] += (rect->y / 2) * obj_image->image.pitches[V] + rect->x / 2;
2852 src[2] += (rect->y / 2) * obj_surface->width / 2 + rect->x / 2;
2853 memcpy_pic(dst[V], obj_image->image.pitches[V],
2854 src[2], obj_surface->width / 2,
2855 rect->width / 2, rect->height / 2);
2857 if (tiling != I915_TILING_NONE)
2858 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
2860 dri_bo_unmap(obj_surface->bo);
/*
 * Copy the requested rectangle of an NV12 surface BO into a caller-mapped
 * NV12 VAImage buffer: full-resolution Y plane, then the interleaved UV
 * plane at half vertical resolution.
 */
2864 get_image_nv12(struct object_image *obj_image, uint8_t *image_data,
2865 struct object_surface *obj_surface,
2866 const VARectangle *rect)
2868 uint8_t *dst[2], *src[2];
2869 unsigned int tiling, swizzle;
2871 if (!obj_surface->bo)
2874 assert(obj_surface->fourcc);
2875 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
/* Tiled BOs must be mapped through the GTT for linear CPU access. */
2877 if (tiling != I915_TILING_NONE)
2878 drm_intel_gem_bo_map_gtt(obj_surface->bo);
2880 dri_bo_map(obj_surface->bo, 0);
2882 if (!obj_surface->bo->virtual)
2885 /* Both dest VA image and source surface have NV12 format */
2886 dst[0] = image_data + obj_image->image.offsets[0];
2887 src[0] = (uint8_t *)obj_surface->bo->virtual;
2888 dst[1] = image_data + obj_image->image.offsets[1];
2889 src[1] = src[0] + obj_surface->width * obj_surface->height;
/* Y plane. */
2892 dst[0] += rect->y * obj_image->image.pitches[0] + rect->x;
2893 src[0] += rect->y * obj_surface->width + rect->x;
2894 memcpy_pic(dst[0], obj_image->image.pitches[0],
2895 src[0], obj_surface->width,
2896 rect->width, rect->height);
/* UV plane: x is rounded down to an even offset to keep U/V pairing. */
2899 dst[1] += (rect->y / 2) * obj_image->image.pitches[1] + (rect->x & -2);
2900 src[1] += (rect->y / 2) * obj_surface->width + (rect->x & -2);
2901 memcpy_pic(dst[1], obj_image->image.pitches[1],
2902 src[1], obj_surface->width,
2903 rect->width, rect->height / 2);
2905 if (tiling != I915_TILING_NONE)
2906 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
2908 dri_bo_unmap(obj_surface->bo);
/*
 * Copy the requested rectangle of a packed-YUY2 surface BO into a
 * caller-mapped YUY2 VAImage buffer (single plane, 2 bytes per pixel).
 */
2912 get_image_yuy2(struct object_image *obj_image, uint8_t *image_data,
2913 struct object_surface *obj_surface,
2914 const VARectangle *rect)
2917 unsigned int tiling, swizzle;
2919 if (!obj_surface->bo)
2922 assert(obj_surface->fourcc);
2923 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
/* Tiled BOs must be mapped through the GTT for linear CPU access. */
2925 if (tiling != I915_TILING_NONE)
2926 drm_intel_gem_bo_map_gtt(obj_surface->bo);
2928 dri_bo_map(obj_surface->bo, 0);
2930 if (!obj_surface->bo->virtual)
2933 /* Both dest VA image and source surface have YUYV format */
2934 dst = image_data + obj_image->image.offsets[0];
2935 src = (uint8_t *)obj_surface->bo->virtual;
/* Packed plane: x offsets and widths are scaled by 2 bytes/pixel.
   NOTE(review): the src row step uses obj_surface->width, not width*2 --
   looks inconsistent with the *2 stride passed to memcpy_pic; confirm. */
2938 dst += rect->y * obj_image->image.pitches[0] + rect->x*2;
2939 src += rect->y * obj_surface->width + rect->x*2;
2940 memcpy_pic(dst, obj_image->image.pitches[0],
2941 src, obj_surface->width*2,
2942 rect->width*2, rect->height);
2944 if (tiling != I915_TILING_NONE)
2945 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
2947 dri_bo_unmap(obj_surface->bo);
/*
 * Software (CPU memcpy) vaGetImage path: validate the region against both
 * surface and image bounds, require matching fourccs, map the image buffer
 * and dispatch to the per-format copy helper.
 */
2951 i965_sw_getimage(VADriverContextP ctx,
2952 VASurfaceID surface,
2953 int x, /* coordinates of the upper left source pixel */
2955 unsigned int width, /* width and height of the region */
2956 unsigned int height,
2959 struct i965_driver_data *i965 = i965_driver_data(ctx);
2960 struct i965_render_state *render_state = &i965->render_state;
2962 struct object_surface *obj_surface = SURFACE(surface);
2964 return VA_STATUS_ERROR_INVALID_SURFACE;
2966 struct object_image *obj_image = IMAGE(image);
2968 return VA_STATUS_ERROR_INVALID_IMAGE;
/* The rectangle must lie within both the surface and the image. */
2971 return VA_STATUS_ERROR_INVALID_PARAMETER;
2972 if (x + width > obj_surface->orig_width ||
2973 y + height > obj_surface->orig_height)
2974 return VA_STATUS_ERROR_INVALID_PARAMETER;
2975 if (x + width > obj_image->image.width ||
2976 y + height > obj_image->image.height)
2977 return VA_STATUS_ERROR_INVALID_PARAMETER;
/* This path cannot convert formats; surface and image must agree. */
2979 if (obj_surface->fourcc != obj_image->image.format.fourcc)
2980 return VA_STATUS_ERROR_INVALID_IMAGE_FORMAT;
2983 void *image_data = NULL;
2985 va_status = i965_MapBuffer(ctx, obj_image->image.buf, &image_data);
2986 if (va_status != VA_STATUS_SUCCESS)
2993 rect.height = height;
/* Dispatch on the image fourcc; the interleaved_uv render-state flag tells
   us which native layout the decoder produced. */
2995 switch (obj_image->image.format.fourcc) {
2996 case VA_FOURCC('Y','V','1','2'):
2997 case VA_FOURCC('I','4','2','0'):
2998 /* I420 is native format for MPEG-2 decoded surfaces */
2999 if (render_state->interleaved_uv)
3000 goto operation_failed;
3001 get_image_i420(obj_image, image_data, obj_surface, &rect);
3003 case VA_FOURCC('N','V','1','2'):
3004 /* NV12 is native format for H.264 decoded surfaces */
3005 if (!render_state->interleaved_uv)
3006 goto operation_failed;
3007 get_image_nv12(obj_image, image_data, obj_surface, &rect);
3009 case VA_FOURCC('Y','U','Y','2'):
3010 /* YUY2 is the format supported by overlay plane */
3011 get_image_yuy2(obj_image, image_data, obj_surface, &rect);
3015 va_status = VA_STATUS_ERROR_OPERATION_FAILED;
3019 i965_UnmapBuffer(ctx, obj_image->image.buf);
/*
 * Hardware-accelerated vaGetImage path: after the same bounds checks as the
 * software path, blit the surface into the image via i965_image_processing()
 * (which can also convert between formats).
 */
3024 i965_hw_getimage(VADriverContextP ctx,
3025 VASurfaceID surface,
3026 int x, /* coordinates of the upper left source pixel */
3028 unsigned int width, /* width and height of the region */
3029 unsigned int height,
3032 struct i965_driver_data *i965 = i965_driver_data(ctx);
3033 struct i965_surface src_surface;
3034 struct i965_surface dst_surface;
3037 struct object_surface *obj_surface = SURFACE(surface);
3038 struct object_image *obj_image = IMAGE(image);
3041 return VA_STATUS_ERROR_INVALID_SURFACE;
3044 return VA_STATUS_ERROR_INVALID_IMAGE;
3047 return VA_STATUS_ERROR_INVALID_PARAMETER;
3048 if (x + width > obj_surface->orig_width ||
3049 y + height > obj_surface->orig_height)
3050 return VA_STATUS_ERROR_INVALID_PARAMETER;
3051 if (x + width > obj_image->image.width ||
3052 y + height > obj_image->image.height)
3053 return VA_STATUS_ERROR_INVALID_PARAMETER;
/* A surface with no BO has no pixels yet; returning success is a no-op. */
3055 if (!obj_surface->bo)
3056 return VA_STATUS_SUCCESS;
3057 assert(obj_image->bo); // image bo is always created, see i965_CreateImage()
3062 rect.height = height;
/* Source = the VA surface, destination = the VA image. */
3064 src_surface.id = surface;
3065 src_surface.type = I965_SURFACE_TYPE_SURFACE;
3066 src_surface.flags = I965_SURFACE_FLAG_FRAME;
3068 dst_surface.id = image;
3069 dst_surface.type = I965_SURFACE_TYPE_IMAGE;
3070 dst_surface.flags = I965_SURFACE_FLAG_FRAME;
3072 va_status = i965_image_processing(ctx,
/*
 * vaGetImage entry point: prefer the hardware blit path when the platform
 * advertises accelerated GetImage, otherwise fall back to CPU copies.
 */
3083 i965_GetImage(VADriverContextP ctx,
3084 VASurfaceID surface,
3085 int x, /* coordinates of the upper left source pixel */
3087 unsigned int width, /* width and height of the region */
3088 unsigned int height,
3091 struct i965_driver_data * const i965 = i965_driver_data(ctx);
3094 if (HAS_ACCELERATED_GETIMAGE(i965))
3095 va_status = i965_hw_getimage(ctx,
3101 va_status = i965_sw_getimage(ctx,
/*
 * Copy a rectangle from a caller-mapped I420/YV12 VAImage buffer into an
 * I420-laid-out surface BO (the inverse of get_image_i420). Scaling is not
 * supported: src and dst rectangles must be the same size.
 */
3111 put_image_i420(struct object_surface *obj_surface,
3112 const VARectangle *dst_rect,
3113 struct object_image *obj_image, uint8_t *image_data,
3114 const VARectangle *src_rect)
3116 uint8_t *dst[3], *src[3];
/* Same fourcc: image planes in surface order; otherwise swap U and V. */
3118 const int U = obj_image->image.format.fourcc == obj_surface->fourcc ? 1 : 2;
3119 const int V = obj_image->image.format.fourcc == obj_surface->fourcc ? 2 : 1;
3120 unsigned int tiling, swizzle;
3122 if (!obj_surface->bo)
3125 assert(obj_surface->fourcc);
3126 assert(dst_rect->width == src_rect->width);
3127 assert(dst_rect->height == src_rect->height);
3128 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
/* Tiled BOs must be mapped through the GTT for linear CPU access. */
3130 if (tiling != I915_TILING_NONE)
3131 drm_intel_gem_bo_map_gtt(obj_surface->bo);
3133 dri_bo_map(obj_surface->bo, 0);
3135 if (!obj_surface->bo->virtual)
/* Put path: the VA image (I420 or YV12) is the SOURCE here and the
   surface (I420 plane layout) is the DESTINATION. */
3140 dst[0] = (uint8_t *)obj_surface->bo->virtual;
3141 src[Y] = image_data + obj_image->image.offsets[Y];
3142 dst[1] = dst[0] + obj_surface->width * obj_surface->height;
3143 src[U] = image_data + obj_image->image.offsets[U];
3144 dst[2] = dst[1] + (obj_surface->width / 2) * (obj_surface->height / 2);
3145 src[V] = image_data + obj_image->image.offsets[V];
/* Y plane. */
3148 dst[0] += dst_rect->y * obj_surface->width + dst_rect->x;
3149 src[Y] += src_rect->y * obj_image->image.pitches[Y] + src_rect->x;
3150 memcpy_pic(dst[0], obj_surface->width,
3151 src[Y], obj_image->image.pitches[Y],
3152 src_rect->width, src_rect->height);
/* Chroma planes at half resolution (4:2:0). */
3155 dst[1] += (dst_rect->y / 2) * obj_surface->width / 2 + dst_rect->x / 2;
3156 src[U] += (src_rect->y / 2) * obj_image->image.pitches[U] + src_rect->x / 2;
3157 memcpy_pic(dst[1], obj_surface->width / 2,
3158 src[U], obj_image->image.pitches[U],
3159 src_rect->width / 2, src_rect->height / 2);
3162 dst[2] += (dst_rect->y / 2) * obj_surface->width / 2 + dst_rect->x / 2;
3163 src[V] += (src_rect->y / 2) * obj_image->image.pitches[V] + src_rect->x / 2;
3164 memcpy_pic(dst[2], obj_surface->width / 2,
3165 src[V], obj_image->image.pitches[V],
3166 src_rect->width / 2, src_rect->height / 2);
3168 if (tiling != I915_TILING_NONE)
3169 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
3171 dri_bo_unmap(obj_surface->bo);
/*
 * Copy a rectangle from a caller-mapped NV12 VAImage buffer into an NV12
 * surface BO (the inverse of get_image_nv12). No scaling: the rectangles
 * must match in size.
 */
3175 put_image_nv12(struct object_surface *obj_surface,
3176 const VARectangle *dst_rect,
3177 struct object_image *obj_image, uint8_t *image_data,
3178 const VARectangle *src_rect)
3180 uint8_t *dst[2], *src[2];
3181 unsigned int tiling, swizzle;
3183 if (!obj_surface->bo)
3186 assert(obj_surface->fourcc);
3187 assert(dst_rect->width == src_rect->width);
3188 assert(dst_rect->height == src_rect->height);
3189 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
/* Tiled BOs must be mapped through the GTT for linear CPU access. */
3191 if (tiling != I915_TILING_NONE)
3192 drm_intel_gem_bo_map_gtt(obj_surface->bo);
3194 dri_bo_map(obj_surface->bo, 0);
3196 if (!obj_surface->bo->virtual)
3199 /* Both dest VA image and source surface have NV12 format */
3200 dst[0] = (uint8_t *)obj_surface->bo->virtual;
3201 src[0] = image_data + obj_image->image.offsets[0];
3202 dst[1] = dst[0] + obj_surface->width * obj_surface->height;
3203 src[1] = image_data + obj_image->image.offsets[1];
/* Y plane. */
3206 dst[0] += dst_rect->y * obj_surface->width + dst_rect->x;
3207 src[0] += src_rect->y * obj_image->image.pitches[0] + src_rect->x;
3208 memcpy_pic(dst[0], obj_surface->width,
3209 src[0], obj_image->image.pitches[0],
3210 src_rect->width, src_rect->height);
/* UV plane: x rounded down to an even offset to keep U/V pairing. */
3213 dst[1] += (dst_rect->y / 2) * obj_surface->width + (dst_rect->x & -2);
3214 src[1] += (src_rect->y / 2) * obj_image->image.pitches[1] + (src_rect->x & -2);
3215 memcpy_pic(dst[1], obj_surface->width,
3216 src[1], obj_image->image.pitches[1],
3217 src_rect->width, src_rect->height / 2);
3219 if (tiling != I915_TILING_NONE)
3220 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
3222 dri_bo_unmap(obj_surface->bo);
/*
 * Copy a rectangle from a caller-mapped YUY2 VAImage buffer into a packed
 * YUY2 surface BO (the inverse of get_image_yuy2). No scaling.
 */
3226 put_image_yuy2(struct object_surface *obj_surface,
3227 const VARectangle *dst_rect,
3228 struct object_image *obj_image, uint8_t *image_data,
3229 const VARectangle *src_rect)
3232 unsigned int tiling, swizzle;
3234 if (!obj_surface->bo)
3237 assert(obj_surface->fourcc);
3238 assert(dst_rect->width == src_rect->width);
3239 assert(dst_rect->height == src_rect->height);
3240 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
/* Tiled BOs must be mapped through the GTT for linear CPU access. */
3242 if (tiling != I915_TILING_NONE)
3243 drm_intel_gem_bo_map_gtt(obj_surface->bo);
3245 dri_bo_map(obj_surface->bo, 0);
3247 if (!obj_surface->bo->virtual)
3250 /* Both dest VA image and source surface have YUY2 format */
3251 dst = (uint8_t *)obj_surface->bo->virtual;
3252 src = image_data + obj_image->image.offsets[0];
3254 /* YUYV packed plane */
/* NOTE(review): the dst row step uses obj_surface->width while the stride
   passed to memcpy_pic is width*2 -- looks inconsistent; confirm. */
3255 dst += dst_rect->y * obj_surface->width + dst_rect->x*2;
3256 src += src_rect->y * obj_image->image.pitches[0] + src_rect->x*2;
3257 memcpy_pic(dst, obj_surface->width*2,
3258 src, obj_image->image.pitches[0],
3259 src_rect->width*2, src_rect->height);
3261 if (tiling != I915_TILING_NONE)
3262 drm_intel_gem_bo_unmap_gtt(obj_surface->bo);
3264 dri_bo_unmap(obj_surface->bo);
/*
 * Software (CPU memcpy) vaPutImage path: validate both rectangles, refuse
 * scaling and format mismatches, lazily allocate an untiled surface BO when
 * needed, then dispatch to the per-format put helper.
 */
3269 i965_sw_putimage(VADriverContextP ctx,
3270 VASurfaceID surface,
3274 unsigned int src_width,
3275 unsigned int src_height,
3278 unsigned int dest_width,
3279 unsigned int dest_height)
3281 struct i965_driver_data *i965 = i965_driver_data(ctx);
3282 struct object_surface *obj_surface = SURFACE(surface);
3285 return VA_STATUS_ERROR_INVALID_SURFACE;
3287 struct object_image *obj_image = IMAGE(image);
3289 return VA_STATUS_ERROR_INVALID_IMAGE;
/* Source rectangle must fit in the image, dest rectangle in the surface. */
3291 if (src_x < 0 || src_y < 0)
3292 return VA_STATUS_ERROR_INVALID_PARAMETER;
3293 if (src_x + src_width > obj_image->image.width ||
3294 src_y + src_height > obj_image->image.height)
3295 return VA_STATUS_ERROR_INVALID_PARAMETER;
3296 if (dest_x < 0 || dest_y < 0)
3297 return VA_STATUS_ERROR_INVALID_PARAMETER;
3298 if (dest_x + dest_width > obj_surface->orig_width ||
3299 dest_y + dest_height > obj_surface->orig_height)
3300 return VA_STATUS_ERROR_INVALID_PARAMETER;
3302 /* XXX: don't allow scaling */
3303 if (src_width != dest_width || src_height != dest_height)
3304 return VA_STATUS_ERROR_INVALID_PARAMETER;
3306 if (obj_surface->fourcc) {
3307 /* Don't allow format mismatch */
3308 if (obj_surface->fourcc != obj_image->image.format.fourcc)
3309 return VA_STATUS_ERROR_INVALID_IMAGE_FORMAT;
3313 /* VA is surface not used for decoding, use same VA image format */
3314 i965_check_alloc_surface_bo(
3317 0, /* XXX: don't use tiled surface */
3318 obj_image->image.format.fourcc,
3319 get_sampling_from_fourcc (obj_image->image.format.fourcc));
3323 void *image_data = NULL;
3325 va_status = i965_MapBuffer(ctx, obj_image->image.buf, &image_data);
3326 if (va_status != VA_STATUS_SUCCESS)
3329 VARectangle src_rect, dest_rect;
3332 src_rect.width = src_width;
3333 src_rect.height = src_height;
3334 dest_rect.x = dest_x;
3335 dest_rect.y = dest_y;
3336 dest_rect.width = dest_width;
3337 dest_rect.height = dest_height;
/* Per-format CPU copy into the surface BO. */
3339 switch (obj_image->image.format.fourcc) {
3340 case VA_FOURCC('Y','V','1','2'):
3341 case VA_FOURCC('I','4','2','0'):
3342 put_image_i420(obj_surface, &dest_rect, obj_image, image_data, &src_rect);
3344 case VA_FOURCC('N','V','1','2'):
3345 put_image_nv12(obj_surface, &dest_rect, obj_image, image_data, &src_rect);
3347 case VA_FOURCC('Y','U','Y','2'):
3348 put_image_yuy2(obj_surface, &dest_rect, obj_image, image_data, &src_rect);
3351 va_status = VA_STATUS_ERROR_OPERATION_FAILED;
3355 i965_UnmapBuffer(ctx, obj_image->image.buf);
/*
 * Hardware-accelerated vaPutImage path: bounds-check both rectangles,
 * lazily allocate the surface BO (inheriting the image's format and the
 * image BO's tiling), then blit image -> surface via i965_image_processing()
 * (which may also scale/convert).
 */
3360 i965_hw_putimage(VADriverContextP ctx,
3361 VASurfaceID surface,
3365 unsigned int src_width,
3366 unsigned int src_height,
3369 unsigned int dest_width,
3370 unsigned int dest_height)
3372 struct i965_driver_data *i965 = i965_driver_data(ctx);
3373 struct object_surface *obj_surface = SURFACE(surface);
3374 struct object_image *obj_image = IMAGE(image);
3375 struct i965_surface src_surface, dst_surface;
3376 VAStatus va_status = VA_STATUS_SUCCESS;
3377 VARectangle src_rect, dst_rect;
3380 return VA_STATUS_ERROR_INVALID_SURFACE;
3382 if (!obj_image || !obj_image->bo)
3383 return VA_STATUS_ERROR_INVALID_IMAGE;
3387 src_x + src_width > obj_image->image.width ||
3388 src_y + src_height > obj_image->image.height)
3389 return VA_STATUS_ERROR_INVALID_PARAMETER;
3393 dest_x + dest_width > obj_surface->orig_width ||
3394 dest_y + dest_height > obj_surface->orig_height)
3395 return VA_STATUS_ERROR_INVALID_PARAMETER;
3397 if (!obj_surface->bo) {
3398 unsigned int tiling, swizzle;
/* NOTE(review): stray second ';' at the end of the next line. */
3399 int surface_sampling = get_sampling_from_fourcc (obj_image->image.format.fourcc);;
3400 dri_bo_get_tiling(obj_image->bo, &tiling, &swizzle);
3402 i965_check_alloc_surface_bo(ctx,
3405 obj_image->image.format.fourcc,
3409 assert(obj_surface->fourcc);
/* Source = the VA image, destination = the VA surface. */
3411 src_surface.id = image;
3412 src_surface.type = I965_SURFACE_TYPE_IMAGE;
3413 src_surface.flags = I965_SURFACE_FLAG_FRAME;
3416 src_rect.width = src_width;
3417 src_rect.height = src_height;
3419 dst_surface.id = surface;
3420 dst_surface.type = I965_SURFACE_TYPE_SURFACE;
3421 dst_surface.flags = I965_SURFACE_FLAG_FRAME;
3422 dst_rect.x = dest_x;
3423 dst_rect.y = dest_y;
3424 dst_rect.width = dest_width;
3425 dst_rect.height = dest_height;
3427 va_status = i965_image_processing(ctx,
/*
 * vaPutImage entry point: prefer the hardware blit path when the platform
 * advertises accelerated PutImage, otherwise fall back to CPU copies.
 */
3437 i965_PutImage(VADriverContextP ctx,
3438 VASurfaceID surface,
3442 unsigned int src_width,
3443 unsigned int src_height,
3446 unsigned int dest_width,
3447 unsigned int dest_height)
3449 struct i965_driver_data *i965 = i965_driver_data(ctx);
3450 VAStatus va_status = VA_STATUS_SUCCESS;
3452 if (HAS_ACCELERATED_PUTIMAGE(i965))
3453 va_status = i965_hw_putimage(ctx,
3465 va_status = i965_sw_putimage(ctx,
/*
 * vaPutSurface: present a surface to an X11 drawable via the DRI output
 * path. Only the X11 backend is implemented; other display types return
 * VA_STATUS_ERROR_UNIMPLEMENTED.
 */
3481 i965_PutSurface(VADriverContextP ctx,
3482 VASurfaceID surface,
3483 void *draw, /* X Drawable */
3486 unsigned short srcw,
3487 unsigned short srch,
3490 unsigned short destw,
3491 unsigned short desth,
3492 VARectangle *cliprects, /* client supplied clip list */
3493 unsigned int number_cliprects, /* number of clip rects in the clip list */
3494 unsigned int flags) /* de-interlacing flags */
3497 if (IS_VA_X11(ctx)) {
3498 VARectangle src_rect, dst_rect;
3502 src_rect.width = srcw;
3503 src_rect.height = srch;
3507 dst_rect.width = destw;
3508 dst_rect.height = desth;
3510 return i965_put_surface_dri(ctx, surface, draw, &src_rect, &dst_rect,
3511 cliprects, number_cliprects, flags);
3514 return VA_STATUS_ERROR_UNIMPLEMENTED;
/*
 * vaTerminate entry point: tear down everything i965_Init set up, in
 * roughly reverse order of construction -- batchbuffer, mutexes,
 * display outputs, render/post-processing state, object heaps, and
 * finally the intel driver core and the driver-data allocation itself.
 * Returns VA_STATUS_ERROR_UNKNOWN if any sub-terminate fails (note:
 * on such early return, resources freed after that point leak).
 */
3518 i965_Terminate(VADriverContextP ctx)
3520 struct i965_driver_data *i965 = i965_driver_data(ctx);
3523 intel_batchbuffer_free(i965->batch);
3525 _i965DestroyMutex(&i965->pp_mutex);
3526 _i965DestroyMutex(&i965->render_mutex);
/* (elided) presumably guarded by IS_VA_X11/HAVE_VA_X11 -- confirm. */
3530 i965_output_dri_terminate(ctx);
3533 #ifdef HAVE_VA_WAYLAND
3534 if (IS_VA_WAYLAND(ctx))
3535 i965_output_wayland_terminate(ctx);
3538 if (i965_render_terminate(ctx) == False)
3539 return VA_STATUS_ERROR_UNKNOWN;
3541 if (i965_post_processing_terminate(ctx) == False)
3542 return VA_STATUS_ERROR_UNKNOWN;
3544 i965_display_attributes_terminate(ctx);
/* Drain every object heap with its matching destructor. */
3546 i965_destroy_heap(&i965->buffer_heap, i965_destroy_buffer);
3547 i965_destroy_heap(&i965->image_heap, i965_destroy_image);
3548 i965_destroy_heap(&i965->subpic_heap, i965_destroy_subpic);
3549 i965_destroy_heap(&i965->surface_heap, i965_destroy_surface);
3550 i965_destroy_heap(&i965->context_heap, i965_destroy_context);
3551 i965_destroy_heap(&i965->config_heap, i965_destroy_config);
3553 if (intel_driver_terminate(ctx) == False)
3554 return VA_STATUS_ERROR_UNKNOWN;
/* Release the per-context driver data allocated in the init function. */
3556 free(ctx->pDriverData);
3557 ctx->pDriverData = NULL;
3559 return VA_STATUS_SUCCESS;
/*
 * vaBufferInfo entry point (function name line elided in this listing;
 * it is registered as vtable->vaBufferInfo = i965_BufferInfo).
 * Reports a buffer's type, element size, and element count.
 * NOTE(review): obj_buffer is dereferenced without a visible NULL
 * check after the heap lookup -- an invalid buf_id would crash unless
 * a check/assert exists on one of the elided lines; verify.
 */
3564 VADriverContextP ctx, /* in */
3565 VABufferID buf_id, /* in */
3566 VABufferType *type, /* out */
3567 unsigned int *size, /* out */
3568 unsigned int *num_elements /* out */
3571 struct i965_driver_data *i965 = NULL;
3572 struct object_buffer *obj_buffer = NULL;
3574 i965 = i965_driver_data(ctx);
3575 obj_buffer = BUFFER(buf_id);
3577 *type = obj_buffer->type;
3578 *size = obj_buffer->size_element;
3579 *num_elements = obj_buffer->num_elements;
3581 return VA_STATUS_SUCCESS;
/*
 * vaLockSurface entry point (function name line elided in this
 * listing; it is registered as vtable->vaLockSurface =
 * i965_LockSurface).  Gives the caller CPU access to a surface by
 * deriving a VAImage from it, mapping the image buffer, and reporting
 * the resulting fourcc, per-plane strides/offsets, and buffer handle.
 * The derived image id is remembered in obj_surface->locked_image_id
 * so i965_UnlockSurface can undo the mapping.  Real lock semantics
 * are not implemented (see comment at original line 3623); locking an
 * already-locked surface fails with INVALID_PARAMETER.
 */
3586 VADriverContextP ctx, /* in */
3587 VASurfaceID surface, /* in */
3588 unsigned int *fourcc, /* out */
3589 unsigned int *luma_stride, /* out */
3590 unsigned int *chroma_u_stride, /* out */
3591 unsigned int *chroma_v_stride, /* out */
3592 unsigned int *luma_offset, /* out */
3593 unsigned int *chroma_u_offset, /* out */
3594 unsigned int *chroma_v_offset, /* out */
3595 unsigned int *buffer_name, /* out */
3596 void **buffer /* out */
3599 VAStatus vaStatus = VA_STATUS_SUCCESS;
3600 struct i965_driver_data *i965 = i965_driver_data(ctx);
3601 struct object_surface *obj_surface = NULL;
/* All out-pointers must be valid; enforced with asserts only. */
3605 assert(luma_stride);
3606 assert(chroma_u_stride);
3607 assert(chroma_v_stride);
3608 assert(luma_offset);
3609 assert(chroma_u_offset);
3610 assert(chroma_v_offset);
3611 assert(buffer_name);
3614 tmpImage.image_id = VA_INVALID_ID;
3616 obj_surface = SURFACE(surface);
3617 if (obj_surface == NULL) {
3618 // Surface is absent.
3619 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
3623 // Lock functionality is absent now.
3624 if (obj_surface->locked_image_id != VA_INVALID_ID) {
3625 // Surface is locked already.
3626 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
/* Derive a CPU-accessible VAImage aliasing the surface contents. */
3630 vaStatus = i965_DeriveImage(
3634 if (vaStatus != VA_STATUS_SUCCESS) {
/* Record the derived image so UnlockSurface can find and free it. */
3638 obj_surface->locked_image_id = tmpImage.image_id;
3640 vaStatus = i965_MapBuffer(
3644 if (vaStatus != VA_STATUS_SUCCESS) {
/* Publish the plane layout of the derived image to the caller. */
3648 *fourcc = tmpImage.format.fourcc;
3649 *luma_offset = tmpImage.offsets[0];
3650 *luma_stride = tmpImage.pitches[0];
3651 *chroma_u_offset = tmpImage.offsets[1];
3652 *chroma_u_stride = tmpImage.pitches[1];
3653 *chroma_v_offset = tmpImage.offsets[2];
3654 *chroma_v_stride = tmpImage.pitches[2];
3655 *buffer_name = tmpImage.buf;
/* (elided) error path: presumably cleans up the derived image. */
3658 if (vaStatus != VA_STATUS_SUCCESS) {
/*
 * vaUnlockSurface entry point (function name line elided in this
 * listing; registered as vtable->vaUnlockSurface = i965_UnlockSurface).
 * Reverses i965_LockSurface: unmaps the derived image's buffer,
 * destroys the image, and clears locked_image_id on the surface.
 * Fails with INVALID_PARAMETER if the surface is unknown, was never
 * locked, or its work image was already destroyed.
 */
3667 VADriverContextP ctx, /* in */
3668 VASurfaceID surface /* in */
3671 VAStatus vaStatus = VA_STATUS_SUCCESS;
3672 struct i965_driver_data *i965 = i965_driver_data(ctx);
3673 struct object_image *locked_img = NULL;
3674 struct object_surface *obj_surface = NULL;
3676 obj_surface = SURFACE(surface);
3678 if (obj_surface == NULL) {
3679 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER; // Surface is absent
3682 if (obj_surface->locked_image_id == VA_INVALID_ID) {
3683 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER; // Surface is not locked
3687 locked_img = IMAGE(obj_surface->locked_image_id);
3688 if (locked_img == NULL || (locked_img->image.image_id == VA_INVALID_ID)) {
3689 // Work image was deallocated before i965_UnlockSurface()
3690 vaStatus = VA_STATUS_ERROR_INVALID_PARAMETER;
/* Unmap first, then destroy -- mirrors the lock-time Map/Derive order. */
3694 vaStatus = i965_UnmapBuffer(
3696 locked_img->image.buf);
3697 if (vaStatus != VA_STATUS_SUCCESS) {
3701 vaStatus = i965_DestroyImage(
3703 locked_img->image.image_id);
3704 if (vaStatus != VA_STATUS_SUCCESS) {
3708 locked_img->image.image_id = VA_INVALID_ID;
/* Surface is now unlocked and may be locked again. */
3711 obj_surface->locked_image_id = VA_INVALID_ID;
/*
 * vaGetSurfaceAttributes entry point: for each requested attribute,
 * fill in (value == 0) or validate (value != 0) what this hardware
 * generation / config profile supports.  Only VASurfaceAttribPixelFormat
 * is actually implemented; the min/max width/height attributes are all
 * reported as NOT_SUPPORTED.
 * NOTE(review): many lines (closing braces, break statements, the
 * else that separates the "query default" and "validate requested"
 * halves) are elided in this listing -- the structure described below
 * is inferred from the visible branches; confirm against full source.
 */
3717 i965_GetSurfaceAttributes(
3718 VADriverContextP ctx,
3720 VASurfaceAttrib *attrib_list,
3721 unsigned int num_attribs
3724 VAStatus vaStatus = VA_STATUS_SUCCESS;
3725 struct i965_driver_data *i965 = i965_driver_data(ctx);
3726 struct object_config *obj_config;
3729 if (config == VA_INVALID_ID)
3730 return VA_STATUS_ERROR_INVALID_CONFIG;
3732 obj_config = CONFIG(config);
3734 if (obj_config == NULL)
3735 return VA_STATUS_ERROR_INVALID_CONFIG;
3737 if (attrib_list == NULL || num_attribs == 0)
3738 return VA_STATUS_ERROR_INVALID_PARAMETER;
3740 for (i = 0; i < num_attribs; i++) {
3741 switch (attrib_list[i].type) {
3742 case VASurfaceAttribPixelFormat:
3743 attrib_list[i].value.type = VAGenericValueTypeInteger;
3744 attrib_list[i].flags = VA_SURFACE_ATTRIB_GETTABLE | VA_SURFACE_ATTRIB_SETTABLE;
/* Query mode: value 0 asks the driver to report its default fourcc. */
3746 if (attrib_list[i].value.value.i == 0) {
3747 if (IS_G4X(i965->intel.device_id)) {
/* G4X decodes only MPEG-2, in I420. */
3748 if (obj_config->profile == VAProfileMPEG2Simple ||
3749 obj_config->profile == VAProfileMPEG2Main) {
3750 attrib_list[i].value.value.i = VA_FOURCC('I', '4', '2', '0');
3753 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3755 } else if (IS_IRONLAKE(i965->intel.device_id)) {
/* Ironlake: MPEG-2 in I420; H.264 and VPP (None) in NV12. */
3756 if (obj_config->profile == VAProfileMPEG2Simple ||
3757 obj_config->profile == VAProfileMPEG2Main) {
3758 attrib_list[i].value.value.i = VA_FOURCC('I', '4', '2', '0');
3759 } else if (obj_config->profile == VAProfileH264Baseline ||
3760 obj_config->profile == VAProfileH264Main ||
3761 obj_config->profile == VAProfileH264High) {
3762 attrib_list[i].value.value.i = VA_FOURCC('N', 'V', '1', '2');
3763 } else if (obj_config->profile == VAProfileNone) {
3764 attrib_list[i].value.value.i = VA_FOURCC('N', 'V', '1', '2');
3767 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3769 } else if (IS_GEN6(i965->intel.device_id)) {
/* Gen6: NV12 across the board. */
3770 attrib_list[i].value.value.i = VA_FOURCC('N', 'V', '1', '2');
3771 } else if (IS_GEN7(i965->intel.device_id)) {
/* Gen7: JPEG decode uses a format chosen internally by the driver. */
3772 if (obj_config->profile == VAProfileJPEGBaseline)
3773 attrib_list[i].value.value.i = 0; /* internal format */
3775 attrib_list[i].value.value.i = VA_FOURCC('N', 'V', '1', '2');
/* Validate mode: a nonzero value is checked against what the
 * generation/profile can accept; unsupported requests are zeroed
 * and SETTABLE is cleared. */
3778 if (IS_G4X(i965->intel.device_id)) {
3779 if (obj_config->profile == VAProfileMPEG2Simple ||
3780 obj_config->profile == VAProfileMPEG2Main) {
3781 if (attrib_list[i].value.value.i != VA_FOURCC('I', '4', '2', '0')) {
3782 attrib_list[i].value.value.i = 0;
3783 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3787 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3789 } else if (IS_IRONLAKE(i965->intel.device_id)) {
3790 if (obj_config->profile == VAProfileMPEG2Simple ||
3791 obj_config->profile == VAProfileMPEG2Main) {
3792 if (attrib_list[i].value.value.i != VA_FOURCC('I', '4', '2', '0')) {
3793 attrib_list[i].value.value.i = 0;
3794 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3796 } else if (obj_config->profile == VAProfileH264Baseline ||
3797 obj_config->profile == VAProfileH264Main ||
3798 obj_config->profile == VAProfileH264High) {
3799 if (attrib_list[i].value.value.i != VA_FOURCC('N', 'V', '1', '2')) {
3800 attrib_list[i].value.value.i = 0;
3801 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3803 } else if (obj_config->profile == VAProfileNone) {
/* VPP on Ironlake accepts the common YUV and RGB layouts. */
3804 switch (attrib_list[i].value.value.i) {
3805 case VA_FOURCC('N', 'V', '1', '2'):
3806 case VA_FOURCC('I', '4', '2', '0'):
3807 case VA_FOURCC('Y', 'V', '1', '2'):
3808 case VA_FOURCC('Y', 'U', 'Y', '2'):
3809 case VA_FOURCC('B', 'G', 'R', 'A'):
3810 case VA_FOURCC('B', 'G', 'R', 'X'):
3811 case VA_FOURCC('R', 'G', 'B', 'X'):
3812 case VA_FOURCC('R', 'G', 'B', 'A'):
3815 attrib_list[i].value.value.i = 0;
3816 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3821 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3823 } else if (IS_GEN6(i965->intel.device_id)) {
/* Gen6 encode/VPP accepts several formats; decode only NV12. */
3824 if (obj_config->entrypoint == VAEntrypointEncSlice ||
3825 obj_config->entrypoint == VAEntrypointVideoProc) {
3826 switch (attrib_list[i].value.value.i) {
3827 case VA_FOURCC('N', 'V', '1', '2'):
3828 case VA_FOURCC('I', '4', '2', '0'):
3829 case VA_FOURCC('Y', 'V', '1', '2'):
3830 case VA_FOURCC('Y', 'U', 'Y', '2'):
3831 case VA_FOURCC('B', 'G', 'R', 'A'):
3832 case VA_FOURCC('B', 'G', 'R', 'X'):
3833 case VA_FOURCC('R', 'G', 'B', 'X'):
3834 case VA_FOURCC('R', 'G', 'B', 'A'):
3837 attrib_list[i].value.value.i = 0;
3838 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3842 if (attrib_list[i].value.value.i != VA_FOURCC('N', 'V', '1', '2')) {
3843 attrib_list[i].value.value.i = 0;
3844 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3847 } else if (IS_GEN7(i965->intel.device_id)) {
/* Gen7 encode/VPP: planar YUV only; JPEG decode: internal format. */
3848 if (obj_config->entrypoint == VAEntrypointEncSlice ||
3849 obj_config->entrypoint == VAEntrypointVideoProc) {
3850 switch (attrib_list[i].value.value.i) {
3851 case VA_FOURCC('N', 'V', '1', '2'):
3852 case VA_FOURCC('I', '4', '2', '0'):
3853 case VA_FOURCC('Y', 'V', '1', '2'):
3856 attrib_list[i].value.value.i = 0;
3857 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3861 if (obj_config->profile == VAProfileJPEGBaseline) {
3862 attrib_list[i].value.value.i = 0; /* JPEG decoding always uses an internal format */
3863 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
3865 if (attrib_list[i].value.value.i != VA_FOURCC('N', 'V', '1', '2')) {
3866 attrib_list[i].value.value.i = 0;
3867 attrib_list[i].flags &= ~VA_SURFACE_ATTRIB_SETTABLE;
/* Size-constraint attributes are not implemented yet. */
3875 case VASurfaceAttribMinWidth:
3876 /* FIXME: add support for it later */
3877 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3879 case VASurfaceAttribMaxWidth:
3880 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3882 case VASurfaceAttribMinHeight:
3883 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3885 case VASurfaceAttribMaxHeight:
3886 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
3889 attrib_list[i].flags = VA_SURFACE_ATTRIB_NOT_SUPPORTED;
 * Query video processing pipeline
/*
 * vaQueryVideoProcFilters: list the VPP filter types this device
 * supports.  Baseline VPP hardware gets noise reduction and
 * deinterlacing; Haswell additionally exposes sharpening, color
 * balance and color standard conversion.
 * NOTE(review): the declaration/increment of 'i' and the assignment
 * to *num_filters are on elided lines; 'filters' is written without a
 * visible capacity check -- the caller-provided array is presumably
 * sized per the elided code; confirm against full source.
 */
3900 VAStatus i965_QueryVideoProcFilters(
3901 VADriverContextP ctx,
3902 VAContextID context,
3903 VAProcFilterType *filters,
3904 unsigned int *num_filters
3907 struct i965_driver_data *const i965 = i965_driver_data(ctx);
3910 if (HAS_VPP(i965)) {
3911 filters[i++] = VAProcFilterNoiseReduction;
3912 filters[i++] = VAProcFilterDeinterlacing;
/* Haswell-only filter set. */
3915 if(IS_HASWELL(i965->intel.device_id)){
3916 filters[i++] = VAProcFilterNone;
3917 filters[i++] = VAProcFilterSharpening;
3918 filters[i++] = VAProcFilterColorBalance;
3919 filters[i++] = VAProcFilterColorStandard;
3924 return VA_STATUS_SUCCESS;
/*
 * vaQueryVideoProcFilterCaps: describe the capability ranges of one
 * filter type.  Noise reduction reports a single [0,1] strength range
 * (default 0.5, 1/32 steps); deinterlacing reports the Bob algorithm;
 * on Haswell, color balance reports hue/saturation/brightness/contrast
 * ranges.  The count of caps written is returned via *num_filter_caps.
 * NOTE(review): the 'i' counter declaration and its increments (and
 * the cap++ advances between color-balance entries) are on elided
 * lines; 'filter_caps' capacity is presumably validated there too.
 */
3927 VAStatus i965_QueryVideoProcFilterCaps(
3928 VADriverContextP ctx,
3929 VAContextID context,
3930 VAProcFilterType type,
3932 unsigned int *num_filter_caps
3935 struct i965_driver_data *const i965 = i965_driver_data(ctx);
3938 if (type == VAProcFilterNoiseReduction) {
3939 VAProcFilterCap *cap = filter_caps;
3941 cap->range.min_value = 0.0;
3942 cap->range.max_value = 1.0;
3943 cap->range.default_value = 0.5;
3944 cap->range.step = 0.03125; /* 1.0 / 32 */
3946 } else if (type == VAProcFilterDeinterlacing) {
3947 VAProcFilterCapDeinterlacing *cap = filter_caps;
3949 cap->type = VAProcDeinterlacingBob;
3954 if(IS_HASWELL(i965->intel.device_id)){
3955 if(type == VAProcFilterColorBalance){
3956 VAProcFilterCapColorBalance *cap = filter_caps;
/* Hue in degrees. */
3957 cap->type = VAProcColorBalanceHue;
3958 cap->range.min_value = -180.0;
3959 cap->range.max_value = 180.0;
3960 cap->range.default_value = 0.0;
3961 cap->range.step = 1.0;
3965 cap->type = VAProcColorBalanceSaturation;
3966 cap->range.min_value = 0.0;
3967 cap->range.max_value = 10.0;
3968 cap->range.default_value = 0.0;
3969 cap->range.step = 0.1;
3973 cap->type = VAProcColorBalanceBrightness;
3974 cap->range.min_value = -100.0;
3975 cap->range.max_value = 100.0;
3976 cap->range.default_value = 0.0;
3977 cap->range.step = 1.0;
3981 cap->type = VAProcColorBalanceContrast;
3982 cap->range.min_value = 0.0;
3983 cap->range.max_value = 10.0;
3984 cap->range.default_value = 0.0;
3985 cap->range.step = 0.1;
3992 *num_filter_caps = i;
3994 return VA_STATUS_SUCCESS;
/* Color standards advertised by i965_QueryVideoProcPipelineCaps below.
 * Only BT.601 is listed on input and output (array closers elided in
 * this listing); the arrays are sized for VAProcColorStandardCount so
 * unused slots are zero. */
3997 static VAProcColorStandardType vpp_input_color_standards[VAProcColorStandardCount] = {
3998 VAProcColorStandardBT601,
4001 static VAProcColorStandardType vpp_output_color_standards[VAProcColorStandardCount] = {
4002 VAProcColorStandardBT601,
/*
 * vaQueryVideoProcPipelineCaps: fill in pipeline-wide capabilities
 * (no extra references, BT.601 in/out) and then inspect each supplied
 * filter parameter buffer, asserting that only supported filter
 * configurations were passed.
 * NOTE(review): obj_buffer from BUFFER(filters[i]) is dereferenced
 * without a NULL check (visible at original lines 4026-4027) -- an
 * invalid filter buffer id would crash here unless a check exists on
 * an elided line; verify against full source.
 */
4005 VAStatus i965_QueryVideoProcPipelineCaps(
4006 VADriverContextP ctx,
4007 VAContextID context,
4008 VABufferID *filters,
4009 unsigned int num_filters,
4010 VAProcPipelineCaps *pipeline_cap /* out */
4013 struct i965_driver_data * const i965 = i965_driver_data(ctx);
4016 pipeline_cap->pipeline_flags = 0;
4017 pipeline_cap->filter_flags = 0;
4018 pipeline_cap->num_forward_references = 0;
4019 pipeline_cap->num_backward_references = 0;
4020 pipeline_cap->num_input_color_standards = 1;
4021 pipeline_cap->input_color_standards = vpp_input_color_standards;
4022 pipeline_cap->num_output_color_standards = 1;
4023 pipeline_cap->output_color_standards = vpp_output_color_standards;
4025 for (i = 0; i < num_filters; i++) {
4026 struct object_buffer *obj_buffer = BUFFER(filters[i]);
4027 VAProcFilterParameterBufferBase *base = (VAProcFilterParameterBufferBase *)obj_buffer->buffer_store->buffer;
4029 if (base->type == VAProcFilterNoiseReduction) {
4030 VAProcFilterParameterBuffer *denoise = (VAProcFilterParameterBuffer *)base;
4032 } else if (base->type == VAProcFilterDeinterlacing) {
4033 VAProcFilterParameterBufferDeinterlacing *deint = (VAProcFilterParameterBufferDeinterlacing *)base;
/* Only Weave and Bob deinterlacing are accepted. */
4035 assert(deint->algorithm == VAProcDeinterlacingWeave ||
4036 deint->algorithm == VAProcDeinterlacingBob);
4040 return VA_STATUS_SUCCESS;
/* libva entry point: called by libva when the driver .so is loaded. */
4044 VA_DRIVER_INIT_FUNC(VADriverContextP ctx);
/*
 * Driver initialization: advertise version and table limits, install
 * every vtable function pointer, allocate the per-context driver data,
 * create the six object heaps, build the vendor string, and finally
 * delegate to i965_Init for hardware setup.
 * NOTE(review): vaQueryConfigEntrypoints is assigned twice (original
 * lines 4065 and 4067) -- the second assignment is redundant and one
 * should be removed.
 * NOTE(review): the calloc result is used without a visible NULL
 * check (original lines 4115/4117) -- a check may be on an elided
 * line; verify.  heap-init failures are handled only by assert(),
 * which disappears under NDEBUG.
 */
4047 VA_DRIVER_INIT_FUNC( VADriverContextP ctx )
4049 struct VADriverVTable * const vtable = ctx->vtable;
4050 struct VADriverVTableVPP * const vtable_vpp = ctx->vtable_vpp;
4052 struct i965_driver_data *i965;
/* Capability limits reported back to libva. */
4055 ctx->version_major = VA_MAJOR_VERSION;
4056 ctx->version_minor = VA_MINOR_VERSION;
4057 ctx->max_profiles = I965_MAX_PROFILES;
4058 ctx->max_entrypoints = I965_MAX_ENTRYPOINTS;
4059 ctx->max_attributes = I965_MAX_CONFIG_ATTRIBUTES;
4060 ctx->max_image_formats = I965_MAX_IMAGE_FORMATS;
4061 ctx->max_subpic_formats = I965_MAX_SUBPIC_FORMATS;
4062 ctx->max_display_attributes = 1 + ARRAY_ELEMS(i965_display_attributes);
/* Core VA-API vtable. */
4064 vtable->vaTerminate = i965_Terminate;
4065 vtable->vaQueryConfigEntrypoints = i965_QueryConfigEntrypoints;
4066 vtable->vaQueryConfigProfiles = i965_QueryConfigProfiles;
/* NOTE(review): duplicate of line 4065 above. */
4067 vtable->vaQueryConfigEntrypoints = i965_QueryConfigEntrypoints;
4068 vtable->vaQueryConfigAttributes = i965_QueryConfigAttributes;
4069 vtable->vaCreateConfig = i965_CreateConfig;
4070 vtable->vaDestroyConfig = i965_DestroyConfig;
4071 vtable->vaGetConfigAttributes = i965_GetConfigAttributes;
4072 vtable->vaCreateSurfaces = i965_CreateSurfaces;
4073 vtable->vaDestroySurfaces = i965_DestroySurfaces;
4074 vtable->vaCreateContext = i965_CreateContext;
4075 vtable->vaDestroyContext = i965_DestroyContext;
4076 vtable->vaCreateBuffer = i965_CreateBuffer;
4077 vtable->vaBufferSetNumElements = i965_BufferSetNumElements;
4078 vtable->vaMapBuffer = i965_MapBuffer;
4079 vtable->vaUnmapBuffer = i965_UnmapBuffer;
4080 vtable->vaDestroyBuffer = i965_DestroyBuffer;
4081 vtable->vaBeginPicture = i965_BeginPicture;
4082 vtable->vaRenderPicture = i965_RenderPicture;
4083 vtable->vaEndPicture = i965_EndPicture;
4084 vtable->vaSyncSurface = i965_SyncSurface;
4085 vtable->vaQuerySurfaceStatus = i965_QuerySurfaceStatus;
4086 vtable->vaPutSurface = i965_PutSurface;
4087 vtable->vaQueryImageFormats = i965_QueryImageFormats;
4088 vtable->vaCreateImage = i965_CreateImage;
4089 vtable->vaDeriveImage = i965_DeriveImage;
4090 vtable->vaDestroyImage = i965_DestroyImage;
4091 vtable->vaSetImagePalette = i965_SetImagePalette;
4092 vtable->vaGetImage = i965_GetImage;
4093 vtable->vaPutImage = i965_PutImage;
4094 vtable->vaQuerySubpictureFormats = i965_QuerySubpictureFormats;
4095 vtable->vaCreateSubpicture = i965_CreateSubpicture;
4096 vtable->vaDestroySubpicture = i965_DestroySubpicture;
4097 vtable->vaSetSubpictureImage = i965_SetSubpictureImage;
4098 vtable->vaSetSubpictureChromakey = i965_SetSubpictureChromakey;
4099 vtable->vaSetSubpictureGlobalAlpha = i965_SetSubpictureGlobalAlpha;
4100 vtable->vaAssociateSubpicture = i965_AssociateSubpicture;
4101 vtable->vaDeassociateSubpicture = i965_DeassociateSubpicture;
4102 vtable->vaQueryDisplayAttributes = i965_QueryDisplayAttributes;
4103 vtable->vaGetDisplayAttributes = i965_GetDisplayAttributes;
4104 vtable->vaSetDisplayAttributes = i965_SetDisplayAttributes;
4105 vtable->vaBufferInfo = i965_BufferInfo;
4106 vtable->vaLockSurface = i965_LockSurface;
4107 vtable->vaUnlockSurface = i965_UnlockSurface;
4108 vtable->vaGetSurfaceAttributes = i965_GetSurfaceAttributes;
4109 vtable->vaCreateSurfaces2 = i965_CreateSurfaces2;
/* Video-processing (VPP) vtable. */
4111 vtable_vpp->vaQueryVideoProcFilters = i965_QueryVideoProcFilters;
4112 vtable_vpp->vaQueryVideoProcFilterCaps = i965_QueryVideoProcFilterCaps;
4113 vtable_vpp->vaQueryVideoProcPipelineCaps = i965_QueryVideoProcPipelineCaps;
/* Per-context driver state, zero-initialized. */
4115 i965 = (struct i965_driver_data *)calloc(1, sizeof(*i965));
4117 ctx->pDriverData = (void *)i965;
/* Object heaps: each object id is tagged with its *_ID_OFFSET so ids
 * from different heaps never collide (offsets defined near file top). */
4119 result = object_heap_init(&i965->config_heap,
4120 sizeof(struct object_config),
4122 assert(result == 0);
4124 result = object_heap_init(&i965->context_heap,
4125 sizeof(struct object_context),
4127 assert(result == 0);
4129 result = object_heap_init(&i965->surface_heap,
4130 sizeof(struct object_surface),
4132 assert(result == 0);
4134 result = object_heap_init(&i965->buffer_heap,
4135 sizeof(struct object_buffer),
4137 assert(result == 0);
4139 result = object_heap_init(&i965->image_heap,
4140 sizeof(struct object_image),
4142 assert(result == 0);
4144 result = object_heap_init(&i965->subpic_heap,
4145 sizeof(struct object_subpic),
4147 assert(result == 0);
/* Vendor string, e.g. "Intel i965 driver - 1.2.3".  NOTE(review):
 * sprintf into the fixed-size va_vendor buffer -- safe only while the
 * macro strings stay short; snprintf would be more defensive. */
4149 sprintf(i965->va_vendor, "%s %s driver - %d.%d.%d",
4150 INTEL_STR_DRIVER_VENDOR,
4151 INTEL_STR_DRIVER_NAME,
4152 INTEL_DRIVER_MAJOR_VERSION,
4153 INTEL_DRIVER_MINOR_VERSION,
4154 INTEL_DRIVER_MICRO_VERSION);
/* Append ".preN" for pre-release builds. */
4156 if (INTEL_DRIVER_PRE_VERSION > 0) {
4157 const int len = strlen(i965->va_vendor);
4158 sprintf(&i965->va_vendor[len], ".pre%d", INTEL_DRIVER_PRE_VERSION);
4161 i965->current_context_id = VA_INVALID_ID;
4163 ctx->str_vendor = i965->va_vendor;
/* Hand off to hardware/profile initialization. */
4165 return i965_Init(ctx);