/*
 * Copyright © 2010 Intel Corporation
 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 * Authors:
 *    Zhou Chang <chang.zhou@intel.com>
 *
 */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>

#include "intel_batchbuffer.h"
#include "intel_driver.h"

#include "i965_defines.h"
#include "i965_drv_video.h"
#include "i965_encoder.h"
43 extern Bool gen6_mfc_context_init(VADriverContextP ctx, struct intel_encoder_context *encoder_context);
44 extern Bool gen6_vme_context_init(VADriverContextP ctx, struct intel_encoder_context *encoder_context);
45 extern Bool gen7_mfc_context_init(VADriverContextP ctx, struct intel_encoder_context *encoder_context);
48 i965_DestroySurfaces(VADriverContextP ctx,
49 VASurfaceID *surface_list,
52 i965_CreateSurfaces(VADriverContextP ctx,
57 VASurfaceID *surfaces);
60 intel_encoder_check_yuv_surface(VADriverContextP ctx,
62 struct encode_state *encode_state,
63 struct intel_encoder_context *encoder_context)
65 struct i965_driver_data *i965 = i965_driver_data(ctx);
66 struct i965_surface src_surface, dst_surface;
67 struct object_surface *obj_surface;
71 /* releae the temporary surface */
72 if (encoder_context->is_tmp_id) {
73 i965_DestroySurfaces(ctx, &encoder_context->input_yuv_surface, 1);
74 encode_state->input_yuv_object = NULL;
77 encoder_context->is_tmp_id = 0;
78 obj_surface = SURFACE(encode_state->current_render_target);
79 assert(obj_surface && obj_surface->bo);
81 if (!obj_surface || !obj_surface->bo)
82 return VA_STATUS_ERROR_INVALID_PARAMETER;
84 if (obj_surface->fourcc == VA_FOURCC('N', 'V', '1', '2')) {
85 unsigned int tiling = 0, swizzle = 0;
87 dri_bo_get_tiling(obj_surface->bo, &tiling, &swizzle);
89 if (tiling == I915_TILING_Y) {
90 encoder_context->input_yuv_surface = encode_state->current_render_target;
91 encode_state->input_yuv_object = obj_surface;
92 return VA_STATUS_SUCCESS;
98 rect.width = obj_surface->orig_width;
99 rect.height = obj_surface->orig_height;
101 src_surface.id = encode_state->current_render_target;
102 src_surface.type = I965_SURFACE_TYPE_SURFACE;
103 src_surface.flags = I965_SURFACE_FLAG_FRAME;
105 status = i965_CreateSurfaces(ctx,
106 obj_surface->orig_width,
107 obj_surface->orig_height,
110 &encoder_context->input_yuv_surface);
111 assert(status == VA_STATUS_SUCCESS);
113 if (status != VA_STATUS_SUCCESS)
116 obj_surface = SURFACE(encoder_context->input_yuv_surface);
117 encode_state->input_yuv_object = obj_surface;
118 i965_check_alloc_surface_bo(ctx, obj_surface, 1, VA_FOURCC('N', 'V', '1', '2'), SUBSAMPLE_YUV420);
120 dst_surface.id = encoder_context->input_yuv_surface;
121 dst_surface.type = I965_SURFACE_TYPE_SURFACE;
122 dst_surface.flags = I965_SURFACE_FLAG_FRAME;
124 status = i965_image_processing(ctx,
129 assert(status == VA_STATUS_SUCCESS);
131 encoder_context->is_tmp_id = 1;
133 return VA_STATUS_SUCCESS;
137 intel_encoder_check_avc_parameter(VADriverContextP ctx,
138 struct encode_state *encode_state,
139 struct intel_encoder_context *encoder_context)
141 struct i965_driver_data *i965 = i965_driver_data(ctx);
142 struct object_surface *obj_surface;
143 struct object_buffer *obj_buffer;
144 VAEncPictureParameterBufferH264 *pic_param = (VAEncPictureParameterBufferH264 *)encode_state->pic_param_ext->buffer;
146 assert(!(pic_param->CurrPic.flags & VA_PICTURE_H264_INVALID));
148 if (pic_param->CurrPic.flags & VA_PICTURE_H264_INVALID)
151 obj_surface = SURFACE(pic_param->CurrPic.picture_id);
152 assert(obj_surface); /* It is possible the store buffer isn't allocated yet */
157 obj_buffer = BUFFER(pic_param->coded_buf);
158 assert(obj_buffer && obj_buffer->buffer_store && obj_buffer->buffer_store->bo);
160 if (!obj_buffer || !obj_buffer->buffer_store || !obj_buffer->buffer_store->bo)
163 return VA_STATUS_SUCCESS;
166 return VA_STATUS_ERROR_INVALID_PARAMETER;
170 intel_encoder_check_mpeg2_parameter(VADriverContextP ctx,
171 struct encode_state *encode_state,
172 struct intel_encoder_context *encoder_context)
174 struct i965_driver_data *i965 = i965_driver_data(ctx);
175 VAEncPictureParameterBufferMPEG2 *pic_param = (VAEncPictureParameterBufferMPEG2 *)encode_state->pic_param_ext->buffer;
176 struct object_surface *obj_surface;
177 struct object_buffer *obj_buffer;
179 obj_surface = SURFACE(pic_param->reconstructed_picture);
180 assert(obj_surface); /* It is possible the store buffer isn't allocated yet */
185 obj_buffer = BUFFER(pic_param->coded_buf);
186 assert(obj_buffer && obj_buffer->buffer_store && obj_buffer->buffer_store->bo);
188 if (!obj_buffer || !obj_buffer->buffer_store || !obj_buffer->buffer_store->bo)
191 return VA_STATUS_SUCCESS;
194 return VA_STATUS_ERROR_INVALID_PARAMETER;
198 intel_encoder_sanity_check_input(VADriverContextP ctx,
200 struct encode_state *encode_state,
201 struct intel_encoder_context *encoder_context)
206 case VAProfileH264Baseline:
207 case VAProfileH264Main:
208 case VAProfileH264High:
209 vaStatus = intel_encoder_check_avc_parameter(ctx, encode_state, encoder_context);
212 case VAProfileMPEG2Simple:
213 case VAProfileMPEG2Main:
214 vaStatus = intel_encoder_check_mpeg2_parameter(ctx, encode_state, encoder_context);
218 vaStatus = VA_STATUS_ERROR_UNSUPPORTED_PROFILE;
222 if (vaStatus != VA_STATUS_SUCCESS)
225 vaStatus = intel_encoder_check_yuv_surface(ctx, profile, encode_state, encoder_context);
232 intel_encoder_end_picture(VADriverContextP ctx,
234 union codec_state *codec_state,
235 struct hw_context *hw_context)
237 struct intel_encoder_context *encoder_context = (struct intel_encoder_context *)hw_context;
238 struct encode_state *encode_state = &codec_state->encode;
241 vaStatus = intel_encoder_sanity_check_input(ctx, profile, encode_state, encoder_context);
243 if (vaStatus != VA_STATUS_SUCCESS)
246 encoder_context->mfc_brc_prepare(encode_state, encoder_context);
248 vaStatus = encoder_context->vme_pipeline(ctx, profile, encode_state, encoder_context);
250 if (vaStatus == VA_STATUS_SUCCESS)
251 encoder_context->mfc_pipeline(ctx, profile, encode_state, encoder_context);
252 return VA_STATUS_SUCCESS;
256 intel_encoder_context_destroy(void *hw_context)
258 struct intel_encoder_context *encoder_context = (struct intel_encoder_context *)hw_context;
260 encoder_context->mfc_context_destroy(encoder_context->mfc_context);
261 encoder_context->vme_context_destroy(encoder_context->vme_context);
262 intel_batchbuffer_free(encoder_context->base.batch);
263 free(encoder_context);
266 typedef Bool (* hw_init_func)(VADriverContextP, struct intel_encoder_context *);
268 static struct hw_context *
269 intel_enc_hw_context_init(VADriverContextP ctx,
270 struct object_config *obj_config,
271 hw_init_func vme_context_init,
272 hw_init_func mfc_context_init)
274 struct intel_driver_data *intel = intel_driver_data(ctx);
275 struct intel_encoder_context *encoder_context = calloc(1, sizeof(struct intel_encoder_context));
278 encoder_context->base.destroy = intel_encoder_context_destroy;
279 encoder_context->base.run = intel_encoder_end_picture;
280 encoder_context->base.batch = intel_batchbuffer_new(intel, I915_EXEC_RENDER, 0);
281 encoder_context->input_yuv_surface = VA_INVALID_SURFACE;
282 encoder_context->is_tmp_id = 0;
283 encoder_context->rate_control_mode = VA_RC_NONE;
284 encoder_context->profile = obj_config->profile;
286 for (i = 0; i < obj_config->num_attribs; i++) {
287 if (obj_config->attrib_list[i].type == VAConfigAttribRateControl) {
288 encoder_context->rate_control_mode = obj_config->attrib_list[i].value;
293 vme_context_init(ctx, encoder_context);
294 assert(encoder_context->vme_context);
295 assert(encoder_context->vme_context_destroy);
296 assert(encoder_context->vme_pipeline);
298 mfc_context_init(ctx, encoder_context);
299 assert(encoder_context->mfc_context);
300 assert(encoder_context->mfc_context_destroy);
301 assert(encoder_context->mfc_pipeline);
303 return (struct hw_context *)encoder_context;
307 gen6_enc_hw_context_init(VADriverContextP ctx, struct object_config *obj_config)
309 return intel_enc_hw_context_init(ctx, obj_config, gen6_vme_context_init, gen6_mfc_context_init);
313 gen7_enc_hw_context_init(VADriverContextP ctx, struct object_config *obj_config)
315 return intel_enc_hw_context_init(ctx, obj_config, gen7_vme_context_init, gen7_mfc_context_init);
319 gen75_enc_hw_context_init(VADriverContextP ctx, struct object_config *obj_config)
321 return intel_enc_hw_context_init(ctx, obj_config, gen75_vme_context_init, gen75_mfc_context_init);