itv->is_50hz = 1;
itv->is_out_50hz = 1;
}
+
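+ /* Assume a full-size OSD (720 x 576/480) until the framebuffer layer reports its real geometry */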
+ itv->yuv_info.osd_full_w = 720;
+ itv->yuv_info.osd_full_h = itv->is_out_50hz ? 576 : 480;
+ itv->yuv_info.v4l2_src_w = itv->yuv_info.osd_full_w;
+ itv->yuv_info.v4l2_src_h = itv->yuv_info.osd_full_h;
+
itv->params.video_gop_size = itv->is_60hz ? 15 : 12;
itv->stream_buf_size[IVTV_ENC_STREAM_TYPE_MPG] = 0x08000;
u32 osd_vis_w;
u32 osd_vis_h;
+ u32 osd_full_w;
+ u32 osd_full_h;
+
int decode_height;
int lace_mode;
u8 draw_frame; /* PVR350 buffer to draw into */
u8 max_frames_buffered; /* Maximum number of frames to buffer */
+
+ struct v4l2_rect main_rect;
+ u32 v4l2_src_w;
+ u32 v4l2_src_h;
};
#define IVTV_VBI_FRAMES 32
set_bit(IVTV_F_S_APPL_IO, &s->s_flags);
retry:
+ /* If possible, just DMA the entire frame - check the data transfer size
+    since we may get here before the stream has been fully set up */
+ if (mode == OUT_YUV && s->q_full.length == 0 && itv->dma_data_req_size) {
+ while (count >= itv->dma_data_req_size) {
+ if (!ivtv_yuv_udma_stream_frame(itv, (void *)user_buf)) {
+ bytes_written += itv->dma_data_req_size;
+ user_buf += itv->dma_data_req_size;
+ count -= itv->dma_data_req_size;
+ } else {
+ break;
+ }
+ }
+ if (count == 0) {
+ IVTV_DEBUG_HI_FILE("Wrote %d bytes to %s (%d)\n", bytes_written, s->name, s->q_full.bytesused);
+ return bytes_written;
+ }
+ }
+
for (;;) {
/* Gather buffers */
while (q.length - q.bytesused < count && (buf = ivtv_dequeue(s, &s->q_io)))
if (s->q_full.length >= itv->dma_data_req_size) {
int got_sig;
+ if (mode == OUT_YUV)
+ ivtv_yuv_setup_stream_frame(itv);
+
prepare_to_wait(&itv->dma_waitq, &wait, TASK_INTERRUPTIBLE);
while (!(got_sig = signal_pending(current)) &&
test_bit(IVTV_F_S_DMA_PENDING, &s->s_flags)) {
set_bit(IVTV_F_I_DEC_YUV, &itv->i_flags);
/* For yuv, we need to know the dma size before we start */
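+ /* An HM12 frame is 720 x h luma plus half as much chroma, i.e. 1080 * h bytes, with h rounded up to a multiple of 32 */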
itv->dma_data_req_size =
- itv->params.width * itv->params.height * 3 / 2;
+ 1080 * ((itv->yuv_info.v4l2_src_h + 31) & ~31);
itv->yuv_info.stream_size = 0;
}
return 0;
fmt->fmt.pix.height = itv->main_rect.height;
fmt->fmt.pix.colorspace = V4L2_COLORSPACE_SMPTE170M;
fmt->fmt.pix.field = V4L2_FIELD_INTERLACED;
- if (itv->output_mode == OUT_UDMA_YUV) {
+ if (streamtype == IVTV_DEC_STREAM_TYPE_YUV) {
switch (itv->yuv_info.lace_mode & IVTV_YUV_MODE_MASK) {
case IVTV_YUV_MODE_INTERLACED:
fmt->fmt.pix.field = (itv->yuv_info.lace_mode & IVTV_YUV_SYNC_MASK) ?
break;
}
fmt->fmt.pix.pixelformat = V4L2_PIX_FMT_HM12;
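+ /* Decoder yuv frames always use a fixed 720-byte luma pitch */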
+ fmt->fmt.pix.bytesperline = 720;
+ fmt->fmt.pix.width = itv->yuv_info.v4l2_src_w;
+ fmt->fmt.pix.height = itv->yuv_info.v4l2_src_h;
- /* YUV size is (Y=(h*w) + UV=(h*(w/2))) */
+ /* YUV size is (Y=(h*720) + UV=(h*(720/2))), with h rounded up to a multiple of 32 */
fmt->fmt.pix.sizeimage =
- fmt->fmt.pix.height * fmt->fmt.pix.width +
- fmt->fmt.pix.height * (fmt->fmt.pix.width / 2);
- }
- else if (itv->output_mode == OUT_YUV ||
- streamtype == IVTV_ENC_STREAM_TYPE_YUV ||
- streamtype == IVTV_DEC_STREAM_TYPE_YUV) {
+ 1080 * ((fmt->fmt.pix.height + 31) & ~31);
+ } else if (streamtype == IVTV_ENC_STREAM_TYPE_YUV) {
fmt->fmt.pix.pixelformat = V4L2_PIX_FMT_HM12;
/* YUV size is (Y=(h*w) + UV=(h*(w/2))) */
fmt->fmt.pix.sizeimage =
static int ivtv_try_or_set_fmt(struct ivtv *itv, int streamtype,
struct v4l2_format *fmt, int set_fmt)
{
+ struct yuv_playback_info *yi = &itv->yuv_info;
struct v4l2_sliced_vbi_format *vbifmt = &fmt->fmt.sliced;
u16 set;
r.width = fmt->fmt.pix.width;
r.height = fmt->fmt.pix.height;
ivtv_get_fmt(itv, streamtype, fmt);
- if (itv->output_mode != OUT_UDMA_YUV) {
- /* TODO: would setting the rect also be valid for this mode? */
- fmt->fmt.pix.width = r.width;
- fmt->fmt.pix.height = r.height;
- }
- if (itv->output_mode == OUT_UDMA_YUV) {
- /* TODO: add checks for validity */
+ fmt->fmt.pix.width = r.width;
+ fmt->fmt.pix.height = r.height;
+ if (streamtype == IVTV_DEC_STREAM_TYPE_YUV) {
fmt->fmt.pix.field = field;
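+ /* Clamp the requested source size to the decoder limits (2x2 up to 720x576) */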
+ if (fmt->fmt.pix.width < 2)
+ fmt->fmt.pix.width = 2;
+ if (fmt->fmt.pix.width > 720)
+ fmt->fmt.pix.width = 720;
+ if (fmt->fmt.pix.height < 2)
+ fmt->fmt.pix.height = 2;
+ if (fmt->fmt.pix.height > 576)
+ fmt->fmt.pix.height = 576;
}
- if (set_fmt) {
- if (itv->output_mode == OUT_UDMA_YUV) {
- switch (field) {
- case V4L2_FIELD_NONE:
- itv->yuv_info.lace_mode = IVTV_YUV_MODE_PROGRESSIVE;
- break;
- case V4L2_FIELD_ANY:
- itv->yuv_info.lace_mode = IVTV_YUV_MODE_AUTO;
- break;
- case V4L2_FIELD_INTERLACED_BT:
- itv->yuv_info.lace_mode =
- IVTV_YUV_MODE_INTERLACED|IVTV_YUV_SYNC_ODD;
- break;
- case V4L2_FIELD_INTERLACED_TB:
- default:
- itv->yuv_info.lace_mode = IVTV_YUV_MODE_INTERLACED;
- break;
- }
- itv->yuv_info.lace_sync_field = (itv->yuv_info.lace_mode & IVTV_YUV_SYNC_MASK) == IVTV_YUV_SYNC_EVEN ? 0 : 1;
+ if (set_fmt && streamtype == IVTV_DEC_STREAM_TYPE_YUV) {
+ /* Return now if we already have some frame data */
+ if (yi->stream_size)
+ return -EBUSY;
- /* Force update of yuv registers */
- itv->yuv_info.yuv_forced_update = 1;
- return 0;
+ yi->v4l2_src_w = r.width;
+ yi->v4l2_src_h = r.height;
+
+ switch (field) {
+ case V4L2_FIELD_NONE:
+ yi->lace_mode = IVTV_YUV_MODE_PROGRESSIVE;
+ break;
+ case V4L2_FIELD_ANY:
+ yi->lace_mode = IVTV_YUV_MODE_AUTO;
+ break;
+ case V4L2_FIELD_INTERLACED_BT:
+ yi->lace_mode =
+ IVTV_YUV_MODE_INTERLACED|IVTV_YUV_SYNC_ODD;
+ break;
+ case V4L2_FIELD_INTERLACED_TB:
+ default:
+ yi->lace_mode = IVTV_YUV_MODE_INTERLACED;
+ break;
}
+ yi->lace_sync_field = (yi->lace_mode & IVTV_YUV_SYNC_MASK) == IVTV_YUV_SYNC_EVEN ? 0 : 1;
+
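+ /* If a yuv DMA is already expected, update the request size for the new source height */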
+ if (test_bit(IVTV_F_I_DEC_YUV, &itv->i_flags))
+ itv->dma_data_req_size =
+ 1080 * ((yi->v4l2_src_h + 31) & ~31);
+
+ /* Force update of yuv registers */
+ yi->yuv_forced_update = 1;
+ return 0;
}
return 0;
}
{
struct ivtv_open_id *id = NULL;
u32 data[CX2341X_MBOX_MAX_DATA];
+ int streamtype = 0;
- if (filp) id = (struct ivtv_open_id *)filp->private_data;
+ if (filp) {
+ id = (struct ivtv_open_id *)filp->private_data;
+ streamtype = id->type;
+ }
switch (cmd) {
case VIDIOC_G_PRIORITY:
cropcap->bounds.height = itv->is_50hz ? 576 : 480;
cropcap->pixelaspect.numerator = itv->is_50hz ? 59 : 10;
cropcap->pixelaspect.denominator = itv->is_50hz ? 54 : 11;
+ } else if (streamtype == IVTV_DEC_STREAM_TYPE_YUV) {
+ cropcap->bounds.width = itv->yuv_info.osd_full_w;
+ cropcap->bounds.height = itv->yuv_info.osd_full_h;
+ cropcap->pixelaspect.numerator = itv->is_out_50hz ? 59 : 10;
+ cropcap->pixelaspect.denominator = itv->is_out_50hz ? 54 : 11;
} else {
cropcap->bounds.height = itv->is_out_50hz ? 576 : 480;
cropcap->pixelaspect.numerator = itv->is_out_50hz ? 59 : 10;
if (crop->type == V4L2_BUF_TYPE_VIDEO_OUTPUT &&
(itv->v4l2_cap & V4L2_CAP_VIDEO_OUTPUT)) {
- if (!ivtv_vapi(itv, CX2341X_OSD_SET_FRAMEBUFFER_WINDOW, 4,
- crop->c.width, crop->c.height, crop->c.left, crop->c.top)) {
- itv->main_rect = crop->c;
+ if (streamtype == IVTV_DEC_STREAM_TYPE_YUV) {
+ itv->yuv_info.main_rect = crop->c;
return 0;
+ } else {
+ if (!ivtv_vapi(itv, CX2341X_OSD_SET_FRAMEBUFFER_WINDOW, 4,
+ crop->c.width, crop->c.height, crop->c.left, crop->c.top)) {
+ itv->main_rect = crop->c;
+ return 0;
+ }
}
return -EINVAL;
}
if (crop->type == V4L2_BUF_TYPE_VIDEO_OUTPUT &&
(itv->v4l2_cap & V4L2_CAP_VIDEO_OUTPUT)) {
- crop->c = itv->main_rect;
+ if (streamtype == IVTV_DEC_STREAM_TYPE_YUV)
+ crop->c = itv->yuv_info.main_rect;
+ else
+ crop->c = itv->main_rect;
return 0;
}
if (crop->type != V4L2_BUF_TYPE_VIDEO_CAPTURE)
case VIDIOC_ENUM_FMT: {
static struct v4l2_fmtdesc formats[] = {
{ 0, 0, 0,
- "HM12 (YUV 4:1:1)", V4L2_PIX_FMT_HM12,
+ "HM12 (YUV 4:2:2)", V4L2_PIX_FMT_HM12,
{ 0, 0, 0, 0 }
},
{ 1, 0, V4L2_FMT_FLAG_COMPRESSED,
itv->main_rect.height = itv->params.height;
ivtv_vapi(itv, CX2341X_OSD_SET_FRAMEBUFFER_WINDOW, 4,
720, itv->main_rect.height, 0, 0);
+ itv->yuv_info.main_rect = itv->main_rect;
+ if (!itv->osd_info) {
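+ /* Framebuffer not registered - fall back to the full-screen size */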
+ itv->yuv_info.osd_full_w = 720;
+ itv->yuv_info.osd_full_h =
+ itv->is_out_50hz ? 576 : 480;
+ }
}
break;
}
void ivtv_dma_stream_dec_prepare(struct ivtv_stream *s, u32 offset, int lock)
{
struct ivtv *itv = s->itv;
+ struct yuv_playback_info *yi = &itv->yuv_info;
+ u8 frame = yi->draw_frame;
+ struct yuv_frame_info *f = &yi->new_frame_info[frame];
struct ivtv_buffer *buf;
- u32 y_size = itv->params.height * itv->params.width;
+ u32 y_size = 720 * ((f->src_h + 31) & ~31);
u32 uv_offset = offset + IVTV_YUV_BUFFER_UV_OFFSET;
int y_done = 0;
int bytes_written = 0;
int idx = 0;
IVTV_DEBUG_HI_DMA("DEC PREPARE DMA %s: %08x %08x\n", s->name, s->q_predma.bytesused, offset);
+
+ /* Insert buffer block for YUV if needed */
+ if (s->type == IVTV_DEC_STREAM_TYPE_YUV && f->offset_y) {
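+ /* DMA a pre-prepared 16-line blanking block ahead of the frame data */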
+ if (yi->blanking_dmaptr) {
+ s->sg_pending[idx].src = yi->blanking_dmaptr;
+ s->sg_pending[idx].dst = offset;
+ s->sg_pending[idx].size = 720 * 16;
+ }
+ offset += 720 * 16;
+ idx++;
+ }
+
list_for_each_entry(buf, &s->q_predma.list, list) {
/* YUV UV Offset from Y Buffer */
if (s->type == IVTV_DEC_STREAM_TYPE_YUV && !y_done &&
ivtv_api_get_data(&itv->dec_mbox, IVTV_MBOX_DMA, data);
if (test_bit(IVTV_F_I_DEC_YUV, &itv->i_flags)) {
- itv->dma_data_req_size = itv->params.width * itv->params.height * 3 / 2;
- itv->dma_data_req_offset = data[1] ? data[1] : yuv_offset[0];
+ itv->dma_data_req_size =
+ 1080 * ((itv->yuv_info.v4l2_src_h + 31) & ~31);
+ itv->dma_data_req_offset = data[1];
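+ /* If a frame is still queued, let the yuv handler mark it complete first */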
+ if (atomic_read(&itv->yuv_info.next_dma_frame) >= 0)
+ ivtv_yuv_frame_complete(itv);
s = &itv->streams[IVTV_DEC_STREAM_TYPE_YUV];
}
else {
set_bit(IVTV_F_S_NEEDS_DATA, &s->s_flags);
}
else {
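+ /* For yuv, set up the next frame before its data is queued for DMA */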
+ if (test_bit(IVTV_F_I_DEC_YUV, &itv->i_flags))
+ ivtv_yuv_setup_stream_frame(itv);
clear_bit(IVTV_F_S_NEEDS_DATA, &s->s_flags);
ivtv_queue_move(s, &s->q_full, NULL, &s->q_predma, itv->dma_data_req_size);
ivtv_dma_stream_dec_prepare(s, itv->dma_data_req_offset + IVTV_DECODER_OFFSET, 0);
IVTV_DEBUG_INFO("Starting decode stream %s (gop_offset %d)\n", s->name, gop_offset);
- /* Clear Streamoff */
- if (s->type == IVTV_DEC_STREAM_TYPE_YUV) {
- /* Initialize Decoder */
- /* Reprogram Decoder YUV Buffers for YUV */
- write_reg(yuv_offset[0] >> 4, 0x82c);
- write_reg((yuv_offset[0] + IVTV_YUV_BUFFER_UV_OFFSET) >> 4, 0x830);
- write_reg(yuv_offset[0] >> 4, 0x834);
- write_reg((yuv_offset[0] + IVTV_YUV_BUFFER_UV_OFFSET) >> 4, 0x838);
-
- write_reg_sync(0x00000000 | (0x0c << 16) | (0x0b << 8), 0x2d24);
-
- write_reg_sync(0x00108080, 0x2898);
- /* Enable YUV decoder output */
- write_reg_sync(0x01, IVTV_REG_VDM);
- }
-
ivtv_setup_v4l2_decode_stream(s);
/* set dma size to 65536 bytes */
ivtv_vapi(itv, CX2341X_DEC_SET_DMA_BLOCK_SIZE, 1, 65536);
+ /* Clear Streamoff */
clear_bit(IVTV_F_S_STREAMOFF, &s->s_flags);
/* Zero out decoder counters */
(itv->yuv_info.draw_frame + 1) % IVTV_YUV_BUFFERS);
}
-int ivtv_yuv_prep_frame(struct ivtv *itv, struct ivtv_dma_frame *args)
+int ivtv_yuv_udma_frame(struct ivtv *itv, struct ivtv_dma_frame *args)
{
DEFINE_WAIT(wait);
int rc = 0;
int got_sig = 0;
-
- IVTV_DEBUG_INFO("yuv_prep_frame\n");
-
- ivtv_yuv_next_free(itv);
- ivtv_yuv_setup_frame(itv, args);
-
/* DMA the frame */
mutex_lock(&itv->udma.lock);
return rc;
}
+/* Set up a frame according to the V4L2 parameters */
+void ivtv_yuv_setup_stream_frame(struct ivtv *itv)
+{
+ struct yuv_playback_info *yi = &itv->yuv_info;
+ struct ivtv_dma_frame dma_args;
+
+ ivtv_yuv_next_free(itv);
+
+ /* Copy V4L2 parameters to an ivtv_dma_frame struct... */
+ dma_args.y_source = 0L;
+ dma_args.uv_source = 0L;
+ dma_args.src.left = 0;
+ dma_args.src.top = 0;
+ dma_args.src.width = yi->v4l2_src_w;
+ dma_args.src.height = yi->v4l2_src_h;
+ dma_args.dst = yi->main_rect;
+ dma_args.src_width = yi->v4l2_src_w;
+ dma_args.src_height = yi->v4l2_src_h;
+
+ /* ... and use the same setup routine as ivtv_yuv_prep_frame */
+ ivtv_yuv_setup_frame(itv, &dma_args);
+
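+ /* If no DMA destination has been requested yet, use the frame buffer just prepared */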
+ if (!itv->dma_data_req_offset)
+ itv->dma_data_req_offset = yuv_offset[yi->draw_frame];
+}
+
+/* Attempt to DMA a frame from a user buffer */
+int ivtv_yuv_udma_stream_frame(struct ivtv *itv, void *src)
+{
+ struct yuv_playback_info *yi = &itv->yuv_info;
+ struct ivtv_dma_frame dma_args;
+
+ ivtv_yuv_setup_stream_frame(itv);
+
+ /* We only need to supply source addresses for this */
+ dma_args.y_source = src;
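+ /* The chroma data follows the 32-line-aligned luma plane in the user buffer */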
+ dma_args.uv_source = src + 720 * ((yi->v4l2_src_h + 31) & ~31);
+ return ivtv_yuv_udma_frame(itv, &dma_args);
+}
+
+/* IVTV_IOC_DMA_FRAME ioctl handler */
+int ivtv_yuv_prep_frame(struct ivtv *itv, struct ivtv_dma_frame *args)
+{
+ IVTV_DEBUG_INFO("yuv_prep_frame\n");
+
+ ivtv_yuv_next_free(itv);
+ ivtv_yuv_setup_frame(itv, args);
+ return ivtv_yuv_udma_frame(itv, args);
+}
+
void ivtv_yuv_close(struct ivtv *itv)
{
int h_filter, v_filter_1, v_filter_2;
extern const u32 yuv_offset[IVTV_YUV_BUFFERS];
int ivtv_yuv_filter_check(struct ivtv *itv);
+void ivtv_yuv_setup_stream_frame(struct ivtv *itv);
+int ivtv_yuv_udma_stream_frame(struct ivtv *itv, void *src);
+void ivtv_yuv_frame_complete(struct ivtv *itv);
int ivtv_yuv_prep_frame(struct ivtv *itv, struct ivtv_dma_frame *args);
void ivtv_yuv_close(struct ivtv *itv);
-void ivtv_yuv_work_handler (struct ivtv *itv);
+void ivtv_yuv_work_handler(struct ivtv *itv);
#endif
ivtvfb_set_display_window(itv, &ivtv_window);
+ /* Pass screen size back to yuv handler */
+ itv->yuv_info.osd_full_w = ivtv_osd.pixel_stride;
+ itv->yuv_info.osd_full_h = ivtv_osd.lines;
+
/* Force update of yuv registers */
itv->yuv_info.yuv_forced_update = 1;