#define icvReleaseCapture_FFMPEG_p cvReleaseCapture_FFMPEG
#define icvGrabFrame_FFMPEG_p cvGrabFrame_FFMPEG
#define icvRetrieveFrame_FFMPEG_p cvRetrieveFrame_FFMPEG
+#define icvRetrieveFrame2_FFMPEG_p cvRetrieveFrame2_FFMPEG
#define icvSetCaptureProperty_FFMPEG_p cvSetCaptureProperty_FFMPEG
#define icvGetCaptureProperty_FFMPEG_p cvGetCaptureProperty_FFMPEG
#define icvCreateVideoWriter_FFMPEG_p cvCreateVideoWriter_FFMPEG
virtual bool retrieveFrame(int flag, cv::OutputArray frame) CV_OVERRIDE
{
unsigned char* data = 0;
- int step=0, width=0, height=0, cn=0;
+ int step=0, width=0, height=0, cn=0, depth=0;
if (!ffmpegCapture)
return false;
if (flag == 0) {
- if (!icvRetrieveFrame_FFMPEG_p(ffmpegCapture, &data, &step, &width, &height, &cn))
+ if (!icvRetrieveFrame2_FFMPEG_p(ffmpegCapture, &data, &step, &width, &height, &cn, &depth))
return false;
}
else {
- if (!ffmpegCapture->retrieveFrame(flag, &data, &step, &width, &height, &cn))
+ if (!ffmpegCapture->retrieveFrame(flag, &data, &step, &width, &height, &cn, &depth))
return false;
}
- cv::Mat tmp(height, width, CV_MAKETYPE(CV_8U, cn), data, step);
+ cv::Mat tmp(height, width, CV_MAKETYPE(depth, cn), data, step);
applyMetadataRotation(*this, tmp);
tmp.copyTo(frame);
{
if(!ffmpegWriter)
return;
- CV_Assert(image.depth() == CV_8U);
+ CV_Assert(image.depth() == CV_8U || image.depth() == CV_16U);
// if UMat, try GPU to GPU copy using OpenCL extensions
if (image.isUMat()) {
int step;
int width;
int height;
- int cn;
};
double getProperty(int) const;
bool setProperty(int, double);
bool grabFrame();
- bool retrieveFrame(int flag, unsigned char** data, int* step, int* width, int* height, int* cn);
+ bool retrieveFrame(int flag, unsigned char** data, int* step, int* width, int* height, int* cn, int* depth);
bool retrieveHWFrame(cv::OutputArray output);
void rotateFrame(cv::Mat &mat) const;
bool processRawPacket();
bool rawMode;
bool rawModeInitialized;
+ bool convertRGB;
AVPacket packet_filtered;
#if LIBAVFORMAT_BUILD >= CALC_FFMPEG_VERSION(58, 20, 100)
AVBSFContext* bsfc;
rawMode = false;
rawModeInitialized = false;
+ convertRGB = true;
memset(&packet_filtered, 0, sizeof(packet_filtered));
av_init_packet(&packet_filtered);
bsfc = NULL;
if (!params.empty())
{
+ convertRGB = params.get<bool>(CAP_PROP_CONVERT_RGB, true);
+ if (!convertRGB)
+ {
+ CV_LOG_WARNING(NULL, "VIDEOIO/FFMPEG: BGR conversion turned OFF, decoded frame will be "
+ "returned in its original format. "
+ "Multiplanar formats are not supported by the backend. "
+ "Only GRAY8/GRAY16LE pixel formats have been tested. "
+ "Use at your own risk.");
+ }
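+ // Usage sketch (for illustration): open with conversion disabled to receive frames
+ // in the source pixel format, e.g.
+ //   VideoCapture cap; cap.open(filename, CAP_FFMPEG, {CAP_PROP_CONVERT_RGB, false});
+ //   cap.read(frame);  // frame is CV_16UC1 for GRAY16LE input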
if (params.has(CAP_PROP_FORMAT))
{
int value = params.get<int>(CAP_PROP_FORMAT);
frame.width = context->width;
frame.height = context->height;
- frame.cn = 3;
frame.step = 0;
frame.data = NULL;
get_rotation_angle();
return valid;
}
-bool CvCapture_FFMPEG::retrieveFrame(int flag, unsigned char** data, int* step, int* width, int* height, int* cn)
+bool CvCapture_FFMPEG::retrieveFrame(int flag, unsigned char** data, int* step, int* width, int* height, int* cn, int* depth)
{
if (!video_st || !context)
return false;
*width = *step;
*height = 1;
*cn = 1;
+ *depth = CV_8U;
return ret;
}
if (!sw_picture || !sw_picture->data[0])
return false;
+ CV_LOG_DEBUG(NULL, "Input picture format: " << av_get_pix_fmt_name((AVPixelFormat)sw_picture->format));
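+ // If BGR conversion is turned off, keep the decoder's native pixel format;
+ // otherwise convert to BGR24 as before. The switch below maps the chosen
+ // format to an OpenCV depth/channel pair.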
+ const AVPixelFormat result_format = convertRGB ? AV_PIX_FMT_BGR24 : (AVPixelFormat)sw_picture->format;
+ switch (result_format)
+ {
+ case AV_PIX_FMT_BGR24: *depth = CV_8U; *cn = 3; break;
+ case AV_PIX_FMT_GRAY8: *depth = CV_8U; *cn = 1; break;
+ case AV_PIX_FMT_GRAY16LE: *depth = CV_16U; *cn = 1; break;
+ default:
+ CV_LOG_WARNING(NULL, "Unknown/unsupported picture format: " << av_get_pix_fmt_name(result_format)
+ << ", will be treated as 8UC1.");
+ *depth = CV_8U;
+ *cn = 1;
+ break; // TODO: return false?
+ }
+
if( img_convert_ctx == NULL ||
frame.width != video_st->CV_FFMPEG_CODEC_FIELD->width ||
frame.height != video_st->CV_FFMPEG_CODEC_FIELD->height ||
buffer_width, buffer_height,
(AVPixelFormat)sw_picture->format,
buffer_width, buffer_height,
- AV_PIX_FMT_BGR24,
+ result_format,
SWS_BICUBIC,
NULL, NULL, NULL
);
#if USE_AV_FRAME_GET_BUFFER
av_frame_unref(&rgb_picture);
- rgb_picture.format = AV_PIX_FMT_BGR24;
+ rgb_picture.format = result_format;
rgb_picture.width = buffer_width;
rgb_picture.height = buffer_height;
if (0 != av_frame_get_buffer(&rgb_picture, 32))
int aligns[AV_NUM_DATA_POINTERS];
avcodec_align_dimensions2(video_st->codec, &buffer_width, &buffer_height, aligns);
rgb_picture.data[0] = (uint8_t*)realloc(rgb_picture.data[0],
- _opencv_ffmpeg_av_image_get_buffer_size( AV_PIX_FMT_BGR24,
+ _opencv_ffmpeg_av_image_get_buffer_size( result_format,
buffer_width, buffer_height ));
_opencv_ffmpeg_av_image_fill_arrays(&rgb_picture, rgb_picture.data[0],
- AV_PIX_FMT_BGR24, buffer_width, buffer_height );
+ result_format, buffer_width, buffer_height );
#endif
frame.width = video_st->CV_FFMPEG_CODEC_FIELD->width;
frame.height = video_st->CV_FFMPEG_CODEC_FIELD->height;
- frame.cn = 3;
frame.data = rgb_picture.data[0];
frame.step = rgb_picture.linesize[0];
}
*step = frame.step;
*width = frame.width;
*height = frame.height;
- *cn = frame.cn;
#if USE_AV_HW_CODECS
if (sw_picture != picture)
if (rawMode)
return -1;
break;
+ case CAP_PROP_CONVERT_RGB:
+ return convertRGB;
case CAP_PROP_LRF_HAS_KEY_FRAME: {
const AVPacket& p = bsfc ? packet_filtered : packet;
return ((p.flags & AV_PKT_FLAG_KEY) != 0) ? 1 : 0;
if (value == -1)
return setRaw();
return false;
+ case CAP_PROP_CONVERT_RGB:
+ convertRGB = (value != 0);
+ return true;
case CAP_PROP_ORIENTATION_AUTO:
#if LIBAVUTIL_BUILD >= CALC_FFMPEG_VERSION(52, 94, 100)
rotation_auto = value != 0 ? true : false;
c->gop_size = 12; /* emit one intra frame every twelve frames at most */
c->pix_fmt = pixel_format;
-
if (c->codec_id == CV_CODEC(CODEC_ID_MPEG2VIDEO)) {
c->max_b_frames = 2;
}
return false;
}
}
- else if (input_pix_fmt == AV_PIX_FMT_GRAY8) {
+ else if (input_pix_fmt == AV_PIX_FMT_GRAY8 || input_pix_fmt == AV_PIX_FMT_GRAY16LE) {
if (cn != 1) {
return false;
}
}
else {
+ CV_LOG_WARNING(NULL, "Input data does not match selected pixel format: "
+ << av_get_pix_fmt_name(input_pix_fmt)
+ << ", number of channels: " << cn);
CV_Assert(false);
}
close();
const bool is_color = params.get(VIDEOWRITER_PROP_IS_COLOR, true);
+ const int depth = params.get(VIDEOWRITER_PROP_DEPTH, CV_8U);
+ const bool is_supported = depth == CV_8U || (depth == CV_16U && !is_color);
+ if (!is_supported)
+ {
+ CV_LOG_WARNING(NULL, "Unsupported depth/isColor combination is selected, "
+ "only CV_8UC1/CV_8UC3/CV_16UC1 are supported.");
+ return false;
+ }
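+ // Usage sketch (for illustration): 16-bit output is requested through the writer
+ // parameters, e.g.
+ //   writer.open(filename, CAP_FFMPEG, fourcc, fps, size,
+ //               {VIDEOWRITER_PROP_DEPTH, CV_16U, VIDEOWRITER_PROP_IS_COLOR, false});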
if (params.has(VIDEOWRITER_PROP_HW_ACCELERATION))
{
va_type = params.get<VideoAccelerationType>(VIDEOWRITER_PROP_HW_ACCELERATION, VIDEO_ACCELERATION_NONE);
return false;
/* determine optimal pixel format */
- if (is_color) {
- input_pix_fmt = AV_PIX_FMT_BGR24;
+ if (is_color)
+ {
+ switch (depth)
+ {
+ case CV_8U: input_pix_fmt = AV_PIX_FMT_BGR24; break;
+ default:
+ CV_LOG_WARNING(NULL, "Unsupported input depth for color image: " << depth);
+ return false;
+ }
}
- else {
- input_pix_fmt = AV_PIX_FMT_GRAY8;
+ else
+ {
+ switch (depth)
+ {
+ case CV_8U: input_pix_fmt = AV_PIX_FMT_GRAY8; break;
+ case CV_16U: input_pix_fmt = AV_PIX_FMT_GRAY16LE; break;
+ default:
+ CV_LOG_WARNING(NULL, "Unsupported input depth for grayscale image: " << depth);
+ return false;
+ }
}
+ CV_LOG_DEBUG(NULL, "Selected pixel format: " << av_get_pix_fmt_name(input_pix_fmt));
if (fourcc == -1)
{
int cvRetrieveFrame_FFMPEG(CvCapture_FFMPEG* capture, unsigned char** data, int* step, int* width, int* height, int* cn)
{
- return capture->retrieveFrame(0, data, step, width, height, cn);
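+ // Keep the legacy entry point working: delegate to the depth-aware variant and
+ // discard the reported depth.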
+ int depth = CV_8U;
+ return cvRetrieveFrame2_FFMPEG(capture, data, step, width, height, cn, &depth);
+}
+
+int cvRetrieveFrame2_FFMPEG(CvCapture_FFMPEG* capture, unsigned char** data, int* step, int* width, int* height, int* cn, int* depth)
+{
+ return capture->retrieveFrame(0, data, step, width, height, cn, depth);
}
static CvVideoWriter_FFMPEG* cvCreateVideoWriterWithParams_FFMPEG( const char* filename, int fourcc, double fps,
OPENCV_FFMPEG_API int cvGrabFrame_FFMPEG(struct CvCapture_FFMPEG* cap);
OPENCV_FFMPEG_API int cvRetrieveFrame_FFMPEG(struct CvCapture_FFMPEG* capture, unsigned char** data,
int* step, int* width, int* height, int* cn);
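+// Same as cvRetrieveFrame_FFMPEG, but additionally reports the element depth
+// (CV_8U or CV_16U) of the returned frame.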
+OPENCV_FFMPEG_API int cvRetrieveFrame2_FFMPEG(struct CvCapture_FFMPEG* capture, unsigned char** data,
+ int* step, int* width, int* height, int* cn, int* depth);
OPENCV_FFMPEG_API void cvReleaseCapture_FFMPEG(struct CvCapture_FFMPEG** cap);
OPENCV_FFMPEG_API struct CvVideoWriter_FFMPEG* cvCreateVideoWriter_FFMPEG(const char* filename,
make_tuple("mp4v", "avi", bigSize),
make_tuple("MPEG", "avi", Size(720, 576)),
make_tuple("XVID", "avi", bigSize),
- make_tuple("H264", "mp4", Size(4096, 2160))
+ make_tuple("H264", "mp4", Size(4096, 2160)),
+ make_tuple("FFV1", "avi", bigSize),
+ make_tuple("FFV1", "mkv", bigSize)
};
INSTANTIATE_TEST_CASE_P(videoio, videoio_ffmpeg, testing::ValuesIn(entries));
EXPECT_EQ((int)14315, n_frames);
}
+
+typedef tuple<string, string, bool, bool> FourCC_Ext_Color_Support;
+typedef testing::TestWithParam< FourCC_Ext_Color_Support > videoio_ffmpeg_16bit;
+
+TEST_P(videoio_ffmpeg_16bit, basic)
+{
+ if (!videoio_registry::hasBackend(CAP_FFMPEG))
+ throw SkipTestException("FFmpeg backend was not found");
+
+ const int fourcc = fourccFromString(get<0>(GetParam()));
+ const string ext = string(".") + get<1>(GetParam());
+ const bool isColor = get<2>(GetParam());
+ const bool isSupported = get<3>(GetParam());
+ const int cn = isColor ? 3 : 1;
+ const int dataType = CV_16UC(cn);
+
+ const string filename = tempfile(ext.c_str());
+ const Size sz(640, 480);
+ const double fps = 30.0;
+ const double time_sec = 1;
+ const int numFrames = static_cast<int>(fps * time_sec);
+
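+ // Write one second of synthetic frames; opening the writer must fail for
+ // unsupported depth/isColor combinations.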
+ {
+ VideoWriter writer;
+ writer.open(filename, CAP_FFMPEG, fourcc, fps, sz,
+ {
+ VIDEOWRITER_PROP_DEPTH, CV_16U,
+ VIDEOWRITER_PROP_IS_COLOR, isColor
+ });
+
+ ASSERT_EQ(isSupported, writer.isOpened());
+ if (isSupported)
+ {
+ Mat img(sz, dataType, Scalar::all(0));
+ const int coeff = cvRound(min(sz.width, sz.height)/(fps * time_sec));
+ for (int i = 0 ; i < numFrames; i++ )
+ {
+ rectangle(img,
+ Point2i(coeff * i, coeff * i),
+ Point2i(coeff * (i + 1), coeff * (i + 1)),
+ Scalar::all(255 * (1.0 - static_cast<double>(i) / (fps * time_sec * 2))),
+ -1);
+ writer << img;
+ }
+ writer.release();
+ EXPECT_GT(getFileSize(filename), 8192);
+ }
+ }
+
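+ // For supported modes, read the file back with BGR conversion disabled and
+ // verify the frame type, size and count.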
+ if (isSupported)
+ {
+ VideoCapture cap;
+ ASSERT_TRUE(cap.open(filename, CAP_FFMPEG, {CAP_PROP_CONVERT_RGB, false}));
+ ASSERT_TRUE(cap.isOpened());
+ Mat img;
+ bool res = true;
+ int numRead = 0;
+ while(res)
+ {
+ res = cap.read(img);
+ if (res)
+ {
+ ++numRead;
+ ASSERT_EQ(img.type(), dataType);
+ ASSERT_EQ(img.size(), sz);
+ }
+ }
+ ASSERT_EQ(numRead, numFrames);
+ remove(filename.c_str());
+ }
+}
+
+const FourCC_Ext_Color_Support sixteen_bit_modes[] =
+{
+ // 16-bit grayscale is supported
+ make_tuple("FFV1", "avi", false, true),
+ make_tuple("FFV1", "mkv", false, true),
+ // 16-bit color formats are NOT supported
+ make_tuple("FFV1", "avi", true, false),
+ make_tuple("FFV1", "mkv", true, false),
+};
+
+INSTANTIATE_TEST_CASE_P(/**/, videoio_ffmpeg_16bit, testing::ValuesIn(sixteen_bit_modes));
+
}} // namespace
{"mkv", "XVID", 30.f, CAP_FFMPEG},
{"mkv", "MPEG", 30.f, CAP_FFMPEG},
{"mkv", "MJPG", 30.f, CAP_FFMPEG},
+ {"avi", "FFV1", 30.f, CAP_FFMPEG},
+ {"mkv", "FFV1", 30.f, CAP_FFMPEG},
{"avi", "MPEG", 28.f, CAP_GSTREAMER},
{"avi", "MJPG", 30.f, CAP_GSTREAMER},