#include <libavutil/mathematics.h>
#include <libavutil/opt.h>
+// https://github.com/FFmpeg/FFmpeg/blame/d79c240196f43b93bd204363f1facc270029f113/doc/APIchanges#L1689-L1695
+#if LIBAVUTIL_BUILD >= (LIBAVUTIL_VERSION_MICRO >= 100 \
+ ? CALC_FFMPEG_VERSION(52, 85, 100) : CALC_FFMPEG_VERSION(53, 15, 0))
#include <libavutil/display.h>
+#endif
#if LIBAVUTIL_BUILD >= (LIBAVUTIL_VERSION_MICRO >= 100 \
? CALC_FFMPEG_VERSION(51, 63, 100) : CALC_FFMPEG_VERSION(54, 6, 0))
#include <libavcodec/avcodec.h>
#include <libswscale/swscale.h>
+#ifdef HAVE_FFMPEG_LIBAVDEVICE
+#include <libavdevice/avdevice.h>
+#endif
+
+// https://github.com/FFmpeg/FFmpeg/blob/b6af56c034759b81985f8ea094e41cbd5f7fecfb/doc/APIchanges#L602-L605
+// lavf >= 58.9.100 registers (de)muxers implicitly; CV_FFMPEG_REGISTER marks
+// older builds that still need the explicit av_register_all()-style calls.
+#if LIBAVFORMAT_BUILD < CALC_FFMPEG_VERSION(58, 9, 100)
+# define CV_FFMPEG_REGISTER
+#endif
+
+// https://github.com/FFmpeg/FFmpeg/blob/b6af56c034759b81985f8ea094e41cbd5f7fecfb/doc/APIchanges#L654-L657
+// lavc >= 58.9.100 no longer needs a custom lock manager; CV_FFMPEG_LOCKMGR
+// marks older builds where one must still be installed for thread safety.
+#if LIBAVCODEC_BUILD < CALC_FFMPEG_VERSION(58, 9, 100)
+# define CV_FFMPEG_LOCKMGR
+#endif
+
+// https://github.com/FFmpeg/FFmpeg/blob/b6af56c034759b81985f8ea094e41cbd5f7fecfb/doc/APIchanges#L390-L392
+// Since lavc 58.87.100 the bitstream-filter API (AVBSFContext) lives in its
+// own public header instead of avcodec.h.
+#if LIBAVCODEC_BUILD >= CALC_FFMPEG_VERSION(58, 87, 100)
+#include <libavcodec/bsf.h>
+#endif
+
+#include <libavutil/pixdesc.h>
+
+// https://github.com/FFmpeg/FFmpeg/blob/b6af56c034759b81985f8ea094e41cbd5f7fecfb/doc/APIchanges#L208-L210
+// lavf 59 constified the AVInputFormat/AVOutputFormat pointers returned by
+// the API; CV_FFMPEG_FMT_CONST lets the same code compile against both ABIs.
+#if LIBAVFORMAT_BUILD >= CALC_FFMPEG_VERSION(59, 0, 100)
+# define CV_FFMPEG_FMT_CONST const
+#else
+# define CV_FFMPEG_FMT_CONST
+#endif
+
+// https://github.com/FFmpeg/FFmpeg/blob/b6af56c034759b81985f8ea094e41cbd5f7fecfb/doc/APIchanges#L623-L624
+// lavf 58.7.100 introduced the AVFormatContext.url field (superseding the
+// fixed-size 'filename' member).
+#if LIBAVFORMAT_BUILD >= CALC_FFMPEG_VERSION(58, 7, 100)
+# define CV_FFMPEG_URL
+#endif
+
+// AVStream.codec deprecated in favor of AVStream.codecpar
+// https://github.com/FFmpeg/FFmpeg/blob/b6af56c034759b81985f8ea094e41cbd5f7fecfb/doc/APIchanges#L1039-L1040
+// NOTE(review): codecpar exists since lavf 57.33.100 (see the commented-out
+// guard below), but the switch is only taken once the legacy field is gone.
+#if LIBAVFORMAT_BUILD >= CALC_FFMPEG_VERSION(59, 16, 100)
+//#if LIBAVFORMAT_BUILD >= CALC_FFMPEG_VERSION(57, 33, 100)
+# define CV_FFMPEG_CODECPAR
+# define CV_FFMPEG_CODEC_FIELD codecpar
+#else
+# define CV_FFMPEG_CODEC_FIELD codec
+#endif
+
+// AVFrame.pkt_pts was folded into AVFrame.pts on newer FFmpeg; pick the
+// field this build provides. NOTE(review): threshold mirrors the codecpar
+// switch above -- confirm against doc/APIchanges.
+#if LIBAVFORMAT_BUILD >= CALC_FFMPEG_VERSION(59, 16, 100)
+# define CV_FFMPEG_PTS_FIELD pts
+#else
+# define CV_FFMPEG_PTS_FIELD pkt_pts
+#endif
+
+// https://github.com/FFmpeg/FFmpeg/blob/b6af56c034759b81985f8ea094e41cbd5f7fecfb/doc/APIchanges#L1757-L1758
+// av_make_q() appeared in lavu 52.63.100; provide an equivalent constructor
+// for older builds so the rest of the code can use it unconditionally.
+#if LIBAVUTIL_BUILD < CALC_FFMPEG_VERSION(52, 63, 100)
+inline static AVRational av_make_q(int num, int den)
+{
+ AVRational res;
+ res.num = num;
+ res.den = den;
+ return res;
+}
+#endif
+
+
// https://github.com/FFmpeg/FFmpeg/blob/b6af56c034759b81985f8ea094e41cbd5f7fecfb/doc/APIchanges#L602-L605
#if LIBAVFORMAT_BUILD < CALC_FFMPEG_VERSION(58, 9, 100)
}
#endif // USE_AV_INTERRUPT_CALLBACK
-static int get_number_of_cpus(void)
-{
-#if defined _WIN32
- SYSTEM_INFO sysinfo;
- GetSystemInfo( &sysinfo );
-
- return (int)sysinfo.dwNumberOfProcessors;
-#elif defined __linux__ || defined __HAIKU__
- return (int)sysconf( _SC_NPROCESSORS_ONLN );
-#elif defined __APPLE__
- int numCPU=0;
- int mib[4];
- size_t len = sizeof(numCPU);
-
- // set the mib for hw.ncpu
- mib[0] = CTL_HW;
- mib[1] = HW_AVAILCPU; // alternatively, try HW_NCPU;
-
- // get the number of CPUs from the system
- sysctl(mib, 2, &numCPU, &len, NULL, 0);
-
- if( numCPU < 1 )
- {
- mib[1] = HW_NCPU;
- sysctl( mib, 2, &numCPU, &len, NULL, 0 );
-
- if( numCPU < 1 )
- numCPU = 1;
- }
-
- return (int)numCPU;
-#else
- return 1;
-#endif
-}
-
struct Image_FFMPEG
{
int step;
int width;
int height;
- int cn;
};
double getProperty(int) const;
bool setProperty(int, double);
bool grabFrame();
- bool retrieveFrame(int flag, unsigned char** data, int* step, int* width, int* height, int* cn);
+ bool retrieveFrame(int flag, unsigned char** data, int* step, int* width, int* height, int* cn, int* depth);
bool retrieveHWFrame(cv::OutputArray output);
void rotateFrame(cv::Mat &mat) const;
bool processRawPacket();
bool rawMode;
bool rawModeInitialized;
+ bool convertRGB;
AVPacket packet_filtered;
#if LIBAVFORMAT_BUILD >= CALC_FFMPEG_VERSION(58, 20, 100)
AVBSFContext* bsfc;
void CvCapture_FFMPEG::init()
{
+#ifdef HAVE_FFMPEG_LIBAVDEVICE
+ //libavdevice is available, so let's register all input and output devices (e.g v4l2)
+ avdevice_register_all();
+#endif
ic = 0;
video_stream = -1;
video_st = 0;
rawMode = false;
rawModeInitialized = false;
+ convertRGB = true;
memset(&packet_filtered, 0, sizeof(packet_filtered));
av_init_packet(&packet_filtered);
bsfc = NULL;
inline void fill_codec_context(AVCodecContext * enc, AVDictionary * dict)
{
-//#ifdef FF_API_THREAD_INIT
-// avcodec_thread_init(enc, get_number_of_cpus());
-//#else
- enc->thread_count = get_number_of_cpus();
-//#endif
+ if (!enc->thread_count)
+ {
+ int nCpus = cv::getNumberOfCPUs();
+ int requestedThreads = std::min(nCpus, 16); // [OPENCV:FFMPEG:24] Application has requested XX threads. Using a thread count greater than 16 is not recommended.
+ char* threads_option = getenv("OPENCV_FFMPEG_THREADS");
+ if (threads_option != NULL)
+ {
+ requestedThreads = atoi(threads_option);
+ }
+ enc->thread_count = requestedThreads;
+ }
AVDictionaryEntry* avdiscard_entry = av_dict_get(dict, "avdiscard", NULL, 0);
unsigned i;
bool valid = false;
+ int nThreads = 0;
close();
if (!params.empty())
{
+ convertRGB = params.get<bool>(CAP_PROP_CONVERT_RGB, true);
+ if (!convertRGB)
+ {
+ CV_LOG_WARNING(NULL, "VIDEOIO/FFMPEG: BGR conversion turned OFF, decoded frame will be "
+ "returned in its original format. "
+ "Multiplanar formats are not supported by the backend. "
+ "Only GRAY8/GRAY16LE pixel formats have been tested. "
+ "Use at your own risk.");
+ }
if (params.has(CAP_PROP_FORMAT))
{
int value = params.get<int>(CAP_PROP_FORMAT);
read_timeout = params.get<int>(CAP_PROP_READ_TIMEOUT_MSEC);
}
#endif
+ if (params.has(CAP_PROP_N_THREADS))
+ {
+ nThreads = params.get<int>(CAP_PROP_N_THREADS);
+ }
if (params.warnUnusedParameters())
{
CV_LOG_ERROR(NULL, "VIDEOIO/FFMPEG: unsupported parameters in .open(), see logger INFO channel for details. Bailout");
}
else
{
+ CV_LOG_DEBUG(NULL, "VIDEOIO/FFMPEG: using capture options from environment: " << options);
#if LIBAVUTIL_BUILD >= (LIBAVUTIL_VERSION_MICRO >= 100 ? CALC_FFMPEG_VERSION(52, 17, 100) : CALC_FFMPEG_VERSION(52, 7, 0))
av_dict_parse_string(&dict, options, ";", "|", 0);
#else
#endif
continue;
}
+ context->thread_count = nThreads;
fill_codec_context(context, dict);
#ifdef CV_FFMPEG_CODECPAR
avcodec_parameters_to_context(context, par);
frame.width = context->width;
frame.height = context->height;
- frame.cn = 3;
frame.step = 0;
frame.data = NULL;
get_rotation_angle();
#if USE_AV_INTERRUPT_CALLBACK
// activate interrupt callback
+ interrupt_metadata.timeout = 0;
get_monotonic_time(&interrupt_metadata.value);
interrupt_metadata.timeout_after_ms = read_timeout;
#endif
ret = got_picture ? 0 : -1;
#endif
if (ret >= 0) {
- //picture_pts = picture->best_effort_timestamp;
- if( picture_pts == AV_NOPTS_VALUE_ )
- picture_pts = picture->CV_FFMPEG_PTS_FIELD != AV_NOPTS_VALUE_ && picture->CV_FFMPEG_PTS_FIELD != 0 ? picture->CV_FFMPEG_PTS_FIELD : picture->pkt_dts;
-
valid = true;
} else if (ret == AVERROR(EAGAIN)) {
continue;
}
}
- if (valid)
+ if (valid) {
+ if( picture_pts == AV_NOPTS_VALUE_ )
+ picture_pts = picture->CV_FFMPEG_PTS_FIELD != AV_NOPTS_VALUE_ && picture->CV_FFMPEG_PTS_FIELD != 0 ? picture->CV_FFMPEG_PTS_FIELD : picture->pkt_dts;
frame_number++;
+ }
if (!rawMode && valid && first_frame_number < 0)
first_frame_number = dts_to_frame_number(picture_pts);
return valid;
}
-bool CvCapture_FFMPEG::retrieveFrame(int flag, unsigned char** data, int* step, int* width, int* height, int* cn)
+bool CvCapture_FFMPEG::retrieveFrame(int flag, unsigned char** data, int* step, int* width, int* height, int* cn, int* depth)
{
if (!video_st || !context)
return false;
*width = *step;
*height = 1;
*cn = 1;
+ *depth = CV_8U;
return ret;
}
if (!sw_picture || !sw_picture->data[0])
return false;
+ CV_LOG_DEBUG(NULL, "Input picture format: " << av_get_pix_fmt_name((AVPixelFormat)sw_picture->format));
+ const AVPixelFormat result_format = convertRGB ? AV_PIX_FMT_BGR24 : (AVPixelFormat)sw_picture->format;
+ switch (result_format)
+ {
+ case AV_PIX_FMT_BGR24: *depth = CV_8U; *cn = 3; break;
+ case AV_PIX_FMT_GRAY8: *depth = CV_8U; *cn = 1; break;
+ case AV_PIX_FMT_GRAY16LE: *depth = CV_16U; *cn = 1; break;
+ default:
+ CV_LOG_WARNING(NULL, "Unknown/unsupported picture format: " << av_get_pix_fmt_name(result_format)
+ << ", will be treated as 8UC1.");
+ *depth = CV_8U;
+ *cn = 1;
+ break; // TODO: return false?
+ }
+
if( img_convert_ctx == NULL ||
frame.width != video_st->CV_FFMPEG_CODEC_FIELD->width ||
frame.height != video_st->CV_FFMPEG_CODEC_FIELD->height ||
buffer_width, buffer_height,
(AVPixelFormat)sw_picture->format,
buffer_width, buffer_height,
- AV_PIX_FMT_BGR24,
+ result_format,
SWS_BICUBIC,
NULL, NULL, NULL
);
#if USE_AV_FRAME_GET_BUFFER
av_frame_unref(&rgb_picture);
- rgb_picture.format = AV_PIX_FMT_BGR24;
+ rgb_picture.format = result_format;
rgb_picture.width = buffer_width;
rgb_picture.height = buffer_height;
if (0 != av_frame_get_buffer(&rgb_picture, 32))
int aligns[AV_NUM_DATA_POINTERS];
avcodec_align_dimensions2(video_st->codec, &buffer_width, &buffer_height, aligns);
rgb_picture.data[0] = (uint8_t*)realloc(rgb_picture.data[0],
- _opencv_ffmpeg_av_image_get_buffer_size( AV_PIX_FMT_BGR24,
+ _opencv_ffmpeg_av_image_get_buffer_size( result_format,
buffer_width, buffer_height ));
_opencv_ffmpeg_av_image_fill_arrays(&rgb_picture, rgb_picture.data[0],
- AV_PIX_FMT_BGR24, buffer_width, buffer_height );
+ result_format, buffer_width, buffer_height );
#endif
frame.width = video_st->CV_FFMPEG_CODEC_FIELD->width;
frame.height = video_st->CV_FFMPEG_CODEC_FIELD->height;
- frame.cn = 3;
frame.data = rgb_picture.data[0];
frame.step = rgb_picture.linesize[0];
}
img_convert_ctx,
sw_picture->data,
sw_picture->linesize,
- 0, context->coded_height,
+ 0, sw_picture->height,
rgb_picture.data,
rgb_picture.linesize
);
*step = frame.step;
*width = frame.width;
*height = frame.height;
- *cn = frame.cn;
#if USE_AV_HW_CODECS
if (sw_picture != picture)
return (double)((rotation_auto && ((rotation_angle%180) != 0)) ? frame.height : frame.width);
case CAP_PROP_FRAME_HEIGHT:
return (double)((rotation_auto && ((rotation_angle%180) != 0)) ? frame.width : frame.height);
+ case CAP_PROP_FRAME_TYPE:
+ return (double)av_get_picture_type_char(picture->pict_type);
case CAP_PROP_FPS:
return get_fps();
- case CAP_PROP_FOURCC:
+ case CAP_PROP_FOURCC: {
codec_id = video_st->CV_FFMPEG_CODEC_FIELD->codec_id;
codec_tag = (double) video_st->CV_FFMPEG_CODEC_FIELD->codec_tag;
}
codec_fourcc = _opencv_avcodec_get_name(codec_id);
- if(!codec_fourcc || strlen(codec_fourcc) < 4 || strcmp(codec_fourcc, "unknown_codec") == 0)
+ if (!codec_fourcc || strcmp(codec_fourcc, "unknown_codec") == 0 || strlen(codec_fourcc) != 4)
{
- return codec_tag;
+ const struct AVCodecTag* fallback_tags[] = {
+ // APIchanges:
+ // 2012-01-31 - dd6d3b0 - lavf 54.01.0
+ // Add avformat_get_riff_video_tags() and avformat_get_riff_audio_tags().
+ avformat_get_riff_video_tags(),
+#if LIBAVFORMAT_BUILD >= CALC_FFMPEG_VERSION(55, 25, 100) && defined LIBAVFORMAT_VERSION_MICRO && LIBAVFORMAT_VERSION_MICRO >= 100
+ // APIchanges: ffmpeg only
+ // 2014-01-19 - 1a193c4 - lavf 55.25.100 - avformat.h
+ // Add avformat_get_mov_video_tags() and avformat_get_mov_audio_tags().
+ avformat_get_mov_video_tags(),
+#endif
+ codec_bmp_tags, // fallback for avformat < 54.1
+ NULL };
+ return av_codec_get_tag(fallback_tags, codec_id);
}
return (double) CV_FOURCC(codec_fourcc[0], codec_fourcc[1], codec_fourcc[2], codec_fourcc[3]);
+ }
case CAP_PROP_SAR_NUM:
return _opencv_ffmpeg_get_sample_aspect_ratio(ic->streams[video_stream]).num;
case CAP_PROP_SAR_DEN:
if (rawMode)
return -1;
break;
+ case CAP_PROP_CONVERT_RGB:
+ return convertRGB;
case CAP_PROP_LRF_HAS_KEY_FRAME: {
const AVPacket& p = bsfc ? packet_filtered : packet;
return ((p.flags & AV_PKT_FLAG_KEY) != 0) ? 1 : 0;
case CAP_PROP_STREAM_OPEN_TIME_USEC:
//ic->start_time_realtime is in microseconds
return ((double)ic->start_time_realtime);
+ case CAP_PROP_N_THREADS:
+ return static_cast<double>(context->thread_count);
default:
break;
}
data = av_stream_get_side_data(video_st, AV_PKT_DATA_DISPLAYMATRIX, NULL);
if (data)
{
- rotation_angle = cvRound(av_display_rotation_get((const int32_t*)data));
+ rotation_angle = -cvRound(av_display_rotation_get((const int32_t*)data));
if (rotation_angle < 0)
rotation_angle += 360;
}
if (value == -1)
return setRaw();
return false;
+ case CAP_PROP_CONVERT_RGB:
+ convertRGB = (value != 0);
+ return true;
case CAP_PROP_ORIENTATION_AUTO:
#if LIBAVUTIL_BUILD >= CALC_FFMPEG_VERSION(52, 94, 100)
rotation_auto = value != 0 ? true : false;
c->gop_size = 12; /* emit one intra frame every twelve frames at most */
c->pix_fmt = pixel_format;
-
if (c->codec_id == CV_CODEC(CODEC_ID_MPEG2VIDEO)) {
c->max_b_frames = 2;
}
return false;
}
}
- else if (input_pix_fmt == AV_PIX_FMT_GRAY8) {
+ else if (input_pix_fmt == AV_PIX_FMT_GRAY8 || input_pix_fmt == AV_PIX_FMT_GRAY16LE) {
if (cn != 1) {
return false;
}
}
else {
+ CV_LOG_WARNING(NULL, "Input data does not match selected pixel format: "
+ << av_get_pix_fmt_name(input_pix_fmt)
+ << ", number of channels: " << cn);
CV_Assert(false);
}
close();
const bool is_color = params.get(VIDEOWRITER_PROP_IS_COLOR, true);
+ const int depth = params.get(VIDEOWRITER_PROP_DEPTH, CV_8U);
+ const bool is_supported = depth == CV_8U || (depth == CV_16U && !is_color);
+ if (!is_supported)
+ {
+ CV_LOG_WARNING(NULL, "Unsupported depth/isColor combination is selected, "
+ "only CV_8UC1/CV_8UC3/CV_16UC1 are supported.");
+ return false;
+ }
if (params.has(VIDEOWRITER_PROP_HW_ACCELERATION))
{
va_type = params.get<VideoAccelerationType>(VIDEOWRITER_PROP_HW_ACCELERATION, VIDEO_ACCELERATION_NONE);
return false;
/* determine optimal pixel format */
- if (is_color) {
- input_pix_fmt = AV_PIX_FMT_BGR24;
+ if (is_color)
+ {
+ switch (depth)
+ {
+ case CV_8U: input_pix_fmt = AV_PIX_FMT_BGR24; break;
+ default:
+ CV_LOG_WARNING(NULL, "Unsupported input depth for color image: " << depth);
+ return false;
+ }
}
- else {
- input_pix_fmt = AV_PIX_FMT_GRAY8;
+ else
+ {
+ switch (depth)
+ {
+ case CV_8U: input_pix_fmt = AV_PIX_FMT_GRAY8; break;
+ case CV_16U: input_pix_fmt = AV_PIX_FMT_GRAY16LE; break;
+ default:
+ CV_LOG_WARNING(NULL, "Unsupported input depth for grayscale image: " << depth);
+ return false;
+ }
}
+ CV_LOG_DEBUG(NULL, "Selected pixel format: " << av_get_pix_fmt_name(input_pix_fmt));
if (fourcc == -1)
{
AVDictionary *dict = NULL;
#if !defined(NO_GETENV) && (LIBAVUTIL_VERSION_MAJOR >= 53)
char* options = getenv("OPENCV_FFMPEG_WRITER_OPTIONS");
- if (options) {
+ if (options)
+ {
+ CV_LOG_DEBUG(NULL, "VIDEOIO/FFMPEG: using writer options from environment: " << options);
av_dict_parse_string(&dict, options, ";", "|", 0);
}
#endif
int cvRetrieveFrame_FFMPEG(CvCapture_FFMPEG* capture, unsigned char** data, int* step, int* width, int* height, int* cn)
{
- return capture->retrieveFrame(0, data, step, width, height, cn);
+ // Legacy entry point: keeps the old signature; the reported depth is
+ // discarded, so callers of this variant implicitly assume 8-bit data.
+ int depth = CV_8U;
+ return cvRetrieveFrame2_FFMPEG(capture, data, step, width, height, cn, &depth);
+}
+
+// Extended retrieve: additionally reports the element depth (CV_8U/CV_16U)
+// of the returned frame, needed when BGR conversion is turned off and the
+// decoder outputs e.g. GRAY16LE.
+int cvRetrieveFrame2_FFMPEG(CvCapture_FFMPEG* capture, unsigned char** data, int* step, int* width, int* height, int* cn, int* depth)
+{
+ return capture->retrieveFrame(0, data, step, width, height, cn, depth);
}
static CvVideoWriter_FFMPEG* cvCreateVideoWriterWithParams_FFMPEG( const char* filename, int fourcc, double fps,