2 * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
18 #include "./vpx_config.h"
21 #include "third_party/libyuv/include/libyuv/scale.h"
27 #include "vpx/vpx_decoder.h"
28 #include "vpx_ports/mem_ops.h"
29 #include "vpx_ports/vpx_timer.h"
31 #if CONFIG_VP8_DECODER || CONFIG_VP9_DECODER
32 #include "vpx/vp8dx.h"
35 #include "./md5_utils.h"
37 #include "./tools_common.h"
39 #include "./webmdec.h"
/* Name of this executable (set from argv[0] in main_loop); used by the
 * usage/fatal-error helpers when printing messages. */
43 static const char *exec_name;
/* Bundles the generic input state with the WebM demuxer state so both can be
 * passed to read_frame() as a single unit.
 * NOTE(review): the closing "};" of this struct is not visible in this copy;
 * the file appears to have lines elided throughout. */
45 struct VpxDecInputContext {
46 struct VpxInputContext *vpx_input_ctx;
47 struct WebmInputContext *webm_ctx;
/* Command-line option table for vpxdec.  Each arg_def_t records the short
 * name, long name, whether the option takes a value (third argument), and
 * the help text printed by usage_exit() via arg_show_usage(). */
50 static const arg_def_t looparg =
51 ARG_DEF(NULL, "loops", 1, "Number of times to decode the file");
52 static const arg_def_t codecarg = ARG_DEF(NULL, "codec", 1, "Codec to use");
53 static const arg_def_t use_yv12 =
54 ARG_DEF(NULL, "yv12", 0, "Output raw YV12 frames");
55 static const arg_def_t use_i420 =
56 ARG_DEF(NULL, "i420", 0, "Output raw I420 frames");
57 static const arg_def_t flipuvarg =
58 ARG_DEF(NULL, "flipuv", 0, "Flip the chroma planes in the output");
59 static const arg_def_t rawvideo =
60 ARG_DEF(NULL, "rawvideo", 0, "Output raw YUV frames");
61 static const arg_def_t noblitarg =
62 ARG_DEF(NULL, "noblit", 0, "Don't process the decoded frames");
63 static const arg_def_t progressarg =
64 ARG_DEF(NULL, "progress", 0, "Show progress after each frame decodes");
65 static const arg_def_t limitarg =
66 ARG_DEF(NULL, "limit", 1, "Stop decoding after n frames");
67 static const arg_def_t skiparg =
68 ARG_DEF(NULL, "skip", 1, "Skip the first n input frames");
69 static const arg_def_t postprocarg =
70 ARG_DEF(NULL, "postproc", 0, "Postprocess decoded frames");
71 static const arg_def_t summaryarg =
72 ARG_DEF(NULL, "summary", 0, "Show timing summary");
73 static const arg_def_t outputfile =
74 ARG_DEF("o", "output", 1, "Output file name pattern (see below)");
75 static const arg_def_t threadsarg =
76 ARG_DEF("t", "threads", 1, "Max threads to use");
77 static const arg_def_t frameparallelarg =
78 ARG_DEF(NULL, "frame-parallel", 0, "Frame parallel decode");
79 static const arg_def_t verbosearg =
80 ARG_DEF("v", "verbose", 0, "Show version string");
81 static const arg_def_t error_concealment =
82 ARG_DEF(NULL, "error-concealment", 0, "Enable decoder error-concealment");
83 static const arg_def_t scalearg =
84 ARG_DEF("S", "scale", 0, "Scale output frames uniformly");
85 static const arg_def_t continuearg =
86 ARG_DEF("k", "keep-going", 0, "(debug) Continue decoding after error");
87 static const arg_def_t fb_arg =
88 ARG_DEF(NULL, "frame-buffers", 1, "Number of frame buffers to use");
89 static const arg_def_t md5arg =
90 ARG_DEF(NULL, "md5", 0, "Compute the MD5 sum of the decoded frame");
/* Only meaningful for high-bit-depth builds: lets the user force the output
 * bit depth of decoded frames. */
91 #if CONFIG_VP9_HIGHBITDEPTH
92 static const arg_def_t outbitdeptharg =
93 ARG_DEF(NULL, "output-bit-depth", 1, "Output bit-depth for decoded frames");
/* Master NULL-terminated list of general options, passed to arg_show_usage().
 * NOTE(review): the initializer is truncated in this copy -- only the first
 * entry (&codecarg) is visible. */
96 static const arg_def_t *all_args[] = { &codecarg,
116 #if CONFIG_VP9_HIGHBITDEPTH
/* VP8-only postprocessing options; these map onto vp8_postproc_cfg_t flags
 * in the argument-parsing loop of main_loop(). */
121 #if CONFIG_VP8_DECODER
122 static const arg_def_t addnoise_level =
123 ARG_DEF(NULL, "noise-level", 1, "Enable VP8 postproc add noise");
124 static const arg_def_t deblock =
125 ARG_DEF(NULL, "deblock", 0, "Enable VP8 deblocking");
126 static const arg_def_t demacroblock_level = ARG_DEF(
127 NULL, "demacroblock-level", 1, "Enable VP8 demacroblocking, w/ level");
128 static const arg_def_t mfqe =
129 ARG_DEF(NULL, "mfqe", 0, "Enable multiframe quality enhancement");
/* NULL-terminated list of the VP8 postprocessing options above. */
131 static const arg_def_t *vp8_pp_args[] = { &addnoise_level, &deblock,
132 &demacroblock_level, &mfqe, NULL };
/* Scale |src| into |dst| using libyuv, with the given libyuv filter mode.
 * Supports 8-bit I420 and (in high-bit-depth builds) 16-bit-per-sample
 * I42016; both images must use the same format.  Returns libyuv's status
 * code (presumably 0 on success -- see libyuv scale.h). */
136 static INLINE int libyuv_scale(vpx_image_t *src, vpx_image_t *dst,
137 FilterModeEnum mode) {
138 #if CONFIG_VP9_HIGHBITDEPTH
139 if (src->fmt == VPX_IMG_FMT_I42016) {
140 assert(dst->fmt == VPX_IMG_FMT_I42016);
/* 16-bit path: vpx strides are in bytes, so divide by 2 to get the
 * element (uint16_t) strides libyuv expects.
 * NOTE(review): the I420Scale_16(...) call opener is elided in this copy;
 * only its argument list is visible below. */
142 (uint16_t *)src->planes[VPX_PLANE_Y], src->stride[VPX_PLANE_Y] / 2,
143 (uint16_t *)src->planes[VPX_PLANE_U], src->stride[VPX_PLANE_U] / 2,
144 (uint16_t *)src->planes[VPX_PLANE_V], src->stride[VPX_PLANE_V] / 2,
145 src->d_w, src->d_h, (uint16_t *)dst->planes[VPX_PLANE_Y],
146 dst->stride[VPX_PLANE_Y] / 2, (uint16_t *)dst->planes[VPX_PLANE_U],
147 dst->stride[VPX_PLANE_U] / 2, (uint16_t *)dst->planes[VPX_PLANE_V],
148 dst->stride[VPX_PLANE_V] / 2, dst->d_w, dst->d_h, mode);
/* 8-bit path: plain I420 plane-by-plane scale. */
151 assert(src->fmt == VPX_IMG_FMT_I420);
152 assert(dst->fmt == VPX_IMG_FMT_I420);
153 return I420Scale(src->planes[VPX_PLANE_Y], src->stride[VPX_PLANE_Y],
154 src->planes[VPX_PLANE_U], src->stride[VPX_PLANE_U],
155 src->planes[VPX_PLANE_V], src->stride[VPX_PLANE_V], src->d_w,
156 src->d_h, dst->planes[VPX_PLANE_Y], dst->stride[VPX_PLANE_Y],
157 dst->planes[VPX_PLANE_U], dst->stride[VPX_PLANE_U],
158 dst->planes[VPX_PLANE_V], dst->stride[VPX_PLANE_V], dst->d_w,
/* Print full usage text -- general options, VP8 postproc options, the output
 * filename pattern escapes, and the list of built-in decoders -- then exit.
 * (The exit call itself is elided in this copy.) */
163 void usage_exit(void) {
167 "Usage: %s <options> filename\n\n"
170 arg_show_usage(stderr, all_args);
171 #if CONFIG_VP8_DECODER
172 fprintf(stderr, "\nVP8 Postprocessing Options:\n");
173 arg_show_usage(stderr, vp8_pp_args);
/* Explain the -o pattern escapes expanded by generate_filename(). */
176 "\nOutput File Patterns:\n\n"
177 " The -o argument specifies the name of the file(s) to "
178 "write to. If the\n argument does not include any escape "
179 "characters, the output will be\n written to a single file. "
180 "Otherwise, the filename will be calculated by\n expanding "
181 "the following escape characters:\n");
183 "\n\t%%w - Frame width"
184 "\n\t%%h - Frame height"
185 "\n\t%%<n> - Frame number, zero padded to <n> places (1..9)"
186 "\n\n Pattern arguments are only supported in conjunction "
187 "with the --yv12 and\n --i420 options. If the -o option is "
188 "not specified, the output will be\n directed to stdout.\n");
/* Enumerate every decoder compiled into this binary. */
189 fprintf(stderr, "\nIncluded decoders:\n\n");
191 for (i = 0; i < get_vpx_decoder_count(); ++i) {
192 const VpxInterface *const decoder = get_vpx_decoder_by_index(i);
193 fprintf(stderr, " %-6s - %s\n", decoder->name,
194 vpx_codec_iface_name(decoder->codec_interface()));
/* Read one length-prefixed frame from a raw (headerless container) stream.
 * The frame header is a little-endian 32-bit byte count; the payload is
 * read into *buffer, growing it (to 2x the frame size) when needed.
 * On success *bytes_read holds the payload size.
 * NOTE(review): several lines (error returns, buffer-pointer update, final
 * return) are elided in this copy. */
200 static int raw_read_frame(FILE *infile, uint8_t **buffer, size_t *bytes_read,
201 size_t *buffer_size) {
202 char raw_hdr[RAW_FRAME_HDR_SZ];
203 size_t frame_size = 0;
/* A short read of the header is EOF (silent) or a genuine error (warn). */
205 if (fread(raw_hdr, RAW_FRAME_HDR_SZ, 1, infile) != 1) {
206 if (!feof(infile)) warn("Failed to read RAW frame size\n");
/* Sanity-check the declared size: >256MiB is treated as corrupt, and very
 * small values suggest the input is not actually a raw stream. */
208 const size_t kCorruptFrameThreshold = 256 * 1024 * 1024;
209 const size_t kFrameTooSmallThreshold = 256 * 1024;
210 frame_size = mem_get_le32(raw_hdr);
212 if (frame_size > kCorruptFrameThreshold) {
213 warn("Read invalid frame size (%u)\n", (unsigned int)frame_size);
217 if (frame_size < kFrameTooSmallThreshold) {
218 warn("Warning: Read invalid frame size (%u) - not a raw file?\n",
219 (unsigned int)frame_size);
/* Grow the compressed-data buffer with headroom (2x) to limit reallocs. */
222 if (frame_size > *buffer_size) {
223 uint8_t *new_buf = realloc(*buffer, 2 * frame_size);
226 *buffer_size = 2 * frame_size;
228 warn("Failed to allocate compressed data buffer\n");
/* Read the payload; a short read is reported as a failure. */
235 if (fread(*buffer, 1, frame_size, infile) != frame_size) {
236 warn("Failed to read full frame\n");
239 *bytes_read = frame_size;
/* Dispatch a single-frame read to the demuxer matching the detected
 * container type (WebM, raw, or IVF).  *buf/*bytes_in_buffer/*buffer_size
 * follow the same grow-on-demand contract as raw_read_frame(). */
245 static int read_frame(struct VpxDecInputContext *input, uint8_t **buf,
246 size_t *bytes_in_buffer, size_t *buffer_size) {
247 switch (input->vpx_input_ctx->file_type) {
250 return webm_read_frame(input->webm_ctx, buf, bytes_in_buffer);
253 return raw_read_frame(input->vpx_input_ctx->file, buf, bytes_in_buffer,
256 return ivf_read_frame(input->vpx_input_ctx->file, buf, bytes_in_buffer,
/* Fold the three image planes (in the order given by |planes|, which lets
 * the caller emit YUV or YVU) into the running MD5 context, row by row so
 * stride padding is excluded.  High-bit-depth images contribute two bytes
 * per sample, hence the width doubling. */
262 static void update_image_md5(const vpx_image_t *img, const int planes[3],
266 for (i = 0; i < 3; ++i) {
267 const int plane = planes[i];
268 const unsigned char *buf = img->planes[plane];
269 const int stride = img->stride[plane];
270 const int w = vpx_img_plane_width(img, plane) *
271 ((img->fmt & VPX_IMG_FMT_HIGHBITDEPTH) ? 2 : 1);
272 const int h = vpx_img_plane_height(img, plane);
/* Hash exactly w bytes per row.  NOTE(review): the per-row buf += stride
 * advance is elided in this copy. */
274 for (y = 0; y < h; ++y) {
275 MD5Update(md5, buf, w);
/* Write the three image planes (in |planes| order) to |file| row by row,
 * skipping stride padding.  Sample width depends on the build: 2 bytes per
 * sample for high-bit-depth formats, otherwise 1. */
281 static void write_image_file(const vpx_image_t *img, const int planes[3],
284 #if CONFIG_VP9_HIGHBITDEPTH
285 const int bytes_per_sample = ((img->fmt & VPX_IMG_FMT_HIGHBITDEPTH) ? 2 : 1);
287 const int bytes_per_sample = 1;
290 for (i = 0; i < 3; ++i) {
291 const int plane = planes[i];
292 const unsigned char *buf = img->planes[plane];
293 const int stride = img->stride[plane];
294 const int w = vpx_img_plane_width(img, plane);
295 const int h = vpx_img_plane_height(img, plane);
/* NOTE(review): the per-row buf += stride advance is elided in this copy. */
297 for (y = 0; y < h; ++y) {
298 fwrite(buf, bytes_per_sample, w, file);
/* Heuristically detect a raw (length-prefixed) stream: peek the first 32
 * bytes, require a plausible (<256MiB) leading frame size, then ask every
 * built-in decoder whether the payload parses as a stream it recognizes.
 * On a match, record the codec fourcc and a default 30/1 framerate (raw
 * streams carry no timing info). */
304 static int file_is_raw(struct VpxInputContext *input) {
307 vpx_codec_stream_info_t si;
311 if (fread(buf, 1, 32, input->file) == 32) {
314 if (mem_get_le32(buf) < 256 * 1024 * 1024) {
315 for (i = 0; i < get_vpx_decoder_count(); ++i) {
316 const VpxInterface *const decoder = get_vpx_decoder_by_index(i);
/* buf + 4 skips the 4-byte frame-size prefix. */
317 if (!vpx_codec_peek_stream_info(decoder->codec_interface(), buf + 4,
320 input->fourcc = decoder->fourcc;
322 input->height = si.h;
323 input->framerate.numerator = 30;
324 input->framerate.denominator = 1;
/* Print a one-line progress/throughput report; the trailing \r overwrites
 * the previous report.  |dx_time| is the accumulated decode time in
 * microseconds, hence the 1e6 factor to get fps. */
335 static void show_progress(int frame_in, int frame_out, uint64_t dx_time) {
337 "%d decoded frames/%d showed frames in %" PRId64 " us (%.2f fps)\r",
338 frame_in, frame_out, dx_time,
339 (double)frame_out * 1000000.0 / (double)dx_time);
/* One externally managed frame buffer.  NOTE(review): its members (data,
 * size, in_use -- referenced by get/release_vp9_frame_buffer below) are
 * elided in this copy. */
342 struct ExternalFrameBuffer {
/* Fixed-size pool of external frame buffers handed to the decoder via
 * vpx_codec_set_frame_buffer_functions(). */
348 struct ExternalFrameBufferList {
349 int num_external_frame_buffers;
350 struct ExternalFrameBuffer *ext_fb;
353 // Callback used by libvpx to request an external frame buffer. |cb_priv|
354 // Application private data passed into the set function. |min_size| is the
355 // minimum size in bytes needed to decode the next frame. |fb| pointer to the
// frame buffer to populate.  Returns 0 on success, -1 on failure (no list,
// pool exhausted, or allocation failure).
357 static int get_vp9_frame_buffer(void *cb_priv, size_t min_size,
358 vpx_codec_frame_buffer_t *fb) {
360 struct ExternalFrameBufferList *const ext_fb_list =
361 (struct ExternalFrameBufferList *)cb_priv;
362 if (ext_fb_list == NULL) return -1;
364 // Find a free frame buffer.
365 for (i = 0; i < ext_fb_list->num_external_frame_buffers; ++i) {
366 if (!ext_fb_list->ext_fb[i].in_use) break;
// Pool exhausted: every buffer is still referenced by the decoder.
369 if (i == ext_fb_list->num_external_frame_buffers) return -1;
// Grow (replace) the buffer if it is smaller than the decoder needs.
// calloc zero-fills, so the decoder never sees uninitialized memory.
371 if (ext_fb_list->ext_fb[i].size < min_size) {
372 free(ext_fb_list->ext_fb[i].data);
373 ext_fb_list->ext_fb[i].data = (uint8_t *)calloc(min_size, sizeof(uint8_t));
374 if (!ext_fb_list->ext_fb[i].data) return -1;
376 ext_fb_list->ext_fb[i].size = min_size;
// Hand the buffer to libvpx and mark it busy until the release callback.
379 fb->data = ext_fb_list->ext_fb[i].data;
380 fb->size = ext_fb_list->ext_fb[i].size;
381 ext_fb_list->ext_fb[i].in_use = 1;
383 // Set the frame buffer's private data to point at the external frame buffer.
384 fb->priv = &ext_fb_list->ext_fb[i];
388 // Callback used by libvpx when there are no references to the frame buffer.
389 // |cb_priv| user private data passed into the set function. |fb| pointer
390 // to the frame buffer.
// Recovers the ExternalFrameBuffer stashed in fb->priv by
// get_vp9_frame_buffer().  NOTE(review): the lines clearing in_use and
// returning are elided in this copy.
391 static int release_vp9_frame_buffer(void *cb_priv,
392 vpx_codec_frame_buffer_t *fb) {
393 struct ExternalFrameBuffer *const ext_fb =
394 (struct ExternalFrameBuffer *)fb->priv;
/* Expand an output filename |pattern| into |out| (capacity |q_len|),
 * substituting %w (frame width), %h (frame height) and %1..%9 (frame number
 * zero-padded to that many digits); literal text between escapes is copied
 * through.  Dies on an unknown escape or if the result would overflow.
 * NOTE(review): the loop structure and pointer advances are partially
 * elided in this copy. */
400 static void generate_filename(const char *pattern, char *out, size_t q_len,
401 unsigned int d_w, unsigned int d_h,
402 unsigned int frame_in) {
403 const char *p = pattern;
/* Locate the next escape; everything before it is literal text. */
407 char *next_pat = strchr(p, '%');
412 /* parse the pattern */
415 case 'w': snprintf(q, q_len - 1, "%d", d_w); break;
416 case 'h': snprintf(q, q_len - 1, "%d", d_h); break;
417 case '1': snprintf(q, q_len - 1, "%d", frame_in); break;
418 case '2': snprintf(q, q_len - 1, "%02d", frame_in); break;
419 case '3': snprintf(q, q_len - 1, "%03d", frame_in); break;
420 case '4': snprintf(q, q_len - 1, "%04d", frame_in); break;
421 case '5': snprintf(q, q_len - 1, "%05d", frame_in); break;
422 case '6': snprintf(q, q_len - 1, "%06d", frame_in); break;
423 case '7': snprintf(q, q_len - 1, "%07d", frame_in); break;
424 case '8': snprintf(q, q_len - 1, "%08d", frame_in); break;
425 case '9': snprintf(q, q_len - 1, "%09d", frame_in); break;
426 default: die("Unrecognized pattern %%%c\n", p[1]); break;
/* Overflow check after each expansion. */
430 if (pat_len >= q_len - 1) die("Output filename too long.\n");
437 /* copy the next segment */
439 copy_len = strlen(p);
441 copy_len = next_pat - p;
443 if (copy_len >= q_len - 1) die("Output filename too long.\n");
445 memcpy(q, p, copy_len);
/* Return nonzero if |outfile_pattern| names a single output file, i.e. it
 * contains no %1..%9 frame-number escape (which would produce one file per
 * frame).  NOTE(review): the loop advancing |p| over '%' occurrences is
 * elided in this copy. */
454 static int is_single_file(const char *outfile_pattern) {
455 const char *p = outfile_pattern;
459 if (p && p[1] >= '1' && p[1] <= '9')
460 return 0; // pattern contains sequence number, so it's not unique
/* Print a 16-byte MD5 digest as 32 lowercase hex characters followed by the
 * associated filename (md5sum-style output). */
467 static void print_md5(unsigned char digest[16], const char *filename) {
470 for (i = 0; i < 16; ++i) printf("%02x", digest[i]);
471 printf(" %s\n", filename);
/* Open |name| for binary writing; the special name "-" means stdout (put
 * into binary mode for platforms that distinguish).  Dies if the file
 * cannot be opened.  NOTE(review): the return statements are elided in
 * this copy. */
474 static FILE *open_outfile(const char *name) {
475 if (strcmp("-", name) == 0) {
476 set_binary_mode(stdout);
479 FILE *file = fopen(name, "wb");
480 if (!file) fatal("Failed to open output file '%s'", name);
485 #if CONFIG_VP9_HIGHBITDEPTH
/* Return nonzero when the cached bit-depth-shifted image no longer matches
 * the source image's dimensions or the required pixel format, meaning the
 * caller must free and re-allocate it. */
486 static int img_shifted_realloc_required(const vpx_image_t *img,
487 const vpx_image_t *shifted,
488 vpx_img_fmt_t required_fmt) {
489 return img->d_w != shifted->d_w || img->d_h != shifted->d_h ||
490 required_fmt != shifted->fmt;
/* One full decode pass over the input file: parse options, probe the
 * container (IVF/WebM/raw), initialize the chosen decoder, decode every
 * frame (optionally scaling, bit-depth shifting, MD5-ing and writing each
 * one), then print a summary and tear everything down.  Returns a process
 * exit code (initialized to EXIT_FAILURE).
 * NOTE(review): this copy of the function has many interior lines elided;
 * comments below describe only what the visible code establishes. */
494 static int main_loop(int argc, const char **argv_) {
495 vpx_codec_ctx_t decoder;
498 int ret = EXIT_FAILURE;
500 size_t bytes_in_buffer = 0, buffer_size = 0;
502 int frame_in = 0, frame_out = 0, flipuv = 0, noblit = 0;
503 int do_md5 = 0, progress = 0, frame_parallel = 0;
504 int stop_after = 0, postproc = 0, summary = 0, quiet = 1;
508 const VpxInterface *interface = NULL;
509 const VpxInterface *fourcc_interface = NULL;
510 uint64_t dx_time = 0;
512 char **argv, **argi, **argj;
518 vpx_codec_dec_cfg_t cfg = { 0, 0, 0 };
519 #if CONFIG_VP9_HIGHBITDEPTH
520 unsigned int output_bit_depth = 0;
522 #if CONFIG_VP8_DECODER
523 vp8_postproc_cfg_t vp8_pp_cfg = { 0, 0, 0 };
525 int frames_corrupted = 0;
528 vpx_image_t *scaled_img = NULL;
529 #if CONFIG_VP9_HIGHBITDEPTH
530 vpx_image_t *img_shifted = NULL;
532 int frame_avail, got_data, flush_decoder = 0;
533 int num_external_frame_buffers = 0;
534 struct ExternalFrameBufferList ext_fb_list = { 0, NULL };
536 const char *outfile_pattern = NULL;
537 char outfile_name[PATH_MAX] = { 0 };
538 FILE *outfile = NULL;
541 unsigned char md5_digest[16];
543 struct VpxDecInputContext input = { NULL, NULL };
544 struct VpxInputContext vpx_input_ctx;
546 struct WebmInputContext webm_ctx;
547 memset(&(webm_ctx), 0, sizeof(webm_ctx));
548 input.webm_ctx = &webm_ctx;
550 input.vpx_input_ctx = &vpx_input_ctx;
552 /* Parse command line */
553 exec_name = argv_[0];
/* Work on a copy of argv; matched options are consumed in place. */
554 argv = argv_dup(argc - 1, argv_ + 1);
556 for (argi = argj = argv; (*argj = *argi); argi += arg.argv_step) {
557 memset(&arg, 0, sizeof(arg));
560 if (arg_match(&arg, &codecarg, argi)) {
561 interface = get_vpx_decoder_by_name(arg.val);
563 die("Error: Unrecognized argument (%s) to --codec\n", arg.val);
/* --loops is handled by main(); here it is only recognized and skipped. */
564 } else if (arg_match(&arg, &looparg, argi)) {
566 } else if (arg_match(&arg, &outputfile, argi))
567 outfile_pattern = arg.val;
568 else if (arg_match(&arg, &use_yv12, argi)) {
572 } else if (arg_match(&arg, &use_i420, argi)) {
576 } else if (arg_match(&arg, &rawvideo, argi)) {
578 } else if (arg_match(&arg, &flipuvarg, argi))
580 else if (arg_match(&arg, &noblitarg, argi))
582 else if (arg_match(&arg, &progressarg, argi))
584 else if (arg_match(&arg, &limitarg, argi))
585 stop_after = arg_parse_uint(&arg);
586 else if (arg_match(&arg, &skiparg, argi))
587 arg_skip = arg_parse_uint(&arg);
588 else if (arg_match(&arg, &postprocarg, argi))
590 else if (arg_match(&arg, &md5arg, argi))
592 else if (arg_match(&arg, &summaryarg, argi))
594 else if (arg_match(&arg, &threadsarg, argi))
595 cfg.threads = arg_parse_uint(&arg);
596 #if CONFIG_VP9_DECODER
597 else if (arg_match(&arg, &frameparallelarg, argi))
600 else if (arg_match(&arg, &verbosearg, argi))
602 else if (arg_match(&arg, &scalearg, argi))
604 else if (arg_match(&arg, &fb_arg, argi))
605 num_external_frame_buffers = arg_parse_uint(&arg);
606 else if (arg_match(&arg, &continuearg, argi))
608 #if CONFIG_VP9_HIGHBITDEPTH
609 else if (arg_match(&arg, &outbitdeptharg, argi)) {
610 output_bit_depth = arg_parse_uint(&arg);
/* VP8 postprocessing options accumulate into vp8_pp_cfg. */
613 #if CONFIG_VP8_DECODER
614 else if (arg_match(&arg, &addnoise_level, argi)) {
616 vp8_pp_cfg.post_proc_flag |= VP8_ADDNOISE;
617 vp8_pp_cfg.noise_level = arg_parse_uint(&arg);
618 } else if (arg_match(&arg, &demacroblock_level, argi)) {
620 vp8_pp_cfg.post_proc_flag |= VP8_DEMACROBLOCK;
621 vp8_pp_cfg.deblocking_level = arg_parse_uint(&arg);
622 } else if (arg_match(&arg, &deblock, argi)) {
624 vp8_pp_cfg.post_proc_flag |= VP8_DEBLOCK;
625 } else if (arg_match(&arg, &mfqe, argi)) {
627 vp8_pp_cfg.post_proc_flag |= VP8_MFQE;
628 } else if (arg_match(&arg, &error_concealment, argi)) {
631 #endif  // CONFIG_VP8_DECODER
636 /* Check for unrecognized options */
637 for (argi = argv; *argi; argi++)
638 if (argi[0][0] == '-' && strlen(argi[0]) > 1)
639 die("Error: Unrecognized option %s\n", *argi);
641 /* Handle non-option arguments */
/* "-" means read the bitstream from stdin (switched to binary mode). */
649 infile = strcmp(fn, "-") ? fopen(fn, "rb") : set_binary_mode(stdin);
652 fatal("Failed to open input file '%s'", strcmp(fn, "-") ? fn : "stdin");
654 #if CONFIG_OS_SUPPORT
655 /* Make sure we don't dump to the terminal, unless forced to with -o - */
656 if (!outfile_pattern && isatty(fileno(stdout)) && !do_md5 && !noblit) {
658 "Not dumping raw video to your terminal. Use '-o -' to "
/* Probe the container format, trying IVF, then WebM, then raw. */
663 input.vpx_input_ctx->file = infile;
664 if (file_is_ivf(input.vpx_input_ctx))
665 input.vpx_input_ctx->file_type = FILE_TYPE_IVF;
667 else if (file_is_webm(input.webm_ctx, input.vpx_input_ctx))
668 input.vpx_input_ctx->file_type = FILE_TYPE_WEBM;
670 else if (file_is_raw(input.vpx_input_ctx))
671 input.vpx_input_ctx->file_type = FILE_TYPE_RAW;
673 fprintf(stderr, "Unrecognized input file type.\n");
675 fprintf(stderr, "vpxdec was built without WebM container support.\n");
/* Default output is stdout; pre-generate the name when a single file. */
680 outfile_pattern = outfile_pattern ? outfile_pattern : "-";
681 single_file = is_single_file(outfile_pattern);
683 if (!noblit && single_file) {
684 generate_filename(outfile_pattern, outfile_name, PATH_MAX,
685 vpx_input_ctx.width, vpx_input_ctx.height, 0);
689 outfile = open_outfile(outfile_name);
/* Y4M output needs one continuous stream, so patterns are rejected. */
692 if (use_y4m && !noblit) {
695 "YUV4MPEG2 not supported with output patterns,"
696 " try --i420 or --yv12 or --rawvideo.\n");
701 if (vpx_input_ctx.file_type == FILE_TYPE_WEBM) {
702 if (webm_guess_framerate(input.webm_ctx, input.vpx_input_ctx)) {
704 "Failed to guess framerate -- error parsing "
/* Pick the decoder: container fourcc wins (with a warning if it differs
 * from --codec); fall back to the first built-in decoder. */
712 fourcc_interface = get_vpx_decoder_by_fourcc(vpx_input_ctx.fourcc);
713 if (interface && fourcc_interface && interface != fourcc_interface)
714 warn("Header indicates codec: %s\n", fourcc_interface->name);
716 interface = fourcc_interface;
718 if (!interface) interface = get_vpx_decoder_by_index(0);
720 dec_flags = (postproc ? VPX_CODEC_USE_POSTPROC : 0) |
721 (ec_enabled ? VPX_CODEC_USE_ERROR_CONCEALMENT : 0) |
722 (frame_parallel ? VPX_CODEC_USE_FRAME_THREADING : 0);
723 if (vpx_codec_dec_init(&decoder, interface->codec_interface(), &cfg,
725 fprintf(stderr, "Failed to initialize decoder: %s\n",
726 vpx_codec_error(&decoder));
730 if (!quiet) fprintf(stderr, "%s\n", decoder.name);
732 #if CONFIG_VP8_DECODER
733 if (vp8_pp_cfg.post_proc_flag &&
734 vpx_codec_control(&decoder, VP8_SET_POSTPROC, &vp8_pp_cfg)) {
735 fprintf(stderr, "Failed to configure postproc: %s\n",
736 vpx_codec_error(&decoder));
/* --skip: consume (and discard) the first arg_skip frames. */
741 if (arg_skip) fprintf(stderr, "Skipping first %d frames.\n", arg_skip);
743 if (read_frame(&input, &buf, &bytes_in_buffer, &buffer_size)) break;
/* Optional external frame-buffer pool (--frame-buffers). */
747 if (num_external_frame_buffers > 0) {
748 ext_fb_list.num_external_frame_buffers = num_external_frame_buffers;
749 ext_fb_list.ext_fb = (struct ExternalFrameBuffer *)calloc(
750 num_external_frame_buffers, sizeof(*ext_fb_list.ext_fb));
751 if (vpx_codec_set_frame_buffer_functions(&decoder, get_vp9_frame_buffer,
752 release_vp9_frame_buffer,
754 fprintf(stderr, "Failed to configure external frame buffers: %s\n",
755 vpx_codec_error(&decoder));
/* Main decode loop: feed one compressed frame per iteration, then drain
 * any decoded pictures from the codec. */
764 while (frame_avail || got_data) {
765 vpx_codec_iter_t iter = NULL;
767 struct vpx_usec_timer timer;
/* Stop feeding input once --limit frames have been read. */
771 if (!stop_after || frame_in < stop_after) {
772 if (!read_frame(&input, &buf, &bytes_in_buffer, &buffer_size)) {
776 vpx_usec_timer_start(&timer);
778 if (vpx_codec_decode(&decoder, buf, (unsigned int)bytes_in_buffer, NULL,
780 const char *detail = vpx_codec_error_detail(&decoder);
781 warn("Failed to decode frame %d: %s", frame_in,
782 vpx_codec_error(&decoder));
784 if (detail) warn("Additional information: %s", detail);
785 if (!keep_going) goto fail;
788 vpx_usec_timer_mark(&timer);
789 dx_time += vpx_usec_timer_elapsed(&timer);
797 vpx_usec_timer_start(&timer);
800 // Flush the decoder in frame parallel decode.
801 if (vpx_codec_decode(&decoder, NULL, 0, NULL, 0)) {
802 warn("Failed to flush decoder: %s", vpx_codec_error(&decoder));
807 if ((img = vpx_codec_get_frame(&decoder, &iter))) {
812 vpx_usec_timer_mark(&timer);
813 dx_time += (unsigned int)vpx_usec_timer_elapsed(&timer);
/* Corruption tracking is unavailable in frame-parallel mode. */
815 if (!frame_parallel &&
816 vpx_codec_control(&decoder, VP8D_GET_FRAME_CORRUPTED, &corrupted)) {
817 warn("Failed VP8_GET_FRAME_CORRUPTED: %s", vpx_codec_error(&decoder));
818 if (!keep_going) goto fail;
820 frames_corrupted += corrupted;
822 if (progress) show_progress(frame_in, frame_out, dx_time);
/* Output path: write (and/or MD5) each decoded picture. */
824 if (!noblit && img) {
825 const int PLANES_YUV[] = { VPX_PLANE_Y, VPX_PLANE_U, VPX_PLANE_V };
826 const int PLANES_YVU[] = { VPX_PLANE_Y, VPX_PLANE_V, VPX_PLANE_U };
827 const int *planes = flipuv ? PLANES_YVU : PLANES_YUV;
/* --scale: allocate the scaling target lazily on the first frame. */
830 if (frame_out == 1) {
831 // If the output frames are to be scaled to a fixed display size then
832 // use the width and height specified in the container. If either of
833 // these is set to 0, use the display size set in the first frame
834 // header. If that is unavailable, use the raw decoded size of the
835 // first decoded frame.
836 int render_width = vpx_input_ctx.width;
837 int render_height = vpx_input_ctx.height;
838 if (!render_width || !render_height) {
840 if (vpx_codec_control(&decoder, VP9D_GET_DISPLAY_SIZE,
842 // As last resort use size of first frame as display size.
843 render_width = img->d_w;
844 render_height = img->d_h;
846 render_width = render_size[0];
847 render_height = render_size[1];
851 vpx_img_alloc(NULL, img->fmt, render_width, render_height, 16);
852 scaled_img->bit_depth = img->bit_depth;
855 if (img->d_w != scaled_img->d_w || img->d_h != scaled_img->d_h) {
857 libyuv_scale(img, scaled_img, kFilterBox);
861 "Failed to scale output frame: %s.\n"
862 "Scaling is disabled in this configuration. "
863 "To enable scaling, configure with --enable-libyuv\n",
864 vpx_codec_error(&decoder));
869 #if CONFIG_VP9_HIGHBITDEPTH
870 // Default to codec bit depth if output bit depth not set
871 if (!output_bit_depth && single_file && !do_md5) {
872 output_bit_depth = img->bit_depth;
874 // Shift up or down if necessary
875 if (output_bit_depth != 0 && output_bit_depth != img->bit_depth) {
876 const vpx_img_fmt_t shifted_fmt =
877 output_bit_depth == 8
878 ? img->fmt ^ (img->fmt & VPX_IMG_FMT_HIGHBITDEPTH)
879 : img->fmt | VPX_IMG_FMT_HIGHBITDEPTH;
881 img_shifted_realloc_required(img, img_shifted, shifted_fmt)) {
882 vpx_img_free(img_shifted);
887 vpx_img_alloc(NULL, shifted_fmt, img->d_w, img->d_h, 16);
888 img_shifted->bit_depth = output_bit_depth;
890 if (output_bit_depth > img->bit_depth) {
891 vpx_img_upshift(img_shifted, img, output_bit_depth - img->bit_depth);
893 vpx_img_downshift(img_shifted, img,
894 img->bit_depth - output_bit_depth);
/* Y4M output: stream header on the first frame, then per-frame headers. */
902 char buf[Y4M_BUFFER_SIZE] = { 0 };
904 if (img->fmt == VPX_IMG_FMT_I440 || img->fmt == VPX_IMG_FMT_I44016) {
905 fprintf(stderr, "Cannot produce y4m output for 440 sampling.\n");
908 if (frame_out == 1) {
910 len = y4m_write_file_header(
911 buf, sizeof(buf), vpx_input_ctx.width, vpx_input_ctx.height,
912 &vpx_input_ctx.framerate, img->fmt, img->bit_depth);
914 MD5Update(&md5_ctx, (md5byte *)buf, (unsigned int)len);
921 len = y4m_write_frame_header(buf, sizeof(buf));
923 MD5Update(&md5_ctx, (md5byte *)buf, (unsigned int)len);
928 if (frame_out == 1) {
929 // Check if --yv12 or --i420 options are consistent with the
930 // bit-stream decoded
932 if (img->fmt != VPX_IMG_FMT_I420 &&
933 img->fmt != VPX_IMG_FMT_I42016) {
934 fprintf(stderr, "Cannot produce i420 output for bit-stream.\n");
939 if ((img->fmt != VPX_IMG_FMT_I420 &&
940 img->fmt != VPX_IMG_FMT_YV12) ||
941 img->bit_depth != 8) {
942 fprintf(stderr, "Cannot produce yv12 output for bit-stream.\n");
/* Single-file mode: accumulate MD5 or append frames to the one file. */
950 update_image_md5(img, planes, &md5_ctx);
952 write_image_file(img, planes, outfile);
/* Multi-file mode: one file (and one MD5) per frame. */
955 generate_filename(outfile_pattern, outfile_name, PATH_MAX, img->d_w,
959 update_image_md5(img, planes, &md5_ctx);
960 MD5Final(md5_digest, &md5_ctx);
961 print_md5(md5_digest, outfile_name);
963 outfile = open_outfile(outfile_name);
964 write_image_file(img, planes, outfile);
/* Post-loop reporting and teardown. */
971 if (summary || progress) {
972 show_progress(frame_in, frame_out, dx_time);
973 fprintf(stderr, "\n");
976 if (frames_corrupted) {
977 fprintf(stderr, "WARNING: %d frames corrupted.\n", frames_corrupted);
984 if (vpx_codec_destroy(&decoder)) {
985 fprintf(stderr, "Failed to destroy decoder: %s\n",
986 vpx_codec_error(&decoder));
/* Single-file MD5 is finalized once, after all frames were hashed. */
991 if (!noblit && single_file) {
993 MD5Final(md5_digest, &md5_ctx);
994 print_md5(md5_digest, outfile_name);
1001 if (input.vpx_input_ctx->file_type == FILE_TYPE_WEBM)
1002 webm_free(input.webm_ctx);
/* The WebM demuxer owns |buf|; otherwise it was (re)allocated here. */
1005 if (input.vpx_input_ctx->file_type != FILE_TYPE_WEBM) free(buf);
1007 if (scaled_img) vpx_img_free(scaled_img);
1008 #if CONFIG_VP9_HIGHBITDEPTH
1009 if (img_shifted) vpx_img_free(img_shifted);
/* Release the external frame-buffer pool. */
1012 for (i = 0; i < ext_fb_list.num_external_frame_buffers; ++i) {
1013 free(ext_fb_list.ext_fb[i].data);
1015 free(ext_fb_list.ext_fb);
/* Program entry point: pre-scan the arguments only for --loops (everything
 * else is parsed inside main_loop), then run the full decode pass that many
 * times, stopping early on the first error. */
1023 int main(int argc, const char **argv_) {
1024 unsigned int loops = 1, i;
1025 char **argv, **argi, **argj;
/* Scan a private copy of argv so main_loop sees the original intact. */
1029 argv = argv_dup(argc - 1, argv_ + 1);
1030 for (argi = argj = argv; (*argj = *argi); argi += arg.argv_step) {
1031 memset(&arg, 0, sizeof(arg));
1034 if (arg_match(&arg, &looparg, argi)) {
1035 loops = arg_parse_uint(&arg);
1040 for (i = 0; !error && i < loops; i++) error = main_loop(argc, argv_);