avutils: Add missing space in doc
platform/upstream/gstreamer.git: subprojects/gst-libav/ext/libav/gstavutils.c
/* GStreamer
 * Copyright (c) 2009 Edward Hervey <bilboed@bilboed.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstavutils.h"
#ifdef HAVE_UNISTD_H
#include <unistd.h>
#endif
#ifdef __APPLE__
#include <sys/sysctl.h>
#endif
#ifdef __MINGW32__
#include <stdlib.h>
#endif

#include <libavutil/mem.h>

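/* gst_ffmpeg_get_codecid_longname() returns the descriptive name libav
 * registers for a codec id, trying the decoder first and falling back to the
 * encoder. Minimal usage sketch (illustrative only, AV_CODEC_ID_H264 picked
 * arbitrarily):
 *
 *   const gchar *name = gst_ffmpeg_get_codecid_longname (AV_CODEC_ID_H264);
 *   if (name)
 *     GST_INFO ("codec: %s", name);
 *   else
 *     GST_WARNING ("codec id not known to libav");
 */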
const gchar *
gst_ffmpeg_get_codecid_longname (enum AVCodecID codec_id)
{
  const AVCodec *codec;
  /* Let's use what ffmpeg can provide us */

  if ((codec = avcodec_find_decoder (codec_id)) ||
      (codec = avcodec_find_encoder (codec_id)))
    return codec->long_name;
  return NULL;
}

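/* av_smp_format_depth() maps an AVSampleFormat to its per-sample size in
 * bytes, or -1 for formats not handled below. Illustrative sketch of a
 * typical caller ('channels' and 'nsamples' are hypothetical variables):
 *
 *   gint depth = av_smp_format_depth (AV_SAMPLE_FMT_S16);   returns 2
 *   gsize bytes = (gsize) depth * channels * nsamples;
 */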
gint
av_smp_format_depth (enum AVSampleFormat smp_fmt)
{
  gint depth = -1;
  switch (smp_fmt) {
    case AV_SAMPLE_FMT_U8:
    case AV_SAMPLE_FMT_U8P:
      depth = 1;
      break;
    case AV_SAMPLE_FMT_S16:
    case AV_SAMPLE_FMT_S16P:
      depth = 2;
      break;
    case AV_SAMPLE_FMT_S32:
    case AV_SAMPLE_FMT_S32P:
    case AV_SAMPLE_FMT_FLT:
    case AV_SAMPLE_FMT_FLTP:
      depth = 4;
      break;
    case AV_SAMPLE_FMT_DBL:
    case AV_SAMPLE_FMT_DBLP:
      depth = 8;
      break;
    default:
      GST_ERROR ("UNHANDLED SAMPLE FORMAT!");
      break;
  }
  return depth;
}


/*
 * Fill in pointers to memory in an AVFrame, where
 * everything is aligned by 4 (as required by X).
 * This is mostly a copy from imgconvert.c with some
 * small changes.
 */

#define FF_COLOR_RGB      0     /* RGB color space */
#define FF_COLOR_GRAY     1     /* gray color space */
#define FF_COLOR_YUV      2     /* YUV color space. 16 <= Y <= 235, 16 <= U, V <= 240 */
#define FF_COLOR_YUV_JPEG 3     /* YUV color space. 0 <= Y <= 255, 0 <= U, V <= 255 */

#define FF_PIXEL_PLANAR   0     /* each channel has one component in AVFrame */
#define FF_PIXEL_PACKED   1     /* only one component containing all the channels */
#define FF_PIXEL_PALETTE  2     /* one component containing indexes for a palette */

typedef struct PixFmtInfo
{
  const char *name;
  uint8_t nb_channels;          /* number of channels (including alpha) */
  uint8_t color_type;           /* color type (see FF_COLOR_xxx constants) */
  uint8_t pixel_type;           /* pixel storage type (see FF_PIXEL_xxx constants) */
  uint8_t is_alpha:1;           /* true if alpha can be specified */
  uint8_t x_chroma_shift;       /* X chroma subsampling factor is 2 ^ shift */
  uint8_t y_chroma_shift;       /* Y chroma subsampling factor is 2 ^ shift */
  uint8_t depth;                /* bit depth of the color components */
} PixFmtInfo;
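
/* Reading the chroma shift fields: a shift of 1 halves that dimension for the
 * chroma planes. For AV_PIX_FMT_YUV420P both shifts are 1, so each chroma
 * plane is (width / 2) x (height / 2); for AV_PIX_FMT_YUV422P only
 * x_chroma_shift is 1, giving half-width but full-height chroma planes. */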


/* this table gives more information about formats */
static PixFmtInfo pix_fmt_info[AV_PIX_FMT_NB];
void
gst_ffmpeg_init_pix_fmt_info (void)
{
  /* YUV formats */
  pix_fmt_info[AV_PIX_FMT_YUV420P].name = g_strdup ("yuv420p");
  pix_fmt_info[AV_PIX_FMT_YUV420P].nb_channels = 3;
  pix_fmt_info[AV_PIX_FMT_YUV420P].color_type = FF_COLOR_YUV;
  pix_fmt_info[AV_PIX_FMT_YUV420P].pixel_type = FF_PIXEL_PLANAR;
  pix_fmt_info[AV_PIX_FMT_YUV420P].depth = 8;
  pix_fmt_info[AV_PIX_FMT_YUV420P].x_chroma_shift = 1;
  pix_fmt_info[AV_PIX_FMT_YUV420P].y_chroma_shift = 1;

  pix_fmt_info[AV_PIX_FMT_YUV422P].name = g_strdup ("yuv422p");
  pix_fmt_info[AV_PIX_FMT_YUV422P].nb_channels = 3;
  pix_fmt_info[AV_PIX_FMT_YUV422P].color_type = FF_COLOR_YUV;
  pix_fmt_info[AV_PIX_FMT_YUV422P].pixel_type = FF_PIXEL_PLANAR;
  pix_fmt_info[AV_PIX_FMT_YUV422P].depth = 8;
  pix_fmt_info[AV_PIX_FMT_YUV422P].x_chroma_shift = 1;
  pix_fmt_info[AV_PIX_FMT_YUV422P].y_chroma_shift = 0;

  pix_fmt_info[AV_PIX_FMT_YUV444P].name = g_strdup ("yuv444p");
  pix_fmt_info[AV_PIX_FMT_YUV444P].nb_channels = 3;
  pix_fmt_info[AV_PIX_FMT_YUV444P].color_type = FF_COLOR_YUV;
  pix_fmt_info[AV_PIX_FMT_YUV444P].pixel_type = FF_PIXEL_PLANAR;
  pix_fmt_info[AV_PIX_FMT_YUV444P].depth = 8;
  pix_fmt_info[AV_PIX_FMT_YUV444P].x_chroma_shift = 0;
  pix_fmt_info[AV_PIX_FMT_YUV444P].y_chroma_shift = 0;

  pix_fmt_info[AV_PIX_FMT_YUYV422].name = g_strdup ("yuv422");
  pix_fmt_info[AV_PIX_FMT_YUYV422].nb_channels = 1;
  pix_fmt_info[AV_PIX_FMT_YUYV422].color_type = FF_COLOR_YUV;
  pix_fmt_info[AV_PIX_FMT_YUYV422].pixel_type = FF_PIXEL_PACKED;
  pix_fmt_info[AV_PIX_FMT_YUYV422].depth = 8;
  pix_fmt_info[AV_PIX_FMT_YUYV422].x_chroma_shift = 1;
  pix_fmt_info[AV_PIX_FMT_YUYV422].y_chroma_shift = 0;

  pix_fmt_info[AV_PIX_FMT_YUV410P].name = g_strdup ("yuv410p");
  pix_fmt_info[AV_PIX_FMT_YUV410P].nb_channels = 3;
  pix_fmt_info[AV_PIX_FMT_YUV410P].color_type = FF_COLOR_YUV;
  pix_fmt_info[AV_PIX_FMT_YUV410P].pixel_type = FF_PIXEL_PLANAR;
  pix_fmt_info[AV_PIX_FMT_YUV410P].depth = 8;
  pix_fmt_info[AV_PIX_FMT_YUV410P].x_chroma_shift = 2;
  pix_fmt_info[AV_PIX_FMT_YUV410P].y_chroma_shift = 2;

  pix_fmt_info[AV_PIX_FMT_YUV411P].name = g_strdup ("yuv411p");
  pix_fmt_info[AV_PIX_FMT_YUV411P].nb_channels = 3;
  pix_fmt_info[AV_PIX_FMT_YUV411P].color_type = FF_COLOR_YUV;
  pix_fmt_info[AV_PIX_FMT_YUV411P].pixel_type = FF_PIXEL_PLANAR;
  pix_fmt_info[AV_PIX_FMT_YUV411P].depth = 8;
  pix_fmt_info[AV_PIX_FMT_YUV411P].x_chroma_shift = 2;
  pix_fmt_info[AV_PIX_FMT_YUV411P].y_chroma_shift = 0;

  /* JPEG YUV */
  pix_fmt_info[AV_PIX_FMT_YUVJ420P].name = g_strdup ("yuvj420p");
  pix_fmt_info[AV_PIX_FMT_YUVJ420P].nb_channels = 3;
  pix_fmt_info[AV_PIX_FMT_YUVJ420P].color_type = FF_COLOR_YUV_JPEG;
  pix_fmt_info[AV_PIX_FMT_YUVJ420P].pixel_type = FF_PIXEL_PLANAR;
  pix_fmt_info[AV_PIX_FMT_YUVJ420P].depth = 8;
  pix_fmt_info[AV_PIX_FMT_YUVJ420P].x_chroma_shift = 1;
  pix_fmt_info[AV_PIX_FMT_YUVJ420P].y_chroma_shift = 1;

  pix_fmt_info[AV_PIX_FMT_YUVJ422P].name = g_strdup ("yuvj422p");
  pix_fmt_info[AV_PIX_FMT_YUVJ422P].nb_channels = 3;
  pix_fmt_info[AV_PIX_FMT_YUVJ422P].color_type = FF_COLOR_YUV_JPEG;
  pix_fmt_info[AV_PIX_FMT_YUVJ422P].pixel_type = FF_PIXEL_PLANAR;
  pix_fmt_info[AV_PIX_FMT_YUVJ422P].depth = 8;
  pix_fmt_info[AV_PIX_FMT_YUVJ422P].x_chroma_shift = 1;
  pix_fmt_info[AV_PIX_FMT_YUVJ422P].y_chroma_shift = 0;

  pix_fmt_info[AV_PIX_FMT_YUVJ444P].name = g_strdup ("yuvj444p");
  pix_fmt_info[AV_PIX_FMT_YUVJ444P].nb_channels = 3;
  pix_fmt_info[AV_PIX_FMT_YUVJ444P].color_type = FF_COLOR_YUV_JPEG;
  pix_fmt_info[AV_PIX_FMT_YUVJ444P].pixel_type = FF_PIXEL_PLANAR;
  pix_fmt_info[AV_PIX_FMT_YUVJ444P].depth = 8;
  pix_fmt_info[AV_PIX_FMT_YUVJ444P].x_chroma_shift = 0;
  pix_fmt_info[AV_PIX_FMT_YUVJ444P].y_chroma_shift = 0;

  /* RGB formats */
  pix_fmt_info[AV_PIX_FMT_RGB24].name = g_strdup ("rgb24");
  pix_fmt_info[AV_PIX_FMT_RGB24].nb_channels = 3;
  pix_fmt_info[AV_PIX_FMT_RGB24].color_type = FF_COLOR_RGB;
  pix_fmt_info[AV_PIX_FMT_RGB24].pixel_type = FF_PIXEL_PACKED;
  pix_fmt_info[AV_PIX_FMT_RGB24].depth = 8;
  pix_fmt_info[AV_PIX_FMT_RGB24].x_chroma_shift = 0;
  pix_fmt_info[AV_PIX_FMT_RGB24].y_chroma_shift = 0;

  pix_fmt_info[AV_PIX_FMT_BGR24].name = g_strdup ("bgr24");
  pix_fmt_info[AV_PIX_FMT_BGR24].nb_channels = 3;
  pix_fmt_info[AV_PIX_FMT_BGR24].color_type = FF_COLOR_RGB;
  pix_fmt_info[AV_PIX_FMT_BGR24].pixel_type = FF_PIXEL_PACKED;
  pix_fmt_info[AV_PIX_FMT_BGR24].depth = 8;
  pix_fmt_info[AV_PIX_FMT_BGR24].x_chroma_shift = 0;
  pix_fmt_info[AV_PIX_FMT_BGR24].y_chroma_shift = 0;

  pix_fmt_info[AV_PIX_FMT_RGB32].name = g_strdup ("rgba32");
  pix_fmt_info[AV_PIX_FMT_RGB32].nb_channels = 4;
  pix_fmt_info[AV_PIX_FMT_RGB32].is_alpha = 1;
  pix_fmt_info[AV_PIX_FMT_RGB32].color_type = FF_COLOR_RGB;
  pix_fmt_info[AV_PIX_FMT_RGB32].pixel_type = FF_PIXEL_PACKED;
  pix_fmt_info[AV_PIX_FMT_RGB32].depth = 8;
  pix_fmt_info[AV_PIX_FMT_RGB32].x_chroma_shift = 0;
  pix_fmt_info[AV_PIX_FMT_RGB32].y_chroma_shift = 0;

  pix_fmt_info[AV_PIX_FMT_RGB565].name = g_strdup ("rgb565");
  pix_fmt_info[AV_PIX_FMT_RGB565].nb_channels = 3;
  pix_fmt_info[AV_PIX_FMT_RGB565].color_type = FF_COLOR_RGB;
  pix_fmt_info[AV_PIX_FMT_RGB565].pixel_type = FF_PIXEL_PACKED;
  pix_fmt_info[AV_PIX_FMT_RGB565].depth = 5;
  pix_fmt_info[AV_PIX_FMT_RGB565].x_chroma_shift = 0;
  pix_fmt_info[AV_PIX_FMT_RGB565].y_chroma_shift = 0;

  pix_fmt_info[AV_PIX_FMT_RGB555].name = g_strdup ("rgb555");
  pix_fmt_info[AV_PIX_FMT_RGB555].nb_channels = 4;
  pix_fmt_info[AV_PIX_FMT_RGB555].is_alpha = 1;
  pix_fmt_info[AV_PIX_FMT_RGB555].color_type = FF_COLOR_RGB;
  pix_fmt_info[AV_PIX_FMT_RGB555].pixel_type = FF_PIXEL_PACKED;
  pix_fmt_info[AV_PIX_FMT_RGB555].depth = 5;
  pix_fmt_info[AV_PIX_FMT_RGB555].x_chroma_shift = 0;
  pix_fmt_info[AV_PIX_FMT_RGB555].y_chroma_shift = 0;

  /* gray / mono formats */
  pix_fmt_info[AV_PIX_FMT_GRAY8].name = g_strdup ("gray");
  pix_fmt_info[AV_PIX_FMT_GRAY8].nb_channels = 1;
  pix_fmt_info[AV_PIX_FMT_GRAY8].color_type = FF_COLOR_GRAY;
  pix_fmt_info[AV_PIX_FMT_GRAY8].pixel_type = FF_PIXEL_PLANAR;
  pix_fmt_info[AV_PIX_FMT_GRAY8].depth = 8;

  pix_fmt_info[AV_PIX_FMT_MONOWHITE].name = g_strdup ("monow");
  pix_fmt_info[AV_PIX_FMT_MONOWHITE].nb_channels = 1;
  pix_fmt_info[AV_PIX_FMT_MONOWHITE].color_type = FF_COLOR_GRAY;
  pix_fmt_info[AV_PIX_FMT_MONOWHITE].pixel_type = FF_PIXEL_PLANAR;
  pix_fmt_info[AV_PIX_FMT_MONOWHITE].depth = 1;

  pix_fmt_info[AV_PIX_FMT_MONOBLACK].name = g_strdup ("monob");
  pix_fmt_info[AV_PIX_FMT_MONOBLACK].nb_channels = 1;
  pix_fmt_info[AV_PIX_FMT_MONOBLACK].color_type = FF_COLOR_GRAY;
  pix_fmt_info[AV_PIX_FMT_MONOBLACK].pixel_type = FF_PIXEL_PLANAR;
  pix_fmt_info[AV_PIX_FMT_MONOBLACK].depth = 1;

  /* paletted formats */
  pix_fmt_info[AV_PIX_FMT_PAL8].name = g_strdup ("pal8");
  pix_fmt_info[AV_PIX_FMT_PAL8].nb_channels = 4;
  pix_fmt_info[AV_PIX_FMT_PAL8].is_alpha = 1;
  pix_fmt_info[AV_PIX_FMT_PAL8].color_type = FF_COLOR_RGB;
  pix_fmt_info[AV_PIX_FMT_PAL8].pixel_type = FF_PIXEL_PALETTE;
  pix_fmt_info[AV_PIX_FMT_PAL8].depth = 8;

  pix_fmt_info[AV_PIX_FMT_YUVA420P].name = g_strdup ("yuva420p");
  pix_fmt_info[AV_PIX_FMT_YUVA420P].nb_channels = 4;
  pix_fmt_info[AV_PIX_FMT_YUVA420P].is_alpha = 1;
  pix_fmt_info[AV_PIX_FMT_YUVA420P].color_type = FF_COLOR_YUV;
  pix_fmt_info[AV_PIX_FMT_YUVA420P].pixel_type = FF_PIXEL_PLANAR;
  pix_fmt_info[AV_PIX_FMT_YUVA420P].depth = 8;
  pix_fmt_info[AV_PIX_FMT_YUVA420P].x_chroma_shift = 1;
  pix_fmt_info[AV_PIX_FMT_YUVA420P].y_chroma_shift = 1;
}

int
gst_ffmpeg_avpicture_get_size (int pix_fmt, int width, int height)
{
  AVFrame dummy_pict;

  return gst_ffmpeg_avpicture_fill (&dummy_pict, NULL, pix_fmt, width, height);
}

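/* The rounding helpers below work on power-of-two boundaries. A few worked
 * values as a sanity check: ROUND_UP_4 (17) is 20, ROUND_UP_2 (11) is 12 and
 * DIV_ROUND_UP_X (17, 1) is 9, i.e. a ceiling division by 2. */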
#define GEN_MASK(x) ((1<<(x))-1)
#define ROUND_UP_X(v,x) (((v) + GEN_MASK(x)) & ~GEN_MASK(x))
#define ROUND_UP_2(x) ROUND_UP_X (x, 1)
#define ROUND_UP_4(x) ROUND_UP_X (x, 2)
#define ROUND_UP_8(x) ROUND_UP_X (x, 3)
#define DIV_ROUND_UP_X(v,x) (((v) + GEN_MASK(x)) >> (x))

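/* Lays out the planes for 'pix_fmt' inside the buffer at 'ptr', filling in
 * picture->data[] and picture->linesize[], and returns the total size in
 * bytes (or -1 for unhandled formats). Worked example (illustrative numbers
 * only): a 17x11 AV_PIX_FMT_YUV420P frame gets a luma stride of
 * ROUND_UP_4 (17) = 20 and a luma plane of 20 * ROUND_UP_2 (11) = 240 bytes;
 * each chroma plane has stride ROUND_UP_4 (9) = 12 and size 12 * 6 = 72
 * bytes, so the function returns 240 + 2 * 72 = 384. Calling it with a NULL
 * 'ptr', as gst_ffmpeg_avpicture_get_size() does above, only computes that
 * size. */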
int
gst_ffmpeg_avpicture_fill (AVFrame * picture,
    uint8_t * ptr, enum AVPixelFormat pix_fmt, int width, int height)
{
  int size, w2, h2, size2;
  int stride, stride2;
  PixFmtInfo *pinfo;

  pinfo = &pix_fmt_info[pix_fmt];

  switch (pix_fmt) {
    case AV_PIX_FMT_YUV420P:
    case AV_PIX_FMT_YUV422P:
    case AV_PIX_FMT_YUV444P:
    case AV_PIX_FMT_YUV410P:
    case AV_PIX_FMT_YUV411P:
    case AV_PIX_FMT_YUVJ420P:
    case AV_PIX_FMT_YUVJ422P:
    case AV_PIX_FMT_YUVJ444P:
      stride = ROUND_UP_4 (width);
      h2 = ROUND_UP_X (height, pinfo->y_chroma_shift);
      size = stride * h2;
      w2 = DIV_ROUND_UP_X (width, pinfo->x_chroma_shift);
      stride2 = ROUND_UP_4 (w2);
      h2 = DIV_ROUND_UP_X (height, pinfo->y_chroma_shift);
      size2 = stride2 * h2;
      picture->data[0] = ptr;
      picture->data[1] = picture->data[0] + size;
      picture->data[2] = picture->data[1] + size2;
      picture->data[3] = NULL;
      picture->linesize[0] = stride;
      picture->linesize[1] = stride2;
      picture->linesize[2] = stride2;
      picture->linesize[3] = 0;
      GST_DEBUG ("planes %d %d %d", 0, size, size + size2);
      GST_DEBUG ("strides %d %d %d", stride, stride2, stride2);
      return size + 2 * size2;
    case AV_PIX_FMT_YUVA420P:
      stride = ROUND_UP_4 (width);
      h2 = ROUND_UP_X (height, pinfo->y_chroma_shift);
      size = stride * h2;
      w2 = DIV_ROUND_UP_X (width, pinfo->x_chroma_shift);
      stride2 = ROUND_UP_4 (w2);
      h2 = DIV_ROUND_UP_X (height, pinfo->y_chroma_shift);
      size2 = stride2 * h2;
      picture->data[0] = ptr;
      picture->data[1] = picture->data[0] + size;
      picture->data[2] = picture->data[1] + size2;
      picture->data[3] = picture->data[2] + size2;
      picture->linesize[0] = stride;
      picture->linesize[1] = stride2;
      picture->linesize[2] = stride2;
      picture->linesize[3] = stride;
      GST_DEBUG ("planes %d %d %d %d", 0, size, size + size2, size + 2 * size2);
      GST_DEBUG ("strides %d %d %d %d", stride, stride2, stride2, stride);
      return 2 * size + 2 * size2;
    case AV_PIX_FMT_RGB24:
    case AV_PIX_FMT_BGR24:
      stride = ROUND_UP_4 (width * 3);
      size = stride * height;
      picture->data[0] = ptr;
      picture->data[1] = NULL;
      picture->data[2] = NULL;
      picture->data[3] = NULL;
      picture->linesize[0] = stride;
      picture->linesize[1] = 0;
      picture->linesize[2] = 0;
      picture->linesize[3] = 0;
      return size;
      /*case AV_PIX_FMT_AYUV4444:
         case AV_PIX_FMT_BGR32:
         case AV_PIX_FMT_BGRA32:
         case AV_PIX_FMT_RGB32: */
    case AV_PIX_FMT_RGB32:
      stride = width * 4;
      size = stride * height;
      picture->data[0] = ptr;
      picture->data[1] = NULL;
      picture->data[2] = NULL;
      picture->data[3] = NULL;
      picture->linesize[0] = stride;
      picture->linesize[1] = 0;
      picture->linesize[2] = 0;
      picture->linesize[3] = 0;
      return size;
    case AV_PIX_FMT_RGB555:
    case AV_PIX_FMT_RGB565:
    case AV_PIX_FMT_YUYV422:
    case AV_PIX_FMT_UYVY422:
      stride = ROUND_UP_4 (width * 2);
      size = stride * height;
      picture->data[0] = ptr;
      picture->data[1] = NULL;
      picture->data[2] = NULL;
      picture->data[3] = NULL;
      picture->linesize[0] = stride;
      picture->linesize[1] = 0;
      picture->linesize[2] = 0;
      picture->linesize[3] = 0;
      return size;
    case AV_PIX_FMT_UYYVYY411:
      /* FIXME, probably not the right stride */
      stride = ROUND_UP_4 (width);
      size = stride * height;
      picture->data[0] = ptr;
      picture->data[1] = NULL;
      picture->data[2] = NULL;
      picture->data[3] = NULL;
      picture->linesize[0] = width + width / 2;
      picture->linesize[1] = 0;
      picture->linesize[2] = 0;
      picture->linesize[3] = 0;
      return size + size / 2;
    case AV_PIX_FMT_GRAY8:
      stride = ROUND_UP_4 (width);
      size = stride * height;
      picture->data[0] = ptr;
      picture->data[1] = NULL;
      picture->data[2] = NULL;
      picture->data[3] = NULL;
      picture->linesize[0] = stride;
      picture->linesize[1] = 0;
      picture->linesize[2] = 0;
      picture->linesize[3] = 0;
      return size;
    case AV_PIX_FMT_MONOWHITE:
    case AV_PIX_FMT_MONOBLACK:
      stride = ROUND_UP_4 ((width + 7) >> 3);
      size = stride * height;
      picture->data[0] = ptr;
      picture->data[1] = NULL;
      picture->data[2] = NULL;
      picture->data[3] = NULL;
      picture->linesize[0] = stride;
      picture->linesize[1] = 0;
      picture->linesize[2] = 0;
      picture->linesize[3] = 0;
      return size;
    case AV_PIX_FMT_PAL8:
      /* already forced to be with stride, so same result as other function */
      stride = ROUND_UP_4 (width);
      size = stride * height;
      picture->data[0] = ptr;
      picture->data[1] = ptr + size;    /* palette is stored here as 256 32 bit words */
      picture->data[2] = NULL;
      picture->data[3] = NULL;
      picture->linesize[0] = stride;
      picture->linesize[1] = 4;
      picture->linesize[2] = 0;
      picture->linesize[3] = 0;
      return size + 256 * 4;
    default:
      picture->data[0] = NULL;
      picture->data[1] = NULL;
      picture->data[2] = NULL;
      picture->data[3] = NULL;
      return -1;
  }

  return 0;
}

/* Create a GstBuffer of the requested size.
 * The memory will be allocated by ffmpeg, making sure it's properly aligned
 * for any processing. */

GstBuffer *
new_aligned_buffer (gint size)
{
  GstBuffer *buf;
  guint8 *data;

  data = av_malloc (size);

  buf = gst_buffer_new ();
  gst_buffer_append_memory (buf,
      gst_memory_new_wrapped (0, data, size, 0, size, data, av_free));

  return buf;
}
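
/* Illustrative use of new_aligned_buffer() ('out_size' and 'out_data' are
 * hypothetical):
 *
 *   GstBuffer *outbuf = new_aligned_buffer (out_size);
 *   gst_buffer_fill (outbuf, 0, out_data, out_size);
 *
 * av_malloc() provides the alignment libav expects, and av_free is installed
 * as the GstMemory destroy notify so the memory is released together with
 * the buffer. */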

int
gst_ffmpeg_auto_max_threads (void)
{
  static gsize n_threads = 0;
  if (g_once_init_enter (&n_threads)) {
    int n = 1;
#if defined(_WIN32)
    {
      const char *s = getenv ("NUMBER_OF_PROCESSORS");
      if (s) {
        n = atoi (s);
      }
    }
#elif defined(__APPLE__)
    {
      int mib[] = { CTL_HW, HW_NCPU };
      size_t dataSize = sizeof (int);

      if (sysctl (mib, 2, &n, &dataSize, NULL, 0)) {
        n = 1;
      }
    }
#else
    n = sysconf (_SC_NPROCESSORS_CONF);
#endif
    if (n < 1)
      n = 1;

    g_once_init_leave (&n_threads, n);
  }

  return (int) (n_threads);
}

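/* gst_ffmpeg_auto_max_threads() caches the detected CPU count on first call.
 * A typical (hypothetical) caller would cap libav's threading with it:
 *
 *   context->thread_count = MIN (gst_ffmpeg_auto_max_threads (), 16);
 */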

GType
gst_av_codec_compliance_get_type (void)
{
  static gsize compliance_type = 0;

  if (g_once_init_enter (&compliance_type)) {
    static const GEnumValue types[] = {
      {GST_AV_CODEC_COMPLIANCE_AUTO,
          "The decoder automatically decides. If the pipeline is live, it will "
            "use `normal` mode, and `strict` otherwise.", "auto"},
      {GST_AV_CODEC_COMPLIANCE_VERY_STRICT,
          "VeryStrict: Strictly conform to an older, more strict version "
            "of the spec or reference software", "very-strict"},
      {GST_AV_CODEC_COMPLIANCE_STRICT,
          "Strict: Strictly conform to all the things in the spec, no matter "
            "the consequences", "strict"},
      {GST_AV_CODEC_COMPLIANCE_NORMAL, "Normal", "normal"},
      {GST_AV_CODEC_COMPLIANCE_UNOFFICIAL,
            "Unofficial: Allow unofficial extensions "
            "(decoder will not differentiate this from \"normal\")",
          "unofficial"},
      {GST_AV_CODEC_COMPLIANCE_EXPERIMENTAL,
            "Experimental: Allow nonstandardized experimental things "
            "(decoder will not differentiate this from \"normal\")",
          "experimental"},
      {0, NULL, NULL},
    };
    GType tmp = g_enum_register_static ("GstAvCodecCompliance", types);
    g_once_init_leave (&compliance_type, tmp);
  }

  return (GType) compliance_type;
}
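
/* Sketch of how the registered enum is typically consumed; hypothetical
 * property installation, assuming a GST_TYPE_AV_CODEC_COMPLIANCE wrapper
 * macro for the get_type() function above and a PROP_COMPLIANCE property id:
 *
 *   g_object_class_install_property (gobject_class, PROP_COMPLIANCE,
 *       g_param_spec_enum ("compliance", "Compliance",
 *           "Level of compliance with the encoding specification",
 *           GST_TYPE_AV_CODEC_COMPLIANCE, GST_AV_CODEC_COMPLIANCE_AUTO,
 *           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
 */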