src/third_party/ffmpeg/libavcodec/libstagefright.cpp (platform/framework/web/crosswalk.git, upstream version 10.39.225.0)
/*
 * Interface to the Android Stagefright library for
 * H/W accelerated H.264 decoding
 *
 * Copyright (C) 2011 Mohamed Naufal
 * Copyright (C) 2011 Martin Storsjö
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

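/*
 * Overview: packets handed to Stagefright_decode_frame() are converted with
 * the h264_mp4toannexb bitstream filter and queued for the CustomSource
 * below, which exposes them to Stagefright as a MediaSource. A background
 * decode_thread() pulls decoded buffers from the OMXCodec instance, copies
 * them into AVFrames and places them on an output queue that
 * Stagefright_decode_frame() drains.
 */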
#include <binder/ProcessState.h>
#include <media/stagefright/MetaData.h>
#include <media/stagefright/MediaBufferGroup.h>
#include <media/stagefright/MediaDebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/OMXCodec.h>
#include <utils/List.h>
#include <new>
#include <map>

extern "C" {
#include "avcodec.h"
#include "libavutil/imgutils.h"
#include "internal.h"
}

#define OMX_QCOM_COLOR_FormatYVU420SemiPlanar 0x7FA30C00

using namespace android;

struct Frame {
    status_t status;
    size_t size;
    int64_t time;
    int key;
    uint8_t *buffer;
    AVFrame *vframe;
};

struct TimeStamp {
    int64_t pts;
    int64_t reordered_opaque;
};

class CustomSource;

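/*
 * Private decoder state shared between the avcodec callbacks, the
 * CustomSource feeding Stagefright and the background decode thread.
 */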
struct StagefrightContext {
    AVCodecContext *avctx;
    AVBitStreamFilterContext *bsfc;
    uint8_t* orig_extradata;
    int orig_extradata_size;
    sp<MediaSource> *source;
    List<Frame*> *in_queue, *out_queue;
    pthread_mutex_t in_mutex, out_mutex;
    pthread_cond_t condition;
    pthread_t decode_thread_id;

    Frame *end_frame;
    bool source_done;
    volatile sig_atomic_t thread_started, thread_exited, stop_decode;

    AVFrame *prev_frame;
    std::map<int64_t, TimeStamp> *ts_map;
    int64_t frame_index;

    uint8_t *dummy_buf;
    int dummy_bufsize;

    OMXClient *client;
    sp<MediaSource> *decoder;
    const char *decoder_component;
};

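/*
 * CustomSource wraps the input packet queue as an android::MediaSource so
 * that OMXCodec can pull H.264 access units from it via read().
 */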
class CustomSource : public MediaSource {
public:
    CustomSource(AVCodecContext *avctx, sp<MetaData> meta) {
        s = (StagefrightContext*)avctx->priv_data;
        source_meta = meta;
        frame_size  = (avctx->width * avctx->height * 3) / 2;
        buf_group.add_buffer(new MediaBuffer(frame_size));
    }

    virtual sp<MetaData> getFormat() {
        return source_meta;
    }

    virtual status_t start(MetaData *params) {
        return OK;
    }

    virtual status_t stop() {
        return OK;
    }

    virtual status_t read(MediaBuffer **buffer,
                          const MediaSource::ReadOptions *options) {
        Frame *frame;
        status_t ret;

        if (s->thread_exited)
            return ERROR_END_OF_STREAM;
        pthread_mutex_lock(&s->in_mutex);

        while (s->in_queue->empty())
            pthread_cond_wait(&s->condition, &s->in_mutex);

        frame = *s->in_queue->begin();
        ret = frame->status;

        if (ret == OK) {
            ret = buf_group.acquire_buffer(buffer);
            if (ret == OK) {
                memcpy((*buffer)->data(), frame->buffer, frame->size);
                (*buffer)->set_range(0, frame->size);
                (*buffer)->meta_data()->clear();
                (*buffer)->meta_data()->setInt32(kKeyIsSyncFrame, frame->key);
                (*buffer)->meta_data()->setInt64(kKeyTime, frame->time);
            } else {
                av_log(s->avctx, AV_LOG_ERROR, "Failed to acquire MediaBuffer\n");
            }
            av_freep(&frame->buffer);
        }

        s->in_queue->erase(s->in_queue->begin());
        pthread_mutex_unlock(&s->in_mutex);

        av_freep(&frame);
        return ret;
    }

private:
    MediaBufferGroup buf_group;
    sp<MetaData> source_meta;
    StagefrightContext *s;
    int frame_size;
};

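/*
 * Background thread: blocks in OMXCodec::read(), copies each decoded buffer
 * into a freshly allocated AVFrame (restoring pts/reordered_opaque from
 * ts_map) and queues it on out_queue, which is capped at 10 entries.
 */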
void* decode_thread(void *arg)
{
    AVCodecContext *avctx = (AVCodecContext*)arg;
    StagefrightContext *s = (StagefrightContext*)avctx->priv_data;
    const AVPixFmtDescriptor *pix_desc = av_pix_fmt_desc_get(avctx->pix_fmt);
    Frame* frame;
    MediaBuffer *buffer;
    int32_t w, h;
    int decode_done = 0;
    int ret;
    int src_linesize[3];
    const uint8_t *src_data[3];
    int64_t out_frame_index = 0;

    do {
        buffer = NULL;
        frame = (Frame*)av_mallocz(sizeof(Frame));
        if (!frame) {
            frame         = s->end_frame;
            frame->status = AVERROR(ENOMEM);
            decode_done   = 1;
            s->end_frame  = NULL;
            goto push_frame;
        }
        frame->status = (*s->decoder)->read(&buffer);
        if (frame->status == OK) {
            sp<MetaData> outFormat = (*s->decoder)->getFormat();
            outFormat->findInt32(kKeyWidth , &w);
            outFormat->findInt32(kKeyHeight, &h);
            frame->vframe = av_frame_alloc();
            if (!frame->vframe) {
                frame->status = AVERROR(ENOMEM);
                decode_done   = 1;
                buffer->release();
                goto push_frame;
            }
            ret = ff_get_buffer(avctx, frame->vframe, AV_GET_BUFFER_FLAG_REF);
            if (ret < 0) {
                frame->status = ret;
                decode_done   = 1;
                buffer->release();
                goto push_frame;
            }

            // The OMX.SEC decoder doesn't signal the modified width/height
            if (s->decoder_component && !strncmp(s->decoder_component, "OMX.SEC", 7) &&
                (w & 15 || h & 15)) {
                if (((w + 15)&~15) * ((h + 15)&~15) * 3/2 == buffer->range_length()) {
                    w = (w + 15)&~15;
                    h = (h + 15)&~15;
                }
            }

            if (!avctx->width || !avctx->height || avctx->width > w || avctx->height > h) {
                avctx->width  = w;
                avctx->height = h;
            }

            src_linesize[0] = av_image_get_linesize(avctx->pix_fmt, w, 0);
            src_linesize[1] = av_image_get_linesize(avctx->pix_fmt, w, 1);
            src_linesize[2] = av_image_get_linesize(avctx->pix_fmt, w, 2);

            src_data[0] = (uint8_t*)buffer->data();
            src_data[1] = src_data[0] + src_linesize[0] * h;
            src_data[2] = src_data[1] + src_linesize[1] * -(-h>>pix_desc->log2_chroma_h);
            av_image_copy(frame->vframe->data, frame->vframe->linesize,
                          src_data, src_linesize,
                          avctx->pix_fmt, avctx->width, avctx->height);

            buffer->meta_data()->findInt64(kKeyTime, &out_frame_index);
            if (out_frame_index && s->ts_map->count(out_frame_index) > 0) {
                frame->vframe->pts = (*s->ts_map)[out_frame_index].pts;
                frame->vframe->reordered_opaque = (*s->ts_map)[out_frame_index].reordered_opaque;
                s->ts_map->erase(out_frame_index);
            }
            buffer->release();
        } else if (frame->status == INFO_FORMAT_CHANGED) {
            if (buffer)
                buffer->release();
            av_free(frame);
            continue;
        } else {
            decode_done = 1;
        }
push_frame:
        while (true) {
            pthread_mutex_lock(&s->out_mutex);
            if (s->out_queue->size() >= 10) {
                pthread_mutex_unlock(&s->out_mutex);
                usleep(10000);
                continue;
            }
            break;
        }
        s->out_queue->push_back(frame);
        pthread_mutex_unlock(&s->out_mutex);
    } while (!decode_done && !s->stop_decode);

    s->thread_exited = true;

    return 0;
}

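/*
 * Init: requires avcC-style extradata (extradata[0] == 1). Sets up the
 * h264_mp4toannexb bitstream filter, connects an OMXClient and creates the
 * OMXCodec decoder fed by CustomSource, then maps the reported OMX color
 * format to an FFmpeg pixel format.
 */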
static av_cold int Stagefright_init(AVCodecContext *avctx)
{
    StagefrightContext *s = (StagefrightContext*)avctx->priv_data;
    sp<MetaData> meta, outFormat;
    int32_t colorFormat = 0;
    int ret;

    if (!avctx->extradata || !avctx->extradata_size || avctx->extradata[0] != 1)
        return -1;

    s->avctx = avctx;
    s->bsfc  = av_bitstream_filter_init("h264_mp4toannexb");
    if (!s->bsfc) {
        av_log(avctx, AV_LOG_ERROR, "Cannot open the h264_mp4toannexb BSF!\n");
        return -1;
    }

    s->orig_extradata_size = avctx->extradata_size;
    s->orig_extradata = (uint8_t*) av_mallocz(avctx->extradata_size +
                                              FF_INPUT_BUFFER_PADDING_SIZE);
    if (!s->orig_extradata) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    memcpy(s->orig_extradata, avctx->extradata, avctx->extradata_size);

    meta = new MetaData;
    if (!meta) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
    meta->setInt32(kKeyWidth, avctx->width);
    meta->setInt32(kKeyHeight, avctx->height);
    meta->setData(kKeyAVCC, kTypeAVCC, avctx->extradata, avctx->extradata_size);

    android::ProcessState::self()->startThreadPool();

    s->source    = new sp<MediaSource>();
    *s->source   = new CustomSource(avctx, meta);
    s->in_queue  = new List<Frame*>;
    s->out_queue = new List<Frame*>;
    s->ts_map    = new std::map<int64_t, TimeStamp>;
    s->client    = new OMXClient;
    s->end_frame = (Frame*)av_mallocz(sizeof(Frame));
    if (s->source == NULL || !s->in_queue || !s->out_queue || !s->client ||
        !s->ts_map || !s->end_frame) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    if (s->client->connect() != OK) {
        av_log(avctx, AV_LOG_ERROR, "Cannot connect OMX client\n");
        ret = -1;
        goto fail;
    }

    s->decoder  = new sp<MediaSource>();
    *s->decoder = OMXCodec::Create(s->client->interface(), meta,
                                  false, *s->source, NULL,
                                  OMXCodec::kClientNeedsFramebuffer);
    if ((*s->decoder)->start() != OK) {
        av_log(avctx, AV_LOG_ERROR, "Cannot start decoder\n");
        ret = -1;
        s->client->disconnect();
        goto fail;
    }

    outFormat = (*s->decoder)->getFormat();
    outFormat->findInt32(kKeyColorFormat, &colorFormat);
    if (colorFormat == OMX_QCOM_COLOR_FormatYVU420SemiPlanar ||
        colorFormat == OMX_COLOR_FormatYUV420SemiPlanar)
        avctx->pix_fmt = AV_PIX_FMT_NV21;
    else if (colorFormat == OMX_COLOR_FormatYCbYCr)
        avctx->pix_fmt = AV_PIX_FMT_YUYV422;
    else if (colorFormat == OMX_COLOR_FormatCbYCrY)
        avctx->pix_fmt = AV_PIX_FMT_UYVY422;
    else
        avctx->pix_fmt = AV_PIX_FMT_YUV420P;

    outFormat->findCString(kKeyDecoderComponent, &s->decoder_component);
    if (s->decoder_component)
        s->decoder_component = av_strdup(s->decoder_component);

    pthread_mutex_init(&s->in_mutex, NULL);
    pthread_mutex_init(&s->out_mutex, NULL);
    pthread_cond_init(&s->condition, NULL);
    return 0;

fail:
    av_bitstream_filter_close(s->bsfc);
    av_freep(&s->orig_extradata);
    av_freep(&s->end_frame);
    delete s->in_queue;
    delete s->out_queue;
    delete s->ts_map;
    delete s->client;
    return ret;
}

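/*
 * Decode: runs the input packet through the bitstream filter, queues it for
 * the decode thread (starting that thread on the first call) and, once output
 * is available, returns the oldest decoded AVFrame. prev_frame keeps the
 * frame returned to the caller alive until the next call.
 */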
static int Stagefright_decode_frame(AVCodecContext *avctx, void *data,
                                    int *got_frame, AVPacket *avpkt)
{
    StagefrightContext *s = (StagefrightContext*)avctx->priv_data;
    Frame *frame;
    status_t status;
    int orig_size = avpkt->size;
    AVPacket pkt = *avpkt;
    AVFrame *ret_frame;

    if (!s->thread_started) {
        pthread_create(&s->decode_thread_id, NULL, &decode_thread, avctx);
        s->thread_started = true;
    }

    if (avpkt && avpkt->data) {
        av_bitstream_filter_filter(s->bsfc, avctx, NULL, &pkt.data, &pkt.size,
                                   avpkt->data, avpkt->size, avpkt->flags & AV_PKT_FLAG_KEY);
        avpkt = &pkt;
    }

    if (!s->source_done) {
        if (!s->dummy_buf) {
            s->dummy_buf = (uint8_t*)av_malloc(avpkt->size);
            if (!s->dummy_buf)
                return AVERROR(ENOMEM);
            s->dummy_bufsize = avpkt->size;
            memcpy(s->dummy_buf, avpkt->data, avpkt->size);
        }

        frame = (Frame*)av_mallocz(sizeof(Frame));
        if (!frame)
            return AVERROR(ENOMEM);
        if (avpkt->data) {
            frame->status  = OK;
            frame->size    = avpkt->size;
            frame->key     = avpkt->flags & AV_PKT_FLAG_KEY ? 1 : 0;
            frame->buffer  = (uint8_t*)av_malloc(avpkt->size);
            if (!frame->buffer) {
                av_freep(&frame);
                return AVERROR(ENOMEM);
            }
            uint8_t *ptr = avpkt->data;
            // The OMX.SEC decoder fails without this.
            if (avpkt->size == orig_size + avctx->extradata_size) {
                ptr += avctx->extradata_size;
                frame->size = orig_size;
            }
            memcpy(frame->buffer, ptr, orig_size);
            if (avpkt == &pkt)
                av_free(avpkt->data);

            frame->time = ++s->frame_index;
            (*s->ts_map)[s->frame_index].pts = avpkt->pts;
            (*s->ts_map)[s->frame_index].reordered_opaque = avctx->reordered_opaque;
        } else {
            frame->status  = ERROR_END_OF_STREAM;
            s->source_done = true;
        }

        while (true) {
            if (s->thread_exited) {
                s->source_done = true;
                break;
            }
            pthread_mutex_lock(&s->in_mutex);
            if (s->in_queue->size() >= 10) {
                pthread_mutex_unlock(&s->in_mutex);
                usleep(10000);
                continue;
            }
            s->in_queue->push_back(frame);
            pthread_cond_signal(&s->condition);
            pthread_mutex_unlock(&s->in_mutex);
            break;
        }
    }
    while (true) {
        pthread_mutex_lock(&s->out_mutex);
        if (!s->out_queue->empty()) break;
        pthread_mutex_unlock(&s->out_mutex);
        if (s->source_done) {
            usleep(10000);
            continue;
        } else {
            return orig_size;
        }
    }

    frame = *s->out_queue->begin();
    s->out_queue->erase(s->out_queue->begin());
    pthread_mutex_unlock(&s->out_mutex);

    ret_frame = frame->vframe;
    status  = frame->status;
    av_freep(&frame);

    if (status == ERROR_END_OF_STREAM)
        return 0;
    if (status != OK) {
        if (status == AVERROR(ENOMEM))
            return status;
        av_log(avctx, AV_LOG_ERROR, "Decode failed: %x\n", status);
        return -1;
    }

    if (s->prev_frame)
        av_frame_free(&s->prev_frame);
    s->prev_frame = ret_frame;

    *got_frame = 1;
    *(AVFrame*)data = *ret_frame;
    return orig_size;
}

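/*
 * Close: drains both queues, wakes the decode thread (feeding a dummy frame
 * and an end-of-stream marker when needed) and joins it, stops the OMX
 * decoder, then restores the original avcC extradata on the codec context.
 */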
static av_cold int Stagefright_close(AVCodecContext *avctx)
{
    StagefrightContext *s = (StagefrightContext*)avctx->priv_data;
    Frame *frame;

    if (s->thread_started) {
        if (!s->thread_exited) {
            s->stop_decode = 1;

            // Make sure decode_thread() doesn't get stuck
            pthread_mutex_lock(&s->out_mutex);
            while (!s->out_queue->empty()) {
                frame = *s->out_queue->begin();
                s->out_queue->erase(s->out_queue->begin());
                if (frame->vframe)
                    av_frame_free(&frame->vframe);
                av_freep(&frame);
            }
            pthread_mutex_unlock(&s->out_mutex);

            // Feed a dummy frame prior to signalling EOF.
            // This is required to terminate the decoder (OMX.SEC)
            // when only one frame is read during stream info detection.
            if (s->dummy_buf && (frame = (Frame*)av_mallocz(sizeof(Frame)))) {
                frame->status = OK;
                frame->size   = s->dummy_bufsize;
                frame->key    = 1;
                frame->buffer = s->dummy_buf;
                pthread_mutex_lock(&s->in_mutex);
                s->in_queue->push_back(frame);
                pthread_cond_signal(&s->condition);
                pthread_mutex_unlock(&s->in_mutex);
                s->dummy_buf = NULL;
            }

            pthread_mutex_lock(&s->in_mutex);
            s->end_frame->status = ERROR_END_OF_STREAM;
            s->in_queue->push_back(s->end_frame);
            pthread_cond_signal(&s->condition);
            pthread_mutex_unlock(&s->in_mutex);
            s->end_frame = NULL;
        }

        pthread_join(s->decode_thread_id, NULL);

        if (s->prev_frame)
            av_frame_free(&s->prev_frame);

        s->thread_started = false;
    }

    while (!s->in_queue->empty()) {
        frame = *s->in_queue->begin();
        s->in_queue->erase(s->in_queue->begin());
        if (frame->size)
            av_freep(&frame->buffer);
        av_freep(&frame);
    }

    while (!s->out_queue->empty()) {
        frame = *s->out_queue->begin();
        s->out_queue->erase(s->out_queue->begin());
        if (frame->vframe)
            av_frame_free(&frame->vframe);
        av_freep(&frame);
    }

    (*s->decoder)->stop();
    s->client->disconnect();

    if (s->decoder_component)
        av_freep(&s->decoder_component);
    av_freep(&s->dummy_buf);
    av_freep(&s->end_frame);

    // Reset the extradata back to the original mp4 format, so that
    // the next invocation (both when decoding and when called from
    // av_find_stream_info) gets the original mp4 format extradata.
    av_freep(&avctx->extradata);
    avctx->extradata = s->orig_extradata;
    avctx->extradata_size = s->orig_extradata_size;

    delete s->in_queue;
    delete s->out_queue;
    delete s->ts_map;
    delete s->client;
    delete s->decoder;
    delete s->source;

    pthread_mutex_destroy(&s->in_mutex);
    pthread_mutex_destroy(&s->out_mutex);
    pthread_cond_destroy(&s->condition);
    av_bitstream_filter_close(s->bsfc);
    return 0;
}

AVCodec ff_libstagefright_h264_decoder = {
    "libstagefright_h264",
    NULL_IF_CONFIG_SMALL("libstagefright H.264"),
    AVMEDIA_TYPE_VIDEO,
    AV_CODEC_ID_H264,
    CODEC_CAP_DELAY,
    NULL, //supported_framerates
    NULL, //pix_fmts
    NULL, //supported_samplerates
    NULL, //sample_fmts
    NULL, //channel_layouts
    0,    //max_lowres
    NULL, //priv_class
    NULL, //profiles
    sizeof(StagefrightContext),
    NULL, //next
    NULL, //init_thread_copy
    NULL, //update_thread_context
    NULL, //defaults
    NULL, //init_static_data
    Stagefright_init,
    NULL, //encode
    NULL, //encode2
    Stagefright_decode_frame,
    Stagefright_close,
};
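
/*
 * Usage sketch (an illustrative assumption, not part of this file): with an
 * FFmpeg build that enables this wrapper (configure's libstagefright option),
 * an application would have to select the decoder explicitly by name:
 *
 *     AVCodec *codec = avcodec_find_decoder_by_name("libstagefright_h264");
 *     AVCodecContext *ctx = avcodec_alloc_context3(codec);
 *     // extradata must be avcC-formatted (Stagefright_init checks
 *     // extradata[0] == 1), e.g. as provided by the mov/mp4 demuxer
 *     if (!codec || !ctx || avcodec_open2(ctx, codec, NULL) < 0) {
 *         // handle error
 *     }
 */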