[platform/framework/web/crosswalk.git] src/content/common/gpu/media/android_video_decode_accelerator.cc
// Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/common/gpu/media/android_video_decode_accelerator.h"

#include "base/bind.h"
#include "base/logging.h"
#include "base/message_loop/message_loop.h"
#include "base/metrics/histogram.h"
#include "content/common/gpu/gpu_channel.h"
#include "gpu/command_buffer/service/gles2_cmd_decoder.h"
#include "media/base/bitstream_buffer.h"
#include "media/base/limits.h"
#include "media/video/picture.h"
#include "ui/gl/android/scoped_java_surface.h"
#include "ui/gl/gl_bindings.h"

namespace content {

// Helper macro for dealing with failure.  If |result| evaluates false, emit
// |log| to ERROR, register |error| with the decoder, and return.
#define RETURN_ON_FAILURE(result, log, error)                       \
  do {                                                              \
    if (!(result)) {                                                \
      DLOG(ERROR) << log;                                           \
      base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind( \
          &AndroidVideoDecodeAccelerator::NotifyError,              \
          base::AsWeakPtr(this), error));                           \
      state_ = ERROR;                                               \
      return;                                                       \
    }                                                               \
  } while (0)

// TODO(dwkang): We only need kMaxVideoFrames to pass the media stack's
// prerolling phase, but 1 is added due to crbug.com/176036. This should be
// tuned when we have an actual use case.
enum { kNumPictureBuffers = media::limits::kMaxVideoFrames + 1 };

// Maximum number of bitstream buffers that may be acknowledged to the client
// via NotifyEndOfBitstreamBuffer() ahead of the corresponding output being
// produced.
enum { kMaxBitstreamsNotifiedInAdvance = 32 };

// static
static inline const base::TimeDelta DecodePollDelay() {
  return base::TimeDelta::FromMilliseconds(10);
}

static inline const base::TimeDelta NoWaitTimeOut() {
  return base::TimeDelta::FromMicroseconds(0);
}

AndroidVideoDecodeAccelerator::AndroidVideoDecodeAccelerator(
    media::VideoDecodeAccelerator::Client* client,
    const base::WeakPtr<gpu::gles2::GLES2Decoder> decoder,
    const base::Callback<bool(void)>& make_context_current)
    : client_(client),
      make_context_current_(make_context_current),
      codec_(media::kCodecH264),
      state_(NO_ERROR),
      surface_texture_id_(0),
      picturebuffers_requested_(false),
      gl_decoder_(decoder) {
}

AndroidVideoDecodeAccelerator::~AndroidVideoDecodeAccelerator() {
  DCHECK(thread_checker_.CalledOnValidThread());
}

bool AndroidVideoDecodeAccelerator::Initialize(
    media::VideoCodecProfile profile) {
  DCHECK(!media_codec_);
  DCHECK(thread_checker_.CalledOnValidThread());

  if (!media::MediaCodecBridge::IsAvailable())
    return false;

  if (profile == media::VP8PROFILE_MAIN) {
    codec_ = media::kCodecVP8;
  } else {
    // TODO(dwkang): enable H264 once b/8125974 is fixed.
    LOG(ERROR) << "Unsupported profile: " << profile;
    return false;
  }

  // Only consider using MediaCodec if it's likely backed by hardware.
  if (media::VideoCodecBridge::IsKnownUnaccelerated(codec_))
    return false;

  if (!make_context_current_.Run()) {
    LOG(ERROR) << "Failed to make this decoder's GL context current.";
    return false;
  }

  if (!gl_decoder_) {
    LOG(ERROR) << "Failed to get gles2 decoder instance.";
    return false;
  }
  glGenTextures(1, &surface_texture_id_);
  glActiveTexture(GL_TEXTURE0);
  glBindTexture(GL_TEXTURE_EXTERNAL_OES, surface_texture_id_);

  glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
                  GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
  glTexParameteri(GL_TEXTURE_EXTERNAL_OES,
                  GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
  gl_decoder_->RestoreTextureUnitBindings(0);
  gl_decoder_->RestoreActiveTexture();

  surface_texture_ = new gfx::SurfaceTexture(surface_texture_id_);

  if (!ConfigureMediaCodec()) {
    LOG(ERROR) << "Failed to create MediaCodec instance.";
    return false;
  }

  base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &AndroidVideoDecodeAccelerator::NotifyInitializeDone,
      base::AsWeakPtr(this)));
  return true;
}

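// Does one round of decode I/O: feeds MediaCodec a pending bitstream buffer if
// possible and drains any available output.  Called from Decode(),
// AssignPictureBuffers(), ReusePictureBuffer() and the |io_timer_| poll.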
void AndroidVideoDecodeAccelerator::DoIOTask() {
  if (state_ == ERROR) {
    return;
  }

  QueueInput();
  DequeueOutput();
}

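// Feeds at most one pending bitstream buffer to MediaCodec, provided an input
// buffer is available and the client is not already too far ahead (see
// |kMaxBitstreamsNotifiedInAdvance|).  A buffer id of -1 is queued as EOS.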
void AndroidVideoDecodeAccelerator::QueueInput() {
  if (bitstreams_notified_in_advance_.size() > kMaxBitstreamsNotifiedInAdvance)
    return;
  if (pending_bitstream_buffers_.empty())
    return;

  int input_buf_index = 0;
  media::MediaCodecStatus status = media_codec_->DequeueInputBuffer(
      NoWaitTimeOut(), &input_buf_index);
  if (status != media::MEDIA_CODEC_OK) {
    DCHECK(status == media::MEDIA_CODEC_DEQUEUE_INPUT_AGAIN_LATER ||
           status == media::MEDIA_CODEC_ERROR);
    return;
  }

  base::Time queued_time = pending_bitstream_buffers_.front().second;
  UMA_HISTOGRAM_TIMES("Media.AVDA.InputQueueTime",
                      base::Time::Now() - queued_time);
  media::BitstreamBuffer& bitstream_buffer =
      pending_bitstream_buffers_.front().first;

  if (bitstream_buffer.id() == -1) {
    media_codec_->QueueEOS(input_buf_index);
    pending_bitstream_buffers_.pop();
    return;
  }

  // Abuse the presentation time argument to propagate the bitstream
  // buffer ID to the output, so we can report it back to the client in
  // PictureReady().
  base::TimeDelta timestamp =
      base::TimeDelta::FromMicroseconds(bitstream_buffer.id());

  scoped_ptr<base::SharedMemory> shm(
      new base::SharedMemory(bitstream_buffer.handle(), true));

  RETURN_ON_FAILURE(shm->Map(bitstream_buffer.size()),
                    "Failed to SharedMemory::Map()",
                    UNREADABLE_INPUT);

  status =
      media_codec_->QueueInputBuffer(input_buf_index,
                                     static_cast<const uint8*>(shm->memory()),
                                     bitstream_buffer.size(),
                                     timestamp);
  RETURN_ON_FAILURE(status == media::MEDIA_CODEC_OK,
                    "Failed to QueueInputBuffer: " << status,
                    PLATFORM_FAILURE);
  pending_bitstream_buffers_.pop();
  // Ideally we would call NotifyEndOfBitstreamBuffer() only once no more
  // decoded output can come from this bitstream buffer.  However, the
  // MediaCodec API does not give us enough information to guarantee that.
  // So we call NotifyEndOfBitstreamBuffer() here in advance, in order to keep
  // getting more bitstreams from the client, and throttle them using
  // |bitstreams_notified_in_advance_|.
  // TODO(dwkang): check if there is a way to remove this workaround.
  base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
      base::AsWeakPtr(this), bitstream_buffer.id()));
  bitstreams_notified_in_advance_.push_back(bitstream_buffer.id());
}

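// Drains decoded output from MediaCodec.  Handles output format and output
// buffer changes, and for a successfully dequeued frame copies the
// SurfaceTexture image into a free picture buffer via
// SendCurrentSurfaceToClient().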
void AndroidVideoDecodeAccelerator::DequeueOutput() {
  if (picturebuffers_requested_ && output_picture_buffers_.empty())
    return;

  if (!output_picture_buffers_.empty() && free_picture_ids_.empty()) {
    // Don't have any picture buffer to send. Need to wait more.
    return;
  }

  bool eos = false;
  base::TimeDelta timestamp;
  int32 buf_index = 0;
  do {
    size_t offset = 0;
    size_t size = 0;

    media::MediaCodecStatus status = media_codec_->DequeueOutputBuffer(
        NoWaitTimeOut(), &buf_index, &offset, &size, &timestamp, &eos);
    switch (status) {
      case media::MEDIA_CODEC_DEQUEUE_OUTPUT_AGAIN_LATER:
      case media::MEDIA_CODEC_ERROR:
        return;

      case media::MEDIA_CODEC_OUTPUT_FORMAT_CHANGED: {
        int32 width, height;
        media_codec_->GetOutputFormat(&width, &height);

        if (!picturebuffers_requested_) {
          picturebuffers_requested_ = true;
          size_ = gfx::Size(width, height);
          base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
              &AndroidVideoDecodeAccelerator::RequestPictureBuffers,
              base::AsWeakPtr(this)));
        } else {
          // Dynamic resolution change support is not specified by the Android
          // platform at and before JB-MR1, so it's not possible to smoothly
          // continue playback at this point.  Instead, error out immediately,
          // expecting clients to Reset() as appropriate to avoid this.
          // b/7093648
          RETURN_ON_FAILURE(size_ == gfx::Size(width, height),
                            "Dynamic resolution change is not supported.",
                            PLATFORM_FAILURE);
        }
        return;
      }

      case media::MEDIA_CODEC_OUTPUT_BUFFERS_CHANGED:
        RETURN_ON_FAILURE(media_codec_->GetOutputBuffers(),
                          "Cannot get output buffer from MediaCodec.",
                          PLATFORM_FAILURE);
        break;

      case media::MEDIA_CODEC_OK:
        DCHECK_GE(buf_index, 0);
        break;

      default:
        NOTREACHED();
        break;
    }
  } while (buf_index < 0);

  media_codec_->ReleaseOutputBuffer(buf_index, true);

  if (eos) {
    base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
        &AndroidVideoDecodeAccelerator::NotifyFlushDone,
        base::AsWeakPtr(this)));
  } else {
    int64 bitstream_buffer_id = timestamp.InMicroseconds();
    SendCurrentSurfaceToClient(static_cast<int32>(bitstream_buffer_id));

    // Remove the IDs up to and including |bitstream_buffer_id| from
    // |bitstreams_notified_in_advance_|.  Note that this list does not track
    // exactly which bitstream IDs are still inside the decoder, because of
    // frame reordering; we maintain it only roughly, for throttling purposes.
    std::list<int32>::iterator it;
    for (it = bitstreams_notified_in_advance_.begin();
        it != bitstreams_notified_in_advance_.end();
        ++it) {
      if (*it == bitstream_buffer_id) {
        bitstreams_notified_in_advance_.erase(
            bitstreams_notified_in_advance_.begin(), ++it);
        break;
      }
    }
  }
}

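// Copies the current SurfaceTexture frame into the next free picture buffer's
// texture and posts a PictureReady notification tagged with |bitstream_id|
// (which rode through the codec as the presentation timestamp; see
// QueueInput()).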
void AndroidVideoDecodeAccelerator::SendCurrentSurfaceToClient(
    int32 bitstream_id) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK_NE(bitstream_id, -1);
  DCHECK(!free_picture_ids_.empty());

  RETURN_ON_FAILURE(make_context_current_.Run(),
                    "Failed to make this decoder's GL context current.",
                    PLATFORM_FAILURE);

  int32 picture_buffer_id = free_picture_ids_.front();
  free_picture_ids_.pop();

  float transform_matrix[16];
  surface_texture_->UpdateTexImage();
  surface_texture_->GetTransformMatrix(transform_matrix);

  OutputBufferMap::const_iterator i =
      output_picture_buffers_.find(picture_buffer_id);
  RETURN_ON_FAILURE(i != output_picture_buffers_.end(),
                    "Can't find a PictureBuffer for " << picture_buffer_id,
                    PLATFORM_FAILURE);
  uint32 picture_buffer_texture_id = i->second.texture_id();

  RETURN_ON_FAILURE(gl_decoder_.get(),
                    "Failed to get gles2 decoder instance.",
                    ILLEGAL_STATE);
  // Defer initializing the CopyTextureCHROMIUMResourceManager until it is
  // needed because it takes 10s of milliseconds to initialize.
  if (!copier_) {
    copier_.reset(new gpu::CopyTextureCHROMIUMResourceManager());
    copier_->Initialize(gl_decoder_.get());
  }

  // Here, we copy |surface_texture_id_| to the picture buffer instead of
  // attaching a new texture to |surface_texture_| via attachToGLContext(),
  // because:
  // 1. Once we call detachFromGLContext(), it deletes the previously attached
  //    texture.
  // 2. SurfaceTexture requires us to apply a transform matrix when we show
  //    the texture.
  copier_->DoCopyTexture(gl_decoder_.get(), GL_TEXTURE_EXTERNAL_OES,
                         GL_TEXTURE_2D, surface_texture_id_,
                         picture_buffer_texture_id, 0, size_.width(),
                         size_.height(), false, false, false);

  base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &AndroidVideoDecodeAccelerator::NotifyPictureReady,
      base::AsWeakPtr(this), media::Picture(picture_buffer_id, bitstream_id)));
}

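// Enqueues |bitstream_buffer| for decoding.  A zero-sized buffer (other than
// the id == -1 EOS marker used by Flush()) is acknowledged immediately and
// otherwise ignored.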
void AndroidVideoDecodeAccelerator::Decode(
    const media::BitstreamBuffer& bitstream_buffer) {
  DCHECK(thread_checker_.CalledOnValidThread());
  if (bitstream_buffer.id() != -1 && bitstream_buffer.size() == 0) {
    base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
        &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
        base::AsWeakPtr(this), bitstream_buffer.id()));
    return;
  }

  pending_bitstream_buffers_.push(
      std::make_pair(bitstream_buffer, base::Time::Now()));

  DoIOTask();
}

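// Receives the picture buffers that the client allocated in response to
// RequestPictureBuffers() and makes them available for output.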
void AndroidVideoDecodeAccelerator::AssignPictureBuffers(
    const std::vector<media::PictureBuffer>& buffers) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(output_picture_buffers_.empty());
  DCHECK(free_picture_ids_.empty());

  for (size_t i = 0; i < buffers.size(); ++i) {
    RETURN_ON_FAILURE(buffers[i].size() == size_,
                      "Invalid picture buffer size was passed.",
                      INVALID_ARGUMENT);
    int32 id = buffers[i].id();
    output_picture_buffers_.insert(std::make_pair(id, buffers[i]));
    free_picture_ids_.push(id);
    // Since the client might be re-using |picture_buffer_id| values, forget
    // about previously-dismissed IDs now.  See ReusePictureBuffer() comment
    // about "zombies" for why we maintain this set in the first place.
    dismissed_picture_ids_.erase(id);
  }

  RETURN_ON_FAILURE(output_picture_buffers_.size() == kNumPictureBuffers,
                    "Invalid picture buffers were passed.",
                    INVALID_ARGUMENT);

  DoIOTask();
}

void AndroidVideoDecodeAccelerator::ReusePictureBuffer(
    int32 picture_buffer_id) {
  DCHECK(thread_checker_.CalledOnValidThread());

  // This ReusePictureBuffer() might have been in a pipe somewhere (queued in
  // IPC, or in a PostTask either at the sender or receiver) when we sent a
  // DismissPictureBuffer() for this |picture_buffer_id|.  Account for such
  // potential "zombie" IDs here.
  if (dismissed_picture_ids_.erase(picture_buffer_id))
    return;

  free_picture_ids_.push(picture_buffer_id);

  DoIOTask();
}

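// Flush is implemented by pushing an EOS marker (a BitstreamBuffer with id -1)
// through the normal Decode() path; NotifyFlushDone() is posted once the EOS
// is observed on the output side in DequeueOutput().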
void AndroidVideoDecodeAccelerator::Flush() {
  DCHECK(thread_checker_.CalledOnValidThread());

  Decode(media::BitstreamBuffer(-1, base::SharedMemoryHandle(), 0));
}

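// (Re)creates and starts the MediaCodec instance against |surface_texture_|,
// and starts the |io_timer_| that drives DoIOTask() at DecodePollDelay()
// intervals.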
bool AndroidVideoDecodeAccelerator::ConfigureMediaCodec() {
  DCHECK(surface_texture_.get());
  media_codec_.reset(media::VideoCodecBridge::Create(codec_, false));

  if (!media_codec_)
    return false;

  gfx::ScopedJavaSurface surface(surface_texture_.get());
  // Pass a dummy 320x240 canvas size and let the codec signal the real size
  // when it's known from the bitstream.
  if (!media_codec_->Start(
           codec_, gfx::Size(320, 240), surface.j_surface().obj(), NULL)) {
    return false;
  }
  io_timer_.Start(FROM_HERE,
                  DecodePollDelay(),
                  this,
                  &AndroidVideoDecodeAccelerator::DoIOTask);
  return media_codec_->GetOutputBuffers();
}

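// Drops all pending input (acknowledging each buffer to the client), dismisses
// all picture buffers, and stops and reconfigures the codec; see the comment
// below about flush() and resolution-change bugs on older Android releases.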
void AndroidVideoDecodeAccelerator::Reset() {
  DCHECK(thread_checker_.CalledOnValidThread());

  while (!pending_bitstream_buffers_.empty()) {
    int32 bitstream_buffer_id = pending_bitstream_buffers_.front().first.id();
    pending_bitstream_buffers_.pop();

    if (bitstream_buffer_id != -1) {
      base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
          &AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer,
          base::AsWeakPtr(this), bitstream_buffer_id));
    }
  }
  bitstreams_notified_in_advance_.clear();

  for (OutputBufferMap::iterator it = output_picture_buffers_.begin();
       it != output_picture_buffers_.end();
       ++it) {
    client_->DismissPictureBuffer(it->first);
    dismissed_picture_ids_.insert(it->first);
  }
  output_picture_buffers_.clear();
  std::queue<int32> empty;
  std::swap(free_picture_ids_, empty);
  CHECK(free_picture_ids_.empty());
  picturebuffers_requested_ = false;

  // On some devices, and up to at least JB-MR1,
  // - flush() can fail after EOS (b/8125974); and
  // - mid-stream resolution change is unsupported (b/7093648).
  // To cope with these facts, we always stop & restart the codec on Reset().
  io_timer_.Stop();
  media_codec_->Stop();
  ConfigureMediaCodec();
  state_ = NO_ERROR;

  base::MessageLoop::current()->PostTask(FROM_HERE, base::Bind(
      &AndroidVideoDecodeAccelerator::NotifyResetDone, base::AsWeakPtr(this)));
}

void AndroidVideoDecodeAccelerator::Destroy() {
  DCHECK(thread_checker_.CalledOnValidThread());

  if (media_codec_) {
    io_timer_.Stop();
    media_codec_->Stop();
  }
  if (surface_texture_id_)
    glDeleteTextures(1, &surface_texture_id_);
  if (copier_)
    copier_->Destroy();
  delete this;
}

void AndroidVideoDecodeAccelerator::NotifyInitializeDone() {
  client_->NotifyInitializeDone();
}

void AndroidVideoDecodeAccelerator::RequestPictureBuffers() {
  client_->ProvidePictureBuffers(kNumPictureBuffers, size_, GL_TEXTURE_2D);
}

void AndroidVideoDecodeAccelerator::NotifyPictureReady(
    const media::Picture& picture) {
  client_->PictureReady(picture);
}

void AndroidVideoDecodeAccelerator::NotifyEndOfBitstreamBuffer(
    int input_buffer_id) {
  client_->NotifyEndOfBitstreamBuffer(input_buffer_id);
}

void AndroidVideoDecodeAccelerator::NotifyFlushDone() {
  client_->NotifyFlushDone();
}

void AndroidVideoDecodeAccelerator::NotifyResetDone() {
  client_->NotifyResetDone();
}

void AndroidVideoDecodeAccelerator::NotifyError(
    media::VideoDecodeAccelerator::Error error) {
  client_->NotifyError(error);
}

}  // namespace content