// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/pepper/pepper_media_stream_video_track_host.h"

#include "base/base64.h"
#include "base/logging.h"
#include "base/rand_util.h"
#include "base/strings/utf_string_conversions.h"
#include "content/renderer/media/media_stream_video_track.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/yuv_convert.h"
#include "ppapi/c/pp_errors.h"
#include "ppapi/c/ppb_media_stream_video_track.h"
#include "ppapi/c/ppb_video_frame.h"
#include "ppapi/host/dispatch_host_message.h"
#include "ppapi/host/host_message_context.h"
#include "ppapi/proxy/ppapi_messages.h"
#include "ppapi/shared_impl/media_stream_buffer.h"

// IS_ALIGNED is also defined in
// third_party/webrtc/overrides/webrtc/base/basictypes.h
// TODO(ronghuawu): Avoid undef.
#undef IS_ALIGNED
#include "third_party/libyuv/include/libyuv.h"

using media::VideoFrame;
using ppapi::host::HostMessageContext;
using ppapi::MediaStreamVideoTrackShared;

namespace {

const int32_t kDefaultNumberOfBuffers = 4;
const int32_t kMaxNumberOfBuffers = 8;
// Filter mode for scaling frames.
const libyuv::FilterMode kFilterMode = libyuv::kFilterBox;

const char kPepperVideoSourceName[] = "PepperVideoSourceName";

// Default config for output mode.
const int kDefaultOutputFrameRate = 30;

media::VideoPixelFormat ToPixelFormat(PP_VideoFrame_Format format) {
  switch (format) {
    case PP_VIDEOFRAME_FORMAT_YV12:
      return media::PIXEL_FORMAT_YV12;
    case PP_VIDEOFRAME_FORMAT_I420:
      return media::PIXEL_FORMAT_I420;
    default:
      DVLOG(1) << "Unsupported pixel format " << format;
      return media::PIXEL_FORMAT_UNKNOWN;
  }
}

PP_VideoFrame_Format ToPpapiFormat(VideoFrame::Format format) {
  switch (format) {
    case VideoFrame::YV12:
      return PP_VIDEOFRAME_FORMAT_YV12;
    case VideoFrame::I420:
      return PP_VIDEOFRAME_FORMAT_I420;
    default:
      DVLOG(1) << "Unsupported pixel format " << format;
      return PP_VIDEOFRAME_FORMAT_UNKNOWN;
  }
}

VideoFrame::Format FromPpapiFormat(PP_VideoFrame_Format format) {
  switch (format) {
    case PP_VIDEOFRAME_FORMAT_YV12:
      return VideoFrame::YV12;
    case PP_VIDEOFRAME_FORMAT_I420:
      return VideoFrame::I420;
    default:
      DVLOG(1) << "Unsupported pixel format " << format;
      return VideoFrame::UNKNOWN;
  }
}

// Computes the target size based on the size of the frame received from
// MediaStreamVideoSink and the size specified by the plugin.
gfx::Size GetTargetSize(const gfx::Size& source, const gfx::Size& plugin) {
  return gfx::Size(plugin.width() ? plugin.width() : source.width(),
                   plugin.height() ? plugin.height() : source.height());
}

// Computes the target format based on the format of the frame received from
// MediaStreamVideoSink and the format specified by the plugin.
PP_VideoFrame_Format GetTargetFormat(PP_VideoFrame_Format source,
                                     PP_VideoFrame_Format plugin) {
  return plugin != PP_VIDEOFRAME_FORMAT_UNKNOWN ? plugin : source;
}

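// Converts |src| (which must be YV12 or I420) into |dst_format| at |dst_size|,
// writing the result into |dst|. BGRA output uses libyuv::I420ToARGB, or
// media::ScaleYUVToRGB32 when scaling is needed; planar YV12/I420 output is
// produced by scaling each plane with libyuv::ScalePlane and packing the
// planes contiguously into |dst|.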
void ConvertFromMediaVideoFrame(const scoped_refptr<media::VideoFrame>& src,
                                PP_VideoFrame_Format dst_format,
                                const gfx::Size& dst_size,
                                uint8_t* dst) {
  CHECK(src->format() == VideoFrame::YV12 || src->format() == VideoFrame::I420);
  if (dst_format == PP_VIDEOFRAME_FORMAT_BGRA) {
    if (src->coded_size() == dst_size) {
      libyuv::I420ToARGB(src->data(VideoFrame::kYPlane),
                         src->stride(VideoFrame::kYPlane),
                         src->data(VideoFrame::kUPlane),
                         src->stride(VideoFrame::kUPlane),
                         src->data(VideoFrame::kVPlane),
                         src->stride(VideoFrame::kVPlane),
                         dst,
                         dst_size.width() * 4,
                         dst_size.width(),
                         dst_size.height());
    } else {
      media::ScaleYUVToRGB32(src->data(VideoFrame::kYPlane),
                             src->data(VideoFrame::kUPlane),
                             src->data(VideoFrame::kVPlane),
                             dst,
                             src->coded_size().width(),
                             src->coded_size().height(),
                             dst_size.width(),
                             dst_size.height(),
                             src->stride(VideoFrame::kYPlane),
                             src->stride(VideoFrame::kUPlane),
                             dst_size.width() * 4,
                             media::YV12,
                             media::ROTATE_0,
                             media::FILTER_BILINEAR);
    }
  } else if (dst_format == PP_VIDEOFRAME_FORMAT_YV12 ||
             dst_format == PP_VIDEOFRAME_FORMAT_I420) {
    static const size_t kPlanesOrder[][3] = {
        {VideoFrame::kYPlane, VideoFrame::kVPlane,
         VideoFrame::kUPlane},  // YV12
        {VideoFrame::kYPlane, VideoFrame::kUPlane,
         VideoFrame::kVPlane},  // I420
    };
    const int plane_order = (dst_format == PP_VIDEOFRAME_FORMAT_YV12) ? 0 : 1;
    int dst_width = dst_size.width();
    int dst_height = dst_size.height();
    libyuv::ScalePlane(src->data(kPlanesOrder[plane_order][0]),
                       src->stride(kPlanesOrder[plane_order][0]),
                       src->coded_size().width(),
                       src->coded_size().height(),
                       dst,
                       dst_width,
                       dst_width,
                       dst_height,
                       kFilterMode);
    dst += dst_width * dst_height;
    const int src_halfwidth = (src->coded_size().width() + 1) >> 1;
    const int src_halfheight = (src->coded_size().height() + 1) >> 1;
    const int dst_halfwidth = (dst_width + 1) >> 1;
    const int dst_halfheight = (dst_height + 1) >> 1;
    libyuv::ScalePlane(src->data(kPlanesOrder[plane_order][1]),
                       src->stride(kPlanesOrder[plane_order][1]),
                       src_halfwidth,
                       src_halfheight,
                       dst,
                       dst_halfwidth,
                       dst_halfwidth,
                       dst_halfheight,
                       kFilterMode);
    dst += dst_halfwidth * dst_halfheight;
    libyuv::ScalePlane(src->data(kPlanesOrder[plane_order][2]),
                       src->stride(kPlanesOrder[plane_order][2]),
                       src_halfwidth,
                       src_halfheight,
                       dst,
                       dst_halfwidth,
                       dst_halfwidth,
                       dst_halfheight,
                       kFilterMode);
  } else {
    NOTREACHED();
  }
}

}  // namespace

namespace content {

// Internal class used for delivering video frames on the IO-thread to
// the MediaStreamVideoSource implementation.
class PepperMediaStreamVideoTrackHost::FrameDeliverer
    : public base::RefCountedThreadSafe<FrameDeliverer> {
 public:
  FrameDeliverer(
      const scoped_refptr<base::MessageLoopProxy>& io_message_loop_proxy,
      const VideoCaptureDeliverFrameCB& new_frame_callback);

  void DeliverVideoFrame(const scoped_refptr<media::VideoFrame>& frame,
                         const media::VideoCaptureFormat& format);

 private:
  friend class base::RefCountedThreadSafe<FrameDeliverer>;
  virtual ~FrameDeliverer();

  void DeliverFrameOnIO(const scoped_refptr<media::VideoFrame>& frame,
                        const media::VideoCaptureFormat& format);

  scoped_refptr<base::MessageLoopProxy> io_message_loop_;
  VideoCaptureDeliverFrameCB new_frame_callback_;

  DISALLOW_COPY_AND_ASSIGN(FrameDeliverer);
};

PepperMediaStreamVideoTrackHost::FrameDeliverer::FrameDeliverer(
    const scoped_refptr<base::MessageLoopProxy>& io_message_loop_proxy,
    const VideoCaptureDeliverFrameCB& new_frame_callback)
    : io_message_loop_(io_message_loop_proxy),
      new_frame_callback_(new_frame_callback) {
}

PepperMediaStreamVideoTrackHost::FrameDeliverer::~FrameDeliverer() {
}

void PepperMediaStreamVideoTrackHost::FrameDeliverer::DeliverVideoFrame(
    const scoped_refptr<media::VideoFrame>& frame,
    const media::VideoCaptureFormat& format) {
  io_message_loop_->PostTask(
      FROM_HERE,
      base::Bind(&FrameDeliverer::DeliverFrameOnIO,
                 this, frame, format));
}

void PepperMediaStreamVideoTrackHost::FrameDeliverer::DeliverFrameOnIO(
    const scoped_refptr<media::VideoFrame>& frame,
    const media::VideoCaptureFormat& format) {
  DCHECK(io_message_loop_->BelongsToCurrentThread());
  // The time when this frame is generated is unknown so give a null value to
  // |estimated_capture_time|.
  new_frame_callback_.Run(frame, format, base::TimeTicks());
}

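// Constructor for input (kRead) tracks: wraps an existing blink |track| whose
// frames are delivered to the plugin through OnVideoFrame().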
PepperMediaStreamVideoTrackHost::PepperMediaStreamVideoTrackHost(
    RendererPpapiHost* host,
    PP_Instance instance,
    PP_Resource resource,
    const blink::WebMediaStreamTrack& track)
    : PepperMediaStreamTrackHostBase(host, instance, resource),
      track_(track),
      connected_(false),
      number_of_buffers_(kDefaultNumberOfBuffers),
      source_frame_format_(PP_VIDEOFRAME_FORMAT_UNKNOWN),
      plugin_frame_format_(PP_VIDEOFRAME_FORMAT_UNKNOWN),
      frame_data_size_(0),
      type_(kRead),
      output_started_(false),
      weak_factory_(this) {
  DCHECK(!track_.isNull());
}

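// Constructor for output (kWrite) tracks: creates its own blink source and
// track via InitBlinkTrack(); frames written by the plugin are forwarded to
// the track with SendFrameToTrack().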
PepperMediaStreamVideoTrackHost::PepperMediaStreamVideoTrackHost(
    RendererPpapiHost* host,
    PP_Instance instance,
    PP_Resource resource)
    : PepperMediaStreamTrackHostBase(host, instance, resource),
      connected_(false),
      number_of_buffers_(kDefaultNumberOfBuffers),
      source_frame_format_(PP_VIDEOFRAME_FORMAT_UNKNOWN),
      plugin_frame_format_(PP_VIDEOFRAME_FORMAT_UNKNOWN),
      frame_data_size_(0),
      type_(kWrite),
      output_started_(false),
      weak_factory_(this) {
  InitBlinkTrack();
  DCHECK(!track_.isNull());
}

bool PepperMediaStreamVideoTrackHost::IsMediaStreamVideoTrackHost() {
  return true;
}

PepperMediaStreamVideoTrackHost::~PepperMediaStreamVideoTrackHost() {
  OnClose();
}

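// (Re)allocates the shared buffers using the effective frame size and format,
// where plugin-specified values override the source's. For output (kWrite)
// tracks the buffer headers are pre-filled and all buffers are immediately
// made available to the plugin.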
void PepperMediaStreamVideoTrackHost::InitBuffers() {
  gfx::Size size = GetTargetSize(source_frame_size_, plugin_frame_size_);
  DCHECK(!size.IsEmpty());

  PP_VideoFrame_Format format =
      GetTargetFormat(source_frame_format_, plugin_frame_format_);
  DCHECK_NE(format, PP_VIDEOFRAME_FORMAT_UNKNOWN);

  if (format == PP_VIDEOFRAME_FORMAT_BGRA) {
    frame_data_size_ = size.width() * size.height() * 4;
  } else {
    frame_data_size_ =
        VideoFrame::AllocationSize(FromPpapiFormat(format), size);
  }

  DCHECK_GT(frame_data_size_, 0U);
  int32_t buffer_size =
      sizeof(ppapi::MediaStreamBuffer::Video) + frame_data_size_;
  bool result = PepperMediaStreamTrackHostBase::InitBuffers(number_of_buffers_,
                                                            buffer_size,
                                                            type_);
  CHECK(result);

  if (type_ == kWrite) {
    for (int32_t i = 0; i < buffer_manager()->number_of_buffers(); ++i) {
      ppapi::MediaStreamBuffer::Video* buffer =
          &(buffer_manager()->GetBufferPointer(i)->video);
      buffer->header.size = buffer_manager()->buffer_size();
      buffer->header.type = ppapi::MediaStreamBuffer::TYPE_VIDEO;
      buffer->format = format;
      buffer->size.width = size.width();
      buffer->size.height = size.height();
      buffer->data_size = frame_data_size_;
    }

    // Make all the frames available to the plugin.
    std::vector<int32_t> indices = buffer_manager()->DequeueBuffers();
    SendEnqueueBuffersMessageToPlugin(indices);
  }
}

void PepperMediaStreamVideoTrackHost::OnClose() {
  if (connected_) {
    MediaStreamVideoSink::RemoveFromVideoTrack(this, track_);
    weak_factory_.InvalidateWeakPtrs();
    connected_ = false;
  }
}

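// For input (kRead) tracks, the plugin is returning a consumed buffer to the
// host; for output (kWrite) tracks, an enqueued buffer carries a new frame to
// be sent to the track.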
int32_t PepperMediaStreamVideoTrackHost::OnHostMsgEnqueueBuffer(
    ppapi::host::HostMessageContext* context, int32_t index) {
  if (type_ == kRead) {
    return PepperMediaStreamTrackHostBase::OnHostMsgEnqueueBuffer(context,
                                                                  index);
  } else {
    return SendFrameToTrack(index);
  }
}

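// Wraps the plugin-filled buffer at |index| in a media::VideoFrame (I420 plane
// layout by default, with the U and V planes swapped for YV12), hands it to
// |frame_deliverer_|, and then re-enqueues the buffer to the plugin.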
int32_t PepperMediaStreamVideoTrackHost::SendFrameToTrack(int32_t index) {
  DCHECK_EQ(type_, kWrite);

  if (output_started_) {
    // Sends the frame to the blink video track.
    ppapi::MediaStreamBuffer::Video* pp_frame =
        &(buffer_manager()->GetBufferPointer(index)->video);

    int32 y_stride = plugin_frame_size_.width();
    int32 uv_stride = (plugin_frame_size_.width() + 1) / 2;
    uint8* y_data = static_cast<uint8*>(pp_frame->data);
    // Default to I420.
    uint8* u_data = y_data + plugin_frame_size_.GetArea();
    uint8* v_data = y_data + (plugin_frame_size_.GetArea() * 5 / 4);
    if (plugin_frame_format_ == PP_VIDEOFRAME_FORMAT_YV12) {
      // Swap u and v for YV12.
      uint8* tmp = u_data;
      u_data = v_data;
      v_data = tmp;
    }

    int64 ts_ms = static_cast<int64>(pp_frame->timestamp *
                                     base::Time::kMillisecondsPerSecond);
    scoped_refptr<VideoFrame> frame = media::VideoFrame::WrapExternalYuvData(
        FromPpapiFormat(plugin_frame_format_),
        plugin_frame_size_,
        gfx::Rect(plugin_frame_size_),
        plugin_frame_size_,
        y_stride,
        uv_stride,
        uv_stride,
        y_data,
        u_data,
        v_data,
        base::TimeDelta::FromMilliseconds(ts_ms),
        base::Closure());

    frame_deliverer_->DeliverVideoFrame(
        frame,
        media::VideoCaptureFormat(plugin_frame_size_,
                                  kDefaultOutputFrameRate,
                                  ToPixelFormat(plugin_frame_format_)));
  }

  // Makes the frame available to the plugin again.
  SendEnqueueBufferMessageToPlugin(index);
  return PP_OK;
}

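// Sink callback for frames coming from |track_|. The first frame determines
// the source size and format and triggers buffer allocation; later frames are
// converted into a free shared buffer and enqueued to the plugin, or dropped
// when no buffer is available.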
void PepperMediaStreamVideoTrackHost::OnVideoFrame(
    const scoped_refptr<VideoFrame>& frame,
    const media::VideoCaptureFormat& format,
    const base::TimeTicks& estimated_capture_time) {
  DCHECK(frame.get());
  // TODO(penghuang): Check |frame->end_of_stream()| and close the track.
  PP_VideoFrame_Format ppformat = ToPpapiFormat(frame->format());
  if (ppformat == PP_VIDEOFRAME_FORMAT_UNKNOWN)
    return;

  if (source_frame_size_.IsEmpty()) {
    source_frame_size_ = frame->coded_size();
    source_frame_format_ = ppformat;
    InitBuffers();
  }

  int32_t index = buffer_manager()->DequeueBuffer();
  // Drop frames if the underlying buffer is full.
  if (index < 0) {
    DVLOG(1) << "A frame is dropped.";
    return;
  }

  CHECK_EQ(ppformat, source_frame_format_) << "Frame format is changed.";

  gfx::Size size = GetTargetSize(source_frame_size_, plugin_frame_size_);
  ppformat =
      GetTargetFormat(source_frame_format_, plugin_frame_format_);
  ppapi::MediaStreamBuffer::Video* buffer =
      &(buffer_manager()->GetBufferPointer(index)->video);
  buffer->header.size = buffer_manager()->buffer_size();
  buffer->header.type = ppapi::MediaStreamBuffer::TYPE_VIDEO;
  buffer->timestamp = frame->timestamp().InSecondsF();
  buffer->format = ppformat;
  buffer->size.width = size.width();
  buffer->size.height = size.height();
  buffer->data_size = frame_data_size_;
  ConvertFromMediaVideoFrame(frame, ppformat, size, buffer->data);

  SendEnqueueBufferMessageToPlugin(index);
}

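// Reports the single capture format derived from the plugin-configured size
// and format. Only meaningful for output (kWrite) tracks.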
void PepperMediaStreamVideoTrackHost::GetCurrentSupportedFormats(
    int max_requested_width, int max_requested_height,
    double max_requested_frame_rate,
    const VideoCaptureDeviceFormatsCB& callback) {
  if (type_ != kWrite) {
    DVLOG(1) << "GetCurrentSupportedFormats is only supported in output mode.";
    callback.Run(media::VideoCaptureFormats());
    return;
  }

  media::VideoCaptureFormats formats;
  formats.push_back(
      media::VideoCaptureFormat(plugin_frame_size_,
                                kDefaultOutputFrameRate,
                                ToPixelFormat(plugin_frame_format_)));
  callback.Run(formats);
}

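// Starts output-mode delivery: |frame_callback| is kept in a FrameDeliverer
// bound to the IO message loop so SendFrameToTrack() can forward plugin
// frames.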
void PepperMediaStreamVideoTrackHost::StartSourceImpl(
    const media::VideoCaptureFormat& format,
    const VideoCaptureDeliverFrameCB& frame_callback) {
  output_started_ = true;
  frame_deliverer_ = new FrameDeliverer(io_message_loop(), frame_callback);
}

void PepperMediaStreamVideoTrackHost::StopSourceImpl() {
  output_started_ = false;
  frame_deliverer_ = NULL;
}

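// Once the plugin resource is attached, registers this host as a sink of
// |track_| so that OnVideoFrame() is invoked on the current loop for every
// frame.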
void PepperMediaStreamVideoTrackHost::DidConnectPendingHostToResource() {
  if (!connected_) {
    MediaStreamVideoSink::AddToVideoTrack(
        this,
        media::BindToCurrentLoop(
            base::Bind(
                &PepperMediaStreamVideoTrackHost::OnVideoFrame,
                weak_factory_.GetWeakPtr())),
        track_);
    connected_ = true;
  }
}

int32_t PepperMediaStreamVideoTrackHost::OnResourceMessageReceived(
    const IPC::Message& msg,
    HostMessageContext* context) {
  PPAPI_BEGIN_MESSAGE_MAP(PepperMediaStreamVideoTrackHost, msg)
    PPAPI_DISPATCH_HOST_RESOURCE_CALL(
        PpapiHostMsg_MediaStreamVideoTrack_Configure, OnHostMsgConfigure)
  PPAPI_END_MESSAGE_MAP()
  return PepperMediaStreamTrackHostBase::OnResourceMessageReceived(msg,
                                                                   context);
}

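// Applies the attributes requested by the plugin (frame size, format and
// buffer count), re-initializes the shared buffers if the effective
// configuration changed, and replies with the id of the track's source.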
int32_t PepperMediaStreamVideoTrackHost::OnHostMsgConfigure(
    HostMessageContext* context,
    const MediaStreamVideoTrackShared::Attributes& attributes) {
  CHECK(MediaStreamVideoTrackShared::VerifyAttributes(attributes));

  bool changed = false;
  gfx::Size new_size(attributes.width, attributes.height);
  if (GetTargetSize(source_frame_size_, plugin_frame_size_) !=
      GetTargetSize(source_frame_size_, new_size)) {
    changed = true;
  }
  plugin_frame_size_ = new_size;

  int32_t buffers = attributes.buffers
                        ? std::min(kMaxNumberOfBuffers, attributes.buffers)
                        : kDefaultNumberOfBuffers;
  if (buffers != number_of_buffers_)
    changed = true;
  number_of_buffers_ = buffers;

  if (GetTargetFormat(source_frame_format_, plugin_frame_format_) !=
      GetTargetFormat(source_frame_format_, attributes.format)) {
    changed = true;
  }
  plugin_frame_format_ = attributes.format;

  // If the first frame has already been received, re-initialize the buffers
  // with the new settings. Otherwise, the buffers are initialized when the
  // first frame arrives, because the plugin can only provide a subset of the
  // attributes, which is not enough to initialize the buffers.
  if (changed && (type_ == kWrite || !source_frame_size_.IsEmpty()))
    InitBuffers();

  // TODO(ronghuawu): Ask the owner of DOMMediaStreamTrackToResource why the
  // source id is used there instead of the track id.
  const std::string id = track_.source().id().utf8();
  context->reply_msg = PpapiPluginMsg_MediaStreamVideoTrack_ConfigureReply(id);
  return PP_OK;
}

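// Creates a blink video source with a random base64 id and a
// MediaStreamVideoTrack backed by this host; called from the output-mode
// constructor.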
void PepperMediaStreamVideoTrackHost::InitBlinkTrack() {
  std::string source_id;
  base::Base64Encode(base::RandBytesAsString(64), &source_id);
  blink::WebMediaStreamSource webkit_source;
  webkit_source.initialize(base::UTF8ToUTF16(source_id),
                           blink::WebMediaStreamSource::TypeVideo,
                           base::UTF8ToUTF16(kPepperVideoSourceName));
  webkit_source.setExtraData(this);

  const bool enabled = true;
  blink::WebMediaConstraints constraints;
  constraints.initialize();
  track_ = MediaStreamVideoTrack::CreateVideoTrack(
       this, constraints,
       base::Bind(
           &PepperMediaStreamVideoTrackHost::OnTrackStarted,
           base::Unretained(this)),
       enabled);
}

void PepperMediaStreamVideoTrackHost::OnTrackStarted(
    MediaStreamSource* source,
    MediaStreamRequestResult result,
    const blink::WebString& result_name) {
  DVLOG(3) << "OnTrackStarted result: " << result;
}

}  // namespace content