// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/webrtc/media_stream_remote_video_source.h"

#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/location.h"
#include "base/message_loop/message_loop_proxy.h"
#include "content/renderer/media/native_handle_impl.h"
#include "media/base/bind_to_current_loop.h"
#include "media/base/video_frame.h"
#include "media/base/video_frame_pool.h"
#include "media/base/video_util.h"
#include "third_party/libjingle/source/talk/media/base/videoframe.h"

namespace content {

// Internal class used for receiving frames from the webrtc track on a
// libjingle thread and forwarding them to the IO thread.
class MediaStreamRemoteVideoSource::RemoteVideoSourceDelegate
    : public base::RefCountedThreadSafe<RemoteVideoSourceDelegate>,
      public webrtc::VideoRendererInterface {
 public:
  RemoteVideoSourceDelegate(
      const scoped_refptr<base::MessageLoopProxy>& io_message_loop,
      const VideoCaptureDeliverFrameCB& new_frame_callback);

 protected:
  friend class base::RefCountedThreadSafe<RemoteVideoSourceDelegate>;
  virtual ~RemoteVideoSourceDelegate();

  // Implements webrtc::VideoRendererInterface used for receiving video frames
  // from the PeerConnection video track. May be called on a libjingle internal
  // thread.
  virtual void SetSize(int width, int height) OVERRIDE;
  virtual void RenderFrame(const cricket::VideoFrame* frame) OVERRIDE;

  void DoRenderFrameOnIOThread(scoped_refptr<media::VideoFrame> video_frame,
                               const media::VideoCaptureFormat& format);

 private:
  // Bound to the render thread.
  base::ThreadChecker thread_checker_;

  scoped_refptr<base::MessageLoopProxy> io_message_loop_;

  // |frame_pool_| is only accessed on whatever thread
  // webrtc::VideoRendererInterface::RenderFrame is called on.
  media::VideoFramePool frame_pool_;

  // |frame_callback_| is accessed on the IO thread.
  VideoCaptureDeliverFrameCB frame_callback_;
};

MediaStreamRemoteVideoSource::
RemoteVideoSourceDelegate::RemoteVideoSourceDelegate(
    const scoped_refptr<base::MessageLoopProxy>& io_message_loop,
    const VideoCaptureDeliverFrameCB& new_frame_callback)
    : io_message_loop_(io_message_loop),
      frame_callback_(new_frame_callback) {
}

MediaStreamRemoteVideoSource::
RemoteVideoSourceDelegate::~RemoteVideoSourceDelegate() {
}

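// SetSize is a no-op here; the dimensions of each incoming frame are read
// directly in RenderFrame below.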
void MediaStreamRemoteVideoSource::
RemoteVideoSourceDelegate::SetSize(int width, int height) {
}

void MediaStreamRemoteVideoSource::
RemoteVideoSourceDelegate::RenderFrame(
    const cricket::VideoFrame* frame) {
  base::TimeDelta timestamp = base::TimeDelta::FromMicroseconds(
      frame->GetElapsedTime() / rtc::kNumNanosecsPerMicrosec);

  scoped_refptr<media::VideoFrame> video_frame;
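  // A non-NULL native handle means the frame already wraps a
  // media::VideoFrame (see NativeHandleImpl); reuse it and only update its
  // timestamp. Otherwise copy the I420 planes into a pooled YV12 frame.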
  if (frame->GetNativeHandle() != NULL) {
    NativeHandleImpl* handle =
        static_cast<NativeHandleImpl*>(frame->GetNativeHandle());
    video_frame = static_cast<media::VideoFrame*>(handle->GetHandle());
    video_frame->set_timestamp(timestamp);
  } else {
    gfx::Size size(frame->GetWidth(), frame->GetHeight());
    video_frame = frame_pool_.CreateFrame(
        media::VideoFrame::YV12, size, gfx::Rect(size), size, timestamp);

    // Non-square pixels are unsupported.
    DCHECK_EQ(frame->GetPixelWidth(), 1u);
    DCHECK_EQ(frame->GetPixelHeight(), 1u);

    int y_rows = frame->GetHeight();
    int uv_rows = frame->GetChromaHeight();
    CopyYPlane(
        frame->GetYPlane(), frame->GetYPitch(), y_rows, video_frame.get());
    CopyUPlane(
        frame->GetUPlane(), frame->GetUPitch(), uv_rows, video_frame.get());
    CopyVPlane(
        frame->GetVPlane(), frame->GetVPitch(), uv_rows, video_frame.get());
  }

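  // Frames copied above are always YV12; a frame with any other format can
  // only have arrived through the native handle and is reported as
  // PIXEL_FORMAT_TEXTURE.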
  media::VideoPixelFormat pixel_format =
      (video_frame->format() == media::VideoFrame::YV12) ?
          media::PIXEL_FORMAT_YV12 : media::PIXEL_FORMAT_TEXTURE;

  media::VideoCaptureFormat format(
      gfx::Size(video_frame->natural_size().width(),
                video_frame->natural_size().height()),
      MediaStreamVideoSource::kUnknownFrameRate,
      pixel_format);

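  // Hand the frame over to the IO thread, where |frame_callback_| runs.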
  io_message_loop_->PostTask(
      FROM_HERE,
      base::Bind(&RemoteVideoSourceDelegate::DoRenderFrameOnIOThread,
                 this, video_frame, format));
}

void MediaStreamRemoteVideoSource::
RemoteVideoSourceDelegate::DoRenderFrameOnIOThread(
    scoped_refptr<media::VideoFrame> video_frame,
    const media::VideoCaptureFormat& format) {
  DCHECK(io_message_loop_->BelongsToCurrentThread());
  // TODO(hclam): Give the estimated capture time.
  frame_callback_.Run(video_frame, format, base::TimeTicks());
}

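// The source observes |remote_track| so that changes in the track state can
// be mirrored into the WebMediaStreamSource ready state (see OnChanged).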
MediaStreamRemoteVideoSource::MediaStreamRemoteVideoSource(
    webrtc::VideoTrackInterface* remote_track)
    : remote_track_(remote_track),
      last_state_(remote_track->state()) {
  remote_track_->RegisterObserver(this);
}

MediaStreamRemoteVideoSource::~MediaStreamRemoteVideoSource() {
  remote_track_->UnregisterObserver(this);
}

void MediaStreamRemoteVideoSource::GetCurrentSupportedFormats(
    int max_requested_width,
    int max_requested_height,
    double max_requested_frame_rate,
    const VideoCaptureDeviceFormatsCB& callback) {
  DCHECK(thread_checker_.CalledOnValidThread());
  media::VideoCaptureFormats formats;
  // Since the remote end is free to change the resolution at any point in
  // time, the supported formats are unknown.
  callback.Run(formats);
}

void MediaStreamRemoteVideoSource::StartSourceImpl(
    const media::VideoCaptureFormat& format,
    const VideoCaptureDeliverFrameCB& frame_callback) {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(!delegate_.get());
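  // The delegate is registered as a renderer on the remote track and forwards
  // incoming frames to |frame_callback| on the IO thread.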
  delegate_ = new RemoteVideoSourceDelegate(io_message_loop(), frame_callback);
  remote_track_->AddRenderer(delegate_.get());
  OnStartDone(MEDIA_DEVICE_OK);
}

void MediaStreamRemoteVideoSource::StopSourceImpl() {
  DCHECK(thread_checker_.CalledOnValidThread());
  DCHECK(state() != MediaStreamVideoSource::ENDED);
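  // Stop frame delivery by detaching the delegate from the remote track.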
  remote_track_->RemoveRenderer(delegate_.get());
}

webrtc::VideoRendererInterface*
MediaStreamRemoteVideoSource::RenderInterfaceForTest() {
  return delegate_.get();
}

void MediaStreamRemoteVideoSource::OnChanged() {
  DCHECK(thread_checker_.CalledOnValidThread());
  webrtc::MediaStreamTrackInterface::TrackState state = remote_track_->state();
  if (state != last_state_) {
    last_state_ = state;
    switch (state) {
      case webrtc::MediaStreamTrackInterface::kInitializing:
        // Ignore the kInitializing state since there is no match in
        // WebMediaStreamSource::ReadyState.
        break;
      case webrtc::MediaStreamTrackInterface::kLive:
        SetReadyState(blink::WebMediaStreamSource::ReadyStateLive);
        break;
      case webrtc::MediaStreamTrackInterface::kEnded:
        SetReadyState(blink::WebMediaStreamSource::ReadyStateEnded);
        break;
      default:
        NOTREACHED();
        break;
    }
  }
}

}  // namespace content