Update To 11.40.268.0
diff --git a/src/remoting/host/video_scheduler.cc b/src/remoting/host/video_scheduler.cc
index 7fc1983..898bd5e 100644
--- a/src/remoting/host/video_scheduler.cc
+++ b/src/remoting/host/video_scheduler.cc
 #include "remoting/protocol/cursor_shape_stub.h"
 #include "remoting/protocol/message_decoder.h"
 #include "remoting/protocol/video_stub.h"
+#include "third_party/webrtc/modules/desktop_capture/desktop_capturer.h"
 #include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
+#include "third_party/webrtc/modules/desktop_capture/mouse_cursor.h"
 #include "third_party/webrtc/modules/desktop_capture/mouse_cursor_shape.h"
-#include "third_party/webrtc/modules/desktop_capture/screen_capturer.h"
 
 namespace remoting {
 
@@ -30,11 +31,25 @@ namespace remoting {
 // TODO(hclam): Move this value to CaptureScheduler.
 static const int kMaxPendingFrames = 2;
 
+// Interval between empty keep-alive frames. These frames are sent only when the
+// stream is paused or inactive for some other reason (e.g. when blocked on
+// capturer). To prevent PseudoTCP from resetting congestion window this value
+// must be smaller than the minimum RTO used in PseudoTCP, which is 250ms.
+static const int kKeepAlivePacketIntervalMs = 200;
+
+static bool g_enable_timestamps = false;
+
+// static
+void VideoScheduler::EnableTimestampsForTests() {
+  g_enable_timestamps = true;
+}
+
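The 200 ms interval above is deliberately below PseudoTCP's 250 ms minimum RTO so an idle stream does not let the congestion window collapse. The patch drives the keep-alives with a base::DelayTimer, which stays idle until Reset() is called and fires only after the delay elapses with no further Reset(). A minimal sketch of that pattern (the KeepAliveSender class is hypothetical; only the timer usage mirrors the patch):

#include "base/location.h"
#include "base/time/time.h"
#include "base/timer/timer.h"

// Hypothetical illustration class, not part of the patch.
class KeepAliveSender {
 public:
  KeepAliveSender()
      : timer_(FROM_HERE,
               base::TimeDelta::FromMilliseconds(200),  // < PseudoTCP's 250 ms RTO.
               this,
               &KeepAliveSender::SendKeepAlive) {}

  // Called for every real video packet; postpones the next keep-alive.
  void OnRealPacketSent() { timer_.Reset(); }

 private:
  void SendKeepAlive() {
    // An empty packet would be sent here; re-arm so another keep-alive
    // follows in 200 ms if the stream stays idle.
    timer_.Reset();
  }

  base::DelayTimer<KeepAliveSender> timer_;
};
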
 VideoScheduler::VideoScheduler(
     scoped_refptr<base::SingleThreadTaskRunner> capture_task_runner,
     scoped_refptr<base::SingleThreadTaskRunner> encode_task_runner,
     scoped_refptr<base::SingleThreadTaskRunner> network_task_runner,
-    scoped_ptr<webrtc::ScreenCapturer> capturer,
+    scoped_ptr<webrtc::DesktopCapturer> capturer,
+    scoped_ptr<webrtc::MouseCursorMonitor> mouse_cursor_monitor,
     scoped_ptr<VideoEncoder> encoder,
     protocol::CursorShapeStub* cursor_stub,
     protocol::VideoStub* video_stub)
@@ -42,6 +57,7 @@ VideoScheduler::VideoScheduler(
       encode_task_runner_(encode_task_runner),
       network_task_runner_(network_task_runner),
       capturer_(capturer.Pass()),
+      mouse_cursor_monitor_(mouse_cursor_monitor.Pass()),
       encoder_(encoder.Pass()),
       cursor_stub_(cursor_stub),
       video_stub_(video_stub),
@@ -52,6 +68,7 @@ VideoScheduler::VideoScheduler(
       sequence_number_(0) {
   DCHECK(network_task_runner_->BelongsToCurrentThread());
   DCHECK(capturer_);
+  DCHECK(mouse_cursor_monitor_);
   DCHECK(encoder_);
   DCHECK(cursor_stub_);
   DCHECK(video_stub_);
@@ -70,14 +87,18 @@ void VideoScheduler::OnCaptureCompleted(webrtc::DesktopFrame* frame) {
 
   scoped_ptr<webrtc::DesktopFrame> owned_frame(frame);
 
-  if (frame) {
+  if (owned_frame) {
     scheduler_.RecordCaptureTime(
-        base::TimeDelta::FromMilliseconds(frame->capture_time_ms()));
+        base::TimeDelta::FromMilliseconds(owned_frame->capture_time_ms()));
   }
 
+  // Even when |frame| is NULL we still need to post it to the encode thread
+  // to make sure frames are freed in the same order they are received and
+  // that we don't start capturing frame n+2 before frame n is freed.
   encode_task_runner_->PostTask(
       FROM_HERE, base::Bind(&VideoScheduler::EncodeFrame, this,
-                            base::Passed(&owned_frame), sequence_number_));
+                            base::Passed(&owned_frame), sequence_number_,
+                            base::TimeTicks::Now()));
 
   // If a frame was skipped, try to capture it again.
   if (did_skip_frame_) {
@@ -86,11 +107,10 @@ void VideoScheduler::OnCaptureCompleted(webrtc::DesktopFrame* frame) {
   }
 }
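The comment above states the invariant the rest of the patch relies on: every frame, even a null one returned by a failed capture, is posted to the encode thread so frames are released in capture order and at most kMaxPendingFrames are in flight. Ownership moves across threads via base::Passed(), roughly as in this sketch (ProcessFrame() and PostFrame() are hypothetical stand-ins for EncodeFrame() and the posting code):

#include "base/bind.h"
#include "base/location.h"
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_ptr.h"
#include "base/single_thread_task_runner.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"

// Hypothetical stand-in for VideoScheduler::EncodeFrame().
void ProcessFrame(scoped_ptr<webrtc::DesktopFrame> frame) {
  // |frame| may be null when capture failed; it is still consumed here so
  // frames are destroyed in the order they were produced.
}

void PostFrame(const scoped_refptr<base::SingleThreadTaskRunner>& encode_runner,
               scoped_ptr<webrtc::DesktopFrame> frame) {
  // base::Passed() moves the scoped_ptr into the closure; the task owns the
  // frame until ProcessFrame() runs on the encode thread.
  encode_runner->PostTask(
      FROM_HERE, base::Bind(&ProcessFrame, base::Passed(&frame)));
}
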
 
-void VideoScheduler::OnCursorShapeChanged(
-    webrtc::MouseCursorShape* cursor_shape) {
+void VideoScheduler::OnMouseCursor(webrtc::MouseCursor* cursor) {
   DCHECK(capture_task_runner_->BelongsToCurrentThread());
 
-  scoped_ptr<webrtc::MouseCursorShape> owned_cursor(cursor_shape);
+  scoped_ptr<webrtc::MouseCursor> owned_cursor(cursor);
 
   // Do nothing if the scheduler is being stopped.
   if (!capturer_)
@@ -98,17 +118,33 @@ void VideoScheduler::OnCursorShapeChanged(
 
   scoped_ptr<protocol::CursorShapeInfo> cursor_proto(
       new protocol::CursorShapeInfo());
-  cursor_proto->set_width(cursor_shape->size.width());
-  cursor_proto->set_height(cursor_shape->size.height());
-  cursor_proto->set_hotspot_x(cursor_shape->hotspot.x());
-  cursor_proto->set_hotspot_y(cursor_shape->hotspot.y());
-  cursor_proto->set_data(cursor_shape->data);
+  cursor_proto->set_width(cursor->image()->size().width());
+  cursor_proto->set_height(cursor->image()->size().height());
+  cursor_proto->set_hotspot_x(cursor->hotspot().x());
+  cursor_proto->set_hotspot_y(cursor->hotspot().y());
+
+  std::string data;
+  uint8_t* current_row = cursor->image()->data();
+  for (int y = 0; y < cursor->image()->size().height(); ++y) {
+    cursor_proto->mutable_data()->append(
+        current_row,
+        current_row + cursor->image()->size().width() *
+            webrtc::DesktopFrame::kBytesPerPixel);
+    current_row += cursor->image()->stride();
+  }
 
   network_task_runner_->PostTask(
       FROM_HERE, base::Bind(&VideoScheduler::SendCursorShape, this,
                             base::Passed(&cursor_proto)));
 }
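The loop above copies the cursor image row by row rather than in a single memcpy because the image's stride can be wider than width * kBytesPerPixel; advancing by stride() skips any per-row padding. A generic sketch of the same idea (the helper below is hypothetical and assumes a packed 4-byte-per-pixel image):

#include <stdint.h>
#include <string>

// Hypothetical helper: pack a possibly-padded image into a contiguous string.
std::string PackImageRows(const uint8_t* data,
                          int width,
                          int height,
                          int stride,
                          int bytes_per_pixel) {
  std::string packed;
  packed.reserve(static_cast<size_t>(width) * height * bytes_per_pixel);
  const uint8_t* row = data;
  for (int y = 0; y < height; ++y) {
    // Copy only the visible pixels of this row, then jump stride bytes to
    // the start of the next row (stride >= width * bytes_per_pixel).
    packed.append(reinterpret_cast<const char*>(row),
                  static_cast<size_t>(width) * bytes_per_pixel);
    row += stride;
  }
  return packed;
}
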
 
+void VideoScheduler::OnMouseCursorPosition(
+    webrtc::MouseCursorMonitor::CursorState state,
+    const webrtc::DesktopVector& position) {
+  // We're not subscribing to mouse position changes.
+  NOTREACHED();
+}
+
 void VideoScheduler::Start() {
   DCHECK(network_task_runner_->BelongsToCurrentThread());
 
@@ -123,8 +159,10 @@ void VideoScheduler::Stop() {
   cursor_stub_ = NULL;
   video_stub_ = NULL;
 
-  capture_task_runner_->PostTask(FROM_HERE,
-      base::Bind(&VideoScheduler::StopOnCaptureThread, this));
+  keep_alive_timer_.reset();
+
+  capture_task_runner_->PostTask(
+      FROM_HERE, base::Bind(&VideoScheduler::StopOnCaptureThread, this));
 }
 
 void VideoScheduler::Pause(bool pause) {
@@ -156,9 +194,37 @@ void VideoScheduler::UpdateSequenceNumber(int64 sequence_number) {
   sequence_number_ = sequence_number;
 }
 
+void VideoScheduler::SetLosslessEncode(bool want_lossless) {
+  if (!encode_task_runner_->BelongsToCurrentThread()) {
+    DCHECK(network_task_runner_->BelongsToCurrentThread());
+    encode_task_runner_->PostTask(
+        FROM_HERE, base::Bind(&VideoScheduler::SetLosslessEncode,
+                              this, want_lossless));
+    return;
+  }
+
+  encoder_->SetLosslessEncode(want_lossless);
+}
+
+void VideoScheduler::SetLosslessColor(bool want_lossless) {
+  if (!encode_task_runner_->BelongsToCurrentThread()) {
+    DCHECK(network_task_runner_->BelongsToCurrentThread());
+    encode_task_runner_->PostTask(
+        FROM_HERE, base::Bind(&VideoScheduler::SetLosslessColor,
+                              this, want_lossless));
+    return;
+  }
+
+  encoder_->SetLosslessColor(want_lossless);
+}
+
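SetLosslessEncode() and SetLosslessColor() above use the usual "hop to the owning thread" idiom: when called on the network thread they re-post the same call to the encode thread and return, so |encoder_| is only ever touched on its own thread. A stripped-down sketch of the idiom (the Example class is hypothetical; the refcounting matches how VideoScheduler keeps itself alive across the post):

#include "base/bind.h"
#include "base/location.h"
#include "base/memory/ref_counted.h"
#include "base/single_thread_task_runner.h"

// Hypothetical class illustrating the re-posting idiom.
class Example : public base::RefCountedThreadSafe<Example> {
 public:
  explicit Example(scoped_refptr<base::SingleThreadTaskRunner> encode_runner)
      : encode_runner_(encode_runner), option_(false) {}

  void SetOption(bool enabled) {
    if (!encode_runner_->BelongsToCurrentThread()) {
      // Bounce to the encode thread; the bound |this| keeps the object alive.
      encode_runner_->PostTask(
          FROM_HERE, base::Bind(&Example::SetOption, this, enabled));
      return;
    }
    option_ = enabled;  // Now guaranteed to be on the encode thread.
  }

 private:
  friend class base::RefCountedThreadSafe<Example>;
  ~Example() {}

  scoped_refptr<base::SingleThreadTaskRunner> encode_runner_;
  bool option_;
};
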
 // Private methods -----------------------------------------------------------
 
 VideoScheduler::~VideoScheduler() {
+  // Destroy the capturer and encoder on their respective threads.
+  capture_task_runner_->DeleteSoon(FROM_HERE, capturer_.release());
+  capture_task_runner_->DeleteSoon(FROM_HERE, mouse_cursor_monitor_.release());
+  encode_task_runner_->DeleteSoon(FROM_HERE, encoder_.release());
 }
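The destructor hands the capturer, cursor monitor and encoder back to the threads that use them: DeleteSoon() posts a deletion task to the target task runner, so each object is destroyed on its own thread even though ~VideoScheduler() may run elsewhere. A small sketch of the call (ReleaseOnThread() and ThreadBoundThing are hypothetical):

#include "base/location.h"
#include "base/memory/ref_counted.h"
#include "base/memory/scoped_ptr.h"
#include "base/single_thread_task_runner.h"

// Hypothetical type that must only be destroyed on its owning thread.
class ThreadBoundThing {
 public:
  ~ThreadBoundThing() {}
};

// Transfer |thing| to |owning_runner| for deletion. release() is used so the
// scoped_ptr gives up ownership before DeleteSoon() takes the raw pointer.
void ReleaseOnThread(
    const scoped_refptr<base::SingleThreadTaskRunner>& owning_runner,
    scoped_ptr<ThreadBoundThing> thing) {
  owning_runner->DeleteSoon(FROM_HERE, thing.release());
}
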
 
 // Capturer thread -------------------------------------------------------------
@@ -167,13 +233,18 @@ void VideoScheduler::StartOnCaptureThread() {
   DCHECK(capture_task_runner_->BelongsToCurrentThread());
   DCHECK(!capture_timer_);
 
-  // Start the capturer and let it notify us if cursor shape changes.
-  capturer_->SetMouseShapeObserver(this);
+  // Start mouse cursor monitor.
+  mouse_cursor_monitor_->Init(this, webrtc::MouseCursorMonitor::SHAPE_ONLY);
+
+  // Start the capturer.
   capturer_->Start(this);
 
   capture_timer_.reset(new base::OneShotTimer<VideoScheduler>());
+  keep_alive_timer_.reset(new base::DelayTimer<VideoScheduler>(
+      FROM_HERE, base::TimeDelta::FromMilliseconds(kKeepAlivePacketIntervalMs),
+      this, &VideoScheduler::SendKeepAlivePacket));
 
-  // Capture first frame immedately.
+  // Capture first frame immediately.
   CaptureNextFrame();
 }
 
@@ -223,6 +294,9 @@ void VideoScheduler::CaptureNextFrame() {
 
   capture_pending_ = true;
 
+  // Capture the mouse shape.
+  mouse_cursor_monitor_->Capture();
+
   // And finally perform one capture.
   capturer_->Capture(webrtc::DesktopRegion());
 }
@@ -249,19 +323,39 @@ void VideoScheduler::SendVideoPacket(scoped_ptr<VideoPacket> packet) {
     return;
 
   video_stub_->ProcessVideoPacket(
-      packet.Pass(), base::Bind(&VideoScheduler::VideoFrameSentCallback, this));
+      packet.Pass(), base::Bind(&VideoScheduler::OnVideoPacketSent, this));
 }
 
-void VideoScheduler::VideoFrameSentCallback() {
+void VideoScheduler::OnVideoPacketSent() {
   DCHECK(network_task_runner_->BelongsToCurrentThread());
 
   if (!video_stub_)
     return;
 
+  keep_alive_timer_->Reset();
+
   capture_task_runner_->PostTask(
       FROM_HERE, base::Bind(&VideoScheduler::FrameCaptureCompleted, this));
 }
 
+void VideoScheduler::SendKeepAlivePacket() {
+  DCHECK(network_task_runner_->BelongsToCurrentThread());
+
+  if (!video_stub_)
+    return;
+
+  video_stub_->ProcessVideoPacket(
+      make_scoped_ptr(new VideoPacket()),
+      base::Bind(&VideoScheduler::OnKeepAlivePacketSent, this));
+}
+
+void VideoScheduler::OnKeepAlivePacketSent() {
+  DCHECK(network_task_runner_->BelongsToCurrentThread());
+
+  if (keep_alive_timer_)
+    keep_alive_timer_->Reset();
+}
+
 void VideoScheduler::SendCursorShape(
     scoped_ptr<protocol::CursorShapeInfo> cursor_shape) {
   DCHECK(network_task_runner_->BelongsToCurrentThread());
@@ -276,23 +370,29 @@ void VideoScheduler::SendCursorShape(
 
 void VideoScheduler::EncodeFrame(
     scoped_ptr<webrtc::DesktopFrame> frame,
-    int64 sequence_number) {
+    int64 sequence_number,
+    base::TimeTicks timestamp) {
   DCHECK(encode_task_runner_->BelongsToCurrentThread());
 
-  // If there is nothing to encode then send an empty keep-alive packet.
+  // If there is nothing to encode then send an empty packet.
   if (!frame || frame->updated_region().is_empty()) {
+    capture_task_runner_->DeleteSoon(FROM_HERE, frame.release());
     scoped_ptr<VideoPacket> packet(new VideoPacket());
     packet->set_client_sequence_number(sequence_number);
     network_task_runner_->PostTask(
-        FROM_HERE, base::Bind(&VideoScheduler::SendVideoPacket, this,
-                              base::Passed(&packet)));
-    capture_task_runner_->DeleteSoon(FROM_HERE, frame.release());
+        FROM_HERE,
+        base::Bind(
+            &VideoScheduler::SendVideoPacket, this, base::Passed(&packet)));
     return;
   }
 
   scoped_ptr<VideoPacket> packet = encoder_->Encode(*frame);
   packet->set_client_sequence_number(sequence_number);
 
+  if (g_enable_timestamps) {
+    packet->set_timestamp(timestamp.ToInternalValue());
+  }
+
   // Destroy the frame before sending |packet| because SendVideoPacket() may
   // trigger another frame to be captured, and the screen capturer expects the
   // old frame to be freed by then.
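
The timestamp written above when g_enable_timestamps is set is the base::TimeTicks recorded in OnCaptureCompleted(), serialized with ToInternalValue(). A hypothetical test-side helper showing how such a value could be turned back into a capture-to-now latency (assumes ToInternalValue() is the usual microsecond tick count):

#include <stdint.h>

#include "base/time/time.h"

// Hypothetical helper, not part of the patch: reconstruct the TimeTicks that
// was stored with ToInternalValue() and report the elapsed time since then.
int64_t MicrosecondsSinceCapture(int64_t packet_timestamp) {
  base::TimeTicks capture_time =
      base::TimeTicks::FromInternalValue(packet_timestamp);
  return (base::TimeTicks::Now() - capture_time).InMicroseconds();
}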