#include "content/browser/renderer_host/media/video_capture_controller.h"
+#include <map>
#include <set>
#include "base/bind.h"
#include "base/debug/trace_event.h"
-#include "base/memory/scoped_ptr.h"
+#include "base/metrics/histogram.h"
+#include "base/metrics/sparse_histogram.h"
#include "base/stl_util.h"
#include "content/browser/renderer_host/media/media_stream_manager.h"
#include "content/browser/renderer_host/media/video_capture_manager.h"
+#include "content/common/gpu/client/gl_helper.h"
#include "content/public/browser/browser_thread.h"
+#include "gpu/command_buffer/common/mailbox_holder.h"
#include "media/base/video_frame.h"
#include "media/base/video_util.h"
#include "media/base/yuv_convert.h"
-
-#if !defined(AVOID_LIBYUV_FOR_ANDROID_WEBVIEW)
#include "third_party/libyuv/include/libyuv.h"
+
+#if defined(OS_ANDROID)
+#include "content/browser/renderer_host/image_transport_factory_android.h"
+#else
+#include "content/browser/compositor/image_transport_factory.h"
#endif
+using media::VideoCaptureFormat;
+
namespace content {
-// The number of buffers that VideoCaptureBufferPool should allocate.
-static const int kNoOfBuffers = 3;
+namespace {
+
+static const int kInfiniteRatio = 99999;
+
+// Records width:height as an integer percentage (width * 100 / height, e.g.
+// 4:3 -> 133) into a sparse UMA histogram. A zero height is reported as the
+// sentinel kInfiniteRatio to avoid division by zero.
+#define UMA_HISTOGRAM_ASPECT_RATIO(name, width, height) \
+  UMA_HISTOGRAM_SPARSE_SLOWLY( \
+      name, \
+      (height) ? ((width) * 100) / (height) : kInfiniteRatio);
+
+// Ties a producer reservation in a VideoCaptureBufferPool to the lifetime of
+// a VideoCaptureDevice::Client::Buffer: when the last reference to this
+// (ref-counted) buffer is dropped, the reservation for id() is handed back
+// to the pool.
+class PoolBuffer : public media::VideoCaptureDevice::Client::Buffer {
+ public:
+  // |pool| must be non-NULL and outlive this buffer (held by ref here).
+  PoolBuffer(const scoped_refptr<VideoCaptureBufferPool>& pool,
+             int buffer_id,
+             void* data,
+             size_t size)
+      : Buffer(buffer_id, data, size), pool_(pool) {
+    DCHECK(pool_);
+  }
+
+ private:
+  // Private: destruction only via ref-counting; releases the reservation.
+  virtual ~PoolBuffer() { pool_->RelinquishProducerReservation(id()); }
+
+  const scoped_refptr<VideoCaptureBufferPool> pool_;
+};
+
+// Adapts a GLHelper to the media::VideoFrame::SyncPointClient interface so
+// that a VideoFrame's release sync point can be inserted / waited on through
+// the GL context owned by the image transport factory. |gl_helper| is not
+// owned and must outlive this adapter.
+class SyncPointClientImpl : public media::VideoFrame::SyncPointClient {
+ public:
+  explicit SyncPointClientImpl(GLHelper* gl_helper) : gl_helper_(gl_helper) {}
+  virtual ~SyncPointClientImpl() {}
+  virtual uint32 InsertSyncPoint() OVERRIDE {
+    return gl_helper_->InsertSyncPoint();
+  }
+  virtual void WaitSyncPoint(uint32 sync_point) OVERRIDE {
+    gl_helper_->WaitSyncPoint(sync_point);
+  }
+
+ private:
+  GLHelper* gl_helper_;  // Not owned.
+};
+
+// UI-thread helper invoked when a consumer returns a texture-backed frame:
+// waits on the consumer-supplied |sync_point| and then stores a fresh release
+// sync point on |video_frame| via the platform's GLHelper.
+void ReturnVideoFrame(const scoped_refptr<media::VideoFrame>& video_frame,
+                      uint32 sync_point) {
+  DCHECK_CURRENTLY_ON(BrowserThread::UI);
+#if defined(OS_ANDROID)
+  GLHelper* gl_helper =
+      ImageTransportFactoryAndroid::GetInstance()->GetGLHelper();
+#else
+  GLHelper* gl_helper = ImageTransportFactory::GetInstance()->GetGLHelper();
+#endif
+  DCHECK(gl_helper);
+  // UpdateReleaseSyncPoint() creates a new sync_point using |gl_helper|, so
+  // wait the given |sync_point| using |gl_helper|.
+  gl_helper->WaitSyncPoint(sync_point);
+  SyncPointClientImpl client(gl_helper);
+  video_frame->UpdateReleaseSyncPoint(&client);
+}
+
+} // anonymous namespace
struct VideoCaptureController::ControllerClient {
- ControllerClient(
- const VideoCaptureControllerID& id,
- VideoCaptureControllerEventHandler* handler,
- base::ProcessHandle render_process,
- const media::VideoCaptureParams& params)
+ ControllerClient(const VideoCaptureControllerID& id,
+ VideoCaptureControllerEventHandler* handler,
+ base::ProcessHandle render_process,
+ media::VideoCaptureSessionId session_id,
+ const media::VideoCaptureParams& params)
: controller_id(id),
event_handler(handler),
render_process_handle(render_process),
+ session_id(session_id),
parameters(params),
- session_closed(false) {
- }
+ session_closed(false) {}
~ControllerClient() {}
// ID used for identifying this object.
- VideoCaptureControllerID controller_id;
- VideoCaptureControllerEventHandler* event_handler;
+ const VideoCaptureControllerID controller_id;
+ VideoCaptureControllerEventHandler* const event_handler;
// Handle to the render process that will receive the capture buffers.
- base::ProcessHandle render_process_handle;
- media::VideoCaptureParams parameters;
+ const base::ProcessHandle render_process_handle;
+ const media::VideoCaptureSessionId session_id;
+ const media::VideoCaptureParams parameters;
// Buffers that are currently known to this client.
std::set<int> known_buffers;
- // Buffers currently held by this client.
- std::set<int> active_buffers;
+ // Buffers currently held by this client, and syncpoint callback to call when
+ // they are returned from the client.
+ typedef std::map<int, scoped_refptr<media::VideoFrame> > ActiveBufferMap;
+ ActiveBufferMap active_buffers;
// State of capture session, controlled by VideoCaptureManager directly. This
// transitions to true as soon as StopSession() occurs, at which point the
virtual ~VideoCaptureDeviceClient();
// VideoCaptureDevice::Client implementation.
- virtual scoped_refptr<media::VideoFrame> ReserveOutputBuffer(
+ virtual scoped_refptr<Buffer> ReserveOutputBuffer(
+ media::VideoFrame::Format format,
const gfx::Size& size) OVERRIDE;
- virtual void OnIncomingCapturedFrame(const uint8* data,
- int length,
- base::Time timestamp,
- int rotation,
- bool flip_vert,
- bool flip_horiz) OVERRIDE;
+ virtual void OnIncomingCapturedData(const uint8* data,
+ int length,
+ const VideoCaptureFormat& frame_format,
+ int rotation,
+ base::TimeTicks timestamp) OVERRIDE;
virtual void OnIncomingCapturedVideoFrame(
+ const scoped_refptr<Buffer>& buffer,
+ const VideoCaptureFormat& buffer_format,
const scoped_refptr<media::VideoFrame>& frame,
- base::Time timestamp) OVERRIDE;
- virtual void OnError() OVERRIDE;
- virtual void OnFrameInfo(
- const media::VideoCaptureCapability& info) OVERRIDE;
- virtual void OnFrameInfoChanged(
- const media::VideoCaptureCapability& info) OVERRIDE;
+ base::TimeTicks timestamp) OVERRIDE;
+ virtual void OnError(const std::string& reason) OVERRIDE;
+ virtual void OnLog(const std::string& message) OVERRIDE;
private:
- scoped_refptr<media::VideoFrame> DoReserveI420VideoFrame(
- const gfx::Size& size,
- int rotation);
+ scoped_refptr<Buffer> DoReserveOutputBuffer(media::VideoFrame::Format format,
+ const gfx::Size& dimensions);
// The controller to which we post events.
const base::WeakPtr<VideoCaptureController> controller_;
// The pool of shared-memory buffers used for capturing.
const scoped_refptr<VideoCaptureBufferPool> buffer_pool_;
- // Chopped pixels in width/height in case video capture device has odd
- // numbers for width/height.
- int chopped_width_;
- int chopped_height_;
-
- // Tracks the current frame format.
- media::VideoCaptureCapability frame_info_;
+ bool first_frame_;
};
-VideoCaptureController::VideoCaptureController()
-    : buffer_pool_(new VideoCaptureBufferPool(kNoOfBuffers)),
+// |max_buffers| bounds the number of buffers the pool may allocate
+// (previously the hard-coded kNoOfBuffers).
+VideoCaptureController::VideoCaptureController(int max_buffers)
+    : buffer_pool_(new VideoCaptureBufferPool(max_buffers)),
      state_(VIDEO_CAPTURE_STATE_STARTED),
      weak_ptr_factory_(this) {
}
VideoCaptureController::VideoCaptureDeviceClient::VideoCaptureDeviceClient(
    const base::WeakPtr<VideoCaptureController>& controller,
    const scoped_refptr<VideoCaptureBufferPool>& buffer_pool)
-    : controller_(controller),
-      buffer_pool_(buffer_pool),
-      chopped_width_(0),
-      chopped_height_(0) {}
+    // first_frame_ starts true so the one-time capture-format UMA stats are
+    // emitted only for the first frame seen in OnIncomingCapturedData().
+    : controller_(controller), buffer_pool_(buffer_pool), first_frame_(true) {}
VideoCaptureController::VideoCaptureDeviceClient::~VideoCaptureDeviceClient() {}
const VideoCaptureControllerID& id,
VideoCaptureControllerEventHandler* event_handler,
base::ProcessHandle render_process,
+ media::VideoCaptureSessionId session_id,
const media::VideoCaptureParams& params) {
- DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DCHECK_CURRENTLY_ON(BrowserThread::IO);
DVLOG(1) << "VideoCaptureController::AddClient, id " << id.device_id
- << ", (" << params.requested_format.width
- << ", " << params.requested_format.height
+ << ", " << params.requested_format.frame_size.ToString()
<< ", " << params.requested_format.frame_rate
- << ", " << params.session_id
+ << ", " << session_id
<< ")";
+ // If this is the first client added to the controller, cache the parameters.
+ if (!controller_clients_.size())
+ video_capture_format_ = params.requested_format;
+
// Signal error in case device is already in error state.
if (state_ == VIDEO_CAPTURE_STATE_ERROR) {
event_handler->OnError(id);
if (FindClient(id, event_handler, controller_clients_))
return;
- ControllerClient* client = new ControllerClient(id, event_handler,
- render_process, params);
+ ControllerClient* client = new ControllerClient(
+ id, event_handler, render_process, session_id, params);
// If we already have gotten frame_info from the device, repeat it to the new
// client.
if (state_ == VIDEO_CAPTURE_STATE_STARTED) {
int VideoCaptureController::RemoveClient(
const VideoCaptureControllerID& id,
VideoCaptureControllerEventHandler* event_handler) {
- DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DCHECK_CURRENTLY_ON(BrowserThread::IO);
DVLOG(1) << "VideoCaptureController::RemoveClient, id " << id.device_id;
ControllerClient* client = FindClient(id, event_handler, controller_clients_);
return kInvalidMediaCaptureSessionId;
// Take back all buffers held by the |client|.
- for (std::set<int>::iterator buffer_it = client->active_buffers.begin();
+ for (ControllerClient::ActiveBufferMap::iterator buffer_it =
+ client->active_buffers.begin();
buffer_it != client->active_buffers.end();
++buffer_it) {
- int buffer_id = *buffer_it;
- buffer_pool_->RelinquishConsumerHold(buffer_id, 1);
+ buffer_pool_->RelinquishConsumerHold(buffer_it->first, 1);
}
client->active_buffers.clear();
- int session_id = client->parameters.session_id;
+ int session_id = client->session_id;
controller_clients_.remove(client);
delete client;
}
void VideoCaptureController::StopSession(int session_id) {
- DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DCHECK_CURRENTLY_ON(BrowserThread::IO);
DVLOG(1) << "VideoCaptureController::StopSession, id " << session_id;
ControllerClient* client = FindClient(session_id, controller_clients_);
void VideoCaptureController::ReturnBuffer(
    const VideoCaptureControllerID& id,
    VideoCaptureControllerEventHandler* event_handler,
-    int buffer_id) {
-  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+    int buffer_id,
+    uint32 sync_point) {
+  DCHECK_CURRENTLY_ON(BrowserThread::IO);
  ControllerClient* client = FindClient(id, event_handler, controller_clients_);
  // If this buffer is not held by this client, or this client doesn't exist
  // in controller, do nothing.
-  if (!client || !client->active_buffers.erase(buffer_id)) {
+  ControllerClient::ActiveBufferMap::iterator iter;
+  if (!client || (iter = client->active_buffers.find(buffer_id)) ==
+                     client->active_buffers.end()) {
    NOTREACHED();
    return;
  }
-
+  // Keep the frame alive past the erase so it can be forwarded below.
+  scoped_refptr<media::VideoFrame> frame = iter->second;
+  client->active_buffers.erase(iter);
  buffer_pool_->RelinquishConsumerHold(buffer_id, 1);
+
+  // A nonzero |sync_point| indicates the consumer used the frame on the GPU;
+  // hop to the UI thread to wait on it and refresh the frame's release sync
+  // point before the underlying buffer can be reused.
+  if (sync_point)
+    BrowserThread::PostTask(BrowserThread::UI,
+                            FROM_HERE,
+                            base::Bind(&ReturnVideoFrame, frame, sync_point));
}
-scoped_refptr<media::VideoFrame>
+// Returns the capture format cached by the controller (seeded from the first
+// added client's requested format in AddClient()). IO thread only.
+const media::VideoCaptureFormat&
+VideoCaptureController::GetVideoCaptureFormat() const {
+  DCHECK_CURRENTLY_ON(BrowserThread::IO);
+  return video_capture_format_;
+}
+
+// VideoCaptureDevice::Client implementation: thin thunk to
+// DoReserveOutputBuffer(); may return NULL when no pool buffer is available.
+scoped_refptr<media::VideoCaptureDevice::Client::Buffer>
VideoCaptureController::VideoCaptureDeviceClient::ReserveOutputBuffer(
+    media::VideoFrame::Format format,
    const gfx::Size& size) {
-  return DoReserveI420VideoFrame(size, 0);
+  return DoReserveOutputBuffer(format, size);
}
-void VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedFrame(
+void VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedData(
const uint8* data,
int length,
- base::Time timestamp,
+ const VideoCaptureFormat& frame_format,
int rotation,
- bool flip_vert,
- bool flip_horiz) {
- TRACE_EVENT0("video", "VideoCaptureController::OnIncomingCapturedFrame");
+ base::TimeTicks timestamp) {
+ TRACE_EVENT0("video", "VideoCaptureController::OnIncomingCapturedData");
- if (!frame_info_.IsValid())
+ if (!frame_format.IsValid())
return;
- scoped_refptr<media::VideoFrame> dst = DoReserveI420VideoFrame(
- gfx::Size(frame_info_.width, frame_info_.height), rotation);
+ // Chopped pixels in width/height in case video capture device has odd
+ // numbers for width/height.
+ int chopped_width = 0;
+ int chopped_height = 0;
+ int new_unrotated_width = frame_format.frame_size.width();
+ int new_unrotated_height = frame_format.frame_size.height();
+
+ if (new_unrotated_width & 1) {
+ --new_unrotated_width;
+ chopped_width = 1;
+ }
+ if (new_unrotated_height & 1) {
+ --new_unrotated_height;
+ chopped_height = 1;
+ }
- if (!dst.get())
+ int destination_width = new_unrotated_width;
+ int destination_height = new_unrotated_height;
+ if (rotation == 90 || rotation == 270) {
+ destination_width = new_unrotated_height;
+ destination_height = new_unrotated_width;
+ }
+ const gfx::Size dimensions(destination_width, destination_height);
+ if (!media::VideoFrame::IsValidConfig(media::VideoFrame::I420,
+ dimensions,
+ gfx::Rect(dimensions),
+ dimensions)) {
return;
-#if !defined(AVOID_LIBYUV_FOR_ANDROID_WEBVIEW)
+ }
- uint8* yplane = dst->data(media::VideoFrame::kYPlane);
- uint8* uplane = dst->data(media::VideoFrame::kUPlane);
- uint8* vplane = dst->data(media::VideoFrame::kVPlane);
- int yplane_stride = frame_info_.width;
- int uv_plane_stride = (frame_info_.width + 1) / 2;
+ scoped_refptr<Buffer> buffer =
+ DoReserveOutputBuffer(media::VideoFrame::I420, dimensions);
+
+ if (!buffer)
+ return;
+ uint8* yplane = NULL;
+ bool flip = false;
+ yplane = reinterpret_cast<uint8*>(buffer->data());
+ uint8* uplane =
+ yplane +
+ media::VideoFrame::PlaneAllocationSize(
+ media::VideoFrame::I420, media::VideoFrame::kYPlane, dimensions);
+ uint8* vplane =
+ uplane +
+ media::VideoFrame::PlaneAllocationSize(
+ media::VideoFrame::I420, media::VideoFrame::kUPlane, dimensions);
+ int yplane_stride = dimensions.width();
+ int uv_plane_stride = yplane_stride / 2;
int crop_x = 0;
int crop_y = 0;
- int destination_width = frame_info_.width;
- int destination_height = frame_info_.height;
libyuv::FourCC origin_colorspace = libyuv::FOURCC_ANY;
- // Assuming rotation happens first and flips next, we can consolidate both
- // vertical and horizontal flips together with rotation into two variables:
- // new_rotation = (rotation + 180 * vertical_flip) modulo 360
- // new_vertical_flip = horizontal_flip XOR vertical_flip
- int new_rotation_angle = (rotation + 180 * flip_vert) % 360;
+
libyuv::RotationMode rotation_mode = libyuv::kRotate0;
- if (new_rotation_angle == 90)
+ if (rotation == 90)
rotation_mode = libyuv::kRotate90;
- else if (new_rotation_angle == 180)
+ else if (rotation == 180)
rotation_mode = libyuv::kRotate180;
- else if (new_rotation_angle == 270)
+ else if (rotation == 270)
rotation_mode = libyuv::kRotate270;
- switch (frame_info_.color) {
+ switch (frame_format.pixel_format) {
case media::PIXEL_FORMAT_UNKNOWN: // Color format not set.
break;
case media::PIXEL_FORMAT_I420:
- DCHECK(!chopped_width_ && !chopped_height_);
+ DCHECK(!chopped_width && !chopped_height);
origin_colorspace = libyuv::FOURCC_I420;
break;
case media::PIXEL_FORMAT_YV12:
- DCHECK(!chopped_width_ && !chopped_height_);
+ DCHECK(!chopped_width && !chopped_height);
origin_colorspace = libyuv::FOURCC_YV12;
break;
case media::PIXEL_FORMAT_NV21:
- DCHECK(!chopped_width_ && !chopped_height_);
- origin_colorspace = libyuv::FOURCC_NV12;
+ DCHECK(!chopped_width && !chopped_height);
+ origin_colorspace = libyuv::FOURCC_NV21;
break;
case media::PIXEL_FORMAT_YUY2:
- DCHECK(!chopped_width_ && !chopped_height_);
+ DCHECK(!chopped_width && !chopped_height);
origin_colorspace = libyuv::FOURCC_YUY2;
break;
case media::PIXEL_FORMAT_UYVY:
- DCHECK(!chopped_width_ && !chopped_height_);
+ DCHECK(!chopped_width && !chopped_height);
origin_colorspace = libyuv::FOURCC_UYVY;
break;
case media::PIXEL_FORMAT_RGB24:
- origin_colorspace = libyuv::FOURCC_RAW;
+ origin_colorspace = libyuv::FOURCC_24BG;
+#if defined(OS_WIN)
+ // TODO(wjia): Currently, for RGB24 on WIN, capture device always
+ // passes in positive src_width and src_height. Remove this hardcoded
+ // value when nagative src_height is supported. The negative src_height
+ // indicates that vertical flipping is needed.
+ flip = true;
+#endif
break;
case media::PIXEL_FORMAT_ARGB:
origin_colorspace = libyuv::FOURCC_ARGB;
NOTREACHED();
}
- int need_convert_rgb24_on_win = false;
-#if defined(OS_WIN)
- // kRGB24 on Windows start at the bottom line and has a negative stride. This
- // is not supported by libyuv, so the media API is used instead.
- if (frame_info_.color == media::PIXEL_FORMAT_RGB24) {
- // Rotation and flipping is not supported in kRGB24 and OS_WIN case.
- DCHECK(!rotation && !flip_vert && !flip_horiz);
- need_convert_rgb24_on_win = true;
- }
-#endif
- if (need_convert_rgb24_on_win) {
- int rgb_stride = -3 * (frame_info_.width + chopped_width_);
- const uint8* rgb_src =
- data + 3 * (frame_info_.width + chopped_width_) *
- (frame_info_.height - 1 + chopped_height_);
- media::ConvertRGB24ToYUV(rgb_src,
- yplane,
- uplane,
- vplane,
- frame_info_.width,
- frame_info_.height,
- rgb_stride,
- yplane_stride,
- uv_plane_stride);
- } else {
- if (new_rotation_angle==90 || new_rotation_angle==270){
- // To be compatible with non-libyuv code in RotatePlaneByPixels, when
- // rotating by 90/270, only the maximum square portion located in the
- // center of the image is rotated. F.i. 640x480 pixels, only the central
- // 480 pixels would be rotated and the leftmost and rightmost 80 columns
- // would be ignored. This process is called letterboxing.
- int letterbox_thickness = abs(frame_info_.width - frame_info_.height) / 2;
- if (destination_width > destination_height) {
- yplane += letterbox_thickness;
- uplane += letterbox_thickness / 2;
- vplane += letterbox_thickness / 2;
- destination_width = destination_height;
- } else {
- yplane += letterbox_thickness * destination_width;
- uplane += (letterbox_thickness * destination_width) / 2;
- vplane += (letterbox_thickness * destination_width) / 2;
- destination_height = destination_width;
- }
- }
- libyuv::ConvertToI420(
- data, length,
- yplane, yplane_stride,
- uplane, uv_plane_stride,
- vplane, uv_plane_stride,
- crop_x, crop_y,
- frame_info_.width + chopped_width_,
- frame_info_.height * (flip_vert ^ flip_horiz ? -1 : 1),
- destination_width,
- destination_height,
- rotation_mode,
- origin_colorspace);
- }
-#else
- // Libyuv is not linked in for Android WebView builds, but video capture is
- // not used in those builds either. Whenever libyuv is added in that build,
- // address all these #ifdef parts, see http://crbug.com/299611 .
- NOTREACHED();
-#endif // if !defined(AVOID_LIBYUV_FOR_ANDROID_WEBVIEW)
+ libyuv::ConvertToI420(data,
+ length,
+ yplane,
+ yplane_stride,
+ uplane,
+ uv_plane_stride,
+ vplane,
+ uv_plane_stride,
+ crop_x,
+ crop_y,
+ frame_format.frame_size.width(),
+ (flip ? -frame_format.frame_size.height() :
+ frame_format.frame_size.height()),
+ new_unrotated_width,
+ new_unrotated_height,
+ rotation_mode,
+ origin_colorspace);
+ scoped_refptr<media::VideoFrame> frame =
+ media::VideoFrame::WrapExternalPackedMemory(
+ media::VideoFrame::I420,
+ dimensions,
+ gfx::Rect(dimensions),
+ dimensions,
+ yplane,
+ media::VideoFrame::AllocationSize(media::VideoFrame::I420,
+ dimensions),
+ base::SharedMemory::NULLHandle(),
+ base::TimeDelta(),
+ base::Closure());
+ DCHECK(frame);
+
+ VideoCaptureFormat format(
+ dimensions, frame_format.frame_rate, media::PIXEL_FORMAT_I420);
BrowserThread::PostTask(
BrowserThread::IO,
FROM_HERE,
- base::Bind(&VideoCaptureController::DoIncomingCapturedFrameOnIOThread,
- controller_,
- dst,
- frame_info_.frame_rate,
- timestamp));
+ base::Bind(
+ &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread,
+ controller_,
+ buffer,
+ format,
+ frame,
+ timestamp));
+
+ if (first_frame_) {
+ UMA_HISTOGRAM_COUNTS("Media.VideoCapture.Width",
+ frame_format.frame_size.width());
+ UMA_HISTOGRAM_COUNTS("Media.VideoCapture.Height",
+ frame_format.frame_size.height());
+ UMA_HISTOGRAM_ASPECT_RATIO("Media.VideoCapture.AspectRatio",
+ frame_format.frame_size.width(),
+ frame_format.frame_size.height());
+ UMA_HISTOGRAM_COUNTS("Media.VideoCapture.FrameRate",
+ frame_format.frame_rate);
+ UMA_HISTOGRAM_ENUMERATION("Media.VideoCapture.PixelFormat",
+ frame_format.pixel_format,
+ media::PIXEL_FORMAT_MAX);
+ first_frame_ = false;
+ }
}
+// Forwards a device-produced frame (already backed by a pool |buffer|)
+// straight to the IO thread; no pixel conversion happens here.
void
VideoCaptureController::VideoCaptureDeviceClient::OnIncomingCapturedVideoFrame(
+    const scoped_refptr<Buffer>& buffer,
+    const VideoCaptureFormat& buffer_format,
    const scoped_refptr<media::VideoFrame>& frame,
-    base::Time timestamp) {
-
-  // If this is a frame that belongs to the buffer pool, we can forward it
-  // directly to the IO thread and be done.
-  if (buffer_pool_->RecognizeReservedBuffer(
-      frame->shared_memory_handle()) >= 0) {
-    BrowserThread::PostTask(BrowserThread::IO,
-        FROM_HERE,
-        base::Bind(&VideoCaptureController::DoIncomingCapturedFrameOnIOThread,
-            controller_, frame, frame_info_.frame_rate, timestamp));
-    return;
-  }
-
-  NOTREACHED() << "Frames should always belong to the buffer pool.";
+    base::TimeTicks timestamp) {
+  BrowserThread::PostTask(
+      BrowserThread::IO,
+      FROM_HERE,
+      base::Bind(
+          &VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread,
+          controller_,
+          buffer,
+          buffer_format,
+          frame,
+          timestamp));
}
-void VideoCaptureController::VideoCaptureDeviceClient::OnError() {
+void VideoCaptureController::VideoCaptureDeviceClient::OnError(
+    const std::string& reason) {
+  // Record the failure reason in the native log, then transition the
+  // controller to the error state on the IO thread (which owns its state).
+  MediaStreamManager::SendMessageToNativeLog(
+      "Error on video capture: " + reason);
  BrowserThread::PostTask(BrowserThread::IO,
      FROM_HERE,
      base::Bind(&VideoCaptureController::DoErrorOnIOThread, controller_));
}
-void VideoCaptureController::VideoCaptureDeviceClient::OnFrameInfo(
- const media::VideoCaptureCapability& info) {
- frame_info_ = info;
- // Handle cases when |info| has odd numbers for width/height.
- if (info.width & 1) {
- --frame_info_.width;
- chopped_width_ = 1;
- } else {
- chopped_width_ = 0;
- }
- if (info.height & 1) {
- --frame_info_.height;
- chopped_height_ = 1;
- } else {
- chopped_height_ = 0;
- }
+// Relays device-originated log messages into the native media-stream log,
+// prefixed so capture messages are distinguishable there.
+void VideoCaptureController::VideoCaptureDeviceClient::OnLog(
+    const std::string& message) {
+  MediaStreamManager::SendMessageToNativeLog("Video capture: " + message);
+}
-void VideoCaptureController::VideoCaptureDeviceClient::OnFrameInfoChanged(
- const media::VideoCaptureCapability& info) {
- OnFrameInfo(info);
-}
+// Reserves a pool buffer large enough for a |format| frame of |dimensions|.
+// Returns NULL when the pool has no free buffer. Replaces the old
+// DoReserveI420VideoFrame(): the pool now hands out raw byte buffers and the
+// VideoFrame wrapping happens at the call sites.
+scoped_refptr<media::VideoCaptureDevice::Client::Buffer>
+VideoCaptureController::VideoCaptureDeviceClient::DoReserveOutputBuffer(
+    media::VideoFrame::Format format,
+    const gfx::Size& dimensions) {
+  size_t frame_bytes = 0;
+  if (format == media::VideoFrame::NATIVE_TEXTURE) {
+    // Texture-backed frames carry no pixel payload in the pool buffer, so
+    // callers must pass empty dimensions and we reserve a zero-byte buffer.
+    DCHECK_EQ(dimensions.width(), 0);
+    DCHECK_EQ(dimensions.height(), 0);
+  } else {
+    // The capture pipeline expects I420 for now.
+    DCHECK_EQ(format, media::VideoFrame::I420)
+        << "Non-I420 output buffer format " << format << " requested";
+    frame_bytes = media::VideoFrame::AllocationSize(format, dimensions);
+  }
-scoped_refptr<media::VideoFrame>
-VideoCaptureController::VideoCaptureDeviceClient::DoReserveI420VideoFrame(
-    const gfx::Size& size,
-    int rotation) {
  int buffer_id_to_drop = VideoCaptureBufferPool::kInvalidId;
-  scoped_refptr<media::VideoFrame> frame =
-      buffer_pool_->ReserveI420VideoFrame(size, rotation, &buffer_id_to_drop);
+  int buffer_id =
+      buffer_pool_->ReserveForProducer(frame_bytes, &buffer_id_to_drop);
+  if (buffer_id == VideoCaptureBufferPool::kInvalidId)
+    return NULL;
+  void* data;
+  size_t size;
+  buffer_pool_->GetBufferInfo(buffer_id, &data, &size);
+
+  // PoolBuffer returns the producer reservation when its last ref drops.
+  scoped_refptr<media::VideoCaptureDevice::Client::Buffer> output_buffer(
+      new PoolBuffer(buffer_pool_, buffer_id, data, size));
+
+  // The pool may have retired an old buffer to satisfy this reservation;
+  // notify clients on the IO thread so they drop their handles to it.
  if (buffer_id_to_drop != VideoCaptureBufferPool::kInvalidId) {
    BrowserThread::PostTask(BrowserThread::IO,
        FROM_HERE,
        base::Bind(&VideoCaptureController::DoBufferDestroyedOnIOThread,
            controller_, buffer_id_to_drop));
  }
-  return frame;
+
+  return output_buffer;
}
VideoCaptureController::~VideoCaptureController() {
controller_clients_.end());
}
-void VideoCaptureController::DoIncomingCapturedFrameOnIOThread(
- const scoped_refptr<media::VideoFrame>& reserved_frame,
- int frame_rate,
- base::Time timestamp) {
- DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
-
- int buffer_id = buffer_pool_->RecognizeReservedBuffer(
- reserved_frame->shared_memory_handle());
- if (buffer_id < 0) {
- NOTREACHED();
- return;
- }
-
- media::VideoCaptureFormat frame_format(
- reserved_frame->coded_size().width(),
- reserved_frame->coded_size().height(),
- frame_rate,
- media::VariableResolutionVideoCaptureDevice);
+void VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread(
+ const scoped_refptr<media::VideoCaptureDevice::Client::Buffer>& buffer,
+ const media::VideoCaptureFormat& buffer_format,
+ const scoped_refptr<media::VideoFrame>& frame,
+ base::TimeTicks timestamp) {
+ DCHECK_CURRENTLY_ON(BrowserThread::IO);
+ DCHECK_NE(buffer->id(), VideoCaptureBufferPool::kInvalidId);
int count = 0;
if (state_ == VIDEO_CAPTURE_STATE_STARTED) {
if (client->session_closed)
continue;
- bool is_new_buffer = client->known_buffers.insert(buffer_id).second;
- if (is_new_buffer) {
- // On the first use of a buffer on a client, share the memory handle.
- size_t memory_size = 0;
- base::SharedMemoryHandle remote_handle = buffer_pool_->ShareToProcess(
- buffer_id, client->render_process_handle, &memory_size);
- client->event_handler->OnBufferCreated(client->controller_id,
- remote_handle,
- memory_size,
- buffer_id);
+ if (frame->format() == media::VideoFrame::NATIVE_TEXTURE) {
+ client->event_handler->OnMailboxBufferReady(client->controller_id,
+ buffer->id(),
+ *frame->mailbox_holder(),
+ buffer_format,
+ timestamp);
+ } else {
+ bool is_new_buffer = client->known_buffers.insert(buffer->id()).second;
+ if (is_new_buffer) {
+ // On the first use of a buffer on a client, share the memory handle.
+ size_t memory_size = 0;
+ base::SharedMemoryHandle remote_handle = buffer_pool_->ShareToProcess(
+ buffer->id(), client->render_process_handle, &memory_size);
+ client->event_handler->OnBufferCreated(
+ client->controller_id, remote_handle, memory_size, buffer->id());
+ }
+
+ client->event_handler->OnBufferReady(
+ client->controller_id, buffer->id(), buffer_format, timestamp);
}
- client->event_handler->OnBufferReady(client->controller_id,
- buffer_id, timestamp,
- frame_format);
- bool inserted = client->active_buffers.insert(buffer_id).second;
- DCHECK(inserted) << "Unexpected duplicate buffer: " << buffer_id;
+ bool inserted =
+ client->active_buffers.insert(std::make_pair(buffer->id(), frame))
+ .second;
+ DCHECK(inserted) << "Unexpected duplicate buffer: " << buffer->id();
count++;
}
}
- buffer_pool_->HoldForConsumers(buffer_id, count);
+ buffer_pool_->HoldForConsumers(buffer->id(), count);
}
void VideoCaptureController::DoErrorOnIOThread() {
- DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DCHECK_CURRENTLY_ON(BrowserThread::IO);
state_ = VIDEO_CAPTURE_STATE_ERROR;
for (ControllerClients::iterator client_it = controller_clients_.begin();
void VideoCaptureController::DoBufferDestroyedOnIOThread(
int buffer_id_to_drop) {
- DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+ DCHECK_CURRENTLY_ON(BrowserThread::IO);
for (ControllerClients::iterator client_it = controller_clients_.begin();
client_it != controller_clients_.end(); ++client_it) {
const ControllerClients& clients) {
for (ControllerClients::const_iterator client_it = clients.begin();
client_it != clients.end(); ++client_it) {
- if ((*client_it)->parameters.session_id == session_id) {
+ if ((*client_it)->session_id == session_id) {
return *client_it;
}
}
}
+// Returns the number of currently attached clients. IO thread only.
int VideoCaptureController::GetClientCount() {
-  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
+  DCHECK_CURRENTLY_ON(BrowserThread::IO);
  return controller_clients_.size();
}