#include "base/metrics/histogram.h"
#include "base/metrics/sparse_histogram.h"
#include "base/stl_util.h"
#include "base/strings/stringprintf.h"
#include "content/browser/renderer_host/media/media_stream_manager.h"
#include "content/browser/renderer_host/media/video_capture_manager.h"
#include "content/common/gpu/client/gl_helper.h"
void* data,
size_t size)
: Buffer(buffer_id, data, size), pool_(pool) {
- DCHECK(pool_);
+ DCHECK(pool_.get());
}
private:
render_process_handle(render_process),
session_id(session_id),
parameters(params),
- session_closed(false) {}
+ session_closed(false),
+ paused(false) {}
~ControllerClient() {}
// implicitly), we could avoid tracking this state here in the Controller, and
// simplify the code in both places.
bool session_closed;
+
+ // Indicates whether the client is paused, if true, VideoCaptureController
+ // stops updating its buffer.
+ bool paused;
};
// Receives events from the VideoCaptureDevice and posts them to a
// The pool of shared-memory buffers used for capturing.
const scoped_refptr<VideoCaptureBufferPool> buffer_pool_;
-
- bool first_frame_;
};
// Constructs a controller backed by a buffer pool of |max_buffers| buffers.
// Starts in VIDEO_CAPTURE_STATE_STARTED; |frame_received_| stays false until
// the first captured frame arrives (it is reported to UMA at destruction).
VideoCaptureController::VideoCaptureController(int max_buffers)
    : buffer_pool_(new VideoCaptureBufferPool(max_buffers)),
      state_(VIDEO_CAPTURE_STATE_STARTED),
      frame_received_(false),
      weak_ptr_factory_(this) {
}
// Device-facing client: holds a weak pointer back to the controller (which
// lives on the IO thread) and a reference to the shared buffer pool.
VideoCaptureController::VideoCaptureDeviceClient::VideoCaptureDeviceClient(
    const base::WeakPtr<VideoCaptureController>& controller,
    const scoped_refptr<VideoCaptureBufferPool>& buffer_pool)
    : controller_(controller), buffer_pool_(buffer_pool) {}

VideoCaptureController::VideoCaptureDeviceClient::~VideoCaptureDeviceClient() {}
return session_id;
}
+void VideoCaptureController::PauseOrResumeClient(
+ const VideoCaptureControllerID& id,
+ VideoCaptureControllerEventHandler* event_handler,
+ bool pause) {
+ DCHECK_CURRENTLY_ON(BrowserThread::IO);
+ DVLOG(1) << "VideoCaptureController::PauseOrResumeClient, id "
+ << id.device_id << ", " << pause;
+
+ ControllerClient* client = FindClient(id, event_handler, controller_clients_);
+ if (!client)
+ return;
+
+ DCHECK(client->paused != pause);
+ client->paused = pause;
+}
+
void VideoCaptureController::StopSession(int session_id) {
DCHECK_CURRENTLY_ON(BrowserThread::IO);
DVLOG(1) << "VideoCaptureController::StopSession, id " << session_id;
scoped_refptr<Buffer> buffer =
DoReserveOutputBuffer(media::VideoFrame::I420, dimensions);
- if (!buffer)
+ if (!buffer.get())
return;
uint8* yplane = NULL;
bool flip = false;
base::SharedMemory::NULLHandle(),
base::TimeDelta(),
base::Closure());
- DCHECK(frame);
+ DCHECK(frame.get());
VideoCaptureFormat format(
dimensions, frame_format.frame_rate, media::PIXEL_FORMAT_I420);
format,
frame,
timestamp));
-
- if (first_frame_) {
- UMA_HISTOGRAM_COUNTS("Media.VideoCapture.Width",
- frame_format.frame_size.width());
- UMA_HISTOGRAM_COUNTS("Media.VideoCapture.Height",
- frame_format.frame_size.height());
- UMA_HISTOGRAM_ASPECT_RATIO("Media.VideoCapture.AspectRatio",
- frame_format.frame_size.width(),
- frame_format.frame_size.height());
- UMA_HISTOGRAM_COUNTS("Media.VideoCapture.FrameRate",
- frame_format.frame_rate);
- UMA_HISTOGRAM_ENUMERATION("Media.VideoCapture.PixelFormat",
- frame_format.pixel_format,
- media::PIXEL_FORMAT_MAX);
- first_frame_ = false;
- }
}
void
void VideoCaptureController::VideoCaptureDeviceClient::OnError(
const std::string& reason) {
- MediaStreamManager::SendMessageToNativeLog(
- "Error on video capture: " + reason);
+ const std::string log_message = base::StringPrintf(
+ "Error on video capture: %s, OS message: %s",
+ reason.c_str(),
+ logging::SystemErrorCodeToString(
+ logging::GetLastSystemErrorCode()).c_str());
+ DLOG(ERROR) << log_message;
+ MediaStreamManager::SendMessageToNativeLog(log_message);
BrowserThread::PostTask(BrowserThread::IO,
FROM_HERE,
base::Bind(&VideoCaptureController::DoErrorOnIOThread, controller_));
// Deletes all owned ControllerClient instances and records whether this
// controller ever delivered a frame — useful for diagnosing capture sessions
// that start but never produce video.
VideoCaptureController::~VideoCaptureController() {
  STLDeleteContainerPointers(controller_clients_.begin(),
                             controller_clients_.end());
  UMA_HISTOGRAM_BOOLEAN("Media.VideoCapture.FramesReceived", frame_received_);
}
void VideoCaptureController::DoIncomingCapturedVideoFrameOnIOThread(
for (ControllerClients::iterator client_it = controller_clients_.begin();
client_it != controller_clients_.end(); ++client_it) {
ControllerClient* client = *client_it;
- if (client->session_closed)
+ if (client->session_closed || client->paused)
continue;
if (frame->format() == media::VideoFrame::NATIVE_TEXTURE) {
}
client->event_handler->OnBufferReady(
- client->controller_id, buffer->id(), buffer_format, timestamp);
+ client->controller_id, buffer->id(), buffer_format,
+ frame->visible_rect(), timestamp);
}
bool inserted =
}
}
+ if (!frame_received_) {
+ UMA_HISTOGRAM_COUNTS("Media.VideoCapture.Width",
+ buffer_format.frame_size.width());
+ UMA_HISTOGRAM_COUNTS("Media.VideoCapture.Height",
+ buffer_format.frame_size.height());
+ UMA_HISTOGRAM_ASPECT_RATIO("Media.VideoCapture.AspectRatio",
+ buffer_format.frame_size.width(),
+ buffer_format.frame_size.height());
+ UMA_HISTOGRAM_COUNTS("Media.VideoCapture.FrameRate",
+ buffer_format.frame_rate);
+ UMA_HISTOGRAM_ENUMERATION("Media.VideoCapture.PixelFormat",
+ buffer_format.pixel_format,
+ media::PIXEL_FORMAT_MAX);
+ frame_received_ = true;
+ }
+
buffer_pool_->HoldForConsumers(buffer->id(), count);
}
return NULL;
}
-int VideoCaptureController::GetClientCount() {
+int VideoCaptureController::GetClientCount() const {
DCHECK_CURRENTLY_ON(BrowserThread::IO);
return controller_clients_.size();
}
+int VideoCaptureController::GetActiveClientCount() const {
+ DCHECK_CURRENTLY_ON(BrowserThread::IO);
+ int active_client_count = 0;
+ for (ControllerClient* client : controller_clients_) {
+ if (!client->paused)
+ ++active_client_count;
+ }
+ return active_client_count;
+}
+
} // namespace content