1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "content/renderer/media/rtc_video_encoder.h"
8 #include "base/location.h"
9 #include "base/logging.h"
10 #include "base/memory/scoped_vector.h"
11 #include "base/message_loop/message_loop_proxy.h"
12 #include "base/metrics/histogram.h"
13 #include "base/synchronization/waitable_event.h"
14 #include "media/base/bitstream_buffer.h"
15 #include "media/base/video_frame.h"
16 #include "media/base/video_util.h"
17 #include "media/filters/gpu_video_accelerator_factories.h"
18 #include "media/video/video_encode_accelerator.h"
19 #include "third_party/webrtc/system_wrappers/interface/tick_util.h"
// Logs and forwards an encoder error to NotifyError().  Wrapped in
// do/while(0) so it behaves as a single statement at any call site.
#define NOTIFY_ERROR(x)                              \
  do {                                               \
    DLOG(ERROR) << "calling NotifyError(): " << x;   \
    NotifyError(x);                                  \
  } while (0)
29 // This private class of RTCVideoEncoder does the actual work of communicating
30 // with a media::VideoEncodeAccelerator for handling video encoding. It can
31 // be created on any thread, but should subsequently be posted to (and Destroy()
32 // called on) a single thread. Callbacks to RTCVideoEncoder are posted to the
33 // thread on which the instance was constructed.
35 // This class separates state related to the thread that RTCVideoEncoder
36 // operates on (presently the libjingle worker thread) from the thread that
37 // |gpu_factories_| provides for accelerator operations (presently the media
38 // thread). The RTCVideoEncoder class can be deleted directly by WebRTC, while
39 // RTCVideoEncoder::Impl stays around long enough to properly shut down the VEA.
40 class RTCVideoEncoder::Impl
41 : public media::VideoEncodeAccelerator::Client,
42 public base::RefCountedThreadSafe<RTCVideoEncoder::Impl> {
44 Impl(const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
45 const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories);
47 // Create the VEA and call Initialize() on it. Called once per instantiation,
48 // and then the instance is bound forevermore to whichever thread made the
50 // RTCVideoEncoder expects to be able to call this function synchronously from
51 // its own thread, hence the |async_waiter| and |async_retval| arguments.
52 void CreateAndInitializeVEA(const gfx::Size& input_visible_size,
54 media::VideoCodecProfile profile,
55 base::WaitableEvent* async_waiter,
56 int32_t* async_retval);
57 // Enqueue a frame from WebRTC for encoding.
58 // RTCVideoEncoder expects to be able to call this function synchronously from
59 // its own thread, hence the |async_waiter| and |async_retval| arguments.
60 void Enqueue(const webrtc::I420VideoFrame* input_frame,
62 base::WaitableEvent* async_waiter,
63 int32_t* async_retval);
65 // RTCVideoEncoder is given a buffer to be passed to WebRTC through the
66 // RTCVideoEncoder::ReturnEncodedImage() function. When that is complete,
67 // the buffer is returned to Impl by its index using this function.
68 void UseOutputBitstreamBufferId(int32 bitstream_buffer_id);
70 // Request encoding parameter change for the underlying encoder.
71 void RequestEncodingParametersChange(uint32 bitrate, uint32 framerate);
73 // Destroy this Impl's encoder. The destructor is not explicitly called, as
74 // Impl is a base::RefCountedThreadSafe.
77 // media::VideoEncodeAccelerator::Client implementation.
78 virtual void NotifyInitializeDone() OVERRIDE;
79 virtual void RequireBitstreamBuffers(unsigned int input_count,
80 const gfx::Size& input_coded_size,
81 size_t output_buffer_size) OVERRIDE;
82 virtual void BitstreamBufferReady(int32 bitstream_buffer_id,
84 bool key_frame) OVERRIDE;
85 virtual void NotifyError(media::VideoEncodeAccelerator::Error error) OVERRIDE;
88 friend class base::RefCountedThreadSafe<Impl>;
91 kInputBufferExtraCount = 1, // The number of input buffers allocated, more
92 // than what is requested by
93 // VEA::RequireBitstreamBuffers().
94 kOutputBufferCount = 3,
99 // Perform encoding on an input frame from the input queue.
100 void EncodeOneFrame();
102 // Notify that an input frame is finished for encoding. |index| is the index
103 // of the completed frame in |input_buffers_|.
104 void EncodeFrameFinished(int index);
106 // Set up/signal |async_waiter_| and |async_retval_|; see declarations below.
107 void RegisterAsyncWaiter(base::WaitableEvent* waiter, int32_t* retval);
108 void SignalAsyncWaiter(int32_t retval);
110 base::ThreadChecker thread_checker_;
112 // Weak pointer to the parent RTCVideoEncoder, for posting back VEA::Client
114 const base::WeakPtr<RTCVideoEncoder> weak_encoder_;
116 // The message loop on which to post callbacks to |weak_encoder_|.
117 const scoped_refptr<base::MessageLoopProxy> encoder_message_loop_proxy_;
119 // Factory for creating VEAs, shared memory buffers, etc.
120 const scoped_refptr<media::GpuVideoAcceleratorFactories> gpu_factories_;
122 // webrtc::VideoEncoder expects InitEncode() and Encode() to be synchronous.
123 // Do this by waiting on the |async_waiter_| and returning the return value in
124 // |async_retval_| when initialization completes, encoding completes, or
126 base::WaitableEvent* async_waiter_;
127 int32_t* async_retval_;
129 // The underlying VEA to perform encoding on.
130 scoped_ptr<media::VideoEncodeAccelerator> video_encoder_;
132 // Next input frame. Since there is at most one next frame, a single-element
133 // queue is sufficient.
134 const webrtc::I420VideoFrame* input_next_frame_;
136 // Whether to encode a keyframe next.
137 bool input_next_frame_keyframe_;
140 gfx::Size input_frame_coded_size_;
141 gfx::Size input_visible_size_;
143 // Shared memory buffers for input/output with the VEA.
144 ScopedVector<base::SharedMemory> input_buffers_;
145 ScopedVector<base::SharedMemory> output_buffers_;
147 // Input buffers ready to be filled with input from Encode(). As a LIFO since
148 // we don't care about ordering.
149 std::vector<int> input_buffers_free_;
151 DISALLOW_COPY_AND_ASSIGN(Impl);
154 RTCVideoEncoder::Impl::Impl(
155 const base::WeakPtr<RTCVideoEncoder>& weak_encoder,
156 const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories)
157 : weak_encoder_(weak_encoder),
158 encoder_message_loop_proxy_(base::MessageLoopProxy::current()),
159 gpu_factories_(gpu_factories),
162 input_next_frame_(NULL),
163 input_next_frame_keyframe_(false) {
164 thread_checker_.DetachFromThread();
167 void RTCVideoEncoder::Impl::CreateAndInitializeVEA(
168 const gfx::Size& input_visible_size,
170 media::VideoCodecProfile profile,
171 base::WaitableEvent* async_waiter,
172 int32_t* async_retval) {
173 DVLOG(3) << "Impl::CreateAndInitializeVEA()";
174 DCHECK(thread_checker_.CalledOnValidThread());
176 RegisterAsyncWaiter(async_waiter, async_retval);
178 // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
179 if (bitrate > kuint32max / 1000) {
180 NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
184 video_encoder_ = gpu_factories_->CreateVideoEncodeAccelerator(this).Pass();
185 if (!video_encoder_) {
186 NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
189 input_visible_size_ = input_visible_size;
190 video_encoder_->Initialize(
191 media::VideoFrame::I420, input_visible_size_, profile, bitrate * 1000);
194 void RTCVideoEncoder::Impl::Enqueue(const webrtc::I420VideoFrame* input_frame,
196 base::WaitableEvent* async_waiter,
197 int32_t* async_retval) {
198 DVLOG(3) << "Impl::Enqueue()";
199 DCHECK(thread_checker_.CalledOnValidThread());
200 DCHECK(!input_next_frame_);
202 RegisterAsyncWaiter(async_waiter, async_retval);
203 input_next_frame_ = input_frame;
204 input_next_frame_keyframe_ = force_keyframe;
206 if (!input_buffers_free_.empty())
210 void RTCVideoEncoder::Impl::UseOutputBitstreamBufferId(
211 int32 bitstream_buffer_id) {
212 DVLOG(3) << "Impl::UseOutputBitstreamBufferIndex(): "
213 "bitstream_buffer_id=" << bitstream_buffer_id;
214 DCHECK(thread_checker_.CalledOnValidThread());
215 if (video_encoder_) {
216 video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
218 output_buffers_[bitstream_buffer_id]->handle(),
219 output_buffers_[bitstream_buffer_id]->mapped_size()));
223 void RTCVideoEncoder::Impl::RequestEncodingParametersChange(uint32 bitrate,
225 DVLOG(3) << "Impl::RequestEncodingParametersChange(): bitrate=" << bitrate
226 << ", framerate=" << framerate;
227 DCHECK(thread_checker_.CalledOnValidThread());
229 // Check for overflow converting bitrate (kilobits/sec) to bits/sec.
230 if (bitrate > kuint32max / 1000) {
231 NOTIFY_ERROR(media::VideoEncodeAccelerator::kInvalidArgumentError);
236 video_encoder_->RequestEncodingParametersChange(bitrate * 1000, framerate);
239 void RTCVideoEncoder::Impl::Destroy() {
240 DVLOG(3) << "Impl::Destroy()";
241 DCHECK(thread_checker_.CalledOnValidThread());
243 video_encoder_.release()->Destroy();
246 void RTCVideoEncoder::Impl::NotifyInitializeDone() {
247 DVLOG(3) << "Impl::NotifyInitializeDone()";
248 DCHECK(thread_checker_.CalledOnValidThread());
251 void RTCVideoEncoder::Impl::RequireBitstreamBuffers(
252 unsigned int input_count,
253 const gfx::Size& input_coded_size,
254 size_t output_buffer_size) {
255 DVLOG(3) << "Impl::RequireBitstreamBuffers(): input_count=" << input_count
256 << ", input_coded_size=" << input_coded_size.ToString()
257 << ", output_buffer_size=" << output_buffer_size;
258 DCHECK(thread_checker_.CalledOnValidThread());
263 input_frame_coded_size_ = input_coded_size;
265 for (unsigned int i = 0; i < input_count + kInputBufferExtraCount; ++i) {
266 base::SharedMemory* shm =
267 gpu_factories_->CreateSharedMemory(media::VideoFrame::AllocationSize(
268 media::VideoFrame::I420, input_coded_size));
270 DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
271 "failed to create input buffer " << i;
272 NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
275 input_buffers_.push_back(shm);
276 input_buffers_free_.push_back(i);
279 for (int i = 0; i < kOutputBufferCount; ++i) {
280 base::SharedMemory* shm =
281 gpu_factories_->CreateSharedMemory(output_buffer_size);
283 DLOG(ERROR) << "Impl::RequireBitstreamBuffers(): "
284 "failed to create output buffer " << i;
285 NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
288 output_buffers_.push_back(shm);
291 // Immediately provide all output buffers to the VEA.
292 for (size_t i = 0; i < output_buffers_.size(); ++i) {
293 video_encoder_->UseOutputBitstreamBuffer(media::BitstreamBuffer(
294 i, output_buffers_[i]->handle(), output_buffers_[i]->mapped_size()));
296 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
299 void RTCVideoEncoder::Impl::BitstreamBufferReady(int32 bitstream_buffer_id,
302 DVLOG(3) << "Impl::BitstreamBufferReady(): "
303 "bitstream_buffer_id=" << bitstream_buffer_id
304 << ", payload_size=" << payload_size
305 << ", key_frame=" << key_frame;
306 DCHECK(thread_checker_.CalledOnValidThread());
308 if (bitstream_buffer_id < 0 ||
309 bitstream_buffer_id >= static_cast<int>(output_buffers_.size())) {
310 DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid bitstream_buffer_id="
311 << bitstream_buffer_id;
312 NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
315 base::SharedMemory* output_buffer = output_buffers_[bitstream_buffer_id];
316 if (payload_size > output_buffer->mapped_size()) {
317 DLOG(ERROR) << "Impl::BitstreamBufferReady(): invalid payload_size="
319 NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
323 // Use webrtc timestamps to ensure correct RTP sender behavior.
324 // TODO(hshi): obtain timestamp from the capturer, see crbug.com/284783.
325 const int64 capture_time_ms = webrtc::TickTime::MillisecondTimestamp();
327 scoped_ptr<webrtc::EncodedImage> image(new webrtc::EncodedImage(
328 reinterpret_cast<uint8_t*>(output_buffer->memory()),
330 output_buffer->mapped_size()));
331 image->_encodedWidth = input_visible_size_.width();
332 image->_encodedHeight = input_visible_size_.height();
333 // Convert capture time to 90 kHz RTP timestamp.
334 image->_timeStamp = static_cast<uint32_t>(90 * capture_time_ms);
335 image->capture_time_ms_ = capture_time_ms;
336 image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame);
337 image->_completeFrame = true;
339 encoder_message_loop_proxy_->PostTask(
341 base::Bind(&RTCVideoEncoder::ReturnEncodedImage,
343 base::Passed(&image),
344 bitstream_buffer_id));
347 void RTCVideoEncoder::Impl::NotifyError(
348 media::VideoEncodeAccelerator::Error error) {
349 DVLOG(3) << "Impl::NotifyError(): error=" << error;
350 DCHECK(thread_checker_.CalledOnValidThread());
353 case media::VideoEncodeAccelerator::kInvalidArgumentError:
354 retval = WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
357 retval = WEBRTC_VIDEO_CODEC_ERROR;
361 video_encoder_.release()->Destroy();
364 SignalAsyncWaiter(retval);
366 encoder_message_loop_proxy_->PostTask(
368 base::Bind(&RTCVideoEncoder::NotifyError, weak_encoder_, retval));
372 RTCVideoEncoder::Impl::~Impl() { DCHECK(!video_encoder_); }
374 void RTCVideoEncoder::Impl::EncodeOneFrame() {
375 DVLOG(3) << "Impl::EncodeOneFrame()";
376 DCHECK(thread_checker_.CalledOnValidThread());
377 DCHECK(input_next_frame_);
378 DCHECK(!input_buffers_free_.empty());
380 // EncodeOneFrame() may re-enter EncodeFrameFinished() if VEA::Encode() fails,
381 // we receive a VEA::NotifyError(), and the media::VideoFrame we pass to
382 // Encode() gets destroyed early. Handle this by resetting our
383 // input_next_frame_* state before we hand off the VideoFrame to the VEA.
384 const webrtc::I420VideoFrame* next_frame = input_next_frame_;
385 bool next_frame_keyframe = input_next_frame_keyframe_;
386 input_next_frame_ = NULL;
387 input_next_frame_keyframe_ = false;
389 if (!video_encoder_) {
390 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_ERROR);
394 const int index = input_buffers_free_.back();
395 base::SharedMemory* input_buffer = input_buffers_[index];
396 scoped_refptr<media::VideoFrame> frame =
397 media::VideoFrame::WrapExternalPackedMemory(
398 media::VideoFrame::I420,
399 input_frame_coded_size_,
400 gfx::Rect(input_visible_size_),
402 reinterpret_cast<uint8*>(input_buffer->memory()),
403 input_buffer->mapped_size(),
404 input_buffer->handle(),
406 base::Bind(&RTCVideoEncoder::Impl::EncodeFrameFinished, this, index));
408 DLOG(ERROR) << "Impl::EncodeOneFrame(): failed to create frame";
409 NOTIFY_ERROR(media::VideoEncodeAccelerator::kPlatformFailureError);
413 // Do a strided copy of the input frame to match the input requirements for
415 // TODO(sheu): support zero-copy from WebRTC. http://crbug.com/269312
416 media::CopyYPlane(next_frame->buffer(webrtc::kYPlane),
417 next_frame->stride(webrtc::kYPlane),
418 next_frame->height(),
420 media::CopyUPlane(next_frame->buffer(webrtc::kUPlane),
421 next_frame->stride(webrtc::kUPlane),
422 next_frame->height(),
424 media::CopyVPlane(next_frame->buffer(webrtc::kVPlane),
425 next_frame->stride(webrtc::kVPlane),
426 next_frame->height(),
429 video_encoder_->Encode(frame, next_frame_keyframe);
430 input_buffers_free_.pop_back();
431 SignalAsyncWaiter(WEBRTC_VIDEO_CODEC_OK);
434 void RTCVideoEncoder::Impl::EncodeFrameFinished(int index) {
435 DVLOG(3) << "Impl::EncodeFrameFinished(): index=" << index;
436 DCHECK(thread_checker_.CalledOnValidThread());
438 DCHECK_LT(index, static_cast<int>(input_buffers_.size()));
439 input_buffers_free_.push_back(index);
440 if (input_next_frame_)
444 void RTCVideoEncoder::Impl::RegisterAsyncWaiter(base::WaitableEvent* waiter,
446 DCHECK(thread_checker_.CalledOnValidThread());
447 DCHECK(!async_waiter_);
448 DCHECK(!async_retval_);
449 async_waiter_ = waiter;
450 async_retval_ = retval;
453 void RTCVideoEncoder::Impl::SignalAsyncWaiter(int32_t retval) {
454 DCHECK(thread_checker_.CalledOnValidThread());
455 *async_retval_ = retval;
456 async_waiter_->Signal();
457 async_retval_ = NULL;
458 async_waiter_ = NULL;
////////////////////////////////////////////////////////////////////////////////
//
// RTCVideoEncoder
//
////////////////////////////////////////////////////////////////////////////////
469 RTCVideoEncoder::RTCVideoEncoder(
470 webrtc::VideoCodecType type,
471 media::VideoCodecProfile profile,
472 const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories)
473 : video_codec_type_(type),
474 video_codec_profile_(profile),
475 gpu_factories_(gpu_factories),
476 encoded_image_callback_(NULL),
477 impl_status_(WEBRTC_VIDEO_CODEC_UNINITIALIZED),
478 weak_this_factory_(this) {
479 DVLOG(1) << "RTCVideoEncoder(): profile=" << profile;
482 RTCVideoEncoder::~RTCVideoEncoder() {
483 DCHECK(thread_checker_.CalledOnValidThread());
488 int32_t RTCVideoEncoder::InitEncode(const webrtc::VideoCodec* codec_settings,
489 int32_t number_of_cores,
490 uint32_t max_payload_size) {
491 DVLOG(1) << "InitEncode(): codecType=" << codec_settings->codecType
492 << ", width=" << codec_settings->width
493 << ", height=" << codec_settings->height
494 << ", startBitrate=" << codec_settings->startBitrate;
495 DCHECK(thread_checker_.CalledOnValidThread());
498 weak_this_factory_.InvalidateWeakPtrs();
499 impl_ = new Impl(weak_this_factory_.GetWeakPtr(), gpu_factories_);
500 base::WaitableEvent initialization_waiter(true, false);
501 int32_t initialization_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
502 gpu_factories_->GetTaskRunner()->PostTask(
504 base::Bind(&RTCVideoEncoder::Impl::CreateAndInitializeVEA,
506 gfx::Size(codec_settings->width, codec_settings->height),
507 codec_settings->startBitrate,
508 video_codec_profile_,
509 &initialization_waiter,
510 &initialization_retval));
512 // webrtc::VideoEncoder expects this call to be synchronous.
513 initialization_waiter.Wait();
514 RecordInitEncodeUMA(initialization_retval);
515 return initialization_retval;
518 int32_t RTCVideoEncoder::Encode(
519 const webrtc::I420VideoFrame& input_image,
520 const webrtc::CodecSpecificInfo* codec_specific_info,
521 const std::vector<webrtc::VideoFrameType>* frame_types) {
522 DVLOG(3) << "Encode()";
523 // TODO(sheu): figure out why this check fails.
524 // DCHECK(thread_checker_.CalledOnValidThread());
526 DVLOG(3) << "Encode(): returning impl_status_=" << impl_status_;
530 base::WaitableEvent encode_waiter(true, false);
531 int32_t encode_retval = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
532 gpu_factories_->GetTaskRunner()->PostTask(
534 base::Bind(&RTCVideoEncoder::Impl::Enqueue,
537 (frame_types->front() == webrtc::kKeyFrame),
541 // webrtc::VideoEncoder expects this call to be synchronous.
542 encode_waiter.Wait();
543 DVLOG(3) << "Encode(): returning encode_retval=" << encode_retval;
544 return encode_retval;
547 int32_t RTCVideoEncoder::RegisterEncodeCompleteCallback(
548 webrtc::EncodedImageCallback* callback) {
549 DVLOG(3) << "RegisterEncodeCompleteCallback()";
550 DCHECK(thread_checker_.CalledOnValidThread());
552 DVLOG(3) << "RegisterEncodeCompleteCallback(): returning " << impl_status_;
556 encoded_image_callback_ = callback;
557 return WEBRTC_VIDEO_CODEC_OK;
560 int32_t RTCVideoEncoder::Release() {
561 DVLOG(3) << "Release()";
562 DCHECK(thread_checker_.CalledOnValidThread());
565 gpu_factories_->GetTaskRunner()->PostTask(
566 FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
568 weak_this_factory_.InvalidateWeakPtrs();
569 impl_status_ = WEBRTC_VIDEO_CODEC_UNINITIALIZED;
571 return WEBRTC_VIDEO_CODEC_OK;
574 int32_t RTCVideoEncoder::SetChannelParameters(uint32_t packet_loss, int rtt) {
575 DVLOG(3) << "SetChannelParameters(): packet_loss=" << packet_loss
577 DCHECK(thread_checker_.CalledOnValidThread());
579 return WEBRTC_VIDEO_CODEC_OK;
582 int32_t RTCVideoEncoder::SetRates(uint32_t new_bit_rate, uint32_t frame_rate) {
583 DVLOG(3) << "SetRates(): new_bit_rate=" << new_bit_rate
584 << ", frame_rate=" << frame_rate;
585 DCHECK(thread_checker_.CalledOnValidThread());
587 DVLOG(3) << "SetRates(): returning " << impl_status_;
591 gpu_factories_->GetTaskRunner()->PostTask(
593 base::Bind(&RTCVideoEncoder::Impl::RequestEncodingParametersChange,
597 return WEBRTC_VIDEO_CODEC_OK;
600 void RTCVideoEncoder::ReturnEncodedImage(scoped_ptr<webrtc::EncodedImage> image,
601 int32 bitstream_buffer_id) {
602 DCHECK(thread_checker_.CalledOnValidThread());
603 DVLOG(3) << "ReturnEncodedImage(): "
604 "bitstream_buffer_id=" << bitstream_buffer_id;
606 if (!encoded_image_callback_)
609 webrtc::CodecSpecificInfo info;
610 memset(&info, 0, sizeof(info));
611 info.codecType = video_codec_type_;
612 if (video_codec_type_ == webrtc::kVideoCodecVP8) {
613 info.codecSpecific.VP8.pictureId = -1;
614 info.codecSpecific.VP8.tl0PicIdx = -1;
615 info.codecSpecific.VP8.keyIdx = -1;
618 // Generate a header describing a single fragment.
619 webrtc::RTPFragmentationHeader header;
620 memset(&header, 0, sizeof(header));
621 header.VerifyAndAllocateFragmentationHeader(1);
622 header.fragmentationOffset[0] = 0;
623 header.fragmentationLength[0] = image->_length;
624 header.fragmentationPlType[0] = 0;
625 header.fragmentationTimeDiff[0] = 0;
627 int32_t retval = encoded_image_callback_->Encoded(*image, &info, &header);
629 DVLOG(2) << "ReturnEncodedImage(): encoded_image_callback_ returned "
633 // The call through webrtc::EncodedImageCallback is synchronous, so we can
634 // immediately recycle the output buffer back to the Impl.
635 gpu_factories_->GetTaskRunner()->PostTask(
637 base::Bind(&RTCVideoEncoder::Impl::UseOutputBitstreamBufferId,
639 bitstream_buffer_id));
642 void RTCVideoEncoder::NotifyError(int32_t error) {
643 DCHECK(thread_checker_.CalledOnValidThread());
644 DVLOG(1) << "NotifyError(): error=" << error;
646 impl_status_ = error;
647 gpu_factories_->GetTaskRunner()->PostTask(
648 FROM_HERE, base::Bind(&RTCVideoEncoder::Impl::Destroy, impl_));
652 void RTCVideoEncoder::RecordInitEncodeUMA(int32_t init_retval) {
653 UMA_HISTOGRAM_BOOLEAN("Media.RTCVideoEncoderInitEncodeSuccess",
654 init_retval == WEBRTC_VIDEO_CODEC_OK);
655 if (init_retval == WEBRTC_VIDEO_CODEC_OK) {
656 UMA_HISTOGRAM_ENUMERATION("Media.RTCVideoEncoderProfile",
657 video_codec_profile_,
658 media::VIDEO_CODEC_PROFILE_MAX);
662 } // namespace content