[WebRTC] H264 Encoder from UserData 16/293816/16
authorMichal Jurkiewicz <m.jurkiewicz@samsung.com>
Thu, 25 May 2023 13:03:20 +0000 (15:03 +0200)
committerm.jurkiewicz <m.jurkiewicz@samsung.com>
Mon, 5 Jun 2023 13:26:03 +0000 (15:26 +0200)
* Enable encoding the H264 codec using the HW encoder in 'UserData' mode.
* Remove the 'Subscaler' encoding mode as it is no longer used.
* Clean up the remaining code.

Bug: https://cam.sprc.samsung.pl/browse/VDGAME-275
Change-Id: I1b129dd0b50dc8412ceb86abd1878396c4740a69
Signed-off-by: Michal Jurkiewicz <m.jurkiewicz@samsung.com>
24 files changed:
media/capture/video_capturer_source.h
third_party/blink/public/platform/modules/mediastream/web_media_stream_sink.h
third_party/blink/public/platform/modules/video_capture/web_video_capture_impl_manager.h
third_party/blink/public/web/modules/mediastream/media_stream_video_source.h
third_party/blink/renderer/modules/mediastream/local_video_capturer_source.cc
third_party/blink/renderer/modules/mediastream/local_video_capturer_source.h
third_party/blink/renderer/modules/mediastream/media_stream_video_capturer_source.cc
third_party/blink/renderer/modules/mediastream/media_stream_video_capturer_source.h
third_party/blink/renderer/modules/mediastream/media_stream_video_track.cc
third_party/blink/renderer/modules/mediastream/media_stream_video_track.h
third_party/blink/renderer/modules/peerconnection/media_stream_video_webrtc_sink.cc
third_party/blink/renderer/modules/peerconnection/media_stream_video_webrtc_sink.h
third_party/blink/renderer/platform/exported/video_capture/web_video_capture_impl_manager.cc
third_party/blink/renderer/platform/peerconnection/webrtc_video_track_source.cc
third_party/blink/renderer/platform/peerconnection/webrtc_video_track_source.h
third_party/blink/renderer/platform/video_capture/video_capture_impl.cc
third_party/blink/renderer/platform/video_capture/video_capture_impl.h
third_party/webrtc/api/video/video_stream_encoder_interface.h
third_party/webrtc/video/video_stream_encoder.cc
third_party/webrtc/video/video_stream_encoder.h
tizen_src/chromium_impl/third_party/blink/renderer/platform/peerconnection/tizen_video_encoder_facade.cc
tizen_src/chromium_impl/third_party/blink/renderer/platform/peerconnection/tizen_video_encoder_facade.h
tizen_src/chromium_impl/third_party/blink/renderer/platform/peerconnection/tizen_video_encoder_facade_utils.cc
tizen_src/chromium_impl/third_party/blink/renderer/platform/peerconnection/tizen_video_encoder_facade_utils.h

index 19dd7093ff5172d19abfbb71b69a37647af4a569..c80ec0554144ec5e97180cc33ca117b3f6e2ee4a 100644 (file)
@@ -131,8 +131,8 @@ class CAPTURE_EXPORT VideoCapturerSource {
   // Sends a log message to the source.
   virtual void OnLog(const std::string& message) {}
 
-  // Indicate that the current encoder sink is vp8
-  virtual void OnWebRtcSendStreamEncoderVp8(bool enable) {}
+  // Indicate that the current encoder sink operates in userdata mode
+  virtual void OnWebRtcSendStreamUserDataEncoder(bool enable) {}
 
   // Encoder had a new size
   virtual void OnNewTargetSize(const gfx::Size& target_size) {}
index 99949f8d3699df268b9dbdd36c9a80bc894e1755..6c79fbd8a88bbfb9b0c78ae5f12663564b300a36 100644 (file)
@@ -22,9 +22,10 @@ class BLINK_PLATFORM_EXPORT WebMediaStreamSink {
   virtual void OnContentHintChanged(
       WebMediaStreamTrack::ContentHintType content_hint) {}
 
-  using WebRtcSendStreamEncoderVp8CB = base::RepeatingCallback<void(bool)>;
-  virtual void SetWebRtcSendStreamEncoderVp8CB(WebRtcSendStreamEncoderVp8CB) {}
-  virtual void UnsetWebRtcSendStreamEncoderVp8CB() {}
+  using WebRtcSendStreamEncoderUserDataCB = base::RepeatingCallback<void(bool)>;
+  virtual void SetWebRtcSendStreamEncoderUserDataCB(
+      WebRtcSendStreamEncoderUserDataCB) {}
+  virtual void UnsetWebRtcSendStreamUserDataEncoderCB() {}
 
   virtual void PauseSink() {}
   virtual void StartSink() {}
index 7b111a692366186efdd4b85c9935a5bda9a75763..1433bcc290d0ea9ff93911d5899a67c298ee5b18 100644 (file)
@@ -124,8 +124,8 @@ class BLINK_PLATFORM_EXPORT WebVideoCaptureImplManager {
   VideoCaptureFeedbackCB GetFeedbackCallback(
       const media::VideoCaptureSessionId& id) const;
 
-  void OnWebRtcSendStreamEncoderVp8(const media::VideoCaptureSessionId& id,
-                                    bool enable);
+  void OnWebRtcSendStreamUserDataEncoder(const media::VideoCaptureSessionId& id,
+                                         bool enable);
 
  private:
   // Holds bookkeeping info for each VideoCaptureImpl shared by clients.
index 0863a0ed98b3b0e88d40a3ca022a84a8f16ce9fe..a8f6f66bcb533e7919d59d04b1c97fc04bc32ee8 100644 (file)
@@ -191,7 +191,7 @@ class BLINK_MODULES_EXPORT MediaStreamVideoSource
 
   virtual base::WeakPtr<MediaStreamVideoSource> GetWeakPtr() const = 0;
 
-  virtual void OnWebRtcSendStreamEncoderVp8(bool enable) {}
+  virtual void OnWebRtcSendStreamUserDataEncoder(bool enable) {}
 
  protected:
   // MediaStreamSource implementation.
index bedea08dadf70838ec5185624430996f3ef03a61..c3c36a9aab21d8a60e6ec0c965875c64229445ca 100644 (file)
@@ -97,9 +97,9 @@ void LocalVideoCapturerSource::OnLog(const std::string& message) {
   manager_->OnLog(session_id_, WebString::FromUTF8(message));
 }
 
-void LocalVideoCapturerSource::OnWebRtcSendStreamEncoderVp8(bool enable) {
+void LocalVideoCapturerSource::OnWebRtcSendStreamUserDataEncoder(bool enable) {
   DCHECK_CALLED_ON_VALID_THREAD(thread_checker_);
-  manager_->OnWebRtcSendStreamEncoderVp8(session_id_, enable);
+  manager_->OnWebRtcSendStreamUserDataEncoder(session_id_, enable);
 }
 
 void LocalVideoCapturerSource::OnNewTargetSize(const gfx::Size& target_size) {
index 7aacfa3551f674f72e77afd8a9fd7862b5ae62ad..1e7918c3fc76164c1c5b2b090b87fe7bc52b5f7b 100644 (file)
@@ -61,7 +61,7 @@ class MODULES_EXPORT LocalVideoCapturerSource
   void OnFrameDropped(media::VideoCaptureFrameDropReason reason) override;
   void OnLog(const std::string& message) override;
   media::VideoCaptureFeedbackCB GetFeedbackCallback() const override;
-  void OnWebRtcSendStreamEncoderVp8(bool enable) override;
+  void OnWebRtcSendStreamUserDataEncoder(bool enable) override;
   void OnNewTargetSize(const gfx::Size& target_size) override;
   void SetAiZoomSettings(const media::TizenAiZoomSettings& settings) override;
 
index 1ab87b9899d3309aacd68237eced52107b3bad81..51a0a0e899c78276cbc4c9eedfed77aa7e22f7b1 100644 (file)
@@ -270,8 +270,9 @@ MediaStreamVideoCapturerSource::GetSourceForTesting() {
   return source_.get();
 }
 
-void MediaStreamVideoCapturerSource::OnWebRtcSendStreamEncoderVp8(bool enable) {
-  source_->OnWebRtcSendStreamEncoderVp8(enable);
+void MediaStreamVideoCapturerSource::OnWebRtcSendStreamUserDataEncoder(
+    bool enable) {
+  source_->OnWebRtcSendStreamUserDataEncoder(enable);
 }
 
 }  // namespace blink
index 347e101d9376ac36a2a937f45456a8270ab76747..8795356bab271d897e06c4e19f3634caa9b30696 100644 (file)
@@ -59,7 +59,7 @@ class MODULES_EXPORT MediaStreamVideoCapturerSource
 
   media::VideoCapturerSource* GetSourceForTesting();
 
-  void OnWebRtcSendStreamEncoderVp8(bool enable) override;
+  void OnWebRtcSendStreamUserDataEncoder(bool enable) override;
 
  protected:
   void SetAiZoomSettings(const media::TizenAiZoomSettings& settings) override;
index ab8b5c7d61eff73a0a615f1d45fe8ea6614dbf17..ec83d382782809e4a0e228be083b07f98a40438e 100644 (file)
@@ -563,9 +563,9 @@ void MediaStreamVideoTrack::AddSink(
   } else if (uses_alpha == MediaStreamVideoSink::UsesAlpha::kNo) {
     alpha_discarding_sinks_.insert(sink);
   }
-  sink->SetWebRtcSendStreamEncoderVp8CB(
-      base::BindRepeating(&MediaStreamVideoTrack::OnWebRtcSendStreamEncoderVp8,
-                          base::Unretained(this)));
+  sink->SetWebRtcSendStreamEncoderUserDataCB(base::BindRepeating(
+      &MediaStreamVideoTrack::OnWebRtcSendStreamUserDataEncoder,
+      base::Unretained(this)));
   // Request source to deliver a frame because a new sink is added.
   if (!source_)
     return;
@@ -597,7 +597,7 @@ void MediaStreamVideoTrack::RemoveSink(WebMediaStreamSink* sink) {
   RemoveSinkInternal(&sinks_, sink);
   alpha_using_sinks_.erase(sink);
   alpha_discarding_sinks_.erase(sink);
-  sink->UnsetWebRtcSendStreamEncoderVp8CB();
+  sink->UnsetWebRtcSendStreamUserDataEncoderCB();
   frame_deliverer_->RemoveCallback(sink);
   secure_tracker_.Remove(sink);
   if (!source_)
@@ -810,11 +810,11 @@ void MediaStreamVideoTrack::OnFrameDropped(
   source_->OnFrameDropped(reason);
 }
 
-void MediaStreamVideoTrack::OnWebRtcSendStreamEncoderVp8(bool enable) {
+void MediaStreamVideoTrack::OnWebRtcSendStreamUserDataEncoder(bool enable) {
   DCHECK_CALLED_ON_VALID_THREAD(main_render_thread_checker_);
   if (!source_)
     return;
-  source_->OnWebRtcSendStreamEncoderVp8(enable);
+  source_->OnWebRtcSendStreamUserDataEncoder(enable);
 }
 
 void MediaStreamVideoTrack::SetMinimumFrameRate(double min_frame_rate) {
index e95a18bd65607ff3afe8e2809279e4867a10fc55..0ceccad2430e09aa3103134d0c5e0896716af842 100644 (file)
@@ -185,7 +185,7 @@ class MODULES_EXPORT MediaStreamVideoTrack : public MediaStreamTrackPlatform {
     is_screencast_ = is_screencast;
   }
 
-  void OnWebRtcSendStreamEncoderVp8(bool enable);
+  void OnWebRtcSendStreamUserDataEncoder(bool enable);
 
   void PlayerConsumerPaused();
   void PlayerConsumerResumed();
index 0ee93068736b3c066d6c63471829644cf005f7ee..2e3e02d2a26663c5a8b4ce142fb915f0e28629ee 100644 (file)
@@ -300,15 +300,15 @@ MediaStreamVideoWebRtcSink::SourceNeedsDenoisingForTesting() const {
   return video_source_->needs_denoising();
 }
 
-void MediaStreamVideoWebRtcSink::SetWebRtcSendStreamEncoderVp8CB(
-    WebRtcSendStreamEncoderVp8CB vp8cb) {
+void MediaStreamVideoWebRtcSink::SetWebRtcSendStreamEncoderUserDataCB(
+    WebRtcSendStreamEncoderUserDataCB user_data_cb) {
   if (video_source_)
-    video_source_->SetWebRtcSendStreamEncoderVp8CB(vp8cb);
+    video_source_->SetWebRtcSendStreamUserDataEncoderCB(user_data_cb);
 }
 
-void MediaStreamVideoWebRtcSink::UnsetWebRtcSendStreamEncoderVp8CB() {
+void MediaStreamVideoWebRtcSink::UnsetWebRtcSendStreamUserDataEncoderCB() {
   if (video_source_)
-    video_source_->UnsetWebRtcSendStreamEncoderVp8CB();
+    video_source_->UnsetWebRtcSendStreamUserDataEncoderCB();
 }
 
 void MediaStreamVideoWebRtcSink::PauseSink() {
index 071a43034ee163538de77a754a9672a54cda7b82..83bfcdf0d6bf69665b687247c96acd61fdffddb9 100644 (file)
@@ -47,9 +47,9 @@ class MODULES_EXPORT MediaStreamVideoWebRtcSink : public MediaStreamVideoSink {
 
   double GetRequiredMinFramesPerSec() const override { return 1; }
 
-  void SetWebRtcSendStreamEncoderVp8CB(
-      WebRtcSendStreamEncoderVp8CB vp8cb) override;
-  void UnsetWebRtcSendStreamEncoderVp8CB() override;
+  void SetWebRtcSendStreamEncoderUserDataCB(
+      WebRtcSendStreamEncoderUserDataCB) override;
+  void UnsetWebRtcSendStreamUserDataEncoderCB() override;
 
   void PauseSink() override;
   void StartSink() override;
index e7c0c5fee1df2dac484bc9226e83393aa8632588..949cb95f71913b6dcfdd0fd715cad1f98b14ae01 100644 (file)
@@ -237,7 +237,7 @@ WebVideoCaptureImplManager::CreateVideoCaptureImplForTesting(
   return nullptr;
 }
 
-void WebVideoCaptureImplManager::OnWebRtcSendStreamEncoderVp8(
+void WebVideoCaptureImplManager::OnWebRtcSendStreamUserDataEncoder(
     const media::VideoCaptureSessionId& id,
     bool enable) {
   DCHECK(render_main_task_runner_->BelongsToCurrentThread());
@@ -248,8 +248,9 @@ void WebVideoCaptureImplManager::OnWebRtcSendStreamEncoderVp8(
   // Use of base::Unretained() is safe because |devices_| is released on the
   // |io_task_runner()| as well.
   Platform::Current()->GetIOTaskRunner()->PostTask(
-      FROM_HERE, base::BindOnce(&VideoCaptureImpl::OnWebRtcSendStreamEncoderVp8,
-                                base::Unretained(it->impl.get()), enable));
+      FROM_HERE,
+      base::BindOnce(&VideoCaptureImpl::OnWebRtcSendStreamUserDataEncoder,
+                     base::Unretained(it->impl.get()), enable));
 }
 
 void WebVideoCaptureImplManager::StopCapture(
index 33659f16644eff17d79ce7b09f9bce867f5387ea..2607fd8b444e2608cc62f9662d3fd69621942047 100644 (file)
@@ -384,13 +384,13 @@ void WebRtcVideoTrackSource::OnEncodedVideoFrameCaptured(
 }
 #endif
 
-void WebRtcVideoTrackSource::SetWebRtcSendStreamEncoderVp8CB(
-    WebMediaStreamSink::WebRtcSendStreamEncoderVp8CB vp8cb) {
-  vp8cb_ = vp8cb;
+void WebRtcVideoTrackSource::SetWebRtcSendStreamUserDataEncoderCB(
+    WebMediaStreamSink::WebRtcSendStreamEncoderUserDataCB user_data_callback) {
+  user_data_callback_ = user_data_callback;
 }
 
-void WebRtcVideoTrackSource::UnsetWebRtcSendStreamEncoderVp8CB() {
-  vp8cb_.Reset();
+void WebRtcVideoTrackSource::UnsetWebRtcSendStreamUserDataEncoderCB() {
+  user_data_callback_.Reset();
 }
 
 WebRtcVideoTrackSource::FrameAdaptationParams
@@ -450,12 +450,18 @@ void WebRtcVideoTrackSource::DeliverFrame(
 void WebRtcVideoTrackSource::OnAddOrUpdateSink(
     rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
   if (sink->IsEncoderSink()) {
-    webrtc::VideoStreamEncoder* encoder =
-        static_cast<webrtc::VideoStreamEncoder*>(sink);
-    if (encoder->IsVp8Encoder() && !vp8cb_.is_null()) {
-      LOG(INFO) << "the current sink is vp8 encoder, enable espp decoded data "
-                   "callback";
-      vp8cb_.Run(true);
+    if (!user_data_callback_.is_null()
+#if !defined(TIZEN_CAPI_ENCODER_TV_API)
+        // Old H264 HW encoder implementation supports only input from
+        // subscaler, so data callback needs to be disabled due to possible
+        // performance degradation.
+        && static_cast<webrtc::VideoStreamEncoder*>(sink)->IsVp8Encoder()
+#endif
+    ) {
+      LOG(INFO)
+          << "the current sink is user data encoder, enable espp decoded data "
+             "callback";
+      user_data_callback_.Run(true);
     }
   }
 }
@@ -463,11 +469,16 @@ void WebRtcVideoTrackSource::OnAddOrUpdateSink(
 void WebRtcVideoTrackSource::OnRemoveSink(
     rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
   if (sink->IsEncoderSink()) {
-    webrtc::VideoStreamEncoder* encoder =
-        static_cast<webrtc::VideoStreamEncoder*>(sink);
-    if (encoder->IsVp8Encoder() && !vp8cb_.is_null()) {
-      LOG(INFO) << "the vp8 encoder is removed";
-      vp8cb_.Run(false);
+    if (!user_data_callback_.is_null()
+#if !defined(TIZEN_CAPI_ENCODER_TV_API)
+        // Old H264 HW encoder implementation supports only input from
+        // subscaler, so data callback needs to be disabled due to possible
+        // performance degradation.
+        && static_cast<webrtc::VideoStreamEncoder*>(sink)->IsVp8Encoder()
+#endif
+    ) {
+      LOG(INFO) << "user data encoder is removed";
+      user_data_callback_.Run(false);
     }
   }
 }
index c19513083ff793319b3c368470679f6ccac6e5c4..0463a87a75ffe43ead772f4dac3cb9f0ed390798 100644 (file)
@@ -71,9 +71,9 @@ class PLATFORM_EXPORT WebRtcVideoTrackSource
   using webrtc::VideoTrackSourceInterface::AddOrUpdateSink;
   using webrtc::VideoTrackSourceInterface::RemoveSink;
 
-  void SetWebRtcSendStreamEncoderVp8CB(
-      WebMediaStreamSink::WebRtcSendStreamEncoderVp8CB vp8cb);
-  void UnsetWebRtcSendStreamEncoderVp8CB();
+  void SetWebRtcSendStreamUserDataEncoderCB(
+      WebMediaStreamSink::WebRtcSendStreamEncoderUserDataCB);
+  void UnsetWebRtcSendStreamUserDataEncoderCB();
 
   void Pause();
   void Start();
@@ -120,7 +120,7 @@ class PLATFORM_EXPORT WebRtcVideoTrackSource
 
   const media::VideoCaptureFeedbackCB callback_;
 
-  WebMediaStreamSink::WebRtcSendStreamEncoderVp8CB vp8cb_;
+  WebMediaStreamSink::WebRtcSendStreamEncoderUserDataCB user_data_callback_;
 };
 
 }  // namespace blink
index 4d826a3141bc27fff3b4750d1c704f8ef7a7f0a2..e9bb8689564aec4a712907aa2784dfca6553dff0 100644 (file)
@@ -1132,10 +1132,12 @@ void VideoCaptureImpl::OnSetAiZoomSettings(
 
 constexpr base::TimeDelta VideoCaptureImpl::kCaptureStartTimeout;
 
-void VideoCaptureImpl::OnWebRtcSendStreamEncoderVp8(bool enable) {
+void VideoCaptureImpl::OnWebRtcSendStreamUserDataEncoder(bool enable) {
 #if defined(OS_TIZEN_TV_PRODUCT)
   if (platform_video_decoder_)
     platform_video_decoder_->EnableTbmBufferCallBack(enable);
+#else
+  ALLOW_UNUSED_LOCAL(enable);
 #endif
 }
 
index 1baffd7167bf37cdf74b176876a3432ed5ed7c85..29df056991e9b339f04df1e18db80545213c5202 100644 (file)
@@ -140,7 +140,7 @@ class PLATFORM_EXPORT VideoCaptureImpl
 
   void OnSetAiZoomSettings(const media::TizenAiZoomSettings& settings);
 
-  void OnWebRtcSendStreamEncoderVp8(bool enable);
+  void OnWebRtcSendStreamUserDataEncoder(bool enable);
 
  private:
   friend class VideoCaptureImplTest;
index d767322752a6a1832f4740b450c0e77bf15a0a5a..4cc4ebbc5f1247af43cb06e284a759606d6523d6 100644 (file)
@@ -132,7 +132,9 @@ class VideoStreamEncoderInterface : public rtc::VideoSinkInterface<VideoFrame> {
   // guaranteed that no encoded frames will be delivered to the sink.
   virtual void Stop() = 0;
 
+#if !defined(TIZEN_CAPI_ENCODER_TV_API)
   virtual bool IsVp8Encoder() = 0;
+#endif
 };
 
 }  // namespace webrtc
index 9cfc63d541ff5296b29ce1de790b80423ae1aa13..4908a4147d6268aeedc72f25f5f6606ad45a7ce4 100644 (file)
@@ -1301,52 +1301,50 @@ void VideoStreamEncoder::OnFrame(const VideoFrame& video_frame) {
   int64_t post_time_us = clock_->CurrentTime().us();
   ++posted_frames_waiting_for_encode_;
 
-  encoder_queue_.PostTask(
-      [this, incoming_frame, post_time_us, log_stats]() {
-        RTC_DCHECK_RUN_ON(&encoder_queue_);
-        encoder_stats_observer_->OnIncomingFrame(incoming_frame.width(),
-                                                 incoming_frame.height());
-        ++captured_frame_count_;
-        const int posted_frames_waiting_for_encode =
-            posted_frames_waiting_for_encode_.fetch_sub(1);
-        RTC_DCHECK_GT(posted_frames_waiting_for_encode, 0);
-        CheckForAnimatedContent(incoming_frame, post_time_us);
-        bool cwnd_frame_drop =
-            cwnd_frame_drop_interval_ &&
-            (cwnd_frame_counter_++ % cwnd_frame_drop_interval_.value() == 0);
-        if (posted_frames_waiting_for_encode == 1 && !cwnd_frame_drop) {
-          MaybeEncodeVideoFrame(incoming_frame, post_time_us);
-        } else {
-          if (cwnd_frame_drop) {
-            // Frame drop by congestion window pushback. Do not encode this
-            // frame.
-            ++dropped_frame_cwnd_pushback_count_;
-            encoder_stats_observer_->OnFrameDropped(
-                VideoStreamEncoderObserver::DropReason::kCongestionWindow);
-          } else {
-            // There is a newer frame in flight. Do not encode this frame.
-            RTC_LOG(LS_VERBOSE)
-                << "Incoming frame dropped due to that the encoder is blocked.";
-            ++dropped_frame_encoder_block_count_;
-            encoder_stats_observer_->OnFrameDropped(
-                VideoStreamEncoderObserver::DropReason::kEncoderQueue);
-          }
-          accumulated_update_rect_.Union(incoming_frame.update_rect());
-          accumulated_update_rect_is_valid_ &= incoming_frame.has_update_rect();
-        }
-        if (log_stats) {
-          RTC_LOG(LS_INFO) << "Number of frames: captured "
-                           << captured_frame_count_
-                           << ", dropped (due to congestion window pushback) "
-                           << dropped_frame_cwnd_pushback_count_
-                           << ", dropped (due to encoder blocked) "
-                           << dropped_frame_encoder_block_count_
-                           << ", interval_ms " << kFrameLogIntervalMs;
-          captured_frame_count_ = 0;
-          dropped_frame_cwnd_pushback_count_ = 0;
-          dropped_frame_encoder_block_count_ = 0;
-        }
-      });
+  encoder_queue_.PostTask([this, incoming_frame, post_time_us, log_stats]() {
+    RTC_DCHECK_RUN_ON(&encoder_queue_);
+    encoder_stats_observer_->OnIncomingFrame(incoming_frame.width(),
+                                             incoming_frame.height());
+    ++captured_frame_count_;
+    const int posted_frames_waiting_for_encode =
+        posted_frames_waiting_for_encode_.fetch_sub(1);
+    RTC_DCHECK_GT(posted_frames_waiting_for_encode, 0);
+    CheckForAnimatedContent(incoming_frame, post_time_us);
+    bool cwnd_frame_drop =
+        cwnd_frame_drop_interval_ &&
+        (cwnd_frame_counter_++ % cwnd_frame_drop_interval_.value() == 0);
+    if (posted_frames_waiting_for_encode == 1 && !cwnd_frame_drop) {
+      MaybeEncodeVideoFrame(incoming_frame, post_time_us);
+    } else {
+      if (cwnd_frame_drop) {
+        // Frame drop by congestion window pushback. Do not encode this
+        // frame.
+        ++dropped_frame_cwnd_pushback_count_;
+        encoder_stats_observer_->OnFrameDropped(
+            VideoStreamEncoderObserver::DropReason::kCongestionWindow);
+      } else {
+        // There is a newer frame in flight. Do not encode this frame.
+        RTC_LOG(LS_VERBOSE)
+            << "Incoming frame dropped due to that the encoder is blocked.";
+        ++dropped_frame_encoder_block_count_;
+        encoder_stats_observer_->OnFrameDropped(
+            VideoStreamEncoderObserver::DropReason::kEncoderQueue);
+      }
+      accumulated_update_rect_.Union(incoming_frame.update_rect());
+      accumulated_update_rect_is_valid_ &= incoming_frame.has_update_rect();
+    }
+    if (log_stats) {
+      RTC_LOG(LS_INFO) << "Number of frames: captured " << captured_frame_count_
+                       << ", dropped (due to congestion window pushback) "
+                       << dropped_frame_cwnd_pushback_count_
+                       << ", dropped (due to encoder blocked) "
+                       << dropped_frame_encoder_block_count_ << ", interval_ms "
+                       << kFrameLogIntervalMs;
+      captured_frame_count_ = 0;
+      dropped_frame_cwnd_pushback_count_ = 0;
+      dropped_frame_encoder_block_count_ = 0;
+    }
+  });
 }
 
 #if defined(OS_TIZEN)
@@ -1469,16 +1467,10 @@ void VideoStreamEncoder::SetEncoderRates(
   // bitrate.
   // TODO(perkj): Make sure all known encoder implementations handle zero
   // target bitrate and remove this check.
-#if !defined(OS_TIZEN_TV_PRODUCT)
-  // We need to pause even though `HasInternalSource` returns
-  // false. Also in newer Chromium revision `has_internal_source`
-  // no longer exists. How to do it properly? We should respect
-  // `Encode` requests instead of driving encoder internal.
   if (!HasInternalSource() &&
       rate_settings.rate_control.bitrate.get_sum_bps() == 0) {
     return;
   }
-#endif  // !defined(OS_TIZEN_TV_PRODUCT)
 
   if (block_bitrate_changes_) {
     auto new_rate_control = rate_settings.rate_control;
@@ -2046,9 +2038,11 @@ DataRate VideoStreamEncoder::UpdateTargetBitrate(DataRate target_bitrate,
   return updated_target_bitrate;
 }
 
+#if !defined(TIZEN_CAPI_ENCODER_TV_API)
 bool VideoStreamEncoder::IsVp8Encoder() {
   return encoder_config_.codec_type == VideoCodecType::kVideoCodecVP8;
 }
+#endif
 
 void VideoStreamEncoder::OnBitrateUpdated(DataRate target_bitrate,
                                           DataRate stable_target_bitrate,
index 36d5f78ca0450e736d89085fc60468958ff2838f..42e5fffc018b90a2c0d63917a33cfec3ec5f08d6 100644 (file)
@@ -118,7 +118,10 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface,
                                double cwnd_reduce_ratio);
 
   bool IsEncoderSink() override { return true; }
+
+#if !defined(TIZEN_CAPI_ENCODER_TV_API)
   bool IsVp8Encoder() override;
+#endif
 
 #if defined(TIZEN_HW_ENCODER) && !defined(TIZEN_CAPI_ENCODER_TV_API)
   void TrackEnded() override;
index 99bd35a7cdf6eba95ce404da88611746794070c9..55cf3795c94c46c84635d2a4dbf5de5db8dc1d92 100644 (file)
@@ -6,9 +6,12 @@
 
 #include <media/media_packet_internal.h>
 
+#include "absl/types/optional.h"
 #include "base/auto_reset.h"
+#include "base/memory/scoped_refptr.h"
 #include "base/threading/thread_task_runner_handle.h"
 #include "media/base/tizen/logger/media_logger.h"
+#include "media/base/video_types.h"
 #include "third_party/blink/renderer/platform/peerconnection/tizen_video_encoder_facade_helper.h"
 #include "third_party/blink/renderer/platform/peerconnection/video_coding_utils.h"
 #include "third_party/blink/renderer/platform/webrtc/webrtc_video_frame_adapter.h"
@@ -79,6 +82,47 @@ std::ostream& operator<<(std::ostream& os, EncoderState encoder_state) {
     }                                                                        \
   } while (0)
 
+scoped_refptr<media::VideoFrame> GetVideoFrameWithTBMSurface(
+    const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& video_frame_buffer) {
+  if (video_frame_buffer->type() != webrtc::VideoFrameBuffer::Type::kNative)
+    return nullptr;
+
+  const auto* frame_adapter =
+      static_cast<blink::WebRtcVideoFrameAdapter*>(video_frame_buffer.get());
+  CHECK(frame_adapter);
+
+  auto video_frame = frame_adapter->getMediaVideoFrame();
+  CHECK(video_frame);
+
+  if (video_frame->format() ==
+          media::VideoPixelFormat::PIXEL_FORMAT_TBM_INTER_PROCESS_BUFFER ||
+      video_frame->format() ==
+          media::VideoPixelFormat::PIXEL_FORMAT_TBM_SURFACE) {
+    return video_frame;
+  }
+
+  return nullptr;
+}
+
+rtc::scoped_refptr<const webrtc::NV12BufferInterface> GetNV12Buffer(
+    const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& video_frame_buffer) {
+  rtc::scoped_refptr<const webrtc::NV12BufferInterface> nv12_buffer;
+  if (video_frame_buffer->type() == webrtc::VideoFrameBuffer::Type::kNV12) {
+    nv12_buffer = rtc::scoped_refptr<const webrtc::NV12BufferInterface>(
+        video_frame_buffer->GetNV12());
+  } else {
+    auto i420_buffer = video_frame_buffer->ToI420();
+    if (!i420_buffer) {
+      TIZEN_MEDIA_LOG_NO_INSTANCE(WARNING)
+          << "Could not convert video frame buffer to I420";
+      return nullptr;
+    }
+    nv12_buffer = webrtc::NV12Buffer::Copy(*i420_buffer);
+  }
+
+  return nv12_buffer;
+}
+
 }  // namespace
 
 TizenVideoEncoderFacade::TizenVideoEncoderFacade(
@@ -122,11 +166,6 @@ void TizenVideoEncoderFacade::AddStream(StreamDescription client_description) {
   auto stream_id = std::get<BackendParams>(client_description).stream_id;
   const auto& client_params = std::get<StreamParams>(client_description);
 
-  // We are in a preparing stage, before first `Encode` call, so we still don't
-  // know if encoding will be performed using data from subscaler or data
-  // provided by user. Because of that, we need to check if provided stream
-  // parameters fulfills encoder requirements for at least one of encoding modes
-  // - subscaler or user data mode.
   if (!ValidateParamsInternal(client_params)) {
     TIZEN_MEDIA_LOG(ERROR)
         << "Cannot add stream - provided parameters are not supported";
@@ -198,37 +237,12 @@ void TizenVideoEncoderFacade::Encode(
     return;
   }
 
-  auto encoding_mode = GetFrameEncodingModeInternal(frame);
-  if (encoding_mode == EncodingMode::kUnspecified &&
-      encoding_mode_ != EncodingMode::kUnspecified) {
-    TIZEN_MEDIA_LOG(WARNING)
-        << "Cannot determine encoding mode from frame - assuming no "
-           "encoding mode change";
-  } else if (encoding_mode == EncodingMode::kUnspecified) {
-    TIZEN_MEDIA_LOG(WARNING)
-        << "Encoding frame failed - cannot determine encoding mode"
-        << " from provided frame and encoder does not have any mode set.";
-    return;
-  }
-
-  if (encoding_mode_ == EncodingMode::kUnspecified) {
-    ReconfigureEncoderModeInternal(encoding_mode);
-  }
-
-  if (encoding_mode != encoding_mode_) {
-    TIZEN_MEDIA_LOG(WARNING)
-        << "Frame with different encoding mode (" << encoding_mode
-        << ") than previously set (" << encoding_mode_
-        << ") received. Reconfiguring encoder.";
-    ReconfigureEncoderModeInternal(encoding_mode);
-  }
-
   StreamParams frame_params{{ClientParamsKeys::kWidth, frame.width()},
                             {ClientParamsKeys::kHeight, frame.height()}};
   if (!ValidateParamsInternal(frame_params)) {
     TIZEN_MEDIA_LOG(ERROR)
         << "Encoding frame failed - provided frame resolution is not"
-        << " supported in encoder in current mode - " << encoding_mode_;
+        << " supported.";
     return;
   }
 
@@ -240,19 +254,8 @@ void TizenVideoEncoderFacade::Encode(
     return;
   }
 
-  if (encoding_mode_ == EncodingMode::kEncodingFromSubscaler) {
-    // Data is provided to encoder directly from subscaler - return here.
-
-    // To prevent running `frame_dropped_closure`, it is replaced by by empty
-    // closure. Simple usage of `operator=` will not work, because it runs
-    // internal callback.
-    frame_dropped_closure.ReplaceClosure({});
-    return;
-  }
-
-  CHECK(encoding_mode_ == EncodingMode::kEncodingFromUserData);
-  EncodeUserDataInternal(std::move(frame), std::move(frame_types),
-                         std::move(frame_dropped_closure));
+  EncodeInternal(std::move(frame), std::move(frame_types),
+                 std::move(frame_dropped_closure));
 }
 
 void TizenVideoEncoderFacade::UpdateStreamBitrate(
@@ -329,6 +332,8 @@ void TizenVideoEncoderFacade::Initialize() {
       RTCVideoEncoderFacadeHelper::OnEncoderStateChangedCallback, this));
 
   CHECK_AND_RETURN(encoder_set_type(encoder_.get(), ENCODER_TYPE_H264));
+  CHECK_AND_RETURN(
+      encoder_set_input_type(encoder_.get(), ENCODER_INPUT_TYPE_USER));
 
   SetEncoderParametersRangeInternal();
 }
@@ -360,33 +365,17 @@ void TizenVideoEncoderFacade::Resume() {
 ////////// RTCVideoEncoderFacadeTizen::TizenVideoEncoderFacade private
 /////////////
 
-void TizenVideoEncoderFacade::EncodeUserDataInternal(
+void TizenVideoEncoderFacade::EncodeInternal(
     webrtc::VideoFrame frame,
     std::vector<webrtc::VideoFrameType> frame_types,
     base::ScopedClosureRunner frame_dropped_closure) {
   CHECK(encoder_task_runner_->BelongsToCurrentThread());
-  CHECK(encoding_mode_ == EncodingMode::kEncodingFromUserData);
-  CHECK(frame.video_frame_buffer());
 
-  rtc::scoped_refptr<const webrtc::NV12BufferInterface> nv12_buffer;
-
-  if (frame.video_frame_buffer()->type() ==
-      webrtc::VideoFrameBuffer::Type::kNV12) {
-    nv12_buffer = rtc::scoped_refptr<const webrtc::NV12BufferInterface>(
-        frame.video_frame_buffer()->GetNV12());
+  ExternalBuffer external_buffer;
 
-  } else {
-    auto i420_buffer = frame.video_frame_buffer()->ToI420();
-    if (!i420_buffer) {
-      TIZEN_MEDIA_LOG(WARNING)
-          << "Could not convert video frame buffer to I420";
-      return;
-    }
-    nv12_buffer = webrtc::NV12Buffer::Copy(*i420_buffer);
-  }
-
-  if (!nv12_buffer) {
-    TIZEN_MEDIA_LOG(WARNING) << "NV12 buffer is null";
+  auto video_frame_buffer = frame.video_frame_buffer();
+  if (!video_frame_buffer) {
+    TIZEN_MEDIA_LOG(ERROR) << "Video frame buffer for provided frame is empty";
     return;
   }
 
@@ -400,10 +389,10 @@ void TizenVideoEncoderFacade::EncodeUserDataInternal(
 
       CHECK_AND_RETURN_MEDIA_FORMAT(
           media_format_set_video_mime(media_format.get(), MEDIA_FORMAT_NV12));
-      CHECK_AND_RETURN_MEDIA_FORMAT(media_format_set_video_width(
-          media_format.get(), nv12_buffer->width()));
-      CHECK_AND_RETURN_MEDIA_FORMAT(media_format_set_video_height(
-          media_format.get(), nv12_buffer->height()));
+      CHECK_AND_RETURN_MEDIA_FORMAT(
+          media_format_set_video_width(media_format.get(), frame.width()));
+      CHECK_AND_RETURN_MEDIA_FORMAT(
+          media_format_set_video_height(media_format.get(), frame.height()));
       CHECK_AND_RETURN_MEDIA_FORMAT(media_format_set_video_frame_rate(
           media_format.get(), cricket::kDefaultVideoEncodeMaxFramerate));
     }
@@ -416,11 +405,27 @@ void TizenVideoEncoderFacade::EncodeUserDataInternal(
     // once.
     for (size_t i = 0; i < packets_being_encoded_.size() + 1; ++i) {
       media_packet_h media_packet_ptr{nullptr};
-      CHECK_AND_RETURN_MEDIA_PACKET(media_packet_new_from_external_memory(
-          media_format.get(), const_cast<uint8_t*>(nv12_buffer->DataY()),
-          NV12DataSize(nv12_buffer->height(), nv12_buffer->StrideY(),
-                       nv12_buffer->StrideUV()),
-          nullptr, nullptr, &media_packet_ptr));
+
+      if (auto video_frame_with_tbm_surface =
+              GetVideoFrameWithTBMSurface(video_frame_buffer)) {
+        CHECK_AND_RETURN_MEDIA_PACKET(media_packet_new_from_tbm_surface(
+            media_format.get(),
+            static_cast<tbm_surface_h>(
+                video_frame_with_tbm_surface->GetTbmBuffer().tbm_surface),
+            nullptr, nullptr, &media_packet_ptr));
+        external_buffer = std::move(video_frame_with_tbm_surface);
+      } else if (auto nv12_buffer = GetNV12Buffer(video_frame_buffer)) {
+        CHECK_AND_RETURN_MEDIA_PACKET(media_packet_new_from_external_memory(
+            media_format.get(), const_cast<uint8_t*>(nv12_buffer->DataY()),
+            NV12DataSize(nv12_buffer->height(), nv12_buffer->StrideY(),
+                         nv12_buffer->StrideUV()),
+            nullptr, nullptr, &media_packet_ptr));
+        external_buffer = std::move(nv12_buffer);
+      } else {
+        TIZEN_MEDIA_LOG(WARNING) << "Could not get data from VideoFrameBuffer";
+        return;
+      }
+
       media_packet = MediaPacketType(media_packet_ptr);
 
       if (!packets_being_encoded_.count(media_packet.get()))
@@ -444,77 +449,11 @@ void TizenVideoEncoderFacade::EncodeUserDataInternal(
   // callback.
   frame_dropped_closure.ReplaceClosure({});
 
-  packets_to_encode_.emplace(std::move(media_packet), std::move(nv12_buffer));
+  packets_to_encode_.emplace(std::move(media_packet),
+                             std::move(external_buffer));
   SubmitPacketInternal();
 }
 
-EncodingMode TizenVideoEncoderFacade::GetFrameEncodingModeInternal(
-    const webrtc::VideoFrame& frame) {
-  CHECK(encoder_task_runner_->BelongsToCurrentThread());
-
-  auto buffer = frame.video_frame_buffer();
-  if (!buffer) {
-    TIZEN_MEDIA_LOG(ERROR) << "Video frame buffer for provided frame is empty";
-    ReportErrorToProxyInternal();
-    return EncodingMode::kUnspecified;
-  }
-
-  if (buffer->type() != webrtc::VideoFrameBuffer::Type::kNative) {
-    TIZEN_MEDIA_LOG(DEBUG) << "Using encoding from user data";
-    return EncodingMode::kEncodingFromUserData;
-  }
-
-  const auto* frame_adapter =
-      static_cast<blink::WebRtcVideoFrameAdapter*>(buffer.get());
-  CHECK(frame_adapter);
-
-  auto video_frame = frame_adapter->getMediaVideoFrame();
-  CHECK(video_frame);
-
-  if (video_frame->storage_type() == media::VideoFrame::STORAGE_HOLE) {
-    TIZEN_MEDIA_LOG(DEBUG) << "Using encoding from subscaler";
-    return EncodingMode::kEncodingFromSubscaler;
-  }
-
-  TIZEN_MEDIA_LOG(DEBUG) << "Using encoding from user data";
-  return EncodingMode::kEncodingFromUserData;
-}
-
-void TizenVideoEncoderFacade::ReconfigureEncoderModeInternal(
-    EncodingMode new_encoding_mode) {
-  TIZEN_MEDIA_LOG(INFO) << new_encoding_mode;
-  CHECK(encoder_task_runner_->BelongsToCurrentThread());
-
-  if (encoding_mode_ == new_encoding_mode) {
-    TIZEN_MEDIA_LOG(VERBOSE);
-    return;
-  }
-
-  if (encoder_state_ != EncoderState::kPreparing &&
-      encoder_state_ != EncoderState::kEncoding) {
-    TIZEN_MEDIA_LOG(ERROR)
-        << "Reconfiguring encoding mode failed - wrong encoder state";
-    return;
-  }
-
-  auto need_to_restart_encoder = encoder_state_ == EncoderState::kEncoding;
-  if (need_to_restart_encoder) {
-    StopInternal();
-  }
-
-  if (encoder_state_ != EncoderState::kPreparing) {
-    TIZEN_MEDIA_LOG(INFO) << "Cannot reconfigure encoder in state = "
-                          << encoder_state_;
-    return;
-  }
-
-  encoding_mode_ = new_encoding_mode;
-  SetEncoderInputTypeInternal();
-
-  if (need_to_restart_encoder)
-    StartInternal();
-}
-
 void TizenVideoEncoderFacade::ReportErrorToProxyInternal() {
   TIZEN_MEDIA_LOG(INFO);
   CHECK(encoder_task_runner_->BelongsToCurrentThread());
@@ -550,68 +489,37 @@ void TizenVideoEncoderFacade::ReportErrorToProxyInternal() {
   };
 }
 
-void TizenVideoEncoderFacade::SetEncoderInputTypeInternal() {
+void TizenVideoEncoderFacade::SetEncoderParametersRangeInternal() {
+  TIZEN_MEDIA_LOG(INFO);
   CHECK(encoder_task_runner_->BelongsToCurrentThread());
-  CHECK(encoding_mode_ != EncodingMode::kUnspecified);
 
-  if (encoder_state_ != EncoderState::kPreparing) {
-    TIZEN_MEDIA_LOG(WARNING)
-        << "Setting encoder input type failed - wrong encoder state";
-    return;
-  }
+  encoder_params_map_ = {
+      {ClientParamsKeys::kStreamId,
+       {.name = absl::nullopt, .range = {0, ENCODER_STREAM_MAX - 1}}},
+      {ClientParamsKeys::kBitrate,
+       {.name = ENCODER_VIDEO_PARAM_BITRATE, .range = {}}},
+      {ClientParamsKeys::kFramerate,
+       {.name = ENCODER_VIDEO_PARAM_FRAMERATE, .range = {}}},
+      {ClientParamsKeys::kHeight,
+       {.name = ENCODER_VIDEO_PARAM_HEIGHT, .range = {}}},
+      {ClientParamsKeys::kWidth,
+       {.name = ENCODER_VIDEO_PARAM_WIDTH, .range = {}}},
+  };
 
-  TIZEN_MEDIA_LOG(INFO) << "Setting encoder input type: "
-                        << ToEncoderInputType(encoding_mode_) << " ("
-                        << encoding_mode_ << ")";
+  encoder_param_s param{.type = ENCODER_TYPE_H264,
+                        .input_type = ENCODER_INPUT_TYPE_USER};
 
-  CHECK_AND_RETURN(encoder_set_input_type(encoder_.get(),
-                                          ToEncoderInputType(encoding_mode_)));
-}
+  for (auto& entry : encoder_params_map_) {
+    if (!entry.second.name.has_value())
+      continue;
 
-void TizenVideoEncoderFacade::SetEncoderParametersRangeInternal() {
-  TIZEN_MEDIA_LOG(INFO);
-  CHECK(encoder_task_runner_->BelongsToCurrentThread());
+    int start;
+    int end;
 
-  // Get allowed values for both subscaler encoding and user data encoding.
-  // Before first `Encode` method is called, we don't know if we are working in
-  // subscaler encoding or user data encoding mode, so we need to get supported
-  // values for both of these modes.
-  {
-    EncoderParamsMapType initial_values = {
-        {ClientParamsKeys::kStreamId,
-         {.name = absl::nullopt, .range = {0, ENCODER_STREAM_MAX - 1}}},
-        {ClientParamsKeys::kBitrate,
-         {.name = ENCODER_VIDEO_PARAM_BITRATE, .range = {}}},
-        {ClientParamsKeys::kFramerate,
-         {.name = ENCODER_VIDEO_PARAM_FRAMERATE, .range = {}}},
-        {ClientParamsKeys::kHeight,
-         {.name = ENCODER_VIDEO_PARAM_HEIGHT, .range = {}}},
-        {ClientParamsKeys::kWidth,
-         {.name = ENCODER_VIDEO_PARAM_WIDTH, .range = {}}},
-    };
-
-    encoder_params_map_ = {
-        {EncodingMode::kEncodingFromSubscaler, initial_values},
-        {EncodingMode::kEncodingFromUserData, initial_values}};
-  }
-
-  for (auto& encoding_params : encoder_params_map_) {
-    encoder_param_s param{
-        .type = ENCODER_TYPE_H264,
-        .input_type = ToEncoderInputType(encoding_params.first)};
-
-    for (auto& entry : encoding_params.second) {
-      if (!entry.second.name.has_value())
-        continue;
-
-      int start;
-      int end;
-
-      CHECK_AND_RETURN(encoder_get_param_range(
-          encoder_.get(), &param, entry.second.name.value(), &start, &end));
-      entry.second.range = {static_cast<uint32_t>(start),
-                            static_cast<uint32_t>(end)};
-    }
+    CHECK_AND_RETURN(encoder_get_param_range(
+        encoder_.get(), &param, entry.second.name.value(), &start, &end));
+    entry.second.range = {static_cast<uint32_t>(start),
+                          static_cast<uint32_t>(end)};
   }
 }
 
@@ -715,37 +623,12 @@ bool TizenVideoEncoderFacade::ValidateParamsInternal(
   CHECK(encoder_task_runner_->BelongsToCurrentThread());
   CHECK(!encoder_params_map_.empty());
 
-  auto validation_succeeded = false;
-  if (encoding_mode_ != EncodingMode::kUnspecified) {
-    // encoding_mode_ was already set, so we know which encoder parameters are
-    // appropriate for us.
-    validation_succeeded =
-        ValidateParamsForEncodingModeInternal(encoding_mode_, client_params);
-  } else {
-    // encoding_mode_ was not set yet, so best we can do, is to check if there
-    // is any encoding mode of platform encoder, which will support provided
-    // parameters.
-    for (const auto& mode_parameters_pair : encoder_params_map_)
-      validation_succeeded |= ValidateParamsForEncodingModeInternal(
-          mode_parameters_pair.first, client_params);
-  }
-
-  return validation_succeeded;
-}
-
-bool TizenVideoEncoderFacade::ValidateParamsForEncodingModeInternal(
-    EncodingMode encoding_mode,
-    const StreamParams& client_params) {
-  CHECK(encoder_task_runner_->BelongsToCurrentThread());
-  CHECK(encoder_params_map_.count(encoding_mode));
-
   for (const auto& param : client_params) {
-    const auto& encoder_parameter_struct =
-        encoder_params_map_.at(encoding_mode).at(param.first);
+    const auto& encoder_parameter_struct = encoder_params_map_.at(param.first);
     if (!gfx::Range{param.second}.IsBoundedBy(encoder_parameter_struct.range)) {
       TIZEN_MEDIA_LOG(WARNING)
           << "Unsupported value `" << param.second << "` for parameter `"
-          << param.first << "` in encoding mode: `" << encoding_mode
+          << param.first
           << "`. Supported range: " << encoder_parameter_struct.range;
       return false;
     }
@@ -754,22 +637,6 @@ bool TizenVideoEncoderFacade::ValidateParamsForEncodingModeInternal(
   return true;
 }
 
-encoder_input_type_e TizenVideoEncoderFacade::ToEncoderInputType(
-    EncodingMode encoding_mode) const {
-  switch (encoding_mode) {
-    case EncodingMode::kEncodingFromSubscaler:
-      return ENCODER_INPUT_TYPE_SYSTEM;
-    case EncodingMode::kEncodingFromUserData:
-      return ENCODER_INPUT_TYPE_USER;
-    case EncodingMode::kUnspecified:
-      TIZEN_MEDIA_LOG(ERROR) << "No mapping from EncodingMode::kUnspecified to "
-                             << "encoder_input_type_e";
-      return {};
-  }
-  NOTREACHED();
-  return {};
-}
-
 void TizenVideoEncoderFacade::OnDroppedFrame() {
   TIZEN_MEDIA_LOG(INFO);
   CHECK(encoder_task_runner_->BelongsToCurrentThread());
index 573b3e1e94baa0971a72f419677c91b5872069de..acef88ae14fd32d2074374095ca3942f74b3173d 100644 (file)
 #include <encoder.h>
 
 #include "absl/types/optional.h"
+#include "absl/types/variant.h"
 #include "base/memory/weak_ptr.h"
 #include "base/single_thread_task_runner.h"
+#include "media/base/video_frame.h"
 #include "third_party/blink/renderer/platform/peerconnection/tizen_video_encoder_facade_client.h"
 #include "third_party/blink/renderer/platform/peerconnection/tizen_video_encoder_facade_utils.h"
 #include "third_party/blink/renderer/platform/peerconnection/tizen_video_encoder_utils.h"
@@ -77,14 +79,9 @@ class TizenVideoEncoderFacade {
 
  private:
   // Internal method used to pass user data to encoder.
-  void EncodeUserDataInternal(webrtc::VideoFrame frame,
-                              std::vector<webrtc::VideoFrameType>,
-                              base::ScopedClosureRunner frame_dropped_closure);
-  EncodingMode GetFrameEncodingModeInternal(const webrtc::VideoFrame&);
-
-  // Method used for setting new encoder mode. If player is currently encoding,
-  // it will be stopped and resumed after reconfiguration.
-  void ReconfigureEncoderModeInternal(EncodingMode new_encoding_mode);
+  void EncodeInternal(webrtc::VideoFrame frame,
+                      std::vector<webrtc::VideoFrameType>,
+                      base::ScopedClosureRunner frame_dropped_closure);
 
   // Method used for reporting error from both platform encoder and this
   // class to `TizenVideoEncoderClient`'s. After it is called,
@@ -92,9 +89,6 @@ class TizenVideoEncoderFacade {
   // suspend-resume scenario.
   void ReportErrorToProxyInternal();
 
-  // Helper method used to set Encoding Mode on platform encoder.
-  void SetEncoderInputTypeInternal();
-
   // Method used to collect supported value ranges (bitrate, framerate,
   // dimensions) from platform encoder for further validation of newly-added
   // streams and provided video frames.
@@ -109,9 +103,6 @@ class TizenVideoEncoderFacade {
   // Method used for verifying provided parameters against value ranges provided
   // by platform encoder.
   bool ValidateParamsInternal(const StreamParams&);
-  bool ValidateParamsForEncodingModeInternal(EncodingMode, const StreamParams&);
-
-  encoder_input_type_e ToEncoderInputType(EncodingMode) const;
 
   // Callback invoked when frame will be dropped due to encoding error.
   void OnDroppedFrame();
@@ -142,11 +133,11 @@ class TizenVideoEncoderFacade {
       std::unique_ptr<std::remove_pointer<encoder_h>::type, EncoderDeleter>;
   EncoderType encoder_;
 
-  EncodingMode encoding_mode_{EncodingMode::kUnspecified};
-
   // External buffer used to create media_packet_h. It needs to be released when
   // input packet is processed by platform encoder.
-  using ExternalBuffer = rtc::scoped_refptr<const webrtc::NV12BufferInterface>;
+  using ExternalBuffer =
+      absl::variant<rtc::scoped_refptr<const webrtc::NV12BufferInterface>,
+                    scoped_refptr<media::VideoFrame>>;
 
   // Map of packets waiting to be encoded in internal encoder queue
   std::unordered_map<media_packet_h, ExternalBuffer> packets_being_encoded_;
@@ -184,10 +175,10 @@ class TizenVideoEncoderFacade {
   using EncoderParamsMapType =
       std::unordered_map<ClientParamsKeys, EncoderParam>;
 
-  // Map containing ranges of supported values for specified parameters for
-  // encoding mode. It can be used to validate new stream parameters or
-  // video frame provided to `Encode` method.
-  std::unordered_map<EncodingMode, EncoderParamsMapType> encoder_params_map_{};
+  // Map containing ranges of supported input parameter values for encoder. It
+  // can be used to validate new stream parameters or video frame provided to
+  // `Encode` method.
+  EncoderParamsMapType encoder_params_map_{};
 
   // Needed to allow posting tasks from platform encoder callbacks.
   friend struct RTCVideoEncoderFacadeHelper;
index e6995532d77e6a8cdabc993ae44d5f8f62145dd2..f1526e1a1f314d6ab763805d642a3b3f486018ab 100644 (file)
@@ -88,15 +88,6 @@ std::ostream& operator<<(std::ostream& os, media_packet_error_e err) {
   return os;
 }
 
-std::ostream& operator<<(std::ostream& os, const EncodingMode& encoding_mode) {
-  switch (encoding_mode) {
-    DECLARE_CASE_VALUE(EncodingMode::kUnspecified);
-    DECLARE_CASE_VALUE(EncodingMode::kEncodingFromSubscaler);
-    DECLARE_CASE_VALUE(EncodingMode::kEncodingFromUserData);
-  }
-  return os;
-}
-
 #undef DECLARE_CASE_VALUE
 
 void EncoderDeleter::operator()(encoder_h encoder) {
index 0ad98f6affd2597cee41118ea4b7df7a6249427c..c64a4a4349aa1123733421e0b3117333ef58bb87 100644 (file)
 namespace blink {
 namespace tizen {
 
-// Enum representing currently used encoding mode of platform encoder.
-// Once set, it cannot be changed.
-enum class EncodingMode {
-  kUnspecified,
-  kEncodingFromSubscaler,
-  kEncodingFromUserData
-};
-
 int NV12DataSize(int height, int stride_y, int stride_uv);
 
 std::ostream& operator<<(std::ostream& os, encoder_input_type_e status);
@@ -32,7 +24,6 @@ std::ostream& operator<<(std::ostream& os, encoder_buffer_status_e status);
 std::ostream& operator<<(std::ostream& os, encoder_error_e err);
 std::ostream& operator<<(std::ostream& os, media_format_error_e err);
 std::ostream& operator<<(std::ostream& os, media_packet_error_e err);
-std::ostream& operator<<(std::ostream& os, const EncodingMode& encoding_mode);
 
 struct EncoderDeleter {
   void operator()(encoder_h encoder);