[M120 Migration][MM] Handle live stream duration and currentTime
[platform/framework/web/chromium-efl.git] / third_party / blink / renderer / platform / media / web_media_player_impl.cc
1 // Copyright 2013 The Chromium Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "third_party/blink/renderer/platform/media/web_media_player_impl.h"
6
7 #include <algorithm>
8 #include <cmath>
9 #include <limits>
10 #include <memory>
11 #include <string>
12 #include <utility>
13
14 #include "base/check.h"
15 #include "base/command_line.h"
16 #include "base/debug/alias.h"
17 #include "base/debug/crash_logging.h"
18 #include "base/feature_list.h"
19 #include "base/functional/bind.h"
20 #include "base/functional/callback.h"
21 #include "base/functional/callback_helpers.h"
22 #include "base/location.h"
23 #include "base/memory/weak_ptr.h"
24 #include "base/metrics/histogram_functions.h"
25 #include "base/metrics/histogram_macros.h"
26 #include "base/strings/string_number_conversions.h"
27 #include "base/strings/stringprintf.h"
28 #include "base/task/bind_post_task.h"
29 #include "base/task/sequenced_task_runner.h"
30 #include "base/task/single_thread_task_runner.h"
31 #include "base/task/thread_pool.h"
32 #include "base/trace_event/memory_dump_manager.h"
33 #include "base/trace_event/trace_event.h"
34 #include "build/build_config.h"
35 #include "cc/layers/video_layer.h"
36 #include "components/viz/common/gpu/raster_context_provider.h"
37 #include "media/audio/null_audio_sink.h"
38 #include "media/base/audio_renderer_sink.h"
39 #include "media/base/cdm_context.h"
40 #include "media/base/demuxer.h"
41 #include "media/base/encryption_scheme.h"
42 #include "media/base/limits.h"
43 #include "media/base/media_content_type.h"
44 #include "media/base/media_log.h"
45 #include "media/base/media_player_logging_id.h"
46 #include "media/base/media_switches.h"
47 #include "media/base/media_url_demuxer.h"
48 #include "media/base/memory_dump_provider_proxy.h"
49 #include "media/base/renderer.h"
50 #include "media/base/routing_token_callback.h"
51 #include "media/base/supported_types.h"
52 #include "media/base/timestamp_constants.h"
53 #include "media/base/video_frame.h"
54 #include "media/filters/chunk_demuxer.h"
55 #include "media/filters/ffmpeg_demuxer.h"
56 #include "media/filters/memory_data_source.h"
57 #include "media/filters/pipeline_controller.h"
58 #include "media/learning/common/learning_task_controller.h"
59 #include "media/learning/common/media_learning_tasks.h"
60 #include "media/learning/mojo/public/cpp/mojo_learning_task_controller.h"
61 #include "media/media_buildflags.h"
62 #include "media/remoting/remoting_constants.h"
63 #include "mojo/public/cpp/bindings/pending_receiver.h"
64 #include "mojo/public/cpp/bindings/pending_remote.h"
65 #include "net/base/data_url.h"
66 #include "services/device/public/mojom/battery_monitor.mojom-blink.h"
67 #include "third_party/blink/public/common/media/display_type.h"
68 #include "third_party/blink/public/common/media/watch_time_reporter.h"
69 #include "third_party/blink/public/common/thread_safe_browser_interface_broker_proxy.h"
70 #include "third_party/blink/public/platform/media/url_index.h"
71 #include "third_party/blink/public/platform/web_content_decryption_module.h"
72 #include "third_party/blink/public/platform/web_encrypted_media_types.h"
73 #include "third_party/blink/public/platform/web_fullscreen_video_status.h"
74 #include "third_party/blink/public/platform/web_media_player_client.h"
75 #include "third_party/blink/public/platform/web_media_player_encrypted_media_client.h"
76 #include "third_party/blink/public/platform/web_media_player_source.h"
77 #include "third_party/blink/public/platform/web_media_source.h"
78 #include "third_party/blink/public/platform/web_runtime_features.h"
79 #include "third_party/blink/public/platform/web_security_origin.h"
80 #include "third_party/blink/public/platform/web_string.h"
81 #include "third_party/blink/public/platform/web_surface_layer_bridge.h"
82 #include "third_party/blink/public/platform/web_url.h"
83 #include "third_party/blink/public/platform/webaudiosourceprovider_impl.h"
84 #include "third_party/blink/public/strings/grit/blink_strings.h"
85 #include "third_party/blink/public/web/modules/media/webmediaplayer_util.h"
86 #include "third_party/blink/public/web/web_document.h"
87 #include "third_party/blink/public/web/web_frame.h"
88 #include "third_party/blink/public/web/web_local_frame.h"
89 #include "third_party/blink/public/web/web_view.h"
90 #include "third_party/blink/renderer/platform/media/buffered_data_source_host_impl.h"
91 #include "third_party/blink/renderer/platform/media/power_status_helper.h"
92 #include "third_party/blink/renderer/platform/media/video_decode_stats_reporter.h"
93 #include "third_party/blink/renderer/platform/media/web_content_decryption_module_impl.h"
94 #include "third_party/blink/renderer/platform/media/web_media_source_impl.h"
95 #include "ui/gfx/geometry/size.h"
96
97 #if BUILDFLAG(ENABLE_HLS_DEMUXER)
98 #include "third_party/blink/renderer/platform/media/hls_data_source_provider_impl.h"
99 #endif  // BUILDFLAG(ENABLE_HLS_DEMUXER)
100
101 #if BUILDFLAG(IS_ANDROID)
102 #include "media/base/android/media_codec_util.h"
103 #endif
104
105 #if defined(TIZEN_VIDEO_HOLE)
106 #include "cc/base/math_util.h"
107
108 // NSW : Is this necessary?
109 namespace {
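// Presumed polling interval for propagating video-layer bound updates while a
// video hole is in use; the consumer of this constant lives outside this file.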
110 const base::TimeDelta kLayerBoundUpdateInterval = base::Milliseconds(50);
111 }  // namespace
112 #endif
113
114 namespace blink {
115 namespace {
116
117 enum SplitHistogramTypes {
118   kTotal = 0x1 << 0,
119   kPlaybackType = 0x1 << 1,
120   kEncrypted = 0x1 << 2,
121 };
122
123 namespace learning = ::media::learning;
124 using ::media::Demuxer;
125 using ::media::MediaLogEvent;
126 using ::media::MediaLogProperty;
127 using ::media::MediaTrack;
128
129 void SetSinkIdOnMediaThread(scoped_refptr<WebAudioSourceProviderImpl> sink,
130                             const std::string& device_id,
131                             media::OutputDeviceStatusCB callback) {
132   sink->SwitchOutputDevice(device_id, std::move(callback));
133 }
134
135 bool IsBackgroundSuspendEnabled(const WebMediaPlayerImpl* wmpi) {
136   if (base::CommandLine::ForCurrentProcess()->HasSwitch(
137           switches::kDisableBackgroundMediaSuspend)) {
138     return false;
139   }
140   return wmpi->IsBackgroundMediaSuspendEnabled();
141 }
142
143 bool IsResumeBackgroundVideosEnabled() {
144   return base::FeatureList::IsEnabled(media::kResumeBackgroundVideo);
145 }
146
147 bool IsNetworkStateError(WebMediaPlayer::NetworkState state) {
148   bool result = state == WebMediaPlayer::kNetworkStateFormatError ||
149                 state == WebMediaPlayer::kNetworkStateNetworkError ||
150                 state == WebMediaPlayer::kNetworkStateDecodeError;
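  // Every state past kNetworkStateLoaded is an error state; the DCHECK guards
  // that assumption about the enum ordering.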
151   DCHECK_EQ(state > WebMediaPlayer::kNetworkStateLoaded, result);
152   return result;
153 }
154
155 gfx::Size GetRotatedVideoSize(media::VideoRotation rotation,
156                               gfx::Size natural_size) {
157   if (rotation == media::VIDEO_ROTATION_90 ||
158       rotation == media::VIDEO_ROTATION_270)
159     return gfx::Size(natural_size.height(), natural_size.width());
160   return natural_size;
161 }
162
163 void RecordEncryptedEvent(bool encrypted_event_fired) {
164   UMA_HISTOGRAM_BOOLEAN("Media.EME.EncryptedEvent", encrypted_event_fired);
165 }
166
167 // How much time must have elapsed since loading last progressed before we
168 // assume that the decoder will have had time to complete preroll.
169 constexpr base::TimeDelta kPrerollAttemptTimeout = base::Seconds(3);
170
171 // Maximum number, per-WMPI, of media logs of playback rate changes.
172 constexpr int kMaxNumPlaybackRateLogs = 10;
173
174 int GetSwitchToLocalMessage(
175     media::MediaObserverClient::ReasonToSwitchToLocal reason) {
176   switch (reason) {
177     case media::MediaObserverClient::ReasonToSwitchToLocal::NORMAL:
178       return IDS_MEDIA_REMOTING_STOP_TEXT;
179     case media::MediaObserverClient::ReasonToSwitchToLocal::
180         POOR_PLAYBACK_QUALITY:
181       return IDS_MEDIA_REMOTING_STOP_BY_PLAYBACK_QUALITY_TEXT;
182     case media::MediaObserverClient::ReasonToSwitchToLocal::PIPELINE_ERROR:
183       return IDS_MEDIA_REMOTING_STOP_BY_ERROR_TEXT;
184     case media::MediaObserverClient::ReasonToSwitchToLocal::ROUTE_TERMINATED:
185       return WebMediaPlayerClient::kMediaRemotingStopNoText;
186   }
187   NOTREACHED();
188   // To suppress compiler warning on Windows.
189   return WebMediaPlayerClient::kMediaRemotingStopNoText;
190 }
191
192 // These values are persisted to UMA. Entries should not be renumbered and
193 // numeric values should never be reused.
194 // TODO(crbug.com/825041): This should use EncryptionScheme once kUnencrypted
195 // is removed.
196 enum class EncryptionSchemeUMA { kCenc = 0, kCbcs = 1, kCount };
197
198 EncryptionSchemeUMA DetermineEncryptionSchemeUMAValue(
199     media::EncryptionScheme encryption_scheme) {
200   if (encryption_scheme == media::EncryptionScheme::kCbcs)
201     return EncryptionSchemeUMA::kCbcs;
202
203   DCHECK_EQ(encryption_scheme, media::EncryptionScheme::kCenc);
204   return EncryptionSchemeUMA::kCenc;
205 }
206
207 // Handles destruction of media::Renderer dependent components after the
208 // renderer has been destructed on the media thread.
209 void DestructionHelper(
210     scoped_refptr<base::SingleThreadTaskRunner> main_task_runner,
211     scoped_refptr<base::SingleThreadTaskRunner> vfc_task_runner,
212     std::unique_ptr<media::DemuxerManager> demuxer_manager,
213     std::unique_ptr<VideoFrameCompositor> compositor,
214     std::unique_ptr<media::CdmContextRef> cdm_context_1,
215     std::unique_ptr<media::CdmContextRef> cdm_context_2,
216     std::unique_ptr<media::MediaLog> media_log,
217     std::unique_ptr<media::RendererFactorySelector> renderer_factory_selector,
218     std::unique_ptr<WebSurfaceLayerBridge> bridge) {
219   // We release `bridge` after pipeline stop to ensure layout tests receive
220   // painted video frames before test harness exit.
221   main_task_runner->DeleteSoon(FROM_HERE, std::move(bridge));
222
223   // Since the media::Renderer is gone we can now destroy the compositor and
224   // renderer factory selector.
225   vfc_task_runner->DeleteSoon(FROM_HERE, std::move(compositor));
226   main_task_runner->DeleteSoon(FROM_HERE, std::move(renderer_factory_selector));
227
228   // ChunkDemuxer can be deleted on any thread, but other demuxers are bound to
229   // the main thread and must be deleted there now that the renderer is gone.
230   if (demuxer_manager &&
231       demuxer_manager->GetDemuxerType() != media::DemuxerType::kChunkDemuxer) {
232     main_task_runner->DeleteSoon(FROM_HERE, std::move(demuxer_manager));
233     main_task_runner->DeleteSoon(FROM_HERE, std::move(cdm_context_1));
234     main_task_runner->DeleteSoon(FROM_HERE, std::move(cdm_context_2));
235     main_task_runner->DeleteSoon(FROM_HERE, std::move(media_log));
236     return;
237   }
238
239   // ChunkDemuxer's streams may contain much buffered, compressed media that
240   // may need to be paged back in during destruction.  Paging delay may exceed
241   // the renderer hang monitor's threshold on at least Windows while also
242   // blocking other work on the renderer main thread, so we do the actual
243   // destruction in the background without blocking WMPI destruction or
244   // `task_runner`.  On advice of task_scheduler OWNERS, MayBlock() is not
245   // used because virtual memory overhead is not considered blocking I/O; and
246   // CONTINUE_ON_SHUTDOWN is used to allow process termination to not block on
247   // completing the task.
248   base::ThreadPool::PostTask(
249       FROM_HERE,
250       {base::TaskPriority::BEST_EFFORT,
251        base::TaskShutdownBehavior::CONTINUE_ON_SHUTDOWN},
252       base::BindOnce(
253           [](scoped_refptr<base::SingleThreadTaskRunner> main_task_runner,
254              std::unique_ptr<media::DemuxerManager> demuxer_manager,
255              std::unique_ptr<media::CdmContextRef> cdm_context_1,
256              std::unique_ptr<media::CdmContextRef> cdm_context_2,
257              std::unique_ptr<media::MediaLog> media_log) {
258             demuxer_manager.reset();
259             main_task_runner->DeleteSoon(FROM_HERE, std::move(cdm_context_1));
260             main_task_runner->DeleteSoon(FROM_HERE, std::move(cdm_context_2));
261             main_task_runner->DeleteSoon(FROM_HERE, std::move(media_log));
262           },
263           std::move(main_task_runner), std::move(demuxer_manager),
264           std::move(cdm_context_1), std::move(cdm_context_2),
265           std::move(media_log)));
266 }
267
268 std::string SanitizeUserStringProperty(WebString value) {
269   std::string converted = value.Utf8();
270   return base::IsStringUTF8(converted) ? converted : "[invalid property]";
271 }
272
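// Adds a memory allocator dump named media/webmediaplayer/<name>/player_0x<id>
// for `bytes` bytes to the process memory dump, sub-allocating it from the
// system allocator pool when one is available.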
273 void CreateAllocation(base::trace_event::ProcessMemoryDump* pmd,
274                       int32_t id,
275                       const char* name,
276                       int64_t bytes) {
277   if (bytes <= 0)
278     return;
279   auto full_name =
280       base::StringPrintf("media/webmediaplayer/%s/player_0x%x", name, id);
281   auto* dump = pmd->CreateAllocatorDump(full_name);
282
283   dump->AddScalar(base::trace_event::MemoryAllocatorDump::kNameSize,
284                   base::trace_event::MemoryAllocatorDump::kUnitsBytes, bytes);
285
286   auto* std_allocator = base::trace_event::MemoryDumpManager::GetInstance()
287                             ->system_allocator_pool_name();
288   if (std_allocator)
289     pmd->AddSuballocation(dump->guid(), std_allocator);
290 }
291
292 // Determine whether we should update MediaPosition in `delegate_`.
293 bool MediaPositionNeedsUpdate(
294     const media_session::MediaPosition& old_position,
295     const media_session::MediaPosition& new_position) {
296   if (old_position.playback_rate() != new_position.playback_rate() ||
297       old_position.duration() != new_position.duration() ||
298       old_position.end_of_media() != new_position.end_of_media()) {
299     return true;
300   }
301
302   // Special handling for "infinite" position required to avoid calculations
303   // involving infinities.
304   if (new_position.GetPosition().is_max())
305     return !old_position.GetPosition().is_max();
306
307   // MediaPosition is potentially changed upon each OnTimeUpdate() call. In
308   // practice most of these calls happen periodically during normal playback,
309   // with unchanged rate and duration. If we want to avoid updating
310   // MediaPosition unnecessarily, we need to compare the current time
311   // calculated from the old and new MediaPositions with some tolerance. That's
312   // because we don't know the exact time when GetMediaTime() calculated the
313   // media position. We choose an arbitrary tolerance that is high enough to
314   // eliminate a lot of MediaPosition updates and low enough not to make a
315   // perceptible difference.
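  // For example, during steady 1x playback two MediaPositions computed a few
  // milliseconds apart extrapolate to nearly the same current position, so the
  // drift stays well under the tolerance and no update is sent.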
316   const auto drift =
317       (old_position.GetPosition() - new_position.GetPosition()).magnitude();
318   return drift > base::Milliseconds(100);
319 }
320
321 // Returns whether the player uses AudioService. This is needed to enable
322 // AudioStreamMonitor (for audio indicator) when not using AudioService.
323 // TODO(crbug.com/1017943): Support other RendererTypes.
324 bool UsesAudioService(media::RendererType renderer_type) {
325   return renderer_type != media::RendererType::kMediaFoundation;
326 }
327
328 }  // namespace
329
330 STATIC_ASSERT_ENUM(WebMediaPlayer::kCorsModeUnspecified,
331                    UrlData::CORS_UNSPECIFIED);
332 STATIC_ASSERT_ENUM(WebMediaPlayer::kCorsModeAnonymous, UrlData::CORS_ANONYMOUS);
333 STATIC_ASSERT_ENUM(WebMediaPlayer::kCorsModeUseCredentials,
334                    UrlData::CORS_USE_CREDENTIALS);
335
336 WebMediaPlayerImpl::WebMediaPlayerImpl(
337     WebLocalFrame* frame,
338     WebMediaPlayerClient* client,
339     WebMediaPlayerEncryptedMediaClient* encrypted_client,
340     WebMediaPlayerDelegate* delegate,
341     std::unique_ptr<media::RendererFactorySelector> renderer_factory_selector,
342     UrlIndex* url_index,
343     std::unique_ptr<VideoFrameCompositor> compositor,
344     std::unique_ptr<media::MediaLog> media_log,
345     media::MediaPlayerLoggingID player_id,
346     WebMediaPlayerBuilder::DeferLoadCB defer_load_cb,
347     scoped_refptr<media::SwitchableAudioRendererSink> audio_renderer_sink,
348     scoped_refptr<base::SequencedTaskRunner> media_task_runner,
349     scoped_refptr<base::TaskRunner> worker_task_runner,
350     scoped_refptr<base::SingleThreadTaskRunner> compositor_task_runner,
351     scoped_refptr<base::SingleThreadTaskRunner>
352         video_frame_compositor_task_runner,
353     WebMediaPlayerBuilder::AdjustAllocatedMemoryCB adjust_allocated_memory_cb,
354     WebContentDecryptionModule* initial_cdm,
355     media::RequestRoutingTokenCallback request_routing_token_cb,
356     base::WeakPtr<media::MediaObserver> media_observer,
357     bool enable_instant_source_buffer_gc,
358     bool embedded_media_experience_enabled,
359     mojo::PendingRemote<media::mojom::MediaMetricsProvider> metrics_provider,
360     CreateSurfaceLayerBridgeCB create_bridge_callback,
361     scoped_refptr<viz::RasterContextProvider> raster_context_provider,
362 #if defined(TIZEN_VIDEO_HOLE)
363     bool is_video_hole,
364 #endif
365     bool use_surface_layer,
366     bool is_background_suspend_enabled,
367     bool is_background_video_playback_enabled,
368     bool is_background_video_track_optimization_supported,
369     std::unique_ptr<media::Demuxer> demuxer_override,
370     scoped_refptr<ThreadSafeBrowserInterfaceBrokerProxy> remote_interfaces)
371     : frame_(frame),
372       main_task_runner_(frame->GetTaskRunner(TaskType::kMediaElementEvent)),
373       media_task_runner_(std::move(media_task_runner)),
374       worker_task_runner_(std::move(worker_task_runner)),
375       media_player_id_(player_id),
376       media_log_(std::move(media_log)),
377       client_(client),
378       encrypted_client_(encrypted_client),
379       delegate_(delegate),
380       delegate_has_audio_(HasUnmutedAudio()),
381       defer_load_cb_(std::move(defer_load_cb)),
382       adjust_allocated_memory_cb_(std::move(adjust_allocated_memory_cb)),
383       demuxer_manager_(std::make_unique<media::DemuxerManager>(
384           this,
385           media_task_runner_,
386           media_log_.get(),
387           frame_->GetDocument().SiteForCookies(),
388           frame_->GetDocument().TopFrameOrigin(),
389           frame_->GetDocument().HasStorageAccess(),
390           enable_instant_source_buffer_gc,
391           std::move(demuxer_override))),
392       tick_clock_(base::DefaultTickClock::GetInstance()),
393       url_index_(url_index),
394       raster_context_provider_(std::move(raster_context_provider)),
395       vfc_task_runner_(std::move(video_frame_compositor_task_runner)),
396       compositor_(std::move(compositor)),
397       renderer_factory_selector_(std::move(renderer_factory_selector)),
398       observer_(std::move(media_observer)),
399       embedded_media_experience_enabled_(embedded_media_experience_enabled),
400       use_surface_layer_(use_surface_layer),
401       create_bridge_callback_(std::move(create_bridge_callback)),
402       request_routing_token_cb_(std::move(request_routing_token_cb)),
403       media_metrics_provider_(std::move(metrics_provider)),
404       is_background_suspend_enabled_(is_background_suspend_enabled),
405       is_background_video_playback_enabled_(
406           is_background_video_playback_enabled),
407       is_background_video_track_optimization_supported_(
408           is_background_video_track_optimization_supported),
409       should_pause_background_muted_audio_(
410           base::FeatureList::IsEnabled(media::kPauseBackgroundMutedAudio)),
411       simple_watch_timer_(
412           base::BindRepeating(&WebMediaPlayerImpl::OnSimpleWatchTimerTick,
413                               base::Unretained(this)),
414           base::BindRepeating(&WebMediaPlayerImpl::GetCurrentTimeInternal,
415                               base::Unretained(this))),
416 #if defined(TIZEN_VIDEO_HOLE)
417       is_video_hole_(is_video_hole),
418 #endif
419       will_play_helper_(nullptr) {
420   DVLOG(1) << __func__;
421   DCHECK(adjust_allocated_memory_cb_);
422   DCHECK(renderer_factory_selector_);
423   DCHECK(client_);
424   DCHECK(delegate_);
425
426   if (base::FeatureList::IsEnabled(media::kMediaPowerExperiment)) {
427     // The battery monitor is only available through the blink provider.
428     DCHECK(remote_interfaces);
429     auto battery_monitor_cb = base::BindRepeating(
430         [](scoped_refptr<ThreadSafeBrowserInterfaceBrokerProxy>
431                remote_interfaces) {
432           mojo::PendingRemote<device::mojom::blink::BatteryMonitor>
433               battery_monitor;
434           remote_interfaces->GetInterface(
435               battery_monitor.InitWithNewPipeAndPassReceiver());
436           return battery_monitor;
437         },
438         remote_interfaces);
439     power_status_helper_ =
440         std::make_unique<PowerStatusHelper>(std::move(battery_monitor_cb));
441   }
442
443   weak_this_ = weak_factory_.GetWeakPtr();
444
445   // Using base::Unretained(this) is safe because the `pipeline` is owned by
446   // `this` and the callback will always be made on the main task runner.
447   // Not using base::BindPostTaskToCurrentDefault() because CreateRenderer() is
448   // a sync call.
449   auto pipeline = std::make_unique<media::PipelineImpl>(
450       media_task_runner_, main_task_runner_,
451       base::BindRepeating(&WebMediaPlayerImpl::CreateRenderer,
452                           base::Unretained(this)),
453 #if defined(TIZEN_VIDEO_HOLE)
454       is_video_hole_,
455 #endif
456       media_log_.get());
457
458   // base::Unretained for |demuxer_manager_| is safe, because it outlives
459   // |pipeline_controller_|.
460   pipeline_controller_ = std::make_unique<media::PipelineController>(
461       std::move(pipeline),
462       base::BindRepeating(&WebMediaPlayerImpl::OnPipelineSeeked, weak_this_),
463       base::BindRepeating(&WebMediaPlayerImpl::OnPipelineSuspended, weak_this_),
464       base::BindRepeating(&WebMediaPlayerImpl::OnBeforePipelineResume,
465                           weak_this_),
466       base::BindRepeating(&WebMediaPlayerImpl::OnPipelineResumed, weak_this_),
467       base::BindRepeating(&media::DemuxerManager::OnPipelineError,
468                           base::Unretained(demuxer_manager_.get())));
469
470   buffered_data_source_host_ = std::make_unique<BufferedDataSourceHostImpl>(
471       base::BindRepeating(&WebMediaPlayerImpl::OnProgress, weak_this_),
472       tick_clock_);
473
474   // If we're supposed to force video overlays, then make sure that they're
475   // enabled all the time.
476   always_enable_overlays_ = base::CommandLine::ForCurrentProcess()->HasSwitch(
477       switches::kForceVideoOverlays);
478
479   if (base::FeatureList::IsEnabled(media::kOverlayFullscreenVideo))
480     overlay_mode_ = OverlayMode::kUseAndroidOverlay;
481   else
482     overlay_mode_ = OverlayMode::kNoOverlays;
483
484   delegate_id_ = delegate_->AddObserver(this);
485   delegate_->SetIdle(delegate_id_, true);
486
487   media_log_->AddEvent<MediaLogEvent::kWebMediaPlayerCreated>(
488       url::Origin(frame_->GetSecurityOrigin()).GetURL().spec());
489
490   media_log_->SetProperty<MediaLogProperty::kFrameUrl>(
491       SanitizeUserStringProperty(frame_->GetDocument().Url().GetString()));
492   media_log_->SetProperty<MediaLogProperty::kFrameTitle>(
493       SanitizeUserStringProperty(frame_->GetDocument().Title()));
494
495   if (initial_cdm)
496     SetCdmInternal(initial_cdm);
497
498   // Report a false "EncryptedEvent" here as a baseline.
499   RecordEncryptedEvent(false);
500
501   auto on_audio_source_provider_set_client_callback = base::BindOnce(
502       [](base::WeakPtr<WebMediaPlayerImpl> self,
503          WebMediaPlayerClient* const client) {
504         if (!self)
505           return;
506         client->DidDisableAudioOutputSinkChanges();
507       },
508       weak_this_, client_);
509
510   // TODO(xhwang): When we use an external Renderer, many methods won't work,
511   // e.g. GetCurrentFrameFromCompositor(). See http://crbug.com/434861
512   audio_source_provider_ = new WebAudioSourceProviderImpl(
513       std::move(audio_renderer_sink), media_log_.get(),
514       std::move(on_audio_source_provider_set_client_callback));
515
516   if (observer_)
517     observer_->SetClient(this);
518
519   memory_usage_reporting_timer_.SetTaskRunner(
520       frame_->GetTaskRunner(TaskType::kInternalMedia));
521
522   main_thread_mem_dumper_ = std::make_unique<media::MemoryDumpProviderProxy>(
523       "WebMediaPlayer_MainThread", main_task_runner_,
524       base::BindRepeating(&WebMediaPlayerImpl::OnMainThreadMemoryDump,
525                           weak_this_, media_player_id_));
526
527   media_metrics_provider_->AcquirePlaybackEventsRecorder(
528       playback_events_recorder_.BindNewPipeAndPassReceiver());
529
530   // MediaMetricsProvider may drop the request for PlaybackEventsRecorder if
531   // it's not interested in recording these events.
532   playback_events_recorder_.reset_on_disconnect();
533
534 #if defined(TIZEN_VIDEO_HOLE)
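  // With a hole-punched video, have the compositor report drawable content
  // rect changes back to this class on the main thread so the punched-out
  // region can follow the element's on-screen geometry.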
535   if (is_video_hole_) {
536     vfc_task_runner_->PostTask(
537         FROM_HERE,
538         base::BindOnce(
539             &VideoFrameCompositor::SetDrawableContentRectChangedCallback,
540             base::Unretained(compositor_.get()),
541             base::BindPostTaskToCurrentDefault(base::BindRepeating(
542                 &WebMediaPlayerImpl::OnDrawableContentRectChanged,
543                 weak_this_))));
544   }
545 #endif
546 #if BUILDFLAG(IS_ANDROID)
547   renderer_factory_selector_->SetRemotePlayStateChangeCB(
548       base::BindPostTaskToCurrentDefault(base::BindRepeating(
549           &WebMediaPlayerImpl::OnRemotePlayStateChange, weak_this_)));
550 #endif  // BUILDFLAG(IS_ANDROID)
551 }
552
553 WebMediaPlayerImpl::~WebMediaPlayerImpl() {
554   DVLOG(1) << __func__;
555   DCHECK(main_task_runner_->BelongsToCurrentThread());
556
557   ReportSessionUMAs();
558
559   if (set_cdm_result_) {
560     DVLOG(2)
561         << "Resolve pending SetCdmInternal() when media player is destroyed.";
562     set_cdm_result_->Complete();
563     set_cdm_result_.reset();
564   }
565
566   suppress_destruction_errors_ = true;
567   demuxer_manager_->DisallowFallback();
568
569   delegate_->PlayerGone(delegate_id_);
570   delegate_->RemoveObserver(delegate_id_);
571   delegate_ = nullptr;
572
573   // Finalize any watch time metrics before destroying the pipeline.
574   watch_time_reporter_.reset();
575
576   // Unregister dump providers on their corresponding threads.
577   media_task_runner_->DeleteSoon(FROM_HERE,
578                                  std::move(media_thread_mem_dumper_));
579   main_thread_mem_dumper_.reset();
580
581   // The underlying Pipeline must be stopped before it is destroyed.
582   //
583   // Note: This destruction happens synchronously on the media thread and
584   // `demuxer_manager_`, `compositor_`, and `media_log_` must outlive
585   // this process. They will be destructed by the DestructionHelper below
586   // after trampolining through the media thread.
587   pipeline_controller_->Stop();
588
589   if (last_reported_memory_usage_)
590     adjust_allocated_memory_cb_.Run(-last_reported_memory_usage_);
591
592   // Destruct compositor resources in the proper order.
593   client_->SetCcLayer(nullptr);
594
595   client_->MediaRemotingStopped(WebMediaPlayerClient::kMediaRemotingStopNoText);
596
597   if (!surface_layer_for_video_enabled_ && video_layer_)
598     video_layer_->StopUsingProvider();
599
600   simple_watch_timer_.Stop();
601   media_log_->OnWebMediaPlayerDestroyed();
602
603   demuxer_manager_->StopAndResetClient(nullptr);
604   demuxer_manager_->InvalidateWeakPtrs();
605
606   // Disconnect from the surface layer. We still preserve the `bridge_` until
607   // after pipeline shutdown to ensure any pending frames are painted for tests.
608   if (bridge_)
609     bridge_->ClearObserver();
610
611   // Disconnect from the MediaObserver implementation since its lifetime is
612   // tied to the RendererFactorySelector which can't be destroyed until after
613   // the Pipeline stops.
614   //
615   // Note: We can't use a WeakPtr with the RendererFactory because its methods
616   // are called on the media thread and this destruction takes place on the
617   // renderer thread.
618   if (observer_)
619     observer_->SetClient(nullptr);
620
621   // If we're in the middle of an observation, then finish it.
622   will_play_helper_.CompleteObservationIfNeeded(learning::TargetValue(false));
623
624   // Handle destruction of things that need to be destructed after the pipeline
625   // completes stopping on the media thread.
626   media_task_runner_->PostTask(
627       FROM_HERE,
628       base::BindOnce(&DestructionHelper, std::move(main_task_runner_),
629                      std::move(vfc_task_runner_), std::move(demuxer_manager_),
630                      std::move(compositor_), std::move(cdm_context_ref_),
631                      std::move(pending_cdm_context_ref_), std::move(media_log_),
632                      std::move(renderer_factory_selector_),
633                      std::move(bridge_)));
634 }
635
636 WebMediaPlayer::LoadTiming WebMediaPlayerImpl::Load(
637     LoadType load_type,
638     const WebMediaPlayerSource& source,
639     CorsMode cors_mode,
640     bool is_cache_disabled) {
641   // Only URL or MSE blob URL is supported.
642   DCHECK(source.IsURL());
643   WebURL url = source.GetAsURL();
644   DVLOG(1) << __func__ << "(" << load_type << ", " << GURL(url) << ", "
645            << cors_mode << ")";
646
647   bool is_deferred = false;
648
649   if (defer_load_cb_) {
650     is_deferred = defer_load_cb_.Run(
651         base::BindOnce(&WebMediaPlayerImpl::DoLoad, weak_this_, load_type, url,
652                        cors_mode, is_cache_disabled));
653   } else {
654     DoLoad(load_type, url, cors_mode, is_cache_disabled);
655   }
656
657   return is_deferred ? LoadTiming::kDeferred : LoadTiming::kImmediate;
658 }
659
660 void WebMediaPlayerImpl::OnWebLayerUpdated() {}
661
662 void WebMediaPlayerImpl::RegisterContentsLayer(cc::Layer* layer) {
663   DCHECK(bridge_);
664   bridge_->SetContentsOpaque(opaque_);
665   client_->SetCcLayer(layer);
666 }
667
668 void WebMediaPlayerImpl::UnregisterContentsLayer(cc::Layer* layer) {
669   // `client_` will unregister its cc::Layer if given a nullptr.
670   client_->SetCcLayer(nullptr);
671 }
672
673 void WebMediaPlayerImpl::OnSurfaceIdUpdated(viz::SurfaceId surface_id) {
674   // TODO(726619): Handle the behavior when Picture-in-Picture mode is
675   // disabled.
676   // The viz::SurfaceId may be updated when the video begins playback or when
677   // the size of the video changes.
678   if (client_ && !client_->IsAudioElement()) {
679     client_->OnPictureInPictureStateChange();
680   }
681 }
682
683 void WebMediaPlayerImpl::EnableOverlay() {
684   overlay_enabled_ = true;
685   if (request_routing_token_cb_ &&
686       overlay_mode_ == OverlayMode::kUseAndroidOverlay) {
687     overlay_routing_token_is_pending_ = true;
688     token_available_cb_.Reset(
689         base::BindOnce(&WebMediaPlayerImpl::OnOverlayRoutingToken, weak_this_));
690     request_routing_token_cb_.Run(token_available_cb_.callback());
691   }
692
693   // We have requested (and maybe already have) overlay information.  If the
694   // restarted decoder requests overlay information, then we'll defer providing
695   // it if it hasn't arrived yet.  Otherwise, this would be a race, since we
696   // don't know if the request for overlay info or restart will complete first.
697   if (decoder_requires_restart_for_overlay_)
698     ScheduleRestart();
699 }
700
701 void WebMediaPlayerImpl::DisableOverlay() {
702   overlay_enabled_ = false;
703   if (overlay_mode_ == OverlayMode::kUseAndroidOverlay) {
704     token_available_cb_.Cancel();
705     overlay_routing_token_is_pending_ = false;
706     overlay_routing_token_ = media::OverlayInfo::RoutingToken();
707   }
708
709   if (decoder_requires_restart_for_overlay_)
710     ScheduleRestart();
711   else
712     MaybeSendOverlayInfoToDecoder();
713 }
714
715 void WebMediaPlayerImpl::EnteredFullscreen() {
716   overlay_info_.is_fullscreen = true;
717
718 #if defined(TIZEN_VIDEO_HOLE) && !BUILDFLAG(IS_TIZEN_TV)
719   if (ShouldUseVideoHole()) {
720     LOG(INFO) << __func__ << " Video hole turns on when entering fullscreen mode.";
721     vfc_task_runner_->PostTask(
722         FROM_HERE,
723         base::BindOnce(
724             &VideoFrameCompositor::SetDrawableContentRectChangedCallback,
725             base::Unretained(compositor_.get()),
726             base::BindPostTaskToCurrentDefault(base::BindRepeating(
727                 &WebMediaPlayerImpl::OnDrawableContentRectChanged,
728                 weak_this_))));
729   }
730 #endif
731
732 #if defined(TIZEN_MULTIMEDIA)
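  // Notify the pipeline of the fullscreen transition; the result is reported
  // back asynchronously via FullscreenModeToggled() on this thread.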
733   if (HasVideo()) {
734     pipeline_controller_->ToggleFullscreenMode(
735         true, base::BindPostTaskToCurrentDefault(base::BindOnce(
736                   &WebMediaPlayerImpl::FullscreenModeToggled, weak_this_)));
737   }
738 #endif
739
740   // `always_enable_overlays_` implies that we're already in overlay mode, so
741   // take no action here.  Otherwise, switch to an overlay if it's allowed and
742   // if it will display properly.
743   if (!always_enable_overlays_ && overlay_mode_ != OverlayMode::kNoOverlays &&
744       DoesOverlaySupportMetadata()) {
745     EnableOverlay();
746   }
747
748   // We send this only if we can send multiple calls.  Otherwise, either (a)
749   // we already sent it and we don't have a callback anyway (we reset it when
750   // it's called in restart mode), or (b) we'll send this later when the surface
751   // actually arrives.  GVD assumes that the first overlay info will have the
752   // routing information.  Note that we set `is_fullscreen_` earlier, so that
753   // EnableOverlay() can include the fullscreen info if it sends the overlay
754   // info before returning.
755   if (!decoder_requires_restart_for_overlay_)
756     MaybeSendOverlayInfoToDecoder();
757 }
758
759 void WebMediaPlayerImpl::ExitedFullscreen() {
760   overlay_info_.is_fullscreen = false;
761
762 #if defined(TIZEN_MULTIMEDIA)
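  // Mirror of EnteredFullscreen(): notify the pipeline that fullscreen mode
  // has ended.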
763   if (HasVideo()) {
764     LOG(INFO) << __func__ << " Video hole turns off when exiting fullscreen mode.";
765     pipeline_controller_->ToggleFullscreenMode(
766         false, base::BindPostTaskToCurrentDefault(base::BindOnce(
767                    &WebMediaPlayerImpl::FullscreenModeToggled, weak_this_)));
768   }
769 #endif
770
771   // If we're in overlay mode, then exit it unless we're supposed to allow
772   // overlays all the time.
773   if (!always_enable_overlays_ && overlay_enabled_)
774     DisableOverlay();
775
776   // See EnteredFullscreen for why we do this.
777   if (!decoder_requires_restart_for_overlay_)
778     MaybeSendOverlayInfoToDecoder();
779 }
780
781 void WebMediaPlayerImpl::BecameDominantVisibleContent(bool is_dominant) {
782   if (observer_)
783     observer_->OnBecameDominantVisibleContent(is_dominant);
784 }
785
786 void WebMediaPlayerImpl::SetIsEffectivelyFullscreen(
787     WebFullscreenVideoStatus fullscreen_video_status) {
788   if (power_status_helper_) {
789     // We don't care about pip, so anything that's "not fullscreen" is good
790     // enough for us.
791     power_status_helper_->SetIsFullscreen(
792         fullscreen_video_status !=
793         WebFullscreenVideoStatus::kNotEffectivelyFullscreen);
794   }
795 }
796
797 void WebMediaPlayerImpl::OnHasNativeControlsChanged(bool has_native_controls) {
798   if (!watch_time_reporter_)
799     return;
800
801   if (has_native_controls)
802     watch_time_reporter_->OnNativeControlsEnabled();
803   else
804     watch_time_reporter_->OnNativeControlsDisabled();
805 }
806
807 void WebMediaPlayerImpl::OnDisplayTypeChanged(DisplayType display_type) {
808   DVLOG(2) << __func__ << ": display_type=" << static_cast<int>(display_type);
809
810   if (surface_layer_for_video_enabled_) {
811     vfc_task_runner_->PostTask(
812         FROM_HERE,
813         base::BindOnce(&VideoFrameCompositor::SetForceSubmit,
814                        base::Unretained(compositor_.get()),
815                        display_type == DisplayType::kPictureInPicture));
816
817     if (display_type == DisplayType::kPictureInPicture) {
818       // In Picture-in-Picture mode the video is composited in the PiP window,
819       // so stop compositing it in the original window. One exception is
820       // persistent video, which can happen in auto-pip mode, where the video
821       // is not playing in the regular Picture-in-Picture mode.
822       if (!client_->IsInAutoPIP()) {
823         client_->SetCcLayer(nullptr);
824       }
825
826       // Resumes playback if it was paused when hidden.
827       if (paused_when_hidden_) {
828         paused_when_hidden_ = false;
829         client_->ResumePlayback();
830       }
831     } else {
832       // Resume compositing in the original window if not already doing so.
833       client_->SetCcLayer(bridge_->GetCcLayer());
834     }
835   }
836
837   if (watch_time_reporter_) {
838     switch (display_type) {
839       case DisplayType::kInline:
840         watch_time_reporter_->OnDisplayTypeInline();
841         break;
842       case DisplayType::kFullscreen:
843         watch_time_reporter_->OnDisplayTypeFullscreen();
844         break;
845       case DisplayType::kPictureInPicture:
846         watch_time_reporter_->OnDisplayTypePictureInPicture();
847         break;
848     }
849   }
850
851   SetPersistentState(display_type == DisplayType::kPictureInPicture);
852   UpdatePlayState();
853 }
854
855 void WebMediaPlayerImpl::DoLoad(LoadType load_type,
856                                 const WebURL& url,
857                                 CorsMode cors_mode,
858                                 bool is_cache_disabled) {
859   TRACE_EVENT1("media", "WebMediaPlayerImpl::DoLoad", "id", media_player_id_);
860   DVLOG(1) << __func__;
861   DCHECK(main_task_runner_->BelongsToCurrentThread());
862
863   is_cache_disabled_ = is_cache_disabled;
864   cors_mode_ = cors_mode;
865
866   // Start a new observation.  If there was one before, then we didn't play it.
867   will_play_helper_.CompleteObservationIfNeeded(learning::TargetValue(false));
868   // For now, send in an empty set of features.  We should fill some in here,
869   // and / or ask blink (via `client_`) for features from the DOM.
870   learning::FeatureDictionary dict;
871   will_play_helper_.BeginObservation(dict);
872
873 #if BUILDFLAG(IS_ANDROID)
874   // Only allow credentials if the crossorigin attribute is unspecified
875   // (kCorsModeUnspecified) or "use-credentials" (kCorsModeUseCredentials).
876   // This value is only used by the MediaPlayerRenderer.
877   // See https://crbug.com/936566.
878   demuxer_manager_->SetAllowMediaPlayerRendererCredentials(cors_mode !=
879                                                            kCorsModeAnonymous);
880 #endif  // BUILDFLAG(IS_ANDROID)
881
882   // Note: `url` may be very large, take care when making copies.
883   demuxer_manager_->SetLoadedUrl(GURL(url));
884   load_type_ = load_type;
885
886   ReportMetrics(load_type, demuxer_manager_->LoadedUrl(), media_log_.get());
887
888   // Set subresource URL for crash reporting; will be truncated to 256 bytes.
889   static base::debug::CrashKeyString* subresource_url =
890       base::debug::AllocateCrashKeyString("subresource_url",
891                                           base::debug::CrashKeySize::Size256);
892   base::debug::SetCrashKeyString(subresource_url,
893                                  demuxer_manager_->LoadedUrl().spec());
894
895   SetNetworkState(WebMediaPlayer::kNetworkStateLoading);
896   SetReadyState(WebMediaPlayer::kReadyStateHaveNothing);
897
898   // Do a truncation to kMaxUrlLength+1 at most; we can add ellipsis later.
899   media_log_->AddEvent<MediaLogEvent::kLoad>(
900       url.GetString().Substring(0, media::kMaxUrlLength + 1).Utf8());
901   load_start_time_ = base::TimeTicks::Now();
902
903   // If we're adapting, then restart the smoothness experiment.
904   if (smoothness_helper_)
905     smoothness_helper_.reset();
906
907   media_metrics_provider_->Initialize(
908       load_type == kLoadTypeMediaSource,
909       load_type == kLoadTypeURL
910           ? GetMediaURLScheme(demuxer_manager_->LoadedUrl())
911           : media::mojom::MediaURLScheme::kUnknown,
912       media::mojom::MediaStreamType::kNone);
913
914   // If a demuxer override was specified or a Media Source pipeline will be
915   // used, the pipeline can start immediately.
916   if (demuxer_manager_->HasDemuxerOverride() ||
917       load_type == kLoadTypeMediaSource ||
918       demuxer_manager_->LoadedUrl().SchemeIs(
919           media::remoting::kRemotingScheme)) {
920     StartPipeline();
921     return;
922   }
923
924   // Short circuit the more complex loading path for data:// URLs. Sending
925   // them through the network based loading path just wastes memory and causes
926   // worse performance since reads become asynchronous.
927   if (demuxer_manager_->LoadedUrl().SchemeIs(url::kDataScheme)) {
928     std::string mime_type, charset, data;
929     if (!net::DataURL::Parse(demuxer_manager_->LoadedUrl(), &mime_type,
930                              &charset, &data) ||
931         data.empty()) {
932       return MemoryDataSourceInitialized(false, 0);
933     }
934     size_t data_size = data.size();
935     demuxer_manager_->SetDataSource(
936         std::make_unique<media::MemoryDataSource>(std::move(data)));
937     MemoryDataSourceInitialized(true, data_size);
938     return;
939   }
940
941   auto data_source = std::make_unique<MultiBufferDataSource>(
942       main_task_runner_,
943       url_index_->GetByUrl(
944           url, static_cast<UrlData::CorsMode>(cors_mode),
945           is_cache_disabled ? UrlIndex::kCacheDisabled : UrlIndex::kNormal),
946       media_log_.get(), buffered_data_source_host_.get(),
947       base::BindRepeating(&WebMediaPlayerImpl::NotifyDownloading, weak_this_));
948
949   auto* mb_data_source = data_source.get();
950   demuxer_manager_->SetDataSource(std::move(data_source));
951
952   mb_data_source->OnRedirect(base::BindRepeating(
953       &WebMediaPlayerImpl::OnDataSourceRedirected, weak_this_));
954   mb_data_source->SetPreload(preload_);
955   mb_data_source->SetIsClientAudioElement(client_->IsAudioElement());
956   mb_data_source->Initialize(base::BindOnce(
957       &WebMediaPlayerImpl::MultiBufferDataSourceInitialized, weak_this_));
958 }
959
960 void WebMediaPlayerImpl::Play() {
961   DVLOG(1) << __func__;
962   DCHECK(main_task_runner_->BelongsToCurrentThread());
963
964   // User initiated play unlocks background video playback.
965   if (frame_->HasTransientUserActivation())
966     video_locked_when_paused_when_hidden_ = false;
967
968   // TODO(sandersd): Do we want to reset the idle timer here?
969   delegate_->SetIdle(delegate_id_, false);
970   paused_ = false;
971   pipeline_controller_->SetPlaybackRate(playback_rate_);
972   background_pause_timer_.Stop();
973 #if defined(TIZEN_MULTIMEDIA)
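  // An explicit Play() overrides any suspend this player initiated itself
  // (assumption based on the flag name; the flag is consumed elsewhere).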
974   was_suspended_by_player_ = false;
975 #endif
976
977   if (observer_)
978     observer_->OnPlaying();
979
980   // Try to create the smoothness helper, in case we were paused before.
981   UpdateSmoothnessHelper();
982
983   if (playback_events_recorder_)
984     playback_events_recorder_->OnPlaying();
985
986   watch_time_reporter_->SetAutoplayInitiated(client_->WasAutoplayInitiated());
987
988   // If we're seeking we'll trigger the watch time reporter upon seek completed;
989   // we don't want to start it here since the seek time is unstable. E.g., when
990   // playing content with a positive start time we would have a zero seek time.
991   if (!Seeking()) {
992     DCHECK(watch_time_reporter_);
993     watch_time_reporter_->OnPlaying();
994   }
995
996   if (video_decode_stats_reporter_)
997     video_decode_stats_reporter_->OnPlaying();
998
999   simple_watch_timer_.Start();
1000   media_metrics_provider_->SetHasPlayed();
1001   media_log_->AddEvent<MediaLogEvent::kPlay>();
1002
1003   MaybeUpdateBufferSizesForPlayback();
1004   UpdatePlayState();
1005
1006   // Notify the learning task, if needed.
1007   will_play_helper_.CompleteObservationIfNeeded(learning::TargetValue(true));
1008 }
1009
1010 void WebMediaPlayerImpl::Pause() {
1011   DVLOG(1) << __func__;
1012   DCHECK(main_task_runner_->BelongsToCurrentThread());
1013
1014   // We update the paused state even when casting, since we expect pause() to be
1015   // called when casting begins, and when we exit casting we should end up in a
1016   // paused state.
1017   paused_ = true;
1018
1019   // No longer paused because it was hidden.
1020   paused_when_hidden_ = false;
1021
1022   UpdateSmoothnessHelper();
1023
1024   // User initiated pause locks background videos.
1025   if (frame_->HasTransientUserActivation())
1026     video_locked_when_paused_when_hidden_ = true;
1027
1028   pipeline_controller_->SetPlaybackRate(0.0);
1029
1030   // For states <= kReadyStateHaveMetadata, we may not have a renderer yet.
1031   if (highest_ready_state_ > WebMediaPlayer::kReadyStateHaveMetadata)
1032     paused_time_ = pipeline_controller_->GetMediaTime();
1033
1034   if (observer_)
1035     observer_->OnPaused();
1036
1037   if (playback_events_recorder_)
1038     playback_events_recorder_->OnPaused();
1039
1040   DCHECK(watch_time_reporter_);
1041   watch_time_reporter_->OnPaused();
1042
1043   if (video_decode_stats_reporter_)
1044     video_decode_stats_reporter_->OnPaused();
1045
1046   simple_watch_timer_.Stop();
1047   media_log_->AddEvent<MediaLogEvent::kPause>();
1048
1049   UpdatePlayState();
1050 }
1051
1052 void WebMediaPlayerImpl::OnFrozen() {
1053   DVLOG(1) << __func__;
1054   DCHECK(main_task_runner_->BelongsToCurrentThread());
1055
1056   // We should already be paused before we are frozen.
1057   DCHECK(paused_);
1058
1059   if (observer_)
1060     observer_->OnFrozen();
1061 }
1062
1063 void WebMediaPlayerImpl::Seek(double seconds) {
1064   DVLOG(1) << __func__ << "(" << seconds << "s)";
1065   DCHECK(main_task_runner_->BelongsToCurrentThread());
1066
1067 #if defined(TIZEN_MULTIMEDIA)
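  // Live streams expose a moving seekable window, so clamp the requested
  // position to [min_seekable_time_, max_seekable_time_]; these bounds are
  // assumed to be kept current from the platform player elsewhere.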
1068   if (is_live_stream_) {
1069     LOG(INFO) << __func__ << "(" << seconds << "s)"
1070               << " min_seekable_time " << min_seekable_time_
1071               << " max_seekable_time " << max_seekable_time_;
1072     if (base::Seconds(seconds) < min_seekable_time_)
1073       seconds = min_seekable_time_.InSecondsF();
1074     else if (base::Seconds(seconds) > max_seekable_time_)
1075       seconds = max_seekable_time_.InSecondsF();
1076   }
1077 #endif
1078
1079   media_log_->AddEvent<MediaLogEvent::kSeek>(seconds);
1080   DoSeek(base::Seconds(seconds), true);
1081 }
1082
1083 void WebMediaPlayerImpl::DoSeek(base::TimeDelta time, bool time_updated) {
1084   DCHECK(main_task_runner_->BelongsToCurrentThread());
1085   TRACE_EVENT2("media", "WebMediaPlayerImpl::DoSeek", "target",
1086                time.InSecondsF(), "id", media_player_id_);
1087
1088   ReadyState old_state = ready_state_;
1089   if (ready_state_ > WebMediaPlayer::kReadyStateHaveMetadata)
1090     SetReadyState(WebMediaPlayer::kReadyStateHaveMetadata);
1091
1092   // When paused or ended, we know exactly what the current time is and can
1093   // elide seeks to it. However, there are three cases that are not elided:
1094   //   1) When the pipeline state is not stable.
1095   //      In this case we just let PipelineController decide what to do, as
1096   //      it has complete information.
1097   //   2) When the ready state was not kReadyStateHaveEnoughData.
1098   //      If playback has not started, it's possible to enter a state where
1099   //      OnBufferingStateChange() will not be called again to complete the
1100   //      seek.
1101   //   3) For MSE.
1102   //      Because the buffers may have changed between seeks, MSE seeks are
1103   //      never elided.
1104   if (paused_ && pipeline_controller_->IsStable() &&
1105       (paused_time_ == time || (ended_ && time == base::Seconds(Duration()))) &&
1106       GetDemuxerType() != media::DemuxerType::kChunkDemuxer) {
1107     if (old_state == kReadyStateHaveEnoughData) {
1108       // This will in turn SetReadyState() to signal the demuxer seek, followed
1109       // by timeChanged() to signal the renderer seek.
1110       should_notify_time_changed_ = true;
1111       main_task_runner_->PostTask(
1112           FROM_HERE, base::BindOnce(&WebMediaPlayerImpl::OnBufferingStateChange,
1113                                     weak_this_, media::BUFFERING_HAVE_ENOUGH,
1114                                     media::BUFFERING_CHANGE_REASON_UNKNOWN));
1115       return;
1116     }
1117   }
1118
1119   if (playback_events_recorder_)
1120     playback_events_recorder_->OnSeeking();
1121
1122   // Call this before setting `seeking_` so that the current media time can be
1123   // recorded by the reporter.
1124   if (watch_time_reporter_)
1125     watch_time_reporter_->OnSeeking();
1126
1127   // TODO(sandersd): Move `seeking_` to PipelineController.
1128   // TODO(sandersd): Do we want to reset the idle timer here?
1129   delegate_->SetIdle(delegate_id_, false);
1130   ended_ = false;
1131   seeking_ = true;
1132   seek_time_ = time;
1133   if (paused_)
1134     paused_time_ = time;
1135   pipeline_controller_->Seek(time, time_updated);
1136
1137   // This needs to be called after Seek() so that if a resume is triggered, it
1138   // is to the correct time.
1139   UpdatePlayState();
1140 }
1141
1142 void WebMediaPlayerImpl::SetRate(double rate) {
1143   DVLOG(1) << __func__ << "(" << rate << ")";
1144   DCHECK(main_task_runner_->BelongsToCurrentThread());
1145
1146   if (rate != playback_rate_) {
1147     LIMITED_MEDIA_LOG(INFO, media_log_.get(), num_playback_rate_logs_,
1148                       kMaxNumPlaybackRateLogs)
1149         << "Effective playback rate changed from " << playback_rate_ << " to "
1150         << rate;
1151   }
1152
1153   playback_rate_ = rate;
1154   if (!paused_)
1155     pipeline_controller_->SetPlaybackRate(rate);
1156
1157   MaybeUpdateBufferSizesForPlayback();
1158 }
1159
1160 void WebMediaPlayerImpl::SetVolume(double volume) {
1161   DVLOG(1) << __func__ << "(" << volume << ")";
1162   DCHECK(main_task_runner_->BelongsToCurrentThread());
1163   volume_ = volume;
1164   pipeline_controller_->SetVolume(volume_ * volume_multiplier_);
1165   if (watch_time_reporter_)
1166     watch_time_reporter_->OnVolumeChange(volume);
1167   client_->DidPlayerMutedStatusChange(volume == 0.0);
1168
1169   if (delegate_has_audio_ != HasUnmutedAudio()) {
1170     delegate_has_audio_ = HasUnmutedAudio();
1171     DidMediaMetadataChange();
1172
1173     // If we paused a background video since it was muted, the volume change
1174     // should resume the playback.
1175     if (paused_when_hidden_) {
1176       paused_when_hidden_ = false;
1177       // Calls UpdatePlayState() so return afterwards.
1178       client_->ResumePlayback();
1179       return;
1180     }
1181   }
1182
1183   // The play state is updated because the player might have left the autoplay
1184   // muted state.
1185   UpdatePlayState();
1186 }
1187
1188 void WebMediaPlayerImpl::SetLatencyHint(double seconds) {
1189   DVLOG(1) << __func__ << "(" << seconds << ")";
1190   DCHECK(main_task_runner_->BelongsToCurrentThread());
1191   absl::optional<base::TimeDelta> latency_hint;
1192   if (std::isfinite(seconds)) {
1193     DCHECK_GE(seconds, 0);
1194     latency_hint = base::Seconds(seconds);
1195   }
1196   pipeline_controller_->SetLatencyHint(latency_hint);
1197 }
1198
1199 void WebMediaPlayerImpl::SetPreservesPitch(bool preserves_pitch) {
1200   DCHECK(main_task_runner_->BelongsToCurrentThread());
1201   pipeline_controller_->SetPreservesPitch(preserves_pitch);
1202 }
1203
1204 void WebMediaPlayerImpl::SetWasPlayedWithUserActivation(
1205     bool was_played_with_user_activation) {
1206   DCHECK(main_task_runner_->BelongsToCurrentThread());
1207   pipeline_controller_->SetWasPlayedWithUserActivation(
1208       was_played_with_user_activation);
1209 }
1210
1211 void WebMediaPlayerImpl::OnRequestPictureInPicture() {
1212   if (!surface_layer_for_video_enabled_)
1213     ActivateSurfaceLayerForVideo();
1214
1215   DCHECK(bridge_);
1216   DCHECK(bridge_->GetSurfaceId().is_valid());
1217 }
1218
1219 bool WebMediaPlayerImpl::SetSinkId(
1220     const WebString& sink_id,
1221     WebSetSinkIdCompleteCallback completion_callback) {
1222   DCHECK(main_task_runner_->BelongsToCurrentThread());
1223   DVLOG(1) << __func__;
1224
1225   media::OutputDeviceStatusCB callback =
1226       ConvertToOutputDeviceStatusCB(std::move(completion_callback));
1227   auto sink_id_utf8 = sink_id.Utf8();
1228   media_task_runner_->PostTask(
1229       FROM_HERE, base::BindOnce(&SetSinkIdOnMediaThread, audio_source_provider_,
1230                                 sink_id_utf8, std::move(callback)));
1231   return true;
1232 }
1233
1234 STATIC_ASSERT_ENUM(WebMediaPlayer::kPreloadNone, media::DataSource::NONE);
1235 STATIC_ASSERT_ENUM(WebMediaPlayer::kPreloadMetaData,
1236                    media::DataSource::METADATA);
1237 STATIC_ASSERT_ENUM(WebMediaPlayer::kPreloadAuto, media::DataSource::AUTO);
1238
1239 void WebMediaPlayerImpl::SetPreload(WebMediaPlayer::Preload preload) {
1240   DVLOG(1) << __func__ << "(" << preload << ")";
1241   DCHECK(main_task_runner_->BelongsToCurrentThread());
1242   preload_ = static_cast<media::DataSource::Preload>(preload);
1243   demuxer_manager_->SetPreload(preload_);
1244 }
1245
1246 bool WebMediaPlayerImpl::HasVideo() const {
1247   DCHECK(main_task_runner_->BelongsToCurrentThread());
1248
1249   return pipeline_metadata_.has_video;
1250 }
1251
1252 bool WebMediaPlayerImpl::HasAudio() const {
1253   DCHECK(main_task_runner_->BelongsToCurrentThread());
1254
1255   return pipeline_metadata_.has_audio;
1256 }
1257
1258 void WebMediaPlayerImpl::EnabledAudioTracksChanged(
1259     const WebVector<WebMediaPlayer::TrackId>& enabledTrackIds) {
1260   DCHECK(main_task_runner_->BelongsToCurrentThread());
1261
1262   std::ostringstream logstr;
1263   std::vector<MediaTrack::Id> enabledMediaTrackIds;
1264   for (const auto& blinkTrackId : enabledTrackIds) {
1265     const auto track_id = MediaTrack::Id(blinkTrackId.Utf8().data());
1266     logstr << track_id << " ";
1267     enabledMediaTrackIds.push_back(track_id);
1268   }
1269   MEDIA_LOG(INFO, media_log_.get())
1270       << "Enabled audio tracks: [" << logstr.str() << "]";
1271   pipeline_controller_->OnEnabledAudioTracksChanged(enabledMediaTrackIds);
1272 }
1273
1274 void WebMediaPlayerImpl::SelectedVideoTrackChanged(
1275     WebMediaPlayer::TrackId* selectedTrackId) {
1276   DCHECK(main_task_runner_->BelongsToCurrentThread());
1277
1278   absl::optional<MediaTrack::Id> selected_video_track_id;
1279   if (selectedTrackId && !video_track_disabled_)
1280     selected_video_track_id = MediaTrack::Id(selectedTrackId->Utf8().data());
1281   MEDIA_LOG(INFO, media_log_.get())
1282       << "Selected video track: ["
1283       << selected_video_track_id.value_or(MediaTrack::Id()) << "]";
1284   pipeline_controller_->OnSelectedVideoTrackChanged(selected_video_track_id);
1285 }
1286
1287 gfx::Size WebMediaPlayerImpl::NaturalSize() const {
1288   DCHECK(main_task_runner_->BelongsToCurrentThread());
1289
1290   return pipeline_metadata_.natural_size;
1291 }
1292
1293 gfx::Size WebMediaPlayerImpl::VisibleSize() const {
1294   DCHECK(main_task_runner_->BelongsToCurrentThread());
1295   scoped_refptr<media::VideoFrame> video_frame =
1296       GetCurrentFrameFromCompositor();
1297   if (!video_frame)
1298     return gfx::Size();
1299
1300   return video_frame->visible_rect().size();
1301 }
1302
1303 bool WebMediaPlayerImpl::Paused() const {
1304   DCHECK(main_task_runner_->BelongsToCurrentThread());
1305   return pipeline_controller_->GetPlaybackRate() == 0.0f;
1306 }
1307
1308 bool WebMediaPlayerImpl::PausedWhenHidden() const {
1309   return paused_when_hidden_;
1310 }
1311
1312 bool WebMediaPlayerImpl::Seeking() const {
1313   DCHECK(main_task_runner_->BelongsToCurrentThread());
1314
1315   if (ready_state_ == WebMediaPlayer::kReadyStateHaveNothing)
1316     return false;
1317
1318   return seeking_;
1319 }
1320
1321 double WebMediaPlayerImpl::Duration() const {
1322   DCHECK(main_task_runner_->BelongsToCurrentThread());
1323
1324   if (ready_state_ == WebMediaPlayer::kReadyStateHaveNothing)
1325     return std::numeric_limits<double>::quiet_NaN();
1326
1327   // Some demuxers might have more accurate duration information than the
1328   // pipeline, so check that first.
1329   absl::optional<double> duration = demuxer_manager_->GetDemuxerDuration();
1330   if (duration.has_value()) {
1331     return *duration;
1332   }
1333
1334   base::TimeDelta pipeline_duration = GetPipelineMediaDuration();
1335   return pipeline_duration == media::kInfiniteDuration
1336              ? std::numeric_limits<double>::infinity()
1337              : pipeline_duration.InSecondsF();
1338 }
1339
1340 double WebMediaPlayerImpl::timelineOffset() const {
1341   DCHECK(main_task_runner_->BelongsToCurrentThread());
1342
1343   if (pipeline_metadata_.timeline_offset.is_null())
1344     return std::numeric_limits<double>::quiet_NaN();
1345
1346   return pipeline_metadata_.timeline_offset.InMillisecondsFSinceUnixEpoch();
1347 }
1348
1349 base::TimeDelta WebMediaPlayerImpl::GetCurrentTimeInternal() const {
1350   DCHECK(main_task_runner_->BelongsToCurrentThread());
1351
1352   base::TimeDelta current_time;
1353   if (Seeking())
1354     current_time = seek_time_;
1355   else if (paused_)
1356     current_time = paused_time_;
1357   else
1358     current_time = pipeline_controller_->GetMediaTime();
1359
1360   // It's possible for `current_time` to be kInfiniteDuration here if the page
1361   // seeks to kInfiniteDuration (2**64 - 1) when Duration() is infinite.
1362   DCHECK_GE(current_time, base::TimeDelta());
1363   return current_time;
1364 }
1365
1366 double WebMediaPlayerImpl::CurrentTime() const {
1367   DCHECK(main_task_runner_->BelongsToCurrentThread());
1368   DCHECK_NE(ready_state_, WebMediaPlayer::kReadyStateHaveNothing);
1369
1370   // Even though we have an explicit ended signal, a lot of content doesn't have
1371   // an accurate duration -- with some formats (e.g., VBR MP3, OGG) it can't be
1372   // known without a complete play-through from beginning to end.
1373   //
1374   // The HTML5 spec says that upon ended, current time must equal duration. Due
1375   // to the aforementioned issue, if we rely exclusively on current time, we can
1376   // be a few milliseconds off of the duration.
1377   const auto duration = Duration();
1378   return (ended_ && !std::isinf(duration))
1379              ? duration
1380              : GetCurrentTimeInternal().InSecondsF();
1381 }
1382
1383 bool WebMediaPlayerImpl::IsEnded() const {
1384   DCHECK(main_task_runner_->BelongsToCurrentThread());
1385 #if defined(TIZEN_MULTIMEDIA)
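       // On Tizen, also treat playback as ended once the current time reaches
       // the duration, since the platform pipeline may not emit an explicit
       // ended signal for every stream.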
1386   return (ended_ || (CurrentTime() >= Duration()));
1387 #else
1388   return ended_;
1389 #endif
1390 }
1391
1392 WebMediaPlayer::NetworkState WebMediaPlayerImpl::GetNetworkState() const {
1393   DCHECK(main_task_runner_->BelongsToCurrentThread());
1394   return network_state_;
1395 }
1396
1397 WebMediaPlayer::ReadyState WebMediaPlayerImpl::GetReadyState() const {
1398   DCHECK(main_task_runner_->BelongsToCurrentThread());
1399   return ready_state_;
1400 }
1401
1402 WebString WebMediaPlayerImpl::GetErrorMessage() const {
1403   DCHECK(main_task_runner_->BelongsToCurrentThread());
1404   return WebString::FromUTF8(media_log_->GetErrorMessage());
1405 }
1406
1407 WebTimeRanges WebMediaPlayerImpl::Buffered() const {
1408   DCHECK(main_task_runner_->BelongsToCurrentThread());
1409
1410   media::Ranges<base::TimeDelta> buffered_time_ranges =
1411       pipeline_controller_->GetBufferedTimeRanges();
1412
1413   const base::TimeDelta duration = GetPipelineMediaDuration();
1414   if (duration != media::kInfiniteDuration) {
1415     buffered_data_source_host_->AddBufferedTimeRanges(&buffered_time_ranges,
1416                                                       duration);
1417   }
1418   return ConvertToWebTimeRanges(buffered_time_ranges);
1419 }
1420
1421 WebTimeRanges WebMediaPlayerImpl::Seekable() const {
1422   DCHECK(main_task_runner_->BelongsToCurrentThread());
1423
1424   if (ready_state_ < WebMediaPlayer::kReadyStateHaveMetadata) {
1425     return WebTimeRanges();
1426   }
1427
1428   if (demuxer_manager_->IsLiveContent()) {
1429     return WebTimeRanges();
1430   }
1431
1432 #if defined(TIZEN_MULTIMEDIA)
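       // For live streams, report the seekable window published by the
       // platform pipeline through OnSeekableTimeChange() instead of deriving
       // it from Duration().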
1433   if (is_live_stream_) {
1434     const WebTimeRange seekable_range(min_seekable_time_.InSecondsF(),
1435                                       max_seekable_time_.InSecondsF());
1436     return WebTimeRanges(&seekable_range, 1);
1437   }
1438 #endif
1439
1440   const double seekable_end = Duration();
1441
1442   // Allow a special exception for seeks to zero for streaming sources with a
1443   // finite duration; this allows looping to work.
1444   const bool is_finite_stream = IsStreaming() && std::isfinite(seekable_end);
1445
1446   // Do not change the seekable range when using the MediaPlayerRenderer. It
1447   // will take care of dropping invalid seeks.
1448   const bool force_seeks_to_zero =
1449       !using_media_player_renderer_ && is_finite_stream;
1450
1451   // TODO(dalecurtis): Technically this allows seeking on media which return an
1452   // infinite duration so long as DataSource::IsStreaming() is false. While not
1453   // expected, disabling this breaks semi-live players, http://crbug.com/427412.
1454   const WebTimeRange seekable_range(0.0,
1455                                     force_seeks_to_zero ? 0.0 : seekable_end);
1456   return WebTimeRanges(&seekable_range, 1);
1457 }
1458
1459 bool WebMediaPlayerImpl::IsPrerollAttemptNeeded() {
1460   // TODO(sandersd): Replace with `highest_ready_state_since_seek_` if we need
1461   // to ensure that preroll always gets a chance to complete.
1462   // See http://crbug.com/671525.
1463   //
1464   // Note: Even though we get play/pause signals at kReadyStateHaveMetadata, we
1465   // must attempt to preroll until kReadyStateHaveFutureData so that the
1466   // canplaythrough event will be fired to the page (which may be waiting).
1467   //
1468   // TODO(dalecurtis): We should try signaling kReadyStateHaveFutureData upon
1469   // automatic-suspend of a non-playing element to avoid wasting resources.
1470   if (highest_ready_state_ >= ReadyState::kReadyStateHaveFutureData)
1471     return false;
1472
1473   // To suspend before we reach kReadyStateHaveCurrentData is only ok
1474   // if we know we're going to get woken up when we get more data, which
1475   // will only happen if the network is in the "Loading" state.
1476   // This happens when the network is fast, but multiple videos are loading
1477   // and multiplexing gets held up waiting for available threads.
1478   if (highest_ready_state_ <= ReadyState::kReadyStateHaveMetadata &&
1479       network_state_ != WebMediaPlayer::kNetworkStateLoading) {
1480     return true;
1481   }
1482
1483   if (preroll_attempt_pending_)
1484     return true;
1485
1486   // Freshly initialized; there has never been any loading progress. (Otherwise
1487   // `preroll_attempt_pending_` would be true when the start time is null.)
1488   if (preroll_attempt_start_time_.is_null())
1489     return false;
1490
1491   base::TimeDelta preroll_attempt_duration =
1492       tick_clock_->NowTicks() - preroll_attempt_start_time_;
1493   return preroll_attempt_duration < kPrerollAttemptTimeout;
1494 }
1495
1496 bool WebMediaPlayerImpl::DidLoadingProgress() {
1497   DCHECK(main_task_runner_->BelongsToCurrentThread());
1498
1499   // Note: Separate variables used to ensure both methods are called every time.
1500   const bool pipeline_progress = pipeline_controller_->DidLoadingProgress();
1501   const bool data_progress = buffered_data_source_host_->DidLoadingProgress();
1502   return pipeline_progress || data_progress;
1503 }
1504
1505 void WebMediaPlayerImpl::Paint(cc::PaintCanvas* canvas,
1506                                const gfx::Rect& rect,
1507                                cc::PaintFlags& flags) {
1508   DCHECK(main_task_runner_->BelongsToCurrentThread());
1509   TRACE_EVENT0("media", "WebMediaPlayerImpl:paint");
1510
1511   scoped_refptr<media::VideoFrame> video_frame =
1512       GetCurrentFrameFromCompositor();
1513   last_frame_request_time_ = tick_clock_->NowTicks();
1514   video_frame_readback_count_++;
1515   pipeline_controller_->OnExternalVideoFrameRequest();
1516
1517   video_renderer_.Paint(
1518       video_frame, canvas, gfx::RectF(rect), flags,
1519       pipeline_metadata_.video_decoder_config.video_transformation(),
1520       raster_context_provider_.get());
1521 }
1522
1523 scoped_refptr<media::VideoFrame>
1524 WebMediaPlayerImpl::GetCurrentFrameThenUpdate() {
1525   last_frame_request_time_ = tick_clock_->NowTicks();
1526   video_frame_readback_count_++;
1527   pipeline_controller_->OnExternalVideoFrameRequest();
1528   return GetCurrentFrameFromCompositor();
1529 }
1530
1531 absl::optional<media::VideoFrame::ID> WebMediaPlayerImpl::CurrentFrameId()
1532     const {
1533   DCHECK(main_task_runner_->BelongsToCurrentThread());
1534   TRACE_EVENT0("media", "WebMediaPlayerImpl::GetCurrentFrameID");
1535
1536   // We can't copy from protected frames.
1537   if (cdm_context_ref_)
1538     return absl::nullopt;
1539
1540   if (auto frame = compositor_->GetCurrentFrameOnAnyThread())
1541     return frame->unique_id();
1542   return absl::nullopt;
1543 }
1544
1545 media::PaintCanvasVideoRenderer*
1546 WebMediaPlayerImpl::GetPaintCanvasVideoRenderer() {
1547   DCHECK(main_task_runner_->BelongsToCurrentThread());
1548   return &video_renderer_;
1549 }
1550
1551 bool WebMediaPlayerImpl::WouldTaintOrigin() const {
1552   return demuxer_manager_->WouldTaintOrigin();
1553 }
1554
1555 double WebMediaPlayerImpl::MediaTimeForTimeValue(double timeValue) const {
1556   return base::Seconds(timeValue).InSecondsF();
1557 }
1558
1559 unsigned WebMediaPlayerImpl::DecodedFrameCount() const {
1560   DCHECK(main_task_runner_->BelongsToCurrentThread());
1561   return GetPipelineStatistics().video_frames_decoded;
1562 }
1563
1564 unsigned WebMediaPlayerImpl::DroppedFrameCount() const {
1565   DCHECK(main_task_runner_->BelongsToCurrentThread());
1566   return GetPipelineStatistics().video_frames_dropped;
1567 }
1568
1569 uint64_t WebMediaPlayerImpl::AudioDecodedByteCount() const {
1570   DCHECK(main_task_runner_->BelongsToCurrentThread());
1571   return GetPipelineStatistics().audio_bytes_decoded;
1572 }
1573
1574 uint64_t WebMediaPlayerImpl::VideoDecodedByteCount() const {
1575   DCHECK(main_task_runner_->BelongsToCurrentThread());
1576   return GetPipelineStatistics().video_bytes_decoded;
1577 }
1578
1579 bool WebMediaPlayerImpl::HasAvailableVideoFrame() const {
1580   return has_first_frame_;
1581 }
1582
1583 bool WebMediaPlayerImpl::HasReadableVideoFrame() const {
1584   return has_first_frame_ && is_frame_readable_;
1585 }
1586
1587 void WebMediaPlayerImpl::SetContentDecryptionModule(
1588     WebContentDecryptionModule* cdm,
1589     WebContentDecryptionModuleResult result) {
1590   DVLOG(1) << __func__ << ": cdm = " << cdm;
1591   DCHECK(main_task_runner_->BelongsToCurrentThread());
1592
1593   // Once the CDM is set it can't be cleared as there may be frames being
1594   // decrypted on other threads. So fail this request.
1595   // http://crbug.com/462365#c7.
1596   if (!cdm) {
1597     result.CompleteWithError(
1598         kWebContentDecryptionModuleExceptionInvalidStateError, 0,
1599         "The existing ContentDecryptionModule object cannot be removed at this "
1600         "time.");
1601     return;
1602   }
1603
1604   // Create a local copy of `result` to avoid problems with the callback
1605   // getting passed to the media thread and causing `result` to be destructed
1606   // on the wrong thread in some failure conditions. Blink should prevent
1607   // multiple simultaneous calls.
1608   DCHECK(!set_cdm_result_);
1609   set_cdm_result_ = std::make_unique<WebContentDecryptionModuleResult>(result);
1610
1611   SetCdmInternal(cdm);
1612 }
1613
1614 void WebMediaPlayerImpl::OnEncryptedMediaInitData(
1615     media::EmeInitDataType init_data_type,
1616     const std::vector<uint8_t>& init_data) {
1617   DCHECK(init_data_type != media::EmeInitDataType::UNKNOWN);
1618
1619   RecordEncryptedEvent(true);
1620
1621   // Recreate the watch time reporter if necessary.
1622   const bool was_encrypted = is_encrypted_;
1623   is_encrypted_ = true;
1624   if (!was_encrypted) {
1625     media_metrics_provider_->SetIsEME();
1626     if (watch_time_reporter_)
1627       CreateWatchTimeReporter();
1628
1629     // `was_encrypted` = false means we didn't have a CDM prior to observing
1630     // encrypted media init data. Reset the reporter until the CDM arrives. See
1631     // SetCdmInternal().
1632     DCHECK(!cdm_config_);
1633     video_decode_stats_reporter_.reset();
1634   }
1635
1636   encrypted_client_->Encrypted(
1637       init_data_type, init_data.data(),
1638       base::saturated_cast<unsigned int>(init_data.size()));
1639 }
1640
1641 #if BUILDFLAG(ENABLE_FFMPEG)
1642 void WebMediaPlayerImpl::AddAudioTrack(const std::string& id,
1643                                        const std::string& label,
1644                                        const std::string& language,
1645                                        bool is_first_track) {
1646   client_->AddAudioTrack(WebString::FromUTF8(id),
1647                          WebMediaPlayerClient::kAudioTrackKindMain,
1648                          WebString::FromUTF8(label),
1649                          WebString::FromUTF8(language), is_first_track);
1650 }
1651
1652 void WebMediaPlayerImpl::AddVideoTrack(const std::string& id,
1653                                        const std::string& label,
1654                                        const std::string& language,
1655                                        bool is_first_track) {
1656   client_->AddVideoTrack(WebString::FromUTF8(id),
1657                          WebMediaPlayerClient::kVideoTrackKindMain,
1658                          WebString::FromUTF8(label),
1659                          WebString::FromUTF8(language), is_first_track);
1660 }
1661 #endif  // BUILDFLAG(ENABLE_FFMPEG)
1662
1663 #if BUILDFLAG(ENABLE_HLS_DEMUXER)
1664
1665 void WebMediaPlayerImpl::GetUrlData(
1666     const GURL& gurl,
1667     base::OnceCallback<void(scoped_refptr<UrlData>)> cb) {
1668   DCHECK(main_task_runner_->BelongsToCurrentThread());
1669   auto url_data = url_index_->GetByUrl(
1670       gurl, static_cast<UrlData::CorsMode>(cors_mode_),
1671       is_cache_disabled_ ? UrlIndex::kCacheDisabled : UrlIndex::kNormal);
1672   std::move(cb).Run(std::move(url_data));
1673 }
1674
1675 base::SequenceBound<media::HlsDataSourceProvider>
1676 WebMediaPlayerImpl::GetHlsDataSourceProvider() {
1677   DCHECK(main_task_runner_->BelongsToCurrentThread());
1678   return base::SequenceBound<HlsDataSourceProviderImpl>(
1679       main_task_runner_,
1680       std::make_unique<MultiBufferDataSourceFactory>(
1681           media_log_.get(),
1682           base::BindRepeating(&WebMediaPlayerImpl::GetUrlData,
1683                               weak_factory_.GetWeakPtr()),
1684           main_task_runner_, tick_clock_));
1685 }
1686 #endif
1687
1688 void WebMediaPlayerImpl::SetCdmInternal(WebContentDecryptionModule* cdm) {
1689   DCHECK(main_task_runner_->BelongsToCurrentThread());
1690   DCHECK(cdm);
1691
1692   const bool was_encrypted = is_encrypted_;
1693   is_encrypted_ = true;
1694
1695   // Recreate the watch time reporter if necessary.
1696   if (!was_encrypted) {
1697     media_metrics_provider_->SetIsEME();
1698     if (watch_time_reporter_)
1699       CreateWatchTimeReporter();
1700   }
1701
1702   WebContentDecryptionModuleImpl* web_cdm =
1703       ToWebContentDecryptionModuleImpl(cdm);
1704   auto cdm_context_ref = web_cdm->GetCdmContextRef();
1705   if (!cdm_context_ref) {
1706     NOTREACHED();
1707     OnCdmAttached(false);
1708     return;
1709   }
1710
1711   // Arrival of `cdm_config_` unblocks recording of encrypted stats. Attempt to
1712   // create the stats reporter. Note, we do NOT guard this within !was_encrypted
1713   // above because often the CDM arrives after the call to
1714   // OnEncryptedMediaInitData().
1715   cdm_config_ = web_cdm->GetCdmConfig();
1716   DCHECK(!cdm_config_->key_system.empty());
1717
1718   media_metrics_provider_->SetKeySystem(cdm_config_->key_system);
1719   if (cdm_config_->use_hw_secure_codecs)
1720     media_metrics_provider_->SetIsHardwareSecure();
1721   CreateVideoDecodeStatsReporter();
1722
1723   auto* cdm_context = cdm_context_ref->GetCdmContext();
1724   DCHECK(cdm_context);
1725
1726   // Keep the reference to the CDM, as it shouldn't be destroyed until
1727   // after the pipeline is done with the `cdm_context`.
1728   pending_cdm_context_ref_ = std::move(cdm_context_ref);
1729   pipeline_controller_->SetCdm(
1730       cdm_context,
1731       base::BindOnce(&WebMediaPlayerImpl::OnCdmAttached, weak_this_));
1732 }
1733
1734 void WebMediaPlayerImpl::OnCdmAttached(bool success) {
1735   DVLOG(1) << __func__ << ": success = " << success;
1736   DCHECK(main_task_runner_->BelongsToCurrentThread());
1737   DCHECK(pending_cdm_context_ref_);
1738
1739   media_log_->SetProperty<MediaLogProperty::kIsCdmAttached>(success);
1740
1741   // If the CDM is set from the constructor there is no promise
1742   // (`set_cdm_result_`) to fulfill.
1743   if (success) {
1744     // This will release the previously attached CDM (if any).
1745     cdm_context_ref_ = std::move(pending_cdm_context_ref_);
1746     if (set_cdm_result_) {
1747       set_cdm_result_->Complete();
1748       set_cdm_result_.reset();
1749     }
1750
1751     return;
1752   }
1753
1754   pending_cdm_context_ref_.reset();
1755   if (set_cdm_result_) {
1756     set_cdm_result_->CompleteWithError(
1757         kWebContentDecryptionModuleExceptionNotSupportedError, 0,
1758         "Unable to set ContentDecryptionModule object");
1759     set_cdm_result_.reset();
1760   }
1761 }
1762
1763 void WebMediaPlayerImpl::OnPipelineSeeked(bool time_updated) {
1764   TRACE_EVENT2("media", "WebMediaPlayerImpl::OnPipelineSeeked", "target",
1765                seek_time_.InSecondsF(), "id", media_player_id_);
1766   seeking_ = false;
1767   seek_time_ = base::TimeDelta();
1768
1769   if (paused_) {
1770     paused_time_ = pipeline_controller_->GetMediaTime();
1771   } else {
1772     DCHECK(watch_time_reporter_);
1773     watch_time_reporter_->OnPlaying();
1774     if (playback_events_recorder_)
1775       playback_events_recorder_->OnPlaying();
1776   }
1777   if (time_updated)
1778     should_notify_time_changed_ = true;
1779
1780   // Reset underflow duration upon seek; this prevents looping videos and user
1781   // actions from artificially inflating the duration.
1782   underflow_timer_.reset();
1783
1784   // Background video optimizations are delayed when shown/hidden if the
1785   // pipeline is seeking.
1786   UpdateBackgroundVideoOptimizationState();
1787
1788   // If we successfully completed a suspended startup, we need to make a call to
1789   // UpdatePlayState() in case any events which should trigger a resume have
1790   // occurred during startup.
1791   if (attempting_suspended_start_ &&
1792       pipeline_controller_->IsPipelineSuspended()) {
1793     skip_metrics_due_to_startup_suspend_ = true;
1794
1795     // If we successfully completed a suspended startup, signal that we have
1796     // reached BUFFERING_HAVE_ENOUGH so that canplay and canplaythrough fire
1797     // correctly. We must unfortunately always do this because it's valid for
1798     // elements to play while not visible nor even in the DOM.
1799     //
1800     // Note: This call is dual purpose, it is also responsible for triggering an
1801     // UpdatePlayState() call which may need to resume the pipeline once Blink
1802     // has been told about the ReadyState change.
1803     OnBufferingStateChangeInternal(media::BUFFERING_HAVE_ENOUGH,
1804                                    media::BUFFERING_CHANGE_REASON_UNKNOWN,
1805                                    true);
1806   }
1807
1808   attempting_suspended_start_ = false;
1809 }
1810
1811 void WebMediaPlayerImpl::OnPipelineSuspended() {
1812   // Add a log event so the player shows up as "SUSPENDED" in media-internals.
1813   media_log_->AddEvent<MediaLogEvent::kSuspended>();
1814
1815   pending_oneshot_suspend_ = false;
1816
1817   if (attempting_suspended_start_) {
1818     DCHECK(pipeline_controller_->IsSuspended());
1819     did_lazy_load_ = !has_poster_ && HasVideo();
1820   }
1821
1822   // Tell the data source we have enough data so that it may release the
1823   // connection (unless blink is waiting on us to signal play()).
1824   if (demuxer_manager_->HasDataSource() && !CouldPlayIfEnoughData()) {
1825     // `attempting_suspended_start_` will be cleared by OnPipelineSeeked() which
1826     // will occur after this method during a suspended startup.
1827     if (attempting_suspended_start_ && did_lazy_load_) {
1828       DCHECK(!has_first_frame_);
1829       DCHECK(have_enough_after_lazy_load_cb_.IsCancelled());
1830
1831       // For lazy load, we won't know if the element is non-visible until a
1832       // layout completes, so to avoid unnecessarily tearing down the network
1833       // connection, briefly (250ms chosen arbitrarily) delay signaling "have
1834       // enough" to the MultiBufferDataSource.
1835       //
1836       // base::Unretained() is safe here since the base::CancelableOnceClosure
1837       // will cancel upon destruction of this class and `demuxer_manager_` is
1838       // guaranteed to outlive us as a result of the DestructionHelper.
1839       have_enough_after_lazy_load_cb_.Reset(
1840           base::BindOnce(&media::DemuxerManager::OnBufferingHaveEnough,
1841                          base::Unretained(demuxer_manager_.get()), true));
1842       main_task_runner_->PostDelayedTask(
1843           FROM_HERE, have_enough_after_lazy_load_cb_.callback(),
1844           base::Milliseconds(250));
1845     } else {
1846       have_enough_after_lazy_load_cb_.Cancel();
1847       demuxer_manager_->OnBufferingHaveEnough(true);
1848     }
1849   }
1850
1851   ReportMemoryUsage();
1852
1853   if (pending_suspend_resume_cycle_) {
1854     pending_suspend_resume_cycle_ = false;
1855     UpdatePlayState();
1856   }
1857 }
1858
1859 void WebMediaPlayerImpl::OnBeforePipelineResume() {
1860   // Since we're resuming, cancel closing of the network connection.
1861   have_enough_after_lazy_load_cb_.Cancel();
1862
1863   // We went through suspended startup, so the player is only just now spooling
1864   // up for playback. As such adjust `load_start_time_` so it reports the same
1865   // metric as what would be reported if we had not suspended at startup.
1866   if (skip_metrics_due_to_startup_suspend_) {
1867     // In the event that the call to SetReadyState() initiated after pipeline
1868     // startup immediately tries to start playback, we should not update
1869     // `load_start_time_` to avoid losing visibility into the impact of a
1870     // suspended startup on the time until first frame / play ready for cases
1871     // where suspended startup was applied incorrectly.
1872     if (!attempting_suspended_start_)
1873       load_start_time_ = base::TimeTicks::Now() - time_to_metadata_;
1874     skip_metrics_due_to_startup_suspend_ = false;
1875   }
1876
1877   // Enable video track if we disabled it in the background - this way the new
1878   // renderer will attach its callbacks to the video stream properly.
1879   // TODO(avayvod): Remove this when disabling and enabling video tracks in
1880   // non-playing state works correctly. See https://crbug.com/678374.
1881   EnableVideoTrackIfNeeded();
1882   is_pipeline_resuming_ = true;
1883 }
1884
1885 void WebMediaPlayerImpl::OnPipelineResumed() {
1886   is_pipeline_resuming_ = false;
1887
1888   UpdateBackgroundVideoOptimizationState();
1889 }
1890
1891 void WebMediaPlayerImpl::OnChunkDemuxerOpened(media::ChunkDemuxer* demuxer) {
1892   DCHECK(main_task_runner_->BelongsToCurrentThread());
1893   client_->MediaSourceOpened(new WebMediaSourceImpl(demuxer));
1894 }
1895
1896 void WebMediaPlayerImpl::OnFallback(media::PipelineStatus status) {
1897   media_metrics_provider_->OnFallback(std::move(status).AddHere());
1898 }
1899
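     // Stops the pipeline so the current demuxer can be torn down and replaced,
     // e.g. when restarting playback with a different demuxer type.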
1900 void WebMediaPlayerImpl::StopForDemuxerReset() {
1901   DVLOG(1) << __func__;
1902   DCHECK(main_task_runner_->BelongsToCurrentThread());
1903   DCHECK(pipeline_controller_);
1904   pipeline_controller_->Stop();
1905
1906   // Delete the media-thread memory dumper on the media thread.
1907   media_task_runner_->DeleteSoon(FROM_HERE,
1908                                  std::move(media_thread_mem_dumper_));
1909 }
1910
1911 bool WebMediaPlayerImpl::IsSecurityOriginCryptographic() const {
1912   return url::Origin(frame_->GetSecurityOrigin())
1913       .GetURL()
1914       .SchemeIsCryptographic();
1915 }
1916
1917 void WebMediaPlayerImpl::UpdateLoadedUrl(const GURL& url) {
1918   demuxer_manager_->SetLoadedUrl(url);
1919 }
1920
1921 void WebMediaPlayerImpl::DemuxerRequestsSeek(base::TimeDelta seek_time) {
1922   DCHECK(main_task_runner_->BelongsToCurrentThread());
1923   DoSeek(seek_time, true);
1924 }
1925
1926 void WebMediaPlayerImpl::RestartForHls() {
1927   DCHECK(main_task_runner_->BelongsToCurrentThread());
1928   observer_->OnHlsManifestDetected();
1929
1930   // Use the media player renderer if the native HLS demuxer isn't compiled in
1931   // or if the feature is disabled.
1932 #if BUILDFLAG(ENABLE_HLS_DEMUXER)
1933   if (!base::FeatureList::IsEnabled(media::kBuiltInHlsPlayer)) {
1934     renderer_factory_selector_->SetBaseRendererType(
1935         media::RendererType::kMediaPlayer);
1936   }
1937 #elif BUILDFLAG(IS_ANDROID)
1938   renderer_factory_selector_->SetBaseRendererType(
1939       media::RendererType::kMediaPlayer);
1940 #else
1941   // Shouldn't be reachable from desktop, where HLS is not enabled.
1942   NOTREACHED();
1943 #endif
1944   SetMemoryReportingState(false);
1945   StartPipeline();
1946 }
1947
1948 void WebMediaPlayerImpl::OnError(media::PipelineStatus status) {
1949   DVLOG(1) << __func__ << ": status=" << status;
1950   DCHECK(main_task_runner_->BelongsToCurrentThread());
1951   DCHECK(status != media::PIPELINE_OK);
1952
1953   if (suppress_destruction_errors_)
1954     return;
1955
1956 #if BUILDFLAG(IS_WIN)
1957   // Hardware context reset is not an error. Restart to recover.
1958   // TODO(crbug.com/1208618): Find a way to break the potential infinite loop of
1959   // restart -> PIPELINE_ERROR_HARDWARE_CONTEXT_RESET -> restart.
1960   if (status == media::PIPELINE_ERROR_HARDWARE_CONTEXT_RESET) {
1961     ScheduleRestart();
1962     return;
1963   }
1964 #endif  // BUILDFLAG(IS_WIN)
1965
1966   MaybeSetContainerNameForMetrics();
1967   simple_watch_timer_.Stop();
1968   media_log_->NotifyError(status);
1969   media_metrics_provider_->OnError(status);
1970   if (playback_events_recorder_)
1971     playback_events_recorder_->OnError(status);
1972   if (watch_time_reporter_)
1973     watch_time_reporter_->OnError(status);
1974
1975   if (ready_state_ == WebMediaPlayer::kReadyStateHaveNothing) {
1976     // Any error that occurs before reaching ReadyStateHaveMetadata should
1977     // be considered a format error.
1978     SetNetworkState(WebMediaPlayer::kNetworkStateFormatError);
1979   } else {
1980     SetNetworkState(PipelineErrorToNetworkState(status.code()));
1981   }
1982
1983   // PipelineController::Stop() is idempotent.
1984   pipeline_controller_->Stop();
1985
1986   UpdatePlayState();
1987 }
1988
1989 void WebMediaPlayerImpl::OnEnded() {
1990   TRACE_EVENT2("media", "WebMediaPlayerImpl::OnEnded", "duration", Duration(),
1991                "id", media_player_id_);
1992   DVLOG(1) << __func__;
1993   DCHECK(main_task_runner_->BelongsToCurrentThread());
1994
1995   // Ignore state changes until we've completed all outstanding operations.
1996   if (!pipeline_controller_->IsStable())
1997     return;
1998
1999   ended_ = true;
2000   client_->TimeChanged();
2001
2002   if (playback_events_recorder_)
2003     playback_events_recorder_->OnEnded();
2004
2005   // We don't actually want this to run until `client_` calls seek() or pause(),
2006   // but that should have already happened in timeChanged() and so this is
2007   // expected to be a no-op.
2008   UpdatePlayState();
2009 }
2010
2011 void WebMediaPlayerImpl::OnMetadata(const media::PipelineMetadata& metadata) {
2012   DVLOG(1) << __func__;
2013   DCHECK(main_task_runner_->BelongsToCurrentThread());
2014
2015   // Cache the `time_to_metadata_` to use for adjusting the TimeToFirstFrame and
2016   // TimeToPlayReady metrics later if we end up doing a suspended startup.
2017   time_to_metadata_ = base::TimeTicks::Now() - load_start_time_;
2018   media_metrics_provider_->SetTimeToMetadata(time_to_metadata_);
2019   WriteSplitHistogram<kPlaybackType | kEncrypted>(
2020       &base::UmaHistogramMediumTimes, "Media.TimeToMetadata",
2021       time_to_metadata_);
2022
2023   MaybeSetContainerNameForMetrics();
2024
2025   pipeline_metadata_ = metadata;
2026   if (power_status_helper_)
2027     power_status_helper_->SetMetadata(metadata);
2028
2029   if (HasAudio()) {
2030     media_metrics_provider_->SetHasAudio(metadata.audio_decoder_config.codec());
2031     RecordEncryptionScheme("Audio",
2032                            metadata.audio_decoder_config.encryption_scheme());
2033   }
2034
2035   if (HasVideo()) {
2036     media_metrics_provider_->SetHasVideo(metadata.video_decoder_config.codec());
2037     RecordEncryptionScheme("Video",
2038                            metadata.video_decoder_config.encryption_scheme());
2039
2040     if (overlay_enabled_) {
2041       // SurfaceView doesn't support rotated video, so transition back if
2042       // the video is now rotated.  If `always_enable_overlays_`, we keep the
2043       // overlay anyway so that the state machine keeps working.
2044       // TODO(liberato): verify if compositor feedback catches this.  If so,
2045       // then we don't need this check.
2046       if (!always_enable_overlays_ && !DoesOverlaySupportMetadata())
2047         DisableOverlay();
2048     }
2049
2050     if (use_surface_layer_) {
2051       ActivateSurfaceLayerForVideo();
2052     } else {
2053       DCHECK(!video_layer_);
2054       video_layer_ = cc::VideoLayer::Create(
2055           compositor_.get(),
2056           pipeline_metadata_.video_decoder_config.video_transformation());
2057       video_layer_->SetContentsOpaque(opaque_);
2058       client_->SetCcLayer(video_layer_.get());
2059     }
2060   }
2061
2062   if (observer_)
2063     observer_->OnMetadataChanged(pipeline_metadata_);
2064
2065   delegate_has_audio_ = HasUnmutedAudio();
2066   DidMediaMetadataChange();
2067
2068   // It could happen that the demuxer successfully completed initialization
2069   // (implying it had determined media metadata), but then removed all audio and
2070   // video streams and the ability to demux any A/V before `metadata` was
2071   // constructed and passed to us. One example is, with MSE-in-Workers, the
2072   // worker owning the MediaSource could have been terminated, or the app could
2073   // have explicitly removed all A/V SourceBuffers. That termination/removal
2074   // could race the construction of `metadata`. Regardless of load-type, we
2075   // shouldn't allow playback of a resource that has neither audio nor video.
2076   // We treat lack of A/V as if there were an error in the demuxer before
2077   // reaching HAVE_METADATA.
2078   if (!HasVideo() && !HasAudio()) {
2079     DVLOG(1) << __func__ << ": no audio and no video -> error";
2080     OnError(media::DEMUXER_ERROR_COULD_NOT_OPEN);
2081     return;  // Do not transition to HAVE_METADATA.
2082   }
2083
2084   // TODO(dalecurtis): Don't create these until kReadyStateHaveFutureData; when
2085   // we create them early we just increase the chances of needing to throw them
2086   // away unnecessarily.
2087   CreateWatchTimeReporter();
2088   CreateVideoDecodeStatsReporter();
2089
2090   // SetReadyState() may trigger all sorts of calls into this class (e.g.,
2091   // Play(), Pause(), etc) so do it last to avoid unexpected states during the
2092   // calls. An exception to this is UpdatePlayState(), which is safe to call and
2093   // needs to use the new ReadyState in its calculations.
2094   SetReadyState(WebMediaPlayer::kReadyStateHaveMetadata);
2095   UpdatePlayState();
2096 }
2097
2098 void WebMediaPlayerImpl::ActivateSurfaceLayerForVideo() {
2099   // Note that we might or might not already be in VideoLayer mode.
2100   DCHECK(!bridge_);
2101
2102   surface_layer_for_video_enabled_ = true;
2103
2104   // If we're in VideoLayer mode, then get rid of the layer.
2105   if (video_layer_) {
2106     client_->SetCcLayer(nullptr);
2107     video_layer_ = nullptr;
2108   }
2109
2110   bridge_ = std::move(create_bridge_callback_)
2111                 .Run(this, compositor_->GetUpdateSubmissionStateCallback());
2112   bridge_->CreateSurfaceLayer();
2113
2114   // TODO(tmathmeyer) does this need support for the reflection transformation
2115   // as well?
2116   vfc_task_runner_->PostTask(
2117       FROM_HERE,
2118       base::BindOnce(
2119           &VideoFrameCompositor::EnableSubmission,
2120           base::Unretained(compositor_.get()), bridge_->GetSurfaceId(),
2121           pipeline_metadata_.video_decoder_config.video_transformation(),
2122           IsInPictureInPicture()));
2123   bridge_->SetContentsOpaque(opaque_);
2124
2125   // If the element is already in Picture-in-Picture mode, it means that it
2126   // was set in this mode prior to this load, with a different
2127   // WebMediaPlayerImpl. The new player needs to send its id, size and
2128   // surface id to the browser process to make sure the states are properly
2129   // updated.
2130   // TODO(872056): the surface should be activated, but for some reason it
2131   // is not. It is possible that this will no longer be needed after 872056
2132   // is fixed.
2133   if (IsInPictureInPicture())
2134     OnSurfaceIdUpdated(bridge_->GetSurfaceId());
2135 }
2136
2137 void WebMediaPlayerImpl::OnBufferingStateChange(
2138     media::BufferingState state,
2139     media::BufferingStateChangeReason reason) {
2140   OnBufferingStateChangeInternal(state, reason, false);
2141 }
2142
2143 void WebMediaPlayerImpl::CreateVideoDecodeStatsReporter() {
2144   // TODO(chcunningham): destroy reporter if we initially have video but the
2145   // track gets disabled. Currently not possible in default desktop Chrome.
2146   if (!HasVideo())
2147     return;
2148
2149   // Only record stats from the local pipeline.
2150   if (is_flinging_ || is_remote_rendering_ || using_media_player_renderer_)
2151     return;
2152
2153   // Stats reporter requires a valid config. We may not have one for HLS cases
2154   // where URL demuxer doesn't know details of the stream.
2155   if (!pipeline_metadata_.video_decoder_config.IsValidConfig())
2156     return;
2157
2158   // Profile must be known for use as index to save the reported stats.
2159   if (pipeline_metadata_.video_decoder_config.profile() ==
2160       media::VIDEO_CODEC_PROFILE_UNKNOWN) {
2161     return;
2162   }
2163
2164   // CdmConfig must be provided for use as index to save encrypted stats.
2165   if (is_encrypted_ && !cdm_config_) {
2166     return;
2167   } else if (cdm_config_) {
2168     DCHECK(!cdm_config_->key_system.empty());
2169   }
2170
2171   mojo::PendingRemote<media::mojom::VideoDecodeStatsRecorder> recorder;
2172   media_metrics_provider_->AcquireVideoDecodeStatsRecorder(
2173       recorder.InitWithNewPipeAndPassReceiver());
2174
2175   // Create capabilities reporter and synchronize its initial state.
2176   video_decode_stats_reporter_ = std::make_unique<VideoDecodeStatsReporter>(
2177       std::move(recorder),
2178       base::BindRepeating(&WebMediaPlayerImpl::GetPipelineStatistics,
2179                           base::Unretained(this)),
2180       pipeline_metadata_.video_decoder_config.profile(),
2181       pipeline_metadata_.natural_size, cdm_config_,
2182       frame_->GetTaskRunner(TaskType::kInternalMedia));
2183
2184   if (delegate_->IsFrameHidden())
2185     video_decode_stats_reporter_->OnHidden();
2186   else
2187     video_decode_stats_reporter_->OnShown();
2188
2189   if (paused_)
2190     video_decode_stats_reporter_->OnPaused();
2191   else
2192     video_decode_stats_reporter_->OnPlaying();
2193 }
2194
2195 void WebMediaPlayerImpl::OnProgress() {
2196   DVLOG(4) << __func__;
2197
2198   // See IsPrerollAttemptNeeded() for more details. We can't use that method
2199   // here since it considers `preroll_attempt_start_time_` and for OnProgress()
2200   // events we must make the attempt -- since there may not be another event.
2201   if (highest_ready_state_ < ReadyState::kReadyStateHaveFutureData) {
2202     // Reset the preroll attempt clock.
2203     preroll_attempt_pending_ = true;
2204     preroll_attempt_start_time_ = base::TimeTicks();
2205
2206     // Clear any 'stale' flag and give the pipeline a chance to resume. If we
2207     // are already resumed, this will cause `preroll_attempt_start_time_` to
2208     // be set.
2209     delegate_->ClearStaleFlag(delegate_id_);
2210     UpdatePlayState();
2211   } else if (ready_state_ == ReadyState::kReadyStateHaveFutureData &&
2212              CanPlayThrough()) {
2213     SetReadyState(WebMediaPlayer::kReadyStateHaveEnoughData);
2214   }
2215 }
2216
2217 bool WebMediaPlayerImpl::CanPlayThrough() {
2218   if (!base::FeatureList::IsEnabled(media::kSpecCompliantCanPlayThrough))
2219     return true;
2220   if (GetDemuxerType() == media::DemuxerType::kChunkDemuxer)
2221     return true;
2222 #if defined(TIZEN_MULTIMEDIA)
2223   if (GetDemuxerType() == media::DemuxerType::kMediaUrlDemuxer) {
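         // Buffering for MediaUrlDemuxer playback is presumably handled by the
         // platform player, so optimistically report playthrough for now (see
         // the TODO below).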
2224     // TODO: Need to check the buffered status.
2225     return true;
2226   }
2227 #endif
2228   if (demuxer_manager_->DataSourceFullyBuffered()) {
2229     return true;
2230   }
2231   // If we're not currently downloading, we have as much buffer as
2232   // we're ever going to get, which means we say we can play through.
2233   if (network_state_ == WebMediaPlayer::kNetworkStateIdle)
2234     return true;
2235   return buffered_data_source_host_->CanPlayThrough(
2236       base::Seconds(CurrentTime()), base::Seconds(Duration()),
2237       playback_rate_ == 0.0 ? 1.0 : playback_rate_);
2238 }
2239
2240 void WebMediaPlayerImpl::OnBufferingStateChangeInternal(
2241     media::BufferingState state,
2242     media::BufferingStateChangeReason reason,
2243     bool for_suspended_start) {
2244   DVLOG(1) << __func__ << "(" << state << ", " << reason << ")";
2245   DCHECK(main_task_runner_->BelongsToCurrentThread());
2246
2247   // Ignore buffering state changes caused by back-to-back seeking, so as not
2248   // to assume the second seek has finished when it was only the first seek.
2249   if (pipeline_controller_->IsPendingSeek())
2250     return;
2251
2252   media_log_->AddEvent<MediaLogEvent::kBufferingStateChanged>(
2253       media::SerializableBufferingState<
2254           media::SerializableBufferingStateType::kPipeline>{
2255           state, reason, for_suspended_start});
2256
2257   if (state == media::BUFFERING_HAVE_ENOUGH && !for_suspended_start)
2258     media_metrics_provider_->SetHaveEnough();
2259
2260   if (state == media::BUFFERING_HAVE_ENOUGH) {
2261     TRACE_EVENT1("media", "WebMediaPlayerImpl::BufferingHaveEnough", "id",
2262                  media_player_id_);
2263     // The SetReadyState() call below may clear
2264     // `skip_metrics_due_to_startup_suspend_` so report this first.
2265     if (!have_reported_time_to_play_ready_ &&
2266         !skip_metrics_due_to_startup_suspend_) {
2267       DCHECK(!for_suspended_start);
2268       have_reported_time_to_play_ready_ = true;
2269       const base::TimeDelta elapsed = base::TimeTicks::Now() - load_start_time_;
2270       media_metrics_provider_->SetTimeToPlayReady(elapsed);
2271       WriteSplitHistogram<kPlaybackType | kEncrypted>(
2272           &base::UmaHistogramMediumTimes, "Media.TimeToPlayReady", elapsed);
2273     }
2274
2275     // Warning: This call may be re-entrant.
2276     SetReadyState(CanPlayThrough() ? WebMediaPlayer::kReadyStateHaveEnoughData
2277                                    : WebMediaPlayer::kReadyStateHaveFutureData);
2278
2279     // Let the DataSource know we have enough data -- this is the only function
2280     // during which we advance to (or past) the kReadyStateHaveEnoughData state.
2281     // It may use this information to update buffer sizes or release unused
2282     // network connections.
2283     MaybeUpdateBufferSizesForPlayback();
2284     if (demuxer_manager_->HasDataSource() && !CouldPlayIfEnoughData()) {
2285       // For LazyLoad this will be handled during OnPipelineSuspended().
2286       if (for_suspended_start && did_lazy_load_)
2287         DCHECK(!have_enough_after_lazy_load_cb_.IsCancelled());
2288       else
2289         demuxer_manager_->OnBufferingHaveEnough(false);
2290     }
2291
2292     // Blink expects a timeChanged() in response to a seek().
2293     if (should_notify_time_changed_) {
2294       should_notify_time_changed_ = false;
2295       client_->TimeChanged();
2296     }
2297
2298     // Once we have enough, start reporting the total memory usage. We'll also
2299     // report once playback starts.
2300     ReportMemoryUsage();
2301
2302     // Report the amount of time it took to leave the underflow state.
2303     if (underflow_timer_) {
2304       auto elapsed = underflow_timer_->Elapsed();
2305       RecordUnderflowDuration(elapsed);
2306       watch_time_reporter_->OnUnderflowComplete(elapsed);
2307       underflow_timer_.reset();
2308     }
2309
2310     if (playback_events_recorder_)
2311       playback_events_recorder_->OnBufferingComplete();
2312   } else {
2313     // Buffering has underflowed.
2314     DCHECK_EQ(state, media::BUFFERING_HAVE_NOTHING);
2315
2316     // Report the number of times we've entered the underflow state. Ensure we
2317     // only report the value when transitioning from HAVE_ENOUGH to
2318     // HAVE_NOTHING.
2319     if (ready_state_ == WebMediaPlayer::kReadyStateHaveEnoughData &&
2320         !seeking_) {
2321       underflow_timer_ = std::make_unique<base::ElapsedTimer>();
2322       watch_time_reporter_->OnUnderflow();
2323
2324       if (playback_events_recorder_)
2325         playback_events_recorder_->OnBuffering();
2326     }
2327
2328     // It shouldn't be possible to underflow if we've not advanced past
2329     // HAVE_CURRENT_DATA.
2330     DCHECK_GT(highest_ready_state_, WebMediaPlayer::kReadyStateHaveCurrentData);
2331     SetReadyState(WebMediaPlayer::kReadyStateHaveCurrentData);
2332   }
2333
2334   // If this is an NNR, then notify the smoothness helper about it.  Note that
2335   // it's unclear what we should do if there is no smoothness helper yet.  As it
2336   // is, we just discard the NNR.
2337   if (state == media::BUFFERING_HAVE_NOTHING &&
2338       reason == media::DECODER_UNDERFLOW && smoothness_helper_) {
2339     smoothness_helper_->NotifyNNR();
2340   }
2341
2342   UpdatePlayState();
2343 }
2344
2345 #if BUILDFLAG(IS_TIZEN_TV)
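     // Forwards the parental-rating verification result to the platform
     // pipeline, which presumably allows or blocks playback based on it.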
2346 void WebMediaPlayerImpl::SetParentalRatingResult(bool is_pass) {
2347   pipeline_controller_->SetParentalRatingResult(is_pass);
2348 }
2349 #endif
2350
2351 void WebMediaPlayerImpl::OnDurationChange() {
2352   DCHECK(main_task_runner_->BelongsToCurrentThread());
2353
2354   if (ready_state_ == WebMediaPlayer::kReadyStateHaveNothing)
2355     return;
2356
2357   client_->DurationChanged();
2358   DidMediaMetadataChange();
2359
2360   if (watch_time_reporter_)
2361     watch_time_reporter_->OnDurationChanged(GetPipelineMediaDuration());
2362 }
2363
2364 void WebMediaPlayerImpl::OnWaiting(media::WaitingReason reason) {
2365   DVLOG(2) << __func__ << ": reason=" << static_cast<int>(reason);
2366   DCHECK(main_task_runner_->BelongsToCurrentThread());
2367
2368   switch (reason) {
2369     case media::WaitingReason::kNoCdm:
2370     case media::WaitingReason::kNoDecryptionKey:
2371       encrypted_client_->DidBlockPlaybackWaitingForKey();
2372       // TODO(jrummell): didResumePlaybackBlockedForKey() should only be called
2373       // when a key has been successfully added (e.g. OnSessionKeysChange() with
2374       // `has_additional_usable_key` = true). http://crbug.com/461903
2375       encrypted_client_->DidResumePlaybackBlockedForKey();
2376       return;
2377
2378     // Ideally this should be handled by PipelineController directly without
2379     // being proxied here. But currently Pipeline::Client (`this`) is passed to
2380     // PipelineImpl directly without going through `pipeline_controller_`,
2381     // making it difficult to do.
2382     // TODO(xhwang): Handle this in PipelineController when we have a clearer
2383     // picture on how to refactor WebMediaPlayerImpl, PipelineController and
2384     // PipelineImpl.
2385     case media::WaitingReason::kDecoderStateLost:
2386       pipeline_controller_->OnDecoderStateLost();
2387       return;
2388
2389     // On Android, it happens when the surface used by the decoder is destroyed,
2390     // e.g. background. We want to suspend the pipeline and hope the surface
2391     // will be available when resuming the pipeline by some other signals.
2392     case media::WaitingReason::kSecureSurfaceLost:
2393       if (!pipeline_controller_->IsSuspended() && !pending_oneshot_suspend_) {
2394         pending_oneshot_suspend_ = true;
2395         UpdatePlayState();
2396       }
2397       return;
2398   }
2399 }
2400
2401 void WebMediaPlayerImpl::OnVideoNaturalSizeChange(const gfx::Size& size) {
2402   DCHECK(main_task_runner_->BelongsToCurrentThread());
2403   DCHECK_NE(ready_state_, WebMediaPlayer::kReadyStateHaveNothing);
2404
2405   TRACE_EVENT0("media", "WebMediaPlayerImpl::OnVideoNaturalSizeChange");
2406
2407   // The input `size` is from the decoded video frame, which is the original
2408   // natural size and needs to be rotated accordingly.
2409   gfx::Size rotated_size = GetRotatedVideoSize(
2410       pipeline_metadata_.video_decoder_config.video_transformation().rotation,
2411       size);
2412
2413   RecordVideoNaturalSize(rotated_size);
2414
2415 #if defined(TIZEN_VIDEO_HOLE)
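       // When video-hole rendering is in use, recreate the hole frame and keep
       // updating the layer bounds, presumably so the platform-rendered video
       // stays aligned with the element's new size.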
2416   if (ShouldUseVideoHole()) {
2417     CreateVideoHoleFrame();
2418     StartLayerBoundUpdateTimer();
2419   }
2420 #endif
2421
2422   gfx::Size old_size = pipeline_metadata_.natural_size;
2423   if (rotated_size == old_size)
2424     return;
2425
2426   pipeline_metadata_.natural_size = rotated_size;
2427
2428   if (using_media_player_renderer_ && old_size.IsEmpty()) {
2429     // If we are using MediaPlayerRenderer and this is the first size change, we
2430     // now know that there is a video track. This condition is paired with code
2431     // in CreateWatchTimeReporter() that guesses the existence of a video track.
2432     CreateWatchTimeReporter();
2433   } else {
2434     UpdateSecondaryProperties();
2435   }
2436
2437   if (video_decode_stats_reporter_ &&
2438       !video_decode_stats_reporter_->MatchesBucketedNaturalSize(
2439           pipeline_metadata_.natural_size)) {
2440     CreateVideoDecodeStatsReporter();
2441   }
2442
2443   // Create or replace the smoothness helper now that we have a size.
2444   UpdateSmoothnessHelper();
2445
2446   client_->SizeChanged();
2447
2448   if (observer_)
2449     observer_->OnMetadataChanged(pipeline_metadata_);
2450
2451   client_->DidPlayerSizeChange(NaturalSize());
2452 }
2453
2454 void WebMediaPlayerImpl::OnVideoOpacityChange(bool opaque) {
2455   DCHECK(main_task_runner_->BelongsToCurrentThread());
2456   DCHECK_NE(ready_state_, WebMediaPlayer::kReadyStateHaveNothing);
2457
2458   opaque_ = opaque;
2459   if (!surface_layer_for_video_enabled_ && video_layer_)
2460     video_layer_->SetContentsOpaque(opaque_);
2461   else if (bridge_->GetCcLayer())
2462     bridge_->SetContentsOpaque(opaque_);
2463 }
2464
2465 void WebMediaPlayerImpl::OnVideoFrameRateChange(absl::optional<int> fps) {
2466   DCHECK(main_task_runner_->BelongsToCurrentThread());
2467   if (power_status_helper_)
2468     power_status_helper_->SetAverageFrameRate(fps);
2469
2470   last_reported_fps_ = fps;
2471   UpdateSmoothnessHelper();
2472 }
2473
2474 void WebMediaPlayerImpl::OnAudioConfigChange(
2475     const media::AudioDecoderConfig& config) {
2476   DCHECK(main_task_runner_->BelongsToCurrentThread());
2477   DCHECK_NE(ready_state_, WebMediaPlayer::kReadyStateHaveNothing);
2478
2479   const bool codec_change =
2480       pipeline_metadata_.audio_decoder_config.codec() != config.codec();
2481   const bool codec_profile_change =
2482       pipeline_metadata_.audio_decoder_config.profile() != config.profile();
2483
2484   pipeline_metadata_.audio_decoder_config = config;
2485
2486   if (observer_)
2487     observer_->OnMetadataChanged(pipeline_metadata_);
2488
2489   if (codec_change) {
2490     media_metrics_provider_->SetHasAudio(
2491         pipeline_metadata_.audio_decoder_config.codec());
2492   }
2493
2494   if (codec_change || codec_profile_change)
2495     UpdateSecondaryProperties();
2496 }
2497
2498 void WebMediaPlayerImpl::OnVideoConfigChange(
2499     const media::VideoDecoderConfig& config) {
2500   DCHECK(main_task_runner_->BelongsToCurrentThread());
2501   DCHECK_NE(ready_state_, WebMediaPlayer::kReadyStateHaveNothing);
2502
2503 #if defined(TIZEN_VIDEO_HOLE)
2504   if (ShouldUseVideoHole()) {
2505     CreateVideoHoleFrame();
2506     StartLayerBoundUpdateTimer();
2507   }
2508 #endif
2509
2510   const bool codec_change =
2511       pipeline_metadata_.video_decoder_config.codec() != config.codec();
2512   const bool codec_profile_change =
2513       pipeline_metadata_.video_decoder_config.profile() != config.profile();
2514
2515   pipeline_metadata_.video_decoder_config = config;
2516
2517   if (observer_)
2518     observer_->OnMetadataChanged(pipeline_metadata_);
2519
2520   if (codec_change) {
2521     media_metrics_provider_->SetHasVideo(
2522         pipeline_metadata_.video_decoder_config.codec());
2523   }
2524
2525   if (codec_change || codec_profile_change)
2526     UpdateSecondaryProperties();
2527
2528   if (video_decode_stats_reporter_ && codec_profile_change)
2529     CreateVideoDecodeStatsReporter();
2530 }
2531
2532 void WebMediaPlayerImpl::OnVideoAverageKeyframeDistanceUpdate() {
2533   UpdateBackgroundVideoOptimizationState();
2534 }
2535
2536 void WebMediaPlayerImpl::OnAudioPipelineInfoChange(
2537     const media::AudioPipelineInfo& info) {
2538   media_metrics_provider_->SetAudioPipelineInfo(info);
2539   if (info.decoder_type == audio_decoder_type_)
2540     return;
2541
2542   audio_decoder_type_ = info.decoder_type;
2543
2544   // If there's no current reporter, there's nothing to be done.
2545   if (!watch_time_reporter_)
2546     return;
2547
2548   UpdateSecondaryProperties();
2549 }
2550
2551 void WebMediaPlayerImpl::OnVideoPipelineInfoChange(
2552     const media::VideoPipelineInfo& info) {
2553   media_metrics_provider_->SetVideoPipelineInfo(info);
2554   if (info.decoder_type == video_decoder_type_)
2555     return;
2556
2557   video_decoder_type_ = info.decoder_type;
2558
2559   // If there's no current reporter, there's nothing to be done.
2560   if (!watch_time_reporter_)
2561     return;
2562
2563   UpdateSecondaryProperties();
2564 }
2565
2566 #if defined(TIZEN_MULTIMEDIA)
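     // Tizen-specific entry points for suspending and resuming the media
     // pipeline directly (e.g. by the embedder).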
2567 void WebMediaPlayerImpl::Suspend() {
2568   LOG(INFO) << __func__;
2569   SetSuspendState(true);
2570 }
2571
2572 void WebMediaPlayerImpl::Resume() {
2573   LOG(INFO) << __func__;
2574   SetSuspendState(false);
2575 }
2576
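     // Expected to be called by the platform pipeline whenever the seekable
     // window of the stream changes; the cached range and liveness flag are
     // consumed by Seekable().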
2577 void WebMediaPlayerImpl::OnSeekableTimeChange(base::TimeDelta min_time,
2578                                               base::TimeDelta max_time,
2579                                               bool is_live) {
2580   LOG(INFO) << __func__ << ": " << min_time << " : " << max_time << " : "
2581             << is_live;
2582   is_live_stream_ = is_live;
2583   min_seekable_time_ = min_time;
2584   max_seekable_time_ = max_time;
2585 }
2586
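     // Signals the client that live playback has completed, likely because the
     // end of the available live window was reached.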
2587 void WebMediaPlayerImpl::OnLivePlaybackComplete() {
2588   LOG(INFO) << __func__;
2589   if (!client_) {
2590     LOG(ERROR) << __func__ << ", client is null";
2591     return;
2592   }
2593   client_->OnLivePlaybackComplete();
2594 }
2595
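     // Handles a suspend request from the platform, e.g. when another process
     // takes over the underlying media resource.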
2596 void WebMediaPlayerImpl::OnRequestSuspend(bool resource_conflicted) {
2597   if (pipeline_controller_->IsSuspended()) {
2598     LOG(INFO) << __func__ << " Already suspended.";
2599     return;
2600   }
2601
2602   // Record whether the suspend was caused by a resource conflict.
2603   // If another process took the resource, playback can be resumed later.
2604   was_suspended_by_player_ = !resource_conflicted;
2605
2606   client_->PausePlayback(WebMediaPlayerClient::PauseReason::kUnknown);
2607   client_->SuspendPlayer();
2608   SetSuspendState(true);
2609 }
2610
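     // Keeps video-hole rendering in sync with fullscreen transitions by
     // recreating the hole frame and the layer-bound update timer as needed.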
2611 void WebMediaPlayerImpl::FullscreenModeToggled() {
2612   LOG(INFO) << __func__;
2613 #if defined(TIZEN_VIDEO_HOLE) && !BUILDFLAG(IS_TIZEN_TV)
2614   if (ShouldUseVideoHole()) {
2615     CreateVideoHoleFrame();
2616     StartLayerBoundUpdateTimer();
2617   } else {
2618     StopLayerBoundUpdateTimer();
2619   }
2620 #endif
2621 }
2622 #endif  // defined(TIZEN_MULTIMEDIA)
2623
2624 void WebMediaPlayerImpl::OnFrameHidden() {
2625   DCHECK(main_task_runner_->BelongsToCurrentThread());
2626
2627   // Backgrounding a video requires a user gesture to resume playback.
2628   if (IsHidden())
2629     video_locked_when_paused_when_hidden_ = true;
2630
2631   if (watch_time_reporter_)
2632     watch_time_reporter_->OnHidden();
2633
2634   if (video_decode_stats_reporter_)
2635     video_decode_stats_reporter_->OnHidden();
2636
2637   UpdateBackgroundVideoOptimizationState();
2638   UpdatePlayState();
2639
2640   // Schedule suspended playing media to be paused if the user doesn't come back
2641   // to it within some timeout period to avoid any autoplay surprises.
2642   ScheduleIdlePauseTimer();
2643
2644   // Notify the compositor of our page visibility status.
2645   vfc_task_runner_->PostTask(
2646       FROM_HERE,
2647       base::BindOnce(&VideoFrameCompositor::SetIsPageVisible,
2648                      base::Unretained(compositor_.get()), !IsHidden()));
2649 }
2650
2651 void WebMediaPlayerImpl::SuspendForFrameClosed() {
2652   DCHECK(main_task_runner_->BelongsToCurrentThread());
2653
2654   was_suspended_for_frame_closed_ = true;
2655   UpdateBackgroundVideoOptimizationState();
2656   UpdatePlayState();
2657 }
2658
2659 void WebMediaPlayerImpl::OnFrameShown() {
2660   DCHECK(main_task_runner_->BelongsToCurrentThread());
2661   background_pause_timer_.Stop();
2662
2663   // Foreground videos don't require user gesture to continue playback.
2664   video_locked_when_paused_when_hidden_ = false;
2665
2666   was_suspended_for_frame_closed_ = false;
2667
2668   if (watch_time_reporter_)
2669     watch_time_reporter_->OnShown();
2670
2671   if (video_decode_stats_reporter_)
2672     video_decode_stats_reporter_->OnShown();
2673
2674   // Notify the compositor of our page visibility status.
2675   vfc_task_runner_->PostTask(
2676       FROM_HERE,
2677       base::BindOnce(&VideoFrameCompositor::SetIsPageVisible,
2678                      base::Unretained(compositor_.get()), !IsHidden()));
2679
2680   UpdateBackgroundVideoOptimizationState();
2681
2682   if (paused_when_hidden_) {
2683     paused_when_hidden_ = false;
2684     client_->ResumePlayback();  // Calls UpdatePlayState() so return afterwards.
2685     return;
2686   }
2687
2688   UpdatePlayState();
2689 }
2690
2691 void WebMediaPlayerImpl::OnIdleTimeout() {
2692   DCHECK(main_task_runner_->BelongsToCurrentThread());
2693
2694   // This should never be called when stale state testing overrides are used.
2695   DCHECK(!stale_state_override_for_testing_.has_value());
2696
2697   // If we are attempting preroll, clear the stale flag.
2698   if (IsPrerollAttemptNeeded()) {
2699     delegate_->ClearStaleFlag(delegate_id_);
2700     return;
2701   }
2702
2703   UpdatePlayState();
2704 }
2705
2706 void WebMediaPlayerImpl::SetVolumeMultiplier(double multiplier) {
2707   volume_multiplier_ = multiplier;
2708   SetVolume(volume_);
2709 }
2710
2711 void WebMediaPlayerImpl::SetPersistentState(bool value) {
2712   DVLOG(2) << __func__ << ": value=" << value;
2713   overlay_info_.is_persistent_video = value;
2714   MaybeSendOverlayInfoToDecoder();
2715 }
2716
2717 void WebMediaPlayerImpl::SetPowerExperimentState(bool state) {
2718   if (power_status_helper_)
2719     power_status_helper_->UpdatePowerExperimentState(state);
2720 }
2721
2722 void WebMediaPlayerImpl::ScheduleRestart() {
2723   // TODO(watk): All restart logic should be moved into PipelineController.
2724   if (pipeline_controller_->IsPipelineRunning() &&
2725       !pipeline_controller_->IsPipelineSuspended()) {
2726     pending_suspend_resume_cycle_ = true;
2727     UpdatePlayState();
2728   }
2729 }
2730
2731 void WebMediaPlayerImpl::RequestRemotePlaybackDisabled(bool disabled) {
2732   if (observer_)
2733     observer_->OnRemotePlaybackDisabled(disabled);
2734   if (client_) {
2735     client_->OnRemotePlaybackDisabled(disabled);
2736   }
2737 }
2738
2739 void WebMediaPlayerImpl::RequestMediaRemoting() {
2740   if (observer_) {
2741     observer_->OnMediaRemotingRequested();
2742   }
2743 }
2744
2745 #if BUILDFLAG(IS_ANDROID)
2746 void WebMediaPlayerImpl::FlingingStarted() {
2747   DCHECK(main_task_runner_->BelongsToCurrentThread());
2748   DCHECK(!disable_pipeline_auto_suspend_);
2749   disable_pipeline_auto_suspend_ = true;
2750
2751   is_flinging_ = true;
2752
2753   // Capabilities reporting should only be performed for local playbacks.
2754   video_decode_stats_reporter_.reset();
2755
2756   // Request a restart of the media pipeline. A flinging renderer will be
2757   // created via the `renderer_factory_selector_`.
2758   ScheduleRestart();
2759 }
2760
2761 void WebMediaPlayerImpl::FlingingStopped() {
2762   DCHECK(main_task_runner_->BelongsToCurrentThread());
2763   DCHECK(disable_pipeline_auto_suspend_);
2764   disable_pipeline_auto_suspend_ = false;
2765
2766   is_flinging_ = false;
2767
2768   CreateVideoDecodeStatsReporter();
2769
2770   ScheduleRestart();
2771 }
2772
2773 void WebMediaPlayerImpl::OnRemotePlayStateChange(
2774     media::MediaStatus::State state) {
2775   DCHECK(is_flinging_);
2776   DCHECK(main_task_runner_->BelongsToCurrentThread());
2777
2778   if (state == media::MediaStatus::State::kPlaying && Paused()) {
2779     DVLOG(1) << __func__ << " requesting PLAY.";
2780     client_->ResumePlayback();
2781   } else if (state == media::MediaStatus::State::kPaused && !Paused()) {
2782     DVLOG(1) << __func__ << " requesting PAUSE.";
2783     client_->PausePlayback(
2784         WebMediaPlayerClient::PauseReason::kRemotePlayStateChange);
2785   }
2786 }
2787 #endif  // BUILDFLAG(IS_ANDROID)
2788
2789 void WebMediaPlayerImpl::SetPoster(const WebURL& poster) {
2790   has_poster_ = !poster.IsEmpty();
2791 }
2792
2793 void WebMediaPlayerImpl::MemoryDataSourceInitialized(bool success,
2794                                                      size_t data_size) {
2795   if (success) {
2796     // Replace the loaded URL with an empty data: URL since it may be large.
2797     demuxer_manager_->SetLoadedUrl(GURL("data:,"));
2798
2799     // Mark all the data as buffered.
2800     buffered_data_source_host_->SetTotalBytes(data_size);
2801     buffered_data_source_host_->AddBufferedByteRange(0, data_size);
2802   }
2803   DataSourceInitialized(success);
2804 }
2805
2806 void WebMediaPlayerImpl::DataSourceInitialized(bool success) {
2807   DVLOG(1) << __func__;
2808   DCHECK(main_task_runner_->BelongsToCurrentThread());
2809
2810   if (!success) {
2811     SetNetworkState(WebMediaPlayer::kNetworkStateFormatError);
2812     media_metrics_provider_->OnError(media::PIPELINE_ERROR_NETWORK);
2813
2814     // Not strictly necessary, since the pipeline was never started, but at
2815     // least this keeps the error handling code in sync.
2816     UpdatePlayState();
2817
2818     return;
2819   }
2820
2821   StartPipeline();
2822 }
2823
2824 void WebMediaPlayerImpl::MultiBufferDataSourceInitialized(bool success) {
2825   DVLOG(1) << __func__;
2826   DCHECK(demuxer_manager_->HasDataSource());
2827   if (observer_) {
2828     observer_->OnDataSourceInitialized(
2829         demuxer_manager_->GetDataSourceUrlAfterRedirects().value());
2830   }
2831
2832   // No point in preloading data as we'll probably just throw it away anyway.
2833   if (success && IsStreaming() && preload_ > media::DataSource::METADATA)
2834     demuxer_manager_->SetPreload(media::DataSource::METADATA);
2835   DataSourceInitialized(success);
2836 }
2837
2838 void WebMediaPlayerImpl::OnDataSourceRedirected() {
2839   DVLOG(1) << __func__;
2840   DCHECK(main_task_runner_->BelongsToCurrentThread());
2841
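       // A cross-origin redirect taints the origin; propagate that to the audio
       // source provider as well.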
2842   if (WouldTaintOrigin()) {
2843     audio_source_provider_->TaintOrigin();
2844   }
2845 }
2846
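     // Reflects data source download activity in the network state: switch to
     // idle when downloading stops and back to loading when it resumes. Once
     // future data is buffered and downloading has stopped, report
     // have-enough-data.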
2847 void WebMediaPlayerImpl::NotifyDownloading(bool is_downloading) {
2848   DVLOG(1) << __func__ << "(" << is_downloading << ")";
2849   if (!is_downloading && network_state_ == WebMediaPlayer::kNetworkStateLoading)
2850     SetNetworkState(WebMediaPlayer::kNetworkStateIdle);
2851   else if (is_downloading &&
2852            network_state_ == WebMediaPlayer::kNetworkStateIdle)
2853     SetNetworkState(WebMediaPlayer::kNetworkStateLoading);
2854   if (ready_state_ == ReadyState::kReadyStateHaveFutureData && !is_downloading)
2855     SetReadyState(WebMediaPlayer::kReadyStateHaveEnoughData);
2856 }
2857
2858 void WebMediaPlayerImpl::OnOverlayRoutingToken(
2859     const base::UnguessableToken& token) {
2860   DCHECK(overlay_mode_ == OverlayMode::kUseAndroidOverlay);
2861   // TODO(liberato): `token` should already be a RoutingToken.
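       // Cache the token and forward the updated overlay info if a decoder has
       // already registered for it.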
2862   overlay_routing_token_is_pending_ = false;
2863   overlay_routing_token_ = media::OverlayInfo::RoutingToken(token);
2864   MaybeSendOverlayInfoToDecoder();
2865 }
2866
2867 void WebMediaPlayerImpl::OnOverlayInfoRequested(
2868     bool decoder_requires_restart_for_overlay,
2869     media::ProvideOverlayInfoCB provide_overlay_info_cb) {
2870   DCHECK(main_task_runner_->BelongsToCurrentThread());
2871
2872   // If we get a non-null cb, a decoder is initializing and requires overlay
2873   // info. If we get a null cb, a previously initialized decoder is
2874   // unregistering for overlay info updates.
2875   if (!provide_overlay_info_cb) {
2876     decoder_requires_restart_for_overlay_ = false;
2877     provide_overlay_info_cb_.Reset();
2878     return;
2879   }
2880
2881   // If `decoder_requires_restart_for_overlay` is true, we must restart the
2882   // pipeline for fullscreen transitions. The decoder is unable to switch
2883   // surfaces otherwise. If false, we simply need to tell the decoder about the
2884   // new surface and it will handle things seamlessly.
2885   // For encrypted video we pretend that the decoder doesn't require a restart
2886   // because it needs an overlay all the time anyway. We'll switch into
2887   // `always_enable_overlays_` mode below.
2888   decoder_requires_restart_for_overlay_ =
2889       (overlay_mode_ == OverlayMode::kUseAndroidOverlay && is_encrypted_)
2890           ? false
2891           : decoder_requires_restart_for_overlay;
2892   provide_overlay_info_cb_ = std::move(provide_overlay_info_cb);
2893
2894   // If the decoder doesn't require restarts for surface transitions, and we're
2895   // using AndroidOverlay mode, we can always enable the overlay and the decoder
2896   // can choose whether or not to use it. Otherwise, we'll restart the decoder
2897   // and enable the overlay on fullscreen transitions.
2898   if (overlay_mode_ == OverlayMode::kUseAndroidOverlay &&
2899       !decoder_requires_restart_for_overlay_) {
2900     always_enable_overlays_ = true;
2901     if (!overlay_enabled_)
2902       EnableOverlay();
2903   }
2904
2905   // Send the overlay info if we already have it. If not, it will be sent later.
2906   MaybeSendOverlayInfoToDecoder();
2907 }
2908
2909 void WebMediaPlayerImpl::MaybeSendOverlayInfoToDecoder() {
2910   // If the decoder didn't request overlay info, then don't send it.
2911   if (!provide_overlay_info_cb_)
2912     return;
2913
2914   // We should send the overlay info as long as we know it.  This includes the
2915   // case where `!overlay_enabled_`, since we want to tell the decoder to avoid
2916   // using overlays.  Assuming that the decoder has requested info, the only
2917   // case in which we don't want to send something is if we've requested the
2918   // info but not received it yet.  Then, we should wait until we do.
2919   //
2920   // Initialization requires this; AVDA should start with enough info to make an
2921   // overlay, so that (pre-M) the initial codec is created with the right output
2922   // surface; it can't switch later.
2923   if (overlay_mode_ == OverlayMode::kUseAndroidOverlay) {
2924     if (overlay_routing_token_is_pending_)
2925       return;
2926
2927     overlay_info_.routing_token = overlay_routing_token_;
2928   }
2929
2930   // If restart is required, the callback is one-shot only.
2931   if (decoder_requires_restart_for_overlay_) {
2932     std::move(provide_overlay_info_cb_).Run(overlay_info_);
2933   } else {
2934     provide_overlay_info_cb_.Run(overlay_info_);
2935   }
2936 }
2937
2938 std::unique_ptr<media::Renderer> WebMediaPlayerImpl::CreateRenderer(
2939     absl::optional<media::RendererType> renderer_type) {
2940   DCHECK(main_task_runner_->BelongsToCurrentThread());
2941
2942   // Make sure that overlays are enabled if they're always allowed.
2943   if (always_enable_overlays_)
2944     EnableOverlay();
2945
2946   media::RequestOverlayInfoCB request_overlay_info_cb;
2947 #if BUILDFLAG(IS_ANDROID)
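       // On Android, give the renderer a callback for requesting overlay info;
       // requests are posted back to this (main) thread.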
2948   request_overlay_info_cb =
2949       base::BindPostTaskToCurrentDefault(base::BindRepeating(
2950           &WebMediaPlayerImpl::OnOverlayInfoRequested, weak_this_));
2951 #endif
2952
2953   if (renderer_type) {
2954     DVLOG(1) << __func__
2955              << ": renderer_type=" << static_cast<int>(renderer_type.value());
2956     renderer_factory_selector_->SetBaseRendererType(renderer_type.value());
2957   }
2958
2959   bool old_uses_audio_service = UsesAudioService(renderer_type_);
2960   renderer_type_ = renderer_factory_selector_->GetCurrentRendererType();
2961
2962   // TODO(crbug/1426179): Support codec changing for Media Foundation.
2963   if (renderer_type_ == media::RendererType::kMediaFoundation) {
2964     demuxer_manager_->DisableDemuxerCanChangeType();
2965   }
2966
2967   bool new_uses_audio_service = UsesAudioService(renderer_type_);
2968   if (new_uses_audio_service != old_uses_audio_service)
2969     client_->DidUseAudioServiceChange(new_uses_audio_service);
2970
2971   media_metrics_provider_->SetRendererType(renderer_type_);
2972   media_log_->SetProperty<MediaLogProperty::kRendererName>(renderer_type_);
2973
2974 #if defined(TIZEN_VIDEO_HOLE)
2975   if (ShouldUseVideoHole())
2976     StartLayerBoundUpdateTimer();
2977 #endif
2978
2979   return renderer_factory_selector_->GetCurrentFactory()->CreateRenderer(
2980       media_task_runner_, worker_task_runner_, audio_source_provider_.get(),
2981       compositor_.get(), std::move(request_overlay_info_cb),
2982       client_->TargetColorSpace());
2983 }
2984
2985 absl::optional<media::DemuxerType> WebMediaPlayerImpl::GetDemuxerType() const {
2986   // Note: this can't be a ternary expression because the compiler throws a fit
2987   // over type conversions.
2988   if (demuxer_manager_) {
2989     return demuxer_manager_->GetDemuxerType();
2990   }
2991   return absl::nullopt;
2992 }
2993
2994 media::PipelineStatus WebMediaPlayerImpl::OnDemuxerCreated(
2995     Demuxer* demuxer,
2996     media::Pipeline::StartType start_type,
2997     bool is_streaming,
2998     bool is_static) {
2999   CHECK_NE(demuxer, nullptr);
3000   switch (demuxer->GetDemuxerType()) {
3001     case media::DemuxerType::kMediaUrlDemuxer: {
3002       using_media_player_renderer_ = true;
3003       video_decode_stats_reporter_.reset();
3004       break;
3005     }
3006     default: {
3007       seeking_ = true;
3008       break;
3009     }
3010   }
3011
3012   if (start_type != media::Pipeline::StartType::kNormal) {
3013     attempting_suspended_start_ = true;
3014   }
3015 #if BUILDFLAG(IS_TIZEN_TV)
3016   blink::WebString content_mime_type =
3017       blink::WebString(client_->GetContentMIMEType());
3018   pipeline_controller_->SetContentMimeType(content_mime_type.Utf8());
3019 #endif
3020
3021   pipeline_controller_->Start(start_type, demuxer, this, is_streaming,
3022                               is_static);
3023   return media::OkStatus();
3024 }
3025
3026 void WebMediaPlayerImpl::StartPipeline() {
3027   DCHECK(main_task_runner_->BelongsToCurrentThread());
3028
3029   vfc_task_runner_->PostTask(
3030       FROM_HERE,
3031       base::BindOnce(&VideoFrameCompositor::SetOnNewProcessedFrameCallback,
3032                      base::Unretained(compositor_.get()),
3033                      base::BindPostTaskToCurrentDefault(base::BindOnce(
3034                          &WebMediaPlayerImpl::OnFirstFrame, weak_this_))));
3035
3036   // base::Unretained(this) is safe here, since |CreateDemuxer| calls the bound
3037   // method directly and immediately.
3038   auto create_demuxer_error = demuxer_manager_->CreateDemuxer(
3039       load_type_ == kLoadTypeMediaSource, preload_, needs_first_frame_,
3040       base::BindOnce(&WebMediaPlayerImpl::OnDemuxerCreated,
3041                      base::Unretained(this)));
3042
3043   if (!create_demuxer_error.is_ok()) {
3044     return OnError(std::move(create_demuxer_error));
3045   }
3046 }
3047
3048 void WebMediaPlayerImpl::SetNetworkState(WebMediaPlayer::NetworkState state) {
3049   DVLOG(1) << __func__ << "(" << state << ")";
3050   DCHECK(main_task_runner_->BelongsToCurrentThread());
3051   network_state_ = state;
3052   // Always notify to ensure client has the latest value.
3053   client_->NetworkStateChanged();
3054 }
3055
3056 void WebMediaPlayerImpl::SetReadyState(WebMediaPlayer::ReadyState state) {
3057   DVLOG(1) << __func__ << "(" << state << ")";
3058   DCHECK(main_task_runner_->BelongsToCurrentThread());
3059
3060   if (state == WebMediaPlayer::kReadyStateHaveEnoughData &&
3061       demuxer_manager_->DataSourceFullyBuffered() &&
3062       network_state_ == WebMediaPlayer::kNetworkStateLoading) {
3063     SetNetworkState(WebMediaPlayer::kNetworkStateLoaded);
3064   }
3065
3066   ready_state_ = state;
3067   highest_ready_state_ = std::max(highest_ready_state_, ready_state_);
3068
3069   // Always notify to ensure client has the latest value.
3070   client_->ReadyStateChanged();
3071 }
3072
3073 scoped_refptr<WebAudioSourceProviderImpl>
3074 WebMediaPlayerImpl::GetAudioSourceProvider() {
3075   return audio_source_provider_;
3076 }
3077
3078 scoped_refptr<media::VideoFrame>
3079 WebMediaPlayerImpl::GetCurrentFrameFromCompositor() const {
3080   DCHECK(main_task_runner_->BelongsToCurrentThread());
3081   TRACE_EVENT0("media", "WebMediaPlayerImpl::GetCurrentFrameFromCompositor");
3082
3083   // We can't copy from protected frames.
3084   if (cdm_context_ref_)
3085     return nullptr;
3086
3087   // Can be null.
3088   scoped_refptr<media::VideoFrame> video_frame =
3089       compositor_->GetCurrentFrameOnAnyThread();
3090
3091   // base::Unretained is safe here because `compositor_` is destroyed on
3092   // `vfc_task_runner_`. The destruction is queued from `this`' destructor,
3093   // which also runs on `main_task_runner_`, which makes it impossible for
3094   // UpdateCurrentFrameIfStale() to be queued after `compositor_`'s dtor.
3095   vfc_task_runner_->PostTask(
3096       FROM_HERE,
3097       base::BindOnce(&VideoFrameCompositor::UpdateCurrentFrameIfStale,
3098                      base::Unretained(compositor_.get()),
3099                      VideoFrameCompositor::UpdateType::kNormal));
3100
3101   return video_frame;
3102 }
3103
3104 void WebMediaPlayerImpl::UpdatePlayState() {
3105   DCHECK(main_task_runner_->BelongsToCurrentThread());
3106   bool can_auto_suspend = !disable_pipeline_auto_suspend_;
3107   // For streaming videos, we only allow suspending at the very beginning of the
3108   // video, and only if we know the length of the video. (If we don't know
3109   // the length, it might be a dynamically generated video, and suspending
3110   // will not work at all.)
3111   if (IsStreaming()) {
3112     bool at_beginning =
3113         ready_state_ == WebMediaPlayer::kReadyStateHaveNothing ||
3114         CurrentTime() == 0.0;
3115     if (!at_beginning || GetPipelineMediaDuration() == media::kInfiniteDuration)
3116       can_auto_suspend = false;
3117   }
3118
3119   bool is_suspended = pipeline_controller_->IsSuspended();
3120   bool is_backgrounded = IsBackgroundSuspendEnabled(this) && IsHidden();
3121   PlayState state = UpdatePlayState_ComputePlayState(
3122       is_flinging_, can_auto_suspend, is_suspended, is_backgrounded,
3123       IsInPictureInPicture());
3124   SetDelegateState(state.delegate_state, state.is_idle);
3125   SetMemoryReportingState(state.is_memory_reporting_enabled);
3126   SetSuspendState(state.is_suspended || pending_suspend_resume_cycle_);
3127   if (power_status_helper_) {
3128     // Make sure that we're in something like steady-state before recording.
3129     power_status_helper_->SetIsPlaying(
3130         !paused_ && !seeking_ && !IsHidden() && !state.is_suspended &&
3131         ready_state_ == kReadyStateHaveEnoughData);
3132   }
3133   UpdateSmoothnessHelper();
3134 }
3135
3136 void WebMediaPlayerImpl::OnTimeUpdate() {
3137   // When seeking, the current time can go beyond the duration, so we cap it
3138   // at the duration.
3139   base::TimeDelta duration = GetPipelineMediaDuration();
3140   base::TimeDelta current_time = GetCurrentTimeInternal();
3141   if (current_time > duration)
3142     current_time = duration;
3143
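       // While paused or before future data is available, report a playback rate
       // of zero so the reported media position does not advance.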
3144   const double effective_playback_rate =
3145       paused_ || ready_state_ < kReadyStateHaveFutureData ? 0.0
3146                                                           : playback_rate_;
3147
3148   media_session::MediaPosition new_position(effective_playback_rate, duration,
3149                                             current_time, ended_);
3150
3151   if (!MediaPositionNeedsUpdate(media_position_state_, new_position))
3152     return;
3153
3154   DVLOG(2) << __func__ << "(" << new_position.ToString() << ")";
3155   media_position_state_ = new_position;
3156   client_->DidPlayerMediaPositionStateChange(effective_playback_rate, duration,
3157                                              current_time, ended_);
3158 }
3159
3160 void WebMediaPlayerImpl::SetDelegateState(DelegateState new_state,
3161                                           bool is_idle) {
3162   DCHECK(delegate_);
3163   DVLOG(2) << __func__ << "(" << static_cast<int>(new_state) << ", " << is_idle
3164            << ")";
3165
3166   // Prevent duplicate delegate calls.
3167   // TODO(sandersd): Move this deduplication into the delegate itself.
3168   if (delegate_state_ == new_state)
3169     return;
3170   delegate_state_ = new_state;
3171
3172   switch (new_state) {
3173     case DelegateState::GONE:
3174       delegate_->PlayerGone(delegate_id_);
3175       break;
3176     case DelegateState::PLAYING: {
3177       // When the delegate gets PlayerGone it removes all of its state, so make
3178       // sure it is up-to-date before calling DidPlay().
3179       delegate_->DidMediaMetadataChange(delegate_id_, delegate_has_audio_,
3180                                         HasVideo(), GetMediaContentType());
3181       if (HasVideo())
3182         client_->DidPlayerSizeChange(NaturalSize());
3183       client_->DidPlayerStartPlaying();
3184       delegate_->DidPlay(delegate_id_);
3185       break;
3186     }
3187     case DelegateState::PAUSED:
3188       client_->DidPlayerPaused(ended_);
3189       delegate_->DidPause(delegate_id_, ended_);
3190       break;
3191   }
3192
3193   delegate_->SetIdle(delegate_id_, is_idle);
3194 }
3195
3196 void WebMediaPlayerImpl::SetMemoryReportingState(
3197     bool is_memory_reporting_enabled) {
3198   if (memory_usage_reporting_timer_.IsRunning() ==
3199       is_memory_reporting_enabled) {
3200     return;
3201   }
3202
3203   if (is_memory_reporting_enabled) {
3204     memory_usage_reporting_timer_.Start(FROM_HERE, base::Seconds(2), this,
3205                                         &WebMediaPlayerImpl::ReportMemoryUsage);
3206   } else {
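         // Stop periodic reporting, but push one final report so the most recent
         // usage is still accounted for.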
3207     memory_usage_reporting_timer_.Stop();
3208     ReportMemoryUsage();
3209   }
3210 }
3211
3212 void WebMediaPlayerImpl::SetSuspendState(bool is_suspended) {
3213   DCHECK(main_task_runner_->BelongsToCurrentThread());
3214   DVLOG(2) << __func__ << "(" << is_suspended << ")";
3215
3216   // Do not change the state after an error has occurred.
3217   // TODO(sandersd): Update PipelineController to remove the need for this.
3218   if (IsNetworkStateError(network_state_))
3219     return;
3220
3221   if (is_suspended) {
3222     // If we were not resumed for long enough to satisfy the preroll attempt,
3223     // reset the clock.
3224     if (!preroll_attempt_pending_ && IsPrerollAttemptNeeded()) {
3225       preroll_attempt_pending_ = true;
3226       preroll_attempt_start_time_ = base::TimeTicks();
3227     }
3228     pipeline_controller_->Suspend();
3229   } else {
3230     // When resuming, start the preroll attempt clock.
3231     if (preroll_attempt_pending_) {
3232       preroll_attempt_pending_ = false;
3233       preroll_attempt_start_time_ = tick_clock_->NowTicks();
3234     }
3235     pipeline_controller_->Resume();
3236   }
3237 }
3238
3239 WebMediaPlayerImpl::PlayState
3240 WebMediaPlayerImpl::UpdatePlayState_ComputePlayState(
3241     bool is_flinging,
3242     bool can_auto_suspend,
3243     bool is_suspended,
3244     bool is_backgrounded,
3245     bool is_in_picture_in_picture) {
3246   PlayState result;
3247
3248   bool must_suspend =
3249       was_suspended_for_frame_closed_ || pending_oneshot_suspend_;
3250   bool is_stale = delegate_->IsStale(delegate_id_);
3251
3252   if (stale_state_override_for_testing_.has_value() &&
3253       ready_state_ >= stale_state_override_for_testing_.value()) {
3254     is_stale = true;
3255   }
3256
3257   // This includes both data source (before pipeline startup) and pipeline
3258   // errors.
3259   bool has_error = IsNetworkStateError(network_state_);
3260
3261   // Note: Even though we get play/pause signals at kReadyStateHaveMetadata, we
3262   // must attempt to preroll until kReadyStateHaveFutureData so that the
3263   // canplaythrough event will be fired to the page (which may be waiting).
3264   bool have_future_data =
3265       highest_ready_state_ >= WebMediaPlayer::kReadyStateHaveFutureData;
3266
3267   // Background suspend is only enabled for paused players.
3268   // In the case of players with audio the session should be kept.
3269   bool background_suspended = can_auto_suspend && is_backgrounded && paused_ &&
3270                               have_future_data && !is_in_picture_in_picture;
3271
3272   // Idle suspension is allowed prior to kReadyStateHaveMetadata since there
3273   // exist mechanisms to exit the idle state when the player is capable of
3274   // reaching the kReadyStateHaveMetadata state; see didLoadingProgress().
3275   //
3276   // TODO(sandersd): Make the delegate suspend idle players immediately when
3277   // hidden.
3278   bool idle_suspended = can_auto_suspend && is_stale && paused_ && !seeking_ &&
3279                         !overlay_info_.is_fullscreen && !needs_first_frame_;
3280
3281   // If we're already suspended, see if we can wait for user interaction. Prior
3282   // to kReadyStateHaveMetadata, we require `is_stale` to remain suspended.
3283   // `is_stale` will be cleared when we receive data which may take us to
3284   // kReadyStateHaveMetadata.
3285   bool can_stay_suspended = (is_stale || have_future_data) && is_suspended &&
3286                             paused_ && !seeking_ && !needs_first_frame_;
3287
3288   // Combined suspend state.
3289   result.is_suspended = must_suspend || idle_suspended ||
3290 #if defined(TIZEN_MULTIMEDIA)
3291                         was_suspended_by_player_ ||
3292 #endif
3293                         background_suspended || can_stay_suspended;
3294
3295   DVLOG(3) << __func__ << ": must_suspend=" << must_suspend
3296            << ", idle_suspended=" << idle_suspended
3297            << ", background_suspended=" << background_suspended
3298            << ", can_stay_suspended=" << can_stay_suspended
3299            << ", is_stale=" << is_stale
3300            << ", have_future_data=" << have_future_data
3301            << ", paused_=" << paused_ << ", seeking_=" << seeking_;
3302
3303   // We do not treat `playback_rate_` == 0 as paused. For the media session,
3304   // being paused implies displaying a play button, which is incorrect in this
3305   // case. For memory usage reporting, we just use the same definition (but we
3306   // don't have to).
3307   //
3308   // Similarly, we don't consider `ended_` to be paused. Blink will immediately
3309   // call pause() or seek(), so `ended_` should not affect the computation.
3310   // Despite that, `ended_` does result in a separate paused state, to simplify
3311   // the contract for SetDelegateState().
3312   //
3313   // `has_remote_controls` indicates if the player can be controlled outside the
3314   // page (e.g. via the notification controls or by audio focus events). Idle
3315   // suspension does not destroy the media session, because we expect that the
3316   // notification controls (and audio focus) remain. With some exceptions for
3317   // background videos, the player only needs to have audio to have controls
3318   // (requires `have_current_data`).
3319   //
3320   // `alive` indicates if the player should be present (not `GONE`) to the
3321   // delegate, either paused or playing. The following must be true for the
3322   // player:
3323   //   - `have_current_data`, since playback can't begin before that point, we
3324   //     need to know whether we are paused to correctly configure the session,
3325   //     and also because the tracks and duration are passed to DidPlay(),
3326   //   - `is_flinging` is false (RemotePlayback is not handled by the delegate)
3327   //   - `has_error` is false as player should have no errors,
3328   //   - `background_suspended` is false, otherwise `has_remote_controls` must
3329   //     be true.
3330   //
3331   // TODO(sandersd): If Blink told us the paused state sooner, we could detect
3332   // if the remote controls are available sooner.
3333
3334   // Background videos with audio don't have remote controls if background
3335   // suspend is enabled and resuming background videos is not (original Android
3336   // behavior).
3337   bool backgrounded_video_has_no_remote_controls =
3338       IsBackgroundSuspendEnabled(this) && !IsResumeBackgroundVideosEnabled() &&
3339       is_backgrounded && HasVideo();
3340   bool have_current_data = highest_ready_state_ >= kReadyStateHaveCurrentData;
3341   bool can_play = !has_error && have_current_data;
3342   bool has_remote_controls =
3343       HasAudio() && !backgrounded_video_has_no_remote_controls;
3344   bool alive = can_play && !is_flinging && !must_suspend &&
3345                (!background_suspended || has_remote_controls);
3346   if (!alive) {
3347     // Do not mark players as idle when flinging.
3348     result.delegate_state = DelegateState::GONE;
3349     result.is_idle = delegate_->IsIdle(delegate_id_) && !is_flinging;
3350   } else if (paused_) {
3351     // TODO(sandersd): Is it possible to have a suspended session, be ended,
3352     // and not be paused? If so we should be in a PLAYING state.
3353     result.delegate_state = DelegateState::PAUSED;
3354     result.is_idle = !seeking_;
3355   } else {
3356     result.delegate_state = DelegateState::PLAYING;
3357     result.is_idle = false;
3358   }
3359
3360   // It's not critical if some cases where memory usage can change are missed,
3361   // since media memory changes are usually gradual.
3362   result.is_memory_reporting_enabled = !has_error && can_play && !is_flinging &&
3363                                        !result.is_suspended &&
3364                                        (!paused_ || seeking_);
3365
3366   return result;
3367 }
3368
3369 void WebMediaPlayerImpl::MakeDemuxerThreadDumper(media::Demuxer* demuxer) {
3370   DCHECK(main_task_runner_->BelongsToCurrentThread());
3371   DCHECK(!media_thread_mem_dumper_);
3372
3373   // base::Unretained() is safe here. `demuxer` is owned by |demuxer_manager_|,
3374   // which is destroyed on the main thread; before that happens,
3375   // ~WebMediaPlayerImpl() posts a media thread task that deletes
3376   // |media_thread_mem_dumper_| and waits for it to finish.
3377   media_thread_mem_dumper_ = std::make_unique<media::MemoryDumpProviderProxy>(
3378       "WebMediaPlayer_MediaThread", media_task_runner_,
3379       base::BindRepeating(&WebMediaPlayerImpl::OnMediaThreadMemoryDump,
3380                           media_player_id_, base::Unretained(demuxer)));
3381 }
3382
3383 bool WebMediaPlayerImpl::CouldPlayIfEnoughData() {
3384   return client_->CouldPlayIfEnoughData();
3385 }
3386
3387 bool WebMediaPlayerImpl::IsMediaPlayerRendererClient() {
3388   // MediaPlayerRendererClientFactory is the only factory that uses
3389   // MediaResource::Type::URL for the moment.
3390   return renderer_factory_selector_->GetCurrentFactory()
3391              ->GetRequiredMediaResourceType() ==
3392          media::MediaResource::Type::KUrl;
3393 }
3394
3395 void WebMediaPlayerImpl::ReportMemoryUsage() {
3396   DCHECK(main_task_runner_->BelongsToCurrentThread());
3397
3398   // About base::Unretained() usage below: We destroy `demuxer_manager_` on the
3399   // main thread.  Before that, however, ~WebMediaPlayerImpl() posts a task to
3400   // the media thread and waits for it to finish.  Hence, the GetMemoryUsage()
3401   // task posted here must finish earlier.
3402   //
3403   // The exception to the above is when OnError() has been called. If we're in
3404   // the error state we've already shut down the pipeline and can't rely on it
3405   // to cycle the media thread before we destroy `demuxer_manager_`. In this
3406   // case skip collection of the demuxer memory stats.
3407   if (demuxer_manager_ && !IsNetworkStateError(network_state_)) {
3408     demuxer_manager_->RespondToDemuxerMemoryUsageReport(base::BindOnce(
3409         &WebMediaPlayerImpl::FinishMemoryUsageReport, weak_this_));
3410   } else {
3411     FinishMemoryUsageReport(0);
3412   }
3413 }
3414
3415 void WebMediaPlayerImpl::FinishMemoryUsageReport(int64_t demuxer_memory_usage) {
3416   DCHECK(main_task_runner_->BelongsToCurrentThread());
3417
3418   const auto stats = GetPipelineStatistics();
3419   const int64_t data_source_memory_usage =
3420       demuxer_manager_->GetDataSourceMemoryUsage();
3421
3422   // If we have video, no video memory usage, and we've rendered the first
3423   // frame, assume the VideoFrameCompositor is holding onto the last frame after
3424   // we've suspended the pipeline, in which case the video renderer reports zero
3425   // memory usage.
3426   //
3427   // Technically this should use the coded size, but that requires us to hop to
3428   // the compositor to get it, and byte-perfect accuracy isn't important here.
3429   const int64_t video_memory_usage =
3430       stats.video_memory_usage +
3431       ((pipeline_metadata_.has_video && !stats.video_memory_usage &&
3432         has_first_frame_)
3433            ? media::VideoFrame::AllocationSize(media::PIXEL_FORMAT_I420,
3434                                                pipeline_metadata_.natural_size)
3435            : 0);
3436
3437   const int64_t current_memory_usage =
3438       stats.audio_memory_usage + video_memory_usage + data_source_memory_usage +
3439       demuxer_memory_usage;
3440
3441   DVLOG(3) << "Memory Usage -- Total: " << current_memory_usage
3442            << " Audio: " << stats.audio_memory_usage
3443            << ", Video: " << video_memory_usage
3444            << ", DataSource: " << data_source_memory_usage
3445            << ", Demuxer: " << demuxer_memory_usage;
3446
3447   const int64_t delta = current_memory_usage - last_reported_memory_usage_;
3448   last_reported_memory_usage_ = current_memory_usage;
3449   adjust_allocated_memory_cb_.Run(delta);
3450 }
3451
3452 void WebMediaPlayerImpl::OnMainThreadMemoryDump(
3453     media::MediaPlayerLoggingID id,
3454     const base::trace_event::MemoryDumpArgs& args,
3455     base::trace_event::ProcessMemoryDump* pmd) {
3456   const auto stats = GetPipelineStatistics();
3457   auto player_node_name =
3458       base::StringPrintf("media/webmediaplayer/player_0x%x", id);
3459   auto* player_node = pmd->CreateAllocatorDump(player_node_name);
3460   player_node->AddScalar(
3461       base::trace_event::MemoryAllocatorDump::kNameObjectCount,
3462       base::trace_event::MemoryAllocatorDump::kUnitsObjects, 1);
3463
3464   if (args.level_of_detail !=
3465       base::trace_event::MemoryDumpLevelOfDetail::kBackground) {
3466     bool suspended = pipeline_controller_->IsPipelineSuspended();
3467     auto player_state =
3468         base::StringPrintf("Paused: %d Ended: %d ReadyState: %d Suspended: %d",
3469                            paused_, ended_, GetReadyState(), suspended);
3470     player_node->AddString("player_state", "", player_state);
3471   }
3472
3473   CreateAllocation(pmd, id, "audio", stats.audio_memory_usage);
3474   CreateAllocation(pmd, id, "video", stats.video_memory_usage);
3475
3476   if (demuxer_manager_->HasDataSource()) {
3477     CreateAllocation(pmd, id, "data_source",
3478                      demuxer_manager_->GetDataSourceMemoryUsage());
3479   }
3480 }
3481
3482 // static
3483 void WebMediaPlayerImpl::OnMediaThreadMemoryDump(
3484     media::MediaPlayerLoggingID id,
3485     Demuxer* demuxer,
3486     const base::trace_event::MemoryDumpArgs& args,
3487     base::trace_event::ProcessMemoryDump* pmd) {
3488   if (!demuxer)
3489     return;
3490
3491   CreateAllocation(pmd, id, "demuxer", demuxer->GetMemoryUsage());
3492 }
3493
3494 void WebMediaPlayerImpl::ScheduleIdlePauseTimer() {
3495   // Only schedule the pause timer if we're playing, or paused but going to
3496   // resume when foregrounded, and we're currently suspended and have audio.
3497   if ((paused_ && !paused_when_hidden_) ||
3498       !pipeline_controller_->IsSuspended() || !HasAudio()) {
3499     return;
3500   }
3501
3502 #if BUILDFLAG(IS_ANDROID)
3503   // Don't pause videos casted as part of RemotePlayback.
3504   if (is_flinging_)
3505     return;
3506 #endif
3507
3508   // Idle timeout chosen arbitrarily.
3509   background_pause_timer_.Start(
3510       FROM_HERE, base::Seconds(5),
3511       base::BindOnce(
3512           &WebMediaPlayerClient::PausePlayback, base::Unretained(client_),
3513           WebMediaPlayerClient::PauseReason::kSuspendedPlayerIdleTimeout));
3514 }
3515
3516 void WebMediaPlayerImpl::CreateWatchTimeReporter() {
3517   if (!HasVideo() && !HasAudio())
3518     return;
3519
3520   // MediaPlayerRenderer does not know about tracks until playback starts.
3521   // Assume audio-only unless the natural size has been detected.
3522   bool has_video = pipeline_metadata_.has_video;
3523   if (using_media_player_renderer_) {
3524     has_video = !pipeline_metadata_.natural_size.IsEmpty();
3525   }
3526
3527   // Create the watch time reporter and synchronize its initial state.
3528   watch_time_reporter_ = std::make_unique<WatchTimeReporter>(
3529       media::mojom::PlaybackProperties::New(
3530           pipeline_metadata_.has_audio, has_video, false, false,
3531           GetDemuxerType() == media::DemuxerType::kChunkDemuxer, is_encrypted_,
3532           embedded_media_experience_enabled_,
3533           media::mojom::MediaStreamType::kNone, renderer_type_),
3534       pipeline_metadata_.natural_size,
3535       base::BindRepeating(&WebMediaPlayerImpl::GetCurrentTimeInternal,
3536                           base::Unretained(this)),
3537       base::BindRepeating(&WebMediaPlayerImpl::GetPipelineStatistics,
3538                           base::Unretained(this)),
3539       media_metrics_provider_.get(),
3540       frame_->GetTaskRunner(TaskType::kInternalMedia));
3541   watch_time_reporter_->OnVolumeChange(volume_);
3542   watch_time_reporter_->OnDurationChanged(GetPipelineMediaDuration());
3543
3544   if (delegate_->IsFrameHidden())
3545     watch_time_reporter_->OnHidden();
3546   else
3547     watch_time_reporter_->OnShown();
3548
3549   if (client_->HasNativeControls())
3550     watch_time_reporter_->OnNativeControlsEnabled();
3551   else
3552     watch_time_reporter_->OnNativeControlsDisabled();
3553
3554   switch (client_->GetDisplayType()) {
3555     case DisplayType::kInline:
3556       watch_time_reporter_->OnDisplayTypeInline();
3557       break;
3558     case DisplayType::kFullscreen:
3559       watch_time_reporter_->OnDisplayTypeFullscreen();
3560       break;
3561     case DisplayType::kPictureInPicture:
3562       watch_time_reporter_->OnDisplayTypePictureInPicture();
3563       break;
3564   }
3565
3566   UpdateSecondaryProperties();
3567
3568   // If the WatchTimeReporter was recreated in the middle of playback, we want
3569   // to resume playback here too since we won't get another play() call. When
3570   // seeking, the seek completion will restart it if necessary.
3571   if (!paused_ && !seeking_)
3572     watch_time_reporter_->OnPlaying();
3573 }
3574
3575 void WebMediaPlayerImpl::UpdateSecondaryProperties() {
3576   watch_time_reporter_->UpdateSecondaryProperties(
3577       media::mojom::SecondaryPlaybackProperties::New(
3578           pipeline_metadata_.audio_decoder_config.codec(),
3579           pipeline_metadata_.video_decoder_config.codec(),
3580           pipeline_metadata_.audio_decoder_config.profile(),
3581           pipeline_metadata_.video_decoder_config.profile(),
3582           audio_decoder_type_, video_decoder_type_,
3583           pipeline_metadata_.audio_decoder_config.encryption_scheme(),
3584           pipeline_metadata_.video_decoder_config.encryption_scheme(),
3585           pipeline_metadata_.natural_size));
3586 }
3587
3588 bool WebMediaPlayerImpl::IsHidden() const {
3589   DCHECK(main_task_runner_->BelongsToCurrentThread());
3590
3591   return delegate_->IsFrameHidden() && !was_suspended_for_frame_closed_;
3592 }
3593
3594 bool WebMediaPlayerImpl::IsStreaming() const {
3595   return demuxer_manager_->IsStreaming();
3596 }
3597
3598 bool WebMediaPlayerImpl::DoesOverlaySupportMetadata() const {
3599   return pipeline_metadata_.video_decoder_config.video_transformation() ==
3600          media::kNoTransformation;
3601 }
3602
3603 void WebMediaPlayerImpl::UpdateRemotePlaybackCompatibility(bool is_compatible) {
3604   DCHECK(main_task_runner_->BelongsToCurrentThread());
3605
3606   client_->RemotePlaybackCompatibilityChanged(demuxer_manager_->LoadedUrl(),
3607                                               is_compatible);
3608 }
3609
3610 void WebMediaPlayerImpl::ForceStaleStateForTesting(ReadyState target_state) {
3611   stale_state_override_for_testing_.emplace(target_state);
3612   UpdatePlayState();
3613 }
3614
3615 bool WebMediaPlayerImpl::IsSuspendedForTesting() {
3616   // This intentionally uses IsPipelineSuspended since we need to know when the
3617   // pipeline has reached the suspended state, not when it's in suspending.
3618   return pipeline_controller_->IsPipelineSuspended();
3619 }
3620
3621 bool WebMediaPlayerImpl::DidLazyLoad() const {
3622   return did_lazy_load_;
3623 }
3624
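     // Cancels the pending lazy-load have-enough callback and requests a first
     // frame if one hasn't been produced yet.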
3625 void WebMediaPlayerImpl::OnBecameVisible() {
3626   have_enough_after_lazy_load_cb_.Cancel();
3627   needs_first_frame_ = !has_first_frame_;
3628   UpdatePlayState();
3629 }
3630
3631 bool WebMediaPlayerImpl::IsOpaque() const {
3632   return opaque_;
3633 }
3634
3635 int WebMediaPlayerImpl::GetDelegateId() {
3636   return delegate_id_;
3637 }
3638
3639 absl::optional<viz::SurfaceId> WebMediaPlayerImpl::GetSurfaceId() {
3640   if (!surface_layer_for_video_enabled_)
3641     return absl::nullopt;
3642   return bridge_->GetSurfaceId();
3643 }
3644
3645 void WebMediaPlayerImpl::RequestVideoFrameCallback() {
3646   // If the first frame hasn't been received, kick off a request to generate one
3647   // since we may not always do so for hidden preload=metadata playbacks.
3648   if (!has_first_frame_) {
3649     OnBecameVisible();
3650   }
3651
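       // Ask the compositor to notify us, back on this thread, once the next
       // frame has been presented.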
3652   compositor_->SetOnFramePresentedCallback(
3653       base::BindPostTaskToCurrentDefault(base::BindOnce(
3654           &WebMediaPlayerImpl::OnNewFramePresentedCallback, weak_this_)));
3655 }
3656
3657 void WebMediaPlayerImpl::OnNewFramePresentedCallback() {
3658   client_->OnRequestVideoFrameCallback();
3659 }
3660
3661 std::unique_ptr<WebMediaPlayer::VideoFramePresentationMetadata>
3662 WebMediaPlayerImpl::GetVideoFramePresentationMetadata() {
3663   return compositor_->GetLastPresentedFrameMetadata();
3664 }
3665
3666 void WebMediaPlayerImpl::UpdateFrameIfStale() {
3667   // base::Unretained is safe here because `compositor_` is destroyed on
3668   // `vfc_task_runner_`. The destruction is queued from `this`' destructor,
3669   // which also runs on `main_task_runner_`, which makes it impossible for
3670   // UpdateCurrentFrameIfStale() to be queued after `compositor_`'s dtor.
3671   vfc_task_runner_->PostTask(
3672       FROM_HERE,
3673       base::BindOnce(&VideoFrameCompositor::UpdateCurrentFrameIfStale,
3674                      base::Unretained(compositor_.get()),
3675                      VideoFrameCompositor::UpdateType::kBypassClient));
3676 }
3677
3678 base::WeakPtr<WebMediaPlayer> WebMediaPlayerImpl::AsWeakPtr() {
3679   return weak_this_;
3680 }
3681
3682 bool WebMediaPlayerImpl::ShouldPausePlaybackWhenHidden() const {
3683   DCHECK(main_task_runner_->BelongsToCurrentThread());
3684
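       // `preserve_audio` is true when audio playback (or audio capture) should
       // keep this media playing while hidden.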
3685   const bool preserve_audio =
3686       should_pause_background_muted_audio_
3687           ? HasUnmutedAudio() || audio_source_provider_->IsAudioBeingCaptured()
3688           : HasAudio();
3689
3690   // An audio-only stream is allowed to keep playing in the background.
3691   if (!HasVideo() && preserve_audio)
3692     return false;
3693
3694   // MediaPlayer always signals audio and video, so use an empty natural size to
3695   // determine if there's really video or not.
3696   if (using_media_player_renderer_ &&
3697       pipeline_metadata_.natural_size.IsEmpty() && preserve_audio) {
3698     return false;
3699   }
3700
3701   // PiP is the only exception when background video playback is disabled.
3702   if (HasVideo() && IsInPictureInPicture())
3703     return false;
3704
3705   // This takes precedence over every restriction except PiP.
3706   if (!is_background_video_playback_enabled_)
3707     return true;
3708
3709   if (is_flinging_)
3710     return false;
3711
3712   // If suspending background video, pause any video that's not unlocked to play
3713   // in the background.
3714   if (IsBackgroundSuspendEnabled(this)) {
3715     return !preserve_audio || (IsResumeBackgroundVideosEnabled() &&
3716                                video_locked_when_paused_when_hidden_);
3717   }
3718
3719   if (HasVideo() && IsVideoBeingCaptured())
3720     return false;
3721
3722   return !preserve_audio;
3723 }
3724
3725 bool WebMediaPlayerImpl::ShouldDisableVideoWhenHidden() const {
3726   DCHECK(main_task_runner_->BelongsToCurrentThread());
3727
3728   if (!is_background_video_track_optimization_supported_)
3729     return false;
3730
3731   // Only disable the video track on audio + video playbacks, otherwise they
3732   // should be paused or left alone.
3733   if (!HasVideo() || !HasAudio())
3734     return false;
3735
3736   // Disabling tracks causes seeks which can cause problematic network delays
3737   // on streaming resources.
3738   if (IsStreaming())
3739     return false;
3740
3741   // In these cases something external needs the frames.
3742   if (IsInPictureInPicture() || IsVideoBeingCaptured() || is_flinging_)
3743     return false;
3744
3745   // Videos shorter than the maximum allowed keyframe distance can be optimized.
3746   base::TimeDelta duration = GetPipelineMediaDuration();
3747
3748   constexpr base::TimeDelta kMaxKeyframeDistanceToDisableBackgroundVideo =
3749       base::Milliseconds(kMaxKeyframeDistanceToDisableBackgroundVideoMs);
3750   if (duration < kMaxKeyframeDistanceToDisableBackgroundVideo)
3751     return true;
3752
3753   // Otherwise, only optimize videos with shorter average keyframe distance.
3754   auto stats = GetPipelineStatistics();
3755   return stats.video_keyframe_distance_average <
3756          kMaxKeyframeDistanceToDisableBackgroundVideo;
3757 }
3758
3759 void WebMediaPlayerImpl::UpdateBackgroundVideoOptimizationState() {
3760   if (IsHidden()) {
3761     if (ShouldPausePlaybackWhenHidden()) {
3762       update_background_status_cb_.Cancel();
3763       is_background_status_change_cancelled_ = true;
3764       PauseVideoIfNeeded();
3765     } else if (is_background_status_change_cancelled_) {
3766       // Only trigger updates when we don't have one already scheduled.
3767       update_background_status_cb_.Reset(
3768           base::BindOnce(&WebMediaPlayerImpl::DisableVideoTrackIfNeeded,
3769                          base::Unretained(this)));
3770       is_background_status_change_cancelled_ = false;
3771
3772       // Defer disabling the track until we're sure the clip will be backgrounded
3773       // for some time. Resuming may take half a second, so frequent tab switches
3774       // will otherwise yield a poor user experience. Per http://crbug.com/709302,
3775       // disabling/enabling too fast may also cause AV sync issues.
3776       main_task_runner_->PostDelayedTask(
3777           FROM_HERE, update_background_status_cb_.callback(),
3778           base::Seconds(10));
3779     }
3780   } else {
3781     update_background_status_cb_.Cancel();
3782     is_background_status_change_cancelled_ = true;
3783     EnableVideoTrackIfNeeded();
3784   }
3785 }
3786
3787 void WebMediaPlayerImpl::PauseVideoIfNeeded() {
3788   DCHECK(IsHidden());
3789
3790   // Don't pause video while the pipeline is stopped, resuming or seeking.
3791   // Also if the video is paused already.
3792   if (!pipeline_controller_->IsPipelineRunning() || is_pipeline_resuming_ ||
3793       seeking_ || paused_)
3794     return;
3795
3796   // client_->PausePlayback() will get `paused_when_hidden_` set to
3797   // false and UpdatePlayState() called, so set the flag to true after and then
3798   // return.
3799   client_->PausePlayback(
3800       WebMediaPlayerClient::PauseReason::kBackgroundVideoOptimization);
3801   paused_when_hidden_ = true;
3802 }
3803
3804 void WebMediaPlayerImpl::EnableVideoTrackIfNeeded() {
3805   // Don't change video track while the pipeline is stopped, resuming or
3806   // seeking.
3807   if (!pipeline_controller_->IsPipelineRunning() || is_pipeline_resuming_ ||
3808       seeking_)
3809     return;
3810
3811   if (video_track_disabled_) {
3812     video_track_disabled_ = false;
3813     if (client_->HasSelectedVideoTrack()) {
3814       WebMediaPlayer::TrackId trackId = client_->GetSelectedVideoTrackId();
3815       SelectedVideoTrackChanged(&trackId);
3816     }
3817   }
3818 }
3819
3820 void WebMediaPlayerImpl::DisableVideoTrackIfNeeded() {
3821   DCHECK(IsHidden());
3822
3823   // Don't change video track while the pipeline is resuming or seeking.
3824   if (is_pipeline_resuming_ || seeking_)
3825     return;
3826
3827   if (!video_track_disabled_ && ShouldDisableVideoWhenHidden()) {
3828     video_track_disabled_ = true;
3829     SelectedVideoTrackChanged(nullptr);
3830   }
3831 }
3832
3833 void WebMediaPlayerImpl::SetPipelineStatisticsForTest(
3834     const media::PipelineStatistics& stats) {
3835   pipeline_statistics_for_test_ = absl::make_optional(stats);
3836 }
3837
3838 media::PipelineStatistics WebMediaPlayerImpl::GetPipelineStatistics() const {
3839   DCHECK(main_task_runner_->BelongsToCurrentThread());
3840
3841   return pipeline_statistics_for_test_.value_or(
3842       pipeline_controller_->GetStatistics());
3843 }
3844
3845 void WebMediaPlayerImpl::SetPipelineMediaDurationForTest(
3846     base::TimeDelta duration) {
3847   pipeline_media_duration_for_test_ = absl::make_optional(duration);
3848 }
3849
3850 base::TimeDelta WebMediaPlayerImpl::GetPipelineMediaDuration() const {
3851   DCHECK(main_task_runner_->BelongsToCurrentThread());
3852
3853   return pipeline_media_duration_for_test_.value_or(
3854       pipeline_controller_->GetMediaDuration());
3855 }
3856
3857 media::MediaContentType WebMediaPlayerImpl::GetMediaContentType() const {
3858   return media::DurationToMediaContentType(GetPipelineMediaDuration());
3859 }
3860
3861 void WebMediaPlayerImpl::SwitchToRemoteRenderer(
3862     const std::string& remote_device_friendly_name) {
3863   DCHECK(main_task_runner_->BelongsToCurrentThread());
3864
3865   DCHECK(!is_remote_rendering_);
3866   is_remote_rendering_ = true;
3867
3868   DCHECK(!disable_pipeline_auto_suspend_);
3869   disable_pipeline_auto_suspend_ = true;
3870
3871   // Capabilities reporting should only be performed for local playbacks.
3872   video_decode_stats_reporter_.reset();
3873
3874   // Request a restart of the media pipeline. A remote renderer will be created
3875   // via the `renderer_factory_selector_`.
3876   ScheduleRestart();
3877   if (client_) {
3878     client_->MediaRemotingStarted(
3879         WebString::FromUTF8(remote_device_friendly_name));
3880   }
3881 }
3882
3883 void WebMediaPlayerImpl::SwitchToLocalRenderer(
3884     media::MediaObserverClient::ReasonToSwitchToLocal reason) {
3885   DCHECK(main_task_runner_->BelongsToCurrentThread());
3886   if (!is_remote_rendering_)
3887     return;  // Is currently with local renderer.
3888   is_remote_rendering_ = false;
3889
3890   DCHECK(disable_pipeline_auto_suspend_);
3891   disable_pipeline_auto_suspend_ = false;
3892
3893   // Capabilities reporting may resume now that playback is local.
3894   CreateVideoDecodeStatsReporter();
3895
3896   // Request a restart of the media pipeline. A local renderer will be created
3897   // via the `renderer_factory_selector_`.
3898   ScheduleRestart();
3899   if (client_)
3900     client_->MediaRemotingStopped(GetSwitchToLocalMessage(reason));
3901 }
3902
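     // Records `key` into suffixed histograms (.EME, .All, .MSE/.SRC) as selected
     // by `Flags`, the encryption state, and the demuxer type.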
3903 template <uint32_t Flags, typename... T>
3904 void WebMediaPlayerImpl::WriteSplitHistogram(
3905     void (*UmaFunction)(const std::string&, T...),
3906     const std::string& key,
3907     const T&... values) {
3908   std::string strkey = std::string(key);
3909
3910   if (Flags & kEncrypted) {
3911     if (is_encrypted_)
3912       UmaFunction(strkey + ".EME", values...);
3913   }
3914
3915   if (Flags & kTotal)
3916     UmaFunction(strkey + ".All", values...);
3917
3918   if (Flags & kPlaybackType) {
3919     auto demuxer_type = GetDemuxerType();
3920     if (!demuxer_type.has_value())
3921       return;
3922     switch (*demuxer_type) {
3923       case media::DemuxerType::kChunkDemuxer:
3924         UmaFunction(strkey + ".MSE", values...);
3925         break;
3926       default:
3927         // TODO (crbug/1377053): Add additional cases for HLS, eventually.
3928         UmaFunction(strkey + ".SRC", values...);
3929         break;
3930     }
3931   }
3932 }
3933
3934 void WebMediaPlayerImpl::RecordUnderflowDuration(base::TimeDelta duration) {
3935   DCHECK(demuxer_manager_->HasDataSource() ||
3936          GetDemuxerType() == media::DemuxerType::kChunkDemuxer ||
3937          GetDemuxerType() == media::DemuxerType::kManifestDemuxer);
3938   WriteSplitHistogram<kPlaybackType | kEncrypted>(
3939       &base::UmaHistogramTimes, "Media.UnderflowDuration2", duration);
3940 }
3941
3942 void WebMediaPlayerImpl::RecordVideoNaturalSize(const gfx::Size& natural_size) {
3943   // Always report video natural size to MediaLog.
3944   media_log_->AddEvent<MediaLogEvent::kVideoSizeChanged>(natural_size);
3945   media_log_->SetProperty<MediaLogProperty::kResolution>(natural_size);
3946
3947   if (initial_video_height_recorded_)
3948     return;
3949
3950   initial_video_height_recorded_ = true;
3951
3952   int height = natural_size.height();
3953
3954   WriteSplitHistogram<kPlaybackType | kEncrypted | kTotal>(
3955       &base::UmaHistogramCustomCounts, "Media.VideoHeight.Initial", height, 100,
3956       10000, size_t{50});
3957
3958   if (playback_events_recorder_)
3959     playback_events_recorder_->OnNaturalSizeChanged(natural_size);
3960 }
3961
3962 void WebMediaPlayerImpl::SetTickClockForTest(
3963     const base::TickClock* tick_clock) {
3964   tick_clock_ = tick_clock;
3965   buffered_data_source_host_->SetTickClockForTest(tick_clock);
3966 }
3967
3968 void WebMediaPlayerImpl::OnFirstFrame(base::TimeTicks frame_time,
3969                                       bool is_frame_readable) {
3970   DCHECK(!load_start_time_.is_null());
3971   DCHECK(!skip_metrics_due_to_startup_suspend_);
3972
3973   has_first_frame_ = true;
3974   needs_first_frame_ = false;
3975   is_frame_readable_ = is_frame_readable;
3976
3977   const base::TimeDelta elapsed = frame_time - load_start_time_;
3978   media_metrics_provider_->SetTimeToFirstFrame(elapsed);
3979   WriteSplitHistogram<kPlaybackType | kEncrypted>(
3980       &base::UmaHistogramMediumTimes, "Media.TimeToFirstFrame", elapsed);
3981
3982   media::PipelineStatistics ps = GetPipelineStatistics();
3983   if (client_) {
3984     client_->OnFirstFrame(frame_time, ps.video_bytes_decoded);
3985
3986     // Needed to signal HTMLVideoElement that it should remove the poster image.
3987     if (has_poster_) {
3988       client_->Repaint();
3989     }
3990   }
3991 }
3992
3993 void WebMediaPlayerImpl::RecordEncryptionScheme(
3994     const std::string& stream_name,
3995     media::EncryptionScheme encryption_scheme) {
3996   DCHECK(stream_name == "Audio" || stream_name == "Video");
3997
3998   // If the stream is not encrypted, don't record it.
3999   if (encryption_scheme == media::EncryptionScheme::kUnencrypted)
4000     return;
4001
4002   base::UmaHistogramEnumeration(
4003       "Media.EME.EncryptionScheme.Initial." + stream_name,
4004       DetermineEncryptionSchemeUMAValue(encryption_scheme),
4005       EncryptionSchemeUMA::kCount);
4006 }
4007
4008 bool WebMediaPlayerImpl::IsInPictureInPicture() const {
4009   DCHECK(client_);
4010   return client_->GetDisplayType() == DisplayType::kPictureInPicture;
4011 }
4012
4013 void WebMediaPlayerImpl::MaybeSetContainerNameForMetrics() {
4014   // Pipeline startup failed before even getting a demuxer setup.
4015   if (!demuxer_manager_->HasDemuxer()) {
4016     return;
4017   }
4018
4019   // Container has already been set.
4020   if (highest_ready_state_ >= WebMediaPlayer::kReadyStateHaveMetadata)
4021     return;
4022
4023   // Only report metrics for demuxers that provide container information.
4024   auto container = demuxer_manager_->GetContainerForMetrics();
4025   if (container.has_value())
4026     media_metrics_provider_->SetContainerName(container.value());
4027 }
4028
4029 void WebMediaPlayerImpl::MaybeUpdateBufferSizesForPlayback() {
4030   // Don't increase the MultiBufferDataSource buffer size until we've reached
4031   // kReadyStateHaveEnoughData. Otherwise we will unnecessarily slow down
4032   // playback startup -- it can instead be done for free after playback starts.
4033   if (highest_ready_state_ < kReadyStateHaveEnoughData) {
4034     return;
4035   }
4036
4037   demuxer_manager_->OnDataSourcePlaybackRateChange(playback_rate_, paused_);
4038 }
4039
4040 void WebMediaPlayerImpl::OnSimpleWatchTimerTick() {
4041   if (playback_events_recorder_)
4042     playback_events_recorder_->OnPipelineStatistics(GetPipelineStatistics());
4043 }
4044
GURL WebMediaPlayerImpl::GetSrcAfterRedirects() {
  return demuxer_manager_->GetDataSourceUrlAfterRedirects().value_or(GURL());
}

void WebMediaPlayerImpl::UpdateSmoothnessHelper() {
  // If the experiment flag is off, then do nothing.
  if (!base::FeatureList::IsEnabled(media::kMediaLearningSmoothnessExperiment))
    return;

  // If we're paused, or if we can't get all the features, then clear any
  // smoothness helper and stop.  We'll try to create it later when we're
  // playing and have all the features.
  if (paused_ || !HasVideo() || pipeline_metadata_.natural_size.IsEmpty() ||
      !last_reported_fps_) {
    smoothness_helper_.reset();
    return;
  }

  // Fill in features.
  // NOTE: this is a very bad way to do this, since it memorizes the order of
  // features in the task.  However, it'll do for now.
  learning::FeatureVector features;
  features.push_back(learning::FeatureValue(
      static_cast<int>(pipeline_metadata_.video_decoder_config.codec())));
  features.push_back(learning::FeatureValue(
      pipeline_metadata_.video_decoder_config.profile()));
  features.push_back(
      learning::FeatureValue(pipeline_metadata_.natural_size.width()));
  features.push_back(learning::FeatureValue(*last_reported_fps_));

  // If we have a smoothness helper, and we're not changing the features, then
  // do nothing.  This prevents restarting the helper for no reason.
  if (smoothness_helper_ && features == smoothness_helper_->features())
    return;

  // Create or restart the smoothness helper with `features`.
  smoothness_helper_ = SmoothnessHelper::Create(
      GetLearningTaskController(learning::tasknames::kConsecutiveBadWindows),
      GetLearningTaskController(learning::tasknames::kConsecutiveNNRs),
      features, this);
}

std::unique_ptr<learning::LearningTaskController>
WebMediaPlayerImpl::GetLearningTaskController(const char* task_name) {
  // Get the LearningTaskController for `task_name`.
  learning::LearningTask task = learning::MediaLearningTasks::Get(task_name);
  DCHECK_EQ(task.name, task_name);

  mojo::Remote<learning::mojom::LearningTaskController> remote_ltc;
  media_metrics_provider_->AcquireLearningTaskController(
      task.name, remote_ltc.BindNewPipeAndPassReceiver());
  return std::make_unique<learning::MojoLearningTaskController>(
      task, std::move(remote_ltc));
}

bool WebMediaPlayerImpl::HasUnmutedAudio() const {
  // Pretend that the media has no audio if it has never played unmuted. This
  // avoids actions tied to audible media, such as taking audio focus or
  // showing a media notification. To keep the experience consistent, the rule
  // does not apply once the media has been audible, so system state does not
  // flicker as the user mutes and unmutes the player.
  return HasAudio() && !client_->WasAlwaysMuted();
}

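// Heuristic: the video is considered to be captured if one of its frames was
// requested recently (e.g. by a capture/readback path).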
bool WebMediaPlayerImpl::IsVideoBeingCaptured() const {
  // 5 seconds chosen arbitrarily since most videos are never captured.
  return tick_clock_->NowTicks() - last_frame_request_time_ < base::Seconds(5);
}

void WebMediaPlayerImpl::RegisterFrameSinkHierarchy() {
  if (bridge_)
    bridge_->RegisterFrameSinkHierarchy();
}

void WebMediaPlayerImpl::UnregisterFrameSinkHierarchy() {
  if (bridge_)
    bridge_->UnregisterFrameSinkHierarchy();
}

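// Session-level UMAs are only reported for the default renderer
// (kRendererImpl) and the MediaFoundation renderer.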
void WebMediaPlayerImpl::ReportSessionUMAs() const {
  if (renderer_type_ != media::RendererType::kRendererImpl &&
      renderer_type_ != media::RendererType::kMediaFoundation) {
    return;
  }

  // Report the `Media.DroppedFrameCount2.{RendererType}.{EncryptedOrClear}`
  // UMA.
  constexpr char kDroppedFrameUmaPrefix[] = "Media.DroppedFrameCount2.";
  std::string uma_name = kDroppedFrameUmaPrefix;
  uma_name += GetRendererName(renderer_type_);
  if (is_encrypted_)
    uma_name += ".Encrypted";
  else
    uma_name += ".Clear";
  base::UmaHistogramCounts1M(uma_name, DroppedFrameCount());

  if (!is_encrypted_) {
    // Report the `Media.FrameReadBackCount.{RendererType}` UMA.
    constexpr char kFrameReadBackUmaPrefix[] = "Media.FrameReadBackCount.";
    uma_name = kFrameReadBackUmaPrefix;
    uma_name += GetRendererName(renderer_type_);
    base::UmaHistogramCounts10M(uma_name, video_frame_readback_count_);
  }
}

bool WebMediaPlayerImpl::PassedTimingAllowOriginCheck() const {
  return demuxer_manager_->PassedDataSourceTimingAllowOriginCheck();
}

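// Pushes the latest media metadata (audio/video presence, codecs, content
// type, and encryption status) to the client and the delegate.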
void WebMediaPlayerImpl::DidMediaMetadataChange() {
  media::MediaContentType content_type = GetMediaContentType();
  bool is_encrypted_media =
      pipeline_metadata_.audio_decoder_config.is_encrypted() ||
      pipeline_metadata_.video_decoder_config.is_encrypted();

  client_->DidMediaMetadataChange(
      delegate_has_audio_, HasVideo(),
      pipeline_metadata_.audio_decoder_config.codec(),
      pipeline_metadata_.video_decoder_config.codec(), content_type,
      is_encrypted_media);

  delegate_->DidMediaMetadataChange(delegate_id_, delegate_has_audio_,
                                    HasVideo(), content_type);
}

#if defined(TIZEN_VIDEO_HOLE)
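// The video hole path is used only when there is video and hole punching is
// enabled; on non-TV builds it is additionally restricted to fullscreen
// playback.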
bool WebMediaPlayerImpl::ShouldUseVideoHole() const {
  if (!HasVideo() || !is_video_hole_)
    return false;
#if !BUILDFLAG(IS_TIZEN_TV)
  if (!overlay_info_.is_fullscreen)
    return false;
#endif
  return true;
}

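// Paints a single transparent "hole" frame matching the natural video size so
// the compositor output can be punched through to the platform video plane
// underneath.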
void WebMediaPlayerImpl::CreateVideoHoleFrame() {
  gfx::Size size(pipeline_metadata_.natural_size.width(),
                 pipeline_metadata_.natural_size.height());

  scoped_refptr<media::VideoFrame> video_frame =
      media::VideoFrame::CreateHoleFrame(size);
  if (video_frame)
    compositor_->PaintSingleFrame(video_frame);
}

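// Receives the on-screen rect of the drawable content and forwards the new
// geometry to the pipeline when it differs from the last computed rect.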
void WebMediaPlayerImpl::OnDrawableContentRectChanged(gfx::Rect rect,
                                                      bool is_video) {
  gfx::RectF rect_f = static_cast<gfx::RectF>(rect);
  LOG(INFO) << __func__ << " : " << rect_f.ToString();
  if (rect_f != last_computed_rect_)
    pipeline_controller_->SetMediaGeometry(rect_f);

  last_computed_rect_ = rect_f;
}

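// Recomputes the video layer's screen-space rect and stores it in
// `last_computed_rect_`. Returns false when there is no video layer or it is
// not attached to a layer tree host.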
bool WebMediaPlayerImpl::UpdateBoundaryRectangle() {
  if (!video_layer_)
    return false;

  // Compute the geometry of the video frame layer.
  cc::Layer* layer = video_layer_.get();
  if (!layer->layer_tree_host())
    return false;

  gfx::Rect layer_screen_space_rect = cc::MathUtil::MapEnclosingClippedRect(
      layer->ScreenSpaceTransform(), gfx::Rect(layer->bounds()));
  gfx::RectF rect = gfx::RectF(layer_screen_space_rect);
  // The early-return for unchanged geometry is currently disabled, so the rect
  // below is refreshed on every call:
  // if (last_computed_rect_ == rect)
  //   return false;

  // Store the computed geometry.
  last_computed_rect_ = rect;
  return true;
}

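// The layer-bound update timer defers a recomputation of the video layer
// geometry; when it fires, the new rect is pushed to the pipeline and the
// timer is stopped (see OnLayerBoundUpdateTimerFired).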
void WebMediaPlayerImpl::StartLayerBoundUpdateTimer() {
  if (layer_bound_update_timer_.IsRunning())
    return;

  LOG(INFO) << __func__;
  layer_bound_update_timer_.Start(
      FROM_HERE, kLayerBoundUpdateInterval, this,
      &WebMediaPlayerImpl::OnLayerBoundUpdateTimerFired);
}

void WebMediaPlayerImpl::StopLayerBoundUpdateTimer() {
  if (layer_bound_update_timer_.IsRunning())
    layer_bound_update_timer_.Stop();
}

void WebMediaPlayerImpl::OnLayerBoundUpdateTimerFired() {
  LOG(INFO) << __func__;
  if (UpdateBoundaryRectangle())
    pipeline_controller_->SetMediaGeometry(last_computed_rect_);
  StopLayerBoundUpdateTimer();
}
#endif
}  // namespace blink