1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "content/renderer/media/media_stream_impl.h"
9 #include "base/logging.h"
10 #include "base/strings/string_util.h"
11 #include "base/strings/stringprintf.h"
12 #include "base/strings/utf_string_conversions.h"
13 #include "content/renderer/media/media_stream_audio_renderer.h"
14 #include "content/renderer/media/media_stream_audio_source.h"
15 #include "content/renderer/media/media_stream_dependency_factory.h"
16 #include "content/renderer/media/media_stream_dispatcher.h"
17 #include "content/renderer/media/media_stream_extra_data.h"
18 #include "content/renderer/media/media_stream_video_capturer_source.h"
19 #include "content/renderer/media/peer_connection_tracker.h"
20 #include "content/renderer/media/rtc_video_renderer.h"
21 #include "content/renderer/media/webrtc_audio_capturer.h"
22 #include "content/renderer/media/webrtc_audio_renderer.h"
23 #include "content/renderer/media/webrtc_local_audio_renderer.h"
24 #include "content/renderer/media/webrtc_logging.h"
25 #include "content/renderer/media/webrtc_uma_histograms.h"
26 #include "content/renderer/render_thread_impl.h"
27 #include "media/base/audio_hardware_config.h"
28 #include "third_party/WebKit/public/platform/WebMediaConstraints.h"
29 #include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"
30 #include "third_party/WebKit/public/web/WebDocument.h"
31 #include "third_party/WebKit/public/web/WebFrame.h"
32 #include "third_party/WebKit/public/web/WebMediaStreamRegistry.h"
// Copies both the mandatory and the optional constraints from a Blink
// WebMediaConstraints object into the content-side StreamOptions
// representation, as UTF-8 name/value pairs.
// NOTE(review): closing braces of the two loops and of the function are
// elided from this excerpt (see gaps in the original line numbering).
void CopyStreamConstraints(const blink::WebMediaConstraints& constraints,
                           StreamOptions::Constraints* mandatory,
                           StreamOptions::Constraints* optional) {
  blink::WebVector<blink::WebMediaConstraint> mandatory_constraints;
  constraints.getMandatoryConstraints(mandatory_constraints);
  for (size_t i = 0; i < mandatory_constraints.size(); i++) {
    mandatory->push_back(StreamOptions::Constraint(
        mandatory_constraints[i].m_name.utf8(),
        mandatory_constraints[i].m_value.utf8()));
  blink::WebVector<blink::WebMediaConstraint> optional_constraints;
  constraints.getOptionalConstraints(optional_constraints);
  for (size_t i = 0; i < optional_constraints.size(); i++) {
    optional->push_back(StreamOptions::Constraint(
        optional_constraints[i].m_name.utf8(),
        optional_constraints[i].m_value.utf8()));
// Monotonically increasing id used to tag each getUserMedia request sent
// to the browser process so replies can be matched back to the request.
static int g_next_request_id = 0;
// Returns the native webrtc stream backing |web_stream|, stored in its
// MediaStreamExtraData. NOTE(review): a null-check on |extra_data| appears
// to be elided from this excerpt — confirm against the full file.
webrtc::MediaStreamInterface* GetNativeMediaStream(
    const blink::WebMediaStream& web_stream) {
  content::MediaStreamExtraData* extra_data =
      static_cast<content::MediaStreamExtraData*>(web_stream.extraData());
  return extra_data->stream().get();
// Reads the sample rate and buffer size of the default audio output
// device from the renderer's cached hardware config.
void GetDefaultOutputDeviceParams(
    int* output_sample_rate, int* output_buffer_size) {
  // Fetch the default audio output hardware config.
  media::AudioHardwareConfig* hardware_config =
      RenderThreadImpl::current()->GetAudioHardwareConfig();
  *output_sample_rate = hardware_config->GetOutputSampleRate();
  *output_buffer_size = hardware_config->GetOutputBufferSize();
// MediaStreamImpl does not own |media_stream_dispatcher| or
// |dependency_factory|; both are expected to outlive this object.
MediaStreamImpl::MediaStreamImpl(
    RenderView* render_view,
    MediaStreamDispatcher* media_stream_dispatcher,
    MediaStreamDependencyFactory* dependency_factory)
    : RenderViewObserver(render_view),
      dependency_factory_(dependency_factory),
      media_stream_dispatcher_(media_stream_dispatcher) {
// Destructor; body elided from this excerpt.
MediaStreamImpl::~MediaStreamImpl() {
// Entry point for getUserMedia() from Blink. Parses the audio/video
// constraints into StreamOptions, records UMA/logging data, registers a
// UserMediaRequestInfo, and asks the MediaStreamDispatcher to generate a
// stream in the browser process.
// NOTE(review): several interior lines are elided from this excerpt
// (closing braces, the TrackGetUserMedia argument list, the declaration of
// |enable|, and the GenerateStream argument list).
void MediaStreamImpl::requestUserMedia(
    const blink::WebUserMediaRequest& user_media_request) {
  // Save histogram data so we can see how much GetUserMedia is used.
  // The histogram counts the number of calls to the JS API
  UpdateWebRTCMethodCount(WEBKIT_GET_USER_MEDIA);
  DCHECK(CalledOnValidThread());
  if (RenderThreadImpl::current()) {
    RenderThreadImpl::current()->peer_connection_tracker()->TrackGetUserMedia(
  int request_id = g_next_request_id++;
  StreamOptions options;
  blink::WebFrame* frame = NULL;
  GURL security_origin;
  bool enable_automatic_output_device_selection = false;
  // |user_media_request| can't be mocked. So in order to test at all we check
  if (user_media_request.isNull()) {
    // Null request (unit tests): ask for both audio and video.
    options.audio_requested = true;
    options.video_requested = true;
  if (user_media_request.audio()) {
    options.audio_requested = true;
    CopyStreamConstraints(user_media_request.audioConstraints(),
                          &options.mandatory_audio,
                          &options.optional_audio);
    // Check if this input device should be used to select a matching output
    // device for audio rendering.
    if (options.GetFirstAudioConstraintByName(
            kMediaStreamRenderToAssociatedSink, &enable, NULL) &&
        LowerCaseEqualsASCII(enable, "true")) {
      enable_automatic_output_device_selection = true;
  if (user_media_request.video()) {
    options.video_requested = true;
    CopyStreamConstraints(user_media_request.videoConstraints(),
                          &options.mandatory_video,
                          &options.optional_video);
  security_origin = GURL(user_media_request.securityOrigin().toString());
  // Get the WebFrame that requested a MediaStream.
  // The frame is needed to tell the MediaStreamDispatcher when a stream goes
  frame = user_media_request.ownerDocument().frame();
  DVLOG(1) << "MediaStreamImpl::requestUserMedia(" << request_id << ", [ "
           << "audio=" << (options.audio_requested)
           << " select associated sink: "
           << enable_automatic_output_device_selection
           << ", video=" << (options.video_requested) << " ], "
           << security_origin.spec() << ")";
  // Extract the explicitly requested device ids (if any) for logging.
  std::string audio_device_id;
  bool mandatory_audio;
  options.GetFirstAudioConstraintByName(kMediaStreamSourceInfoId,
                                        &audio_device_id, &mandatory_audio);
  std::string video_device_id;
  bool mandatory_video;
  options.GetFirstVideoConstraintByName(kMediaStreamSourceInfoId,
                                        &video_device_id, &mandatory_video);
  WebRtcLogMessage(base::StringPrintf(
      "MSI::requestUserMedia. request_id=%d"
      ", audio source id=%s mandatory= %s "
      ", video source id=%s mandatory= %s",
      audio_device_id.c_str(),
      mandatory_audio ? "true":"false",
      video_device_id.c_str(),
      mandatory_video ? "true":"false"));
  // Ownership of the UserMediaRequestInfo is held in |user_media_requests_|.
  user_media_requests_.push_back(
      new UserMediaRequestInfo(request_id, frame, user_media_request,
                               enable_automatic_output_device_selection));
  media_stream_dispatcher_->GenerateStream(
// Called when the page cancels a pending getUserMedia() request.
// NOTE(review): a null-check/guard on |request| appears to be elided from
// this excerpt.
void MediaStreamImpl::cancelUserMediaRequest(
    const blink::WebUserMediaRequest& user_media_request) {
  DCHECK(CalledOnValidThread());
  UserMediaRequestInfo* request = FindUserMediaRequestInfo(user_media_request);
  // We can't abort the stream generation process.
  // Instead, erase the request. Once the stream is generated we will stop the
  // stream if the request does not exist.
  DeleteUserMediaRequestInfo(request);
// Looks up the WebMediaStream registered for |url| in Blink's registry.
// NOTE(review): the parameter line (`const GURL& url) {`) is elided from
// this excerpt.
blink::WebMediaStream MediaStreamImpl::GetMediaStream(
  return blink::WebMediaStreamRegistry::lookupMediaStreamDescriptor(url);
// Returns true if |url| maps to a valid MediaStream that has at least one
// audio or video track. NOTE(review): the `return (stream && ...` opening
// of the final expression is elided from this excerpt.
bool MediaStreamImpl::IsMediaStream(const GURL& url) {
  blink::WebMediaStream web_stream(
      blink::WebMediaStreamRegistry::lookupMediaStreamDescriptor(url));
  if (web_stream.isNull() || !web_stream.extraData())
    return false;  // This is not a valid stream.
  webrtc::MediaStreamInterface* stream = GetNativeMediaStream(web_stream);
      (!stream->GetVideoTracks().empty() || !stream->GetAudioTracks().empty()));
// Creates a VideoFrameProvider (RTCVideoRenderer) for the first video
// track of the stream registered under the requested URL.
// NOTE(review): the `const GURL& url,` parameter line and the early-return
// for an empty track list are elided from this excerpt.
scoped_refptr<VideoFrameProvider>
MediaStreamImpl::GetVideoFrameProvider(
    const base::Closure& error_cb,
    const VideoFrameProvider::RepaintCB& repaint_cb) {
  DCHECK(CalledOnValidThread());
  blink::WebMediaStream web_stream(GetMediaStream(url));
  if (web_stream.isNull() || !web_stream.extraData())
    return NULL;  // This is not a valid stream.
  DVLOG(1) << "MediaStreamImpl::GetVideoFrameProvider stream:"
           << base::UTF16ToUTF8(web_stream.id());
  blink::WebVector<blink::WebMediaStreamTrack> video_tracks;
  web_stream.videoTracks(video_tracks);
  if (video_tracks.isEmpty())
  // Render only the first video track of the stream.
  return new RTCVideoRenderer(video_tracks[0], error_cb, repaint_cb);
// Creates (or shares) an audio renderer for the stream registered under
// |url|. Local streams get a WebRtcLocalAudioRenderer; remote WebRTC
// streams share a single WebRtcAudioRenderer per audio device, exposed
// through a per-stream proxy. NOTE(review): several early-return lines
// and closing braces are elided from this excerpt.
scoped_refptr<MediaStreamAudioRenderer>
MediaStreamImpl::GetAudioRenderer(const GURL& url, int render_frame_id) {
  DCHECK(CalledOnValidThread());
  blink::WebMediaStream web_stream(GetMediaStream(url));
  if (web_stream.isNull() || !web_stream.extraData())
    return NULL;  // This is not a valid stream.
  DVLOG(1) << "MediaStreamImpl::GetAudioRenderer stream:"
           << base::UTF16ToUTF8(web_stream.id());
  MediaStreamExtraData* extra_data =
      static_cast<MediaStreamExtraData*>(web_stream.extraData());
  // TODO(tommi): MediaStreams do not have a 'local or not' concept.
  // Tracks _might_, but even so, we need to fix the data flow so that
  // it works the same way for all track implementations, local, remote or what
  // In this function, we should simply create a renderer object that receives
  // and mixes audio from all the tracks that belong to the media stream.
  // We need to remove the |is_local| property from MediaStreamExtraData since
  // this concept is peerconnection specific (is a previously recorded stream
  // local or remote?).
  if (extra_data->is_local()) {
    // Create the local audio renderer if the stream contains audio tracks.
    blink::WebVector<blink::WebMediaStreamTrack> audio_tracks;
    web_stream.audioTracks(audio_tracks);
    if (audio_tracks.isEmpty())
    // TODO(xians): Add support for the case where the media stream contains
    // multiple audio tracks.
    return CreateLocalAudioRenderer(audio_tracks[0], render_frame_id);
  webrtc::MediaStreamInterface* stream = extra_data->stream().get();
  if (!stream || stream->GetAudioTracks().empty())
  // This is a remote WebRTC media stream.
  WebRtcAudioDeviceImpl* audio_device =
      dependency_factory_->GetWebRtcAudioDevice();
  // Share the existing renderer if any, otherwise create a new one.
  scoped_refptr<WebRtcAudioRenderer> renderer(audio_device->renderer());
  if (!renderer.get()) {
    renderer = CreateRemoteAudioRenderer(stream, render_frame_id);
  if (renderer.get() && !audio_device->SetAudioRenderer(renderer.get()))
  return renderer.get() ?
      renderer->CreateSharedAudioRendererProxy(stream) : NULL;
// Callback from MediaStreamDispatcher.
// The requested stream have been generated by the MediaStreamDispatcher.
// Builds Blink tracks/sources for the generated devices, initializes the
// WebMediaStream, and waits for all tracks to start before completing the
// getUserMedia request. NOTE(review): the `int request_id,` parameter,
// the `if (!request_info)` guard around the device-stop loops, and several
// argument/brace lines are elided from this excerpt.
void MediaStreamImpl::OnStreamGenerated(
    const std::string& label,
    const StreamDeviceInfoArray& audio_array,
    const StreamDeviceInfoArray& video_array) {
  DCHECK(CalledOnValidThread());
  DVLOG(1) << "MediaStreamImpl::OnStreamGenerated stream:" << label;
  UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id);
  // This can happen if the request is canceled or the frame reloads while
  // MediaStreamDispatcher is processing the request.
  // Only stop the device if the device is not used in another MediaStream.
  for (StreamDeviceInfoArray::const_iterator device_it = audio_array.begin();
       device_it != audio_array.end(); ++device_it) {
    if (!FindLocalSource(*device_it))
      media_stream_dispatcher_->StopStreamDevice(*device_it);
  for (StreamDeviceInfoArray::const_iterator device_it = video_array.begin();
       device_it != video_array.end(); ++device_it) {
    if (!FindLocalSource(*device_it))
      media_stream_dispatcher_->StopStreamDevice(*device_it);
  DVLOG(1) << "Request ID not found";
  request_info->generated = true;
  // WebUserMediaRequest don't have an implementation in unit tests.
  // Therefore we need to check for isNull here.
  blink::WebUserMediaRequest* request = &(request_info->request);
  blink::WebMediaConstraints audio_constraints = request->isNull() ?
      blink::WebMediaConstraints() : request->audioConstraints();
  blink::WebMediaConstraints video_constraints = request->isNull() ?
      blink::WebMediaConstraints() : request->videoConstraints();
  blink::WebVector<blink::WebMediaStreamTrack> audio_track_vector(
  CreateAudioTracks(audio_array, audio_constraints, &audio_track_vector,
  blink::WebVector<blink::WebMediaStreamTrack> video_track_vector(
  CreateVideoTracks(video_array, video_constraints, &video_track_vector,
  blink::WebString webkit_id = base::UTF8ToUTF16(label);
  blink::WebMediaStream* web_stream = &(request_info->web_stream);
  web_stream->initialize(webkit_id, audio_track_vector,
  // Wait for the tracks to be started successfully or to fail.
  request_info->CallbackOnTracksStarted(
      base::Bind(&MediaStreamImpl::OnCreateNativeTracksCompleted, AsWeakPtr()));
// Callback from MediaStreamDispatcher.
// The requested stream failed to be generated.
// Fails the pending getUserMedia request and deletes its bookkeeping.
// NOTE(review): the `if (!request_info)` guard and the final
// `false);` argument of CompleteGetUserMediaRequest are elided from this
// excerpt.
void MediaStreamImpl::OnStreamGenerationFailed(int request_id) {
  DCHECK(CalledOnValidThread());
  DVLOG(1) << "MediaStreamImpl::OnStreamGenerationFailed("
           << request_id << ")";
  UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id);
  // This can happen if the request is canceled or the frame reloads while
  // MediaStreamDispatcher is processing the request.
  DVLOG(1) << "Request ID not found";
  CompleteGetUserMediaRequest(request_info->web_stream,
                              &request_info->request,
  DeleteUserMediaRequestInfo(request_info);
// Callback from MediaStreamDispatcher.
// The browser process has stopped a device used by a MediaStream.
// Stops the matching local source (without notifying the dispatcher,
// which already knows), removes it from |local_sources_|, and drops any
// user media requests whose sources are now all gone.
// NOTE(review): the `if (!source_ptr)` guard and several closing braces
// are elided from this excerpt.
void MediaStreamImpl::OnDeviceStopped(
    const std::string& label,
    const StreamDeviceInfo& device_info) {
  DCHECK(CalledOnValidThread());
  DVLOG(1) << "MediaStreamImpl::OnDeviceStopped("
           << "{device_id = " << device_info.device.id << "})";
  const blink::WebMediaStreamSource* source_ptr = FindLocalSource(device_info);
  // This happens if the same device is used in several guM requests or
  // if a user happen stop a track from JS at the same time
  // as the underlying media device is unplugged from the system.
  // By creating |source| it is guaranteed that the blink::WebMediaStreamSource
  // object is valid during the cleanup.
  blink::WebMediaStreamSource source(*source_ptr);
  StopLocalSource(source, false);
  for (LocalStreamSources::iterator device_it = local_sources_.begin();
       device_it != local_sources_.end(); ++device_it) {
    if (device_it->source.id() == source.id()) {
      local_sources_.erase(device_it);
  // Remove the reference to this source from all |user_media_requests_|.
  // TODO(perkj): The below is not necessary once we don't need to support
  // MediaStream::Stop().
  UserMediaRequests::iterator it = user_media_requests_.begin();
  while (it != user_media_requests_.end()) {
    (*it)->RemoveSource(source);
    if ((*it)->AreAllSourcesRemoved()) {
      it = user_media_requests_.erase(it);
// Initializes |webkit_source| for |device|, reusing an already-registered
// local source when the same device is in use, otherwise creating a new
// video-capturer or audio source as extra data and registering it in
// |local_sources_|. NOTE(review): a `return;` after the reuse branch, the
// source-type argument to initialize(), constructor arguments for the two
// sources, and the `} else {` between the video and audio branches are
// elided from this excerpt.
void MediaStreamImpl::InitializeSourceObject(
    const StreamDeviceInfo& device,
    blink::WebMediaStreamSource::Type type,
    const blink::WebMediaConstraints& constraints,
    blink::WebFrame* frame,
    blink::WebMediaStreamSource* webkit_source) {
  const blink::WebMediaStreamSource* existing_source =
      FindLocalSource(device);
  if (existing_source) {
    *webkit_source = *existing_source;
    DVLOG(1) << "Source already exist. Reusing source with id "
             << webkit_source->id().utf8();
  webkit_source->initialize(
      base::UTF8ToUTF16(device.device.id),
      base::UTF8ToUTF16(device.device.name));
  DVLOG(1) << "Initialize source object :"
           << "id = " << webkit_source->id().utf8()
           << ", name = " << webkit_source->name().utf8();
  if (type == blink::WebMediaStreamSource::TypeVideo) {
    // The source object takes ownership of the capturer via setExtraData.
    MediaStreamVideoCapturerSource* video_source(
        new content::MediaStreamVideoCapturerSource(
            base::Bind(&MediaStreamImpl::OnLocalSourceStopped, AsWeakPtr()),
            dependency_factory_));
    webkit_source->setExtraData(video_source);
    DCHECK_EQ(blink::WebMediaStreamSource::TypeAudio, type);
    MediaStreamAudioSource* audio_source(
        new MediaStreamAudioSource(
            RenderViewObserver::routing_id(),
            base::Bind(&MediaStreamImpl::OnLocalSourceStopped, AsWeakPtr()),
            dependency_factory_));
    webkit_source->setExtraData(audio_source);
  local_sources_.push_back(LocalStreamSource(frame, *webkit_source));
// Creates one Blink video track per device, initializing (or reusing) the
// backing source and asking |request| to start each track.
// NOTE(review): the remaining InitializeSourceObject arguments
// (constraints, frame, &webkit_source) are elided from this excerpt.
void MediaStreamImpl::CreateVideoTracks(
    const StreamDeviceInfoArray& devices,
    const blink::WebMediaConstraints& constraints,
    blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks,
    UserMediaRequestInfo* request) {
  DCHECK_EQ(devices.size(), webkit_tracks->size());
  for (size_t i = 0; i < devices.size(); ++i) {
    blink::WebMediaStreamSource webkit_source;
    InitializeSourceObject(devices[i],
                           blink::WebMediaStreamSource::TypeVideo,
    (*webkit_tracks)[i].initialize(webkit_source);
    request->StartTrack((*webkit_tracks)[i], constraints);
// Creates one Blink audio track per device. When automatic output-device
// selection was not requested, the matched output device info is cleared
// so the default output device is used. NOTE(review): part of the WebRTC
// log format string, the InitializeSourceObject trailing arguments, and
// several closing braces are elided from this excerpt.
void MediaStreamImpl::CreateAudioTracks(
    const StreamDeviceInfoArray& devices,
    const blink::WebMediaConstraints& constraints,
    blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks,
    UserMediaRequestInfo* request) {
  DCHECK_EQ(devices.size(), webkit_tracks->size());
  // Log the device names for this request.
  for (StreamDeviceInfoArray::const_iterator it = devices.begin();
       it != devices.end(); ++it) {
    WebRtcLogMessage(base::StringPrintf(
        "Generated media stream for request id %d contains audio device name"
        it->device.name.c_str()));
  StreamDeviceInfoArray overridden_audio_array = devices;
  if (!request->enable_automatic_output_device_selection) {
    // If the GetUserMedia request did not explicitly set the constraint
    // kMediaStreamRenderToAssociatedSink, the output device parameters must
    for (StreamDeviceInfoArray::iterator it = overridden_audio_array.begin();
         it != overridden_audio_array.end(); ++it) {
      it->device.matched_output_device_id = "";
      it->device.matched_output = MediaStreamDevice::AudioDeviceParameters();
  for (size_t i = 0; i < overridden_audio_array.size(); ++i) {
    blink::WebMediaStreamSource webkit_source;
    InitializeSourceObject(overridden_audio_array[i],
                           blink::WebMediaStreamSource::TypeAudio,
    (*webkit_tracks)[i].initialize(webkit_source);
    request->StartTrack((*webkit_tracks)[i], constraints);
// Invoked once every track of a request has either started or failed.
// Creates the native local stream on success, completes the getUserMedia
// request either way, and cleans up on failure.
// NOTE(review): the trailing `request_succeeded);` argument of
// CompleteGetUserMediaRequest and closing braces are elided from this
// excerpt. The DVLOG tag says "...Complete(" while the method is
// ...Completed — consider aligning.
void MediaStreamImpl::OnCreateNativeTracksCompleted(
    UserMediaRequestInfo* request,
    bool request_succeeded) {
  // Create a native representation of the stream.
  if (request_succeeded) {
    dependency_factory_->CreateNativeLocalMediaStream(
        &request->web_stream,
        base::Bind(&MediaStreamImpl::OnLocalMediaStreamStop, AsWeakPtr()));
  DVLOG(1) << "MediaStreamImpl::OnCreateNativeTracksComplete("
           << "{request_id = " << request->request_id << "} "
           << "{request_succeeded = " << request_succeeded << "})";
  CompleteGetUserMediaRequest(request->web_stream, &request->request,
  if (!request_succeeded) {
    // TODO(perkj): Once we don't support MediaStream::Stop the |request_info|
    // can be deleted even if the request succeeds.
    DeleteUserMediaRequestInfo(request);
    StopUnreferencedSources(true);
// Callback from MediaStreamDispatcher for device enumeration; currently
// log-only in this excerpt. NOTE(review): the `int request_id,` parameter
// line and any body beyond the DVLOG are elided.
void MediaStreamImpl::OnDevicesEnumerated(
    const StreamDeviceInfoArray& device_array) {
  DVLOG(1) << "MediaStreamImpl::OnDevicesEnumerated("
           << request_id << ")";
// Callback from MediaStreamDispatcher when a single device was opened;
// log-only in this excerpt. NOTE(review): the `int request_id,` parameter
// line and any body beyond the DVLOG are elided.
void MediaStreamImpl::OnDeviceOpened(
    const std::string& label,
    const StreamDeviceInfo& video_device) {
  DVLOG(1) << "MediaStreamImpl::OnDeviceOpened("
           << request_id << ", " << label << ")";
// Callback from MediaStreamDispatcher when opening a device failed.
// NOTE(review): the log tag reads "VideoDeviceOpenFailed" while the method
// is OnDeviceOpenFailed — consider aligning them in a follow-up.
void MediaStreamImpl::OnDeviceOpenFailed(int request_id) {
  DVLOG(1) << "MediaStreamImpl::VideoDeviceOpenFailed("
           << request_id << ")";
// Resolves the Blink-side getUserMedia promise: success delivers the
// stream, failure rejects the request. NOTE(review): the `} else {`
// between the two calls and the closing braces are elided from this
// excerpt.
void MediaStreamImpl::CompleteGetUserMediaRequest(
    const blink::WebMediaStream& stream,
    blink::WebUserMediaRequest* request_info,
    bool request_succeeded) {
  if (request_succeeded) {
    request_info->requestSucceeded(stream);
    request_info->requestFailed();
// Looks up an already-created local source matching |device| by device id,
// type and session id; a match means the same capture device/session is
// already in use and can be shared. NOTE(review): the `return` of the
// found source and the final `return NULL;` are elided from this excerpt.
const blink::WebMediaStreamSource* MediaStreamImpl::FindLocalSource(
    const StreamDeviceInfo& device) const {
  for (LocalStreamSources::const_iterator it = local_sources_.begin();
       it != local_sources_.end(); ++it) {
    MediaStreamSource* source =
        static_cast<MediaStreamSource*>(it->source.extraData());
    const StreamDeviceInfo& active_device = source->device_info();
    if (active_device.device.id == device.device.id &&
        active_device.device.type == device.device.type &&
        active_device.session_id == device.session_id) {
// Returns whether any pending/active user media request still references
// |source|. NOTE(review): the `return true;` / `return false;` lines are
// elided from this excerpt.
bool MediaStreamImpl::IsSourceInRequests(
    const blink::WebMediaStreamSource& source) const {
  for (UserMediaRequests::const_iterator req_it = user_media_requests_.begin();
       req_it != user_media_requests_.end(); ++req_it) {
    if ((*req_it)->IsSourceUsed(source))
// Finds the request bookkeeping entry for a dispatcher |request_id|.
// NOTE(review): the `return *it;` and trailing `return NULL;` lines are
// elided from this excerpt.
MediaStreamImpl::UserMediaRequestInfo*
MediaStreamImpl::FindUserMediaRequestInfo(int request_id) {
  UserMediaRequests::iterator it = user_media_requests_.begin();
  for (; it != user_media_requests_.end(); ++it) {
    if ((*it)->request_id == request_id)
// Finds the bookkeeping entry for a Blink WebUserMediaRequest.
// NOTE(review): the `return *it;` and trailing `return NULL;` lines are
// elided from this excerpt.
MediaStreamImpl::UserMediaRequestInfo*
MediaStreamImpl::FindUserMediaRequestInfo(
    const blink::WebUserMediaRequest& request) {
  UserMediaRequests::iterator it = user_media_requests_.begin();
  for (; it != user_media_requests_.end(); ++it) {
    if ((*it)->request == request)
// Finds the bookkeeping entry whose generated stream has id |label|;
// only requests that already produced a stream are considered.
// NOTE(review): the `return *it;` and trailing `return NULL;` lines are
// elided from this excerpt.
MediaStreamImpl::UserMediaRequestInfo*
MediaStreamImpl::FindUserMediaRequestInfo(const std::string& label) {
  UserMediaRequests::iterator it = user_media_requests_.begin();
  for (; it != user_media_requests_.end(); ++it) {
    if ((*it)->generated && (*it)->web_stream.id() == base::UTF8ToUTF16(label))
// Removes |request| from |user_media_requests_| (a ScopedVector-style
// container, presumably deleting the entry — confirm against the member
// declaration). NOTE(review): the loop/function closing braces and any
// `return` after erase are elided from this excerpt.
void MediaStreamImpl::DeleteUserMediaRequestInfo(
    UserMediaRequestInfo* request) {
  UserMediaRequests::iterator it = user_media_requests_.begin();
  for (; it != user_media_requests_.end(); ++it) {
    if ((*it) == request) {
      user_media_requests_.erase(it);
// RenderViewObserver override: a detached frame is cleaned up exactly like
// a closing frame.
void MediaStreamImpl::FrameDetached(blink::WebFrame* frame) {
  // Do same thing as FrameWillClose.
  FrameWillClose(frame);
// RenderViewObserver override: cancels pending stream-generation requests
// that belong to |frame|, drops their bookkeeping, and stops local sources
// created by that frame (notifying the dispatcher).
// NOTE(review): several closing braces and the else/increment paths of the
// two while-loops are elided from this excerpt.
void MediaStreamImpl::FrameWillClose(blink::WebFrame* frame) {
  // Loop through all UserMediaRequests and find the requests that belong to the
  // frame that is being closed.
  UserMediaRequests::iterator request_it = user_media_requests_.begin();
  while (request_it != user_media_requests_.end()) {
    if ((*request_it)->frame == frame) {
      DVLOG(1) << "MediaStreamImpl::FrameWillClose: "
               << "Cancel user media request " << (*request_it)->request_id;
      // If the request is not generated, it means that a request
      // has been sent to the MediaStreamDispatcher to generate a stream
      // but MediaStreamDispatcher has not yet responded and we need to cancel
      if (!(*request_it)->generated) {
        media_stream_dispatcher_->CancelGenerateStream(
            (*request_it)->request_id, AsWeakPtr());
      request_it = user_media_requests_.erase(request_it);
  // Loop through all current local sources and stop the sources that were
  // created by the frame that will be closed.
  LocalStreamSources::iterator sources_it = local_sources_.begin();
  while (sources_it != local_sources_.end()) {
    if (sources_it->frame == frame) {
      StopLocalSource(sources_it->source, true);
      sources_it = local_sources_.erase(sources_it);
// Invoked when JS calls MediaStream::stop() on a generated local stream:
// drops the request's bookkeeping and stops sources no longer referenced
// by any request. NOTE(review): the else-branch (label not found) and
// closing braces are elided from this excerpt.
void MediaStreamImpl::OnLocalMediaStreamStop(
    const std::string& label) {
  DVLOG(1) << "MediaStreamImpl::OnLocalMediaStreamStop(" << label << ")";
  UserMediaRequestInfo* user_media_request = FindUserMediaRequestInfo(label);
  if (user_media_request) {
    DeleteUserMediaRequestInfo(user_media_request);
  StopUnreferencedSources(true);
// Invoked when a local source stops itself (e.g. capture failure): removes
// it from |local_sources_|, prunes requests whose sources are all removed,
// and tells the dispatcher to stop the underlying device.
// NOTE(review): the lines setting/checking |device_found| (its DCHECK) and
// several closing braces are elided from this excerpt.
void MediaStreamImpl::OnLocalSourceStopped(
    const blink::WebMediaStreamSource& source) {
  DCHECK(CalledOnValidThread());
  bool device_found = false;
  for (LocalStreamSources::iterator device_it = local_sources_.begin();
       device_it != local_sources_.end(); ++device_it) {
    if (device_it->source.id() == source.id()) {
      local_sources_.erase(device_it);
  // Remove the reference to this source from all |user_media_requests_|.
  // TODO(perkj): The below is not necessary once we don't need to support
  // MediaStream::Stop().
  UserMediaRequests::iterator it = user_media_requests_.begin();
  while (it != user_media_requests_.end()) {
    (*it)->RemoveSource(source);
    if ((*it)->AreAllSourcesRemoved()) {
      it = user_media_requests_.erase(it);
  // The source stopped on its own, so the dispatcher must be told to
  // release the device.
  MediaStreamSource* source_impl =
      static_cast<MediaStreamSource*> (source.extraData());
  media_stream_dispatcher_->StopStreamDevice(source_impl->device_info());
// Stops a local capture source. |notify_dispatcher| controls whether the
// browser-side dispatcher is told to stop the device (false when the
// browser initiated the stop and already knows). The stopped-callback is
// reset first so StopSource() does not re-enter OnLocalSourceStopped.
void MediaStreamImpl::StopLocalSource(
    const blink::WebMediaStreamSource& source,
    bool notify_dispatcher) {
  MediaStreamSource* source_impl =
      static_cast<MediaStreamSource*> (source.extraData());
  DVLOG(1) << "MediaStreamImpl::StopLocalSource("
           << "{device_id = " << source_impl->device_info().device.id << "})";
  if (notify_dispatcher)
    media_stream_dispatcher_->StopStreamDevice(source_impl->device_info());
  source_impl->ResetSourceStoppedCallback();
  source_impl->StopSource();
// Stops and removes every local source that no remaining user media
// request references. NOTE(review): the else-branch advancing the
// iterator and closing braces are elided from this excerpt.
void MediaStreamImpl::StopUnreferencedSources(bool notify_dispatcher) {
  LocalStreamSources::iterator source_it = local_sources_.begin();
  while (source_it != local_sources_.end()) {
    if (!IsSourceInRequests(source_it->source)) {
      StopLocalSource(source_it->source, notify_dispatcher);
      source_it = local_sources_.erase(source_it);
// Creates the shared renderer for a remote WebRTC stream's audio. Uses the
// authorized output device parameters when an input device granted them,
// otherwise falls back to the default output device parameters.
// NOTE(review): early-return lines, the rest of the DVLOG, and the
// remaining GetAuthorizedDeviceInfoForAudioRenderer arguments are elided
// from this excerpt.
scoped_refptr<WebRtcAudioRenderer> MediaStreamImpl::CreateRemoteAudioRenderer(
    webrtc::MediaStreamInterface* stream,
    int render_frame_id) {
  if (stream->GetAudioTracks().empty())
  DVLOG(1) << "MediaStreamImpl::CreateRemoteAudioRenderer label:"
  // TODO(tommi): Change the default value of session_id to be
  // StreamDeviceInfo::kNoId. Also update AudioOutputDevice etc.
  int session_id = 0, sample_rate = 0, buffer_size = 0;
  if (!GetAuthorizedDeviceInfoForAudioRenderer(&session_id,
    GetDefaultOutputDeviceParams(&sample_rate, &buffer_size);
  return new WebRtcAudioRenderer(
      stream, RenderViewObserver::routing_id(), render_frame_id, session_id,
      sample_rate, buffer_size);
// Creates a loopback renderer for a local audio track (plays back what the
// capturer records). NOTE(review): the remaining
// GetAuthorizedDeviceInfoForAudioRenderer arguments and most of the
// WebRtcLocalAudioRenderer constructor arguments are elided from this
// excerpt.
scoped_refptr<WebRtcLocalAudioRenderer>
MediaStreamImpl::CreateLocalAudioRenderer(
    const blink::WebMediaStreamTrack& audio_track,
    int render_frame_id) {
  DVLOG(1) << "MediaStreamImpl::CreateLocalAudioRenderer";
  int session_id = 0, sample_rate = 0, buffer_size = 0;
  if (!GetAuthorizedDeviceInfoForAudioRenderer(&session_id,
    GetDefaultOutputDeviceParams(&sample_rate, &buffer_size);
  // Create a new WebRtcLocalAudioRenderer instance and connect it to the
  // existing WebRtcAudioCapturer so that the renderer can use it as source.
  return new WebRtcLocalAudioRenderer(
      RenderViewObserver::routing_id(),
// Delegates to the WebRtcAudioDeviceImpl to fetch the output parameters of
// the device authorized for rendering, if any. NOTE(review): the
// `int* session_id,` parameter line and a null-check on |audio_device| are
// elided from this excerpt.
bool MediaStreamImpl::GetAuthorizedDeviceInfoForAudioRenderer(
    int* output_sample_rate,
    int* output_frames_per_buffer) {
  DCHECK(CalledOnValidThread());
  WebRtcAudioDeviceImpl* audio_device =
      dependency_factory_->GetWebRtcAudioDevice();
  return audio_device->GetAuthorizedDeviceInfoForAudioRenderer(
      session_id, output_sample_rate, output_frames_per_buffer);
// Wraps the native webrtc stream and records whether it is locally
// generated. NOTE(review): the `stream_(stream),` initializer line is
// elided from this excerpt.
MediaStreamExtraData::MediaStreamExtraData(
    webrtc::MediaStreamInterface* stream, bool is_local)
    : is_local_(is_local) {
// Destructor; body elided from this excerpt.
MediaStreamExtraData::~MediaStreamExtraData() {
// Registers the callback run when a local stream is stopped from JS.
void MediaStreamExtraData::SetLocalStreamStopCallback(
    const StreamStopCallback& stop_callback) {
  stream_stop_callback_ = stop_callback;
// Forwards a local stream stop to the registered callback (if any),
// identifying the stream by its native label.
void MediaStreamExtraData::OnLocalStreamStop() {
  if (!stream_stop_callback_.is_null())
    stream_stop_callback_.Run(stream_->label());
// Per-request bookkeeping for one getUserMedia call.
// NOTE(review): the `int request_id,` parameter line and several
// initializer lines (e.g. frame/request/generated) are elided from this
// excerpt.
MediaStreamImpl::UserMediaRequestInfo::UserMediaRequestInfo(
    blink::WebFrame* frame,
    const blink::WebUserMediaRequest& request,
    bool enable_automatic_output_device_selection)
    : request_id(request_id),
      enable_automatic_output_device_selection(
          enable_automatic_output_device_selection),
      request_failed_(false) {
// Destructor; remainder of the body elided from this excerpt.
MediaStreamImpl::UserMediaRequestInfo::~UserMediaRequestInfo() {
  DVLOG(1) << "~UserMediaRequestInfo";
// Starts a track against its native source and records the source both as
// used by this request and as waiting for a started/failed callback.
// NOTE(review): the trailing AsWeakPtr()-style bind argument and closing
// parentheses/braces are elided from this excerpt.
void MediaStreamImpl::UserMediaRequestInfo::StartTrack(
    const blink::WebMediaStreamTrack& track,
    const blink::WebMediaConstraints& constraints) {
  MediaStreamSource* native_source =
      static_cast <MediaStreamSource*>(track.source().extraData());
  DCHECK(native_source);
  sources_.push_back(track.source());
  sources_waiting_for_callback_.push_back(native_source);
  native_source->AddTrack(
      track, constraints, base::Bind(
          &MediaStreamImpl::UserMediaRequestInfo::OnTrackStarted,
// Registers the ready callback and immediately checks whether all tracks
// have already finished starting (in which case it fires right away).
void MediaStreamImpl::UserMediaRequestInfo::CallbackOnTracksStarted(
    const ResourcesReady& callback) {
  DCHECK(ready_callback_.is_null());
  ready_callback_ = callback;
  CheckAllTracksStarted();
// Per-track started/failed callback: removes the source from the waiting
// list, marks the whole request failed if any track failed, and re-checks
// completion. NOTE(review): the `source)` argument of std::find and the
// `if (!success)` guard before |request_failed_| are elided from this
// excerpt.
void MediaStreamImpl::UserMediaRequestInfo::OnTrackStarted(
    MediaStreamSource* source, bool success) {
  DVLOG(1) << "OnTrackStarted";
  std::vector<MediaStreamSource*>::iterator it =
      std::find(sources_waiting_for_callback_.begin(),
                sources_waiting_for_callback_.end(),
  DCHECK(it != sources_waiting_for_callback_.end());
  sources_waiting_for_callback_.erase(it);
  // All tracks must be started successfully. Otherwise the request is a
  request_failed_ = true;
  CheckAllTracksStarted();
// Fires the ready callback once every source has reported started/failed;
// the success flag is the negation of |request_failed_|.
void MediaStreamImpl::UserMediaRequestInfo::CheckAllTracksStarted() {
  if (!ready_callback_.is_null() && sources_waiting_for_callback_.empty())
    ready_callback_.Run(this, !request_failed_);
// Returns whether this request uses |source| (matched by id).
// NOTE(review): the `sources_.begin(),` initializer line and the
// `return true;` / `return false;` lines are elided from this excerpt.
bool MediaStreamImpl::UserMediaRequestInfo::IsSourceUsed(
    const blink::WebMediaStreamSource& source) const {
  for (std::vector<blink::WebMediaStreamSource>::const_iterator source_it =
       source_it != sources_.end(); ++source_it) {
    if (source_it->id() == source.id())
// Removes |source| (matched by id) from this request's source list.
// NOTE(review): the `sources_.begin(),` initializer line, the erase/break
// body of the match branch, and closing braces are elided from this
// excerpt.
void MediaStreamImpl::UserMediaRequestInfo::RemoveSource(
    const blink::WebMediaStreamSource& source) {
  for (std::vector<blink::WebMediaStreamSource>::iterator it =
       it != sources_.end(); ++it) {
    if (source.id() == it->id()) {
} // namespace content