1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "content/renderer/media/media_stream_impl.h"
9 #include "base/logging.h"
10 #include "base/strings/string_util.h"
11 #include "base/strings/stringprintf.h"
12 #include "base/strings/utf_string_conversions.h"
13 #include "content/renderer/media/media_stream.h"
14 #include "content/renderer/media/media_stream_audio_renderer.h"
15 #include "content/renderer/media/media_stream_audio_source.h"
16 #include "content/renderer/media/media_stream_dependency_factory.h"
17 #include "content/renderer/media/media_stream_dispatcher.h"
18 #include "content/renderer/media/media_stream_video_capturer_source.h"
19 #include "content/renderer/media/media_stream_video_track.h"
20 #include "content/renderer/media/peer_connection_tracker.h"
21 #include "content/renderer/media/rtc_video_renderer.h"
22 #include "content/renderer/media/webrtc_audio_capturer.h"
23 #include "content/renderer/media/webrtc_audio_renderer.h"
24 #include "content/renderer/media/webrtc_local_audio_renderer.h"
25 #include "content/renderer/media/webrtc_logging.h"
26 #include "content/renderer/media/webrtc_uma_histograms.h"
27 #include "content/renderer/render_thread_impl.h"
28 #include "media/base/audio_hardware_config.h"
29 #include "third_party/WebKit/public/platform/WebMediaConstraints.h"
30 #include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"
31 #include "third_party/WebKit/public/web/WebDocument.h"
32 #include "third_party/WebKit/public/web/WebLocalFrame.h"
33 #include "third_party/WebKit/public/web/WebMediaStreamRegistry.h"
// Converts a Blink WebMediaConstraints object into the content-layer
// StreamOptions representation: each mandatory and optional constraint is
// copied as a UTF-8 name/value pair into |mandatory| / |optional|.
38 void CopyStreamConstraints(const blink::WebMediaConstraints& constraints,
39 StreamOptions::Constraints* mandatory,
40 StreamOptions::Constraints* optional) {
41 blink::WebVector<blink::WebMediaConstraint> mandatory_constraints;
42 constraints.getMandatoryConstraints(mandatory_constraints);
43 for (size_t i = 0; i < mandatory_constraints.size(); i++) {
44 mandatory->push_back(StreamOptions::Constraint(
45 mandatory_constraints[i].m_name.utf8(),
46 mandatory_constraints[i].m_value.utf8()));
// Same UTF-8 name/value conversion, applied to the optional constraints.
49 blink::WebVector<blink::WebMediaConstraint> optional_constraints;
50 constraints.getOptionalConstraints(optional_constraints);
51 for (size_t i = 0; i < optional_constraints.size(); i++) {
52 optional->push_back(StreamOptions::Constraint(
53 optional_constraints[i].m_name.utf8(),
54 optional_constraints[i].m_value.utf8()));
// File-scoped counter used to assign a unique id to each getUserMedia
// request so that dispatcher callbacks can be correlated with their request.
// NOTE(review): no synchronization; assumes all use is on one thread
// (consistent with the CalledOnValidThread() checks below) — confirm.
58 static int g_next_request_id = 0;
// Fills |output_sample_rate| / |output_buffer_size| with the parameters of
// the default audio output device, read from the render thread's cached
// AudioHardwareConfig. Both out-params must be non-NULL.
60 void GetDefaultOutputDeviceParams(
61 int* output_sample_rate, int* output_buffer_size) {
62 // Fetch the default audio output hardware config.
63 media::AudioHardwareConfig* hardware_config =
64 RenderThreadImpl::current()->GetAudioHardwareConfig();
65 *output_sample_rate = hardware_config->GetOutputSampleRate();
66 *output_buffer_size = hardware_config->GetOutputBufferSize();
// Constructs the per-RenderView getUserMedia handler. |media_stream_dispatcher|
// and |dependency_factory| are stored as raw (non-owning) pointers; the
// caller is responsible for keeping them alive for this object's lifetime.
71 MediaStreamImpl::MediaStreamImpl(
72 RenderView* render_view,
73 MediaStreamDispatcher* media_stream_dispatcher,
74 MediaStreamDependencyFactory* dependency_factory)
75 : RenderViewObserver(render_view),
76 dependency_factory_(dependency_factory),
77 media_stream_dispatcher_(media_stream_dispatcher) {
80 MediaStreamImpl::~MediaStreamImpl() {
// Blink entry point for JS getUserMedia(). Translates the Blink request into
// a StreamOptions structure, records the pending request in
// |user_media_requests_|, and asks the browser process (via
// MediaStreamDispatcher::GenerateStream) to create the stream. The result
// arrives asynchronously in OnStreamGenerated()/OnStreamGenerationFailed().
83 void MediaStreamImpl::requestUserMedia(
84 const blink::WebUserMediaRequest& user_media_request) {
85 // Save histogram data so we can see how much GetUserMedia is used.
86 // The histogram counts the number of calls to the JS API
88 UpdateWebRTCMethodCount(WEBKIT_GET_USER_MEDIA);
89 DCHECK(CalledOnValidThread());
// Report the call to the PeerConnection tracker (webrtc-internals logging).
91 if (RenderThreadImpl::current()) {
92 RenderThreadImpl::current()->peer_connection_tracker()->TrackGetUserMedia(
// The id is echoed back to us in the dispatcher callbacks.
96 int request_id = g_next_request_id++;
97 StreamOptions options;
98 blink::WebLocalFrame* frame = NULL;
100 bool enable_automatic_output_device_selection = false;
102 // |user_media_request| can't be mocked. So in order to test at all we check
// Unit-test path: a null request means "ask for both audio and video".
104 if (user_media_request.isNull()) {
106 options.audio_requested = true;
107 options.video_requested = true;
109 if (user_media_request.audio()) {
110 options.audio_requested = true;
111 CopyStreamConstraints(user_media_request.audioConstraints(),
112 &options.mandatory_audio,
113 &options.optional_audio);
115 // Check if this input device should be used to select a matching output
116 // device for audio rendering.
118 if (options.GetFirstAudioConstraintByName(
119 kMediaStreamRenderToAssociatedSink, &enable, NULL) &&
120 LowerCaseEqualsASCII(enable, "true")) {
121 enable_automatic_output_device_selection = true;
124 if (user_media_request.video()) {
125 options.video_requested = true;
126 CopyStreamConstraints(user_media_request.videoConstraints(),
127 &options.mandatory_video,
128 &options.optional_video);
131 security_origin = GURL(user_media_request.securityOrigin().toString());
132 // Get the WebFrame that requested a MediaStream.
133 // The frame is needed to tell the MediaStreamDispatcher when a stream goes
135 frame = user_media_request.ownerDocument().frame();
139 DVLOG(1) << "MediaStreamImpl::requestUserMedia(" << request_id << ", [ "
140 << "audio=" << (options.audio_requested)
141 << " select associated sink: "
142 << enable_automatic_output_device_selection
143 << ", video=" << (options.video_requested) << " ], "
144 << security_origin.spec() << ")";
// Extract the explicitly requested device ids (if any) purely for the
// WebRTC diagnostic log line below.
146 std::string audio_device_id;
147 bool mandatory_audio;
148 options.GetFirstAudioConstraintByName(kMediaStreamSourceInfoId,
149 &audio_device_id, &mandatory_audio);
150 std::string video_device_id;
151 bool mandatory_video;
152 options.GetFirstVideoConstraintByName(kMediaStreamSourceInfoId,
153 &video_device_id, &mandatory_video);
155 WebRtcLogMessage(base::StringPrintf(
156 "MSI::requestUserMedia. request_id=%d"
157 ", audio source id=%s mandatory= %s "
158 ", video source id=%s mandatory= %s",
160 audio_device_id.c_str(),
161 mandatory_audio ? "true":"false",
162 video_device_id.c_str(),
163 mandatory_video ? "true":"false"));
// Track the pending request; OnStreamGenerated looks it up by request_id.
// NOTE(review): raw new into |user_media_requests_| — ownership presumably
// held by the container (ScopedVector or similar); confirm in the header.
165 user_media_requests_.push_back(
166 new UserMediaRequestInfo(request_id, frame, user_media_request,
167 enable_automatic_output_device_selection));
169 media_stream_dispatcher_->GenerateStream(
// Blink entry point: the page has cancelled a pending getUserMedia request.
// Generation in the browser process cannot be aborted, so we only drop our
// bookkeeping; OnStreamGenerated() stops devices for unknown requests.
176 void MediaStreamImpl::cancelUserMediaRequest(
177 const blink::WebUserMediaRequest& user_media_request) {
178 DCHECK(CalledOnValidThread());
179 UserMediaRequestInfo* request = FindUserMediaRequestInfo(user_media_request);
181 // We can't abort the stream generation process.
182 // Instead, erase the request. Once the stream is generated we will stop the
183 // stream if the request does not exist.
184 DeleteUserMediaRequestInfo(request);
// Resolves |url| to the WebMediaStream registered with Blink's
// WebMediaStreamRegistry. Returns a null stream if the URL is unknown.
188 blink::WebMediaStream MediaStreamImpl::GetMediaStream(
190 return blink::WebMediaStreamRegistry::lookupMediaStreamDescriptor(url);
// Returns true iff |url| resolves to a registered Blink stream that also has
// a content-layer MediaStream object attached to it.
193 bool MediaStreamImpl::IsMediaStream(const GURL& url) {
194 blink::WebMediaStream web_stream(
195 blink::WebMediaStreamRegistry::lookupMediaStreamDescriptor(url));
197 return (!web_stream.isNull() &&
198 (MediaStream::GetMediaStream(web_stream) != NULL));
// Creates a VideoFrameProvider (RTCVideoRenderer) for the FIRST video track
// of the stream identified by |url|. |error_cb| and |repaint_cb| are handed
// to the renderer. Returns NULL if the URL does not resolve to a valid
// stream or the stream has no usable native video track.
201 scoped_refptr<VideoFrameProvider>
202 MediaStreamImpl::GetVideoFrameProvider(
204 const base::Closure& error_cb,
205 const VideoFrameProvider::RepaintCB& repaint_cb) {
206 DCHECK(CalledOnValidThread());
207 blink::WebMediaStream web_stream(GetMediaStream(url));
209 if (web_stream.isNull() || !web_stream.extraData())
210 return NULL; // This is not a valid stream.
212 DVLOG(1) << "MediaStreamImpl::GetVideoFrameProvider stream:"
213 << base::UTF16ToUTF8(web_stream.id());
215 blink::WebVector<blink::WebMediaStreamTrack> video_tracks;
216 web_stream.videoTracks(video_tracks);
// Only the first video track is rendered; it must have a native track.
217 if (video_tracks.isEmpty() ||
218 !MediaStreamVideoTrack::GetTrack(video_tracks[0])) {
222 return new RTCVideoRenderer(video_tracks[0], error_cb, repaint_cb);
// Creates (or shares) an audio renderer for the stream identified by |url|.
// Local streams get a WebRtcLocalAudioRenderer wired to the capturer;
// remote WebRTC streams share one WebRtcAudioRenderer per audio device and
// return a per-stream proxy. Returns NULL for invalid or audio-less streams.
225 scoped_refptr<MediaStreamAudioRenderer>
226 MediaStreamImpl::GetAudioRenderer(const GURL& url, int render_frame_id) {
227 DCHECK(CalledOnValidThread());
228 blink::WebMediaStream web_stream(GetMediaStream(url));
230 if (web_stream.isNull() || !web_stream.extraData())
231 return NULL; // This is not a valid stream.
233 DVLOG(1) << "MediaStreamImpl::GetAudioRenderer stream:"
234 << base::UTF16ToUTF8(web_stream.id());
236 MediaStream* native_stream = MediaStream::GetMediaStream(web_stream);
238 // TODO(tommi): MediaStreams do not have a 'local or not' concept.
239 // Tracks _might_, but even so, we need to fix the data flow so that
240 // it works the same way for all track implementations, local, remote or what
242 // In this function, we should simply create a renderer object that receives
243 // and mixes audio from all the tracks that belong to the media stream.
244 // We need to remove the |is_local| property from MediaStreamExtraData since
245 // this concept is peerconnection specific (is a previously recorded stream
246 // local or remote?).
247 if (native_stream->is_local()) {
248 // Create the local audio renderer if the stream contains audio tracks.
249 blink::WebVector<blink::WebMediaStreamTrack> audio_tracks;
250 web_stream.audioTracks(audio_tracks);
251 if (audio_tracks.isEmpty())
254 // TODO(xians): Add support for the case where the media stream contains
255 // multiple audio tracks.
256 return CreateLocalAudioRenderer(audio_tracks[0], render_frame_id);
// Remote stream path from here on.
259 webrtc::MediaStreamInterface* stream =
260 MediaStream::GetAdapter(web_stream);
261 if (stream->GetAudioTracks().empty())
264 // This is a remote WebRTC media stream.
265 WebRtcAudioDeviceImpl* audio_device =
266 dependency_factory_->GetWebRtcAudioDevice();
268 // Share the existing renderer if any, otherwise create a new one.
269 scoped_refptr<WebRtcAudioRenderer> renderer(audio_device->renderer());
270 if (!renderer.get()) {
271 renderer = CreateRemoteAudioRenderer(stream, render_frame_id);
// A freshly created renderer must be registered with the audio device;
// if registration fails, fall through and return NULL below.
273 if (renderer.get() && !audio_device->SetAudioRenderer(renderer.get()))
// Each caller gets its own proxy onto the shared renderer.
277 return renderer.get() ?
278 renderer->CreateSharedAudioRendererProxy(stream) : NULL;
281 // Callback from MediaStreamDispatcher.
282 // The requested stream have been generated by the MediaStreamDispatcher.
// Builds the Blink-visible WebMediaStream (sources + tracks) for the devices
// the browser granted. If the originating request is gone (cancelled /
// frame reloaded), stops any devices not shared with other streams instead.
283 void MediaStreamImpl::OnStreamGenerated(
285 const std::string& label,
286 const StreamDeviceInfoArray& audio_array,
287 const StreamDeviceInfoArray& video_array) {
288 DCHECK(CalledOnValidThread());
289 DVLOG(1) << "MediaStreamImpl::OnStreamGenerated stream:" << label;
291 UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id);
// Request not found: release the granted devices, unless another local
// source is already using them.
293 // This can happen if the request is canceled or the frame reloads while
294 // MediaStreamDispatcher is processing the request.
295 // Only stop the device if the device is not used in another MediaStream.
296 for (StreamDeviceInfoArray::const_iterator device_it = audio_array.begin();
297 device_it != audio_array.end(); ++device_it) {
298 if (!FindLocalSource(*device_it))
299 media_stream_dispatcher_->StopStreamDevice(*device_it);
302 for (StreamDeviceInfoArray::const_iterator device_it = video_array.begin();
303 device_it != video_array.end(); ++device_it) {
304 if (!FindLocalSource(*device_it))
305 media_stream_dispatcher_->StopStreamDevice(*device_it);
308 DVLOG(1) << "Request ID not found";
// Mark the request as generated so FrameWillClose() knows there is no
// outstanding GenerateStream call to cancel.
311 request_info->generated = true;
313 // WebUserMediaRequest don't have an implementation in unit tests.
314 // Therefore we need to check for isNull here and initialize the
316 blink::WebUserMediaRequest* request = &(request_info->request);
317 blink::WebMediaConstraints audio_constraints;
318 blink::WebMediaConstraints video_constraints;
319 if (request->isNull()) {
320 audio_constraints.initialize();
321 video_constraints.initialize();
323 audio_constraints = request->audioConstraints();
324 video_constraints = request->videoConstraints();
// Create one Blink track per granted device, audio then video.
327 blink::WebVector<blink::WebMediaStreamTrack> audio_track_vector(
329 CreateAudioTracks(audio_array, audio_constraints, &audio_track_vector,
332 blink::WebVector<blink::WebMediaStreamTrack> video_track_vector(
334 CreateVideoTracks(video_array, video_constraints, &video_track_vector,
// Assemble the WebMediaStream the page will eventually receive.
337 blink::WebString webkit_id = base::UTF8ToUTF16(label);
338 blink::WebMediaStream* web_stream = &(request_info->web_stream);
340 web_stream->initialize(webkit_id, audio_track_vector,
342 web_stream->setExtraData(
346 // Wait for the tracks to be started successfully or to fail.
347 request_info->CallbackOnTracksStarted(
348 base::Bind(&MediaStreamImpl::OnCreateNativeTracksCompleted, AsWeakPtr()));
351 // Callback from MediaStreamDispatcher.
352 // The requested stream failed to be generated.
// Notifies Blink of the failure (mapped through GetUserMediaRequestFailed)
// and drops the request bookkeeping. A missing request id is benign — see
// the comment below.
353 void MediaStreamImpl::OnStreamGenerationFailed(
355 content::MediaStreamRequestResult result) {
356 DCHECK(CalledOnValidThread());
357 DVLOG(1) << "MediaStreamImpl::OnStreamGenerationFailed("
358 << request_id << ")";
359 UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id);
361 // This can happen if the request is canceled or the frame reloads while
362 // MediaStreamDispatcher is processing the request.
363 DVLOG(1) << "Request ID not found";
367 GetUserMediaRequestFailed(&request_info->request, result);
368 DeleteUserMediaRequestInfo(request_info);
371 // Callback from MediaStreamDispatcher.
372 // The browser process has stopped a device used by a MediaStream.
// Stops the matching local source (without notifying the dispatcher — the
// browser already knows) and removes it from |local_sources_|.
373 void MediaStreamImpl::OnDeviceStopped(
374 const std::string& label,
375 const StreamDeviceInfo& device_info) {
376 DCHECK(CalledOnValidThread());
377 DVLOG(1) << "MediaStreamImpl::OnDeviceStopped("
378 << "{device_id = " << device_info.device.id << "})";
380 const blink::WebMediaStreamSource* source_ptr = FindLocalSource(device_info);
// No matching source is a legitimate race — see comment below.
382 // This happens if the same device is used in several guM requests or
383 // if a user happen stop a track from JS at the same time
384 // as the underlying media device is unplugged from the system.
387 // By creating |source| it is guaranteed that the blink::WebMediaStreamSource
388 // object is valid during the cleanup.
389 blink::WebMediaStreamSource source(*source_ptr);
390 StopLocalSource(source, false);
// Erase the stopped source from the local-sources bookkeeping.
392 for (LocalStreamSources::iterator device_it = local_sources_.begin();
393 device_it != local_sources_.end(); ++device_it) {
394 if (device_it->source.id() == source.id()) {
395 local_sources_.erase(device_it);
// Initializes |webkit_source| for |device|, reusing an existing local source
// for the same device when possible. New sources get type-specific extra
// data (video capturer source or MediaStreamAudioSource) and are appended to
// |local_sources_| so they can be found/stopped later.
401 void MediaStreamImpl::InitializeSourceObject(
402 const StreamDeviceInfo& device,
403 blink::WebMediaStreamSource::Type type,
404 const blink::WebMediaConstraints& constraints,
405 blink::WebFrame* frame,
406 blink::WebMediaStreamSource* webkit_source) {
407 const blink::WebMediaStreamSource* existing_source =
408 FindLocalSource(device);
409 if (existing_source) {
410 *webkit_source = *existing_source;
411 DVLOG(1) << "Source already exist. Reusing source with id "
412 << webkit_source->id().utf8();
// The Blink source id is the device id; the display name is the device name.
416 webkit_source->initialize(
417 base::UTF8ToUTF16(device.device.id),
419 base::UTF8ToUTF16(device.device.name));
421 DVLOG(1) << "Initialize source object :"
422 << "id = " << webkit_source->id().utf8()
423 << ", name = " << webkit_source->name().utf8();
425 if (type == blink::WebMediaStreamSource::TypeVideo) {
// Video: attach a capturer-backed source; the stop callback lets us clean
// up |local_sources_| when the source stops itself.
426 webkit_source->setExtraData(
429 base::Bind(&MediaStreamImpl::OnLocalSourceStopped, AsWeakPtr())));
431 DCHECK_EQ(blink::WebMediaStreamSource::TypeAudio, type);
432 MediaStreamAudioSource* audio_source(
433 new MediaStreamAudioSource(
434 RenderViewObserver::routing_id(),
436 base::Bind(&MediaStreamImpl::OnLocalSourceStopped, AsWeakPtr()),
437 dependency_factory_));
438 webkit_source->setExtraData(audio_source);
// Remember the source together with its owning frame (used by FrameWillClose).
440 local_sources_.push_back(LocalStreamSource(frame, *webkit_source));
// Factory for the native video source backing a granted capture device.
// Virtual-style seam: overridden/mocked in tests via this indirection —
// NOTE(review): confirm declaration in the header.
443 MediaStreamVideoSource* MediaStreamImpl::CreateVideoSource(
444 const StreamDeviceInfo& device,
445 const MediaStreamSource::SourceStoppedCallback& stop_callback) {
446 return new content::MediaStreamVideoCapturerSource(
449 new VideoCapturerDelegate(device));
// Creates and starts one Blink video track per granted device. |webkit_tracks|
// must already be sized to match |devices|; each slot is filled with a track
// whose source is created (or reused) by InitializeSourceObject.
452 void MediaStreamImpl::CreateVideoTracks(
453 const StreamDeviceInfoArray& devices,
454 const blink::WebMediaConstraints& constraints,
455 blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks,
456 UserMediaRequestInfo* request) {
457 DCHECK_EQ(devices.size(), webkit_tracks->size());
459 for (size_t i = 0; i < devices.size(); ++i) {
460 blink::WebMediaStreamSource webkit_source;
461 InitializeSourceObject(devices[i],
462 blink::WebMediaStreamSource::TypeVideo,
466 (*webkit_tracks)[i] =
467 request->CreateAndStartVideoTrack(webkit_source, constraints,
468 dependency_factory_);
// Creates and starts one Blink audio track per granted device. Unless the
// request opted into automatic output-device selection
// (kMediaStreamRenderToAssociatedSink), any matched output-device info the
// browser attached to the devices is cleared before the sources are created.
472 void MediaStreamImpl::CreateAudioTracks(
473 const StreamDeviceInfoArray& devices,
474 const blink::WebMediaConstraints& constraints,
475 blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks,
476 UserMediaRequestInfo* request) {
477 DCHECK_EQ(devices.size(), webkit_tracks->size());
479 // Log the device names for this request.
480 for (StreamDeviceInfoArray::const_iterator it = devices.begin();
481 it != devices.end(); ++it) {
482 WebRtcLogMessage(base::StringPrintf(
483 "Generated media stream for request id %d contains audio device name"
486 it->device.name.c_str()));
// Work on a copy so the caller's array stays untouched.
489 StreamDeviceInfoArray overridden_audio_array = devices;
490 if (!request->enable_automatic_output_device_selection) {
491 // If the GetUserMedia request did not explicitly set the constraint
492 // kMediaStreamRenderToAssociatedSink, the output device parameters must
494 for (StreamDeviceInfoArray::iterator it = overridden_audio_array.begin();
495 it != overridden_audio_array.end(); ++it) {
496 it->device.matched_output_device_id = "";
497 it->device.matched_output = MediaStreamDevice::AudioDeviceParameters();
501 for (size_t i = 0; i < overridden_audio_array.size(); ++i) {
502 blink::WebMediaStreamSource webkit_source;
503 InitializeSourceObject(overridden_audio_array[i],
504 blink::WebMediaStreamSource::TypeAudio,
508 (*webkit_tracks)[i].initialize(webkit_source);
509 request->StartAudioTrack((*webkit_tracks)[i], constraints);
// Invoked (via CallbackOnTracksStarted) once every native track of a request
// has either started or failed. Resolves the Blink promise accordingly and
// deletes the request bookkeeping.
513 void MediaStreamImpl::OnCreateNativeTracksCompleted(
514 UserMediaRequestInfo* request,
515 content::MediaStreamRequestResult result) {
516 DVLOG(1) << "MediaStreamImpl::OnCreateNativeTracksComplete("
517 << "{request_id = " << request->request_id << "} "
518 << "{result = " << result << "})";
519 if (result == content::MEDIA_DEVICE_OK)
520 GetUserMediaRequestSucceeded(request->web_stream, &request->request);
522 GetUserMediaRequestFailed(&request->request, result);
// |request| is owned by |user_media_requests_|; this frees it.
524 DeleteUserMediaRequestInfo(request);
// MediaStreamDispatcher callbacks for device enumeration / open requests.
// MediaStreamImpl never issues those requests, so these handlers only log
// (the visible bodies contain no other work).
527 void MediaStreamImpl::OnDevicesEnumerated(
529 const StreamDeviceInfoArray& device_array) {
530 DVLOG(1) << "MediaStreamImpl::OnDevicesEnumerated("
531 << request_id << ")";
535 void MediaStreamImpl::OnDeviceOpened(
537 const std::string& label,
538 const StreamDeviceInfo& video_device) {
539 DVLOG(1) << "MediaStreamImpl::OnDeviceOpened("
540 << request_id << ", " << label << ")";
544 void MediaStreamImpl::OnDeviceOpenFailed(int request_id) {
545 DVLOG(1) << "MediaStreamImpl::VideoDeviceOpenFailed("
546 << request_id << ")";
// Delivers the fully-initialized |stream| to Blink, fulfilling the page's
// getUserMedia promise.
550 void MediaStreamImpl::GetUserMediaRequestSucceeded(
551 const blink::WebMediaStream& stream,
552 blink::WebUserMediaRequest* request_info) {
553 DVLOG(1) << "MediaStreamImpl::GetUserMediaRequestSucceeded";
554 request_info->requestSucceeded(stream);
// Maps a content::MediaStreamRequestResult onto the corresponding Blink
// failure callback. Permission denial uses requestDenied(); most other
// results map to a UA-specific error name; unhandled values fall back to the
// generic requestFailed() at the bottom. MEDIA_DEVICE_OK must never reach
// this function.
557 void MediaStreamImpl::GetUserMediaRequestFailed(
558 blink::WebUserMediaRequest* request_info,
559 content::MediaStreamRequestResult result) {
561 case MEDIA_DEVICE_OK:
564 case MEDIA_DEVICE_PERMISSION_DENIED:
565 request_info->requestDenied();
567 case MEDIA_DEVICE_PERMISSION_DISMISSED:
568 request_info->requestFailedUASpecific("PermissionDismissedError");
570 case MEDIA_DEVICE_INVALID_STATE:
571 request_info->requestFailedUASpecific("InvalidStateError");
573 case MEDIA_DEVICE_NO_HARDWARE:
574 request_info->requestFailedUASpecific("DevicesNotFoundError");
576 case MEDIA_DEVICE_INVALID_SECURITY_ORIGIN:
577 request_info->requestFailedUASpecific("InvalidSecurityOriginError");
579 case MEDIA_DEVICE_TAB_CAPTURE_FAILURE:
580 request_info->requestFailedUASpecific("TabCaptureError");
582 case MEDIA_DEVICE_SCREEN_CAPTURE_FAILURE:
583 request_info->requestFailedUASpecific("ScreenCaptureError");
585 case MEDIA_DEVICE_CAPTURE_FAILURE:
586 request_info->requestFailedUASpecific("DeviceCaptureError");
588 case MEDIA_DEVICE_TRACK_START_FAILURE:
589 request_info->requestFailedUASpecific("TrackStartError");
592 request_info->requestFailed();
// Looks up an active local source matching |device| by (device id, device
// type, session id). Returns a pointer into |local_sources_| — valid only
// until that container is mutated — or NULL when no match exists.
597 const blink::WebMediaStreamSource* MediaStreamImpl::FindLocalSource(
598 const StreamDeviceInfo& device) const {
599 for (LocalStreamSources::const_iterator it = local_sources_.begin();
600 it != local_sources_.end(); ++it) {
601 MediaStreamSource* source =
602 static_cast<MediaStreamSource*>(it->source.extraData());
603 const StreamDeviceInfo& active_device = source->device_info();
604 if (active_device.device.id == device.device.id &&
605 active_device.device.type == device.device.type &&
606 active_device.session_id == device.session_id) {
// Linear lookup of a pending request by dispatcher request id.
// Returns NULL when no request matches (e.g. it was cancelled).
613 MediaStreamImpl::UserMediaRequestInfo*
614 MediaStreamImpl::FindUserMediaRequestInfo(int request_id) {
615 UserMediaRequests::iterator it = user_media_requests_.begin();
616 for (; it != user_media_requests_.end(); ++it) {
617 if ((*it)->request_id == request_id)
// Overload: same lookup, keyed on the Blink WebUserMediaRequest instead.
623 MediaStreamImpl::UserMediaRequestInfo*
624 MediaStreamImpl::FindUserMediaRequestInfo(
625 const blink::WebUserMediaRequest& request) {
626 UserMediaRequests::iterator it = user_media_requests_.begin();
627 for (; it != user_media_requests_.end(); ++it) {
628 if ((*it)->request == request)
// Removes |request| from |user_media_requests_|. Erasing from the owning
// container also frees the UserMediaRequestInfo, so |request| must not be
// used after this call.
634 void MediaStreamImpl::DeleteUserMediaRequestInfo(
635 UserMediaRequestInfo* request) {
636 UserMediaRequests::iterator it = user_media_requests_.begin();
637 for (; it != user_media_requests_.end(); ++it) {
638 if ((*it) == request) {
639 user_media_requests_.erase(it);
// RenderViewObserver override: a detached frame is cleaned up exactly like
// a closing frame.
646 void MediaStreamImpl::FrameDetached(blink::WebFrame* frame) {
647 // Do same thing as FrameWillClose.
648 FrameWillClose(frame);
// RenderViewObserver override: drops every pending getUserMedia request and
// stops every local source that belongs to the closing |frame|.
651 void MediaStreamImpl::FrameWillClose(blink::WebFrame* frame) {
652 // Loop through all UserMediaRequests and find the requests that belong to the
653 // frame that is being closed.
654 UserMediaRequests::iterator request_it = user_media_requests_.begin();
655 while (request_it != user_media_requests_.end()) {
656 if ((*request_it)->frame == frame) {
657 DVLOG(1) << "MediaStreamImpl::FrameWillClose: "
658 << "Cancel user media request " << (*request_it)->request_id;
659 // If the request is not generated, it means that a request
660 // has been sent to the MediaStreamDispatcher to generate a stream
661 // but MediaStreamDispatcher has not yet responded and we need to cancel
663 if (!(*request_it)->generated) {
664 media_stream_dispatcher_->CancelGenerateStream(
665 (*request_it)->request_id, AsWeakPtr());
// erase() returns the next valid iterator, keeping the loop well-defined.
667 request_it = user_media_requests_.erase(request_it);
673 // Loop through all current local sources and stop the sources that were
674 // created by the frame that will be closed.
675 LocalStreamSources::iterator sources_it = local_sources_.begin();
676 while (sources_it != local_sources_.end()) {
677 if (sources_it->frame == frame) {
// notify_dispatcher == true: tell the browser to release the device too.
678 StopLocalSource(sources_it->source, true);
679 sources_it = local_sources_.erase(sources_it);
// Stop callback handed to local sources (see InitializeSourceObject). The
// source stopped itself, so we only drop our bookkeeping entry and tell the
// dispatcher to release the underlying device.
686 void MediaStreamImpl::OnLocalSourceStopped(
687 const blink::WebMediaStreamSource& source) {
688 DCHECK(CalledOnValidThread());
689 DVLOG(1) << "MediaStreamImpl::OnLocalSourceStopped";
691 bool device_found = false;
692 for (LocalStreamSources::iterator device_it = local_sources_.begin();
693 device_it != local_sources_.end(); ++device_it) {
694 if (device_it->source.id() == source.id()) {
696 local_sources_.erase(device_it);
// Notify the browser process that the device is no longer in use here.
702 MediaStreamSource* source_impl =
703 static_cast<MediaStreamSource*> (source.extraData());
704 media_stream_dispatcher_->StopStreamDevice(source_impl->device_info());
// Stops a local source's native implementation. When |notify_dispatcher| is
// true the browser process is told to release the device as well (false is
// used when the browser initiated the stop, see OnDeviceStopped). The stop
// callback is reset first so StopSource() does not re-enter
// OnLocalSourceStopped().
707 void MediaStreamImpl::StopLocalSource(
708 const blink::WebMediaStreamSource& source,
709 bool notify_dispatcher) {
710 MediaStreamSource* source_impl =
711 static_cast<MediaStreamSource*> (source.extraData());
712 DVLOG(1) << "MediaStreamImpl::StopLocalSource("
713 << "{device_id = " << source_impl->device_info().device.id << "})";
715 if (notify_dispatcher)
716 media_stream_dispatcher_->StopStreamDevice(source_impl->device_info());
718 source_impl->ResetSourceStoppedCallback();
719 source_impl->StopSource();
// Creates the shared WebRtcAudioRenderer for a remote stream. Output device
// parameters come from the authorized input device when available, otherwise
// from the default output device. Streams without audio tracks yield no
// renderer.
722 scoped_refptr<WebRtcAudioRenderer> MediaStreamImpl::CreateRemoteAudioRenderer(
723 webrtc::MediaStreamInterface* stream,
724 int render_frame_id) {
725 if (stream->GetAudioTracks().empty())
728 DVLOG(1) << "MediaStreamImpl::CreateRemoteAudioRenderer label:"
731 // TODO(tommi): Change the default value of session_id to be
732 // StreamDeviceInfo::kNoId. Also update AudioOutputDevice etc.
733 int session_id = 0, sample_rate = 0, buffer_size = 0;
// Fall back to default output parameters when no authorized device exists.
734 if (!GetAuthorizedDeviceInfoForAudioRenderer(&session_id,
737 GetDefaultOutputDeviceParams(&sample_rate, &buffer_size);
740 return new WebRtcAudioRenderer(
741 stream, RenderViewObserver::routing_id(), render_frame_id, session_id,
742 sample_rate, buffer_size);
// Creates a WebRtcLocalAudioRenderer for loopback rendering of a locally
// captured audio track. Output parameters are resolved the same way as for
// the remote renderer: authorized device first, default device as fallback.
745 scoped_refptr<WebRtcLocalAudioRenderer>
746 MediaStreamImpl::CreateLocalAudioRenderer(
747 const blink::WebMediaStreamTrack& audio_track,
748 int render_frame_id) {
749 DVLOG(1) << "MediaStreamImpl::CreateLocalAudioRenderer";
751 int session_id = 0, sample_rate = 0, buffer_size = 0;
752 if (!GetAuthorizedDeviceInfoForAudioRenderer(&session_id,
755 GetDefaultOutputDeviceParams(&sample_rate, &buffer_size);
758 // Create a new WebRtcLocalAudioRenderer instance and connect it to the
759 // existing WebRtcAudioCapturer so that the renderer can use it as source.
760 return new WebRtcLocalAudioRenderer(
762 RenderViewObserver::routing_id(),
// Queries the WebRTC audio device for the output parameters associated with
// the authorized input device (used for input/output device matching).
// Returns false when no such authorization exists; callers then fall back to
// GetDefaultOutputDeviceParams().
768 bool MediaStreamImpl::GetAuthorizedDeviceInfoForAudioRenderer(
770 int* output_sample_rate,
771 int* output_frames_per_buffer) {
772 DCHECK(CalledOnValidThread());
773 WebRtcAudioDeviceImpl* audio_device =
774 dependency_factory_->GetWebRtcAudioDevice();
778 return audio_device->GetAuthorizedDeviceInfoForAudioRenderer(
779 session_id, output_sample_rate, output_frames_per_buffer);
// Bookkeeping record for one in-flight getUserMedia request: the dispatcher
// request id, the requesting frame, the Blink request object, and whether
// automatic output-device selection was requested.
782 MediaStreamImpl::UserMediaRequestInfo::UserMediaRequestInfo(
784 blink::WebFrame* frame,
785 const blink::WebUserMediaRequest& request,
786 bool enable_automatic_output_device_selection)
787 : request_id(request_id),
789 enable_automatic_output_device_selection(
790 enable_automatic_output_device_selection),
793 request_failed_(false) {
796 MediaStreamImpl::UserMediaRequestInfo::~UserMediaRequestInfo() {
797 DVLOG(1) << "~UserMediaRequestInfo";
// Connects an audio |track| to its native MediaStreamAudioSource and starts
// it. The source is recorded in |sources_waiting_for_callback_| so that
// OnTrackStarted can account for it when deciding whether all tracks of the
// request have finished starting.
800 void MediaStreamImpl::UserMediaRequestInfo::StartAudioTrack(
801 const blink::WebMediaStreamTrack& track,
802 const blink::WebMediaConstraints& constraints) {
803 DCHECK(track.source().type() == blink::WebMediaStreamSource::TypeAudio);
804 MediaStreamAudioSource* native_source =
805 static_cast <MediaStreamAudioSource*>(track.source().extraData());
806 DCHECK(native_source);
808 sources_.push_back(track.source());
809 sources_waiting_for_callback_.push_back(native_source);
810 native_source->AddTrack(
811 track, constraints, base::Bind(
812 &MediaStreamImpl::UserMediaRequestInfo::OnTrackStarted,
// Creates a Blink video track backed by |source|'s native
// MediaStreamVideoSource and starts it. As with audio, the native source is
// queued in |sources_waiting_for_callback_| until OnTrackStarted fires.
816 blink::WebMediaStreamTrack
817 MediaStreamImpl::UserMediaRequestInfo::CreateAndStartVideoTrack(
818 const blink::WebMediaStreamSource& source,
819 const blink::WebMediaConstraints& constraints,
820 MediaStreamDependencyFactory* factory) {
821 DCHECK(source.type() == blink::WebMediaStreamSource::TypeVideo);
822 MediaStreamVideoSource* native_source =
823 MediaStreamVideoSource::GetVideoSource(source);
824 DCHECK(native_source);
825 sources_.push_back(source);
826 sources_waiting_for_callback_.push_back(native_source);
827 return MediaStreamVideoTrack::CreateVideoTrack(
828 native_source, constraints, base::Bind(
829 &MediaStreamImpl::UserMediaRequestInfo::OnTrackStarted,
// Registers the callback to run once every track of this request has
// started or failed. Must only be set once. CheckAllTracksStarted() fires
// the callback immediately if nothing is still pending.
834 void MediaStreamImpl::UserMediaRequestInfo::CallbackOnTracksStarted(
835 const ResourcesReady& callback) {
836 DCHECK(ready_callback_.is_null());
837 ready_callback_ = callback;
838 CheckAllTracksStarted();
// Per-track completion callback. Removes |source| from the waiting list,
// latches |request_failed_| if any track failed (the whole request fails if
// one track fails), then re-checks whether all tracks are accounted for.
841 void MediaStreamImpl::UserMediaRequestInfo::OnTrackStarted(
842 MediaStreamSource* source, bool success) {
843 DVLOG(1) << "OnTrackStarted result " << success;
844 std::vector<MediaStreamSource*>::iterator it =
845 std::find(sources_waiting_for_callback_.begin(),
846 sources_waiting_for_callback_.end(),
848 DCHECK(it != sources_waiting_for_callback_.end());
849 sources_waiting_for_callback_.erase(it);
850 // All tracks must be started successfully. Otherwise the request is a
853 request_failed_ = true;
854 CheckAllTracksStarted();
// Fires |ready_callback_| once a callback is registered and no source is
// still waiting, reporting TRACK_START_FAILURE if any track failed.
857 void MediaStreamImpl::UserMediaRequestInfo::CheckAllTracksStarted() {
858 if (!ready_callback_.is_null() && sources_waiting_for_callback_.empty()) {
861 request_failed_ ? MEDIA_DEVICE_TRACK_START_FAILURE : MEDIA_DEVICE_OK);
// Returns true if |source| (matched by Blink source id) is one of the
// sources this request created or reused.
865 bool MediaStreamImpl::UserMediaRequestInfo::IsSourceUsed(
866 const blink::WebMediaStreamSource& source) const {
867 for (std::vector<blink::WebMediaStreamSource>::const_iterator source_it =
869 source_it != sources_.end(); ++source_it) {
870 if (source_it->id() == source.id())
// Removes the entry matching |source| (by Blink source id) from this
// request's source list.
876 void MediaStreamImpl::UserMediaRequestInfo::RemoveSource(
877 const blink::WebMediaStreamSource& source) {
878 for (std::vector<blink::WebMediaStreamSource>::iterator it =
880 it != sources_.end(); ++it) {
881 if (source.id() == it->id()) {
888 }  // namespace content