Upstream version 5.34.104.0
[platform/framework/web/crosswalk.git] / src/content/renderer/media/media_stream_impl.cc
1 // Copyright (c) 2012 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "content/renderer/media/media_stream_impl.h"
6
7 #include <utility>
8
9 #include "base/logging.h"
10 #include "base/strings/string_util.h"
11 #include "base/strings/stringprintf.h"
12 #include "base/strings/utf_string_conversions.h"
13 #include "content/renderer/media/media_stream_audio_renderer.h"
14 #include "content/renderer/media/media_stream_audio_source.h"
15 #include "content/renderer/media/media_stream_dependency_factory.h"
16 #include "content/renderer/media/media_stream_dispatcher.h"
17 #include "content/renderer/media/media_stream_extra_data.h"
18 #include "content/renderer/media/media_stream_video_capturer_source.h"
19 #include "content/renderer/media/peer_connection_tracker.h"
20 #include "content/renderer/media/rtc_video_renderer.h"
21 #include "content/renderer/media/webrtc_audio_capturer.h"
22 #include "content/renderer/media/webrtc_audio_renderer.h"
23 #include "content/renderer/media/webrtc_local_audio_renderer.h"
24 #include "content/renderer/media/webrtc_logging.h"
25 #include "content/renderer/media/webrtc_uma_histograms.h"
26 #include "content/renderer/render_thread_impl.h"
27 #include "media/base/audio_hardware_config.h"
28 #include "third_party/WebKit/public/platform/WebMediaConstraints.h"
29 #include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"
30 #include "third_party/WebKit/public/web/WebDocument.h"
31 #include "third_party/WebKit/public/web/WebFrame.h"
32 #include "third_party/WebKit/public/web/WebMediaStreamRegistry.h"
33
34 namespace content {
35 namespace {
36
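// Copies the mandatory and optional constraints in |constraints| into the
// corresponding StreamOptions::Constraints lists.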
37 void CopyStreamConstraints(const blink::WebMediaConstraints& constraints,
38                            StreamOptions::Constraints* mandatory,
39                            StreamOptions::Constraints* optional) {
40   blink::WebVector<blink::WebMediaConstraint> mandatory_constraints;
41   constraints.getMandatoryConstraints(mandatory_constraints);
42   for (size_t i = 0; i < mandatory_constraints.size(); i++) {
43     mandatory->push_back(StreamOptions::Constraint(
44         mandatory_constraints[i].m_name.utf8(),
45         mandatory_constraints[i].m_value.utf8()));
46   }
47
48   blink::WebVector<blink::WebMediaConstraint> optional_constraints;
49   constraints.getOptionalConstraints(optional_constraints);
50   for (size_t i = 0; i < optional_constraints.size(); i++) {
51     optional->push_back(StreamOptions::Constraint(
52         optional_constraints[i].m_name.utf8(),
53         optional_constraints[i].m_value.utf8()));
54   }
55 }
56
57 static int g_next_request_id = 0;
58
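// Returns the native webrtc::MediaStreamInterface behind |web_stream|, or
// NULL if the stream has no extra data attached.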
59 webrtc::MediaStreamInterface* GetNativeMediaStream(
60     const blink::WebMediaStream& web_stream) {
61   content::MediaStreamExtraData* extra_data =
62       static_cast<content::MediaStreamExtraData*>(web_stream.extraData());
63   if (!extra_data)
64     return NULL;
65   return extra_data->stream().get();
66 }
67
68 void GetDefaultOutputDeviceParams(
69     int* output_sample_rate, int* output_buffer_size) {
70   // Fetch the default audio output hardware config.
71   media::AudioHardwareConfig* hardware_config =
72       RenderThreadImpl::current()->GetAudioHardwareConfig();
73   *output_sample_rate = hardware_config->GetOutputSampleRate();
74   *output_buffer_size = hardware_config->GetOutputBufferSize();
75 }
76
77 }  // namespace
78
79 MediaStreamImpl::MediaStreamImpl(
80     RenderView* render_view,
81     MediaStreamDispatcher* media_stream_dispatcher,
82     MediaStreamDependencyFactory* dependency_factory)
83     : RenderViewObserver(render_view),
84       dependency_factory_(dependency_factory),
85       media_stream_dispatcher_(media_stream_dispatcher) {
86 }
87
88 MediaStreamImpl::~MediaStreamImpl() {
89 }
90
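// Entry point for a getUserMedia() call from JS, e.g.
//   navigator.webkitGetUserMedia({audio: true, video: true}, onSuccess, onError);
// Builds StreamOptions from the request's constraints and asks the
// MediaStreamDispatcher to generate a stream. The result is delivered through
// OnStreamGenerated() or OnStreamGenerationFailed().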
91 void MediaStreamImpl::requestUserMedia(
92     const blink::WebUserMediaRequest& user_media_request) {
93   // Save histogram data so we can see how much GetUserMedia is used.
94   // The histogram counts the number of calls to the JS API
95   // webGetUserMedia.
96   UpdateWebRTCMethodCount(WEBKIT_GET_USER_MEDIA);
97   DCHECK(CalledOnValidThread());
98
99   if (RenderThreadImpl::current()) {
100     RenderThreadImpl::current()->peer_connection_tracker()->TrackGetUserMedia(
101         user_media_request);
102   }
103
104   int request_id = g_next_request_id++;
105   StreamOptions options;
106   blink::WebFrame* frame = NULL;
107   GURL security_origin;
108   bool enable_automatic_output_device_selection = false;
109
110   // |user_media_request| can't be mocked, so in order to test at all we check
111   // whether it isNull().
112   if (user_media_request.isNull()) {
113     // We are in a test.
114     options.audio_requested = true;
115     options.video_requested = true;
116   } else {
117     if (user_media_request.audio()) {
118       options.audio_requested = true;
119       CopyStreamConstraints(user_media_request.audioConstraints(),
120                             &options.mandatory_audio,
121                             &options.optional_audio);
122
123       // Check if this input device should be used to select a matching output
124       // device for audio rendering.
125       std::string enable;
126       if (options.GetFirstAudioConstraintByName(
127               kMediaStreamRenderToAssociatedSink, &enable, NULL) &&
128           LowerCaseEqualsASCII(enable, "true")) {
129         enable_automatic_output_device_selection = true;
130       }
131     }
132     if (user_media_request.video()) {
133       options.video_requested = true;
134       CopyStreamConstraints(user_media_request.videoConstraints(),
135                             &options.mandatory_video,
136                             &options.optional_video);
137     }
138
139     security_origin = GURL(user_media_request.securityOrigin().toString());
140     // Get the WebFrame that requested a MediaStream.
141     // The frame is needed to tell the MediaStreamDispatcher when a stream goes
142     // out of scope.
143     frame = user_media_request.ownerDocument().frame();
144     DCHECK(frame);
145   }
146
147   DVLOG(1) << "MediaStreamImpl::requestUserMedia(" << request_id << ", [ "
148            << "audio=" << (options.audio_requested)
149            << " select associated sink: "
150            << enable_automatic_output_device_selection
151            << ", video=" << (options.video_requested) << " ], "
152            << security_origin.spec() << ")";
153
154   std::string audio_device_id;
155   bool mandatory_audio;
156   options.GetFirstAudioConstraintByName(kMediaStreamSourceInfoId,
157                                         &audio_device_id, &mandatory_audio);
158   std::string video_device_id;
159   bool mandatory_video;
160   options.GetFirstVideoConstraintByName(kMediaStreamSourceInfoId,
161                                         &video_device_id, &mandatory_video);
162
163   WebRtcLogMessage(base::StringPrintf(
164       "MSI::requestUserMedia. request_id=%d"
165       ", audio source id=%s mandatory= %s "
166       ", video source id=%s mandatory= %s",
167       request_id,
168       audio_device_id.c_str(),
169       mandatory_audio ? "true":"false",
170       video_device_id.c_str(),
171       mandatory_video ? "true":"false"));
172
173   user_media_requests_.push_back(
174       new UserMediaRequestInfo(request_id, frame, user_media_request,
175           enable_automatic_output_device_selection));
176
177   media_stream_dispatcher_->GenerateStream(
178       request_id,
179       AsWeakPtr(),
180       options,
181       security_origin);
182 }
183
184 void MediaStreamImpl::cancelUserMediaRequest(
185     const blink::WebUserMediaRequest& user_media_request) {
186   DCHECK(CalledOnValidThread());
187   UserMediaRequestInfo* request = FindUserMediaRequestInfo(user_media_request);
188   if (request) {
189     // We can't abort the stream generation process.
190     // Instead, erase the request. Once the stream is generated we will stop it
191     // if no corresponding request exists.
192     DeleteUserMediaRequestInfo(request);
193   }
194 }
195
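// Returns the WebMediaStream registered for |url| in Blink's media stream
// registry.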
196 blink::WebMediaStream MediaStreamImpl::GetMediaStream(
197     const GURL& url) {
198   return blink::WebMediaStreamRegistry::lookupMediaStreamDescriptor(url);
199 }
200
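// Returns true if |url| resolves to a native media stream with at least one
// audio or video track.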
201 bool MediaStreamImpl::IsMediaStream(const GURL& url) {
202   blink::WebMediaStream web_stream(
203       blink::WebMediaStreamRegistry::lookupMediaStreamDescriptor(url));
204
205   if (web_stream.isNull() || !web_stream.extraData())
206     return false;  // This is not a valid stream.
207
208   webrtc::MediaStreamInterface* stream = GetNativeMediaStream(web_stream);
209   return (stream &&
210       (!stream->GetVideoTracks().empty() || !stream->GetAudioTracks().empty()));
211 }
212
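// Creates a VideoFrameProvider (RTCVideoRenderer) for the first video track
// of the stream identified by |url|, or returns NULL if the stream is invalid
// or has no video tracks.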
213 scoped_refptr<VideoFrameProvider>
214 MediaStreamImpl::GetVideoFrameProvider(
215     const GURL& url,
216     const base::Closure& error_cb,
217     const VideoFrameProvider::RepaintCB& repaint_cb) {
218   DCHECK(CalledOnValidThread());
219   blink::WebMediaStream web_stream(GetMediaStream(url));
220
221   if (web_stream.isNull() || !web_stream.extraData())
222     return NULL;  // This is not a valid stream.
223
224   DVLOG(1) << "MediaStreamImpl::GetVideoFrameProvider stream:"
225            << base::UTF16ToUTF8(web_stream.id());
226
227   blink::WebVector<blink::WebMediaStreamTrack> video_tracks;
228   web_stream.videoTracks(video_tracks);
229   if (video_tracks.isEmpty())
230     return NULL;
231
232   return new RTCVideoRenderer(video_tracks[0], error_cb, repaint_cb);
233 }
234
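// Creates an audio renderer for the stream identified by |url|. A local
// stream gets a WebRtcLocalAudioRenderer for its first audio track; a remote
// WebRTC stream shares a single WebRtcAudioRenderer via a proxy.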
235 scoped_refptr<MediaStreamAudioRenderer>
236 MediaStreamImpl::GetAudioRenderer(const GURL& url, int render_frame_id) {
237   DCHECK(CalledOnValidThread());
238   blink::WebMediaStream web_stream(GetMediaStream(url));
239
240   if (web_stream.isNull() || !web_stream.extraData())
241     return NULL;  // This is not a valid stream.
242
243   DVLOG(1) << "MediaStreamImpl::GetAudioRenderer stream:"
244            << base::UTF16ToUTF8(web_stream.id());
245
246   MediaStreamExtraData* extra_data =
247       static_cast<MediaStreamExtraData*>(web_stream.extraData());
248
249   // TODO(tommi): MediaStreams do not have a 'local or not' concept.
250   // Tracks _might_, but even so, we need to fix the data flow so that
251   // it works the same way for all track implementations, local, remote or what
252   // have you.
253   // In this function, we should simply create a renderer object that receives
254   // and mixes audio from all the tracks that belong to the media stream.
255   // We need to remove the |is_local| property from MediaStreamExtraData since
256   // this concept is peerconnection specific (is a previously recorded stream
257   // local or remote?).
258   if (extra_data->is_local()) {
259     // Create the local audio renderer if the stream contains audio tracks.
260     blink::WebVector<blink::WebMediaStreamTrack> audio_tracks;
261     web_stream.audioTracks(audio_tracks);
262     if (audio_tracks.isEmpty())
263       return NULL;
264
265     // TODO(xians): Add support for the case where the media stream contains
266     // multiple audio tracks.
267     return CreateLocalAudioRenderer(audio_tracks[0], render_frame_id);
268   }
269
270   webrtc::MediaStreamInterface* stream = extra_data->stream().get();
271   if (!stream || stream->GetAudioTracks().empty())
272     return NULL;
273
274   // This is a remote WebRTC media stream.
275   WebRtcAudioDeviceImpl* audio_device =
276       dependency_factory_->GetWebRtcAudioDevice();
277
278   // Share the existing renderer if any, otherwise create a new one.
279   scoped_refptr<WebRtcAudioRenderer> renderer(audio_device->renderer());
280   if (!renderer.get()) {
281     renderer = CreateRemoteAudioRenderer(stream, render_frame_id);
282
283     if (renderer.get() && !audio_device->SetAudioRenderer(renderer.get()))
284       renderer = NULL;
285   }
286
287   return renderer.get() ?
288       renderer->CreateSharedAudioRendererProxy(stream) : NULL;
289 }
290
291 // Callback from MediaStreamDispatcher.
292 // The requested stream has been generated by the MediaStreamDispatcher.
293 void MediaStreamImpl::OnStreamGenerated(
294     int request_id,
295     const std::string& label,
296     const StreamDeviceInfoArray& audio_array,
297     const StreamDeviceInfoArray& video_array) {
298   DCHECK(CalledOnValidThread());
299   DVLOG(1) << "MediaStreamImpl::OnStreamGenerated stream:" << label;
300
301   UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id);
302   if (!request_info) {
303     // This can happen if the request is canceled or the frame reloads while
304     // MediaStreamDispatcher is processing the request.
305     // Only stop the device if the device is not used in another MediaStream.
306     for (StreamDeviceInfoArray::const_iterator device_it = audio_array.begin();
307          device_it != audio_array.end(); ++device_it) {
308       if (!FindLocalSource(*device_it))
309         media_stream_dispatcher_->StopStreamDevice(*device_it);
310     }
311
312     for (StreamDeviceInfoArray::const_iterator device_it = video_array.begin();
313          device_it != video_array.end(); ++device_it) {
314       if (!FindLocalSource(*device_it))
315         media_stream_dispatcher_->StopStreamDevice(*device_it);
316     }
317
318     DVLOG(1) << "Request ID not found";
319     return;
320   }
321   request_info->generated = true;
322
323   // WebUserMediaRequest doesn't have an implementation in unit tests.
324   // Therefore we need to check for isNull here.
325   blink::WebUserMediaRequest* request = &(request_info->request);
326   blink::WebMediaConstraints audio_constraints = request->isNull() ?
327       blink::WebMediaConstraints() : request->audioConstraints();
328   blink::WebMediaConstraints video_constraints = request->isNull() ?
329       blink::WebMediaConstraints() : request->videoConstraints();
330
331   blink::WebVector<blink::WebMediaStreamTrack> audio_track_vector(
332       audio_array.size());
333   CreateAudioTracks(audio_array, audio_constraints, &audio_track_vector,
334                     request_info);
335
336   blink::WebVector<blink::WebMediaStreamTrack> video_track_vector(
337       video_array.size());
338   CreateVideoTracks(video_array, video_constraints, &video_track_vector,
339                     request_info);
340
341   blink::WebString webkit_id = base::UTF8ToUTF16(label);
342   blink::WebMediaStream* web_stream = &(request_info->web_stream);
343
344   web_stream->initialize(webkit_id, audio_track_vector,
345                          video_track_vector);
346
347   // Wait for the tracks to be started successfully or to fail.
348   request_info->CallbackOnTracksStarted(
349       base::Bind(&MediaStreamImpl::OnCreateNativeTracksCompleted, AsWeakPtr()));
350 }
351
352 // Callback from MediaStreamDispatcher.
353 // The requested stream failed to be generated.
354 void MediaStreamImpl::OnStreamGenerationFailed(int request_id) {
355   DCHECK(CalledOnValidThread());
356   DVLOG(1) << "MediaStreamImpl::OnStreamGenerationFailed("
357            << request_id << ")";
358   UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id);
359   if (!request_info) {
360     // This can happen if the request is canceled or the frame reloads while
361     // MediaStreamDispatcher is processing the request.
362     DVLOG(1) << "Request ID not found";
363     return;
364   }
365   CompleteGetUserMediaRequest(request_info->web_stream,
366                               &request_info->request,
367                               false);
368   DeleteUserMediaRequestInfo(request_info);
369 }
370
371 // Callback from MediaStreamDispatcher.
372 // The browser process has stopped a device used by a MediaStream.
373 void MediaStreamImpl::OnDeviceStopped(
374     const std::string& label,
375     const StreamDeviceInfo& device_info) {
376   DCHECK(CalledOnValidThread());
377   DVLOG(1) << "MediaStreamImpl::OnDeviceStopped("
378            << "{device_id = " << device_info.device.id << "})";
379
380   const blink::WebMediaStreamSource* source_ptr = FindLocalSource(device_info);
381   if (!source_ptr) {
382     // This happens if the same device is used in several guM requests or
383     // if a user happens to stop a track from JS at the same time
384     // as the underlying media device is unplugged from the system.
385     return;
386   }
387   // By creating |source| it is guaranteed that the blink::WebMediaStreamSource
388   // object is valid during the cleanup.
389   blink::WebMediaStreamSource source(*source_ptr);
390   StopLocalSource(source, false);
391
392   for (LocalStreamSources::iterator device_it = local_sources_.begin();
393        device_it != local_sources_.end(); ++device_it) {
394     if (device_it->source.id() == source.id()) {
395       local_sources_.erase(device_it);
396       break;
397     }
398   }
399
400   // Remove the reference to this source from all |user_media_requests_|.
401   // TODO(perkj): The below is not necessary once we don't need to support
402   // MediaStream::Stop().
403   UserMediaRequests::iterator it = user_media_requests_.begin();
404   while (it != user_media_requests_.end()) {
405     (*it)->RemoveSource(source);
406     if ((*it)->AreAllSourcesRemoved()) {
407       it = user_media_requests_.erase(it);
408     } else {
409       ++it;
410     }
411   }
412 }
413
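// Initializes |webkit_source| for |device|, reusing an existing local source
// when the same device and session are already in use. A new source gets a
// MediaStreamVideoCapturerSource or MediaStreamAudioSource as extra data and
// is added to |local_sources_|.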
414 void MediaStreamImpl::InitializeSourceObject(
415     const StreamDeviceInfo& device,
416     blink::WebMediaStreamSource::Type type,
417     const blink::WebMediaConstraints& constraints,
418     blink::WebFrame* frame,
419     blink::WebMediaStreamSource* webkit_source) {
420   const blink::WebMediaStreamSource* existing_source =
421       FindLocalSource(device);
422   if (existing_source) {
423     *webkit_source = *existing_source;
424     DVLOG(1) << "Source already exists. Reusing source with id "
425              << webkit_source->id().utf8();
426     return;
427   }
428
429   webkit_source->initialize(
430       base::UTF8ToUTF16(device.device.id),
431       type,
432       base::UTF8ToUTF16(device.device.name));
433
434   DVLOG(1) << "Initialize source object :"
435            << "id = " << webkit_source->id().utf8()
436            << ", name = " << webkit_source->name().utf8();
437
438   if (type == blink::WebMediaStreamSource::TypeVideo) {
439     MediaStreamVideoCapturerSource* video_source(
440         new content::MediaStreamVideoCapturerSource(
441             device,
442             base::Bind(&MediaStreamImpl::OnLocalSourceStopped, AsWeakPtr()),
443             dependency_factory_));
444     webkit_source->setExtraData(video_source);
445   } else {
446     DCHECK_EQ(blink::WebMediaStreamSource::TypeAudio, type);
447     MediaStreamAudioSource* audio_source(
448         new MediaStreamAudioSource(
449             RenderViewObserver::routing_id(),
450             device,
451             base::Bind(&MediaStreamImpl::OnLocalSourceStopped, AsWeakPtr()),
452             dependency_factory_));
453     webkit_source->setExtraData(audio_source);
454   }
455   local_sources_.push_back(LocalStreamSource(frame, *webkit_source));
456 }
457
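// Creates and starts one video track for each device in |devices| on behalf
// of |request|.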
458 void MediaStreamImpl::CreateVideoTracks(
459     const StreamDeviceInfoArray& devices,
460     const blink::WebMediaConstraints& constraints,
461     blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks,
462     UserMediaRequestInfo* request) {
463   DCHECK_EQ(devices.size(), webkit_tracks->size());
464
465   for (size_t i = 0; i < devices.size(); ++i) {
466     blink::WebMediaStreamSource webkit_source;
467     InitializeSourceObject(devices[i],
468                            blink::WebMediaStreamSource::TypeVideo,
469                            constraints,
470                            request->frame,
471                            &webkit_source);
472     (*webkit_tracks)[i].initialize(webkit_source);
473     request->StartTrack((*webkit_tracks)[i], constraints);
474   }
475 }
476
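// Creates and starts one audio track for each device in |devices|. Unless the
// request asked for automatic output device selection, any matched output
// device parameters are cleared first.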
477 void MediaStreamImpl::CreateAudioTracks(
478     const StreamDeviceInfoArray& devices,
479     const blink::WebMediaConstraints& constraints,
480     blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks,
481     UserMediaRequestInfo* request) {
482   DCHECK_EQ(devices.size(), webkit_tracks->size());
483
484   // Log the device names for this request.
485   for (StreamDeviceInfoArray::const_iterator it = devices.begin();
486        it != devices.end(); ++it) {
487     WebRtcLogMessage(base::StringPrintf(
488         "Generated media stream for request id %d contains audio device name"
489         " \"%s\"",
490         request->request_id,
491         it->device.name.c_str()));
492   }
493
494   StreamDeviceInfoArray overridden_audio_array = devices;
495   if (!request->enable_automatic_output_device_selection) {
496     // If the GetUserMedia request did not explicitly set the constraint
497     // kMediaStreamRenderToAssociatedSink, the output device parameters must
498     // be removed.
499     for (StreamDeviceInfoArray::iterator it = overridden_audio_array.begin();
500          it != overridden_audio_array.end(); ++it) {
501       it->device.matched_output_device_id = "";
502       it->device.matched_output = MediaStreamDevice::AudioDeviceParameters();
503     }
504   }
505
506   for (size_t i = 0; i < overridden_audio_array.size(); ++i) {
507     blink::WebMediaStreamSource webkit_source;
508     InitializeSourceObject(overridden_audio_array[i],
509                            blink::WebMediaStreamSource::TypeAudio,
510                            constraints,
511                            request->frame,
512                            &webkit_source);
513     (*webkit_tracks)[i].initialize(webkit_source);
514     request->StartTrack((*webkit_tracks)[i], constraints);
515   }
516 }
517
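// Called when all tracks of |request| have either started or failed. Creates
// the native local media stream on success and completes the getUserMedia
// request.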
518 void MediaStreamImpl::OnCreateNativeTracksCompleted(
519     UserMediaRequestInfo* request,
520     bool request_succeeded) {
521   // Create a native representation of the stream.
522   if (request_succeeded) {
523     dependency_factory_->CreateNativeLocalMediaStream(
524         &request->web_stream,
525         base::Bind(&MediaStreamImpl::OnLocalMediaStreamStop, AsWeakPtr()));
526   }
527   DVLOG(1) << "MediaStreamImpl::OnCreateNativeTracksCompleted("
528            << "{request_id = " << request->request_id << "} "
529            << "{request_succeeded = " << request_succeeded << "})";
530   CompleteGetUserMediaRequest(request->web_stream, &request->request,
531                               request_succeeded);
532   if (!request_succeeded) {
533     // TODO(perkj): Once we don't support MediaStream::Stop the |request_info|
534     // can be deleted even if the request succeeds.
535     DeleteUserMediaRequestInfo(request);
536     StopUnreferencedSources(true);
537   }
538 }
539
540 void MediaStreamImpl::OnDevicesEnumerated(
541     int request_id,
542     const StreamDeviceInfoArray& device_array) {
543   DVLOG(1) << "MediaStreamImpl::OnDevicesEnumerated("
544            << request_id << ")";
545   NOTIMPLEMENTED();
546 }
547
548 void MediaStreamImpl::OnDeviceOpened(
549     int request_id,
550     const std::string& label,
551     const StreamDeviceInfo& video_device) {
552   DVLOG(1) << "MediaStreamImpl::OnDeviceOpened("
553            << request_id << ", " << label << ")";
554   NOTIMPLEMENTED();
555 }
556
557 void MediaStreamImpl::OnDeviceOpenFailed(int request_id) {
558   DVLOG(1) << "MediaStreamImpl::OnDeviceOpenFailed("
559            << request_id << ")";
560   NOTIMPLEMENTED();
561 }
562
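// Resolves or rejects the Blink getUserMedia request depending on
// |request_succeeded|.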
563 void MediaStreamImpl::CompleteGetUserMediaRequest(
564     const blink::WebMediaStream& stream,
565     blink::WebUserMediaRequest* request_info,
566     bool request_succeeded) {
567   if (request_succeeded) {
568     request_info->requestSucceeded(stream);
569   } else {
570     request_info->requestFailed();
571   }
572 }
573
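// Returns the existing local source that matches |device| (same device id,
// type and session id), or NULL if there is none.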
574 const blink::WebMediaStreamSource* MediaStreamImpl::FindLocalSource(
575     const StreamDeviceInfo& device) const {
576   for (LocalStreamSources::const_iterator it = local_sources_.begin();
577        it != local_sources_.end(); ++it) {
578     MediaStreamSource* source =
579         static_cast<MediaStreamSource*>(it->source.extraData());
580     const StreamDeviceInfo& active_device = source->device_info();
581     if (active_device.device.id == device.device.id &&
582         active_device.device.type == device.device.type &&
583         active_device.session_id == device.session_id) {
584       return &it->source;
585     }
586   }
587   return NULL;
588 }
589
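// Returns true if |source| is still used by any outstanding user media
// request.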
590 bool MediaStreamImpl::IsSourceInRequests(
591     const blink::WebMediaStreamSource& source) const {
592   for (UserMediaRequests::const_iterator req_it = user_media_requests_.begin();
593        req_it != user_media_requests_.end(); ++req_it) {
594     if ((*req_it)->IsSourceUsed(source))
595       return true;
596   }
597   return false;
598 }
599
600 MediaStreamImpl::UserMediaRequestInfo*
601 MediaStreamImpl::FindUserMediaRequestInfo(int request_id) {
602   UserMediaRequests::iterator it = user_media_requests_.begin();
603   for (; it != user_media_requests_.end(); ++it) {
604     if ((*it)->request_id == request_id)
605       return (*it);
606   }
607   return NULL;
608 }
609
610 MediaStreamImpl::UserMediaRequestInfo*
611 MediaStreamImpl::FindUserMediaRequestInfo(
612     const blink::WebUserMediaRequest& request) {
613   UserMediaRequests::iterator it = user_media_requests_.begin();
614   for (; it != user_media_requests_.end(); ++it) {
615     if ((*it)->request == request)
616       return (*it);
617   }
618   return NULL;
619 }
620
621 MediaStreamImpl::UserMediaRequestInfo*
622 MediaStreamImpl::FindUserMediaRequestInfo(const std::string& label) {
623   UserMediaRequests::iterator it = user_media_requests_.begin();
624   for (; it != user_media_requests_.end(); ++it) {
625     if ((*it)->generated && (*it)->web_stream.id() == base::UTF8ToUTF16(label))
626       return (*it);
627   }
628   return NULL;
629 }
630
631 void MediaStreamImpl::DeleteUserMediaRequestInfo(
632     UserMediaRequestInfo* request) {
633   UserMediaRequests::iterator it = user_media_requests_.begin();
634   for (; it != user_media_requests_.end(); ++it) {
635     if ((*it) == request) {
636       user_media_requests_.erase(it);
637       return;
638     }
639   }
640   NOTREACHED();
641 }
642
643 void MediaStreamImpl::FrameDetached(blink::WebFrame* frame) {
644   // Do the same thing as FrameWillClose.
645   FrameWillClose(frame);
646 }
647
648 void MediaStreamImpl::FrameWillClose(blink::WebFrame* frame) {
649   // Loop through all UserMediaRequests and find the requests that belong to the
650   // frame that is being closed.
651   UserMediaRequests::iterator request_it = user_media_requests_.begin();
652   while (request_it != user_media_requests_.end()) {
653     if ((*request_it)->frame == frame) {
654       DVLOG(1) << "MediaStreamImpl::FrameWillClose: "
655                << "Cancel user media request " << (*request_it)->request_id;
656       // If the request has not been generated yet, it means that a request
657       // has been sent to the MediaStreamDispatcher to generate a stream but
658       // the dispatcher has not yet responded, so we need to cancel
659       // the request.
660       if (!(*request_it)->generated) {
661         media_stream_dispatcher_->CancelGenerateStream(
662             (*request_it)->request_id, AsWeakPtr());
663       }
664       request_it = user_media_requests_.erase(request_it);
665     } else {
666       ++request_it;
667     }
668   }
669
670   // Loop through all current local sources and stop the sources that were
671   // created by the frame that will be closed.
672   LocalStreamSources::iterator sources_it = local_sources_.begin();
673   while (sources_it != local_sources_.end()) {
674     if (sources_it->frame == frame) {
675       StopLocalSource(sources_it->source, true);
676       sources_it = local_sources_.erase(sources_it);
677     } else {
678       ++sources_it;
679     }
680   }
681 }
682
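// Called when a local media stream identified by |label| has been stopped
// (e.g. via MediaStream::Stop()); removes the corresponding request and stops
// sources that are no longer referenced.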
683 void MediaStreamImpl::OnLocalMediaStreamStop(
684     const std::string& label) {
685   DVLOG(1) << "MediaStreamImpl::OnLocalMediaStreamStop(" << label << ")";
686
687   UserMediaRequestInfo* user_media_request = FindUserMediaRequestInfo(label);
688   if (user_media_request) {
689     DeleteUserMediaRequestInfo(user_media_request);
690   }
691   StopUnreferencedSources(true);
692 }
693
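// Called when a local source has stopped on its own; removes it from
// |local_sources_| and from all requests, and tells the dispatcher to stop
// the underlying device.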
694 void MediaStreamImpl::OnLocalSourceStopped(
695     const blink::WebMediaStreamSource& source) {
696   DCHECK(CalledOnValidThread());
697
698   bool device_found = false;
699   for (LocalStreamSources::iterator device_it = local_sources_.begin();
700        device_it != local_sources_.end(); ++device_it) {
701     if (device_it->source.id() == source.id()) {
702       device_found = true;
703       local_sources_.erase(device_it);
704       break;
705     }
706   }
707   CHECK(device_found);
708
709   // Remove the reference to this source from all |user_media_requests_|.
710   // TODO(perkj): The below is not necessary once we don't need to support
711   // MediaStream::Stop().
712   UserMediaRequests::iterator it = user_media_requests_.begin();
713   while (it != user_media_requests_.end()) {
714     (*it)->RemoveSource(source);
715     if ((*it)->AreAllSourcesRemoved()) {
716       it = user_media_requests_.erase(it);
717     } else {
718       ++it;
719     }
720   }
721
722   MediaStreamSource* source_impl =
723       static_cast<MediaStreamSource*>(source.extraData());
724   media_stream_dispatcher_->StopStreamDevice(source_impl->device_info());
725 }
726
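// Stops |source| and, if |notify_dispatcher| is true, tells the
// MediaStreamDispatcher to stop the underlying device.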
727 void MediaStreamImpl::StopLocalSource(
728     const blink::WebMediaStreamSource& source,
729     bool notify_dispatcher) {
730   MediaStreamSource* source_impl =
731       static_cast<MediaStreamSource*>(source.extraData());
732   DVLOG(1) << "MediaStreamImpl::StopLocalSource("
733            << "{device_id = " << source_impl->device_info().device.id << "})";
734
735   if (notify_dispatcher)
736     media_stream_dispatcher_->StopStreamDevice(source_impl->device_info());
737
738   source_impl->ResetSourceStoppedCallback();
739   source_impl->StopSource();
740 }
741
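// Stops and removes every local source that is no longer referenced by any
// outstanding user media request.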
742 void MediaStreamImpl::StopUnreferencedSources(bool notify_dispatcher) {
743   LocalStreamSources::iterator source_it = local_sources_.begin();
744   while (source_it != local_sources_.end()) {
745     if (!IsSourceInRequests(source_it->source)) {
746       StopLocalSource(source_it->source, notify_dispatcher);
747       source_it = local_sources_.erase(source_it);
748     } else {
749       ++source_it;
750     }
751   }
752 }
753
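// Creates a WebRtcAudioRenderer for a remote stream, using the authorized
// input device's matched output parameters when available and the default
// output device parameters otherwise.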
754 scoped_refptr<WebRtcAudioRenderer> MediaStreamImpl::CreateRemoteAudioRenderer(
755     webrtc::MediaStreamInterface* stream,
756     int render_frame_id) {
757   if (stream->GetAudioTracks().empty())
758     return NULL;
759
760   DVLOG(1) << "MediaStreamImpl::CreateRemoteAudioRenderer label:"
761            << stream->label();
762
763   // TODO(tommi): Change the default value of session_id to be
764   // StreamDeviceInfo::kNoId.  Also update AudioOutputDevice etc.
765   int session_id = 0, sample_rate = 0, buffer_size = 0;
766   if (!GetAuthorizedDeviceInfoForAudioRenderer(&session_id,
767                                                &sample_rate,
768                                                &buffer_size)) {
769     GetDefaultOutputDeviceParams(&sample_rate, &buffer_size);
770   }
771
772   return new WebRtcAudioRenderer(
773       stream, RenderViewObserver::routing_id(), render_frame_id, session_id,
774       sample_rate, buffer_size);
775 }
776
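// Creates a renderer that plays back the local |audio_track|, using the same
// device selection logic as CreateRemoteAudioRenderer().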
777 scoped_refptr<WebRtcLocalAudioRenderer>
778 MediaStreamImpl::CreateLocalAudioRenderer(
779     const blink::WebMediaStreamTrack& audio_track,
780     int render_frame_id) {
781   DVLOG(1) << "MediaStreamImpl::CreateLocalAudioRenderer";
782
783   int session_id = 0, sample_rate = 0, buffer_size = 0;
784   if (!GetAuthorizedDeviceInfoForAudioRenderer(&session_id,
785                                                &sample_rate,
786                                                &buffer_size)) {
787     GetDefaultOutputDeviceParams(&sample_rate, &buffer_size);
788   }
789
790   // Create a new WebRtcLocalAudioRenderer instance and connect it to the
791   // existing WebRtcAudioCapturer so the renderer can use it as its source.
792   return new WebRtcLocalAudioRenderer(
793       audio_track,
794       RenderViewObserver::routing_id(),
795       render_frame_id,
796       session_id,
797       buffer_size);
798 }
799
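// Asks the WebRtcAudioDeviceImpl for the session id and output parameters of
// an authorized input device. Returns false if no such information is
// available.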
800 bool MediaStreamImpl::GetAuthorizedDeviceInfoForAudioRenderer(
801     int* session_id,
802     int* output_sample_rate,
803     int* output_frames_per_buffer) {
804   DCHECK(CalledOnValidThread());
805   WebRtcAudioDeviceImpl* audio_device =
806       dependency_factory_->GetWebRtcAudioDevice();
807   if (!audio_device)
808     return false;
809
810   return audio_device->GetAuthorizedDeviceInfoForAudioRenderer(
811       session_id, output_sample_rate, output_frames_per_buffer);
812 }
813
814 MediaStreamExtraData::MediaStreamExtraData(
815     webrtc::MediaStreamInterface* stream, bool is_local)
816     : stream_(stream),
817       is_local_(is_local) {
818 }
819
820 MediaStreamExtraData::~MediaStreamExtraData() {
821 }
822
823 void MediaStreamExtraData::SetLocalStreamStopCallback(
824     const StreamStopCallback& stop_callback) {
825   stream_stop_callback_ = stop_callback;
826 }
827
828 void MediaStreamExtraData::OnLocalStreamStop() {
829   if (!stream_stop_callback_.is_null())
830     stream_stop_callback_.Run(stream_->label());
831 }
832
833 MediaStreamImpl::UserMediaRequestInfo::UserMediaRequestInfo(
834     int request_id,
835     blink::WebFrame* frame,
836     const blink::WebUserMediaRequest& request,
837     bool enable_automatic_output_device_selection)
838     : request_id(request_id),
839       generated(false),
840       enable_automatic_output_device_selection(
841           enable_automatic_output_device_selection),
842       frame(frame),
843       request(request),
844       request_failed_(false) {
845 }
846
847 MediaStreamImpl::UserMediaRequestInfo::~UserMediaRequestInfo() {
848   DVLOG(1) << "~UserMediaRequestInfo";
849 }
850
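// Starts |track| on its native source and registers the source so that
// CheckAllTracksStarted() can tell when every track has reported back.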
851 void MediaStreamImpl::UserMediaRequestInfo::StartTrack(
852     const blink::WebMediaStreamTrack& track,
853     const blink::WebMediaConstraints& constraints) {
854   MediaStreamSource* native_source =
855       static_cast<MediaStreamSource*>(track.source().extraData());
856   DCHECK(native_source);
857
858   sources_.push_back(track.source());
859   sources_waiting_for_callback_.push_back(native_source);
860   native_source->AddTrack(
861       track, constraints, base::Bind(
862           &MediaStreamImpl::UserMediaRequestInfo::OnTrackStarted,
863           AsWeakPtr()));
864 }
865
866 void MediaStreamImpl::UserMediaRequestInfo::CallbackOnTracksStarted(
867     const ResourcesReady& callback) {
868   DCHECK(ready_callback_.is_null());
869   ready_callback_ = callback;
870   CheckAllTracksStarted();
871 }
872
873 void MediaStreamImpl::UserMediaRequestInfo::OnTrackStarted(
874     MediaStreamSource* source, bool success) {
875   DVLOG(1) << "OnTrackStarted";
876   std::vector<MediaStreamSource*>::iterator it =
877       std::find(sources_waiting_for_callback_.begin(),
878                 sources_waiting_for_callback_.end(),
879                 source);
880   DCHECK(it != sources_waiting_for_callback_.end());
881   sources_waiting_for_callback_.erase(it);
882   // All tracks must be started successfully. Otherwise the request is a
883   // failure.
884   if (!success)
885     request_failed_ = true;
886   CheckAllTracksStarted();
887 }
888
889 void MediaStreamImpl::UserMediaRequestInfo::CheckAllTracksStarted() {
890   if (!ready_callback_.is_null() && sources_waiting_for_callback_.empty())
891     ready_callback_.Run(this, !request_failed_);
892 }
893
894 bool MediaStreamImpl::UserMediaRequestInfo::IsSourceUsed(
895     const blink::WebMediaStreamSource& source) const {
896   for (std::vector<blink::WebMediaStreamSource>::const_iterator source_it =
897            sources_.begin();
898        source_it != sources_.end(); ++source_it) {
899     if (source_it->id() == source.id())
900       return true;
901   }
902   return false;
903 }
904
905 void MediaStreamImpl::UserMediaRequestInfo::RemoveSource(
906     const blink::WebMediaStreamSource& source) {
907   for (std::vector<blink::WebMediaStreamSource>::iterator it =
908            sources_.begin();
909        it != sources_.end(); ++it) {
910     if (source.id() == it->id()) {
911       sources_.erase(it);
912       return;
913     }
914   }
915 }
916
917 }  // namespace content