src/content/renderer/media/media_stream_impl.cc (crosswalk, upstream version 7.36.149.0)
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/media_stream_impl.h"

#include <utility>

#include "base/logging.h"
#include "base/strings/string_util.h"
#include "base/strings/stringprintf.h"
#include "base/strings/utf_string_conversions.h"
#include "content/renderer/media/media_stream.h"
#include "content/renderer/media/media_stream_audio_renderer.h"
#include "content/renderer/media/media_stream_audio_source.h"
#include "content/renderer/media/media_stream_dependency_factory.h"
#include "content/renderer/media/media_stream_dispatcher.h"
#include "content/renderer/media/media_stream_video_capturer_source.h"
#include "content/renderer/media/media_stream_video_track.h"
#include "content/renderer/media/peer_connection_tracker.h"
#include "content/renderer/media/rtc_video_renderer.h"
#include "content/renderer/media/webrtc_audio_capturer.h"
#include "content/renderer/media/webrtc_audio_renderer.h"
#include "content/renderer/media/webrtc_local_audio_renderer.h"
#include "content/renderer/media/webrtc_logging.h"
#include "content/renderer/media/webrtc_uma_histograms.h"
#include "content/renderer/render_thread_impl.h"
#include "media/base/audio_hardware_config.h"
#include "third_party/WebKit/public/platform/WebMediaConstraints.h"
#include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"
#include "third_party/WebKit/public/web/WebDocument.h"
#include "third_party/WebKit/public/web/WebLocalFrame.h"
#include "third_party/WebKit/public/web/WebMediaStreamRegistry.h"

namespace content {
namespace {

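// Copies the mandatory and optional constraints from the Blink
// representation into the StreamOptions::Constraints lists used when
// requesting a stream via MediaStreamDispatcher.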
void CopyStreamConstraints(const blink::WebMediaConstraints& constraints,
                           StreamOptions::Constraints* mandatory,
                           StreamOptions::Constraints* optional) {
  blink::WebVector<blink::WebMediaConstraint> mandatory_constraints;
  constraints.getMandatoryConstraints(mandatory_constraints);
  for (size_t i = 0; i < mandatory_constraints.size(); i++) {
    mandatory->push_back(StreamOptions::Constraint(
        mandatory_constraints[i].m_name.utf8(),
        mandatory_constraints[i].m_value.utf8()));
  }

  blink::WebVector<blink::WebMediaConstraint> optional_constraints;
  constraints.getOptionalConstraints(optional_constraints);
  for (size_t i = 0; i < optional_constraints.size(); i++) {
    optional->push_back(StreamOptions::Constraint(
        optional_constraints[i].m_name.utf8(),
        optional_constraints[i].m_value.utf8()));
  }
}

static int g_next_request_id = 0;

void GetDefaultOutputDeviceParams(
    int* output_sample_rate, int* output_buffer_size) {
  // Fetch the default audio output hardware config.
  media::AudioHardwareConfig* hardware_config =
      RenderThreadImpl::current()->GetAudioHardwareConfig();
  *output_sample_rate = hardware_config->GetOutputSampleRate();
  *output_buffer_size = hardware_config->GetOutputBufferSize();
}

}  // namespace

MediaStreamImpl::MediaStreamImpl(
    RenderView* render_view,
    MediaStreamDispatcher* media_stream_dispatcher,
    MediaStreamDependencyFactory* dependency_factory)
    : RenderViewObserver(render_view),
      dependency_factory_(dependency_factory),
      media_stream_dispatcher_(media_stream_dispatcher) {
}

MediaStreamImpl::~MediaStreamImpl() {
}

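// Entry point for a getUserMedia() call coming from Blink. Translates the
// request's constraints into StreamOptions and asks MediaStreamDispatcher to
// generate a matching stream.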
void MediaStreamImpl::requestUserMedia(
    const blink::WebUserMediaRequest& user_media_request) {
  // Save histogram data so we can see how much GetUserMedia is used.
  // The histogram counts the number of calls to the JS API
  // webGetUserMedia.
  UpdateWebRTCMethodCount(WEBKIT_GET_USER_MEDIA);
  DCHECK(CalledOnValidThread());

  if (RenderThreadImpl::current()) {
    RenderThreadImpl::current()->peer_connection_tracker()->TrackGetUserMedia(
        user_media_request);
  }

  int request_id = g_next_request_id++;
  StreamOptions options;
  blink::WebLocalFrame* frame = NULL;
  GURL security_origin;
  bool enable_automatic_output_device_selection = false;

  // |user_media_request| can't be mocked, so in order to test at all we check
  // whether it is null.
  if (user_media_request.isNull()) {
    // We are in a test.
    options.audio_requested = true;
    options.video_requested = true;
  } else {
    if (user_media_request.audio()) {
      options.audio_requested = true;
      CopyStreamConstraints(user_media_request.audioConstraints(),
                            &options.mandatory_audio,
                            &options.optional_audio);

      // Check if this input device should be used to select a matching output
      // device for audio rendering.
      std::string enable;
      if (options.GetFirstAudioConstraintByName(
              kMediaStreamRenderToAssociatedSink, &enable, NULL) &&
          LowerCaseEqualsASCII(enable, "true")) {
        enable_automatic_output_device_selection = true;
      }
    }
    if (user_media_request.video()) {
      options.video_requested = true;
      CopyStreamConstraints(user_media_request.videoConstraints(),
                            &options.mandatory_video,
                            &options.optional_video);
    }

    security_origin = GURL(user_media_request.securityOrigin().toString());
    // Get the WebFrame that requested a MediaStream.
    // The frame is needed to tell the MediaStreamDispatcher when a stream goes
    // out of scope.
    frame = user_media_request.ownerDocument().frame();
    DCHECK(frame);
  }

  DVLOG(1) << "MediaStreamImpl::requestUserMedia(" << request_id << ", [ "
           << "audio=" << (options.audio_requested)
           << " select associated sink: "
           << enable_automatic_output_device_selection
           << ", video=" << (options.video_requested) << " ], "
           << security_origin.spec() << ")";

  std::string audio_device_id;
  bool mandatory_audio = false;
  options.GetFirstAudioConstraintByName(kMediaStreamSourceInfoId,
                                        &audio_device_id, &mandatory_audio);
  std::string video_device_id;
  bool mandatory_video = false;
  options.GetFirstVideoConstraintByName(kMediaStreamSourceInfoId,
                                        &video_device_id, &mandatory_video);

  WebRtcLogMessage(base::StringPrintf(
      "MSI::requestUserMedia. request_id=%d"
      ", audio source id=%s mandatory= %s "
      ", video source id=%s mandatory= %s",
      request_id,
      audio_device_id.c_str(),
      mandatory_audio ? "true" : "false",
      video_device_id.c_str(),
      mandatory_video ? "true" : "false"));

  user_media_requests_.push_back(
      new UserMediaRequestInfo(request_id, frame, user_media_request,
          enable_automatic_output_device_selection));

  media_stream_dispatcher_->GenerateStream(
      request_id,
      AsWeakPtr(),
      options,
      security_origin);
}

void MediaStreamImpl::cancelUserMediaRequest(
    const blink::WebUserMediaRequest& user_media_request) {
  DCHECK(CalledOnValidThread());
  UserMediaRequestInfo* request = FindUserMediaRequestInfo(user_media_request);
  if (request) {
    // We can't abort the stream generation process.
    // Instead, erase the request. Once the stream is generated we will stop
    // the stream if the request does not exist.
    DeleteUserMediaRequestInfo(request);
  }
}

blink::WebMediaStream MediaStreamImpl::GetMediaStream(
    const GURL& url) {
  return blink::WebMediaStreamRegistry::lookupMediaStreamDescriptor(url);
}

bool MediaStreamImpl::IsMediaStream(const GURL& url) {
  blink::WebMediaStream web_stream(
      blink::WebMediaStreamRegistry::lookupMediaStreamDescriptor(url));

  return (!web_stream.isNull() &&
      (MediaStream::GetMediaStream(web_stream) != NULL));
}

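// Returns a frame provider that renders the first video track of the stream
// registered under |url|, or NULL if the URL does not resolve to a valid
// stream with a native video track.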
scoped_refptr<VideoFrameProvider>
MediaStreamImpl::GetVideoFrameProvider(
    const GURL& url,
    const base::Closure& error_cb,
    const VideoFrameProvider::RepaintCB& repaint_cb) {
  DCHECK(CalledOnValidThread());
  blink::WebMediaStream web_stream(GetMediaStream(url));

  if (web_stream.isNull() || !web_stream.extraData())
    return NULL;  // This is not a valid stream.

  DVLOG(1) << "MediaStreamImpl::GetVideoFrameProvider stream:"
           << base::UTF16ToUTF8(web_stream.id());

  blink::WebVector<blink::WebMediaStreamTrack> video_tracks;
  web_stream.videoTracks(video_tracks);
  if (video_tracks.isEmpty() ||
      !MediaStreamVideoTrack::GetTrack(video_tracks[0])) {
    return NULL;
  }

  return new RTCVideoRenderer(video_tracks[0], error_cb, repaint_cb);
}

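// Returns an audio renderer for the stream registered under |url|: a local
// renderer for locally captured streams, otherwise a (possibly shared) WebRTC
// renderer proxy for remote streams.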
scoped_refptr<MediaStreamAudioRenderer>
MediaStreamImpl::GetAudioRenderer(const GURL& url, int render_frame_id) {
  DCHECK(CalledOnValidThread());
  blink::WebMediaStream web_stream(GetMediaStream(url));

  if (web_stream.isNull() || !web_stream.extraData())
    return NULL;  // This is not a valid stream.

  DVLOG(1) << "MediaStreamImpl::GetAudioRenderer stream:"
           << base::UTF16ToUTF8(web_stream.id());

  MediaStream* native_stream = MediaStream::GetMediaStream(web_stream);

  // TODO(tommi): MediaStreams do not have a 'local or not' concept.
  // Tracks _might_, but even so, we need to fix the data flow so that
  // it works the same way for all track implementations, local, remote or what
  // have you.
  // In this function, we should simply create a renderer object that receives
  // and mixes audio from all the tracks that belong to the media stream.
  // We need to remove the |is_local| property from MediaStreamExtraData since
  // this concept is peerconnection specific (is a previously recorded stream
  // local or remote?).
  if (native_stream->is_local()) {
    // Create the local audio renderer if the stream contains audio tracks.
    blink::WebVector<blink::WebMediaStreamTrack> audio_tracks;
    web_stream.audioTracks(audio_tracks);
    if (audio_tracks.isEmpty())
      return NULL;

    // TODO(xians): Add support for the case where the media stream contains
    // multiple audio tracks.
    return CreateLocalAudioRenderer(audio_tracks[0], render_frame_id);
  }

  webrtc::MediaStreamInterface* stream =
      MediaStream::GetAdapter(web_stream);
  if (stream->GetAudioTracks().empty())
    return NULL;

  // This is a remote WebRTC media stream.
  WebRtcAudioDeviceImpl* audio_device =
      dependency_factory_->GetWebRtcAudioDevice();

  // Share the existing renderer if any, otherwise create a new one.
  scoped_refptr<WebRtcAudioRenderer> renderer(audio_device->renderer());
  if (!renderer.get()) {
    renderer = CreateRemoteAudioRenderer(stream, render_frame_id);

    if (renderer.get() && !audio_device->SetAudioRenderer(renderer.get()))
      renderer = NULL;
  }

  return renderer.get() ?
      renderer->CreateSharedAudioRendererProxy(stream) : NULL;
}

// Callback from MediaStreamDispatcher.
// The requested stream has been generated by the MediaStreamDispatcher.
void MediaStreamImpl::OnStreamGenerated(
    int request_id,
    const std::string& label,
    const StreamDeviceInfoArray& audio_array,
    const StreamDeviceInfoArray& video_array) {
  DCHECK(CalledOnValidThread());
  DVLOG(1) << "MediaStreamImpl::OnStreamGenerated stream:" << label;

  UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id);
  if (!request_info) {
    // This can happen if the request is canceled or the frame reloads while
    // MediaStreamDispatcher is processing the request.
    // Only stop the device if the device is not used in another MediaStream.
    for (StreamDeviceInfoArray::const_iterator device_it = audio_array.begin();
         device_it != audio_array.end(); ++device_it) {
      if (!FindLocalSource(*device_it))
        media_stream_dispatcher_->StopStreamDevice(*device_it);
    }

    for (StreamDeviceInfoArray::const_iterator device_it = video_array.begin();
         device_it != video_array.end(); ++device_it) {
      if (!FindLocalSource(*device_it))
        media_stream_dispatcher_->StopStreamDevice(*device_it);
    }

    DVLOG(1) << "Request ID not found";
    return;
  }
  request_info->generated = true;

  // WebUserMediaRequest doesn't have an implementation in unit tests.
  // Therefore we need to check for isNull here and initialize the
  // constraints.
  blink::WebUserMediaRequest* request = &(request_info->request);
  blink::WebMediaConstraints audio_constraints;
  blink::WebMediaConstraints video_constraints;
  if (request->isNull()) {
    audio_constraints.initialize();
    video_constraints.initialize();
  } else {
    audio_constraints = request->audioConstraints();
    video_constraints = request->videoConstraints();
  }

  blink::WebVector<blink::WebMediaStreamTrack> audio_track_vector(
      audio_array.size());
  CreateAudioTracks(audio_array, audio_constraints, &audio_track_vector,
                    request_info);

  blink::WebVector<blink::WebMediaStreamTrack> video_track_vector(
      video_array.size());
  CreateVideoTracks(video_array, video_constraints, &video_track_vector,
                    request_info);

  blink::WebString webkit_id = base::UTF8ToUTF16(label);
  blink::WebMediaStream* web_stream = &(request_info->web_stream);

  web_stream->initialize(webkit_id, audio_track_vector,
                         video_track_vector);
  web_stream->setExtraData(
      new MediaStream(
          *web_stream));

  // Wait for the tracks to be started successfully or to fail.
  request_info->CallbackOnTracksStarted(
      base::Bind(&MediaStreamImpl::OnCreateNativeTracksCompleted, AsWeakPtr()));
}

// Callback from MediaStreamDispatcher.
// The requested stream failed to be generated.
void MediaStreamImpl::OnStreamGenerationFailed(
    int request_id,
    content::MediaStreamRequestResult result) {
  DCHECK(CalledOnValidThread());
  DVLOG(1) << "MediaStreamImpl::OnStreamGenerationFailed("
           << request_id << ")";
  UserMediaRequestInfo* request_info = FindUserMediaRequestInfo(request_id);
  if (!request_info) {
    // This can happen if the request is canceled or the frame reloads while
    // MediaStreamDispatcher is processing the request.
    DVLOG(1) << "Request ID not found";
    return;
  }

  GetUserMediaRequestFailed(&request_info->request, result);
  DeleteUserMediaRequestInfo(request_info);
}

// Callback from MediaStreamDispatcher.
// The browser process has stopped a device used by a MediaStream.
void MediaStreamImpl::OnDeviceStopped(
    const std::string& label,
    const StreamDeviceInfo& device_info) {
  DCHECK(CalledOnValidThread());
  DVLOG(1) << "MediaStreamImpl::OnDeviceStopped("
           << "{device_id = " << device_info.device.id << "})";

  const blink::WebMediaStreamSource* source_ptr = FindLocalSource(device_info);
  if (!source_ptr) {
    // This happens if the same device is used in several guM requests or
    // if a user happens to stop a track from JS at the same time
    // as the underlying media device is unplugged from the system.
    return;
  }
  // By creating |source| it is guaranteed that the blink::WebMediaStreamSource
  // object is valid during the cleanup.
  blink::WebMediaStreamSource source(*source_ptr);
  StopLocalSource(source, false);

  for (LocalStreamSources::iterator device_it = local_sources_.begin();
       device_it != local_sources_.end(); ++device_it) {
    if (device_it->source.id() == source.id()) {
      local_sources_.erase(device_it);
      break;
    }
  }
}

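// Reuses an existing local source for |device| if one exists; otherwise
// initializes |webkit_source| and attaches a new native audio or video source
// object as its extra data.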
void MediaStreamImpl::InitializeSourceObject(
    const StreamDeviceInfo& device,
    blink::WebMediaStreamSource::Type type,
    const blink::WebMediaConstraints& constraints,
    blink::WebFrame* frame,
    blink::WebMediaStreamSource* webkit_source) {
  const blink::WebMediaStreamSource* existing_source =
      FindLocalSource(device);
  if (existing_source) {
    *webkit_source = *existing_source;
    DVLOG(1) << "Source already exists. Reusing source with id "
             << webkit_source->id().utf8();
    return;
  }

  webkit_source->initialize(
      base::UTF8ToUTF16(device.device.id),
      type,
      base::UTF8ToUTF16(device.device.name));

  DVLOG(1) << "Initialize source object:"
           << " id = " << webkit_source->id().utf8()
           << ", name = " << webkit_source->name().utf8();

  if (type == blink::WebMediaStreamSource::TypeVideo) {
    webkit_source->setExtraData(
        CreateVideoSource(
            device,
            base::Bind(&MediaStreamImpl::OnLocalSourceStopped, AsWeakPtr())));
  } else {
    DCHECK_EQ(blink::WebMediaStreamSource::TypeAudio, type);
    MediaStreamAudioSource* audio_source(
        new MediaStreamAudioSource(
            RenderViewObserver::routing_id(),
            device,
            base::Bind(&MediaStreamImpl::OnLocalSourceStopped, AsWeakPtr()),
            dependency_factory_));
    webkit_source->setExtraData(audio_source);
  }
  local_sources_.push_back(LocalStreamSource(frame, *webkit_source));
}

MediaStreamVideoSource* MediaStreamImpl::CreateVideoSource(
    const StreamDeviceInfo& device,
    const MediaStreamSource::SourceStoppedCallback& stop_callback) {
  return new content::MediaStreamVideoCapturerSource(
      device,
      stop_callback,
      new VideoCapturerDelegate(device));
}

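// Creates one video track per device in |devices|, reusing or initializing
// the underlying sources, and registers each track with |request| so the
// request can report when all tracks have started.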
void MediaStreamImpl::CreateVideoTracks(
    const StreamDeviceInfoArray& devices,
    const blink::WebMediaConstraints& constraints,
    blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks,
    UserMediaRequestInfo* request) {
  DCHECK_EQ(devices.size(), webkit_tracks->size());

  for (size_t i = 0; i < devices.size(); ++i) {
    blink::WebMediaStreamSource webkit_source;
    InitializeSourceObject(devices[i],
                           blink::WebMediaStreamSource::TypeVideo,
                           constraints,
                           request->frame,
                           &webkit_source);
    (*webkit_tracks)[i] =
        request->CreateAndStartVideoTrack(webkit_source, constraints,
                                          dependency_factory_);
  }
}

void MediaStreamImpl::CreateAudioTracks(
    const StreamDeviceInfoArray& devices,
    const blink::WebMediaConstraints& constraints,
    blink::WebVector<blink::WebMediaStreamTrack>* webkit_tracks,
    UserMediaRequestInfo* request) {
  DCHECK_EQ(devices.size(), webkit_tracks->size());

  // Log the device names for this request.
  for (StreamDeviceInfoArray::const_iterator it = devices.begin();
       it != devices.end(); ++it) {
    WebRtcLogMessage(base::StringPrintf(
        "Generated media stream for request id %d contains audio device name"
        " \"%s\"",
        request->request_id,
        it->device.name.c_str()));
  }

  StreamDeviceInfoArray overridden_audio_array = devices;
  if (!request->enable_automatic_output_device_selection) {
    // If the GetUserMedia request did not explicitly set the constraint
    // kMediaStreamRenderToAssociatedSink, the output device parameters must
    // be removed.
    for (StreamDeviceInfoArray::iterator it = overridden_audio_array.begin();
         it != overridden_audio_array.end(); ++it) {
      it->device.matched_output_device_id = "";
      it->device.matched_output = MediaStreamDevice::AudioDeviceParameters();
    }
  }

  for (size_t i = 0; i < overridden_audio_array.size(); ++i) {
    blink::WebMediaStreamSource webkit_source;
    InitializeSourceObject(overridden_audio_array[i],
                           blink::WebMediaStreamSource::TypeAudio,
                           constraints,
                           request->frame,
                           &webkit_source);
    (*webkit_tracks)[i].initialize(webkit_source);
    request->StartAudioTrack((*webkit_tracks)[i], constraints);
  }
}

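// Called when all tracks of a request have either started or failed to start.
// Resolves the pending WebUserMediaRequest accordingly and deletes the
// bookkeeping for the request.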
void MediaStreamImpl::OnCreateNativeTracksCompleted(
    UserMediaRequestInfo* request,
    content::MediaStreamRequestResult result) {
  DVLOG(1) << "MediaStreamImpl::OnCreateNativeTracksCompleted("
           << "{request_id = " << request->request_id << "} "
           << "{result = " << result << "})";
  if (result == content::MEDIA_DEVICE_OK)
    GetUserMediaRequestSucceeded(request->web_stream, &request->request);
  else
    GetUserMediaRequestFailed(&request->request, result);

  DeleteUserMediaRequestInfo(request);
}

void MediaStreamImpl::OnDevicesEnumerated(
    int request_id,
    const StreamDeviceInfoArray& device_array) {
  DVLOG(1) << "MediaStreamImpl::OnDevicesEnumerated("
           << request_id << ")";
  NOTIMPLEMENTED();
}

void MediaStreamImpl::OnDeviceOpened(
    int request_id,
    const std::string& label,
    const StreamDeviceInfo& video_device) {
  DVLOG(1) << "MediaStreamImpl::OnDeviceOpened("
           << request_id << ", " << label << ")";
  NOTIMPLEMENTED();
}

void MediaStreamImpl::OnDeviceOpenFailed(int request_id) {
  DVLOG(1) << "MediaStreamImpl::OnDeviceOpenFailed("
           << request_id << ")";
  NOTIMPLEMENTED();
}

void MediaStreamImpl::GetUserMediaRequestSucceeded(
    const blink::WebMediaStream& stream,
    blink::WebUserMediaRequest* request_info) {
  DVLOG(1) << "MediaStreamImpl::GetUserMediaRequestSucceeded";
  request_info->requestSucceeded(stream);
}

void MediaStreamImpl::GetUserMediaRequestFailed(
    blink::WebUserMediaRequest* request_info,
    content::MediaStreamRequestResult result) {
  switch (result) {
    case MEDIA_DEVICE_OK:
      NOTREACHED();
      break;
    case MEDIA_DEVICE_PERMISSION_DENIED:
      request_info->requestDenied();
      break;
    case MEDIA_DEVICE_PERMISSION_DISMISSED:
      request_info->requestFailedUASpecific("PermissionDismissedError");
      break;
    case MEDIA_DEVICE_INVALID_STATE:
      request_info->requestFailedUASpecific("InvalidStateError");
      break;
    case MEDIA_DEVICE_NO_HARDWARE:
      request_info->requestFailedUASpecific("DevicesNotFoundError");
      break;
    case MEDIA_DEVICE_INVALID_SECURITY_ORIGIN:
      request_info->requestFailedUASpecific("InvalidSecurityOriginError");
      break;
    case MEDIA_DEVICE_TAB_CAPTURE_FAILURE:
      request_info->requestFailedUASpecific("TabCaptureError");
      break;
    case MEDIA_DEVICE_SCREEN_CAPTURE_FAILURE:
      request_info->requestFailedUASpecific("ScreenCaptureError");
      break;
    case MEDIA_DEVICE_CAPTURE_FAILURE:
      request_info->requestFailedUASpecific("DeviceCaptureError");
      break;
    case MEDIA_DEVICE_TRACK_START_FAILURE:
      request_info->requestFailedUASpecific("TrackStartError");
      break;
    default:
      request_info->requestFailed();
      break;
  }
}

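// Returns the already-created local source that matches |device| on device
// id, type and session id, or NULL if no such source exists.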
const blink::WebMediaStreamSource* MediaStreamImpl::FindLocalSource(
    const StreamDeviceInfo& device) const {
  for (LocalStreamSources::const_iterator it = local_sources_.begin();
       it != local_sources_.end(); ++it) {
    MediaStreamSource* source =
        static_cast<MediaStreamSource*>(it->source.extraData());
    const StreamDeviceInfo& active_device = source->device_info();
    if (active_device.device.id == device.device.id &&
        active_device.device.type == device.device.type &&
        active_device.session_id == device.session_id) {
      return &it->source;
    }
  }
  return NULL;
}

MediaStreamImpl::UserMediaRequestInfo*
MediaStreamImpl::FindUserMediaRequestInfo(int request_id) {
  UserMediaRequests::iterator it = user_media_requests_.begin();
  for (; it != user_media_requests_.end(); ++it) {
    if ((*it)->request_id == request_id)
      return (*it);
  }
  return NULL;
}

MediaStreamImpl::UserMediaRequestInfo*
MediaStreamImpl::FindUserMediaRequestInfo(
    const blink::WebUserMediaRequest& request) {
  UserMediaRequests::iterator it = user_media_requests_.begin();
  for (; it != user_media_requests_.end(); ++it) {
    if ((*it)->request == request)
      return (*it);
  }
  return NULL;
}

void MediaStreamImpl::DeleteUserMediaRequestInfo(
    UserMediaRequestInfo* request) {
  UserMediaRequests::iterator it = user_media_requests_.begin();
  for (; it != user_media_requests_.end(); ++it) {
    if ((*it) == request) {
      user_media_requests_.erase(it);
      return;
    }
  }
  NOTREACHED();
}

void MediaStreamImpl::FrameDetached(blink::WebFrame* frame) {
  // Do the same thing as FrameWillClose.
  FrameWillClose(frame);
}

void MediaStreamImpl::FrameWillClose(blink::WebFrame* frame) {
  // Loop through all UserMediaRequests and find the requests that belong to
  // the frame that is being closed.
  UserMediaRequests::iterator request_it = user_media_requests_.begin();
  while (request_it != user_media_requests_.end()) {
    if ((*request_it)->frame == frame) {
      DVLOG(1) << "MediaStreamImpl::FrameWillClose: "
               << "Cancel user media request " << (*request_it)->request_id;
      // If the request is not generated, it means that a request
      // has been sent to the MediaStreamDispatcher to generate a stream
      // but MediaStreamDispatcher has not yet responded and we need to cancel
      // the request.
      if (!(*request_it)->generated) {
        media_stream_dispatcher_->CancelGenerateStream(
            (*request_it)->request_id, AsWeakPtr());
      }
      request_it = user_media_requests_.erase(request_it);
    } else {
      ++request_it;
    }
  }

  // Loop through all current local sources and stop the sources that were
  // created by the frame that will be closed.
  LocalStreamSources::iterator sources_it = local_sources_.begin();
  while (sources_it != local_sources_.end()) {
    if (sources_it->frame == frame) {
      StopLocalSource(sources_it->source, true);
      sources_it = local_sources_.erase(sources_it);
    } else {
      ++sources_it;
    }
  }
}

void MediaStreamImpl::OnLocalSourceStopped(
    const blink::WebMediaStreamSource& source) {
  DCHECK(CalledOnValidThread());
  DVLOG(1) << "MediaStreamImpl::OnLocalSourceStopped";

  bool device_found = false;
  for (LocalStreamSources::iterator device_it = local_sources_.begin();
       device_it != local_sources_.end(); ++device_it) {
    if (device_it->source.id() == source.id()) {
      device_found = true;
      local_sources_.erase(device_it);
      break;
    }
  }
  CHECK(device_found);

  MediaStreamSource* source_impl =
      static_cast<MediaStreamSource*>(source.extraData());
  media_stream_dispatcher_->StopStreamDevice(source_impl->device_info());
}

void MediaStreamImpl::StopLocalSource(
    const blink::WebMediaStreamSource& source,
    bool notify_dispatcher) {
  MediaStreamSource* source_impl =
      static_cast<MediaStreamSource*>(source.extraData());
  DVLOG(1) << "MediaStreamImpl::StopLocalSource("
           << "{device_id = " << source_impl->device_info().device.id << "})";

  if (notify_dispatcher)
    media_stream_dispatcher_->StopStreamDevice(source_impl->device_info());

  source_impl->ResetSourceStoppedCallback();
  source_impl->StopSource();
}

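// Creates a WebRtcAudioRenderer for a remote stream, using the authorized
// output device parameters when available and falling back to the default
// output device parameters otherwise.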
scoped_refptr<WebRtcAudioRenderer> MediaStreamImpl::CreateRemoteAudioRenderer(
    webrtc::MediaStreamInterface* stream,
    int render_frame_id) {
  if (stream->GetAudioTracks().empty())
    return NULL;

  DVLOG(1) << "MediaStreamImpl::CreateRemoteAudioRenderer label:"
           << stream->label();

  // TODO(tommi): Change the default value of session_id to be
  // StreamDeviceInfo::kNoId.  Also update AudioOutputDevice etc.
  int session_id = 0, sample_rate = 0, buffer_size = 0;
  if (!GetAuthorizedDeviceInfoForAudioRenderer(&session_id,
                                               &sample_rate,
                                               &buffer_size)) {
    GetDefaultOutputDeviceParams(&sample_rate, &buffer_size);
  }

  return new WebRtcAudioRenderer(
      stream, RenderViewObserver::routing_id(), render_frame_id, session_id,
      sample_rate, buffer_size);
}

scoped_refptr<WebRtcLocalAudioRenderer>
MediaStreamImpl::CreateLocalAudioRenderer(
    const blink::WebMediaStreamTrack& audio_track,
    int render_frame_id) {
  DVLOG(1) << "MediaStreamImpl::CreateLocalAudioRenderer";

  int session_id = 0, sample_rate = 0, buffer_size = 0;
  if (!GetAuthorizedDeviceInfoForAudioRenderer(&session_id,
                                               &sample_rate,
                                               &buffer_size)) {
    GetDefaultOutputDeviceParams(&sample_rate, &buffer_size);
  }

  // Create a new WebRtcLocalAudioRenderer instance and connect it to the
  // existing WebRtcAudioCapturer so that the renderer can use it as its
  // source.
  return new WebRtcLocalAudioRenderer(
      audio_track,
      RenderViewObserver::routing_id(),
      render_frame_id,
      session_id,
      buffer_size);
}

bool MediaStreamImpl::GetAuthorizedDeviceInfoForAudioRenderer(
    int* session_id,
    int* output_sample_rate,
    int* output_frames_per_buffer) {
  DCHECK(CalledOnValidThread());
  WebRtcAudioDeviceImpl* audio_device =
      dependency_factory_->GetWebRtcAudioDevice();
  if (!audio_device)
    return false;

  return audio_device->GetAuthorizedDeviceInfoForAudioRenderer(
      session_id, output_sample_rate, output_frames_per_buffer);
}

MediaStreamImpl::UserMediaRequestInfo::UserMediaRequestInfo(
    int request_id,
    blink::WebFrame* frame,
    const blink::WebUserMediaRequest& request,
    bool enable_automatic_output_device_selection)
    : request_id(request_id),
      generated(false),
      enable_automatic_output_device_selection(
          enable_automatic_output_device_selection),
      frame(frame),
      request(request),
      request_failed_(false) {
}

MediaStreamImpl::UserMediaRequestInfo::~UserMediaRequestInfo() {
  DVLOG(1) << "~UserMediaRequestInfo";
}

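// Connects |track| to its native audio source and remembers the source so
// that OnTrackStarted() is invoked once the source reports whether the track
// could be started.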
void MediaStreamImpl::UserMediaRequestInfo::StartAudioTrack(
    const blink::WebMediaStreamTrack& track,
    const blink::WebMediaConstraints& constraints) {
  DCHECK(track.source().type() == blink::WebMediaStreamSource::TypeAudio);
  MediaStreamAudioSource* native_source =
      static_cast<MediaStreamAudioSource*>(track.source().extraData());
  DCHECK(native_source);

  sources_.push_back(track.source());
  sources_waiting_for_callback_.push_back(native_source);
  native_source->AddTrack(
      track, constraints, base::Bind(
          &MediaStreamImpl::UserMediaRequestInfo::OnTrackStarted,
          AsWeakPtr()));
}

blink::WebMediaStreamTrack
MediaStreamImpl::UserMediaRequestInfo::CreateAndStartVideoTrack(
    const blink::WebMediaStreamSource& source,
    const blink::WebMediaConstraints& constraints,
    MediaStreamDependencyFactory* factory) {
  DCHECK(source.type() == blink::WebMediaStreamSource::TypeVideo);
  MediaStreamVideoSource* native_source =
      MediaStreamVideoSource::GetVideoSource(source);
  DCHECK(native_source);
  sources_.push_back(source);
  sources_waiting_for_callback_.push_back(native_source);
  return MediaStreamVideoTrack::CreateVideoTrack(
      native_source, constraints, base::Bind(
          &MediaStreamImpl::UserMediaRequestInfo::OnTrackStarted,
          AsWeakPtr()),
      true);
}

void MediaStreamImpl::UserMediaRequestInfo::CallbackOnTracksStarted(
    const ResourcesReady& callback) {
  DCHECK(ready_callback_.is_null());
  ready_callback_ = callback;
  CheckAllTracksStarted();
}

void MediaStreamImpl::UserMediaRequestInfo::OnTrackStarted(
    MediaStreamSource* source, bool success) {
  DVLOG(1) << "OnTrackStarted result " << success;
  std::vector<MediaStreamSource*>::iterator it =
      std::find(sources_waiting_for_callback_.begin(),
                sources_waiting_for_callback_.end(),
                source);
  DCHECK(it != sources_waiting_for_callback_.end());
  sources_waiting_for_callback_.erase(it);
  // All tracks must be started successfully. Otherwise the request is a
  // failure.
  if (!success)
    request_failed_ = true;
  CheckAllTracksStarted();
}

void MediaStreamImpl::UserMediaRequestInfo::CheckAllTracksStarted() {
  if (!ready_callback_.is_null() && sources_waiting_for_callback_.empty()) {
    ready_callback_.Run(
        this,
        request_failed_ ? MEDIA_DEVICE_TRACK_START_FAILURE : MEDIA_DEVICE_OK);
  }
}

bool MediaStreamImpl::UserMediaRequestInfo::IsSourceUsed(
    const blink::WebMediaStreamSource& source) const {
  for (std::vector<blink::WebMediaStreamSource>::const_iterator source_it =
           sources_.begin();
       source_it != sources_.end(); ++source_it) {
    if (source_it->id() == source.id())
      return true;
  }
  return false;
}

void MediaStreamImpl::UserMediaRequestInfo::RemoveSource(
    const blink::WebMediaStreamSource& source) {
  for (std::vector<blink::WebMediaStreamSource>::iterator it =
           sources_.begin();
       it != sources_.end(); ++it) {
    if (source.id() == it->id()) {
      sources_.erase(it);
      return;
    }
  }
}

}  // namespace content