// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/webrtc/peer_connection_dependency_factory.h"

#include <vector>

#include "base/command_line.h"
#include "base/strings/utf_string_conversions.h"
#include "base/synchronization/waitable_event.h"
#include "content/common/media/media_stream_messages.h"
#include "content/public/common/content_switches.h"
#include "content/renderer/media/media_stream.h"
#include "content/renderer/media/media_stream_audio_processor.h"
#include "content/renderer/media/media_stream_audio_processor_options.h"
#include "content/renderer/media/media_stream_audio_source.h"
#include "content/renderer/media/media_stream_video_source.h"
#include "content/renderer/media/media_stream_video_track.h"
#include "content/renderer/media/peer_connection_identity_service.h"
#include "content/renderer/media/rtc_media_constraints.h"
#include "content/renderer/media/rtc_peer_connection_handler.h"
#include "content/renderer/media/rtc_video_decoder_factory.h"
#include "content/renderer/media/rtc_video_encoder_factory.h"
#include "content/renderer/media/webaudio_capturer_source.h"
#include "content/renderer/media/webrtc/webrtc_local_audio_track_adapter.h"
#include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h"
#include "content/renderer/media/webrtc_audio_device_impl.h"
#include "content/renderer/media/webrtc_local_audio_track.h"
#include "content/renderer/media/webrtc_uma_histograms.h"
#include "content/renderer/p2p/ipc_network_manager.h"
#include "content/renderer/p2p/ipc_socket_factory.h"
#include "content/renderer/p2p/port_allocator.h"
#include "content/renderer/render_thread_impl.h"
#include "jingle/glue/thread_wrapper.h"
#include "media/filters/gpu_video_accelerator_factories.h"
#include "third_party/WebKit/public/platform/WebMediaConstraints.h"
#include "third_party/WebKit/public/platform/WebMediaStream.h"
#include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
#include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"
#include "third_party/WebKit/public/platform/WebURL.h"
#include "third_party/WebKit/public/web/WebDocument.h"
#include "third_party/WebKit/public/web/WebFrame.h"
#include "third_party/libjingle/source/talk/app/webrtc/mediaconstraintsinterface.h"

#if defined(USE_OPENSSL)
#include "third_party/webrtc/base/ssladapter.h"
#else
#include "net/socket/nss_ssl_util.h"
#endif

#if defined(OS_ANDROID)
#include "media/base/android/media_codec_bridge.h"
#endif

namespace content {

// Map of corresponding media constraints and platform effects.
struct {
  const char* constraint;
  const media::AudioParameters::PlatformEffectsMask effect;
} const kConstraintEffectMap[] = {
  { content::kMediaStreamAudioDucking,
    media::AudioParameters::DUCKING },
  { webrtc::MediaConstraintsInterface::kEchoCancellation,
    media::AudioParameters::ECHO_CANCELLER },
};

// If any platform effects are available, check them against the constraints.
// Disable effects to match false constraints, but if a constraint is true, set
// the constraint to false to later disable the software effect.
//
// This function may modify both |constraints| and |effects|.
void HarmonizeConstraintsAndEffects(RTCMediaConstraints* constraints,
                                    int* effects) {
  if (*effects != media::AudioParameters::NO_EFFECTS) {
    for (size_t i = 0; i < ARRAYSIZE_UNSAFE(kConstraintEffectMap); ++i) {
      bool value;
      size_t is_mandatory = 0;
      if (!webrtc::FindConstraint(constraints,
                                  kConstraintEffectMap[i].constraint,
                                  &value,
                                  &is_mandatory) || !value) {
        // If the constraint is false, or does not exist, disable the platform
        // effect.
        *effects &= ~kConstraintEffectMap[i].effect;
        DVLOG(1) << "Disabling platform effect: "
                 << kConstraintEffectMap[i].effect;
      } else if (*effects & kConstraintEffectMap[i].effect) {
        // If the constraint is true, leave the platform effect enabled, and
        // set the constraint to false to later disable the software effect.
        if (is_mandatory) {
          constraints->AddMandatory(kConstraintEffectMap[i].constraint,
              webrtc::MediaConstraintsInterface::kValueFalse, true);
        } else {
          constraints->AddOptional(kConstraintEffectMap[i].constraint,
              webrtc::MediaConstraintsInterface::kValueFalse, true);
        }
        DVLOG(1) << "Disabling constraint: "
                 << kConstraintEffectMap[i].constraint;
      } else if (kConstraintEffectMap[i].effect ==
                 media::AudioParameters::DUCKING && value && !is_mandatory) {
        // Special handling of the DUCKING flag that sets the optional
        // constraint to |false| to match what the device will support.
        constraints->AddOptional(kConstraintEffectMap[i].constraint,
            webrtc::MediaConstraintsInterface::kValueFalse, true);
        // No need to modify |effects| since the ducking flag is already off.
        DCHECK((*effects & media::AudioParameters::DUCKING) == 0);
      }
    }
  }
}

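// PortAllocatorFactory implementation that creates P2PPortAllocator instances
// configured with the STUN and TURN servers supplied for a given WebFrame.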
class P2PPortAllocatorFactory : public webrtc::PortAllocatorFactoryInterface {
 public:
  P2PPortAllocatorFactory(
      P2PSocketDispatcher* socket_dispatcher,
      rtc::NetworkManager* network_manager,
      rtc::PacketSocketFactory* socket_factory,
      blink::WebFrame* web_frame)
      : socket_dispatcher_(socket_dispatcher),
        network_manager_(network_manager),
        socket_factory_(socket_factory),
        web_frame_(web_frame) {
  }

  virtual cricket::PortAllocator* CreatePortAllocator(
      const std::vector<StunConfiguration>& stun_servers,
      const std::vector<TurnConfiguration>& turn_configurations) OVERRIDE {
    CHECK(web_frame_);
    P2PPortAllocator::Config config;
    for (size_t i = 0; i < stun_servers.size(); ++i) {
      config.stun_servers.insert(rtc::SocketAddress(
          stun_servers[i].server.hostname(),
          stun_servers[i].server.port()));
    }
    config.legacy_relay = false;
    for (size_t i = 0; i < turn_configurations.size(); ++i) {
      P2PPortAllocator::Config::RelayServerConfig relay_config;
      relay_config.server_address = turn_configurations[i].server.hostname();
      relay_config.port = turn_configurations[i].server.port();
      relay_config.username = turn_configurations[i].username;
      relay_config.password = turn_configurations[i].password;
      relay_config.transport_type = turn_configurations[i].transport_type;
      relay_config.secure = turn_configurations[i].secure;
      config.relays.push_back(relay_config);

      // Use turn servers as stun servers.
      config.stun_servers.insert(rtc::SocketAddress(
          turn_configurations[i].server.hostname(),
          turn_configurations[i].server.port()));
    }

    return new P2PPortAllocator(
        web_frame_, socket_dispatcher_.get(), network_manager_,
        socket_factory_, config);
  }

 protected:
  virtual ~P2PPortAllocatorFactory() {}

 private:
  scoped_refptr<P2PSocketDispatcher> socket_dispatcher_;
  // |network_manager_| and |socket_factory_| are weak references, owned by
  // PeerConnectionDependencyFactory.
  rtc::NetworkManager* network_manager_;
  rtc::PacketSocketFactory* socket_factory_;
  // Raw ptr to the WebFrame that created the P2PPortAllocatorFactory.
  blink::WebFrame* web_frame_;
};

PeerConnectionDependencyFactory::PeerConnectionDependencyFactory(
    P2PSocketDispatcher* p2p_socket_dispatcher)
    : network_manager_(NULL),
      p2p_socket_dispatcher_(p2p_socket_dispatcher),
      signaling_thread_(NULL),
      worker_thread_(NULL),
      chrome_worker_thread_("Chrome_libJingle_WorkerThread") {
}

PeerConnectionDependencyFactory::~PeerConnectionDependencyFactory() {
  CleanupPeerConnectionFactory();
  if (aec_dump_message_filter_)
    aec_dump_message_filter_->RemoveDelegate(this);
}

blink::WebRTCPeerConnectionHandler*
PeerConnectionDependencyFactory::CreateRTCPeerConnectionHandler(
    blink::WebRTCPeerConnectionHandlerClient* client) {
  // Save histogram data so we can see how much PeerConnection is used.
  // The histogram counts the number of calls to the JS API
  // webkitRTCPeerConnection.
  UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION);

  return new RTCPeerConnectionHandler(client, this);
}

bool PeerConnectionDependencyFactory::InitializeMediaStreamAudioSource(
    int render_view_id,
    const blink::WebMediaConstraints& audio_constraints,
    MediaStreamAudioSource* source_data) {
  DVLOG(1) << "InitializeMediaStreamAudioSource()";

  // Do additional source initialization if the audio source is a valid
  // microphone or tab audio.
  RTCMediaConstraints native_audio_constraints(audio_constraints);
  MediaAudioConstraints::ApplyFixedAudioConstraints(&native_audio_constraints);

  StreamDeviceInfo device_info = source_data->device_info();
  RTCMediaConstraints constraints = native_audio_constraints;
  // May modify both |constraints| and |effects|.
  HarmonizeConstraintsAndEffects(&constraints,
                                 &device_info.device.input.effects);

  scoped_refptr<WebRtcAudioCapturer> capturer(
      CreateAudioCapturer(render_view_id, device_info, audio_constraints,
                          source_data));
  if (!capturer.get()) {
    DLOG(WARNING) << "Failed to create the capturer for device "
        << device_info.device.id;
    // TODO(xians): Don't we need to check if source_observer is observing
    // something? If not, then it looks like we have a leak here.
    // OTOH, if it _is_ observing something, then the callback might
    // be called multiple times which is likely also a bug.
    return false;
  }
  source_data->SetAudioCapturer(capturer);

  // Creates a LocalAudioSource object which holds audio options.
  // TODO(xians): The option should apply to the track instead of the source.
  // TODO(perkj): Move audio constraints parsing to Chrome.
  // Currently there are a few constraints that are parsed by libjingle and
  // the state is set to ended if parsing fails.
  scoped_refptr<webrtc::AudioSourceInterface> rtc_source(
      CreateLocalAudioSource(&constraints).get());
  if (rtc_source->state() != webrtc::MediaSourceInterface::kLive) {
    DLOG(WARNING) << "Failed to create rtc LocalAudioSource.";
    return false;
  }
  source_data->SetLocalAudioSource(rtc_source);
  return true;
}

WebRtcVideoCapturerAdapter*
PeerConnectionDependencyFactory::CreateVideoCapturer(
    bool is_screencast) {
  // We need to make sure the libjingle thread wrappers have been created
  // before we can use an instance of a WebRtcVideoCapturerAdapter. This is
  // since the base class of WebRtcVideoCapturerAdapter is a
  // cricket::VideoCapturer and it uses the libjingle thread wrappers.
  if (!GetPcFactory())
    return NULL;
  return new WebRtcVideoCapturerAdapter(is_screencast);
}

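// Creates a libjingle video source that wraps |capturer| and applies the
// given media constraints to it.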
scoped_refptr<webrtc::VideoSourceInterface>
PeerConnectionDependencyFactory::CreateVideoSource(
    cricket::VideoCapturer* capturer,
    const blink::WebMediaConstraints& constraints) {
  RTCMediaConstraints webrtc_constraints(constraints);
  scoped_refptr<webrtc::VideoSourceInterface> source =
      GetPcFactory()->CreateVideoSource(capturer, &webrtc_constraints).get();
  return source;
}

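// Returns the PeerConnectionFactory, lazily creating it on first use.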
const scoped_refptr<webrtc::PeerConnectionFactoryInterface>&
PeerConnectionDependencyFactory::GetPcFactory() {
  if (!pc_factory_)
    CreatePeerConnectionFactory();
  CHECK(pc_factory_);
  return pc_factory_;
}

void PeerConnectionDependencyFactory::CreatePeerConnectionFactory() {
  DCHECK(!pc_factory_.get());
  DCHECK(!signaling_thread_);
  DCHECK(!worker_thread_);
  DCHECK(!network_manager_);
  DCHECK(!socket_factory_);
  DCHECK(!chrome_worker_thread_.IsRunning());

  DVLOG(1) << "PeerConnectionDependencyFactory::CreatePeerConnectionFactory()";

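  // The current thread, wrapped for libjingle, becomes the signaling thread.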
  jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
  jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);
  signaling_thread_ = jingle_glue::JingleThreadWrapper::current();
  CHECK(signaling_thread_);

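  // Start the dedicated libjingle worker thread and block until its thread
  // wrapper is available.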
  CHECK(chrome_worker_thread_.Start());

  base::WaitableEvent start_worker_event(true, false);
  chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &PeerConnectionDependencyFactory::InitializeWorkerThread,
      base::Unretained(this),
      &worker_thread_,
      &start_worker_event));
  start_worker_event.Wait();
  CHECK(worker_thread_);

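  // The IPC network manager must be created on the worker thread; it is also
  // deleted there in CleanupPeerConnectionFactory().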
  base::WaitableEvent create_network_manager_event(true, false);
  chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &PeerConnectionDependencyFactory::CreateIpcNetworkManagerOnWorkerThread,
      base::Unretained(this),
      &create_network_manager_event));
  create_network_manager_event.Wait();

  socket_factory_.reset(
      new IpcPacketSocketFactory(p2p_socket_dispatcher_.get()));

  // Init SSL, which will be needed by PeerConnection.
#if defined(USE_OPENSSL)
  if (!rtc::InitializeSSL()) {
    LOG(ERROR) << "Failed on InitializeSSL.";
    NOTREACHED();
    return;
  }
#else
  // TODO(ronghuawu): Replace this call with InitializeSSL.
  net::EnsureNSSSSLInit();
#endif

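  // Create hardware video codec factories backed by the GPU process unless
  // hardware decoding/encoding is disabled on the command line.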
  scoped_ptr<cricket::WebRtcVideoDecoderFactory> decoder_factory;
  scoped_ptr<cricket::WebRtcVideoEncoderFactory> encoder_factory;

  const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
  scoped_refptr<media::GpuVideoAcceleratorFactories> gpu_factories =
      RenderThreadImpl::current()->GetGpuFactories();
  if (!cmd_line->HasSwitch(switches::kDisableWebRtcHWDecoding)) {
    if (gpu_factories)
      decoder_factory.reset(new RTCVideoDecoderFactory(gpu_factories));
  }

  if (!cmd_line->HasSwitch(switches::kDisableWebRtcHWEncoding)) {
    if (gpu_factories)
      encoder_factory.reset(new RTCVideoEncoderFactory(gpu_factories));
  }

#if defined(OS_ANDROID)
  if (!media::MediaCodecBridge::SupportsSetParameters())
    encoder_factory.reset();
#endif

  EnsureWebRtcAudioDeviceImpl();

  scoped_refptr<webrtc::PeerConnectionFactoryInterface> factory(
      webrtc::CreatePeerConnectionFactory(worker_thread_,
                                          signaling_thread_,
                                          audio_device_.get(),
                                          encoder_factory.release(),
                                          decoder_factory.release()));
  CHECK(factory);

  pc_factory_ = factory;
  webrtc::PeerConnectionFactoryInterface::Options factory_options;
  factory_options.disable_sctp_data_channels = false;
  factory_options.disable_encryption =
      cmd_line->HasSwitch(switches::kDisableWebRtcEncryption);
  pc_factory_->SetOptions(factory_options);

  // TODO(xians): Remove the following code after kDisableAudioTrackProcessing
  // is removed.
  if (!MediaStreamAudioProcessor::IsAudioTrackProcessingEnabled()) {
    aec_dump_message_filter_ = AecDumpMessageFilter::Get();
    // In unit tests that do not create a message filter,
    // |aec_dump_message_filter_| will be NULL. We can just ignore that. Other
    // unit tests and browser tests ensure that we do get the filter when we
    // should.
    if (aec_dump_message_filter_)
      aec_dump_message_filter_->AddDelegate(this);
  }
}

bool PeerConnectionDependencyFactory::PeerConnectionFactoryCreated() {
  return pc_factory_.get() != NULL;
}

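// Creates a libjingle PeerConnection for |web_frame|, using a port allocator
// factory bound to the frame and an identity service scoped to the frame's
// origin.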
scoped_refptr<webrtc::PeerConnectionInterface>
PeerConnectionDependencyFactory::CreatePeerConnection(
    const webrtc::PeerConnectionInterface::RTCConfiguration& config,
    const webrtc::MediaConstraintsInterface* constraints,
    blink::WebFrame* web_frame,
    webrtc::PeerConnectionObserver* observer) {
  CHECK(web_frame);
  CHECK(observer);
  if (!GetPcFactory())
    return NULL;

  scoped_refptr<P2PPortAllocatorFactory> pa_factory =
      new rtc::RefCountedObject<P2PPortAllocatorFactory>(
          p2p_socket_dispatcher_.get(),
          network_manager_,
          socket_factory_.get(),
          web_frame);

  PeerConnectionIdentityService* identity_service =
      new PeerConnectionIdentityService(
          GURL(web_frame->document().url().spec()).GetOrigin());

  return GetPcFactory()->CreatePeerConnection(config,
                                              constraints,
                                              pa_factory.get(),
                                              identity_service,
                                              observer).get();
}

scoped_refptr<webrtc::MediaStreamInterface>
PeerConnectionDependencyFactory::CreateLocalMediaStream(
    const std::string& label) {
  return GetPcFactory()->CreateLocalMediaStream(label).get();
}

scoped_refptr<webrtc::AudioSourceInterface>
PeerConnectionDependencyFactory::CreateLocalAudioSource(
    const webrtc::MediaConstraintsInterface* constraints) {
  scoped_refptr<webrtc::AudioSourceInterface> source =
      GetPcFactory()->CreateAudioSource(constraints).get();
  return source;
}

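// Creates the native WebRtcLocalAudioTrack for |track| and passes its
// ownership to the blink track as extra data.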
void PeerConnectionDependencyFactory::CreateLocalAudioTrack(
    const blink::WebMediaStreamTrack& track) {
  blink::WebMediaStreamSource source = track.source();
  DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio);
  MediaStreamAudioSource* source_data =
      static_cast<MediaStreamAudioSource*>(source.extraData());

  scoped_refptr<WebAudioCapturerSource> webaudio_source;
  if (!source_data) {
    if (source.requiresAudioConsumer()) {
      // We're adding a WebAudio MediaStream.
      // Create a specific capturer for each WebAudio consumer.
      webaudio_source = CreateWebAudioSource(&source);
      source_data =
          static_cast<MediaStreamAudioSource*>(source.extraData());
    } else {
      // TODO(perkj): Implement support for sources from
      // remote MediaStreams.
      NOTIMPLEMENTED();
      return;
    }
  }

  // Creates an adapter to hold all the libjingle objects.
  scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter(
      WebRtcLocalAudioTrackAdapter::Create(track.id().utf8(),
                                           source_data->local_audio_source()));
  static_cast<webrtc::AudioTrackInterface*>(adapter.get())->set_enabled(
      track.isEnabled());

  // TODO(xians): Merge |source| to the capturer(). We can't do this today
  // because only one capturer() is supported while one |source| is created
  // for each audio track.
  scoped_ptr<WebRtcLocalAudioTrack> audio_track(
      new WebRtcLocalAudioTrack(adapter,
                                source_data->GetAudioCapturer(),
                                webaudio_source));

  StartLocalAudioTrack(audio_track.get());

  // Pass the ownership of the native local audio track to the blink track.
  blink::WebMediaStreamTrack writable_track = track;
  writable_track.setExtraData(audio_track.release());
}

void PeerConnectionDependencyFactory::StartLocalAudioTrack(
    WebRtcLocalAudioTrack* audio_track) {
  // Add the WebRtcAudioDevice as the sink to the local audio track.
  // TODO(xians): Remove the following line of code after the APM in WebRTC is
  // completely deprecated. See http://crbug.com/365672.
  if (!MediaStreamAudioProcessor::IsAudioTrackProcessingEnabled())
    audio_track->AddSink(GetWebRtcAudioDevice());

  // Start the audio track. This will hook the |audio_track| to the capturer
  // as the sink of the audio, and only start the source of the capturer if
  // it is the first audio track connecting to the capturer.
  audio_track->Start();
}

scoped_refptr<WebAudioCapturerSource>
PeerConnectionDependencyFactory::CreateWebAudioSource(
    blink::WebMediaStreamSource* source) {
  DVLOG(1) << "PeerConnectionDependencyFactory::CreateWebAudioSource()";

  scoped_refptr<WebAudioCapturerSource>
      webaudio_capturer_source(new WebAudioCapturerSource());
  MediaStreamAudioSource* source_data = new MediaStreamAudioSource();

  // Use the current default capturer for the WebAudio track so that the
  // WebAudio track can pass a valid delay value and |need_audio_processing|
  // flag to PeerConnection.
  // TODO(xians): Remove this after moving APM to Chrome.
  if (GetWebRtcAudioDevice()) {
    source_data->SetAudioCapturer(
        GetWebRtcAudioDevice()->GetDefaultCapturer());
  }

  // Create a LocalAudioSource object which holds audio options.
  // SetLocalAudioSource() affects core audio parts in third_party/libjingle.
  source_data->SetLocalAudioSource(CreateLocalAudioSource(NULL).get());
  source->setExtraData(source_data);

  // Replace the default source with WebAudio as source instead.
  source->addAudioConsumer(webaudio_capturer_source.get());

  return webaudio_capturer_source;
}

scoped_refptr<webrtc::VideoTrackInterface>
PeerConnectionDependencyFactory::CreateLocalVideoTrack(
    const std::string& id,
    webrtc::VideoSourceInterface* source) {
  return GetPcFactory()->CreateVideoTrack(id, source).get();
}

scoped_refptr<webrtc::VideoTrackInterface>
PeerConnectionDependencyFactory::CreateLocalVideoTrack(
    const std::string& id, cricket::VideoCapturer* capturer) {
  if (!capturer) {
    LOG(ERROR) << "CreateLocalVideoTrack called with null VideoCapturer.";
    return NULL;
  }

  // Create video source from the |capturer|.
  scoped_refptr<webrtc::VideoSourceInterface> source =
      GetPcFactory()->CreateVideoSource(capturer, NULL).get();

  // Create native track from the source.
  return GetPcFactory()->CreateVideoTrack(id, source.get()).get();
}

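// CreateSessionDescription() and CreateIceCandidate() are thin wrappers
// around the corresponding libjingle factory functions.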
webrtc::SessionDescriptionInterface*
PeerConnectionDependencyFactory::CreateSessionDescription(
    const std::string& type,
    const std::string& sdp,
    webrtc::SdpParseError* error) {
  return webrtc::CreateSessionDescription(type, sdp, error);
}

webrtc::IceCandidateInterface*
PeerConnectionDependencyFactory::CreateIceCandidate(
    const std::string& sdp_mid,
    int sdp_mline_index,
    const std::string& sdp) {
  return webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, sdp);
}

WebRtcAudioDeviceImpl*
PeerConnectionDependencyFactory::GetWebRtcAudioDevice() {
  return audio_device_.get();
}

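// Runs on |chrome_worker_thread_|. Wraps the thread for libjingle, returns
// the wrapper through |thread| and signals |event| when done.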
void PeerConnectionDependencyFactory::InitializeWorkerThread(
    rtc::Thread** thread,
    base::WaitableEvent* event) {
  jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
  jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);
  *thread = jingle_glue::JingleThreadWrapper::current();
  event->Signal();
}

void PeerConnectionDependencyFactory::CreateIpcNetworkManagerOnWorkerThread(
    base::WaitableEvent* event) {
  DCHECK_EQ(base::MessageLoop::current(), chrome_worker_thread_.message_loop());
  network_manager_ = new IpcNetworkManager(p2p_socket_dispatcher_.get());
  event->Signal();
}

void PeerConnectionDependencyFactory::DeleteIpcNetworkManager() {
  DCHECK_EQ(base::MessageLoop::current(), chrome_worker_thread_.message_loop());
  delete network_manager_;
  network_manager_ = NULL;
}

void PeerConnectionDependencyFactory::CleanupPeerConnectionFactory() {
  pc_factory_ = NULL;
  if (network_manager_) {
    // The network manager needs to free its resources on the thread they were
    // created on, which is the worker thread.
    if (chrome_worker_thread_.IsRunning()) {
      chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
          &PeerConnectionDependencyFactory::DeleteIpcNetworkManager,
          base::Unretained(this)));
      // Stopping the thread will wait until all tasks have been
      // processed before returning. We wait for the above task to finish
      // before letting the function continue to avoid any potential race
      // issues.
      chrome_worker_thread_.Stop();
    } else {
      NOTREACHED() << "Worker thread not running.";
    }
  }
}

scoped_refptr<WebRtcAudioCapturer>
PeerConnectionDependencyFactory::CreateAudioCapturer(
    int render_view_id,
    const StreamDeviceInfo& device_info,
    const blink::WebMediaConstraints& constraints,
    MediaStreamAudioSource* audio_source) {
  // TODO(xians): Handle the cases when gUM is called without a proper render
  // view, for example, by an extension.
  DCHECK_GE(render_view_id, 0);

  EnsureWebRtcAudioDeviceImpl();
  DCHECK(GetWebRtcAudioDevice());
  return WebRtcAudioCapturer::CreateCapturer(render_view_id, device_info,
                                             constraints,
                                             GetWebRtcAudioDevice(),
                                             audio_source);
}

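// Wraps |native_track| in a content::MediaStreamTrack and attaches it to
// |webkit_track| as extra data.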
void PeerConnectionDependencyFactory::AddNativeAudioTrackToBlinkTrack(
    webrtc::MediaStreamTrackInterface* native_track,
    const blink::WebMediaStreamTrack& webkit_track,
    bool is_local_track) {
  DCHECK(!webkit_track.isNull() && !webkit_track.extraData());
  DCHECK_EQ(blink::WebMediaStreamSource::TypeAudio,
            webkit_track.source().type());
  blink::WebMediaStreamTrack track = webkit_track;

  DVLOG(1) << "AddNativeAudioTrackToBlinkTrack() audio";
  track.setExtraData(
      new MediaStreamTrack(
          static_cast<webrtc::AudioTrackInterface*>(native_track),
          is_local_track));
}

scoped_refptr<base::MessageLoopProxy>
PeerConnectionDependencyFactory::GetWebRtcWorkerThread() const {
  DCHECK(CalledOnValidThread());
  return chrome_worker_thread_.message_loop_proxy();
}

void PeerConnectionDependencyFactory::OnAecDumpFile(
    const IPC::PlatformFileForTransit& file_handle) {
  DCHECK(CalledOnValidThread());
  DCHECK(!MediaStreamAudioProcessor::IsAudioTrackProcessingEnabled());
  DCHECK(PeerConnectionFactoryCreated());

  base::File file = IPC::PlatformFileForTransitToFile(file_handle);
  DCHECK(file.IsValid());

  // |pc_factory_| always takes ownership of |aec_dump_file|. If StartAecDump()
  // fails, |aec_dump_file| will be closed.
  if (!GetPcFactory()->StartAecDump(file.TakePlatformFile()))
    VLOG(1) << "Could not start AEC dump.";
}

void PeerConnectionDependencyFactory::OnDisableAecDump() {
  DCHECK(CalledOnValidThread());
  DCHECK(!MediaStreamAudioProcessor::IsAudioTrackProcessingEnabled());
  // Do nothing. We never disable AEC dump for the non-track-processing case.
}

void PeerConnectionDependencyFactory::OnIpcClosing() {
  DCHECK(CalledOnValidThread());
  aec_dump_message_filter_ = NULL;
}

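// Lazily creates the WebRtcAudioDeviceImpl that is shared by the audio
// capturers and the PeerConnectionFactory.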
void PeerConnectionDependencyFactory::EnsureWebRtcAudioDeviceImpl() {
  if (audio_device_)
    return;

  audio_device_ = new WebRtcAudioDeviceImpl();
}

}  // namespace content