src/content/renderer/media/webrtc/peer_connection_dependency_factory.cc
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/renderer/media/webrtc/peer_connection_dependency_factory.h"

#include <vector>

#include "base/command_line.h"
#include "base/strings/utf_string_conversions.h"
#include "base/synchronization/waitable_event.h"
#include "content/common/media/media_stream_messages.h"
#include "content/public/common/content_switches.h"
#include "content/renderer/media/media_stream.h"
#include "content/renderer/media/media_stream_audio_processor.h"
#include "content/renderer/media/media_stream_audio_processor_options.h"
#include "content/renderer/media/media_stream_audio_source.h"
#include "content/renderer/media/media_stream_video_source.h"
#include "content/renderer/media/media_stream_video_track.h"
#include "content/renderer/media/peer_connection_identity_service.h"
#include "content/renderer/media/rtc_media_constraints.h"
#include "content/renderer/media/rtc_peer_connection_handler.h"
#include "content/renderer/media/rtc_video_decoder_factory.h"
#include "content/renderer/media/rtc_video_encoder_factory.h"
#include "content/renderer/media/webaudio_capturer_source.h"
#include "content/renderer/media/webrtc/webrtc_local_audio_track_adapter.h"
#include "content/renderer/media/webrtc/webrtc_video_capturer_adapter.h"
#include "content/renderer/media/webrtc_audio_device_impl.h"
#include "content/renderer/media/webrtc_local_audio_track.h"
#include "content/renderer/media/webrtc_logging.h"
#include "content/renderer/media/webrtc_uma_histograms.h"
#include "content/renderer/p2p/ipc_network_manager.h"
#include "content/renderer/p2p/ipc_socket_factory.h"
#include "content/renderer/p2p/port_allocator.h"
#include "content/renderer/render_thread_impl.h"
#include "jingle/glue/thread_wrapper.h"
#include "media/filters/gpu_video_accelerator_factories.h"
#include "third_party/WebKit/public/platform/WebMediaConstraints.h"
#include "third_party/WebKit/public/platform/WebMediaStream.h"
#include "third_party/WebKit/public/platform/WebMediaStreamSource.h"
#include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"
#include "third_party/WebKit/public/platform/WebURL.h"
#include "third_party/WebKit/public/web/WebDocument.h"
#include "third_party/WebKit/public/web/WebFrame.h"
#include "third_party/libjingle/source/talk/app/webrtc/mediaconstraintsinterface.h"

#if defined(USE_OPENSSL)
#include "third_party/webrtc/base/ssladapter.h"
#else
#include "net/socket/nss_ssl_util.h"
#endif

#if defined(OS_ANDROID)
#include "media/base/android/media_codec_bridge.h"
#endif

namespace content {

// Map of corresponding media constraints and platform effects.
struct {
  const char* constraint;
  const media::AudioParameters::PlatformEffectsMask effect;
} const kConstraintEffectMap[] = {
  { content::kMediaStreamAudioDucking,
    media::AudioParameters::DUCKING },
  { webrtc::MediaConstraintsInterface::kEchoCancellation,
    media::AudioParameters::ECHO_CANCELLER },
};

// If any platform effects are available, check them against the constraints.
// Disable effects to match false constraints, but if a constraint is true, set
// the constraint to false to later disable the software effect.
//
// This function may modify both |constraints| and |effects|.
void HarmonizeConstraintsAndEffects(RTCMediaConstraints* constraints,
                                    int* effects) {
  if (*effects != media::AudioParameters::NO_EFFECTS) {
    for (size_t i = 0; i < arraysize(kConstraintEffectMap); ++i) {
      bool value;
      size_t is_mandatory = 0;
      if (!webrtc::FindConstraint(constraints,
                                  kConstraintEffectMap[i].constraint,
                                  &value,
                                  &is_mandatory) || !value) {
        // If the constraint is false, or does not exist, disable the platform
        // effect.
        *effects &= ~kConstraintEffectMap[i].effect;
        DVLOG(1) << "Disabling platform effect: "
                 << kConstraintEffectMap[i].effect;
      } else if (*effects & kConstraintEffectMap[i].effect) {
        // If the constraint is true, leave the platform effect enabled, and
        // set the constraint to false to later disable the software effect.
        if (is_mandatory) {
          constraints->AddMandatory(kConstraintEffectMap[i].constraint,
              webrtc::MediaConstraintsInterface::kValueFalse, true);
        } else {
          constraints->AddOptional(kConstraintEffectMap[i].constraint,
              webrtc::MediaConstraintsInterface::kValueFalse, true);
        }
        DVLOG(1) << "Disabling constraint: "
                 << kConstraintEffectMap[i].constraint;
      } else if (kConstraintEffectMap[i].effect ==
                 media::AudioParameters::DUCKING && value && !is_mandatory) {
        // Special handling of the DUCKING flag that sets the optional
        // constraint to |false| to match what the device will support.
        constraints->AddOptional(kConstraintEffectMap[i].constraint,
            webrtc::MediaConstraintsInterface::kValueFalse, true);
        // No need to modify |effects| since the ducking flag is already off.
        DCHECK((*effects & media::AudioParameters::DUCKING) == 0);
      }
    }
  }
}

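// Adapts webrtc::PortAllocatorFactoryInterface to Chrome's P2PPortAllocator:
// converts the STUN/TURN configurations supplied by libjingle into a
// P2PPortAllocator::Config and builds the allocator on top of the renderer's
// P2P socket dispatcher.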
class P2PPortAllocatorFactory : public webrtc::PortAllocatorFactoryInterface {
 public:
  P2PPortAllocatorFactory(
      P2PSocketDispatcher* socket_dispatcher,
      rtc::NetworkManager* network_manager,
      rtc::PacketSocketFactory* socket_factory)
      : socket_dispatcher_(socket_dispatcher),
        network_manager_(network_manager),
        socket_factory_(socket_factory) {
  }

  cricket::PortAllocator* CreatePortAllocator(
      const std::vector<StunConfiguration>& stun_servers,
      const std::vector<TurnConfiguration>& turn_configurations) override {
    P2PPortAllocator::Config config;
    for (size_t i = 0; i < stun_servers.size(); ++i) {
      config.stun_servers.insert(rtc::SocketAddress(
          stun_servers[i].server.hostname(),
          stun_servers[i].server.port()));
    }
    for (size_t i = 0; i < turn_configurations.size(); ++i) {
      P2PPortAllocator::Config::RelayServerConfig relay_config;
      relay_config.server_address = turn_configurations[i].server.hostname();
      relay_config.port = turn_configurations[i].server.port();
      relay_config.username = turn_configurations[i].username;
      relay_config.password = turn_configurations[i].password;
      relay_config.transport_type = turn_configurations[i].transport_type;
      relay_config.secure = turn_configurations[i].secure;
      config.relays.push_back(relay_config);

      // Use turn servers as stun servers.
      config.stun_servers.insert(rtc::SocketAddress(
          turn_configurations[i].server.hostname(),
          turn_configurations[i].server.port()));
    }

    return new P2PPortAllocator(
        socket_dispatcher_.get(), network_manager_, socket_factory_, config);
  }

 protected:
  ~P2PPortAllocatorFactory() override {}

 private:
  scoped_refptr<P2PSocketDispatcher> socket_dispatcher_;
  // |network_manager_| and |socket_factory_| are weak references, owned by
  // PeerConnectionDependencyFactory.
  rtc::NetworkManager* network_manager_;
  rtc::PacketSocketFactory* socket_factory_;
};

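// The libjingle signaling/worker threads and the PeerConnectionFactory are
// created lazily in CreatePeerConnectionFactory() the first time
// GetPcFactory() is called.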
PeerConnectionDependencyFactory::PeerConnectionDependencyFactory(
    P2PSocketDispatcher* p2p_socket_dispatcher)
    : network_manager_(NULL),
      p2p_socket_dispatcher_(p2p_socket_dispatcher),
      signaling_thread_(NULL),
      worker_thread_(NULL),
      chrome_signaling_thread_("Chrome_libJingle_Signaling"),
      chrome_worker_thread_("Chrome_libJingle_WorkerThread") {
}

PeerConnectionDependencyFactory::~PeerConnectionDependencyFactory() {
  CleanupPeerConnectionFactory();
  if (aec_dump_message_filter_.get())
    aec_dump_message_filter_->RemoveDelegate(this);
}

blink::WebRTCPeerConnectionHandler*
PeerConnectionDependencyFactory::CreateRTCPeerConnectionHandler(
    blink::WebRTCPeerConnectionHandlerClient* client) {
  // Save histogram data so we can see how much PeerConnection is used.
  // The histogram counts the number of calls to the JS API
  // webKitRTCPeerConnection.
  UpdateWebRTCMethodCount(WEBKIT_RTC_PEER_CONNECTION);

  return new RTCPeerConnectionHandler(client, this);
}

bool PeerConnectionDependencyFactory::InitializeMediaStreamAudioSource(
    int render_view_id,
    const blink::WebMediaConstraints& audio_constraints,
    MediaStreamAudioSource* source_data) {
  DVLOG(1) << "InitializeMediaStreamAudioSource()";

  // Do additional source initialization if the audio source is a valid
  // microphone or tab audio.
  RTCMediaConstraints native_audio_constraints(audio_constraints);
  MediaAudioConstraints::ApplyFixedAudioConstraints(&native_audio_constraints);

  StreamDeviceInfo device_info = source_data->device_info();
  RTCMediaConstraints constraints = native_audio_constraints;
  // May modify both |constraints| and |effects|.
  HarmonizeConstraintsAndEffects(&constraints,
                                 &device_info.device.input.effects);

  scoped_refptr<WebRtcAudioCapturer> capturer(
      CreateAudioCapturer(render_view_id, device_info, audio_constraints,
                          source_data));
  if (!capturer.get()) {
    const std::string log_string =
        "PCDF::InitializeMediaStreamAudioSource: failed to create capturer";
    WebRtcLogMessage(log_string);
    DVLOG(1) << log_string;
    // TODO(xians): Don't we need to check if source_observer is observing
    // something? If not, then it looks like we have a leak here.
    // OTOH, if it _is_ observing something, then the callback might
    // be called multiple times which is likely also a bug.
    return false;
  }
  source_data->SetAudioCapturer(capturer.get());

  // Create a LocalAudioSource object which holds audio options.
  // TODO(xians): The option should apply to the track instead of the source.
  // TODO(perkj): Move audio constraints parsing to Chrome.
  // Currently there are a few constraints that are parsed by libjingle and
  // the state is set to ended if parsing fails.
  scoped_refptr<webrtc::AudioSourceInterface> rtc_source(
      CreateLocalAudioSource(&constraints).get());
  if (rtc_source->state() != webrtc::MediaSourceInterface::kLive) {
    DLOG(WARNING) << "Failed to create rtc LocalAudioSource.";
    return false;
  }
  source_data->SetLocalAudioSource(rtc_source.get());
  return true;
}

WebRtcVideoCapturerAdapter*
PeerConnectionDependencyFactory::CreateVideoCapturer(
    bool is_screencast) {
  // We need to make sure the libjingle thread wrappers have been created
  // before we can use an instance of a WebRtcVideoCapturerAdapter. This is
  // since the base class of WebRtcVideoCapturerAdapter is a
  // cricket::VideoCapturer and it uses the libjingle thread wrappers.
  if (!GetPcFactory().get())
    return NULL;
  return new WebRtcVideoCapturerAdapter(is_screencast);
}

scoped_refptr<webrtc::VideoSourceInterface>
PeerConnectionDependencyFactory::CreateVideoSource(
    cricket::VideoCapturer* capturer,
    const blink::WebMediaConstraints& constraints) {
  RTCMediaConstraints webrtc_constraints(constraints);
  scoped_refptr<webrtc::VideoSourceInterface> source =
      GetPcFactory()->CreateVideoSource(capturer, &webrtc_constraints).get();
  return source;
}

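// Returns the shared webrtc::PeerConnectionFactory, lazily creating it (and
// the libjingle signaling/worker threads) on first use.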
const scoped_refptr<webrtc::PeerConnectionFactoryInterface>&
PeerConnectionDependencyFactory::GetPcFactory() {
  if (!pc_factory_.get())
    CreatePeerConnectionFactory();
  CHECK(pc_factory_.get());
  return pc_factory_;
}

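// Bootstraps everything the PeerConnectionFactory needs: wraps the current
// message loop for libjingle, starts the Chrome signaling and worker threads,
// creates the IPC-based network manager on the worker thread, initializes
// SSL, and finally builds the factory on the signaling thread.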
void PeerConnectionDependencyFactory::CreatePeerConnectionFactory() {
  DCHECK(!pc_factory_.get());
  DCHECK(!signaling_thread_);
  DCHECK(!worker_thread_);
  DCHECK(!network_manager_);
  DCHECK(!socket_factory_);
  DCHECK(!chrome_signaling_thread_.IsRunning());
  DCHECK(!chrome_worker_thread_.IsRunning());

  DVLOG(1) << "PeerConnectionDependencyFactory::CreatePeerConnectionFactory()";

  // To allow sending to the signaling/worker threads.
  jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
  jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);

  CHECK(chrome_signaling_thread_.Start());
  CHECK(chrome_worker_thread_.Start());

  base::WaitableEvent start_worker_event(true, false);
  chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &PeerConnectionDependencyFactory::InitializeWorkerThread,
      base::Unretained(this),
      &worker_thread_,
      &start_worker_event));

  base::WaitableEvent create_network_manager_event(true, false);
  chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &PeerConnectionDependencyFactory::CreateIpcNetworkManagerOnWorkerThread,
      base::Unretained(this),
      &create_network_manager_event));

  start_worker_event.Wait();
  create_network_manager_event.Wait();

  CHECK(worker_thread_);

  // Init SSL, which will be needed by PeerConnection.
#if defined(USE_OPENSSL)
  if (!rtc::InitializeSSL()) {
    LOG(ERROR) << "Failed on InitializeSSL.";
    NOTREACHED();
    return;
  }
#else
  // TODO(ronghuawu): Replace this call with InitializeSSL.
  net::EnsureNSSSSLInit();
#endif

  base::WaitableEvent start_signaling_event(true, false);
  chrome_signaling_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
      &PeerConnectionDependencyFactory::InitializeSignalingThread,
      base::Unretained(this),
      RenderThreadImpl::current()->GetGpuFactories(),
      &start_signaling_event));

  // TODO(xians): Remove the following code after kDisableAudioTrackProcessing
  // is removed.
  if (!MediaStreamAudioProcessor::IsAudioTrackProcessingEnabled()) {
    aec_dump_message_filter_ = AecDumpMessageFilter::Get();
    // In unit tests that do not create a message filter,
    // |aec_dump_message_filter_| will be NULL. We can just ignore that; other
    // unit tests and browser tests ensure that we do get the filter when we
    // should.
    if (aec_dump_message_filter_.get())
      aec_dump_message_filter_->AddDelegate(this);
  }

  start_signaling_event.Wait();
  CHECK(signaling_thread_);
}

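// Runs on |chrome_signaling_thread_|. Wraps it as a libjingle thread, creates
// the IPC packet socket factory and (when GPU factories are available) the
// hardware video encoder/decoder factories, and then builds the
// PeerConnectionFactory.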
void PeerConnectionDependencyFactory::InitializeSignalingThread(
    const scoped_refptr<media::GpuVideoAcceleratorFactories>& gpu_factories,
    base::WaitableEvent* event) {
  DCHECK(chrome_signaling_thread_.task_runner()->BelongsToCurrentThread());
  DCHECK(worker_thread_);
  DCHECK(p2p_socket_dispatcher_.get());

  jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
  jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);
  signaling_thread_ = jingle_glue::JingleThreadWrapper::current();

  EnsureWebRtcAudioDeviceImpl();

  socket_factory_.reset(
      new IpcPacketSocketFactory(p2p_socket_dispatcher_.get()));

  scoped_ptr<cricket::WebRtcVideoDecoderFactory> decoder_factory;
  scoped_ptr<cricket::WebRtcVideoEncoderFactory> encoder_factory;

  const CommandLine* cmd_line = CommandLine::ForCurrentProcess();
  if (gpu_factories.get()) {
    if (!cmd_line->HasSwitch(switches::kDisableWebRtcHWDecoding))
      decoder_factory.reset(new RTCVideoDecoderFactory(gpu_factories));

    if (!cmd_line->HasSwitch(switches::kDisableWebRtcHWEncoding))
      encoder_factory.reset(new RTCVideoEncoderFactory(gpu_factories));
  }

#if defined(OS_ANDROID)
  if (!media::MediaCodecBridge::SupportsSetParameters())
    encoder_factory.reset();
#endif

  pc_factory_ = webrtc::CreatePeerConnectionFactory(
      worker_thread_, signaling_thread_, audio_device_.get(),
      encoder_factory.release(), decoder_factory.release());
  CHECK(pc_factory_.get());

  webrtc::PeerConnectionFactoryInterface::Options factory_options;
  factory_options.disable_sctp_data_channels = false;
  factory_options.disable_encryption =
      cmd_line->HasSwitch(switches::kDisableWebRtcEncryption);
  pc_factory_->SetOptions(factory_options);

  event->Signal();
}

bool PeerConnectionDependencyFactory::PeerConnectionFactoryCreated() {
  return pc_factory_.get() != NULL;
}

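// Creates a native webrtc::PeerConnection. The port allocator factory is
// bound to this renderer's P2P socket dispatcher and network manager, and the
// DTLS identity service is scoped to the origin of |web_frame|'s document.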
scoped_refptr<webrtc::PeerConnectionInterface>
PeerConnectionDependencyFactory::CreatePeerConnection(
    const webrtc::PeerConnectionInterface::RTCConfiguration& config,
    const webrtc::MediaConstraintsInterface* constraints,
    blink::WebFrame* web_frame,
    webrtc::PeerConnectionObserver* observer) {
  CHECK(web_frame);
  CHECK(observer);
  if (!GetPcFactory().get())
    return NULL;

  scoped_refptr<P2PPortAllocatorFactory> pa_factory =
      new rtc::RefCountedObject<P2PPortAllocatorFactory>(
          p2p_socket_dispatcher_.get(),
          network_manager_,
          socket_factory_.get());

  PeerConnectionIdentityService* identity_service =
      new PeerConnectionIdentityService(
          GURL(web_frame->document().url().spec()).GetOrigin());

  return GetPcFactory()->CreatePeerConnection(config,
                                              constraints,
                                              pa_factory.get(),
                                              identity_service,
                                              observer).get();
}

scoped_refptr<webrtc::MediaStreamInterface>
PeerConnectionDependencyFactory::CreateLocalMediaStream(
    const std::string& label) {
  return GetPcFactory()->CreateLocalMediaStream(label).get();
}

scoped_refptr<webrtc::AudioSourceInterface>
PeerConnectionDependencyFactory::CreateLocalAudioSource(
    const webrtc::MediaConstraintsInterface* constraints) {
  scoped_refptr<webrtc::AudioSourceInterface> source =
      GetPcFactory()->CreateAudioSource(constraints).get();
  return source;
}

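// Creates the native WebRTC audio track backing a blink audio track. For
// microphone/tab capture the existing MediaStreamAudioSource is used; for
// WebAudio sources a dedicated WebAudioCapturerSource is created first.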
void PeerConnectionDependencyFactory::CreateLocalAudioTrack(
    const blink::WebMediaStreamTrack& track) {
  blink::WebMediaStreamSource source = track.source();
  DCHECK_EQ(source.type(), blink::WebMediaStreamSource::TypeAudio);
  MediaStreamAudioSource* source_data =
      static_cast<MediaStreamAudioSource*>(source.extraData());

  scoped_refptr<WebAudioCapturerSource> webaudio_source;
  if (!source_data) {
    if (source.requiresAudioConsumer()) {
      // We're adding a WebAudio MediaStream.
      // Create a specific capturer for each WebAudio consumer.
      webaudio_source = CreateWebAudioSource(&source);
      source_data =
          static_cast<MediaStreamAudioSource*>(source.extraData());
    } else {
      // TODO(perkj): Implement support for sources from
      // remote MediaStreams.
      NOTIMPLEMENTED();
      return;
    }
  }

  // Create an adapter to hold all the libjingle objects.
  scoped_refptr<WebRtcLocalAudioTrackAdapter> adapter(
      WebRtcLocalAudioTrackAdapter::Create(track.id().utf8(),
                                           source_data->local_audio_source()));
  static_cast<webrtc::AudioTrackInterface*>(adapter.get())->set_enabled(
      track.isEnabled());

  // TODO(xians): Merge |source| into the capturer(). We can't do this today
  // because only one capturer() is supported while one |source| is created
  // for each audio track.
  scoped_ptr<WebRtcLocalAudioTrack> audio_track(new WebRtcLocalAudioTrack(
      adapter.get(), source_data->GetAudioCapturer(), webaudio_source.get()));

  StartLocalAudioTrack(audio_track.get());

  // Pass ownership of the native local audio track to the blink track.
  blink::WebMediaStreamTrack writable_track = track;
  writable_track.setExtraData(audio_track.release());
}

void PeerConnectionDependencyFactory::StartLocalAudioTrack(
    WebRtcLocalAudioTrack* audio_track) {
  // Add the WebRtcAudioDevice as the sink to the local audio track.
  // TODO(xians): Remove the following line of code after the APM in WebRTC is
  // completely deprecated. See http://crbug/365672.
  if (!MediaStreamAudioProcessor::IsAudioTrackProcessingEnabled())
    audio_track->AddSink(GetWebRtcAudioDevice());

  // Start the audio track. This will hook the |audio_track| to the capturer
  // as the sink of the audio, and only start the source of the capturer if
  // it is the first audio track connecting to the capturer.
  audio_track->Start();
}

scoped_refptr<WebAudioCapturerSource>
PeerConnectionDependencyFactory::CreateWebAudioSource(
    blink::WebMediaStreamSource* source) {
  DVLOG(1) << "PeerConnectionDependencyFactory::CreateWebAudioSource()";

  scoped_refptr<WebAudioCapturerSource>
      webaudio_capturer_source(new WebAudioCapturerSource());
  MediaStreamAudioSource* source_data = new MediaStreamAudioSource();

  // Use the current default capturer for the WebAudio track so that the
  // WebAudio track can pass a valid delay value and |need_audio_processing|
  // flag to PeerConnection.
  // TODO(xians): Remove this after moving APM to Chrome.
  if (GetWebRtcAudioDevice()) {
    source_data->SetAudioCapturer(
        GetWebRtcAudioDevice()->GetDefaultCapturer());
  }

  // Create a LocalAudioSource object which holds audio options.
  // SetLocalAudioSource() affects core audio parts in third_party/Libjingle.
  source_data->SetLocalAudioSource(CreateLocalAudioSource(NULL).get());
  source->setExtraData(source_data);

  // Replace the default source with the WebAudio source instead.
  source->addAudioConsumer(webaudio_capturer_source.get());

  return webaudio_capturer_source;
}

scoped_refptr<webrtc::VideoTrackInterface>
PeerConnectionDependencyFactory::CreateLocalVideoTrack(
    const std::string& id,
    webrtc::VideoSourceInterface* source) {
  return GetPcFactory()->CreateVideoTrack(id, source).get();
}

scoped_refptr<webrtc::VideoTrackInterface>
PeerConnectionDependencyFactory::CreateLocalVideoTrack(
    const std::string& id, cricket::VideoCapturer* capturer) {
  if (!capturer) {
    LOG(ERROR) << "CreateLocalVideoTrack called with null VideoCapturer.";
    return NULL;
  }

  // Create a video source from the |capturer|.
  scoped_refptr<webrtc::VideoSourceInterface> source =
      GetPcFactory()->CreateVideoSource(capturer, NULL).get();

  // Create a native track from the source.
  return GetPcFactory()->CreateVideoTrack(id, source.get()).get();
}

webrtc::SessionDescriptionInterface*
PeerConnectionDependencyFactory::CreateSessionDescription(
    const std::string& type,
    const std::string& sdp,
    webrtc::SdpParseError* error) {
  return webrtc::CreateSessionDescription(type, sdp, error);
}

webrtc::IceCandidateInterface*
PeerConnectionDependencyFactory::CreateIceCandidate(
    const std::string& sdp_mid,
    int sdp_mline_index,
    const std::string& sdp) {
  return webrtc::CreateIceCandidate(sdp_mid, sdp_mline_index, sdp);
}

WebRtcAudioDeviceImpl*
PeerConnectionDependencyFactory::GetWebRtcAudioDevice() {
  return audio_device_.get();
}

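// Runs on |chrome_worker_thread_|: wraps the thread for libjingle use and
// hands the resulting rtc::Thread back via |thread|.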
void PeerConnectionDependencyFactory::InitializeWorkerThread(
    rtc::Thread** thread,
    base::WaitableEvent* event) {
  jingle_glue::JingleThreadWrapper::EnsureForCurrentMessageLoop();
  jingle_glue::JingleThreadWrapper::current()->set_send_allowed(true);
  *thread = jingle_glue::JingleThreadWrapper::current();
  event->Signal();
}

void PeerConnectionDependencyFactory::CreateIpcNetworkManagerOnWorkerThread(
    base::WaitableEvent* event) {
  DCHECK_EQ(base::MessageLoop::current(), chrome_worker_thread_.message_loop());
  network_manager_ = new IpcNetworkManager(p2p_socket_dispatcher_.get());
  event->Signal();
}

void PeerConnectionDependencyFactory::DeleteIpcNetworkManager() {
  DCHECK_EQ(base::MessageLoop::current(), chrome_worker_thread_.message_loop());
  delete network_manager_;
  network_manager_ = NULL;
}

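// Releases the PeerConnectionFactory and deletes the network manager on the
// worker thread (where it was created) before stopping that thread.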
void PeerConnectionDependencyFactory::CleanupPeerConnectionFactory() {
  pc_factory_ = NULL;
  if (network_manager_) {
    // The network manager needs to free its resources on the thread where
    // they were created, which is the worker thread.
    if (chrome_worker_thread_.IsRunning()) {
      chrome_worker_thread_.message_loop()->PostTask(FROM_HERE, base::Bind(
          &PeerConnectionDependencyFactory::DeleteIpcNetworkManager,
          base::Unretained(this)));
      // Stopping the thread will wait until all tasks have been
      // processed before returning. We wait for the above task to finish
      // before letting the function continue, to avoid any potential race
      // issues.
      chrome_worker_thread_.Stop();
    } else {
      NOTREACHED() << "Worker thread not running.";
    }
  }
}

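// Creates a WebRtcAudioCapturer for the given render view and capture device,
// backed by the shared WebRtcAudioDeviceImpl.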
scoped_refptr<WebRtcAudioCapturer>
PeerConnectionDependencyFactory::CreateAudioCapturer(
    int render_view_id,
    const StreamDeviceInfo& device_info,
    const blink::WebMediaConstraints& constraints,
    MediaStreamAudioSource* audio_source) {
  // TODO(xians): Handle the cases when gUM is called without a proper render
  // view, for example, by an extension.
  DCHECK_GE(render_view_id, 0);

  EnsureWebRtcAudioDeviceImpl();
  DCHECK(GetWebRtcAudioDevice());
  return WebRtcAudioCapturer::CreateCapturer(render_view_id, device_info,
                                             constraints,
                                             GetWebRtcAudioDevice(),
                                             audio_source);
}

scoped_refptr<base::MessageLoopProxy>
PeerConnectionDependencyFactory::GetWebRtcWorkerThread() const {
  DCHECK(CalledOnValidThread());
  return chrome_worker_thread_.message_loop_proxy();
}

scoped_refptr<base::MessageLoopProxy>
PeerConnectionDependencyFactory::GetWebRtcSignalingThread() const {
  DCHECK(CalledOnValidThread());
  return chrome_signaling_thread_.message_loop_proxy();
}

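// AecDumpMessageFilter delegate callbacks. These are only used when audio
// track processing is disabled; see the AddDelegate() call in
// CreatePeerConnectionFactory().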
void PeerConnectionDependencyFactory::OnAecDumpFile(
    const IPC::PlatformFileForTransit& file_handle) {
  DCHECK(CalledOnValidThread());
  DCHECK(!MediaStreamAudioProcessor::IsAudioTrackProcessingEnabled());
  DCHECK(PeerConnectionFactoryCreated());

  base::File file = IPC::PlatformFileForTransitToFile(file_handle);
  DCHECK(file.IsValid());

  // |pc_factory_| always takes ownership of |aec_dump_file|. If StartAecDump()
  // fails, |aec_dump_file| will be closed.
  if (!GetPcFactory()->StartAecDump(file.TakePlatformFile()))
    VLOG(1) << "Could not start AEC dump.";
}

void PeerConnectionDependencyFactory::OnDisableAecDump() {
  DCHECK(CalledOnValidThread());
  DCHECK(!MediaStreamAudioProcessor::IsAudioTrackProcessingEnabled());
  // Do nothing. We never disable AEC dump for the non-track-processing case.
}

void PeerConnectionDependencyFactory::OnIpcClosing() {
  DCHECK(CalledOnValidThread());
  aec_dump_message_filter_ = NULL;
}

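// Lazily creates the WebRtcAudioDeviceImpl shared by the audio capturers and
// the PeerConnectionFactory.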
void PeerConnectionDependencyFactory::EnsureWebRtcAudioDeviceImpl() {
  if (audio_device_.get())
    return;

  audio_device_ = new WebRtcAudioDeviceImpl();
}

}  // namespace content