// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "content/test/webrtc_audio_device_test.h"

#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/compiler_specific.h"
#include "base/file_util.h"
#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "base/synchronization/waitable_event.h"
#include "base/test/test_timeouts.h"
#include "content/browser/renderer_host/media/audio_input_renderer_host.h"
#include "content/browser/renderer_host/media/audio_mirroring_manager.h"
#include "content/browser/renderer_host/media/audio_renderer_host.h"
#include "content/browser/renderer_host/media/media_stream_manager.h"
#include "content/browser/renderer_host/media/mock_media_observer.h"
#include "content/common/media/media_param_traits.h"
#include "content/common/view_messages.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/resource_context.h"
#include "content/public/common/content_paths.h"
#include "content/public/test/test_browser_thread.h"
#include "content/renderer/media/audio_input_message_filter.h"
#include "content/renderer/media/audio_message_filter.h"
#include "content/renderer/media/webrtc_audio_device_impl.h"
#include "content/renderer/render_process.h"
#include "content/renderer/render_thread_impl.h"
#include "content/renderer/renderer_webkitplatformsupport_impl.h"
#include "media/audio/audio_parameters.h"
#include "media/base/audio_hardware_config.h"
#include "net/url_request/url_request_test_util.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/webrtc/voice_engine/include/voe_audio_processing.h"
#include "third_party/webrtc/voice_engine/include/voe_base.h"
#include "third_party/webrtc/voice_engine/include/voe_file.h"
#include "third_party/webrtc/voice_engine/include/voe_network.h"

#if defined(OS_WIN)
#include "base/win/scoped_com_initializer.h"
#endif

using media::AudioParameters;
using media::ChannelLayout;
using testing::_;
using testing::InvokeWithoutArgs;
using testing::Return;
using testing::StrEq;

namespace content {

// This class is a mock of the child process singleton which is needed
// to be able to create a RenderThread object.
class WebRTCMockRenderProcess : public RenderProcess {
 public:
  WebRTCMockRenderProcess() {}
  virtual ~WebRTCMockRenderProcess() {}

  // RenderProcess implementation.
  virtual skia::PlatformCanvas* GetDrawingCanvas(
      TransportDIB** memory, const gfx::Rect& rect) OVERRIDE {
    return NULL;
  }
  virtual void ReleaseTransportDIB(TransportDIB* memory) OVERRIDE {}
  virtual bool UseInProcessPlugins() const OVERRIDE { return false; }
  virtual void AddBindings(int bindings) OVERRIDE {}
  virtual int GetEnabledBindings() const OVERRIDE { return 0; }
  virtual TransportDIB* CreateTransportDIB(size_t size) OVERRIDE {
    return NULL;
  }
  virtual void FreeTransportDIB(TransportDIB*) OVERRIDE {}

 private:
  DISALLOW_COPY_AND_ASSIGN(WebRTCMockRenderProcess);
};

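// Subclass of the real AudioRendererHost that short-circuits IPC: outgoing
// messages are written directly to the test-owned IPC channel (if one is
// still attached) instead of going through a real renderer process.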
class TestAudioRendererHost : public AudioRendererHost {
 public:
  TestAudioRendererHost(
      int render_process_id,
      media::AudioManager* audio_manager,
      AudioMirroringManager* mirroring_manager,
      MediaInternals* media_internals,
      MediaStreamManager* media_stream_manager,
      IPC::Channel* channel)
      : AudioRendererHost(render_process_id, audio_manager, mirroring_manager,
                          media_internals, media_stream_manager),
        channel_(channel) {}
  virtual bool Send(IPC::Message* message) OVERRIDE {
    if (channel_)
      return channel_->Send(message);
    return false;
  }
  void ResetChannel() {
    channel_ = NULL;
  }

 protected:
  virtual ~TestAudioRendererHost() {}

 private:
  IPC::Channel* channel_;
};

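// Audio-input counterpart of TestAudioRendererHost: the browser-side
// AudioInputRendererHost, with Send() redirected to the test-owned channel.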
class TestAudioInputRendererHost : public AudioInputRendererHost {
 public:
  TestAudioInputRendererHost(
      media::AudioManager* audio_manager,
      MediaStreamManager* media_stream_manager,
      AudioMirroringManager* audio_mirroring_manager,
      media::UserInputMonitor* user_input_monitor,
      IPC::Channel* channel)
      : AudioInputRendererHost(audio_manager, media_stream_manager,
                               audio_mirroring_manager, user_input_monitor),
        channel_(channel) {}
  virtual bool Send(IPC::Message* message) OVERRIDE {
    if (channel_)
      return channel_->Send(message);
    return false;
  }
  void ResetChannel() {
    channel_ = NULL;
  }

 protected:
  virtual ~TestAudioInputRendererHost() {}

 private:
  IPC::Channel* channel_;
};

// Utility scoped class to replace the global content client's renderer for the
// duration of the test.
class ReplaceContentClientRenderer {
 public:
  explicit ReplaceContentClientRenderer(ContentRendererClient* new_renderer) {
    saved_renderer_ = SetRendererClientForTesting(new_renderer);
  }
  ~ReplaceContentClientRenderer() {
    // Restore the original renderer.
    SetRendererClientForTesting(saved_renderer_);
  }
 private:
  ContentRendererClient* saved_renderer_;
  DISALLOW_COPY_AND_ASSIGN(ReplaceContentClientRenderer);
};

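// Minimal ResourceContext for the browser side of the test. It hands out the
// URLRequestContext installed via set_request_context() and denies microphone
// and camera access.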
class MockRTCResourceContext : public ResourceContext {
 public:
  MockRTCResourceContext() : test_request_context_(NULL) {}
  virtual ~MockRTCResourceContext() {}

  void set_request_context(net::URLRequestContext* request_context) {
    test_request_context_ = request_context;
  }

  // ResourceContext implementation:
  virtual net::HostResolver* GetHostResolver() OVERRIDE {
    return NULL;
  }
  virtual net::URLRequestContext* GetRequestContext() OVERRIDE {
    return test_request_context_;
  }

  virtual bool AllowMicAccess(const GURL& origin) OVERRIDE {
    return false;
  }

  virtual bool AllowCameraAccess(const GURL& origin) OVERRIDE {
    return false;
  }

 private:
  net::URLRequestContext* test_request_context_;

  DISALLOW_COPY_AND_ASSIGN(MockRTCResourceContext);
};

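// GMock action that posts a QuitClosure to the given message loop (or loop
// proxy). Tests use it to unblock a MessageLoop::Run() call once an expected
// mock method has been invoked.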
ACTION_P(QuitMessageLoop, loop_or_proxy) {
  loop_or_proxy->PostTask(FROM_HERE, base::MessageLoop::QuitClosure());
}

MAYBE_WebRTCAudioDeviceTest::MAYBE_WebRTCAudioDeviceTest()
    : render_thread_(NULL), audio_hardware_config_(NULL),
      has_input_devices_(false), has_output_devices_(false) {
}

MAYBE_WebRTCAudioDeviceTest::~MAYBE_WebRTCAudioDeviceTest() {}

void MAYBE_WebRTCAudioDeviceTest::SetUp() {
  // This part sets up a RenderThread environment to ensure that
  // RenderThread::current() (<=> TLS pointer) is valid.
  // Main parts are inspired by the RenderViewFakeResourcesTest.
  // Note that the IPC part is not utilized in this test.
  saved_content_renderer_.reset(
      new ReplaceContentClientRenderer(&content_renderer_client_));
  mock_process_.reset(new WebRTCMockRenderProcess());
  ui_thread_.reset(
      new TestBrowserThread(BrowserThread::UI, base::MessageLoop::current()));

  // Construct the resource context on the UI thread.
  resource_context_.reset(new MockRTCResourceContext);

  static const char kThreadName[] = "RenderThread";
  ChildProcess::current()->io_message_loop()->PostTask(FROM_HERE,
      base::Bind(&MAYBE_WebRTCAudioDeviceTest::InitializeIOThread,
                 base::Unretained(this), kThreadName));
  WaitForIOThreadCompletion();

  sandbox_was_enabled_ =
      RendererWebKitPlatformSupportImpl::SetSandboxEnabledForTesting(false);
  render_thread_ = new RenderThreadImpl(kThreadName);
}

void MAYBE_WebRTCAudioDeviceTest::TearDown() {
  SetAudioHardwareConfig(NULL);

  // Run any pending cleanup tasks that may have been posted to the main
  // thread.
  base::RunLoop().RunUntilIdle();

  // Kick off the cleanup process by closing the channel. This queues up
  // OnStreamClosed calls to be executed on the audio thread.
  ChildProcess::current()->io_message_loop()->PostTask(FROM_HERE,
      base::Bind(&MAYBE_WebRTCAudioDeviceTest::DestroyChannel,
                 base::Unretained(this)));
  WaitForIOThreadCompletion();

  // When audio [input] render hosts are notified that the channel has
  // been closed, they post tasks to the audio thread to close the
  // AudioOutputController and once that's completed, a task is posted back to
  // the IO thread to actually delete the AudioEntry for the audio stream. Only
  // then is the reference to the audio manager released, so we wait for the
  // whole thing to be torn down before we finally uninitialize the IO thread.
  WaitForAudioManagerCompletion();

  ChildProcess::current()->io_message_loop()->PostTask(FROM_HERE,
      base::Bind(&MAYBE_WebRTCAudioDeviceTest::UninitializeIOThread,
                 base::Unretained(this)));
  WaitForIOThreadCompletion();
  mock_process_.reset();
  media_stream_manager_.reset();
  mirroring_manager_.reset();
  RendererWebKitPlatformSupportImpl::SetSandboxEnabledForTesting(
      sandbox_was_enabled_);
}

bool MAYBE_WebRTCAudioDeviceTest::Send(IPC::Message* message) {
  return channel_->Send(message);
}

void MAYBE_WebRTCAudioDeviceTest::SetAudioHardwareConfig(
    media::AudioHardwareConfig* hardware_config) {
  audio_hardware_config_ = hardware_config;
}

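// Creates a WebRtcAudioRenderer for |render_view_id| using the output sample
// rate and buffer size reported by the render thread's AudioHardwareConfig.
// A test would typically hand the result to a WebRtcAudioDeviceImpl, roughly
// like this (sketch only: kRenderViewId is a placeholder, and
// SetAudioRenderer() is assumed to be declared in webrtc_audio_device_impl.h
// rather than in this file):
//   scoped_refptr<WebRtcAudioRenderer> renderer =
//       CreateDefaultWebRtcAudioRenderer(kRenderViewId);
//   scoped_refptr<WebRtcAudioDeviceImpl> device(new WebRtcAudioDeviceImpl());
//   EXPECT_TRUE(device->SetAudioRenderer(renderer.get()));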
scoped_refptr<WebRtcAudioRenderer>
MAYBE_WebRTCAudioDeviceTest::CreateDefaultWebRtcAudioRenderer(
    int render_view_id) {
  media::AudioHardwareConfig* hardware_config =
      RenderThreadImpl::current()->GetAudioHardwareConfig();
  int sample_rate = hardware_config->GetOutputSampleRate();
  int frames_per_buffer = hardware_config->GetOutputBufferSize();

  return new WebRtcAudioRenderer(render_view_id, 0, sample_rate,
                                 frames_per_buffer);
}

void MAYBE_WebRTCAudioDeviceTest::InitializeIOThread(const char* thread_name) {
#if defined(OS_WIN)
  // We initialize COM (STA) on our IO thread as is done in Chrome.
  // See BrowserProcessSubThread::Init.
  initialize_com_.reset(new base::win::ScopedCOMInitializer());
#endif

  // Set the current thread as the IO thread.
  io_thread_.reset(
      new TestBrowserThread(BrowserThread::IO, base::MessageLoop::current()));

  // Populate our resource context.
  test_request_context_.reset(new net::TestURLRequestContext());
  MockRTCResourceContext* resource_context =
      static_cast<MockRTCResourceContext*>(resource_context_.get());
  resource_context->set_request_context(test_request_context_.get());
  media_internals_.reset(new MockMediaInternals());

  // Create our own AudioManager, AudioMirroringManager and MediaStreamManager.
  audio_manager_.reset(media::AudioManager::Create());
  mirroring_manager_.reset(new AudioMirroringManager());
  media_stream_manager_.reset(new MediaStreamManager(audio_manager_.get()));

  has_input_devices_ = audio_manager_->HasAudioInputDevices();
  has_output_devices_ = audio_manager_->HasAudioOutputDevices();

  // Create an IPC channel that handles incoming messages on the IO thread.
  CreateChannel(thread_name);
}

void MAYBE_WebRTCAudioDeviceTest::UninitializeIOThread() {
  resource_context_.reset();

  test_request_context_.reset();

#if defined(OS_WIN)
  initialize_com_.reset();
#endif

  audio_manager_.reset();
}

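// Sets up the browser-side end of the IPC channel and the two audio hosts
// that will service the in-process renderer's audio requests in this test.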
void MAYBE_WebRTCAudioDeviceTest::CreateChannel(const char* name) {
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));

  channel_.reset(new IPC::Channel(name, IPC::Channel::MODE_SERVER, this));
  ASSERT_TRUE(channel_->Connect());

  static const int kRenderProcessId = 1;
  audio_render_host_ = new TestAudioRendererHost(
      kRenderProcessId, audio_manager_.get(), mirroring_manager_.get(),
      media_internals_.get(), media_stream_manager_.get(), channel_.get());
  audio_render_host_->set_peer_pid_for_testing(base::GetCurrentProcId());

  audio_input_renderer_host_ =
      new TestAudioInputRendererHost(audio_manager_.get(),
                                     media_stream_manager_.get(),
                                     mirroring_manager_.get(),
                                     NULL,
                                     channel_.get());
  audio_input_renderer_host_->set_peer_pid_for_testing(
      base::GetCurrentProcId());
}

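// Simulates the renderer side of the channel going away: both hosts are told
// the channel is closing and their filters are removed, the channel pointers
// are cleared so no further messages are sent, and the hosts are released.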
void MAYBE_WebRTCAudioDeviceTest::DestroyChannel() {
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::IO));
  audio_render_host_->OnChannelClosing();
  audio_render_host_->OnFilterRemoved();
  audio_input_renderer_host_->OnChannelClosing();
  audio_input_renderer_host_->OnFilterRemoved();
  audio_render_host_->ResetChannel();
  audio_input_renderer_host_->ResetChannel();
  channel_.reset();
  audio_render_host_ = NULL;
  audio_input_renderer_host_ = NULL;
}

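// Handler for ViewHostMsg_GetAudioHardwareConfig (see OnMessageReceived
// below). Replies with whatever configuration the test installed through
// SetAudioHardwareConfig().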
void MAYBE_WebRTCAudioDeviceTest::OnGetAudioHardwareConfig(
    AudioParameters* input_params, AudioParameters* output_params) {
  ASSERT_TRUE(audio_hardware_config_);
  *input_params = audio_hardware_config_->GetInputConfig();
  *output_params = audio_hardware_config_->GetOutputConfig();
}

// IPC::Listener implementation.
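// Since the test object owns both ends of the channel, incoming messages are
// offered first to the renderer-side message filters, then to the browser-side
// audio hosts, and finally to the test's own handlers (currently only
// ViewHostMsg_GetAudioHardwareConfig).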
bool MAYBE_WebRTCAudioDeviceTest::OnMessageReceived(
    const IPC::Message& message) {
  if (render_thread_) {
    IPC::ChannelProxy::MessageFilter* filter =
        render_thread_->audio_input_message_filter();
    if (filter->OnMessageReceived(message))
      return true;

    filter = render_thread_->audio_message_filter();
    if (filter->OnMessageReceived(message))
      return true;
  }

  if (audio_render_host_.get()) {
    bool message_was_ok = false;
    if (audio_render_host_->OnMessageReceived(message, &message_was_ok))
      return true;
  }

  if (audio_input_renderer_host_.get()) {
    bool message_was_ok = false;
    if (audio_input_renderer_host_->OnMessageReceived(message, &message_was_ok))
      return true;
  }

  bool handled ALLOW_UNUSED = true;
  bool message_is_ok = true;
  IPC_BEGIN_MESSAGE_MAP_EX(MAYBE_WebRTCAudioDeviceTest, message, message_is_ok)
    IPC_MESSAGE_HANDLER(ViewHostMsg_GetAudioHardwareConfig,
                        OnGetAudioHardwareConfig)
    IPC_MESSAGE_UNHANDLED(handled = false)
  IPC_END_MESSAGE_MAP_EX()

  EXPECT_TRUE(message_is_ok);

  return true;
}

// Posts a final task to the IO message loop and waits for completion.
void MAYBE_WebRTCAudioDeviceTest::WaitForIOThreadCompletion() {
  WaitForMessageLoopCompletion(
      ChildProcess::current()->io_message_loop()->message_loop_proxy().get());
}

void MAYBE_WebRTCAudioDeviceTest::WaitForAudioManagerCompletion() {
  if (audio_manager_)
    WaitForMessageLoopCompletion(audio_manager_->GetMessageLoop().get());
}

void MAYBE_WebRTCAudioDeviceTest::WaitForMessageLoopCompletion(
    base::MessageLoopProxy* loop) {
  base::WaitableEvent* event = new base::WaitableEvent(false, false);
  loop->PostTask(FROM_HERE, base::Bind(&base::WaitableEvent::Signal,
                 base::Unretained(event)));
  if (event->TimedWait(TestTimeouts::action_max_timeout())) {
    delete event;
  } else {
    // Don't delete the event object in case the message ever gets processed.
    // If we do, we will crash the test process.
    ADD_FAILURE() << "Failed to wait for message loop";
  }
}

std::string MAYBE_WebRTCAudioDeviceTest::GetTestDataPath(
    const base::FilePath::StringType& file_name) {
  base::FilePath path;
  EXPECT_TRUE(PathService::Get(DIR_TEST_DATA, &path));
  path = path.Append(file_name);
  EXPECT_TRUE(base::PathExists(path));
#if defined(OS_WIN)
  return WideToUTF8(path.value());
#else
  return path.value();
#endif
}

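// WebRTCTransportImpl is a loopback transport: packets "sent" by the
// VoiceEngine are fed straight back into the same engine as received RTP and
// RTCP, so a test can exercise a full send/receive path without any real
// network.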
WebRTCTransportImpl::WebRTCTransportImpl(webrtc::VoENetwork* network)
    : network_(network) {
}

WebRTCTransportImpl::~WebRTCTransportImpl() {}

int WebRTCTransportImpl::SendPacket(int channel, const void* data, int len) {
  return network_->ReceivedRTPPacket(channel, data, len);
}

int WebRTCTransportImpl::SendRTCPPacket(int channel, const void* data,
                                        int len) {
  return network_->ReceivedRTCPPacket(channel, data, len);
}

}  // namespace content