// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_
#define CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_

#include "base/atomicops.h"
#include "base/files/file.h"
#include "base/synchronization/lock.h"
#include "base/threading/thread_checker.h"
#include "base/time/time.h"
#include "content/common/content_export.h"
#include "content/renderer/media/aec_dump_message_filter.h"
#include "content/renderer/media/webrtc_audio_device_impl.h"
#include "media/base/audio_converter.h"
#include "third_party/libjingle/source/talk/app/webrtc/mediastreaminterface.h"
#include "third_party/webrtc/modules/audio_processing/include/audio_processing.h"
#include "third_party/webrtc/modules/interface/module_common_types.h"

namespace blink {
class WebMediaConstraints;
}  // namespace blink

namespace media {
class AudioBus;
class AudioParameters;
}  // namespace media

namespace webrtc {
class TypingDetection;
}  // namespace webrtc

namespace content {

class EchoInformation;
class MediaStreamAudioBus;
class MediaStreamAudioFifo;
class RTCMediaConstraints;

using webrtc::AudioProcessorInterface;

// This class owns an object of webrtc::AudioProcessing which contains signal
// processing components like AGC, AEC and NS. It enables the components based
// on the getUserMedia constraints, processes the data and outputs it in
// 10 ms data chunks.
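// For example (illustrative only; the chunk size in frames follows whatever
// sample rate is configured): at a 48 kHz sample rate a 10 ms chunk is 480
// sample frames, and at 16 kHz it is 160 frames.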
class CONTENT_EXPORT MediaStreamAudioProcessor :
    NON_EXPORTED_BASE(public WebRtcPlayoutDataSource::Sink),
    NON_EXPORTED_BASE(public AudioProcessorInterface),
    NON_EXPORTED_BASE(public AecDumpMessageFilter::AecDumpDelegate) {
 public:
  // Returns false if |kDisableAudioTrackProcessing| is set to true, otherwise
  // returns true.
  static bool IsAudioTrackProcessingEnabled();

  // |playout_data_source| is used to register this class as a sink to the
  // WebRtc playout data for processing AEC. If clients do not enable AEC,
  // |playout_data_source| won't be used.
  MediaStreamAudioProcessor(const blink::WebMediaConstraints& constraints,
                            int effects,
                            WebRtcPlayoutDataSource* playout_data_source);

  // Called when the format of the capture data has changed.
  // Called on the main render thread. The caller is responsible for stopping
  // the capture thread before calling this method.
  // After this method, the capture thread will be changed to a new capture
  // thread.
  void OnCaptureFormatChanged(const media::AudioParameters& source_params);

  // Pushes capture data in |audio_source| to the internal FIFO. Each call to
  // this method should be followed by calls to ProcessAndConsumeData() until
  // it returns false, to pull out all available data.
  // Called on the capture audio thread.
  void PushCaptureData(const media::AudioBus* audio_source);

  // Processes a block of 10 ms data from the internal FIFO and outputs it via
  // |out|. |out| is the address of a pointer that will be set to point at the
  // post-processed data if the method returns true. The lifetime of the data
  // represented by |out| is guaranteed until this method is called again.
  // |new_volume| receives the new microphone volume from the AGC.
  // The new microphone volume range is [0, 255], and the value will be 0 if
  // the microphone volume should not be adjusted.
  // Returns true if the internal FIFO has at least 10 ms of data for
  // processing, otherwise false.
  // Called on the capture audio thread.
  //
  // TODO(ajm): Don't we want this to output float?
  bool ProcessAndConsumeData(base::TimeDelta capture_delay,
                             int volume,
                             bool key_pressed,
                             int* new_volume,
                             int16** out);
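
  // A minimal caller-side sketch of the push/process loop (illustrative only;
  // |processor|, |source_bus|, |capture_delay|, |current_volume| and
  // |key_pressed| are hypothetical caller variables, not part of this API):
  //
  //   processor->PushCaptureData(source_bus);
  //   int16* out = NULL;
  //   int new_volume = 0;
  //   while (processor->ProcessAndConsumeData(capture_delay, current_volume,
  //                                           key_pressed, &new_volume,
  //                                           &out)) {
  //     // Deliver the 10 ms chunk pointed to by |out|. If |new_volume| is
  //     // non-zero, apply it to the microphone.
  //   }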

  // Stops the audio processor, no more AEC dump or render data after calling
  // this method.
  void Stop();

  // The audio formats of the capture input to and output from the processor.
  // Must only be called on the main render or audio capture threads.
  const media::AudioParameters& InputFormat() const;
  const media::AudioParameters& OutputFormat() const;

  // Accessor to check if the audio processing is enabled or not.
  bool has_audio_processing() const { return audio_processing_ != NULL; }

  // AecDumpMessageFilter::AecDumpDelegate implementation.
  // Called on the main render thread.
  void OnAecDumpFile(const IPC::PlatformFileForTransit& file_handle) override;
  void OnDisableAecDump() override;
  void OnIpcClosing() override;

 protected:
  ~MediaStreamAudioProcessor() override;

 private:
  friend class MediaStreamAudioProcessorTest;

  FRIEND_TEST_ALL_PREFIXES(MediaStreamAudioProcessorTest,
                           GetAecDumpMessageFilter);

  // WebRtcPlayoutDataSource::Sink implementation.
  void OnPlayoutData(media::AudioBus* audio_bus,
                     int sample_rate,
                     int audio_delay_milliseconds) override;
  void OnPlayoutDataSourceChanged() override;

  // webrtc::AudioProcessorInterface implementation.
  // This method is called on the libjingle thread.
  void GetStats(AudioProcessorStats* stats) override;

  // Helper to initialize the WebRtc AudioProcessing.
  void InitializeAudioProcessingModule(
      const blink::WebMediaConstraints& constraints, int effects);

  // Helper to initialize the capture converter.
  void InitializeCaptureFifo(const media::AudioParameters& input_format);

  // Helper to initialize the render converter.
  void InitializeRenderFifoIfNeeded(int sample_rate,
                                    int number_of_channels,
                                    int frames_per_buffer);

  // Called by ProcessAndConsumeData().
  // Returns the new microphone volume in the range of [0, 255].
  // When the volume does not need to be updated, it returns 0.
  int ProcessData(const float* const* process_ptrs,
                  int process_frames,
                  base::TimeDelta capture_delay,
                  int volume,
                  bool key_pressed,
                  float* const* output_ptrs);

  // Cached value for the render delay latency. This member is accessed by
  // both the capture audio thread and the render audio thread.
  base::subtle::Atomic32 render_delay_ms_;
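  // Illustrative cross-thread access pattern only (a sketch; |delay_ms| is a
  // hypothetical caller-side value, and the .cc file may use different
  // base/atomicops.h variants):
  //
  //   base::subtle::Release_Store(&render_delay_ms_, delay_ms);   // render
  //   int delay = base::subtle::Acquire_Load(&render_delay_ms_);  // capture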

  // Module to handle processing and format conversion.
  scoped_ptr<webrtc::AudioProcessing> audio_processing_;

  // FIFO to provide 10 ms capture chunks.
  scoped_ptr<MediaStreamAudioFifo> capture_fifo_;
  // Receives processing output.
  scoped_ptr<MediaStreamAudioBus> output_bus_;
  // Receives interleaved int16 data for output.
  scoped_ptr<int16[]> output_data_;

  // FIFO to provide 10 ms render chunks when the AEC is enabled.
  scoped_ptr<MediaStreamAudioFifo> render_fifo_;

  // These are mutated on the main render thread in OnCaptureFormatChanged().
  // The caller guarantees this does not run concurrently with accesses on the
  // capture audio thread.
  media::AudioParameters input_format_;
  media::AudioParameters output_format_;
  // Only used on the render audio thread.
  media::AudioParameters render_format_;

  // Raw pointer to the WebRtcPlayoutDataSource, which is valid for the
  // lifetime of RenderThread.
  WebRtcPlayoutDataSource* playout_data_source_;

  // Used to DCHECK that some methods are called on the main render thread.
  base::ThreadChecker main_thread_checker_;
  // Used to DCHECK that some methods are called on the capture audio thread.
  base::ThreadChecker capture_thread_checker_;
  // Used to DCHECK that some methods are called on the render audio thread.
  base::ThreadChecker render_thread_checker_;

  // Flag to enable stereo channel mirroring.
  bool audio_mirroring_;

  scoped_ptr<webrtc::TypingDetection> typing_detector_;
  // This flag stores the result of typing detection.
  // It can be accessed by the capture audio thread and by the libjingle thread
  // which calls GetStats().
  base::subtle::Atomic32 typing_detected_;

  // Communication with browser for AEC dump.
  scoped_refptr<AecDumpMessageFilter> aec_dump_message_filter_;

  // Flag to avoid executing Stop() more than once.
  bool stopped_;

  // Object for logging echo information when the AEC is enabled. Accessible by
  // the libjingle thread through GetStats().
  scoped_ptr<EchoInformation> echo_information_;

  DISALLOW_COPY_AND_ASSIGN(MediaStreamAudioProcessor);
};

}  // namespace content

#endif  // CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_