1 // Copyright 2013 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #ifndef CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_
6 #define CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_
8 #include "base/atomicops.h"
9 #include "base/files/file.h"
10 #include "base/synchronization/lock.h"
11 #include "base/threading/thread_checker.h"
12 #include "base/time/time.h"
13 #include "content/common/content_export.h"
14 #include "content/renderer/media/aec_dump_message_filter.h"
15 #include "content/renderer/media/webrtc_audio_device_impl.h"
16 #include "media/base/audio_converter.h"
17 #include "third_party/libjingle/source/talk/app/webrtc/mediastreaminterface.h"
18 #include "third_party/webrtc/modules/audio_processing/include/audio_processing.h"
19 #include "third_party/webrtc/modules/interface/module_common_types.h"
22 class WebMediaConstraints;
28 class AudioParameters;
33 class TypingDetection;
38 class RTCMediaConstraints;
40 using webrtc::AudioProcessorInterface;
42 // This class owns an object of webrtc::AudioProcessing which contains signal
43 // processing components like AGC, AEC and NS. It enables the components based
44 // on the getUserMedia constraints, processes the data, and outputs it in
45 // 10 ms data chunks.
46 class CONTENT_EXPORT MediaStreamAudioProcessor :
47 NON_EXPORTED_BASE(public WebRtcPlayoutDataSource::Sink),
48 NON_EXPORTED_BASE(public AudioProcessorInterface),
49 NON_EXPORTED_BASE(public AecDumpMessageFilter::AecDumpDelegate) {
51 // Returns false if |kDisableAudioTrackProcessing| is set to true, otherwise true.
53 static bool IsAudioTrackProcessingEnabled();
55 // |playout_data_source| is used to register this class as a sink to the
56 // WebRtc playout data for processing AEC. If clients do not enable AEC,
57 // |playout_data_source| won't be used.
58 MediaStreamAudioProcessor(const blink::WebMediaConstraints& constraints,
60 WebRtcPlayoutDataSource* playout_data_source);
62 // Called when format of the capture data has changed.
63 // Called on the main render thread. The caller is responsible for stopping
64 // the capture thread before calling this method.
65 // After this method, the capture thread will be changed to a new capture
67 void OnCaptureFormatChanged(const media::AudioParameters& source_params);
69 // Pushes capture data in |audio_source| to the internal FIFO.
70 // Called on the capture audio thread.
71 void PushCaptureData(const media::AudioBus* audio_source);
73 // Processes a block of 10 ms data from the internal FIFO and outputs it via
74 // |out|. |out| is the address of the pointer that will point to
75 // the post-processed data if the method returns true. The lifetime
76 // of the data represented by |out| is guaranteed to outlive the method call.
77 // This also means *|out| won't change until this method is called again.
78 // |new_volume| receives the new microphone volume from the AGC.
79 // The new microphone volume range is [0, 255], and the value will be 0 if
80 // the microphone volume should not be adjusted.
81 // Returns true if the internal FIFO has at least 10 ms data for processing, otherwise false.
83 // |capture_delay|, |volume| and |key_pressed| will be passed to
84 // webrtc::AudioProcessing to help processing the data.
85 // Called on the capture audio thread.
86 bool ProcessAndConsumeData(base::TimeDelta capture_delay,
92 // Stops the audio processor, no more AEC dump or render data after calling
96 // The audio format of the input to the processor.
97 const media::AudioParameters& InputFormat() const;
99 // The audio format of the output from the processor.
100 const media::AudioParameters& OutputFormat() const;
102 // Accessor to check if the audio processing is enabled or not.
103 bool has_audio_processing() const { return audio_processing_ != NULL; }
105 // AecDumpMessageFilter::AecDumpDelegate implementation.
106 // Called on the main render thread.
107 virtual void OnAecDumpFile(
108 const IPC::PlatformFileForTransit& file_handle) OVERRIDE;
109 virtual void OnDisableAecDump() OVERRIDE;
110 virtual void OnIpcClosing() OVERRIDE;
113 friend class base::RefCountedThreadSafe<MediaStreamAudioProcessor>;
114 virtual ~MediaStreamAudioProcessor();
117 friend class MediaStreamAudioProcessorTest;
119 class MediaStreamAudioConverter;
121 // WebRtcPlayoutDataSource::Sink implementation.
122 virtual void OnPlayoutData(media::AudioBus* audio_bus,
124 int audio_delay_milliseconds) OVERRIDE;
125 virtual void OnPlayoutDataSourceChanged() OVERRIDE;
127 // webrtc::AudioProcessorInterface implementation.
128 // This method is called on the libjingle thread.
129 virtual void GetStats(AudioProcessorStats* stats) OVERRIDE;
131 // Helper to initialize the WebRtc AudioProcessing.
132 void InitializeAudioProcessingModule(
133 const blink::WebMediaConstraints& constraints, int effects);
135 // Helper to initialize the capture converter.
136 void InitializeCaptureConverter(const media::AudioParameters& source_params);
138 // Helper to initialize the render converter.
139 void InitializeRenderConverterIfNeeded(int sample_rate,
140 int number_of_channels,
141 int frames_per_buffer);
143 // Called by ProcessAndConsumeData().
144 // Returns the new microphone volume in the range of (0, 255].
145 // When the volume does not need to be updated, it returns 0.
146 int ProcessData(webrtc::AudioFrame* audio_frame,
147 base::TimeDelta capture_delay,
151 // Cached value for the render delay latency. This member is accessed by
152 // both the capture audio thread and the render audio thread.
153 base::subtle::Atomic32 render_delay_ms_;
155 // webrtc::AudioProcessing module which does AEC, AGC, NS, HighPass filter,
157 scoped_ptr<webrtc::AudioProcessing> audio_processing_;
159 // Converter used for the down-mixing and resampling of the capture data.
160 scoped_ptr<MediaStreamAudioConverter> capture_converter_;
162 // AudioFrame used to hold the output of |capture_converter_|.
163 webrtc::AudioFrame capture_frame_;
165 // Converter used for the down-mixing and resampling of the render data when
166 // the AEC is enabled.
167 scoped_ptr<MediaStreamAudioConverter> render_converter_;
169 // AudioFrame used to hold the output of |render_converter_|.
170 webrtc::AudioFrame render_frame_;
172 // Data bus to help converting interleaved data to an AudioBus.
173 scoped_ptr<media::AudioBus> render_data_bus_;
175 // Raw pointer to the WebRtcPlayoutDataSource, which is valid for the
176 // lifetime of RenderThread.
177 WebRtcPlayoutDataSource* playout_data_source_;
179 // Used to DCHECK that the destructor is called on the main render thread.
180 base::ThreadChecker main_thread_checker_;
182 // Used to DCHECK that some methods are called on the capture audio thread.
183 base::ThreadChecker capture_thread_checker_;
185 // Used to DCHECK that PushRenderData() is called on the render audio thread.
186 base::ThreadChecker render_thread_checker_;
188 // Flag to enable the stereo channels mirroring.
189 bool audio_mirroring_;
191 // Used by the typing detection.
192 scoped_ptr<webrtc::TypingDetection> typing_detector_;
194 // This flag is used to show the result of typing detection.
195 // It can be accessed by the capture audio thread and by the libjingle thread
196 // which calls GetStats().
197 base::subtle::Atomic32 typing_detected_;
199 // Communication with browser for AEC dump.
200 scoped_refptr<AecDumpMessageFilter> aec_dump_message_filter_;
202 // Flag to avoid executing Stop() more than once.
206 } // namespace content
208 #endif // CONTENT_RENDERER_MEDIA_MEDIA_STREAM_AUDIO_PROCESSOR_H_