 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
11 #include "webrtc/modules/audio_device/android/opensles_input.h"
15 #include "webrtc/modules/audio_device/android/audio_common.h"
16 #include "webrtc/modules/audio_device/android/opensles_common.h"
17 #include "webrtc/modules/audio_device/android/single_rw_fifo.h"
18 #include "webrtc/modules/audio_device/audio_device_buffer.h"
19 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
20 #include "webrtc/system_wrappers/interface/thread_wrapper.h"
21 #include "webrtc/system_wrappers/interface/trace.h"
24 #define OPENSL_RETURN_ON_FAILURE(op, ret_val) \
26 SLresult err = (op); \
27 if (err != SL_RESULT_SUCCESS) { \
28 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_, \
29 "OpenSL error: %d", err); \
35 static const SLEngineOption kOption[] = {
36 { SL_ENGINEOPTION_THREADSAFE, static_cast<SLuint32>(SL_BOOLEAN_TRUE) },
46 OpenSlesInput::OpenSlesInput(
47 const int32_t id, PlayoutDelayProvider* delay_provider)
49 delay_provider_(delay_provider),
51 mic_initialized_(false),
52 rec_initialized_(false),
53 crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
55 num_fifo_buffers_needed_(0),
58 sles_engine_itf_(NULL),
60 sles_recorder_itf_(NULL),
61 sles_recorder_sbq_itf_(NULL),
64 rec_sampling_rate_(0),
69 OpenSlesInput::~OpenSlesInput() {
72 int32_t OpenSlesInput::SetAndroidAudioDeviceObjects(void* javaVM,
78 void OpenSlesInput::ClearAndroidAudioDeviceObjects() {
81 int32_t OpenSlesInput::Init() {
82 assert(!initialized_);
84 // Set up OpenSL engine.
85 OPENSL_RETURN_ON_FAILURE(slCreateEngine(&sles_engine_, 1, kOption, 0,
88 OPENSL_RETURN_ON_FAILURE((*sles_engine_)->Realize(sles_engine_,
91 OPENSL_RETURN_ON_FAILURE((*sles_engine_)->GetInterface(sles_engine_,
96 if (InitSampleRate() != 0) {
104 int32_t OpenSlesInput::Terminate() {
105 // It is assumed that the caller has stopped recording before terminating.
107 (*sles_engine_)->Destroy(sles_engine_);
108 initialized_ = false;
109 mic_initialized_ = false;
110 rec_initialized_ = false;
114 int32_t OpenSlesInput::RecordingDeviceName(uint16_t index,
115 char name[kAdmMaxDeviceNameSize],
116 char guid[kAdmMaxGuidSize]) {
124 int32_t OpenSlesInput::SetRecordingDevice(uint16_t index) {
129 int32_t OpenSlesInput::RecordingIsAvailable(bool& available) { // NOLINT
134 int32_t OpenSlesInput::InitRecording() {
135 assert(initialized_);
136 rec_initialized_ = true;
140 int32_t OpenSlesInput::StartRecording() {
141 assert(rec_initialized_);
143 if (!CreateAudioRecorder()) {
146 // Setup to receive buffer queue event callbacks.
147 OPENSL_RETURN_ON_FAILURE(
148 (*sles_recorder_sbq_itf_)->RegisterCallback(
149 sles_recorder_sbq_itf_,
150 RecorderSimpleBufferQueueCallback,
154 if (!EnqueueAllBuffers()) {
159 // To prevent the compiler from e.g. optimizing the code to
160 // recording_ = StartCbThreads() which wouldn't have been thread safe.
161 CriticalSectionScoped lock(crit_sect_.get());
164 if (!StartCbThreads()) {
171 int32_t OpenSlesInput::StopRecording() {
173 DestroyAudioRecorder();
178 int32_t OpenSlesInput::SetAGC(bool enable) {
179 agc_enabled_ = enable;
183 int32_t OpenSlesInput::MicrophoneIsAvailable(bool& available) { // NOLINT
188 int32_t OpenSlesInput::InitMicrophone() {
189 assert(initialized_);
191 mic_initialized_ = true;
195 int32_t OpenSlesInput::MicrophoneVolumeIsAvailable(bool& available) { // NOLINT
200 int32_t OpenSlesInput::MinMicrophoneVolume(
201 uint32_t& minVolume) const { // NOLINT
206 int32_t OpenSlesInput::MicrophoneVolumeStepSize(
207 uint16_t& stepSize) const {
212 int32_t OpenSlesInput::MicrophoneMuteIsAvailable(bool& available) { // NOLINT
213 available = false; // Mic mute not supported on Android
217 int32_t OpenSlesInput::MicrophoneBoostIsAvailable(bool& available) { // NOLINT
218 available = false; // Mic boost not supported on Android.
222 int32_t OpenSlesInput::SetMicrophoneBoost(bool enable) {
224 return -1; // Not supported
227 int32_t OpenSlesInput::MicrophoneBoost(bool& enabled) const { // NOLINT
229 return -1; // Not supported
232 int32_t OpenSlesInput::StereoRecordingIsAvailable(bool& available) { // NOLINT
233 available = false; // Stereo recording not supported on Android.
237 int32_t OpenSlesInput::StereoRecording(bool& enabled) const { // NOLINT
242 int32_t OpenSlesInput::RecordingDelay(uint16_t& delayMS) const { // NOLINT
243 delayMS = recording_delay_;
247 void OpenSlesInput::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
248 audio_buffer_ = audioBuffer;
251 int OpenSlesInput::InitSampleRate() {
253 audio_buffer_->SetRecordingSampleRate(rec_sampling_rate_);
254 audio_buffer_->SetRecordingChannels(kNumChannels);
255 UpdateRecordingDelay();
259 int OpenSlesInput::buffer_size_samples() const {
260 // Since there is no low latency recording, use buffer size corresponding to
261 // 10ms of data since that's the framesize WebRTC uses. Getting any other
262 // size would require patching together buffers somewhere before passing them
264 return rec_sampling_rate_ * 10 / 1000;
267 int OpenSlesInput::buffer_size_bytes() const {
268 return buffer_size_samples() * kNumChannels * sizeof(int16_t);
271 void OpenSlesInput::UpdateRecordingDelay() {
272 // TODO(hellner): Add accurate delay estimate.
273 // On average half the current buffer will have been filled with audio.
274 int outstanding_samples =
275 (TotalBuffersUsed() - 0.5) * buffer_size_samples();
276 recording_delay_ = outstanding_samples / (rec_sampling_rate_ / 1000);
279 void OpenSlesInput::UpdateSampleRate() {
280 rec_sampling_rate_ = audio_manager_.low_latency_supported() ?
281 audio_manager_.native_output_sample_rate() : kDefaultSampleRate;
284 void OpenSlesInput::CalculateNumFifoBuffersNeeded() {
285 // Buffer size is 10ms of data.
286 num_fifo_buffers_needed_ = kNum10MsToBuffer;
289 void OpenSlesInput::AllocateBuffers() {
290 // Allocate FIFO to handle passing buffers between processing and OpenSL
292 CalculateNumFifoBuffersNeeded();
293 assert(num_fifo_buffers_needed_ > 0);
294 fifo_.reset(new SingleRwFifo(num_fifo_buffers_needed_));
296 // Allocate the memory area to be used.
297 rec_buf_.reset(new scoped_array<int8_t>[TotalBuffersUsed()]);
298 for (int i = 0; i < TotalBuffersUsed(); ++i) {
299 rec_buf_[i].reset(new int8_t[buffer_size_bytes()]);
303 int OpenSlesInput::TotalBuffersUsed() const {
304 return num_fifo_buffers_needed_ + kNumOpenSlBuffers;
307 bool OpenSlesInput::EnqueueAllBuffers() {
309 number_overruns_ = 0;
310 for (int i = 0; i < kNumOpenSlBuffers; ++i) {
311 memset(rec_buf_[i].get(), 0, buffer_size_bytes());
312 OPENSL_RETURN_ON_FAILURE(
313 (*sles_recorder_sbq_itf_)->Enqueue(
314 sles_recorder_sbq_itf_,
315 reinterpret_cast<void*>(rec_buf_[i].get()),
316 buffer_size_bytes()),
319 // In case of underrun the fifo will be at capacity. In case of first enqueue
320 // no audio can have been returned yet meaning fifo must be empty. Any other
321 // values are unexpected.
322 assert(fifo_->size() == fifo_->capacity() ||
324 // OpenSL recording has been stopped. I.e. only this thread is touching
326 while (fifo_->size() != 0) {
333 bool OpenSlesInput::CreateAudioRecorder() {
334 if (!event_.Start()) {
338 SLDataLocator_IODevice micLocator = {
339 SL_DATALOCATOR_IODEVICE, SL_IODEVICE_AUDIOINPUT,
340 SL_DEFAULTDEVICEID_AUDIOINPUT, NULL };
341 SLDataSource audio_source = { &micLocator, NULL };
343 SLDataLocator_AndroidSimpleBufferQueue simple_buf_queue = {
344 SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE,
345 static_cast<SLuint32>(TotalBuffersUsed())
347 SLDataFormat_PCM configuration =
348 webrtc_opensl::CreatePcmConfiguration(rec_sampling_rate_);
349 SLDataSink audio_sink = { &simple_buf_queue, &configuration };
351 // Interfaces for recording android audio data and Android are needed.
352 // Note the interfaces still need to be initialized. This only tells OpenSl
353 // that the interfaces will be needed at some point.
354 const SLInterfaceID id[kNumInterfaces] = {
355 SL_IID_ANDROIDSIMPLEBUFFERQUEUE, SL_IID_ANDROIDCONFIGURATION };
356 const SLboolean req[kNumInterfaces] = {
357 SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE };
358 OPENSL_RETURN_ON_FAILURE(
359 (*sles_engine_itf_)->CreateAudioRecorder(sles_engine_itf_,
368 // Realize the recorder in synchronous mode.
369 OPENSL_RETURN_ON_FAILURE((*sles_recorder_)->Realize(sles_recorder_,
372 OPENSL_RETURN_ON_FAILURE(
373 (*sles_recorder_)->GetInterface(sles_recorder_, SL_IID_RECORD,
374 static_cast<void*>(&sles_recorder_itf_)),
376 OPENSL_RETURN_ON_FAILURE(
377 (*sles_recorder_)->GetInterface(
379 SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
380 static_cast<void*>(&sles_recorder_sbq_itf_)),
385 void OpenSlesInput::DestroyAudioRecorder() {
387 if (sles_recorder_sbq_itf_) {
388 // Release all buffers currently queued up.
389 OPENSL_RETURN_ON_FAILURE(
390 (*sles_recorder_sbq_itf_)->Clear(sles_recorder_sbq_itf_),
392 sles_recorder_sbq_itf_ = NULL;
394 sles_recorder_itf_ = NULL;
396 if (sles_recorder_) {
397 (*sles_recorder_)->Destroy(sles_recorder_);
398 sles_recorder_ = NULL;
402 bool OpenSlesInput::HandleOverrun(int event_id, int event_msg) {
406 if (event_id == kNoOverrun) {
409 WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, id_, "Audio overrun");
410 assert(event_id == kOverrun);
411 assert(event_msg > 0);
412 // Wait for all enqueued buffers be flushed.
413 if (event_msg != kNumOpenSlBuffers) {
416 // All buffers passed to OpenSL have been flushed. Restart the audio from
418 // No need to check sles_recorder_itf_ as recording_ would be false before it
420 OPENSL_RETURN_ON_FAILURE(
421 (*sles_recorder_itf_)->SetRecordState(sles_recorder_itf_,
422 SL_RECORDSTATE_STOPPED),
425 OPENSL_RETURN_ON_FAILURE(
426 (*sles_recorder_itf_)->SetRecordState(sles_recorder_itf_,
427 SL_RECORDSTATE_RECORDING),
432 void OpenSlesInput::RecorderSimpleBufferQueueCallback(
433 SLAndroidSimpleBufferQueueItf queue_itf,
435 OpenSlesInput* audio_device = reinterpret_cast<OpenSlesInput*>(context);
436 audio_device->RecorderSimpleBufferQueueCallbackHandler(queue_itf);
439 void OpenSlesInput::RecorderSimpleBufferQueueCallbackHandler(
440 SLAndroidSimpleBufferQueueItf queue_itf) {
441 if (fifo_->size() >= fifo_->capacity() || number_overruns_ > 0) {
443 event_.SignalEvent(kOverrun, number_overruns_);
446 int8_t* audio = rec_buf_[active_queue_].get();
447 // There is at least one spot available in the fifo.
449 active_queue_ = (active_queue_ + 1) % TotalBuffersUsed();
450 event_.SignalEvent(kNoOverrun, 0);
451 // active_queue_ is indexing the next buffer to record to. Since the current
452 // buffer has been recorded it means that the buffer index
453 // kNumOpenSlBuffers - 1 past |active_queue_| contains the next free buffer.
454 // Since |fifo_| wasn't at capacity, at least one buffer is free to be used.
455 int next_free_buffer =
456 (active_queue_ + kNumOpenSlBuffers - 1) % TotalBuffersUsed();
457 OPENSL_RETURN_ON_FAILURE(
458 (*sles_recorder_sbq_itf_)->Enqueue(
459 sles_recorder_sbq_itf_,
460 reinterpret_cast<void*>(rec_buf_[next_free_buffer].get()),
461 buffer_size_bytes()),
465 bool OpenSlesInput::StartCbThreads() {
466 rec_thread_.reset(ThreadWrapper::CreateThread(CbThread,
469 "opensl_rec_thread"));
470 assert(rec_thread_.get());
471 unsigned int thread_id = 0;
472 if (!rec_thread_->Start(thread_id)) {
476 OPENSL_RETURN_ON_FAILURE(
477 (*sles_recorder_itf_)->SetRecordState(sles_recorder_itf_,
478 SL_RECORDSTATE_RECORDING),
483 void OpenSlesInput::StopCbThreads() {
485 CriticalSectionScoped lock(crit_sect_.get());
488 if (sles_recorder_itf_) {
489 OPENSL_RETURN_ON_FAILURE(
490 (*sles_recorder_itf_)->SetRecordState(sles_recorder_itf_,
491 SL_RECORDSTATE_STOPPED),
494 if (rec_thread_.get() == NULL) {
498 if (rec_thread_->Stop()) {
505 bool OpenSlesInput::CbThread(void* context) {
506 return reinterpret_cast<OpenSlesInput*>(context)->CbThreadImpl();
509 bool OpenSlesInput::CbThreadImpl() {
512 // event_ must not be waited on while a lock has been taken.
513 event_.WaitOnEvent(&event_id, &event_msg);
515 CriticalSectionScoped lock(crit_sect_.get());
516 if (HandleOverrun(event_id, event_msg)) {
519 // If the fifo_ has audio data process it.
520 while (fifo_->size() > 0 && recording_) {
521 int8_t* audio = fifo_->Pop();
522 audio_buffer_->SetRecordedBuffer(audio, buffer_size_samples());
523 audio_buffer_->SetVQEData(delay_provider_->PlayoutDelayMs(),
524 recording_delay_, 0);
525 audio_buffer_->DeliverRecordedData();
530 } // namespace webrtc