1 /*
2  *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3  *
4  *  Use of this source code is governed by a BSD-style license
5  *  that can be found in the LICENSE file in the root of the source
6  *  tree. An additional intellectual property rights grant can be found
7  *  in the file PATENTS.  All contributing project authors may
8  *  be found in the AUTHORS file in the root of the source tree.
9  */
10
11 #include "webrtc/voice_engine/channel.h"
12
13 #include "webrtc/base/timeutils.h"
14 #include "webrtc/common.h"
15 #include "webrtc/modules/audio_device/include/audio_device.h"
16 #include "webrtc/modules/audio_processing/include/audio_processing.h"
17 #include "webrtc/modules/interface/module_common_types.h"
18 #include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
19 #include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
20 #include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
21 #include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
22 #include "webrtc/modules/utility/interface/audio_frame_operations.h"
23 #include "webrtc/modules/utility/interface/process_thread.h"
24 #include "webrtc/modules/utility/interface/rtp_dump.h"
25 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
26 #include "webrtc/system_wrappers/interface/logging.h"
27 #include "webrtc/system_wrappers/interface/trace.h"
28 #include "webrtc/video_engine/include/vie_network.h"
29 #include "webrtc/voice_engine/include/voe_base.h"
30 #include "webrtc/voice_engine/include/voe_external_media.h"
31 #include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
32 #include "webrtc/voice_engine/output_mixer.h"
33 #include "webrtc/voice_engine/statistics.h"
34 #include "webrtc/voice_engine/transmit_mixer.h"
35 #include "webrtc/voice_engine/utility.h"
36
37 #if defined(_WIN32)
38 #include <Qos.h>
39 #endif
40
41 namespace webrtc {
42 namespace voe {
43
44 // Extend the default RTCP statistics struct with max_jitter, defined as the
45 // maximum jitter value seen in an RTCP report block.
46 struct ChannelStatistics : public RtcpStatistics {
47   ChannelStatistics() : rtcp(), max_jitter(0) {}
48
49   RtcpStatistics rtcp;
50   uint32_t max_jitter;
51 };
52
53 // Statistics callback, called at each generation of a new RTCP report block.
54 class StatisticsProxy : public RtcpStatisticsCallback {
55  public:
56   StatisticsProxy(uint32_t ssrc)
57    : stats_lock_(CriticalSectionWrapper::CreateCriticalSection()),
58      ssrc_(ssrc) {}
59   virtual ~StatisticsProxy() {}
60
61   virtual void StatisticsUpdated(const RtcpStatistics& statistics,
62                                  uint32_t ssrc) OVERRIDE {
63     if (ssrc != ssrc_)
64       return;
65
66     CriticalSectionScoped cs(stats_lock_.get());
67     stats_.rtcp = statistics;
68     if (statistics.jitter > stats_.max_jitter) {
69       stats_.max_jitter = statistics.jitter;
70     }
71   }
72
73   void ResetStatistics() {
74     CriticalSectionScoped cs(stats_lock_.get());
75     stats_ = ChannelStatistics();
76   }
77
78   ChannelStatistics GetStats() {
79     CriticalSectionScoped cs(stats_lock_.get());
80     return stats_;
81   }
82
83  private:
84   // StatisticsUpdated calls are triggered from threads in the RTP module,
85   // while GetStats calls can be triggered from the public voice engine API,
86   // hence synchronization is needed.
87   scoped_ptr<CriticalSectionWrapper> stats_lock_;
88   const uint32_t ssrc_;
89   ChannelStatistics stats_;
90 };
91
92 class VoEBitrateObserver : public BitrateObserver {
93  public:
94   explicit VoEBitrateObserver(Channel* owner)
95       : owner_(owner) {}
96   virtual ~VoEBitrateObserver() {}
97
98   // Implements BitrateObserver.
99   virtual void OnNetworkChanged(const uint32_t bitrate_bps,
100                                 const uint8_t fraction_lost,
101                                 const uint32_t rtt) OVERRIDE {
102     // |fraction_lost| has a scale of 0 - 255.
103     owner_->OnNetworkChanged(bitrate_bps, fraction_lost, rtt);
104   }
105
106  private:
107   Channel* owner_;
108 };
109
110 int32_t
111 Channel::SendData(FrameType frameType,
112                   uint8_t   payloadType,
113                   uint32_t  timeStamp,
114                   const uint8_t*  payloadData,
115                   uint16_t  payloadSize,
116                   const RTPFragmentationHeader* fragmentation)
117 {
118     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
119                  "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
120                  " payloadSize=%u, fragmentation=0x%x)",
121                  frameType, payloadType, timeStamp, payloadSize, fragmentation);
122
123     if (_includeAudioLevelIndication)
124     {
125         // Store current audio level in the RTP/RTCP module.
126         // The level will be used in combination with voice-activity state
127         // (frameType) to add an RTP header extension
128         _rtpRtcpModule->SetAudioLevel(rms_level_.RMS());
129     }
130
131     // Push data from ACM to RTP/RTCP-module to deliver audio frame for
132     // packetization.
133     // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
134     if (_rtpRtcpModule->SendOutgoingData((FrameType&)frameType,
135                                         payloadType,
136                                         timeStamp,
137                                         // Leaving the time when this frame was
138                                         // received from the capture device as
139                                         // undefined for voice for now.
140                                         -1,
141                                         payloadData,
142                                         payloadSize,
143                                         fragmentation) == -1)
144     {
145         _engineStatisticsPtr->SetLastError(
146             VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
147             "Channel::SendData() failed to send data to RTP/RTCP module");
148         return -1;
149     }
150
151     _lastLocalTimeStamp = timeStamp;
152     _lastPayloadType = payloadType;
153
154     return 0;
155 }
156
157 int32_t
158 Channel::InFrameType(int16_t frameType)
159 {
160     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
161                  "Channel::InFrameType(frameType=%d)", frameType);
162
163     CriticalSectionScoped cs(&_callbackCritSect);
164     // 1 indicates speech
165     _sendFrameType = (frameType == 1) ? 1 : 0;
166     return 0;
167 }
168
169 int32_t
170 Channel::OnRxVadDetected(int vadDecision)
171 {
172     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
173                  "Channel::OnRxVadDetected(vadDecision=%d)", vadDecision);
174
175     CriticalSectionScoped cs(&_callbackCritSect);
176     if (_rxVadObserverPtr)
177     {
178         _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
179     }
180
181     return 0;
182 }
183
184 int
185 Channel::SendPacket(int channel, const void *data, int len)
186 {
187     channel = VoEChannelId(channel);
188     assert(channel == _channelId);
189
190     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
191                  "Channel::SendPacket(channel=%d, len=%d)", channel, len);
192
193     CriticalSectionScoped cs(&_callbackCritSect);
194
195     if (_transportPtr == NULL)
196     {
197         WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
198                      "Channel::SendPacket() failed to send RTP packet due to"
199                      " invalid transport object");
200         return -1;
201     }
202
203     uint8_t* bufferToSendPtr = (uint8_t*)data;
204     int32_t bufferLength = len;
205
206     // Dump the RTP packet to a file (if RTP dump is enabled).
207     if (_rtpDumpOut.DumpPacket((const uint8_t*)data, len) == -1)
208     {
209         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
210                      VoEId(_instanceId,_channelId),
211                      "Channel::SendPacket() RTP dump to output file failed");
212     }
213
214     int n = _transportPtr->SendPacket(channel, bufferToSendPtr,
215                                       bufferLength);
216     if (n < 0) {
217       std::string transport_name =
218           _externalTransport ? "external transport" : "WebRtc sockets";
219       WEBRTC_TRACE(kTraceError, kTraceVoice,
220                    VoEId(_instanceId,_channelId),
221                    "Channel::SendPacket() RTP transmission using %s failed",
222                    transport_name.c_str());
223       return -1;
224     }
225     return n;
226 }
227
228 int
229 Channel::SendRTCPPacket(int channel, const void *data, int len)
230 {
231     channel = VoEChannelId(channel);
232     assert(channel == _channelId);
233
234     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
235                  "Channel::SendRTCPPacket(channel=%d, len=%d)", channel, len);
236
237     CriticalSectionScoped cs(&_callbackCritSect);
238     if (_transportPtr == NULL)
239     {
240         WEBRTC_TRACE(kTraceError, kTraceVoice,
241                      VoEId(_instanceId,_channelId),
242                      "Channel::SendRTCPPacket() failed to send RTCP packet"
243                      " due to invalid transport object");
244         return -1;
245     }
246
247     uint8_t* bufferToSendPtr = (uint8_t*)data;
248     int32_t bufferLength = len;
249
250     // Dump the RTCP packet to a file (if RTP dump is enabled).
251     if (_rtpDumpOut.DumpPacket((const uint8_t*)data, len) == -1)
252     {
253         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
254                      VoEId(_instanceId,_channelId),
255                      "Channel::SendRTCPPacket() RTCP dump to output file failed");
256     }
257
258     int n = _transportPtr->SendRTCPPacket(channel,
259                                           bufferToSendPtr,
260                                           bufferLength);
261     if (n < 0) {
262       std::string transport_name =
263           _externalTransport ? "external transport" : "WebRtc sockets";
264       WEBRTC_TRACE(kTraceInfo, kTraceVoice,
265                    VoEId(_instanceId,_channelId),
266                    "Channel::SendRTCPPacket() transmission using %s failed",
267                    transport_name.c_str());
268       return -1;
269     }
270     return n;
271 }
272
273 void
274 Channel::OnPlayTelephoneEvent(int32_t id,
275                               uint8_t event,
276                               uint16_t lengthMs,
277                               uint8_t volume)
278 {
279     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
280                  "Channel::OnPlayTelephoneEvent(id=%d, event=%u, lengthMs=%u,"
281                  " volume=%u)", id, event, lengthMs, volume);
282
283     if (!_playOutbandDtmfEvent || (event > 15))
284     {
285         // Ignore callback since feedback is disabled or event is not a
286         // Dtmf tone event.
287         return;
288     }
289
290     assert(_outputMixerPtr != NULL);
291
292     // Start playing out the Dtmf tone (if playout is enabled).
293     // Reduce the length of the tone by 80 ms to reduce the risk of echo.
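    // (For example, a 200 ms DTMF event is played out as a 120 ms tone.)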
294     _outputMixerPtr->PlayDtmfTone(event, lengthMs - 80, volume);
295 }
296
297 void
298 Channel::OnIncomingSSRCChanged(int32_t id, uint32_t ssrc)
299 {
300     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
301                  "Channel::OnIncomingSSRCChanged(id=%d, SSRC=%d)",
302                  id, ssrc);
303
304     // Update ssrc so that NTP for AV sync can be updated.
305     _rtpRtcpModule->SetRemoteSSRC(ssrc);
306 }
307
308 void Channel::OnIncomingCSRCChanged(int32_t id,
309                                     uint32_t CSRC,
310                                     bool added)
311 {
312     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
313                  "Channel::OnIncomingCSRCChanged(id=%d, CSRC=%d, added=%d)",
314                  id, CSRC, added);
315 }
316
317 void Channel::ResetStatistics(uint32_t ssrc) {
318   StreamStatistician* statistician =
319       rtp_receive_statistics_->GetStatistician(ssrc);
320   if (statistician) {
321     statistician->ResetStatistics();
322   }
323   statistics_proxy_->ResetStatistics();
324 }
325
326 void
327 Channel::OnApplicationDataReceived(int32_t id,
328                                    uint8_t subType,
329                                    uint32_t name,
330                                    uint16_t length,
331                                    const uint8_t* data)
332 {
333     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
334                  "Channel::OnApplicationDataReceived(id=%d, subType=%u,"
335                  " name=%u, length=%u)",
336                  id, subType, name, length);
337
338     int32_t channel = VoEChannelId(id);
339     assert(channel == _channelId);
340
341     if (_rtcpObserver)
342     {
343         CriticalSectionScoped cs(&_callbackCritSect);
344
345         if (_rtcpObserverPtr)
346         {
347             _rtcpObserverPtr->OnApplicationDataReceived(channel,
348                                                         subType,
349                                                         name,
350                                                         data,
351                                                         length);
352         }
353     }
354 }
355
356 int32_t
357 Channel::OnInitializeDecoder(
358     int32_t id,
359     int8_t payloadType,
360     const char payloadName[RTP_PAYLOAD_NAME_SIZE],
361     int frequency,
362     uint8_t channels,
363     uint32_t rate)
364 {
365     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
366                  "Channel::OnInitializeDecoder(id=%d, payloadType=%d, "
367                  "payloadName=%s, frequency=%u, channels=%u, rate=%u)",
368                  id, payloadType, payloadName, frequency, channels, rate);
369
370     assert(VoEChannelId(id) == _channelId);
371
372     CodecInst receiveCodec = {0};
373     CodecInst dummyCodec = {0};
374
375     receiveCodec.pltype = payloadType;
376     receiveCodec.plfreq = frequency;
377     receiveCodec.channels = channels;
378     receiveCodec.rate = rate;
379     strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
380
381     audio_coding_->Codec(payloadName, &dummyCodec, frequency, channels);
382     receiveCodec.pacsize = dummyCodec.pacsize;
383
384     // Register the new codec to the ACM
385     if (audio_coding_->RegisterReceiveCodec(receiveCodec) == -1)
386     {
387         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
388                      VoEId(_instanceId, _channelId),
389                      "Channel::OnInitializeDecoder() invalid codec ("
390                      "pt=%d, name=%s) received - 1", payloadType, payloadName);
391         _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
392         return -1;
393     }
394
395     return 0;
396 }
397
398 int32_t
399 Channel::OnReceivedPayloadData(const uint8_t* payloadData,
400                                uint16_t payloadSize,
401                                const WebRtcRTPHeader* rtpHeader)
402 {
403     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
404                  "Channel::OnReceivedPayloadData(payloadSize=%d,"
405                  " payloadType=%u, audioChannel=%u)",
406                  payloadSize,
407                  rtpHeader->header.payloadType,
408                  rtpHeader->type.Audio.channel);
409
410     if (!channel_state_.Get().playing)
411     {
412         // Avoid inserting into NetEQ when we are not playing. Count the
413         // packet as discarded.
414         WEBRTC_TRACE(kTraceStream, kTraceVoice,
415                      VoEId(_instanceId, _channelId),
416                      "received packet is discarded since playing is not"
417                      " activated");
418         _numberOfDiscardedPackets++;
419         return 0;
420     }
421
422     // Push the incoming payload (parsed and ready for decoding) into the ACM
423     if (audio_coding_->IncomingPacket(payloadData,
424                                       payloadSize,
425                                       *rtpHeader) != 0)
426     {
427         _engineStatisticsPtr->SetLastError(
428             VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
429             "Channel::OnReceivedPayloadData() unable to push data to the ACM");
430         return -1;
431     }
432
433     // Update the packet delay.
434     UpdatePacketDelay(rtpHeader->header.timestamp,
435                       rtpHeader->header.sequenceNumber);
436
437     uint16_t round_trip_time = 0;
438     _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time,
439                         NULL, NULL, NULL);
440
441     std::vector<uint16_t> nack_list = audio_coding_->GetNackList(
442         round_trip_time);
443     if (!nack_list.empty()) {
444       // Can't use nack_list.data() since it's not supported by all
445       // compilers.
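      // Taking the address of nack_list[0] here is safe only because the
      // emptiness check above guarantees at least one element.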
446       ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
447     }
448     return 0;
449 }
450
451 bool Channel::OnRecoveredPacket(const uint8_t* rtp_packet,
452                                 int rtp_packet_length) {
453   RTPHeader header;
454   if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
455     WEBRTC_TRACE(kTraceDebug, webrtc::kTraceVoice, _channelId,
456                  "IncomingPacket invalid RTP header");
457     return false;
458   }
459   header.payload_type_frequency =
460       rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
461   if (header.payload_type_frequency < 0)
462     return false;
463   return ReceivePacket(rtp_packet, rtp_packet_length, header, false);
464 }
465
466 int32_t Channel::GetAudioFrame(int32_t id, AudioFrame& audioFrame)
467 {
468     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
469                  "Channel::GetAudioFrame(id=%d)", id);
470
471     // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
472     if (audio_coding_->PlayoutData10Ms(audioFrame.sample_rate_hz_,
473                                        &audioFrame) == -1)
474     {
475         WEBRTC_TRACE(kTraceError, kTraceVoice,
476                      VoEId(_instanceId,_channelId),
477                      "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
478         // In all likelihood, the audio in this frame is garbage. We return an
479         // error so that the audio mixer module doesn't add it to the mix. As
480         // a result, it won't be played out and the actions skipped here are
481         // irrelevant.
482         return -1;
483     }
484
485     if (_RxVadDetection)
486     {
487         UpdateRxVadDetection(audioFrame);
488     }
489
490     // Convert module ID to internal VoE channel ID
491     audioFrame.id_ = VoEChannelId(audioFrame.id_);
492     // Store speech type for dead-or-alive detection
493     _outputSpeechType = audioFrame.speech_type_;
494
495     ChannelState::State state = channel_state_.Get();
496
497     if (state.rx_apm_is_enabled) {
498       int err = rx_audioproc_->ProcessStream(&audioFrame);
499       if (err) {
500         LOG(LS_ERROR) << "ProcessStream() error: " << err;
501         assert(false);
502       }
503     }
504
505     float output_gain = 1.0f;
506     float left_pan =  1.0f;
507     float right_pan =  1.0f;
508     {
509       CriticalSectionScoped cs(&volume_settings_critsect_);
510       output_gain = _outputGain;
511       left_pan = _panLeft;
512       right_pan = _panRight;
513     }
514
515     // Output volume scaling
516     if (output_gain < 0.99f || output_gain > 1.01f)
517     {
518         AudioFrameOperations::ScaleWithSat(output_gain, audioFrame);
519     }
520
521     // Scale left and/or right channel(s) if stereo and master balance is
522     // active
523
524     if (left_pan != 1.0f || right_pan != 1.0f)
525     {
526         if (audioFrame.num_channels_ == 1)
527         {
528             // Emulate stereo mode since panning is active.
529             // The mono signal is copied to both left and right channels here.
530             AudioFrameOperations::MonoToStereo(&audioFrame);
531         }
532         // For true stereo mode (when we are receiving a stereo signal), no
533         // action is needed.
534
535         // Do the panning operation (the audio frame contains stereo at this
536         // stage)
537         AudioFrameOperations::Scale(left_pan, right_pan, audioFrame);
538     }
539
540     // Mix decoded PCM output with file if file mixing is enabled
541     if (state.output_file_playing)
542     {
543         MixAudioWithFile(audioFrame, audioFrame.sample_rate_hz_);
544     }
545
546     // External media
547     if (_outputExternalMedia)
548     {
549         CriticalSectionScoped cs(&_callbackCritSect);
550         const bool isStereo = (audioFrame.num_channels_ == 2);
551         if (_outputExternalMediaCallbackPtr)
552         {
553             _outputExternalMediaCallbackPtr->Process(
554                 _channelId,
555                 kPlaybackPerChannel,
556                 (int16_t*)audioFrame.data_,
557                 audioFrame.samples_per_channel_,
558                 audioFrame.sample_rate_hz_,
559                 isStereo);
560         }
561     }
562
563     // Record playout if enabled
564     {
565         CriticalSectionScoped cs(&_fileCritSect);
566
567         if (_outputFileRecording && _outputFileRecorderPtr)
568         {
569             _outputFileRecorderPtr->RecordAudioToFile(audioFrame);
570         }
571     }
572
573     // Measure audio level (0-9)
574     _outputAudioLevel.ComputeLevel(audioFrame);
575
576     if (capture_start_rtp_time_stamp_ < 0 && audioFrame.timestamp_ != 0) {
577       // The first frame with a valid rtp timestamp.
578       capture_start_rtp_time_stamp_ = audioFrame.timestamp_;
579     }
580
581     if (capture_start_rtp_time_stamp_ >= 0) {
582       // audioFrame.timestamp_ should be valid from now on.
583
584       // Compute elapsed time.
585       int64_t unwrap_timestamp =
586           rtp_ts_wraparound_handler_->Unwrap(audioFrame.timestamp_);
587       audioFrame.elapsed_time_ms_ =
588           (unwrap_timestamp - capture_start_rtp_time_stamp_) /
589           (GetPlayoutFrequency() / 1000);
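      // For example, with a 48000 Hz playout clock the divisor is 48 samples
      // per millisecond, so a timestamp delta of 96000 samples yields 2000 ms.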
590
591       {
592         CriticalSectionScoped lock(ts_stats_lock_.get());
593         // Compute ntp time.
594         audioFrame.ntp_time_ms_ = ntp_estimator_.Estimate(
595             audioFrame.timestamp_);
596         // |ntp_time_ms_| won't be valid until at least 2 RTCP SRs are received.
597         if (audioFrame.ntp_time_ms_ > 0) {
598           // Compute |capture_start_ntp_time_ms_| so that
599           // |capture_start_ntp_time_ms_| + |elapsed_time_ms_| == |ntp_time_ms_|
600           capture_start_ntp_time_ms_ =
601               audioFrame.ntp_time_ms_ - audioFrame.elapsed_time_ms_;
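          // For example, ntp_time_ms_ = 900000040 with elapsed_time_ms_ = 40
          // gives capture_start_ntp_time_ms_ = 900000000.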
602         }
603       }
604     }
605
606     return 0;
607 }
608
609 int32_t
610 Channel::NeededFrequency(int32_t id)
611 {
612     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
613                  "Channel::NeededFrequency(id=%d)", id);
614
615     int highestNeeded = 0;
616
617     // Determine highest needed receive frequency
618     int32_t receiveFrequency = audio_coding_->ReceiveFrequency();
619
620     // Return the bigger of playout and receive frequency in the ACM.
621     if (audio_coding_->PlayoutFrequency() > receiveFrequency)
622     {
623         highestNeeded = audio_coding_->PlayoutFrequency();
624     }
625     else
626     {
627         highestNeeded = receiveFrequency;
628     }
629
630     // Special case: if we're playing a file on the playout side we take
631     // that frequency into consideration as well.
632     // This is not needed on the sending side, since the codec will
633     // limit the spectrum anyway.
634     if (channel_state_.Get().output_file_playing)
635     {
636         CriticalSectionScoped cs(&_fileCritSect);
637         if (_outputFilePlayerPtr)
638         {
639             if (_outputFilePlayerPtr->Frequency() > highestNeeded)
640             {
641                 highestNeeded = _outputFilePlayerPtr->Frequency();
642             }
643         }
644     }
645
646     return highestNeeded;
647 }
648
649 int32_t
650 Channel::CreateChannel(Channel*& channel,
651                        int32_t channelId,
652                        uint32_t instanceId,
653                        const Config& config)
654 {
655     WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId,channelId),
656                  "Channel::CreateChannel(channelId=%d, instanceId=%d)",
657         channelId, instanceId);
658
659     channel = new Channel(channelId, instanceId, config);
660     if (channel == NULL)
661     {
662         WEBRTC_TRACE(kTraceMemory, kTraceVoice,
663                      VoEId(instanceId,channelId),
664                      "Channel::CreateChannel() unable to allocate memory for"
665                      " channel");
666         return -1;
667     }
668     return 0;
669 }
670
671 void
672 Channel::PlayNotification(int32_t id, uint32_t durationMs)
673 {
674     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
675                  "Channel::PlayNotification(id=%d, durationMs=%d)",
676                  id, durationMs);
677
678     // Not implemented yet.
679 }
680
681 void
682 Channel::RecordNotification(int32_t id, uint32_t durationMs)
683 {
684     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
685                  "Channel::RecordNotification(id=%d, durationMs=%d)",
686                  id, durationMs);
687
688     // Not implemented yet.
689 }
690
691 void
692 Channel::PlayFileEnded(int32_t id)
693 {
694     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
695                  "Channel::PlayFileEnded(id=%d)", id);
696
697     if (id == _inputFilePlayerId)
698     {
699         channel_state_.SetInputFilePlaying(false);
700         WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
701                      VoEId(_instanceId,_channelId),
702                      "Channel::PlayFileEnded() => input file player module is"
703                      " shutdown");
704     }
705     else if (id == _outputFilePlayerId)
706     {
707         channel_state_.SetOutputFilePlaying(false);
708         WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
709                      VoEId(_instanceId,_channelId),
710                      "Channel::PlayFileEnded() => output file player module is"
711                      " shutdown");
712     }
713 }
714
715 void
716 Channel::RecordFileEnded(int32_t id)
717 {
718     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
719                  "Channel::RecordFileEnded(id=%d)", id);
720
721     assert(id == _outputFileRecorderId);
722
723     CriticalSectionScoped cs(&_fileCritSect);
724
725     _outputFileRecording = false;
726     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
727                  VoEId(_instanceId,_channelId),
728                  "Channel::RecordFileEnded() => output file recorder module is"
729                  " shutdown");
730 }
731
732 Channel::Channel(int32_t channelId,
733                  uint32_t instanceId,
734                  const Config& config) :
735     _fileCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
736     _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
737     volume_settings_critsect_(*CriticalSectionWrapper::CreateCriticalSection()),
738     _instanceId(instanceId),
739     _channelId(channelId),
740     rtp_header_parser_(RtpHeaderParser::Create()),
741     rtp_payload_registry_(
742         new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(true))),
743     rtp_receive_statistics_(ReceiveStatistics::Create(
744         Clock::GetRealTimeClock())),
745     rtp_receiver_(RtpReceiver::CreateAudioReceiver(
746         VoEModuleId(instanceId, channelId), Clock::GetRealTimeClock(), this,
747         this, this, rtp_payload_registry_.get())),
748     telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
749     audio_coding_(AudioCodingModule::Create(
750         VoEModuleId(instanceId, channelId))),
751     _rtpDumpIn(*RtpDump::CreateRtpDump()),
752     _rtpDumpOut(*RtpDump::CreateRtpDump()),
753     _outputAudioLevel(),
754     _externalTransport(false),
755     _inputFilePlayerPtr(NULL),
756     _outputFilePlayerPtr(NULL),
757     _outputFileRecorderPtr(NULL),
758     // Avoid conflicts with other channels by adding 1024 - 1026;
759     // we won't use as many as 1024 channels.
760     _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
761     _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
762     _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
763     _outputFileRecording(false),
764     _inbandDtmfQueue(VoEModuleId(instanceId, channelId)),
765     _inbandDtmfGenerator(VoEModuleId(instanceId, channelId)),
766     _outputExternalMedia(false),
767     _inputExternalMediaCallbackPtr(NULL),
768     _outputExternalMediaCallbackPtr(NULL),
769     _timeStamp(0), // This is just an offset; the RTP module will add its own random offset.
770     _sendTelephoneEventPayloadType(106),
771     ntp_estimator_(Clock::GetRealTimeClock()),
772     jitter_buffer_playout_timestamp_(0),
773     playout_timestamp_rtp_(0),
774     playout_timestamp_rtcp_(0),
775     playout_delay_ms_(0),
776     _numberOfDiscardedPackets(0),
777     send_sequence_number_(0),
778     ts_stats_lock_(CriticalSectionWrapper::CreateCriticalSection()),
779     rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()),
780     capture_start_rtp_time_stamp_(-1),
781     capture_start_ntp_time_ms_(-1),
782     _engineStatisticsPtr(NULL),
783     _outputMixerPtr(NULL),
784     _transmitMixerPtr(NULL),
785     _moduleProcessThreadPtr(NULL),
786     _audioDeviceModulePtr(NULL),
787     _voiceEngineObserverPtr(NULL),
788     _callbackCritSectPtr(NULL),
789     _transportPtr(NULL),
790     _rxVadObserverPtr(NULL),
791     _oldVadDecision(-1),
792     _sendFrameType(0),
793     _rtcpObserverPtr(NULL),
794     _externalMixing(false),
795     _mixFileWithMicrophone(false),
796     _rtcpObserver(false),
797     _mute(false),
798     _panLeft(1.0f),
799     _panRight(1.0f),
800     _outputGain(1.0f),
801     _playOutbandDtmfEvent(false),
802     _playInbandDtmfEvent(false),
803     _lastLocalTimeStamp(0),
804     _lastPayloadType(0),
805     _includeAudioLevelIndication(false),
806     _outputSpeechType(AudioFrame::kNormalSpeech),
807     vie_network_(NULL),
808     video_channel_(-1),
809     _average_jitter_buffer_delay_us(0),
810     least_required_delay_ms_(0),
811     _previousTimestamp(0),
812     _recPacketDelayMs(20),
813     _RxVadDetection(false),
814     _rxAgcIsEnabled(false),
815     _rxNsIsEnabled(false),
816     restored_packet_in_use_(false),
817     bitrate_controller_(
818         BitrateController::CreateBitrateController(Clock::GetRealTimeClock(),
819                                                    true)),
820     rtcp_bandwidth_observer_(
821         bitrate_controller_->CreateRtcpBandwidthObserver()),
822     send_bitrate_observer_(new VoEBitrateObserver(this)),
823     network_predictor_(new NetworkPredictor(Clock::GetRealTimeClock()))
824 {
825     WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
826                  "Channel::Channel() - ctor");
827     _inbandDtmfQueue.ResetDtmf();
828     _inbandDtmfGenerator.Init();
829     _outputAudioLevel.Clear();
830
831     RtpRtcp::Configuration configuration;
832     configuration.id = VoEModuleId(instanceId, channelId);
833     configuration.audio = true;
834     configuration.outgoing_transport = this;
835     configuration.rtcp_feedback = this;
836     configuration.audio_messages = this;
837     configuration.receive_statistics = rtp_receive_statistics_.get();
838     configuration.bandwidth_callback = rtcp_bandwidth_observer_.get();
839
840     _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
841
842     statistics_proxy_.reset(new StatisticsProxy(_rtpRtcpModule->SSRC()));
843     rtp_receive_statistics_->RegisterRtcpStatisticsCallback(
844         statistics_proxy_.get());
845
846     Config audioproc_config;
847     audioproc_config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
848     rx_audioproc_.reset(AudioProcessing::Create(audioproc_config));
849 }
850
851 Channel::~Channel()
852 {
853     rtp_receive_statistics_->RegisterRtcpStatisticsCallback(NULL);
854     WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
855                  "Channel::~Channel() - dtor");
856
857     if (_outputExternalMedia)
858     {
859         DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
860     }
861     if (channel_state_.Get().input_external_media)
862     {
863         DeRegisterExternalMediaProcessing(kRecordingPerChannel);
864     }
865     StopSend();
866     StopPlayout();
867
868     {
869         CriticalSectionScoped cs(&_fileCritSect);
870         if (_inputFilePlayerPtr)
871         {
872             _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
873             _inputFilePlayerPtr->StopPlayingFile();
874             FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
875             _inputFilePlayerPtr = NULL;
876         }
877         if (_outputFilePlayerPtr)
878         {
879             _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
880             _outputFilePlayerPtr->StopPlayingFile();
881             FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
882             _outputFilePlayerPtr = NULL;
883         }
884         if (_outputFileRecorderPtr)
885         {
886             _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
887             _outputFileRecorderPtr->StopRecording();
888             FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
889             _outputFileRecorderPtr = NULL;
890         }
891     }
892
893     // The order to safely shut down modules in a channel is:
894     // 1. De-register callbacks in modules
895     // 2. De-register modules in process thread
896     // 3. Destroy modules
897     if (audio_coding_->RegisterTransportCallback(NULL) == -1)
898     {
899         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
900                      VoEId(_instanceId,_channelId),
901                      "~Channel() failed to de-register transport callback"
902                      " (Audio coding module)");
903     }
904     if (audio_coding_->RegisterVADCallback(NULL) == -1)
905     {
906         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
907                      VoEId(_instanceId,_channelId),
908                      "~Channel() failed to de-register VAD callback"
909                      " (Audio coding module)");
910     }
911     // De-register modules in process thread
912     if (_moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()) == -1)
913     {
914         WEBRTC_TRACE(kTraceInfo, kTraceVoice,
915                      VoEId(_instanceId,_channelId),
916                      "~Channel() failed to deregister RTP/RTCP module");
917     }
918     // End of modules shutdown
919
920     // Delete other objects
921     if (vie_network_) {
922       vie_network_->Release();
923       vie_network_ = NULL;
924     }
925     RtpDump::DestroyRtpDump(&_rtpDumpIn);
926     RtpDump::DestroyRtpDump(&_rtpDumpOut);
927     delete &_callbackCritSect;
928     delete &_fileCritSect;
929     delete &volume_settings_critsect_;
930 }
931
932 int32_t
933 Channel::Init()
934 {
935     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
936                  "Channel::Init()");
937
938     channel_state_.Reset();
939
940     // --- Initial sanity
941
942     if ((_engineStatisticsPtr == NULL) ||
943         (_moduleProcessThreadPtr == NULL))
944     {
945         WEBRTC_TRACE(kTraceError, kTraceVoice,
946                      VoEId(_instanceId,_channelId),
947                      "Channel::Init() must call SetEngineInformation() first");
948         return -1;
949     }
950
951     // --- Add modules to process thread (for periodic scheduling)
952
953     const bool processThreadFail =
954         ((_moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get()) != 0) ||
955         false);
956     if (processThreadFail)
957     {
958         _engineStatisticsPtr->SetLastError(
959             VE_CANNOT_INIT_CHANNEL, kTraceError,
960             "Channel::Init() modules not registered");
961         return -1;
962     }
963     // --- ACM initialization
964
965     if ((audio_coding_->InitializeReceiver() == -1) ||
966 #ifdef WEBRTC_CODEC_AVT
967         // out-of-band Dtmf tones are played out by default
968         (audio_coding_->SetDtmfPlayoutStatus(true) == -1) ||
969 #endif
970         (audio_coding_->InitializeSender() == -1))
971     {
972         _engineStatisticsPtr->SetLastError(
973             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
974             "Channel::Init() unable to initialize the ACM - 1");
975         return -1;
976     }
977
978     // --- RTP/RTCP module initialization
979
980     // Ensure that RTCP is enabled by default for the created channel.
981     // Note that the module will keep generating RTCP until it is explicitly
982     // disabled by the user.
983     // After StopListen (when no sockets exist), RTCP packets will no longer
984     // be transmitted since the Transport object will then be invalid.
985     telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
986     // RTCP is enabled by default.
987     if (_rtpRtcpModule->SetRTCPStatus(kRtcpCompound) == -1)
988     {
989         _engineStatisticsPtr->SetLastError(
990             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
991             "Channel::Init() RTP/RTCP module not initialized");
992         return -1;
993     }
994
995      // --- Register all permanent callbacks
996     const bool fail =
997         (audio_coding_->RegisterTransportCallback(this) == -1) ||
998         (audio_coding_->RegisterVADCallback(this) == -1);
999
1000     if (fail)
1001     {
1002         _engineStatisticsPtr->SetLastError(
1003             VE_CANNOT_INIT_CHANNEL, kTraceError,
1004             "Channel::Init() callbacks not registered");
1005         return -1;
1006     }
1007
1008     // --- Register all supported codecs to the receiving side of the
1009     // RTP/RTCP module
1010
1011     CodecInst codec;
1012     const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
1013
1014     for (int idx = 0; idx < nSupportedCodecs; idx++)
1015     {
1016         // Open up the RTP/RTCP receiver for all supported codecs
1017         if ((audio_coding_->Codec(idx, &codec) == -1) ||
1018             (rtp_receiver_->RegisterReceivePayload(
1019                 codec.plname,
1020                 codec.pltype,
1021                 codec.plfreq,
1022                 codec.channels,
1023                 (codec.rate < 0) ? 0 : codec.rate) == -1))
1024         {
1025             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1026                          VoEId(_instanceId,_channelId),
1027                          "Channel::Init() unable to register %s (%d/%d/%d/%d) "
1028                          "to RTP/RTCP receiver",
1029                          codec.plname, codec.pltype, codec.plfreq,
1030                          codec.channels, codec.rate);
1031         }
1032         else
1033         {
1034             WEBRTC_TRACE(kTraceInfo, kTraceVoice,
1035                          VoEId(_instanceId,_channelId),
1036                          "Channel::Init() %s (%d/%d/%d/%d) has been added to "
1037                          "the RTP/RTCP receiver",
1038                          codec.plname, codec.pltype, codec.plfreq,
1039                          codec.channels, codec.rate);
1040         }
1041
1042         // Ensure that PCMU is used as the default codec on the sending side.
1043         if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1))
1044         {
1045             SetSendCodec(codec);
1046         }
1047
1048         // Register default PT for outband 'telephone-event'
1049         if (!STR_CASE_CMP(codec.plname, "telephone-event"))
1050         {
1051             if ((_rtpRtcpModule->RegisterSendPayload(codec) == -1) ||
1052                 (audio_coding_->RegisterReceiveCodec(codec) == -1))
1053             {
1054                 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1055                              VoEId(_instanceId,_channelId),
1056                              "Channel::Init() failed to register outband "
1057                              "'telephone-event' (%d/%d) correctly",
1058                              codec.pltype, codec.plfreq);
1059             }
1060         }
1061
1062         if (!STR_CASE_CMP(codec.plname, "CN"))
1063         {
1064             if ((audio_coding_->RegisterSendCodec(codec) == -1) ||
1065                 (audio_coding_->RegisterReceiveCodec(codec) == -1) ||
1066                 (_rtpRtcpModule->RegisterSendPayload(codec) == -1))
1067             {
1068                 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1069                              VoEId(_instanceId,_channelId),
1070                              "Channel::Init() failed to register CN (%d/%d) "
1071                              "correctly - 1",
1072                              codec.pltype, codec.plfreq);
1073             }
1074         }
1075 #ifdef WEBRTC_CODEC_RED
1076         // Register RED to the receiving side of the ACM.
1077         // We will not receive an OnInitializeDecoder() callback for RED.
1078         if (!STR_CASE_CMP(codec.plname, "RED"))
1079         {
1080             if (audio_coding_->RegisterReceiveCodec(codec) == -1)
1081             {
1082                 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1083                              VoEId(_instanceId,_channelId),
1084                              "Channel::Init() failed to register RED (%d/%d) "
1085                              "correctly",
1086                              codec.pltype, codec.plfreq);
1087             }
1088         }
1089 #endif
1090     }
1091
1092     if (rx_audioproc_->noise_suppression()->set_level(kDefaultNsMode) != 0) {
1093       LOG_FERR1(LS_ERROR, noise_suppression()->set_level, kDefaultNsMode);
1094       return -1;
1095     }
1096     if (rx_audioproc_->gain_control()->set_mode(kDefaultRxAgcMode) != 0) {
1097       LOG_FERR1(LS_ERROR, gain_control()->set_mode, kDefaultRxAgcMode);
1098       return -1;
1099     }
1100
1101     return 0;
1102 }
1103
1104 int32_t
1105 Channel::SetEngineInformation(Statistics& engineStatistics,
1106                               OutputMixer& outputMixer,
1107                               voe::TransmitMixer& transmitMixer,
1108                               ProcessThread& moduleProcessThread,
1109                               AudioDeviceModule& audioDeviceModule,
1110                               VoiceEngineObserver* voiceEngineObserver,
1111                               CriticalSectionWrapper* callbackCritSect)
1112 {
1113     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1114                  "Channel::SetEngineInformation()");
1115     _engineStatisticsPtr = &engineStatistics;
1116     _outputMixerPtr = &outputMixer;
1117     _transmitMixerPtr = &transmitMixer;
1118     _moduleProcessThreadPtr = &moduleProcessThread;
1119     _audioDeviceModulePtr = &audioDeviceModule;
1120     _voiceEngineObserverPtr = voiceEngineObserver;
1121     _callbackCritSectPtr = callbackCritSect;
1122     return 0;
1123 }
1124
1125 int32_t
1126 Channel::UpdateLocalTimeStamp()
1127 {
1128
1129     _timeStamp += _audioFrame.samples_per_channel_;
1130     return 0;
1131 }
1132
1133 int32_t
1134 Channel::StartPlayout()
1135 {
1136     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1137                  "Channel::StartPlayout()");
1138     if (channel_state_.Get().playing)
1139     {
1140         return 0;
1141     }
1142
1143     if (!_externalMixing) {
1144         // Add participant as a candidate for mixing.
1145         if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0)
1146         {
1147             _engineStatisticsPtr->SetLastError(
1148                 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1149                 "StartPlayout() failed to add participant to mixer");
1150             return -1;
1151         }
1152     }
1153
1154     channel_state_.SetPlaying(true);
1155     if (RegisterFilePlayingToMixer() != 0)
1156         return -1;
1157
1158     return 0;
1159 }
1160
1161 int32_t
1162 Channel::StopPlayout()
1163 {
1164     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1165                  "Channel::StopPlayout()");
1166     if (!channel_state_.Get().playing)
1167     {
1168         return 0;
1169     }
1170
1171     if (!_externalMixing) {
1172         // Remove participant as a candidate for mixing.
1173         if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0)
1174         {
1175             _engineStatisticsPtr->SetLastError(
1176                 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1177                 "StopPlayout() failed to remove participant from mixer");
1178             return -1;
1179         }
1180     }
1181
1182     channel_state_.SetPlaying(false);
1183     _outputAudioLevel.Clear();
1184
1185     return 0;
1186 }
1187
1188 int32_t
1189 Channel::StartSend()
1190 {
1191     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1192                  "Channel::StartSend()");
1193     // Resume the previous sequence number which was reset by StopSend().
1194     // This needs to be done before |sending| is set to true.
1195     if (send_sequence_number_)
1196       SetInitSequenceNumber(send_sequence_number_);
1197
1198     if (channel_state_.Get().sending)
1199     {
1200       return 0;
1201     }
1202     channel_state_.SetSending(true);
1203
1204     if (_rtpRtcpModule->SetSendingStatus(true) != 0)
1205     {
1206         _engineStatisticsPtr->SetLastError(
1207             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1208             "StartSend() RTP/RTCP failed to start sending");
1209         CriticalSectionScoped cs(&_callbackCritSect);
1210         channel_state_.SetSending(false);
1211         return -1;
1212     }
1213
1214     return 0;
1215 }
1216
1217 int32_t
1218 Channel::StopSend()
1219 {
1220     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1221                  "Channel::StopSend()");
1222     if (!channel_state_.Get().sending)
1223     {
1224       return 0;
1225     }
1226     channel_state_.SetSending(false);
1227
1228     // Store the sequence number to be able to pick up the same sequence for
1229     // the next StartSend(). This is needed when restarting the device; otherwise
1230     // it might cause libSRTP to complain about packets being replayed.
1231     // TODO(xians): Remove this workaround after RtpRtcpModule's refactoring
1232     // CL is landed. See issue
1233     // https://code.google.com/p/webrtc/issues/detail?id=2111 .
1234     send_sequence_number_ = _rtpRtcpModule->SequenceNumber();
1235
1236     // Reset the sending SSRC and sequence number, and trigger direct
1237     // transmission of an RTCP BYE.
1238     if (_rtpRtcpModule->SetSendingStatus(false) == -1 ||
1239         _rtpRtcpModule->ResetSendDataCountersRTP() == -1)
1240     {
1241         _engineStatisticsPtr->SetLastError(
1242             VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1243             "StopSend() RTP/RTCP failed to stop sending");
1244     }
1245
1246     return 0;
1247 }
1248
1249 int32_t
1250 Channel::StartReceiving()
1251 {
1252     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1253                  "Channel::StartReceiving()");
1254     if (channel_state_.Get().receiving)
1255     {
1256         return 0;
1257     }
1258     channel_state_.SetReceiving(true);
1259     _numberOfDiscardedPackets = 0;
1260     return 0;
1261 }
1262
1263 int32_t
1264 Channel::StopReceiving()
1265 {
1266     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1267                  "Channel::StopReceiving()");
1268     if (!channel_state_.Get().receiving)
1269     {
1270         return 0;
1271     }
1272
1273     channel_state_.SetReceiving(false);
1274     return 0;
1275 }
1276
1277 int32_t
1278 Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)
1279 {
1280     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1281                  "Channel::RegisterVoiceEngineObserver()");
1282     CriticalSectionScoped cs(&_callbackCritSect);
1283
1284     if (_voiceEngineObserverPtr)
1285     {
1286         _engineStatisticsPtr->SetLastError(
1287             VE_INVALID_OPERATION, kTraceError,
1288             "RegisterVoiceEngineObserver() observer already enabled");
1289         return -1;
1290     }
1291     _voiceEngineObserverPtr = &observer;
1292     return 0;
1293 }
1294
1295 int32_t
1296 Channel::DeRegisterVoiceEngineObserver()
1297 {
1298     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1299                  "Channel::DeRegisterVoiceEngineObserver()");
1300     CriticalSectionScoped cs(&_callbackCritSect);
1301
1302     if (!_voiceEngineObserverPtr)
1303     {
1304         _engineStatisticsPtr->SetLastError(
1305             VE_INVALID_OPERATION, kTraceWarning,
1306             "DeRegisterVoiceEngineObserver() observer already disabled");
1307         return 0;
1308     }
1309     _voiceEngineObserverPtr = NULL;
1310     return 0;
1311 }
1312
1313 int32_t
1314 Channel::GetSendCodec(CodecInst& codec)
1315 {
1316     return (audio_coding_->SendCodec(&codec));
1317 }
1318
1319 int32_t
1320 Channel::GetRecCodec(CodecInst& codec)
1321 {
1322     return (audio_coding_->ReceiveCodec(&codec));
1323 }
1324
1325 int32_t
1326 Channel::SetSendCodec(const CodecInst& codec)
1327 {
1328     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1329                  "Channel::SetSendCodec()");
1330
1331     if (audio_coding_->RegisterSendCodec(codec) != 0)
1332     {
1333         WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1334                      "SetSendCodec() failed to register codec to ACM");
1335         return -1;
1336     }
1337
1338     if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
1339     {
1340         _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1341         if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
1342         {
1343             WEBRTC_TRACE(
1344                     kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1345                     "SetSendCodec() failed to register codec to"
1346                     " RTP/RTCP module");
1347             return -1;
1348         }
1349     }
1350
1351     if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0)
1352     {
1353         WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1354                      "SetSendCodec() failed to set audio packet size");
1355         return -1;
1356     }
1357
1358     bitrate_controller_->SetBitrateObserver(send_bitrate_observer_.get(),
1359                                             codec.rate, 0, 0);
1360
1361     return 0;
1362 }
1363
1364 void
1365 Channel::OnNetworkChanged(const uint32_t bitrate_bps,
1366                           const uint8_t fraction_lost,  // 0 - 255.
1367                           const uint32_t rtt) {
1368   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1369       "Channel::OnNetworkChanged(bitrate_bps=%d, fraction_lost=%d, rtt=%d)",
1370       bitrate_bps, fraction_lost, rtt);
1371   // |fraction_lost| from BitrateObserver is a short-term observation of the
1372   // past packet loss rate. We use the network predictor to make a more
1373   // reasonable loss rate estimate.
1374   network_predictor_->UpdatePacketLossRate(fraction_lost);
1375   uint8_t loss_rate = network_predictor_->GetLossRate();
1376   // Normalize the rate to 0 - 100.
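  // For example, a predicted loss_rate of 51 on the 0 - 255 scale maps to
  // 100 * 51 / 255 = 20, i.e. a 20% packet loss rate.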
1377   if (audio_coding_->SetPacketLossRate(100 * loss_rate / 255) != 0) {
1378     _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1379         kTraceError, "OnNetworkChanged() failed to set packet loss rate");
1380     assert(false);  // This should not happen.
1381   }
1382 }
1383
1384 int32_t
1385 Channel::SetVADStatus(bool enableVAD, ACMVADMode mode, bool disableDTX)
1386 {
1387     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1388                  "Channel::SetVADStatus(mode=%d)", mode);
1389     // To disable VAD, DTX must be disabled too
1390     disableDTX = ((enableVAD == false) ? true : disableDTX);
1391     if (audio_coding_->SetVAD(!disableDTX, enableVAD, mode) != 0)
1392     {
1393         _engineStatisticsPtr->SetLastError(
1394             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1395             "SetVADStatus() failed to set VAD");
1396         return -1;
1397     }
1398     return 0;
1399 }
1400
1401 int32_t
1402 Channel::GetVADStatus(bool& enabledVAD, ACMVADMode& mode, bool& disabledDTX)
1403 {
1404     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1405                  "Channel::GetVADStatus");
1406     if (audio_coding_->VAD(&disabledDTX, &enabledVAD, &mode) != 0)
1407     {
1408         _engineStatisticsPtr->SetLastError(
1409             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1410             "GetVADStatus() failed to get VAD status");
1411         return -1;
1412     }
1413     disabledDTX = !disabledDTX;
1414     return 0;
1415 }
1416
1417 int32_t
1418 Channel::SetRecPayloadType(const CodecInst& codec)
1419 {
1420     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1421                  "Channel::SetRecPayloadType()");
1422
1423     if (channel_state_.Get().playing)
1424     {
1425         _engineStatisticsPtr->SetLastError(
1426             VE_ALREADY_PLAYING, kTraceError,
1427             "SetRecPayloadType() unable to set PT while playing");
1428         return -1;
1429     }
1430     if (channel_state_.Get().receiving)
1431     {
1432         _engineStatisticsPtr->SetLastError(
1433             VE_ALREADY_LISTENING, kTraceError,
1434             "SetRecPayloadType() unable to set PT while listening");
1435         return -1;
1436     }
1437
1438     if (codec.pltype == -1)
1439     {
1440         // De-register the selected codec (RTP/RTCP module and ACM)
1441
1442         int8_t pltype(-1);
1443         CodecInst rxCodec = codec;
1444
1445         // Get payload type for the given codec
1446         rtp_payload_registry_->ReceivePayloadType(
1447             rxCodec.plname,
1448             rxCodec.plfreq,
1449             rxCodec.channels,
1450             (rxCodec.rate < 0) ? 0 : rxCodec.rate,
1451             &pltype);
1452         rxCodec.pltype = pltype;
1453
1454         if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0)
1455         {
1456             _engineStatisticsPtr->SetLastError(
1457                     VE_RTP_RTCP_MODULE_ERROR,
1458                     kTraceError,
1459                     "SetRecPayloadType() RTP/RTCP-module deregistration "
1460                     "failed");
1461             return -1;
1462         }
1463         if (audio_coding_->UnregisterReceiveCodec(rxCodec.pltype) != 0)
1464         {
1465             _engineStatisticsPtr->SetLastError(
1466                 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1467                 "SetRecPayloadType() ACM deregistration failed - 1");
1468             return -1;
1469         }
1470         return 0;
1471     }
1472
1473     if (rtp_receiver_->RegisterReceivePayload(
1474         codec.plname,
1475         codec.pltype,
1476         codec.plfreq,
1477         codec.channels,
1478         (codec.rate < 0) ? 0 : codec.rate) != 0)
1479     {
1480         // First attempt to register failed => de-register and try again
1481         rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
1482         if (rtp_receiver_->RegisterReceivePayload(
1483             codec.plname,
1484             codec.pltype,
1485             codec.plfreq,
1486             codec.channels,
1487             (codec.rate < 0) ? 0 : codec.rate) != 0)
1488         {
1489             _engineStatisticsPtr->SetLastError(
1490                 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1491                 "SetRecPayloadType() RTP/RTCP-module registration failed");
1492             return -1;
1493         }
1494     }
1495     if (audio_coding_->RegisterReceiveCodec(codec) != 0)
1496     {
1497         audio_coding_->UnregisterReceiveCodec(codec.pltype);
1498         if (audio_coding_->RegisterReceiveCodec(codec) != 0)
1499         {
1500             _engineStatisticsPtr->SetLastError(
1501                 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1502                 "SetRecPayloadType() ACM registration failed - 1");
1503             return -1;
1504         }
1505     }
1506     return 0;
1507 }
1508
1509 int32_t
1510 Channel::GetRecPayloadType(CodecInst& codec)
1511 {
1512     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1513                  "Channel::GetRecPayloadType()");
1514     int8_t payloadType(-1);
1515     if (rtp_payload_registry_->ReceivePayloadType(
1516         codec.plname,
1517         codec.plfreq,
1518         codec.channels,
1519         (codec.rate < 0) ? 0 : codec.rate,
1520         &payloadType) != 0)
1521     {
1522         _engineStatisticsPtr->SetLastError(
1523             VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1524             "GetRecPayloadType() failed to retrieve RX payload type");
1525         return -1;
1526     }
1527     codec.pltype = payloadType;
1528     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1529                  "Channel::GetRecPayloadType() => pltype=%u", codec.pltype);
1530     return 0;
1531 }
1532
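// Registers a comfort-noise (CN) send codec under a caller-supplied dynamic
// payload type, with both the ACM and the RTP/RTCP module. Only 16 kHz and
// 32 kHz are mapped explicitly below; other frequencies leave the sampling
// rate at -1.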
1533 int32_t
1534 Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency)
1535 {
1536     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1537                  "Channel::SetSendCNPayloadType()");
1538
1539     CodecInst codec;
1540     int32_t samplingFreqHz(-1);
1541     const int kMono = 1;
1542     if (frequency == kFreq32000Hz)
1543         samplingFreqHz = 32000;
1544     else if (frequency == kFreq16000Hz)
1545         samplingFreqHz = 16000;
1546
1547     if (audio_coding_->Codec("CN", &codec, samplingFreqHz, kMono) == -1)
1548     {
1549         _engineStatisticsPtr->SetLastError(
1550             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1551             "SetSendCNPayloadType() failed to retrieve default CN codec "
1552             "settings");
1553         return -1;
1554     }
1555
1556     // Modify the payload type (must be within the dynamic payload type range)
1557     codec.pltype = type;
1558
1559     if (audio_coding_->RegisterSendCodec(codec) != 0)
1560     {
1561         _engineStatisticsPtr->SetLastError(
1562             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1563             "SetSendCNPayloadType() failed to register CN to ACM");
1564         return -1;
1565     }
1566
1567     if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
1568     {
1569         _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1570         if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
1571         {
1572             _engineStatisticsPtr->SetLastError(
1573                 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1574                 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
1575                 "module");
1576             return -1;
1577         }
1578     }
1579     return 0;
1580 }
1581
1582 int Channel::SetOpusMaxPlaybackRate(int frequency_hz) {
1583   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1584                "Channel::SetOpusMaxPlaybackRate()");
1585
1586   if (audio_coding_->SetOpusMaxPlaybackRate(frequency_hz) != 0) {
1587     _engineStatisticsPtr->SetLastError(
1588         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1589         "SetOpusMaxPlaybackRate() failed to set maximum playback rate");
1590     return -1;
1591   }
1592   return 0;
1593 }
1594
1595 int32_t Channel::RegisterExternalTransport(Transport& transport)
1596 {
1597     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1598                "Channel::RegisterExternalTransport()");
1599
1600     CriticalSectionScoped cs(&_callbackCritSect);
1601
1602     if (_externalTransport)
1603     {
1604         _engineStatisticsPtr->SetLastError(
1605             VE_INVALID_OPERATION, kTraceError,
1606             "RegisterExternalTransport() external transport already enabled");
1607         return -1;
1608     }
1609     _externalTransport = true;
1610     _transportPtr = &transport;
1611     return 0;
1612 }
1613
1614 int32_t
1615 Channel::DeRegisterExternalTransport()
1616 {
1617     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1618                  "Channel::DeRegisterExternalTransport()");
1619
1620     CriticalSectionScoped cs(&_callbackCritSect);
1621
1622     if (!_transportPtr)
1623     {
1624         _engineStatisticsPtr->SetLastError(
1625             VE_INVALID_OPERATION, kTraceWarning,
1626             "DeRegisterExternalTransport() external transport already "
1627             "disabled");
1628         return 0;
1629     }
1630     _externalTransport = false;
1631     _transportPtr = NULL;
1632     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1633                  "DeRegisterExternalTransport() external transport is now disabled");
1634     return 0;
1635 }
1636
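// Entry point for incoming RTP packets: updates the playout timestamp, dumps
// the packet when RTP dumping is enabled, parses and validates the RTP header,
// updates receive statistics, forwards the packet to the ViE bandwidth
// estimator when one is attached, and finally dispatches to ReceivePacket().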
1637 int32_t Channel::ReceivedRTPPacket(const int8_t* data, int32_t length,
1638                                    const PacketTime& packet_time) {
1639   WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
1640                "Channel::ReceivedRTPPacket()");
1641
1642   // Store playout timestamp for the received RTP packet
1643   UpdatePlayoutTimestamp(false);
1644
1645   // Dump the RTP packet to a file (if RTP dump is enabled).
1646   if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
1647                             (uint16_t)length) == -1) {
1648     WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1649                  VoEId(_instanceId,_channelId),
1650                  "Channel::ReceivedRTPPacket() RTP dump to input file failed");
1651   }
1652   const uint8_t* received_packet = reinterpret_cast<const uint8_t*>(data);
1653   RTPHeader header;
1654   if (!rtp_header_parser_->Parse(received_packet, length, &header)) {
1655     WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1656                  "Incoming packet: invalid RTP header");
1657     return -1;
1658   }
1659   header.payload_type_frequency =
1660       rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
1661   if (header.payload_type_frequency < 0)
1662     return -1;
1663   bool in_order = IsPacketInOrder(header);
1664   rtp_receive_statistics_->IncomingPacket(header, length,
1665       IsPacketRetransmitted(header, in_order));
1666   rtp_payload_registry_->SetIncomingPayloadType(header);
1667
1668   // Forward any packets to ViE bandwidth estimator, if enabled.
1669   {
1670     CriticalSectionScoped cs(&_callbackCritSect);
1671     if (vie_network_) {
1672       int64_t arrival_time_ms;
1673       if (packet_time.timestamp != -1) {
1674         arrival_time_ms = (packet_time.timestamp + 500) / 1000;
1675       } else {
1676         arrival_time_ms = TickTime::MillisecondTimestamp();
1677       }
1678       int payload_length = length - header.headerLength;
1679       vie_network_->ReceivedBWEPacket(video_channel_, arrival_time_ms,
1680                                       payload_length, header);
1681     }
1682   }
1683
1684   return ReceivePacket(received_packet, length, header, in_order) ? 0 : -1;
1685 }
1686
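// Dispatches a validated RTP packet: RTX-encapsulated packets are unwrapped by
// HandleEncapsulation(); all other packets are looked up in the payload
// registry and handed to the RTP receiver for further processing.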
1687 bool Channel::ReceivePacket(const uint8_t* packet,
1688                             int packet_length,
1689                             const RTPHeader& header,
1690                             bool in_order) {
1691   if (rtp_payload_registry_->IsEncapsulated(header)) {
1692     return HandleEncapsulation(packet, packet_length, header);
1693   }
1694   const uint8_t* payload = packet + header.headerLength;
1695   int payload_length = packet_length - header.headerLength;
1696   assert(payload_length >= 0);
1697   PayloadUnion payload_specific;
1698   if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
1699                                                   &payload_specific)) {
1700     return false;
1701   }
1702   return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
1703                                           payload_specific, in_order);
1704 }
1705
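// Unwraps an RTX retransmission: restores the original RTP packet and
// re-enters the receive path via OnRecoveredPacket(). The
// |restored_packet_in_use_| flag drops packets carrying nested RTX headers.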
1706 bool Channel::HandleEncapsulation(const uint8_t* packet,
1707                                   int packet_length,
1708                                   const RTPHeader& header) {
1709   if (!rtp_payload_registry_->IsRtx(header))
1710     return false;
1711
1712   // Remove the RTX header and parse the original RTP header.
1713   if (packet_length < header.headerLength)
1714     return false;
1715   if (packet_length > kVoiceEngineMaxIpPacketSizeBytes)
1716     return false;
1717   if (restored_packet_in_use_) {
1718     WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1719                  "Multiple RTX headers detected, dropping packet");
1720     return false;
1721   }
1722   uint8_t* restored_packet_ptr = restored_packet_;
1723   if (!rtp_payload_registry_->RestoreOriginalPacket(
1724       &restored_packet_ptr, packet, &packet_length, rtp_receiver_->SSRC(),
1725       header)) {
1726     WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1727                  "Incoming RTX packet: invalid RTP header");
1728     return false;
1729   }
1730   restored_packet_in_use_ = true;
1731   bool ret = OnRecoveredPacket(restored_packet_ptr, packet_length);
1732   restored_packet_in_use_ = false;
1733   return ret;
1734 }
1735
1736 bool Channel::IsPacketInOrder(const RTPHeader& header) const {
1737   StreamStatistician* statistician =
1738       rtp_receive_statistics_->GetStatistician(header.ssrc);
1739   if (!statistician)
1740     return false;
1741   return statistician->IsPacketInOrder(header.sequenceNumber);
1742 }
1743
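// Heuristic used for receive statistics only. When RTX is enabled,
// retransmissions are identified via the RTX stream instead, so this always
// returns false; otherwise an out-of-order packet is treated as a
// retransmission if the statistician considers it old, given the minimum RTT.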
1744 bool Channel::IsPacketRetransmitted(const RTPHeader& header,
1745                                     bool in_order) const {
1746   // Retransmissions are handled separately if RTX is enabled.
1747   if (rtp_payload_registry_->RtxEnabled())
1748     return false;
1749   StreamStatistician* statistician =
1750       rtp_receive_statistics_->GetStatistician(header.ssrc);
1751   if (!statistician)
1752     return false;
1753   // Check if this is a retransmission.
1754   uint16_t min_rtt = 0;
1755   _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
1756   return !in_order &&
1757       statistician->IsRetransmitOfOldPacket(header, min_rtt);
1758 }
1759
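// Entry point for incoming RTCP packets: hands the packet to the RTP/RTCP
// module for parsing and, once a valid RTT and a remote NTP/RTP timestamp pair
// are available, updates the estimator used to map RTP timestamps to NTP time.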
1760 int32_t Channel::ReceivedRTCPPacket(const int8_t* data, int32_t length) {
1761   WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
1762                "Channel::ReceivedRTCPPacket()");
1763   // Store playout timestamp for the received RTCP packet
1764   UpdatePlayoutTimestamp(true);
1765
1766   // Dump the RTCP packet to a file (if RTP dump is enabled).
1767   if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
1768                             (uint16_t)length) == -1) {
1769     WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1770                  VoEId(_instanceId,_channelId),
1771                  "Channel::ReceivedRTCPPacket() RTCP dump to input file failed");
1772   }
1773
1774   // Deliver RTCP packet to RTP/RTCP module for parsing
1775   if (_rtpRtcpModule->IncomingRtcpPacket((const uint8_t*)data,
1776                                          (uint16_t)length) == -1) {
1777     _engineStatisticsPtr->SetLastError(
1778         VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
1779         "Channel::ReceivedRTCPPacket() RTCP packet is invalid");
1780   }
1781
1782   {
1783     CriticalSectionScoped lock(ts_stats_lock_.get());
1784     uint16_t rtt = GetRTT();
1785     if (rtt == 0) {
1786       // Waiting for valid RTT.
1787       return 0;
1788     }
1789     uint32_t ntp_secs = 0;
1790     uint32_t ntp_frac = 0;
1791     uint32_t rtp_timestamp = 0;
1792     if (0 != _rtpRtcpModule->RemoteNTP(&ntp_secs, &ntp_frac, NULL, NULL,
1793                                        &rtp_timestamp)) {
1794       // Waiting for RTCP.
1795       return 0;
1796     }
1797     ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
1798   }
1799   return 0;
1800 }
1801
1802 int Channel::StartPlayingFileLocally(const char* fileName,
1803                                      bool loop,
1804                                      FileFormats format,
1805                                      int startPosition,
1806                                      float volumeScaling,
1807                                      int stopPosition,
1808                                      const CodecInst* codecInst)
1809 {
1810     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1811                  "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
1812                  " format=%d, volumeScaling=%5.3f, startPosition=%d, "
1813                  "stopPosition=%d)", fileName, loop, format, volumeScaling,
1814                  startPosition, stopPosition);
1815
1816     if (channel_state_.Get().output_file_playing)
1817     {
1818         _engineStatisticsPtr->SetLastError(
1819             VE_ALREADY_PLAYING, kTraceError,
1820             "StartPlayingFileLocally() is already playing");
1821         return -1;
1822     }
1823
1824     {
1825         CriticalSectionScoped cs(&_fileCritSect);
1826
1827         if (_outputFilePlayerPtr)
1828         {
1829             _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1830             FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1831             _outputFilePlayerPtr = NULL;
1832         }
1833
1834         _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
1835             _outputFilePlayerId, (const FileFormats)format);
1836
1837         if (_outputFilePlayerPtr == NULL)
1838         {
1839             _engineStatisticsPtr->SetLastError(
1840                 VE_INVALID_ARGUMENT, kTraceError,
1841                 "StartPlayingFileLocally() filePlayer format is not correct");
1842             return -1;
1843         }
1844
1845         const uint32_t notificationTime(0);
1846
1847         if (_outputFilePlayerPtr->StartPlayingFile(
1848                 fileName,
1849                 loop,
1850                 startPosition,
1851                 volumeScaling,
1852                 notificationTime,
1853                 stopPosition,
1854                 (const CodecInst*)codecInst) != 0)
1855         {
1856             _engineStatisticsPtr->SetLastError(
1857                 VE_BAD_FILE, kTraceError,
1858                 "StartPlayingFile() failed to start file playout");
1859             _outputFilePlayerPtr->StopPlayingFile();
1860             FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1861             _outputFilePlayerPtr = NULL;
1862             return -1;
1863         }
1864         _outputFilePlayerPtr->RegisterModuleFileCallback(this);
1865         channel_state_.SetOutputFilePlaying(true);
1866     }
1867
1868     if (RegisterFilePlayingToMixer() != 0)
1869         return -1;
1870
1871     return 0;
1872 }
1873
1874 int Channel::StartPlayingFileLocally(InStream* stream,
1875                                      FileFormats format,
1876                                      int startPosition,
1877                                      float volumeScaling,
1878                                      int stopPosition,
1879                                      const CodecInst* codecInst)
1880 {
1881     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1882                  "Channel::StartPlayingFileLocally(format=%d,"
1883                  " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
1884                  format, volumeScaling, startPosition, stopPosition);
1885
1886     if(stream == NULL)
1887     {
1888         _engineStatisticsPtr->SetLastError(
1889             VE_BAD_FILE, kTraceError,
1890             "StartPlayingFileLocally() NULL as input stream");
1891         return -1;
1892     }
1893
1894
1895     if (channel_state_.Get().output_file_playing)
1896     {
1897         _engineStatisticsPtr->SetLastError(
1898             VE_ALREADY_PLAYING, kTraceError,
1899             "StartPlayingFileLocally() is already playing");
1900         return -1;
1901     }
1902
1903     {
1904       CriticalSectionScoped cs(&_fileCritSect);
1905
1906       // Destroy the old instance
1907       if (_outputFilePlayerPtr)
1908       {
1909           _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1910           FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1911           _outputFilePlayerPtr = NULL;
1912       }
1913
1914       // Create the instance
1915       _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
1916           _outputFilePlayerId,
1917           (const FileFormats)format);
1918
1919       if (_outputFilePlayerPtr == NULL)
1920       {
1921           _engineStatisticsPtr->SetLastError(
1922               VE_INVALID_ARGUMENT, kTraceError,
1923               "StartPlayingFileLocally() filePlayer format is not correct");
1924           return -1;
1925       }
1926
1927       const uint32_t notificationTime(0);
1928
1929       if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
1930                                                  volumeScaling,
1931                                                  notificationTime,
1932                                                  stopPosition, codecInst) != 0)
1933       {
1934           _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
1935                                              "StartPlayingFile() failed to "
1936                                              "start file playout");
1937           _outputFilePlayerPtr->StopPlayingFile();
1938           FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1939           _outputFilePlayerPtr = NULL;
1940           return -1;
1941       }
1942       _outputFilePlayerPtr->RegisterModuleFileCallback(this);
1943       channel_state_.SetOutputFilePlaying(true);
1944     }
1945
1946     if (RegisterFilePlayingToMixer() != 0)
1947         return -1;
1948
1949     return 0;
1950 }
1951
1952 int Channel::StopPlayingFileLocally()
1953 {
1954     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1955                  "Channel::StopPlayingFileLocally()");
1956
1957     if (!channel_state_.Get().output_file_playing)
1958     {
1959         _engineStatisticsPtr->SetLastError(
1960             VE_INVALID_OPERATION, kTraceWarning,
1961             "StopPlayingFileLocally() is not playing");
1962         return 0;
1963     }
1964
1965     {
1966         CriticalSectionScoped cs(&_fileCritSect);
1967
1968         if (_outputFilePlayerPtr->StopPlayingFile() != 0)
1969         {
1970             _engineStatisticsPtr->SetLastError(
1971                 VE_STOP_RECORDING_FAILED, kTraceError,
1972                 "StopPlayingFile() could not stop playing");
1973             return -1;
1974         }
1975         _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1976         FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1977         _outputFilePlayerPtr = NULL;
1978         channel_state_.SetOutputFilePlaying(false);
1979     }
1980     // _fileCritSect cannot be taken while calling
1981     // SetAnonymousMixabilityStatus(). Refer to comments in
1982     // StartPlayingFileLocally(const char* ...) for more details.
1983     if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0)
1984     {
1985         _engineStatisticsPtr->SetLastError(
1986             VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1987             "StopPlayingFile() failed to stop participant from playing as a "
1988             "file in the mixer");
1989         return -1;
1990     }
1991
1992     return 0;
1993 }
1994
1995 int Channel::IsPlayingFileLocally() const
1996 {
1997     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1998                  "Channel::IsPlayingFileLocally()");
1999
2000     return channel_state_.Get().output_file_playing;
2001 }
2002
2003 int Channel::RegisterFilePlayingToMixer()
2004 {
2005     // Return success without registering file playing to the mixer if:
2006     // 1. a file starts playing before playout has started on this channel, or
2007     // 2. playout starts without a file playing on this channel.
2008     if (!channel_state_.Get().playing ||
2009         !channel_state_.Get().output_file_playing)
2010     {
2011         return 0;
2012     }
2013
2014     // |_fileCritSect| cannot be taken while calling
2015     // SetAnonymousMixabilityStatus() since as soon as the participant is added
2016     // frames can be pulled by the mixer. Since the frames are generated from
2017     // the file, _fileCritSect will be taken. This would result in a deadlock.
2018     if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0)
2019     {
2020         channel_state_.SetOutputFilePlaying(false);
2021         CriticalSectionScoped cs(&_fileCritSect);
2022         _engineStatisticsPtr->SetLastError(
2023             VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
2024             "StartPlayingFile() failed to add participant as file to mixer");
2025         _outputFilePlayerPtr->StopPlayingFile();
2026         FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2027         _outputFilePlayerPtr = NULL;
2028         return -1;
2029     }
2030
2031     return 0;
2032 }
2033
2034 int Channel::StartPlayingFileAsMicrophone(const char* fileName,
2035                                           bool loop,
2036                                           FileFormats format,
2037                                           int startPosition,
2038                                           float volumeScaling,
2039                                           int stopPosition,
2040                                           const CodecInst* codecInst)
2041 {
2042     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2043                  "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
2044                  "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
2045                  "stopPosition=%d)", fileName, loop, format, volumeScaling,
2046                  startPosition, stopPosition);
2047
2048     CriticalSectionScoped cs(&_fileCritSect);
2049
2050     if (channel_state_.Get().input_file_playing)
2051     {
2052         _engineStatisticsPtr->SetLastError(
2053             VE_ALREADY_PLAYING, kTraceWarning,
2054             "StartPlayingFileAsMicrophone() filePlayer is playing");
2055         return 0;
2056     }
2057
2058     // Destroy the old instance
2059     if (_inputFilePlayerPtr)
2060     {
2061         _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2062         FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2063         _inputFilePlayerPtr = NULL;
2064     }
2065
2066     // Create the instance
2067     _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2068         _inputFilePlayerId, (const FileFormats)format);
2069
2070     if (_inputFilePlayerPtr == NULL)
2071     {
2072         _engineStatisticsPtr->SetLastError(
2073             VE_INVALID_ARGUMENT, kTraceError,
2074             "StartPlayingFileAsMicrophone() filePlayer format is not correct");
2075         return -1;
2076     }
2077
2078     const uint32_t notificationTime(0);
2079
2080     if (_inputFilePlayerPtr->StartPlayingFile(
2081         fileName,
2082         loop,
2083         startPosition,
2084         volumeScaling,
2085         notificationTime,
2086         stopPosition,
2087         (const CodecInst*)codecInst) != 0)
2088     {
2089         _engineStatisticsPtr->SetLastError(
2090             VE_BAD_FILE, kTraceError,
2091             "StartPlayingFile() failed to start file playout");
2092         _inputFilePlayerPtr->StopPlayingFile();
2093         FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2094         _inputFilePlayerPtr = NULL;
2095         return -1;
2096     }
2097     _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2098     channel_state_.SetInputFilePlaying(true);
2099
2100     return 0;
2101 }
2102
2103 int Channel::StartPlayingFileAsMicrophone(InStream* stream,
2104                                           FileFormats format,
2105                                           int startPosition,
2106                                           float volumeScaling,
2107                                           int stopPosition,
2108                                           const CodecInst* codecInst)
2109 {
2110     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2111                  "Channel::StartPlayingFileAsMicrophone(format=%d, "
2112                  "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2113                  format, volumeScaling, startPosition, stopPosition);
2114
2115     if(stream == NULL)
2116     {
2117         _engineStatisticsPtr->SetLastError(
2118             VE_BAD_FILE, kTraceError,
2119             "StartPlayingFileAsMicrophone NULL as input stream");
2120         return -1;
2121     }
2122
2123     CriticalSectionScoped cs(&_fileCritSect);
2124
2125     if (channel_state_.Get().input_file_playing)
2126     {
2127         _engineStatisticsPtr->SetLastError(
2128             VE_ALREADY_PLAYING, kTraceWarning,
2129             "StartPlayingFileAsMicrophone() is playing");
2130         return 0;
2131     }
2132
2133     // Destroy the old instance
2134     if (_inputFilePlayerPtr)
2135     {
2136         _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2137         FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2138         _inputFilePlayerPtr = NULL;
2139     }
2140
2141     // Create the instance
2142     _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2143         _inputFilePlayerId, (const FileFormats)format);
2144
2145     if (_inputFilePlayerPtr == NULL)
2146     {
2147         _engineStatisticsPtr->SetLastError(
2148             VE_INVALID_ARGUMENT, kTraceError,
2149             "StartPlayingFileAsMicrophone() filePlayer format is not correct");
2150         return -1;
2151     }
2152
2153     const uint32_t notificationTime(0);
2154
2155     if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2156                                               volumeScaling, notificationTime,
2157                                               stopPosition, codecInst) != 0)
2158     {
2159         _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2160                                            "StartPlayingFile() failed to start "
2161                                            "file playout");
2162         _inputFilePlayerPtr->StopPlayingFile();
2163         FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2164         _inputFilePlayerPtr = NULL;
2165         return -1;
2166     }
2167
2168     _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2169     channel_state_.SetInputFilePlaying(true);
2170
2171     return 0;
2172 }
2173
2174 int Channel::StopPlayingFileAsMicrophone()
2175 {
2176     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2177                  "Channel::StopPlayingFileAsMicrophone()");
2178
2179     CriticalSectionScoped cs(&_fileCritSect);
2180
2181     if (!channel_state_.Get().input_file_playing)
2182     {
2183         _engineStatisticsPtr->SetLastError(
2184             VE_INVALID_OPERATION, kTraceWarning,
2185             "StopPlayingFileAsMicrophone() is not playing");
2186         return 0;
2187     }
2188
2189     if (_inputFilePlayerPtr->StopPlayingFile() != 0)
2190     {
2191         _engineStatisticsPtr->SetLastError(
2192             VE_STOP_RECORDING_FAILED, kTraceError,
2193             "StopPlayingFile() could not stop playing");
2194         return -1;
2195     }
2196     _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2197     FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2198     _inputFilePlayerPtr = NULL;
2199     channel_state_.SetInputFilePlaying(false);
2200
2201     return 0;
2202 }
2203
2204 int Channel::IsPlayingFileAsMicrophone() const
2205 {
2206     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2207                  "Channel::IsPlayingFileAsMicrophone()");
2208     return channel_state_.Get().input_file_playing;
2209 }
2210
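// Records the channel playout to file. With no codec given, recording defaults
// to 16 kHz L16 PCM; L16/PCMU/PCMA are written as WAV files, anything else as
// a compressed file.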
2211 int Channel::StartRecordingPlayout(const char* fileName,
2212                                    const CodecInst* codecInst)
2213 {
2214     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2215                  "Channel::StartRecordingPlayout(fileName=%s)", fileName);
2216
2217     if (_outputFileRecording)
2218     {
2219         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2220                      "StartRecordingPlayout() is already recording");
2221         return 0;
2222     }
2223
2224     FileFormats format;
2225     const uint32_t notificationTime(0); // Not supported in VoE
2226     CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
2227
2228     if ((codecInst != NULL) &&
2229       ((codecInst->channels < 1) || (codecInst->channels > 2)))
2230     {
2231         _engineStatisticsPtr->SetLastError(
2232             VE_BAD_ARGUMENT, kTraceError,
2233             "StartRecordingPlayout() invalid compression");
2234         return(-1);
2235     }
2236     if(codecInst == NULL)
2237     {
2238         format = kFileFormatPcm16kHzFile;
2239         codecInst=&dummyCodec;
2240     }
2241     else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2242         (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2243         (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2244     {
2245         format = kFileFormatWavFile;
2246     }
2247     else
2248     {
2249         format = kFileFormatCompressedFile;
2250     }
2251
2252     CriticalSectionScoped cs(&_fileCritSect);
2253
2254     // Destroy the old instance
2255     if (_outputFileRecorderPtr)
2256     {
2257         _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2258         FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2259         _outputFileRecorderPtr = NULL;
2260     }
2261
2262     _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2263         _outputFileRecorderId, (const FileFormats)format);
2264     if (_outputFileRecorderPtr == NULL)
2265     {
2266         _engineStatisticsPtr->SetLastError(
2267             VE_INVALID_ARGUMENT, kTraceError,
2268             "StartRecordingPlayout() fileRecorder format is not correct");
2269         return -1;
2270     }
2271
2272     if (_outputFileRecorderPtr->StartRecordingAudioFile(
2273         fileName, (const CodecInst&)*codecInst, notificationTime) != 0)
2274     {
2275         _engineStatisticsPtr->SetLastError(
2276             VE_BAD_FILE, kTraceError,
2277             "StartRecordingAudioFile() failed to start file recording");
2278         _outputFileRecorderPtr->StopRecording();
2279         FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2280         _outputFileRecorderPtr = NULL;
2281         return -1;
2282     }
2283     _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2284     _outputFileRecording = true;
2285
2286     return 0;
2287 }
2288
2289 int Channel::StartRecordingPlayout(OutStream* stream,
2290                                    const CodecInst* codecInst)
2291 {
2292     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2293                  "Channel::StartRecordingPlayout()");
2294
2295     if (_outputFileRecording)
2296     {
2297         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2298                      "StartRecordingPlayout() is already recording");
2299         return 0;
2300     }
2301
2302     FileFormats format;
2303     const uint32_t notificationTime(0); // Not supported in VoE
2304     CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
2305
2306     if (codecInst != NULL && codecInst->channels != 1)
2307     {
2308         _engineStatisticsPtr->SetLastError(
2309             VE_BAD_ARGUMENT, kTraceError,
2310             "StartRecordingPlayout() invalid compression");
2311         return(-1);
2312     }
2313     if(codecInst == NULL)
2314     {
2315         format = kFileFormatPcm16kHzFile;
2316         codecInst=&dummyCodec;
2317     }
2318     else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2319         (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2320         (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2321     {
2322         format = kFileFormatWavFile;
2323     }
2324     else
2325     {
2326         format = kFileFormatCompressedFile;
2327     }
2328
2329     CriticalSectionScoped cs(&_fileCritSect);
2330
2331     // Destroy the old instance
2332     if (_outputFileRecorderPtr)
2333     {
2334         _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2335         FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2336         _outputFileRecorderPtr = NULL;
2337     }
2338
2339     _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2340         _outputFileRecorderId, (const FileFormats)format);
2341     if (_outputFileRecorderPtr == NULL)
2342     {
2343         _engineStatisticsPtr->SetLastError(
2344             VE_INVALID_ARGUMENT, kTraceError,
2345             "StartRecordingPlayout() fileRecorder format is not correct");
2346         return -1;
2347     }
2348
2349     if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
2350                                                         notificationTime) != 0)
2351     {
2352         _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2353                                            "StartRecordingPlayout() failed to "
2354                                            "start file recording");
2355         _outputFileRecorderPtr->StopRecording();
2356         FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2357         _outputFileRecorderPtr = NULL;
2358         return -1;
2359     }
2360
2361     _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2362     _outputFileRecording = true;
2363
2364     return 0;
2365 }
2366
2367 int Channel::StopRecordingPlayout()
2368 {
2369     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
2370                  "Channel::StopRecordingPlayout()");
2371
2372     if (!_outputFileRecording)
2373     {
2374         WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
2375                      "StopRecordingPlayout() is not recording");
2376         return -1;
2377     }
2378
2379
2380     CriticalSectionScoped cs(&_fileCritSect);
2381
2382     if (_outputFileRecorderPtr->StopRecording() != 0)
2383     {
2384         _engineStatisticsPtr->SetLastError(
2385             VE_STOP_RECORDING_FAILED, kTraceError,
2386             "StopRecording() could not stop recording");
2387         return(-1);
2388     }
2389     _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2390     FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2391     _outputFileRecorderPtr = NULL;
2392     _outputFileRecording = false;
2393
2394     return 0;
2395 }
2396
2397 void
2398 Channel::SetMixWithMicStatus(bool mix)
2399 {
2400     CriticalSectionScoped cs(&_fileCritSect);
2401     _mixFileWithMicrophone=mix;
2402 }
2403
2404 int
2405 Channel::GetSpeechOutputLevel(uint32_t& level) const
2406 {
2407     int8_t currentLevel = _outputAudioLevel.Level();
2408     level = static_cast<uint32_t>(currentLevel);
2409     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2410                VoEId(_instanceId,_channelId),
2411                "GetSpeechOutputLevel() => level=%u", level);
2412     return 0;
2413 }
2414
2415 int
2416 Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const
2417 {
2418     int16_t currentLevel = _outputAudioLevel.LevelFullRange();
2419     level = static_cast<uint32_t>(currentLevel);
2420     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2421                VoEId(_instanceId,_channelId),
2422                "GetSpeechOutputLevelFullRange() => level=%u", level);
2423     return 0;
2424 }
2425
2426 int
2427 Channel::SetMute(bool enable)
2428 {
2429     CriticalSectionScoped cs(&volume_settings_critsect_);
2430     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2431                "Channel::SetMute(enable=%d)", enable);
2432     _mute = enable;
2433     return 0;
2434 }
2435
2436 bool
2437 Channel::Mute() const
2438 {
2439     CriticalSectionScoped cs(&volume_settings_critsect_);
2440     return _mute;
2441 }
2442
2443 int
2444 Channel::SetOutputVolumePan(float left, float right)
2445 {
2446     CriticalSectionScoped cs(&volume_settings_critsect_);
2447     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2448                "Channel::SetOutputVolumePan()");
2449     _panLeft = left;
2450     _panRight = right;
2451     return 0;
2452 }
2453
2454 int
2455 Channel::GetOutputVolumePan(float& left, float& right) const
2456 {
2457     CriticalSectionScoped cs(&volume_settings_critsect_);
2458     left = _panLeft;
2459     right = _panRight;
2460     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2461                VoEId(_instanceId,_channelId),
2462                "GetOutputVolumePan() => left=%3.2f, right=%3.2f", left, right);
2463     return 0;
2464 }
2465
2466 int
2467 Channel::SetChannelOutputVolumeScaling(float scaling)
2468 {
2469     CriticalSectionScoped cs(&volume_settings_critsect_);
2470     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2471                "Channel::SetChannelOutputVolumeScaling()");
2472     _outputGain = scaling;
2473     return 0;
2474 }
2475
2476 int
2477 Channel::GetChannelOutputVolumeScaling(float& scaling) const
2478 {
2479     CriticalSectionScoped cs(&volume_settings_critsect_);
2480     scaling = _outputGain;
2481     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2482                VoEId(_instanceId,_channelId),
2483                "GetChannelOutputVolumeScaling() => scaling=%3.2f", scaling);
2484     return 0;
2485 }
2486
2487 int Channel::SendTelephoneEventOutband(unsigned char eventCode,
2488                                        int lengthMs, int attenuationDb,
2489                                        bool playDtmfEvent)
2490 {
2491     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2492                "Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)",
2493                playDtmfEvent);
2494
2495     _playOutbandDtmfEvent = playDtmfEvent;
2496
2497     if (_rtpRtcpModule->SendTelephoneEventOutband(eventCode, lengthMs,
2498                                                  attenuationDb) != 0)
2499     {
2500         _engineStatisticsPtr->SetLastError(
2501             VE_SEND_DTMF_FAILED,
2502             kTraceWarning,
2503             "SendTelephoneEventOutband() failed to send event");
2504         return -1;
2505     }
2506     return 0;
2507 }
2508
2509 int Channel::SendTelephoneEventInband(unsigned char eventCode,
2510                                          int lengthMs,
2511                                          int attenuationDb,
2512                                          bool playDtmfEvent)
2513 {
2514     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2515                "Channel::SendTelephoneEventInband(..., playDtmfEvent=%d)",
2516                playDtmfEvent);
2517
2518     _playInbandDtmfEvent = playDtmfEvent;
2519     _inbandDtmfQueue.AddDtmf(eventCode, lengthMs, attenuationDb);
2520
2521     return 0;
2522 }
2523
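// Registers the dynamic payload type used for outgoing telephone-event (DTMF)
// packets with the RTP/RTCP module, retrying once after a de-register if the
// first registration attempt fails.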
2524 int
2525 Channel::SetSendTelephoneEventPayloadType(unsigned char type)
2526 {
2527     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2528                "Channel::SetSendTelephoneEventPayloadType()");
2529     if (type > 127)
2530     {
2531         _engineStatisticsPtr->SetLastError(
2532             VE_INVALID_ARGUMENT, kTraceError,
2533             "SetSendTelephoneEventPayloadType() invalid type");
2534         return -1;
2535     }
2536     CodecInst codec = {};
2537     codec.plfreq = 8000;
2538     codec.pltype = type;
2539     memcpy(codec.plname, "telephone-event", 16);
2540     if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
2541     {
2542         _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
2543         if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
2544             _engineStatisticsPtr->SetLastError(
2545                 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2546                 "SetSendTelephoneEventPayloadType() failed to register send "
2547                 "payload type");
2548             return -1;
2549         }
2550     }
2551     _sendTelephoneEventPayloadType = type;
2552     return 0;
2553 }
2554
2555 int
2556 Channel::GetSendTelephoneEventPayloadType(unsigned char& type)
2557 {
2558     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2559                  "Channel::GetSendTelephoneEventPayloadType()");
2560     type = _sendTelephoneEventPayloadType;
2561     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2562                VoEId(_instanceId,_channelId),
2563                "GetSendTelephoneEventPayloadType() => type=%u", type);
2564     return 0;
2565 }
2566
2567 int
2568 Channel::UpdateRxVadDetection(AudioFrame& audioFrame)
2569 {
2570     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2571                  "Channel::UpdateRxVadDetection()");
2572
2573     int vadDecision = 1;
2574
2575     vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive)? 1 : 0;
2576
2577     if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr)
2578     {
2579         OnRxVadDetected(vadDecision);
2580         _oldVadDecision = vadDecision;
2581     }
2582
2583     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2584                  "Channel::UpdateRxVadDetection() => vadDecision=%d",
2585                  vadDecision);
2586     return 0;
2587 }
2588
2589 int
2590 Channel::RegisterRxVadObserver(VoERxVadCallback &observer)
2591 {
2592     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2593                  "Channel::RegisterRxVadObserver()");
2594     CriticalSectionScoped cs(&_callbackCritSect);
2595
2596     if (_rxVadObserverPtr)
2597     {
2598         _engineStatisticsPtr->SetLastError(
2599             VE_INVALID_OPERATION, kTraceError,
2600             "RegisterRxVadObserver() observer already enabled");
2601         return -1;
2602     }
2603     _rxVadObserverPtr = &observer;
2604     _RxVadDetection = true;
2605     return 0;
2606 }
2607
2608 int
2609 Channel::DeRegisterRxVadObserver()
2610 {
2611     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2612                  "Channel::DeRegisterRxVadObserver()");
2613     CriticalSectionScoped cs(&_callbackCritSect);
2614
2615     if (!_rxVadObserverPtr)
2616     {
2617         _engineStatisticsPtr->SetLastError(
2618             VE_INVALID_OPERATION, kTraceWarning,
2619             "DeRegisterRxVadObserver() observer already disabled");
2620         return 0;
2621     }
2622     _rxVadObserverPtr = NULL;
2623     _RxVadDetection = false;
2624     return 0;
2625 }
2626
2627 int
2628 Channel::VoiceActivityIndicator(int &activity)
2629 {
2630     activity = _sendFrameType;
2631
2632     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2633                  "Channel::VoiceActivityIndicator(indicator=%d)", activity);
2634     return 0;
2635 }
2636
2637 #ifdef WEBRTC_VOICE_ENGINE_AGC
2638
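// Receive-side AGC: maps the VoE AgcModes value onto the corresponding
// AudioProcessing GainControl mode and enables/disables gain control in the
// per-channel |rx_audioproc_| instance.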
2639 int
2640 Channel::SetRxAgcStatus(bool enable, AgcModes mode)
2641 {
2642     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2643                  "Channel::SetRxAgcStatus(enable=%d, mode=%d)",
2644                  (int)enable, (int)mode);
2645
2646     GainControl::Mode agcMode = kDefaultRxAgcMode;
2647     switch (mode)
2648     {
2649         case kAgcDefault:
2650             break;
2651         case kAgcUnchanged:
2652             agcMode = rx_audioproc_->gain_control()->mode();
2653             break;
2654         case kAgcFixedDigital:
2655             agcMode = GainControl::kFixedDigital;
2656             break;
2657         case kAgcAdaptiveDigital:
2658             agcMode = GainControl::kAdaptiveDigital;
2659             break;
2660         default:
2661             _engineStatisticsPtr->SetLastError(
2662                 VE_INVALID_ARGUMENT, kTraceError,
2663                 "SetRxAgcStatus() invalid Agc mode");
2664             return -1;
2665     }
2666
2667     if (rx_audioproc_->gain_control()->set_mode(agcMode) != 0)
2668     {
2669         _engineStatisticsPtr->SetLastError(
2670             VE_APM_ERROR, kTraceError,
2671             "SetRxAgcStatus() failed to set Agc mode");
2672         return -1;
2673     }
2674     if (rx_audioproc_->gain_control()->Enable(enable) != 0)
2675     {
2676         _engineStatisticsPtr->SetLastError(
2677             VE_APM_ERROR, kTraceError,
2678             "SetRxAgcStatus() failed to set Agc state");
2679         return -1;
2680     }
2681
2682     _rxAgcIsEnabled = enable;
2683     channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
2684
2685     return 0;
2686 }
2687
2688 int
2689 Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode)
2690 {
2691     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2692                      "Channel::GetRxAgcStatus(enable=?, mode=?)");
2693
2694     bool enable = rx_audioproc_->gain_control()->is_enabled();
2695     GainControl::Mode agcMode =
2696         rx_audioproc_->gain_control()->mode();
2697
2698     enabled = enable;
2699
2700     switch (agcMode)
2701     {
2702         case GainControl::kFixedDigital:
2703             mode = kAgcFixedDigital;
2704             break;
2705         case GainControl::kAdaptiveDigital:
2706             mode = kAgcAdaptiveDigital;
2707             break;
2708         default:
2709             _engineStatisticsPtr->SetLastError(
2710                 VE_APM_ERROR, kTraceError,
2711                 "GetRxAgcStatus() invalid Agc mode");
2712             return -1;
2713     }
2714
2715     return 0;
2716 }
2717
2718 int
2719 Channel::SetRxAgcConfig(AgcConfig config)
2720 {
2721     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2722                  "Channel::SetRxAgcConfig()");
2723
2724     if (rx_audioproc_->gain_control()->set_target_level_dbfs(
2725         config.targetLeveldBOv) != 0)
2726     {
2727         _engineStatisticsPtr->SetLastError(
2728             VE_APM_ERROR, kTraceError,
2729             "SetRxAgcConfig() failed to set target peak |level| "
2730             "(or envelope) of the Agc");
2731         return -1;
2732     }
2733     if (rx_audioproc_->gain_control()->set_compression_gain_db(
2734         config.digitalCompressionGaindB) != 0)
2735     {
2736         _engineStatisticsPtr->SetLastError(
2737             VE_APM_ERROR, kTraceError,
2738             "SetRxAgcConfig() failed to set the range in |gain| the"
2739             " digital compression stage may apply");
2740         return -1;
2741     }
2742     if (rx_audioproc_->gain_control()->enable_limiter(
2743         config.limiterEnable) != 0)
2744     {
2745         _engineStatisticsPtr->SetLastError(
2746             VE_APM_ERROR, kTraceError,
2747             "SetRxAgcConfig() failed to set the hard limiter on the signal");
2748         return -1;
2749     }
2750
2751     return 0;
2752 }
2753
2754 int
2755 Channel::GetRxAgcConfig(AgcConfig& config)
2756 {
2757     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2758                  "Channel::GetRxAgcConfig(config=?)");
2759
2760     config.targetLeveldBOv =
2761         rx_audioproc_->gain_control()->target_level_dbfs();
2762     config.digitalCompressionGaindB =
2763         rx_audioproc_->gain_control()->compression_gain_db();
2764     config.limiterEnable =
2765         rx_audioproc_->gain_control()->is_limiter_enabled();
2766
2767     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2768                VoEId(_instanceId,_channelId), "GetRxAgcConfig() => "
2769                    "targetLeveldBOv=%u, digitalCompressionGaindB=%u,"
2770                    " limiterEnable=%d",
2771                    config.targetLeveldBOv,
2772                    config.digitalCompressionGaindB,
2773                    config.limiterEnable);
2774
2775     return 0;
2776 }
2777
2778 #endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
2779
2780 #ifdef WEBRTC_VOICE_ENGINE_NR
2781
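// Receive-side noise suppression: maps the VoE NsModes value onto a
// NoiseSuppression level and enables/disables it in |rx_audioproc_|.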
2782 int
2783 Channel::SetRxNsStatus(bool enable, NsModes mode)
2784 {
2785     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2786                  "Channel::SetRxNsStatus(enable=%d, mode=%d)",
2787                  (int)enable, (int)mode);
2788
2789     NoiseSuppression::Level nsLevel = kDefaultNsMode;
2790     switch (mode)
2791     {
2792
2793         case kNsDefault:
2794             break;
2795         case kNsUnchanged:
2796             nsLevel = rx_audioproc_->noise_suppression()->level();
2797             break;
2798         case kNsConference:
2799             nsLevel = NoiseSuppression::kHigh;
2800             break;
2801         case kNsLowSuppression:
2802             nsLevel = NoiseSuppression::kLow;
2803             break;
2804         case kNsModerateSuppression:
2805             nsLevel = NoiseSuppression::kModerate;
2806             break;
2807         case kNsHighSuppression:
2808             nsLevel = NoiseSuppression::kHigh;
2809             break;
2810         case kNsVeryHighSuppression:
2811             nsLevel = NoiseSuppression::kVeryHigh;
2812             break;
2813     }
2814
2815     if (rx_audioproc_->noise_suppression()->set_level(nsLevel)
2816         != 0)
2817     {
2818         _engineStatisticsPtr->SetLastError(
2819             VE_APM_ERROR, kTraceError,
2820             "SetRxNsStatus() failed to set NS level");
2821         return -1;
2822     }
2823     if (rx_audioproc_->noise_suppression()->Enable(enable) != 0)
2824     {
2825         _engineStatisticsPtr->SetLastError(
2826             VE_APM_ERROR, kTraceError,
2827             "SetRxNsStatus() failed to set NS state");
2828         return -1;
2829     }
2830
2831     _rxNsIsEnabled = enable;
2832     channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
2833
2834     return 0;
2835 }
2836
2837 int
2838 Channel::GetRxNsStatus(bool& enabled, NsModes& mode)
2839 {
2840     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2841                  "Channel::GetRxNsStatus(enable=?, mode=?)");
2842
2843     bool enable =
2844         rx_audioproc_->noise_suppression()->is_enabled();
2845     NoiseSuppression::Level ncLevel =
2846         rx_audioproc_->noise_suppression()->level();
2847
2848     enabled = enable;
2849
2850     switch (ncLevel)
2851     {
2852         case NoiseSuppression::kLow:
2853             mode = kNsLowSuppression;
2854             break;
2855         case NoiseSuppression::kModerate:
2856             mode = kNsModerateSuppression;
2857             break;
2858         case NoiseSuppression::kHigh:
2859             mode = kNsHighSuppression;
2860             break;
2861         case NoiseSuppression::kVeryHigh:
2862             mode = kNsVeryHighSuppression;
2863             break;
2864     }
2865
2866     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2867                VoEId(_instanceId,_channelId),
2868                "GetRxNsStatus() => enabled=%d, mode=%d", enabled, mode);
2869     return 0;
2870 }
2871
2872 #endif // #ifdef WEBRTC_VOICE_ENGINE_NR
2873
2874 int
2875 Channel::RegisterRTCPObserver(VoERTCPObserver& observer)
2876 {
2877     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2878                  "Channel::RegisterRTCPObserver()");
2879     CriticalSectionScoped cs(&_callbackCritSect);
2880
2881     if (_rtcpObserverPtr)
2882     {
2883         _engineStatisticsPtr->SetLastError(
2884             VE_INVALID_OPERATION, kTraceError,
2885             "RegisterRTCPObserver() observer already enabled");
2886         return -1;
2887     }
2888
2889     _rtcpObserverPtr = &observer;
2890     _rtcpObserver = true;
2891
2892     return 0;
2893 }
2894
2895 int
2896 Channel::DeRegisterRTCPObserver()
2897 {
2898     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2899                  "Channel::DeRegisterRTCPObserver()");
2900     CriticalSectionScoped cs(&_callbackCritSect);
2901
2902     if (!_rtcpObserverPtr)
2903     {
2904         _engineStatisticsPtr->SetLastError(
2905             VE_INVALID_OPERATION, kTraceWarning,
2906             "DeRegisterRTCPObserver() observer already disabled");
2907         return 0;
2908     }
2909
2910     _rtcpObserver = false;
2911     _rtcpObserverPtr = NULL;
2912
2913     return 0;
2914 }
2915
2916 int
2917 Channel::SetLocalSSRC(unsigned int ssrc)
2918 {
2919     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2920                  "Channel::SetLocalSSRC()");
2921     if (channel_state_.Get().sending)
2922     {
2923         _engineStatisticsPtr->SetLastError(
2924             VE_ALREADY_SENDING, kTraceError,
2925             "SetLocalSSRC() already sending");
2926         return -1;
2927     }
2928     _rtpRtcpModule->SetSSRC(ssrc);
2929     return 0;
2930 }
2931
2932 int
2933 Channel::GetLocalSSRC(unsigned int& ssrc)
2934 {
2935     ssrc = _rtpRtcpModule->SSRC();
2936     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2937                  VoEId(_instanceId,_channelId),
2938                  "GetLocalSSRC() => ssrc=%lu", ssrc);
2939     return 0;
2940 }
2941
2942 int
2943 Channel::GetRemoteSSRC(unsigned int& ssrc)
2944 {
2945     ssrc = rtp_receiver_->SSRC();
2946     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2947                  VoEId(_instanceId,_channelId),
2948                  "GetRemoteSSRC() => ssrc=%lu", ssrc);
2949     return 0;
2950 }
2951
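// RTP header extensions: the receive-side setters below de-register the
// extension from the local RTP header parser and, when enabled, re-register it
// under the given id so it is decoded from incoming packets; the send-side
// setters delegate to SetSendRtpHeaderExtension().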
2952 int Channel::SetSendAudioLevelIndicationStatus(bool enable, unsigned char id) {
2953   _includeAudioLevelIndication = enable;
2954   return SetSendRtpHeaderExtension(enable, kRtpExtensionAudioLevel, id);
2955 }
2956
2957 int Channel::SetReceiveAudioLevelIndicationStatus(bool enable,
2958                                                   unsigned char id) {
2959   rtp_header_parser_->DeregisterRtpHeaderExtension(
2960       kRtpExtensionAudioLevel);
2961   if (enable && !rtp_header_parser_->RegisterRtpHeaderExtension(
2962           kRtpExtensionAudioLevel, id)) {
2963     return -1;
2964   }
2965   return 0;
2966 }
2967
2968 int Channel::SetSendAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
2969   return SetSendRtpHeaderExtension(enable, kRtpExtensionAbsoluteSendTime, id);
2970 }
2971
2972 int Channel::SetReceiveAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
2973   rtp_header_parser_->DeregisterRtpHeaderExtension(
2974       kRtpExtensionAbsoluteSendTime);
2975   if (enable && !rtp_header_parser_->RegisterRtpHeaderExtension(
2976       kRtpExtensionAbsoluteSendTime, id)) {
2977     return -1;
2978   }
2979   return 0;
2980 }
2981
2982 int
2983 Channel::SetRTCPStatus(bool enable)
2984 {
2985     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2986                  "Channel::SetRTCPStatus()");
2987     if (_rtpRtcpModule->SetRTCPStatus(enable ?
2988         kRtcpCompound : kRtcpOff) != 0)
2989     {
2990         _engineStatisticsPtr->SetLastError(
2991             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2992             "SetRTCPStatus() failed to set RTCP status");
2993         return -1;
2994     }
2995     return 0;
2996 }
2997
2998 int
2999 Channel::GetRTCPStatus(bool& enabled)
3000 {
3001     RTCPMethod method = _rtpRtcpModule->RTCP();
3002     enabled = (method != kRtcpOff);
3003     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3004                  VoEId(_instanceId,_channelId),
3005                  "GetRTCPStatus() => enabled=%d", enabled);
3006     return 0;
3007 }
3008
3009 int
3010 Channel::SetRTCP_CNAME(const char cName[256])
3011 {
3012     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3013                  "Channel::SetRTCP_CNAME()");
3014     if (_rtpRtcpModule->SetCNAME(cName) != 0)
3015     {
3016         _engineStatisticsPtr->SetLastError(
3017             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3018             "SetRTCP_CNAME() failed to set RTCP CNAME");
3019         return -1;
3020     }
3021     return 0;
3022 }
3023
3024 int
3025 Channel::GetRemoteRTCP_CNAME(char cName[256])
3026 {
3027     if (cName == NULL)
3028     {
3029         _engineStatisticsPtr->SetLastError(
3030             VE_INVALID_ARGUMENT, kTraceError,
3031             "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
3032         return -1;
3033     }
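         // RemoteCNAME() writes at most RTCP_CNAME_SIZE bytes into |cname|,
         // including the terminating null. The public API buffer is declared
         // as 256 bytes and is assumed to be at least that large, which is
         // why the plain strcpy() below is considered safe here.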
3034     char cname[RTCP_CNAME_SIZE];
3035     const uint32_t remoteSSRC = rtp_receiver_->SSRC();
3036     if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0)
3037     {
3038         _engineStatisticsPtr->SetLastError(
3039             VE_CANNOT_RETRIEVE_CNAME, kTraceError,
3040             "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
3041         return -1;
3042     }
3043     strcpy(cName, cname);
3044     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3045                  VoEId(_instanceId, _channelId),
3046                  "GetRemoteRTCP_CNAME() => cName=%s", cName);
3047     return 0;
3048 }
3049
3050 int
3051 Channel::GetRemoteRTCPData(
3052     unsigned int& NTPHigh,
3053     unsigned int& NTPLow,
3054     unsigned int& timestamp,
3055     unsigned int& playoutTimestamp,
3056     unsigned int* jitter,
3057     unsigned short* fractionLost)
3058 {
3059     // --- Information from sender info in received Sender Reports
3060
3061     RTCPSenderInfo senderInfo;
3062     if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0)
3063     {
3064         _engineStatisticsPtr->SetLastError(
3065             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3066             "GetRemoteRTCPData() failed to retrieve sender info for remote "
3067             "side");
3068         return -1;
3069     }
3070
3071     // We only use 12 of the 20 bytes in the sender info (the packet and
3072     // octet counts are ignored).
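         // NTPseconds and NTPfraction together form the 64-bit NTP timestamp
         // of the last received Sender Report: the high word counts whole
         // seconds and the low word is the fractional part in units of
         // 1/2^32 s (illustrative example: a fraction of 0x80000000 is 0.5 s).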
3073     NTPHigh = senderInfo.NTPseconds;
3074     NTPLow = senderInfo.NTPfraction;
3075     timestamp = senderInfo.RTPtimeStamp;
3076
3077     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3078                  VoEId(_instanceId, _channelId),
3079                  "GetRemoteRTCPData() => NTPHigh=%u, NTPLow=%u, "
3080                  "timestamp=%u",
3081                  NTPHigh, NTPLow, timestamp);
3082
3083     // --- Locally derived information
3084
3085     // This value is updated on each incoming RTCP packet (0 when no packet
3086     // has been received)
3087     playoutTimestamp = playout_timestamp_rtcp_;
3088
3089     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3090                  VoEId(_instanceId, _channelId),
3091                  "GetRemoteRTCPData() => playoutTimestamp=%u",
3092                  playout_timestamp_rtcp_);
3093
3094     if (NULL != jitter || NULL != fractionLost)
3095     {
3096         // Get all RTCP receiver report blocks that have been received on this
3097         // channel. If we have received RTP packets from a remote source we
3098         // know its SSRC and use the matching report block; otherwise we use
3099         // the first report block.
3100         std::vector<RTCPReportBlock> remote_stats;
3101         if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
3102             remote_stats.empty()) {
3103           WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3104                        VoEId(_instanceId, _channelId),
3105                        "GetRemoteRTCPData() failed to measure statistics due"
3106                        " to lack of received RTP and/or RTCP packets");
3107           return -1;
3108         }
3109
3110         uint32_t remoteSSRC = rtp_receiver_->SSRC();
3111         std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
3112         for (; it != remote_stats.end(); ++it) {
3113           if (it->remoteSSRC == remoteSSRC)
3114             break;
3115         }
3116
3117         if (it == remote_stats.end()) {
3118           // If we have not received any RTCP packets from this SSRC it probably
3119           // means that we have not received any RTP packets.
3120           // Use the first received report block instead.
3121           it = remote_stats.begin();
3122           remoteSSRC = it->remoteSSRC;
3123         }
3124
3125         if (jitter) {
3126           *jitter = it->jitter;
3127           WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3128                        VoEId(_instanceId, _channelId),
3129                        "GetRemoteRTCPData() => jitter = %u", *jitter);
3130         }
3131
3132         if (fractionLost) {
3133           *fractionLost = it->fractionLost;
3134           WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3135                        VoEId(_instanceId, _channelId),
3136                        "GetRemoteRTCPData() => fractionLost = %u",
3137                        *fractionLost);
3138         }
3139     }
3140     return 0;
3141 }
3142
3143 int
3144 Channel::SendApplicationDefinedRTCPPacket(unsigned char subType,
3145                                              unsigned int name,
3146                                              const char* data,
3147                                              unsigned short dataLengthInBytes)
3148 {
3149     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3150                  "Channel::SendApplicationDefinedRTCPPacket()");
3151     if (!channel_state_.Get().sending)
3152     {
3153         _engineStatisticsPtr->SetLastError(
3154             VE_NOT_SENDING, kTraceError,
3155             "SendApplicationDefinedRTCPPacket() not sending");
3156         return -1;
3157     }
3158     if (NULL == data)
3159     {
3160         _engineStatisticsPtr->SetLastError(
3161             VE_INVALID_ARGUMENT, kTraceError,
3162             "SendApplicationDefinedRTCPPacket() invalid data value");
3163         return -1;
3164     }
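         // Per RFC 3550 section 6.7, the application-dependent data in an
         // RTCP APP packet must be a multiple of 32 bits long, hence the
         // length check below. |name| is meant to carry four ASCII characters
         // packed into a 32-bit word, and |subType| distinguishes APP packets
         // sent under that name.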
3165     if (dataLengthInBytes % 4 != 0)
3166     {
3167         _engineStatisticsPtr->SetLastError(
3168             VE_INVALID_ARGUMENT, kTraceError,
3169             "SendApplicationDefinedRTCPPacket() invalid length value");
3170         return -1;
3171     }
3172     RTCPMethod status = _rtpRtcpModule->RTCP();
3173     if (status == kRtcpOff)
3174     {
3175         _engineStatisticsPtr->SetLastError(
3176             VE_RTCP_ERROR, kTraceError,
3177             "SendApplicationDefinedRTCPPacket() RTCP is disabled");
3178         return -1;
3179     }
3180
3181     // Create and schedule the RTCP APP packet for transmission
3182     if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
3183         subType,
3184         name,
3185         (const unsigned char*) data,
3186         dataLengthInBytes) != 0)
3187     {
3188         _engineStatisticsPtr->SetLastError(
3189             VE_SEND_ERROR, kTraceError,
3190             "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
3191         return -1;
3192     }
3193     return 0;
3194 }
3195
3196 int
3197 Channel::GetRTPStatistics(
3198         unsigned int& averageJitterMs,
3199         unsigned int& maxJitterMs,
3200         unsigned int& discardedPackets)
3201 {
3202     // The jitter statistics are updated for each received RTP packet and are
3203     // based on received packets.
3204     if (_rtpRtcpModule->RTCP() == kRtcpOff) {
3205       // If RTCP is off, there is no timed thread in the RTCP module regularly
3206       // generating new stats; trigger the update manually here instead.
3207       StreamStatistician* statistician =
3208           rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
3209       if (statistician) {
3210         // Don't use the returned statistics; use the data from the proxy
3211         // instead so that the max jitter can be fetched atomically.
3212         RtcpStatistics s;
3213         statistician->GetStatistics(&s, true);
3214       }
3215     }
3216
3217     ChannelStatistics stats = statistics_proxy_->GetStats();
3218     const int32_t playoutFrequency = audio_coding_->PlayoutFrequency();
3219     if (playoutFrequency > 0) {
3220       // Scale RTP statistics given the current playout frequency
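           // The interarrival jitter reported by RTCP is expressed in RTP
           // timestamp units, so dividing by (playoutFrequency / 1000)
           // converts it to milliseconds. Illustrative example only: a jitter
           // of 960 timestamp units at a 48000 Hz playout frequency maps to
           // 960 / 48 = 20 ms.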
3221       maxJitterMs = stats.max_jitter / (playoutFrequency / 1000);
3222       averageJitterMs = stats.rtcp.jitter / (playoutFrequency / 1000);
3223     }
3224
3225     discardedPackets = _numberOfDiscardedPackets;
3226
3227     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3228                VoEId(_instanceId, _channelId),
3229                "GetRTPStatistics() => averageJitterMs = %u, maxJitterMs = %u,"
3230                " discardedPackets = %u",
3231                averageJitterMs, maxJitterMs, discardedPackets);
3232     return 0;
3233 }
3234
3235 int Channel::GetRemoteRTCPReportBlocks(
3236     std::vector<ReportBlock>* report_blocks) {
3237   if (report_blocks == NULL) {
3238     _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
3239       "GetRemoteRTCPReportBlocks() invalid report_blocks.");
3240     return -1;
3241   }
3242
3243   // Get the report blocks from the latest received RTCP Sender or Receiver
3244   // Report. Each element in the vector contains the sender's SSRC and a
3245   // report block according to RFC 3550.
3246   std::vector<RTCPReportBlock> rtcp_report_blocks;
3247   if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
3248     _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3249         "GetRemoteRTCPReportBlocks() failed to read RTCP SR/RR report block.");
3250     return -1;
3251   }
3252
3253   if (rtcp_report_blocks.empty())
3254     return 0;
3255
3256   std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
3257   for (; it != rtcp_report_blocks.end(); ++it) {
3258     ReportBlock report_block;
3259     report_block.sender_SSRC = it->remoteSSRC;
3260     report_block.source_SSRC = it->sourceSSRC;
3261     report_block.fraction_lost = it->fractionLost;
3262     report_block.cumulative_num_packets_lost = it->cumulativeLost;
3263     report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
3264     report_block.interarrival_jitter = it->jitter;
3265     report_block.last_SR_timestamp = it->lastSR;
3266     report_block.delay_since_last_SR = it->delaySinceLastSR;
3267     report_blocks->push_back(report_block);
3268   }
3269   return 0;
3270 }
3271
3272 int
3273 Channel::GetRTPStatistics(CallStatistics& stats)
3274 {
3275     // --- RtcpStatistics
3276
3277     // The jitter statistics are updated for each received RTP packet and are
3278     // based on received packets.
3279     RtcpStatistics statistics;
3280     StreamStatistician* statistician =
3281         rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
3282     if (!statistician || !statistician->GetStatistics(
3283         &statistics, _rtpRtcpModule->RTCP() == kRtcpOff)) {
3284       _engineStatisticsPtr->SetLastError(
3285           VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
3286           "GetRTPStatistics() failed to read RTP statistics from the "
3287           "RTP/RTCP module");
3288     }
3289
3290     stats.fractionLost = statistics.fraction_lost;
3291     stats.cumulativeLost = statistics.cumulative_lost;
3292     stats.extendedMax = statistics.extended_max_sequence_number;
3293     stats.jitterSamples = statistics.jitter;
3294
3295     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3296                  VoEId(_instanceId, _channelId),
3297                  "GetRTPStatistics() => fractionLost=%u, cumulativeLost=%u,"
3298                  " extendedMax=%u, jitterSamples=%u",
3299                  stats.fractionLost, stats.cumulativeLost, stats.extendedMax,
3300                  stats.jitterSamples);
3301
3302     // --- RTT
3303     stats.rttMs = GetRTT();
3304
3305     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3306                  VoEId(_instanceId, _channelId),
3307                  "GetRTPStatistics() => rttMs=%d", stats.rttMs);
3308
3309     // --- Data counters
3310
3311     uint32_t bytesSent(0);
3312     uint32_t packetsSent(0);
3313     uint32_t bytesReceived(0);
3314     uint32_t packetsReceived(0);
3315
3316     if (statistician) {
3317       statistician->GetDataCounters(&bytesReceived, &packetsReceived);
3318     }
3319
3320     if (_rtpRtcpModule->DataCountersRTP(&bytesSent,
3321                                         &packetsSent) != 0)
3322     {
3323         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3324                      VoEId(_instanceId, _channelId),
3325                      "GetRTPStatistics() failed to retrieve RTP datacounters =>"
3326                      " output will not be complete");
3327     }
3328
3329     stats.bytesSent = bytesSent;
3330     stats.packetsSent = packetsSent;
3331     stats.bytesReceived = bytesReceived;
3332     stats.packetsReceived = packetsReceived;
3333
3334     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3335                  VoEId(_instanceId, _channelId),
3336                  "GetRTPStatistics() => bytesSent=%d, packetsSent=%d,"
3337                  " bytesReceived=%d, packetsReceived=%d",
3338                  stats.bytesSent, stats.packetsSent, stats.bytesReceived,
3339                  stats.packetsReceived);
3340
3341     // --- Timestamps
3342     {
3343       CriticalSectionScoped lock(ts_stats_lock_.get());
3344       stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_;
3345     }
3346     return 0;
3347 }
3348
3349 int Channel::SetREDStatus(bool enable, int redPayloadtype) {
3350   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3351                "Channel::SetREDStatus()");
3352
3353   if (enable) {
3354     if (redPayloadtype < 0 || redPayloadtype > 127) {
3355       _engineStatisticsPtr->SetLastError(
3356           VE_PLTYPE_ERROR, kTraceError,
3357           "SetREDStatus() invalid RED payload type");
3358       return -1;
3359     }
3360
3361     if (SetRedPayloadType(redPayloadtype) < 0) {
3362       _engineStatisticsPtr->SetLastError(
3363           VE_CODEC_ERROR, kTraceError,
3364           "SetREDStatus() failed to register RED in the ACM");
3365       return -1;
3366     }
3367   }
3368
3369   if (audio_coding_->SetREDStatus(enable) != 0) {
3370     _engineStatisticsPtr->SetLastError(
3371         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3372         "SetREDStatus() failed to set RED state in the ACM");
3373     return -1;
3374   }
3375   return 0;
3376 }
3377
3378 int
3379 Channel::GetREDStatus(bool& enabled, int& redPayloadtype)
3380 {
3381     enabled = audio_coding_->REDStatus();
3382     if (enabled)
3383     {
3384         int8_t payloadType(0);
3385         if (_rtpRtcpModule->SendREDPayloadType(payloadType) != 0)
3386         {
3387             _engineStatisticsPtr->SetLastError(
3388                 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3389                 "GetREDStatus() failed to retrieve RED PT from RTP/RTCP "
3390                 "module");
3391             return -1;
3392         }
             redPayloadtype = payloadType;
3393         WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3394                    VoEId(_instanceId, _channelId),
3395                    "GetREDStatus() => enabled=%d, redPayloadtype=%d",
3396                    enabled, redPayloadtype);
3397         return 0;
3398     }
3399     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3400                  VoEId(_instanceId, _channelId),
3401                  "GetREDStatus() => enabled=%d", enabled);
3402     return 0;
3403 }
3404
3405 int Channel::SetCodecFECStatus(bool enable) {
3406   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3407                "Channel::SetCodecFECStatus()");
3408
3409   if (audio_coding_->SetCodecFEC(enable) != 0) {
3410     _engineStatisticsPtr->SetLastError(
3411         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3412         "SetCodecFECStatus() failed to set FEC state");
3413     return -1;
3414   }
3415   return 0;
3416 }
3417
3418 bool Channel::GetCodecFECStatus() {
3419   bool enabled = audio_coding_->CodecFEC();
3420   WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3421                VoEId(_instanceId, _channelId),
3422                "GetCodecFECStatus() => enabled=%d", enabled);
3423   return enabled;
3424 }
3425
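     // Enabling NACK touches four cooperating pieces: the RTP/RTCP module
     // stores the last |maxNumberOfPackets| sent packets so they can be
     // retransmitted on request, the receive statistics allow reordering up
     // to the same threshold, the RTP receiver is told that RTCP-based NACK
     // is in use, and NetEq (through the ACM) maintains the list of missing
     // packets to request.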
3426 void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
3427   // None of these functions can fail.
3428   _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
3429   rtp_receive_statistics_->SetMaxReorderingThreshold(maxNumberOfPackets);
3430   rtp_receiver_->SetNACKStatus(enable ? kNackRtcp : kNackOff);
3431   if (enable)
3432     audio_coding_->EnableNack(maxNumberOfPackets);
3433   else
3434     audio_coding_->DisableNack();
3435 }
3436
3437 // Called when we are missing one or more packets.
3438 int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
3439   return _rtpRtcpModule->SendNACK(sequence_numbers, length);
3440 }
3441
3442 int
3443 Channel::StartRTPDump(const char fileNameUTF8[1024],
3444                       RTPDirections direction)
3445 {
3446     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3447                  "Channel::StartRTPDump()");
3448     if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
3449     {
3450         _engineStatisticsPtr->SetLastError(
3451             VE_INVALID_ARGUMENT, kTraceError,
3452             "StartRTPDump() invalid RTP direction");
3453         return -1;
3454     }
3455     RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
3456         &_rtpDumpIn : &_rtpDumpOut;
3457     if (rtpDumpPtr == NULL)
3458     {
3459         assert(false);
3460         return -1;
3461     }
3462     if (rtpDumpPtr->IsActive())
3463     {
3464         rtpDumpPtr->Stop();
3465     }
3466     if (rtpDumpPtr->Start(fileNameUTF8) != 0)
3467     {
3468         _engineStatisticsPtr->SetLastError(
3469             VE_BAD_FILE, kTraceError,
3470             "StartRTPDump() failed to create file");
3471         return -1;
3472     }
3473     return 0;
3474 }
3475
3476 int
3477 Channel::StopRTPDump(RTPDirections direction)
3478 {
3479     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3480                  "Channel::StopRTPDump()");
3481     if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
3482     {
3483         _engineStatisticsPtr->SetLastError(
3484             VE_INVALID_ARGUMENT, kTraceError,
3485             "StopRTPDump() invalid RTP direction");
3486         return -1;
3487     }
3488     RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
3489         &_rtpDumpIn : &_rtpDumpOut;
3490     if (rtpDumpPtr == NULL)
3491     {
3492         assert(false);
3493         return -1;
3494     }
3495     if (!rtpDumpPtr->IsActive())
3496     {
3497         return 0;
3498     }
3499     return rtpDumpPtr->Stop();
3500 }
3501
3502 bool
3503 Channel::RTPDumpIsActive(RTPDirections direction)
3504 {
3505     if ((direction != kRtpIncoming) &&
3506         (direction != kRtpOutgoing))
3507     {
3508         _engineStatisticsPtr->SetLastError(
3509             VE_INVALID_ARGUMENT, kTraceError,
3510             "RTPDumpIsActive() invalid RTP direction");
3511         return false;
3512     }
3513     RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
3514         &_rtpDumpIn : &_rtpDumpOut;
3515     return rtpDumpPtr->IsActive();
3516 }
3517
3518 void Channel::SetVideoEngineBWETarget(ViENetwork* vie_network,
3519                                       int video_channel) {
3520   CriticalSectionScoped cs(&_callbackCritSect);
3521   if (vie_network_) {
3522     vie_network_->Release();
3523     vie_network_ = NULL;
3524   }
3525   video_channel_ = -1;
3526
3527   if (vie_network != NULL && video_channel != -1) {
3528     vie_network_ = vie_network;
3529     video_channel_ = video_channel;
3530   }
3531 }
3532
3533 uint32_t
3534 Channel::Demultiplex(const AudioFrame& audioFrame)
3535 {
3536     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3537                  "Channel::Demultiplex()");
3538     _audioFrame.CopyFrom(audioFrame);
3539     _audioFrame.id_ = _channelId;
3540     return 0;
3541 }
3542
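     // This overload takes raw capture data instead of an AudioFrame: the
     // audio is converted to the send codec's channel count and sample rate
     // (using |input_resampler_| and the scratch buffer below) and stored in
     // |_audioFrame|, ready for PrepareEncodeAndSend()/EncodeAndSend().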
3543 void Channel::Demultiplex(const int16_t* audio_data,
3544                           int sample_rate,
3545                           int number_of_frames,
3546                           int number_of_channels) {
3547   CodecInst codec;
3548   GetSendCodec(codec);
3549
3550   if (!mono_recording_audio_.get()) {
3551     // Temporary space for DownConvertToCodecFormat.
3552     mono_recording_audio_.reset(new int16_t[kMaxMonoDataSizeSamples]);
3553   }
3554   DownConvertToCodecFormat(audio_data,
3555                            number_of_frames,
3556                            number_of_channels,
3557                            sample_rate,
3558                            codec.channels,
3559                            codec.plfreq,
3560                            mono_recording_audio_.get(),
3561                            &input_resampler_,
3562                            &_audioFrame);
3563 }
3564
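     // The per-10 ms send path, as driven from the transmit side, is:
     // Demultiplex() copies the capture frame into |_audioFrame|,
     // PrepareEncodeAndSend() applies file mixing, muting, external media
     // processing, in-band DTMF and level measurement, and EncodeAndSend()
     // hands the frame to the ACM, which calls back with encoded payloads
     // ready for packetization.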
3565 uint32_t
3566 Channel::PrepareEncodeAndSend(int mixingFrequency)
3567 {
3568     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3569                  "Channel::PrepareEncodeAndSend()");
3570
3571     if (_audioFrame.samples_per_channel_ == 0)
3572     {
3573         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
3574                      "Channel::PrepareEncodeAndSend() invalid audio frame");
3575         return 0xFFFFFFFF;
3576     }
3577
3578     if (channel_state_.Get().input_file_playing)
3579     {
3580         MixOrReplaceAudioWithFile(mixingFrequency);
3581     }
3582
3583     bool is_muted = Mute();  // Cache locally as Mute() takes a lock.
3584     if (is_muted) {
3585       AudioFrameOperations::Mute(_audioFrame);
3586     }
3587
3588     if (channel_state_.Get().input_external_media)
3589     {
3590         CriticalSectionScoped cs(&_callbackCritSect);
3591         const bool isStereo = (_audioFrame.num_channels_ == 2);
3592         if (_inputExternalMediaCallbackPtr)
3593         {
3594             _inputExternalMediaCallbackPtr->Process(
3595                 _channelId,
3596                 kRecordingPerChannel,
3597                (int16_t*)_audioFrame.data_,
3598                 _audioFrame.samples_per_channel_,
3599                 _audioFrame.sample_rate_hz_,
3600                 isStereo);
3601         }
3602     }
3603
3604     InsertInbandDtmfTone();
3605
3606     if (_includeAudioLevelIndication) {
3607       int length = _audioFrame.samples_per_channel_ * _audioFrame.num_channels_;
3608       if (is_muted) {
3609         rms_level_.ProcessMuted(length);
3610       } else {
3611         rms_level_.Process(_audioFrame.data_, length);
3612       }
3613     }
3614
3615     return 0;
3616 }
3617
3618 uint32_t
3619 Channel::EncodeAndSend()
3620 {
3621     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3622                  "Channel::EncodeAndSend()");
3623
3624     assert(_audioFrame.num_channels_ <= 2);
3625     if (_audioFrame.samples_per_channel_ == 0)
3626     {
3627         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
3628                      "Channel::EncodeAndSend() invalid audio frame");
3629         return 0xFFFFFFFF;
3630     }
3631
3632     _audioFrame.id_ = _channelId;
3633
3634     // --- Add 10 ms of raw (PCM) audio data to the encoder.
3635
3636     // The ACM resamples internally.
3637     _audioFrame.timestamp_ = _timeStamp;
3638     if (audio_coding_->Add10MsData((AudioFrame&)_audioFrame) != 0)
3639     {
3640         WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
3641                      "Channel::EncodeAndSend() ACM encoding failed");
3642         return 0xFFFFFFFF;
3643     }
3644
3645     _timeStamp += _audioFrame.samples_per_channel_;
3646
3647     // --- Encode if complete frame is ready
3648
3649     // This call will trigger AudioPacketizationCallback::SendData if encoding
3650     // is done and payload is ready for packetization and transmission.
3651     return audio_coding_->Process();
3652 }
3653
3654 int Channel::RegisterExternalMediaProcessing(
3655     ProcessingTypes type,
3656     VoEMediaProcess& processObject)
3657 {
3658     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3659                  "Channel::RegisterExternalMediaProcessing()");
3660
3661     CriticalSectionScoped cs(&_callbackCritSect);
3662
3663     if (kPlaybackPerChannel == type)
3664     {
3665         if (_outputExternalMediaCallbackPtr)
3666         {
3667             _engineStatisticsPtr->SetLastError(
3668                 VE_INVALID_OPERATION, kTraceError,
3669                 "Channel::RegisterExternalMediaProcessing() "
3670                 "output external media already enabled");
3671             return -1;
3672         }
3673         _outputExternalMediaCallbackPtr = &processObject;
3674         _outputExternalMedia = true;
3675     }
3676     else if (kRecordingPerChannel == type)
3677     {
3678         if (_inputExternalMediaCallbackPtr)
3679         {
3680             _engineStatisticsPtr->SetLastError(
3681                 VE_INVALID_OPERATION, kTraceError,
3682                 "Channel::RegisterExternalMediaProcessing() "
3683                 "input external media already enabled");
3684             return -1;
3685         }
3686         _inputExternalMediaCallbackPtr = &processObject;
3687         channel_state_.SetInputExternalMedia(true);
3688     }
3689     return 0;
3690 }
3691
3692 int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type)
3693 {
3694     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3695                  "Channel::DeRegisterExternalMediaProcessing()");
3696
3697     CriticalSectionScoped cs(&_callbackCritSect);
3698
3699     if (kPlaybackPerChannel == type)
3700     {
3701         if (!_outputExternalMediaCallbackPtr)
3702         {
3703             _engineStatisticsPtr->SetLastError(
3704                 VE_INVALID_OPERATION, kTraceWarning,
3705                 "Channel::DeRegisterExternalMediaProcessing() "
3706                 "output external media already disabled");
3707             return 0;
3708         }
3709         _outputExternalMedia = false;
3710         _outputExternalMediaCallbackPtr = NULL;
3711     }
3712     else if (kRecordingPerChannel == type)
3713     {
3714         if (!_inputExternalMediaCallbackPtr)
3715         {
3716             _engineStatisticsPtr->SetLastError(
3717                 VE_INVALID_OPERATION, kTraceWarning,
3718                 "Channel::DeRegisterExternalMediaProcessing() "
3719                 "input external media already disabled");
3720             return 0;
3721         }
3722         channel_state_.SetInputExternalMedia(false);
3723         _inputExternalMediaCallbackPtr = NULL;
3724     }
3725
3726     return 0;
3727 }
3728
3729 int Channel::SetExternalMixing(bool enabled) {
3730     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3731                  "Channel::SetExternalMixing(enabled=%d)", enabled);
3732
3733     if (channel_state_.Get().playing)
3734     {
3735         _engineStatisticsPtr->SetLastError(
3736             VE_INVALID_OPERATION, kTraceError,
3737             "Channel::SetExternalMixing() "
3738             "external mixing cannot be changed while playing.");
3739         return -1;
3740     }
3741
3742     _externalMixing = enabled;
3743
3744     return 0;
3745 }
3746
3747 int
3748 Channel::GetNetworkStatistics(NetworkStatistics& stats)
3749 {
3750     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3751                  "Channel::GetNetworkStatistics()");
3752     ACMNetworkStatistics acm_stats;
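         // NetworkStatistics (the public VoE struct) is assumed to mirror
         // ACMNetworkStatistics field for field; that layout compatibility is
         // what makes the raw memcpy() below safe.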
3753     int return_value = audio_coding_->NetworkStatistics(&acm_stats);
3754     if (return_value >= 0) {
3755       memcpy(&stats, &acm_stats, sizeof(NetworkStatistics));
3756     }
3757     return return_value;
3758 }
3759
3760 void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const {
3761   audio_coding_->GetDecodingCallStatistics(stats);
3762 }
3763
3764 bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
3765                                int* playout_buffer_delay_ms) const {
3766   if (_average_jitter_buffer_delay_us == 0) {
3767     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3768                  "Channel::GetDelayEstimate() no valid estimate.");
3769     return false;
3770   }
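       // |_average_jitter_buffer_delay_us| is kept in microseconds (see
       // UpdatePacketDelay()); adding 500 before dividing by 1000 rounds to
       // the nearest millisecond. Illustrative example only: 41312 us becomes
       // 41 ms, to which the measured inter-packet delay is then added.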
3771   *jitter_buffer_delay_ms = (_average_jitter_buffer_delay_us + 500) / 1000 +
3772       _recPacketDelayMs;
3773   *playout_buffer_delay_ms = playout_delay_ms_;
3774   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3775                "Channel::GetDelayEstimate()");
3776   return true;
3777 }
3778
3779 int Channel::SetInitialPlayoutDelay(int delay_ms)
3780 {
3781   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3782                "Channel::SetInitialPlayoutDelay()");
3783   if ((delay_ms < kVoiceEngineMinMinPlayoutDelayMs) ||
3784       (delay_ms > kVoiceEngineMaxMinPlayoutDelayMs))
3785   {
3786     _engineStatisticsPtr->SetLastError(
3787         VE_INVALID_ARGUMENT, kTraceError,
3788         "SetInitialPlayoutDelay() invalid min delay");
3789     return -1;
3790   }
3791   if (audio_coding_->SetInitialPlayoutDelay(delay_ms) != 0)
3792   {
3793     _engineStatisticsPtr->SetLastError(
3794         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3795         "SetInitialPlayoutDelay() failed to set min playout delay");
3796     return -1;
3797   }
3798   return 0;
3799 }
3800
3801
3802 int
3803 Channel::SetMinimumPlayoutDelay(int delayMs)
3804 {
3805     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3806                  "Channel::SetMinimumPlayoutDelay()");
3807     if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
3808         (delayMs > kVoiceEngineMaxMinPlayoutDelayMs))
3809     {
3810         _engineStatisticsPtr->SetLastError(
3811             VE_INVALID_ARGUMENT, kTraceError,
3812             "SetMinimumPlayoutDelay() invalid min delay");
3813         return -1;
3814     }
3815     if (audio_coding_->SetMinimumPlayoutDelay(delayMs) != 0)
3816     {
3817         _engineStatisticsPtr->SetLastError(
3818             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3819             "SetMinimumPlayoutDelay() failed to set min playout delay");
3820         return -1;
3821     }
3822     return 0;
3823 }
3824
3825 void Channel::UpdatePlayoutTimestamp(bool rtcp) {
3826   uint32_t playout_timestamp = 0;
3827
3828   if (audio_coding_->PlayoutTimestamp(&playout_timestamp) == -1)  {
3829     // This can happen if this channel has not received any RTP packets. In
3830     // that case, NetEq cannot compute a playout timestamp.
3831     return;
3832   }
3833
3834   uint16_t delay_ms = 0;
3835   if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
3836     WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
3837                  "Channel::UpdatePlayoutTimestamp() failed to read playout"
3838                  " delay from the ADM");
3839     _engineStatisticsPtr->SetLastError(
3840         VE_CANNOT_RETRIEVE_VALUE, kTraceError,
3841         "UpdatePlayoutTimestamp() failed to retrieve playout delay");
3842     return;
3843   }
3844
3845   jitter_buffer_playout_timestamp_ = playout_timestamp;
3846
3847   // Remove the playout delay.
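       // The delay reported by the ADM is in milliseconds, so it is converted
       // to RTP timestamp units using the playout frequency. Illustrative
       // example only: a 60 ms playout delay at 16000 Hz removes
       // 60 * 16 = 960 timestamp units.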
3848   playout_timestamp -= (delay_ms * (GetPlayoutFrequency() / 1000));
3849
3850   WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3851                "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %u",
3852                playout_timestamp);
3853
3854   if (rtcp) {
3855     playout_timestamp_rtcp_ = playout_timestamp;
3856   } else {
3857     playout_timestamp_rtp_ = playout_timestamp;
3858   }
3859   playout_delay_ms_ = delay_ms;
3860 }
3861
3862 int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
3863   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3864                "Channel::GetPlayoutTimestamp()");
3865   if (playout_timestamp_rtp_ == 0)  {
3866     _engineStatisticsPtr->SetLastError(
3867         VE_CANNOT_RETRIEVE_VALUE, kTraceError,
3868         "GetPlayoutTimestamp() failed to retrieve timestamp");
3869     return -1;
3870   }
3871   timestamp = playout_timestamp_rtp_;
3872   WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3873                VoEId(_instanceId,_channelId),
3874                "GetPlayoutTimestamp() => timestamp=%u", timestamp);
3875   return 0;
3876 }
3877
3878 int
3879 Channel::SetInitTimestamp(unsigned int timestamp)
3880 {
3881     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3882                "Channel::SetInitTimestamp()");
3883     if (channel_state_.Get().sending)
3884     {
3885         _engineStatisticsPtr->SetLastError(
3886             VE_SENDING, kTraceError, "SetInitTimestamp() already sending");
3887         return -1;
3888     }
3889     if (_rtpRtcpModule->SetStartTimestamp(timestamp) != 0)
3890     {
3891         _engineStatisticsPtr->SetLastError(
3892             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3893             "SetInitTimestamp() failed to set timestamp");
3894         return -1;
3895     }
3896     return 0;
3897 }
3898
3899 int
3900 Channel::SetInitSequenceNumber(short sequenceNumber)
3901 {
3902     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3903                  "Channel::SetInitSequenceNumber()");
3904     if (channel_state_.Get().sending)
3905     {
3906         _engineStatisticsPtr->SetLastError(
3907             VE_SENDING, kTraceError,
3908             "SetInitSequenceNumber() already sending");
3909         return -1;
3910     }
3911     if (_rtpRtcpModule->SetSequenceNumber(sequenceNumber) != 0)
3912     {
3913         _engineStatisticsPtr->SetLastError(
3914             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3915             "SetInitSequenceNumber() failed to set sequence number");
3916         return -1;
3917     }
3918     return 0;
3919 }
3920
3921 int
3922 Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule, RtpReceiver** rtp_receiver) const
3923 {
3924     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3925                  "Channel::GetRtpRtcp()");
3926     *rtpRtcpModule = _rtpRtcpModule.get();
3927     *rtp_receiver = rtp_receiver_.get();
3928     return 0;
3929 }
3930
3931 // TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
3932 // a shared helper.
3933 int32_t
3934 Channel::MixOrReplaceAudioWithFile(int mixingFrequency)
3935 {
3936     scoped_ptr<int16_t[]> fileBuffer(new int16_t[640]);
3937     int fileSamples(0);
3938
3939     {
3940         CriticalSectionScoped cs(&_fileCritSect);
3941
3942         if (_inputFilePlayerPtr == NULL)
3943         {
3944             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3945                          VoEId(_instanceId, _channelId),
3946                          "Channel::MixOrReplaceAudioWithFile() file player"
3947                          " doesn't exist");
3948             return -1;
3949         }
3950
3951         if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
3952                                                       fileSamples,
3953                                                       mixingFrequency) == -1)
3954         {
3955             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3956                          VoEId(_instanceId, _channelId),
3957                          "Channel::MixOrReplaceAudioWithFile() file mixing "
3958                          "failed");
3959             return -1;
3960         }
3961         if (fileSamples == 0)
3962         {
3963             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3964                          VoEId(_instanceId, _channelId),
3965                          "Channel::MixOrReplaceAudioWithFile() file has ended");
3966             return 0;
3967         }
3968     }
3969
3970     assert(_audioFrame.samples_per_channel_ == fileSamples);
3971
3972     if (_mixFileWithMicrophone)
3973     {
3974         // Currently file stream is always mono.
3975         // TODO(xians): Change the code when FilePlayer supports real stereo.
3976         MixWithSat(_audioFrame.data_,
3977                    _audioFrame.num_channels_,
3978                    fileBuffer.get(),
3979                    1,
3980                    fileSamples);
3981     }
3982     else
3983     {
3984         // Replace ACM audio with file.
3985         // Currently file stream is always mono.
3986         // TODO(xians): Change the code when FilePlayer supports real stereo.
3987         _audioFrame.UpdateFrame(_channelId,
3988                                 0xFFFFFFFF,
3989                                 fileBuffer.get(),
3990                                 fileSamples,
3991                                 mixingFrequency,
3992                                 AudioFrame::kNormalSpeech,
3993                                 AudioFrame::kVadUnknown,
3994                                 1);
3995
3996     }
3997     return 0;
3998 }
3999
4000 int32_t
4001 Channel::MixAudioWithFile(AudioFrame& audioFrame,
4002                           int mixingFrequency)
4003 {
4004     assert(mixingFrequency <= 48000);
4005
4006     scoped_ptr<int16_t[]> fileBuffer(new int16_t[960]);
4007     int fileSamples(0);
4008
4009     {
4010         CriticalSectionScoped cs(&_fileCritSect);
4011
4012         if (_outputFilePlayerPtr == NULL)
4013         {
4014             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4015                          VoEId(_instanceId, _channelId),
4016                          "Channel::MixAudioWithFile() file mixing failed");
4017             return -1;
4018         }
4019
4020         // We should get the frequency we ask for.
4021         if (_outputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
4022                                                        fileSamples,
4023                                                        mixingFrequency) == -1)
4024         {
4025             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4026                          VoEId(_instanceId, _channelId),
4027                          "Channel::MixAudioWithFile() file mixing failed");
4028             return -1;
4029         }
4030     }
4031
4032     if (audioFrame.samples_per_channel_ == fileSamples)
4033     {
4034         // Currently file stream is always mono.
4035         // TODO(xians): Change the code when FilePlayer supports real stereo.
4036         MixWithSat(audioFrame.data_,
4037                    audioFrame.num_channels_,
4038                    fileBuffer.get(),
4039                    1,
4040                    fileSamples);
4041     }
4042     else
4043     {
4044         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4045             "Channel::MixAudioWithFile() samples_per_channel_(%d) != "
4046             "fileSamples(%d)",
4047             audioFrame.samples_per_channel_, fileSamples);
4048         return -1;
4049     }
4050
4051     return 0;
4052 }
4053
4054 int
4055 Channel::InsertInbandDtmfTone()
4056 {
4057     // Check if we should start a new tone.
4058     if (_inbandDtmfQueue.PendingDtmf() &&
4059         !_inbandDtmfGenerator.IsAddingTone() &&
4060         _inbandDtmfGenerator.DelaySinceLastTone() >
4061         kMinTelephoneEventSeparationMs)
4062     {
4063         int8_t eventCode(0);
4064         uint16_t lengthMs(0);
4065         uint8_t attenuationDb(0);
4066
4067         eventCode = _inbandDtmfQueue.NextDtmf(&lengthMs, &attenuationDb);
4068         _inbandDtmfGenerator.AddTone(eventCode, lengthMs, attenuationDb);
4069         if (_playInbandDtmfEvent)
4070         {
4071             // Add tone to output mixer using a reduced length to minimize
4072             // risk of echo.
4073             _outputMixerPtr->PlayDtmfTone(eventCode, lengthMs - 80,
4074                                           attenuationDb);
4075         }
4076     }
4077
4078     if (_inbandDtmfGenerator.IsAddingTone())
4079     {
4080         uint16_t frequency(0);
4081         _inbandDtmfGenerator.GetSampleRate(frequency);
4082
4083         if (frequency != _audioFrame.sample_rate_hz_)
4084         {
4085             // Update sample rate of Dtmf tone since the mixing frequency
4086             // has changed.
4087             _inbandDtmfGenerator.SetSampleRate(
4088                 (uint16_t) (_audioFrame.sample_rate_hz_));
4089             // Reset the tone to be added taking the new sample rate into
4090             // account.
4091             _inbandDtmfGenerator.ResetTone();
4092         }
4093
4094         int16_t toneBuffer[320];
4095         uint16_t toneSamples(0);
4096         // Get 10ms tone segment and set time since last tone to zero
4097         if (_inbandDtmfGenerator.Get10msTone(toneBuffer, toneSamples) == -1)
4098         {
4099             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4100                        VoEId(_instanceId, _channelId),
4101                        "Channel::InsertInbandDtmfTone() inserting DTMF tone failed");
4102             return -1;
4103         }
4104
4105         // Replace mixed audio with DTMF tone.
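             // The generated tone is mono; the same tone sample is written to
             // every channel of the (possibly stereo) frame, so the DTMF tone
             // replaces whatever audio was mixed before it.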
4106         for (int sample = 0;
4107             sample < _audioFrame.samples_per_channel_;
4108             sample++)
4109         {
4110             for (int channel = 0;
4111                 channel < _audioFrame.num_channels_;
4112                 channel++)
4113             {
4114                 const int index = sample * _audioFrame.num_channels_ + channel;
4115                 _audioFrame.data_[index] = toneBuffer[sample];
4116             }
4117         }
4118
4119         assert(_audioFrame.samples_per_channel_ == toneSamples);
4120     } else
4121     {
4122         // Add 10ms to "delay-since-last-tone" counter
4123         _inbandDtmfGenerator.UpdateDelaySinceLastTone();
4124     }
4125     return 0;
4126 }
4127
4128 int32_t
4129 Channel::SendPacketRaw(const void *data, int len, bool RTCP)
4130 {
4131     CriticalSectionScoped cs(&_callbackCritSect);
4132     if (_transportPtr == NULL)
4133     {
4134         return -1;
4135     }
4136     if (!RTCP)
4137     {
4138         return _transportPtr->SendPacket(_channelId, data, len);
4139     }
4140     else
4141     {
4142         return _transportPtr->SendRTCPPacket(_channelId, data, len);
4143     }
4144 }
4145
4146 // Called for incoming RTP packets after successful RTP header parsing.
4147 void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
4148                                 uint16_t sequence_number) {
4149   WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4150                "Channel::UpdatePacketDelay(timestamp=%u, sequenceNumber=%u)",
4151                rtp_timestamp, sequence_number);
4152
4153   // Get frequency of last received payload
4154   int rtp_receive_frequency = GetPlayoutFrequency();
4155
4156   // Update the least required delay.
4157   least_required_delay_ms_ = audio_coding_->LeastRequiredDelayMs();
4158
4159   // |jitter_buffer_playout_timestamp_| is updated in UpdatePlayoutTimestamp()
4160   // for every incoming packet.
4161   uint32_t timestamp_diff_ms = (rtp_timestamp -
4162       jitter_buffer_playout_timestamp_) / (rtp_receive_frequency / 1000);
4163   if (!IsNewerTimestamp(rtp_timestamp, jitter_buffer_playout_timestamp_) ||
4164       timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
4165     // If |jitter_buffer_playout_timestamp_| is newer than the incoming RTP
4166     // timestamp, the resulting difference is negative, but is set to zero.
4167     // This can happen when a network glitch causes a packet to arrive late,
4168     // and during long comfort noise periods with clock drift.
4169     timestamp_diff_ms = 0;
4170   }
4171
4172   uint16_t packet_delay_ms = (rtp_timestamp - _previousTimestamp) /
4173       (rtp_receive_frequency / 1000);
4174
4175   _previousTimestamp = rtp_timestamp;
4176
4177   if (timestamp_diff_ms == 0) return;
4178
4179   if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
4180     _recPacketDelayMs = packet_delay_ms;
4181   }
4182
4183   if (_average_jitter_buffer_delay_us == 0) {
4184     _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000;
4185     return;
4186   }
4187
4188   // Filter the average delay value using an exponential filter (alpha is
4189   // 7/8). The average is kept in microseconds (1000 x the difference in ms)
4190   // here to reduce the risk of rounding errors; GetDelayEstimate() converts
4191   // it back to milliseconds.
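       // Illustrative example only: with a previous average of 40000 us and a
       // new difference of 50 ms, the update yields
       // (7 * 40000 + 1000 * 50 + 500) / 8 = 330500 / 8 = 41312 us.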
4192   _average_jitter_buffer_delay_us = (_average_jitter_buffer_delay_us * 7 +
4193       1000 * timestamp_diff_ms + 500) / 8;
4194 }
4195
4196 void
4197 Channel::RegisterReceiveCodecsToRTPModule()
4198 {
4199     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4200                  "Channel::RegisterReceiveCodecsToRTPModule()");
4201
4202
4203     CodecInst codec;
4204     const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
4205
4206     for (int idx = 0; idx < nSupportedCodecs; idx++)
4207     {
4208         // Open up the RTP/RTCP receiver for all supported codecs
4209         if ((audio_coding_->Codec(idx, &codec) == -1) ||
4210             (rtp_receiver_->RegisterReceivePayload(
4211                 codec.plname,
4212                 codec.pltype,
4213                 codec.plfreq,
4214                 codec.channels,
4215                 (codec.rate < 0) ? 0 : codec.rate) == -1))
4216         {
4217             WEBRTC_TRACE(
4218                          kTraceWarning,
4219                          kTraceVoice,
4220                          VoEId(_instanceId, _channelId),
4221                          "Channel::RegisterReceiveCodecsToRTPModule() unable"
4222                          " to register %s (%d/%d/%d/%d) to RTP/RTCP receiver",
4223                          codec.plname, codec.pltype, codec.plfreq,
4224                          codec.channels, codec.rate);
4225         }
4226         else
4227         {
4228             WEBRTC_TRACE(
4229                          kTraceInfo,
4230                          kTraceVoice,
4231                          VoEId(_instanceId, _channelId),
4232                          "Channel::RegisterReceiveCodecsToRTPModule() %s "
4233                          "(%d/%d/%d/%d) has been added to the RTP/RTCP "
4234                          "receiver",
4235                          codec.plname, codec.pltype, codec.plfreq,
4236                          codec.channels, codec.rate);
4237         }
4238     }
4239 }
4240
4241 int Channel::SetSecondarySendCodec(const CodecInst& codec,
4242                                    int red_payload_type) {
4243   // Sanity check for payload type.
4244   if (red_payload_type < 0 || red_payload_type > 127) {
4245     _engineStatisticsPtr->SetLastError(
4246         VE_PLTYPE_ERROR, kTraceError,
4247         "SetSecondarySendCodec() invalid RED payload type");
4248     return -1;
4249   }
4250
4251   if (SetRedPayloadType(red_payload_type) < 0) {
4252     _engineStatisticsPtr->SetLastError(
4253         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4254         "SetSecondarySendCodec() Failed to register RED ACM");
4255     return -1;
4256   }
4257   if (audio_coding_->RegisterSecondarySendCodec(codec) < 0) {
4258     _engineStatisticsPtr->SetLastError(
4259         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4260         "SetSecondarySendCodec() Failed to register secondary send codec in "
4261         "ACM");
4262     return -1;
4263   }
4264
4265   return 0;
4266 }
4267
4268 void Channel::RemoveSecondarySendCodec() {
4269   audio_coding_->UnregisterSecondarySendCodec();
4270 }
4271
4272 int Channel::GetSecondarySendCodec(CodecInst* codec) {
4273   if (audio_coding_->SecondarySendCodec(codec) < 0) {
4274     _engineStatisticsPtr->SetLastError(
4275         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4276         "GetSecondarySendCodec() Failed to get secondary sent codec from ACM");
4277     return -1;
4278   }
4279   return 0;
4280 }
4281
4282 // Assumes this method is called with a valid payload type.
4283 int Channel::SetRedPayloadType(int red_payload_type) {
4284   CodecInst codec;
4285   bool found_red = false;
4286
4287   // Get default RED settings from the ACM database
4288   const int num_codecs = AudioCodingModule::NumberOfCodecs();
4289   for (int idx = 0; idx < num_codecs; idx++) {
4290     audio_coding_->Codec(idx, &codec);
4291     if (!STR_CASE_CMP(codec.plname, "RED")) {
4292       found_red = true;
4293       break;
4294     }
4295   }
4296
4297   if (!found_red) {
4298     _engineStatisticsPtr->SetLastError(
4299         VE_CODEC_ERROR, kTraceError,
4300         "SetRedPayloadType() RED is not supported");
4301     return -1;
4302   }
4303
4304   codec.pltype = red_payload_type;
4305   if (audio_coding_->RegisterSendCodec(codec) < 0) {
4306     _engineStatisticsPtr->SetLastError(
4307         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4308         "SetRedPayloadType() RED registration in ACM module failed");
4309     return -1;
4310   }
4311
4312   if (_rtpRtcpModule->SetSendREDPayloadType(red_payload_type) != 0) {
4313     _engineStatisticsPtr->SetLastError(
4314         VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4315         "SetRedPayloadType() RED registration in RTP/RTCP module failed");
4316     return -1;
4317   }
4318   return 0;
4319 }
4320
4321 int Channel::SetSendRtpHeaderExtension(bool enable, RTPExtensionType type,
4322                                        unsigned char id) {
4323   int error = 0;
4324   _rtpRtcpModule->DeregisterSendRtpHeaderExtension(type);
4325   if (enable) {
4326     error = _rtpRtcpModule->RegisterSendRtpHeaderExtension(type, id);
4327   }
4328   return error;
4329 }
4330
4331 int32_t Channel::GetPlayoutFrequency() {
4332   int32_t playout_frequency = audio_coding_->PlayoutFrequency();
4333   CodecInst current_receive_codec;
4334   if (audio_coding_->ReceiveCodec(&current_receive_codec) == 0) {
4335     if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
4336       // Even though the actual sampling rate for G.722 audio is
4337       // 16,000 Hz, the RTP clock rate for the G722 payload format is
4338       // 8,000 Hz because that value was erroneously assigned in
4339       // RFC 1890 and must remain unchanged for backward compatibility.
4340       playout_frequency = 8000;
4341     } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
4342       // We are resampling Opus internally to 32,000 Hz until all our
4343       // DSP routines can operate at 48,000 Hz, but the RTP clock
4344       // rate for the Opus payload format is standardized to 48,000 Hz,
4345       // because that is the maximum supported decoding sampling rate.
4346       playout_frequency = 48000;
4347     }
4348   }
4349   return playout_frequency;
4350 }
4351
4352 int Channel::GetRTT() const {
4353   RTCPMethod method = _rtpRtcpModule->RTCP();
4354   if (method == kRtcpOff) {
4355     WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4356                  VoEId(_instanceId, _channelId),
4357                  "GetRTT() RTCP is disabled => valid RTT "
4358                  "measurements cannot be retrieved");
4359     return 0;
4360   }
4361   std::vector<RTCPReportBlock> report_blocks;
4362   _rtpRtcpModule->RemoteRTCPStat(&report_blocks);
4363   if (report_blocks.empty()) {
4364     WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4365                  VoEId(_instanceId, _channelId),
4366                  "GetRTT() failed to measure RTT since no "
4367                  "RTCP packets have been received yet");
4368     return 0;
4369   }
4370
4371   uint32_t remoteSSRC = rtp_receiver_->SSRC();
4372   std::vector<RTCPReportBlock>::const_iterator it = report_blocks.begin();
4373   for (; it != report_blocks.end(); ++it) {
4374     if (it->remoteSSRC == remoteSSRC)
4375       break;
4376   }
4377   if (it == report_blocks.end()) {
4378     // We have not received packets with SSRC matching the report blocks.
4379     // To calculate RTT we try with the SSRC of the first report block.
4380     // This is very important for send-only channels where we don't know
4381     // the SSRC of the other end.
4382     remoteSSRC = report_blocks[0].remoteSSRC;
4383   }
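       // The RTP/RTCP module derives RTT from the last SR timestamp (LSR) and
       // delay-since-last-SR (DLSR) fields reported back for |remoteSSRC|, as
       // described in RFC 3550 section 6.4.1.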
4384   uint16_t rtt = 0;
4385   uint16_t avg_rtt = 0;
4386   uint16_t max_rtt= 0;
4387   uint16_t min_rtt = 0;
4388   if (_rtpRtcpModule->RTT(remoteSSRC, &rtt, &avg_rtt, &min_rtt, &max_rtt)
4389       != 0) {
4390     WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4391                  VoEId(_instanceId, _channelId),
4392                  "GetRTT() failed to retrieve RTT from "
4393                  "the RTP/RTCP module");
4394     return 0;
4395   }
4396   return static_cast<int>(rtt);
4397 }
4398
4399 }  // namespace voe
4400 }  // namespace webrtc