Upstream version 6.35.121.0
[platform/framework/web/crosswalk.git] src/third_party/webrtc/voice_engine/channel.cc
1 /*
2  *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3  *
4  *  Use of this source code is governed by a BSD-style license
5  *  that can be found in the LICENSE file in the root of the source
6  *  tree. An additional intellectual property rights grant can be found
7  *  in the file PATENTS.  All contributing project authors may
8  *  be found in the AUTHORS file in the root of the source tree.
9  */
10
11 #include "webrtc/voice_engine/channel.h"
12
13 #include "webrtc/common.h"
14 #include "webrtc/modules/audio_device/include/audio_device.h"
15 #include "webrtc/modules/audio_processing/include/audio_processing.h"
16 #include "webrtc/modules/interface/module_common_types.h"
17 #include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
18 #include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
19 #include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
20 #include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
21 #include "webrtc/modules/utility/interface/audio_frame_operations.h"
22 #include "webrtc/modules/utility/interface/process_thread.h"
23 #include "webrtc/modules/utility/interface/rtp_dump.h"
24 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
25 #include "webrtc/system_wrappers/interface/logging.h"
26 #include "webrtc/system_wrappers/interface/trace.h"
27 #include "webrtc/video_engine/include/vie_network.h"
28 #include "webrtc/voice_engine/include/voe_base.h"
29 #include "webrtc/voice_engine/include/voe_external_media.h"
30 #include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
31 #include "webrtc/voice_engine/output_mixer.h"
32 #include "webrtc/voice_engine/statistics.h"
33 #include "webrtc/voice_engine/transmit_mixer.h"
34 #include "webrtc/voice_engine/utility.h"
35
36 #if defined(_WIN32)
37 #include <Qos.h>
38 #endif
39
40 namespace webrtc {
41 namespace voe {
42
43 // Extend the default RTCP statistics struct with max_jitter, defined as the
44 // maximum jitter value seen in an RTCP report block.
45 struct ChannelStatistics : public RtcpStatistics {
46   ChannelStatistics() : rtcp(), max_jitter(0) {}
47
48   RtcpStatistics rtcp;
49   uint32_t max_jitter;
50 };
51
52 // Statistics callback, called at each generation of a new RTCP report block.
53 class StatisticsProxy : public RtcpStatisticsCallback {
54  public:
55   StatisticsProxy(uint32_t ssrc)
56    : stats_lock_(CriticalSectionWrapper::CreateCriticalSection()),
57      ssrc_(ssrc) {}
58   virtual ~StatisticsProxy() {}
59
60   virtual void StatisticsUpdated(const RtcpStatistics& statistics,
61                                  uint32_t ssrc) OVERRIDE {
62     if (ssrc != ssrc_)
63       return;
64
65     CriticalSectionScoped cs(stats_lock_.get());
66     stats_.rtcp = statistics;
67     if (statistics.jitter > stats_.max_jitter) {
68       stats_.max_jitter = statistics.jitter;
69     }
70   }
71
72   void ResetStatistics() {
73     CriticalSectionScoped cs(stats_lock_.get());
74     stats_ = ChannelStatistics();
75   }
76
77   ChannelStatistics GetStats() {
78     CriticalSectionScoped cs(stats_lock_.get());
79     return stats_;
80   }
81
82  private:
83   // StatisticsUpdated calls are triggered from threads in the RTP module,
84   // while GetStats calls can be triggered from the public voice engine API,
85   // hence synchronization is needed.
86   scoped_ptr<CriticalSectionWrapper> stats_lock_;
87   const uint32_t ssrc_;
88   ChannelStatistics stats_;
89 };
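// Rough usage sketch, for reference only (the actual wiring is done in the
// Channel constructor further down in this file):
//
//   statistics_proxy_.reset(new StatisticsProxy(_rtpRtcpModule->SSRC()));
//   rtp_receive_statistics_->RegisterRtcpStatisticsCallback(
//       statistics_proxy_.get());
//   ...
//   ChannelStatistics stats = statistics_proxy_->GetStats();  // API thread.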
90
91 int32_t
92 Channel::SendData(FrameType frameType,
93                   uint8_t   payloadType,
94                   uint32_t  timeStamp,
95                   const uint8_t*  payloadData,
96                   uint16_t  payloadSize,
97                   const RTPFragmentationHeader* fragmentation)
98 {
99     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
100                  "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
101                  " payloadSize=%u, fragmentation=0x%x)",
102                  frameType, payloadType, timeStamp, payloadSize, fragmentation);
103
104     if (_includeAudioLevelIndication)
105     {
106         // Store current audio level in the RTP/RTCP module.
107         // The level will be used in combination with voice-activity state
108         // (frameType) to add an RTP header extension
109         _rtpRtcpModule->SetAudioLevel(rtp_audioproc_->level_estimator()->RMS());
110     }
111
112     // Push data from ACM to RTP/RTCP-module to deliver audio frame for
113     // packetization.
114     // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
115     if (_rtpRtcpModule->SendOutgoingData((FrameType&)frameType,
116                                         payloadType,
117                                         timeStamp,
118                                         // Leaving the time when this frame was
119                                         // received from the capture device as
120                                         // undefined for voice for now.
121                                         -1,
122                                         payloadData,
123                                         payloadSize,
124                                         fragmentation) == -1)
125     {
126         _engineStatisticsPtr->SetLastError(
127             VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
128             "Channel::SendData() failed to send data to RTP/RTCP module");
129         return -1;
130     }
131
132     _lastLocalTimeStamp = timeStamp;
133     _lastPayloadType = payloadType;
134
135     return 0;
136 }
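// A rough sketch of the send path implemented in this file (the transport
// callback is registered on the ACM in Channel::Init()):
//
//   audio_coding_ (ACM)  -->  Channel::SendData()
//     -->  _rtpRtcpModule->SendOutgoingData()
//     -->  Channel::SendPacket() / Channel::SendRTCPPacket()
//     -->  _transportPtr (external transport or WebRtc sockets)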
137
138 int32_t
139 Channel::InFrameType(int16_t frameType)
140 {
141     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
142                  "Channel::InFrameType(frameType=%d)", frameType);
143
144     CriticalSectionScoped cs(&_callbackCritSect);
145     // 1 indicates speech
146     _sendFrameType = (frameType == 1) ? 1 : 0;
147     return 0;
148 }
149
150 int32_t
151 Channel::OnRxVadDetected(int vadDecision)
152 {
153     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
154                  "Channel::OnRxVadDetected(vadDecision=%d)", vadDecision);
155
156     CriticalSectionScoped cs(&_callbackCritSect);
157     if (_rxVadObserverPtr)
158     {
159         _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
160     }
161
162     return 0;
163 }
164
165 int
166 Channel::SendPacket(int channel, const void *data, int len)
167 {
168     channel = VoEChannelId(channel);
169     assert(channel == _channelId);
170
171     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
172                  "Channel::SendPacket(channel=%d, len=%d)", channel, len);
173
174     CriticalSectionScoped cs(&_callbackCritSect);
175
176     if (_transportPtr == NULL)
177     {
178         WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
179                      "Channel::SendPacket() failed to send RTP packet due to"
180                      " invalid transport object");
181         return -1;
182     }
183
184     uint8_t* bufferToSendPtr = (uint8_t*)data;
185     int32_t bufferLength = len;
186
187     // Dump the RTP packet to a file (if RTP dump is enabled).
188     if (_rtpDumpOut.DumpPacket((const uint8_t*)data, len) == -1)
189     {
190         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
191                      VoEId(_instanceId,_channelId),
192                      "Channel::SendPacket() RTP dump to output file failed");
193     }
194
195     int n = _transportPtr->SendPacket(channel, bufferToSendPtr,
196                                       bufferLength);
197     if (n < 0) {
198       std::string transport_name =
199           _externalTransport ? "external transport" : "WebRtc sockets";
200       WEBRTC_TRACE(kTraceError, kTraceVoice,
201                    VoEId(_instanceId,_channelId),
202                    "Channel::SendPacket() RTP transmission using %s failed",
203                    transport_name.c_str());
204       return -1;
205     }
206     return n;
207 }
208
209 int
210 Channel::SendRTCPPacket(int channel, const void *data, int len)
211 {
212     channel = VoEChannelId(channel);
213     assert(channel == _channelId);
214
215     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
216                  "Channel::SendRTCPPacket(channel=%d, len=%d)", channel, len);
217
218     CriticalSectionScoped cs(&_callbackCritSect);
219     if (_transportPtr == NULL)
220     {
221         WEBRTC_TRACE(kTraceError, kTraceVoice,
222                      VoEId(_instanceId,_channelId),
223                      "Channel::SendRTCPPacket() failed to send RTCP packet"
224                      " due to invalid transport object");
225         return -1;
226     }
227
228     uint8_t* bufferToSendPtr = (uint8_t*)data;
229     int32_t bufferLength = len;
230
231     // Dump the RTCP packet to a file (if RTP dump is enabled).
232     if (_rtpDumpOut.DumpPacket((const uint8_t*)data, len) == -1)
233     {
234         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
235                      VoEId(_instanceId,_channelId),
236                      "Channel::SendRTCPPacket() RTCP dump to output file failed");
237     }
238
239     int n = _transportPtr->SendRTCPPacket(channel,
240                                           bufferToSendPtr,
241                                           bufferLength);
242     if (n < 0) {
243       std::string transport_name =
244           _externalTransport ? "external transport" : "WebRtc sockets";
245       WEBRTC_TRACE(kTraceInfo, kTraceVoice,
246                    VoEId(_instanceId,_channelId),
247                    "Channel::SendRTCPPacket() transmission using %s failed",
248                    transport_name.c_str());
249       return -1;
250     }
251     return n;
252 }
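// _transportPtr points to a webrtc::Transport implementation supplied by the
// user (external transport) or by the VoE socket layer. As an illustrative
// sketch only (the name MyTransport is hypothetical), such a transport simply
// overrides the two calls used above:
//
//   class MyTransport : public Transport {
//    public:
//     virtual int SendPacket(int channel, const void* data, int len) OVERRIDE;
//     virtual int SendRTCPPacket(int channel, const void* data, int len) OVERRIDE;
//   };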
253
254 void
255 Channel::OnPlayTelephoneEvent(int32_t id,
256                               uint8_t event,
257                               uint16_t lengthMs,
258                               uint8_t volume)
259 {
260     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
261                  "Channel::OnPlayTelephoneEvent(id=%d, event=%u, lengthMs=%u,"
262                  " volume=%u)", id, event, lengthMs, volume);
263
264     if (!_playOutbandDtmfEvent || (event > 15))
265     {
266         // Ignore callback since feedback is disabled or event is not a
267         // Dtmf tone event.
268         return;
269     }
270
271     assert(_outputMixerPtr != NULL);
272
273     // Start playing out the Dtmf tone (if playout is enabled).
274     // Reduce the tone length by 80 ms to reduce the risk of echo.
275     _outputMixerPtr->PlayDtmfTone(event, lengthMs - 80, volume);
276 }
277
278 void
279 Channel::OnIncomingSSRCChanged(int32_t id, uint32_t ssrc)
280 {
281     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
282                  "Channel::OnIncomingSSRCChanged(id=%d, SSRC=%d)",
283                  id, ssrc);
284
285     int32_t channel = VoEChannelId(id);
286     assert(channel == _channelId);
287
288     // Update ssrc so that NTP for AV sync can be updated.
289     _rtpRtcpModule->SetRemoteSSRC(ssrc);
290
291     if (_rtpObserver)
292     {
293         CriticalSectionScoped cs(&_callbackCritSect);
294
295         if (_rtpObserverPtr)
296         {
297             // Send new SSRC to registered observer using callback
298             _rtpObserverPtr->OnIncomingSSRCChanged(channel, ssrc);
299         }
300     }
301 }
302
303 void Channel::OnIncomingCSRCChanged(int32_t id,
304                                     uint32_t CSRC,
305                                     bool added)
306 {
307     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
308                  "Channel::OnIncomingCSRCChanged(id=%d, CSRC=%d, added=%d)",
309                  id, CSRC, added);
310
311     int32_t channel = VoEChannelId(id);
312     assert(channel == _channelId);
313
314     if (_rtpObserver)
315     {
316         CriticalSectionScoped cs(&_callbackCritSect);
317
318         if (_rtpObserverPtr)
319         {
320             _rtpObserverPtr->OnIncomingCSRCChanged(channel, CSRC, added);
321         }
322     }
323 }
324
325 void Channel::ResetStatistics(uint32_t ssrc) {
326   StreamStatistician* statistician =
327       rtp_receive_statistics_->GetStatistician(ssrc);
328   if (statistician) {
329     statistician->ResetStatistics();
330   }
331   statistics_proxy_->ResetStatistics();
332 }
333
334 void
335 Channel::OnApplicationDataReceived(int32_t id,
336                                    uint8_t subType,
337                                    uint32_t name,
338                                    uint16_t length,
339                                    const uint8_t* data)
340 {
341     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
342                  "Channel::OnApplicationDataReceived(id=%d, subType=%u,"
343                  " name=%u, length=%u)",
344                  id, subType, name, length);
345
346     int32_t channel = VoEChannelId(id);
347     assert(channel == _channelId);
348
349     if (_rtcpObserver)
350     {
351         CriticalSectionScoped cs(&_callbackCritSect);
352
353         if (_rtcpObserverPtr)
354         {
355             _rtcpObserverPtr->OnApplicationDataReceived(channel,
356                                                         subType,
357                                                         name,
358                                                         data,
359                                                         length);
360         }
361     }
362 }
363
364 int32_t
365 Channel::OnInitializeDecoder(
366     int32_t id,
367     int8_t payloadType,
368     const char payloadName[RTP_PAYLOAD_NAME_SIZE],
369     int frequency,
370     uint8_t channels,
371     uint32_t rate)
372 {
373     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
374                  "Channel::OnInitializeDecoder(id=%d, payloadType=%d, "
375                  "payloadName=%s, frequency=%u, channels=%u, rate=%u)",
376                  id, payloadType, payloadName, frequency, channels, rate);
377
378     assert(VoEChannelId(id) == _channelId);
379
380     CodecInst receiveCodec = {0};
381     CodecInst dummyCodec = {0};
382
383     receiveCodec.pltype = payloadType;
384     receiveCodec.plfreq = frequency;
385     receiveCodec.channels = channels;
386     receiveCodec.rate = rate;
387     strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
388
389     audio_coding_->Codec(payloadName, &dummyCodec, frequency, channels);
390     receiveCodec.pacsize = dummyCodec.pacsize;
391
392     // Register the new codec to the ACM
393     if (audio_coding_->RegisterReceiveCodec(receiveCodec) == -1)
394     {
395         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
396                      VoEId(_instanceId, _channelId),
397                      "Channel::OnInitializeDecoder() invalid codec ("
398                      "pt=%d, name=%s) received - 1", payloadType, payloadName);
399         _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
400         return -1;
401     }
402
403     return 0;
404 }
405
406 void
407 Channel::OnPacketTimeout(int32_t id)
408 {
409     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
410                  "Channel::OnPacketTimeout(id=%d)", id);
411
412     CriticalSectionScoped cs(_callbackCritSectPtr);
413     if (_voiceEngineObserverPtr)
414     {
415         if (channel_state_.Get().receiving || _externalTransport)
416         {
417             int32_t channel = VoEChannelId(id);
418             assert(channel == _channelId);
419             // Ensure that next OnReceivedPacket() callback will trigger
420             // a VE_PACKET_RECEIPT_RESTARTED callback.
421             _rtpPacketTimedOut = true;
422             // Deliver callback to the observer
423             WEBRTC_TRACE(kTraceInfo, kTraceVoice,
424                          VoEId(_instanceId,_channelId),
425                          "Channel::OnPacketTimeout() => "
426                          "CallbackOnError(VE_RECEIVE_PACKET_TIMEOUT)");
427             _voiceEngineObserverPtr->CallbackOnError(channel,
428                                                      VE_RECEIVE_PACKET_TIMEOUT);
429         }
430     }
431 }
432
433 void
434 Channel::OnReceivedPacket(int32_t id,
435                           RtpRtcpPacketType packetType)
436 {
437     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
438                  "Channel::OnReceivedPacket(id=%d, packetType=%d)",
439                  id, packetType);
440
441     assert(VoEChannelId(id) == _channelId);
442
443     // Notify only for the case when we have restarted an RTP session.
444     if (_rtpPacketTimedOut && (kPacketRtp == packetType))
445     {
446         CriticalSectionScoped cs(_callbackCritSectPtr);
447         if (_voiceEngineObserverPtr)
448         {
449             int32_t channel = VoEChannelId(id);
450             assert(channel == _channelId);
451             // Reset timeout mechanism
452             _rtpPacketTimedOut = false;
453             // Deliver callback to the observer
454             WEBRTC_TRACE(kTraceInfo, kTraceVoice,
455                          VoEId(_instanceId,_channelId),
456                          "Channel::OnReceivedPacket() =>"
457                          " CallbackOnError(VE_PACKET_RECEIPT_RESTARTED)");
458             _voiceEngineObserverPtr->CallbackOnError(
459                 channel,
460                 VE_PACKET_RECEIPT_RESTARTED);
461         }
462     }
463 }
464
465 void
466 Channel::OnPeriodicDeadOrAlive(int32_t id,
467                                RTPAliveType alive)
468 {
469     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
470                  "Channel::OnPeriodicDeadOrAlive(id=%d, alive=%d)", id, alive);
471
472     {
473         CriticalSectionScoped cs(&_callbackCritSect);
474         if (!_connectionObserver)
475             return;
476     }
477
478     int32_t channel = VoEChannelId(id);
479     assert(channel == _channelId);
480
481     // Use Alive as default to limit risk of false Dead detections
482     bool isAlive(true);
483
484     // Always mark the connection as Dead when the module reports kRtpDead
485     if (kRtpDead == alive)
486     {
487         isAlive = false;
488     }
489
490     // It is possible that the connection is alive even if no RTP packet has
491     // been received for a long time since the other side might use VAD/DTX
492     // and a low SID-packet update rate.
493     if ((kRtpNoRtp == alive) && channel_state_.Get().playing)
494     {
495         // Detect Alive for all NetEQ states except for the case when we are
496         // in PLC_CNG state.
497         // PLC_CNG <=> background noise only due to long expand or error.
498         // Note that the case where the other side stops sending during CNG
499         // state will be detected as Alive. Dead is not set until RTCP
500         // packets have been missing for at least twelve seconds (handled
501         // internally by the RTP/RTCP module).
502         isAlive = (_outputSpeechType != AudioFrame::kPLCCNG);
503     }
504
505     UpdateDeadOrAliveCounters(isAlive);
506
507     // Send callback to the registered observer
508     if (_connectionObserver)
509     {
510         CriticalSectionScoped cs(&_callbackCritSect);
511         if (_connectionObserverPtr)
512         {
513             _connectionObserverPtr->OnPeriodicDeadOrAlive(channel, isAlive);
514         }
515     }
516 }
517
518 int32_t
519 Channel::OnReceivedPayloadData(const uint8_t* payloadData,
520                                uint16_t payloadSize,
521                                const WebRtcRTPHeader* rtpHeader)
522 {
523     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
524                  "Channel::OnReceivedPayloadData(payloadSize=%d,"
525                  " payloadType=%u, audioChannel=%u)",
526                  payloadSize,
527                  rtpHeader->header.payloadType,
528                  rtpHeader->type.Audio.channel);
529
530     _lastRemoteTimeStamp = rtpHeader->header.timestamp;
531
532     if (!channel_state_.Get().playing)
533     {
534         // Avoid inserting into NetEQ when we are not playing. Count the
535         // packet as discarded.
536         WEBRTC_TRACE(kTraceStream, kTraceVoice,
537                      VoEId(_instanceId, _channelId),
538                      "received packet is discarded since playing is not"
539                      " activated");
540         _numberOfDiscardedPackets++;
541         return 0;
542     }
543
544     // Push the incoming payload (parsed and ready for decoding) into the ACM
545     if (audio_coding_->IncomingPacket(payloadData,
546                                       payloadSize,
547                                       *rtpHeader) != 0)
548     {
549         _engineStatisticsPtr->SetLastError(
550             VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
551             "Channel::OnReceivedPayloadData() unable to push data to the ACM");
552         return -1;
553     }
554
555     // Update the packet delay.
556     UpdatePacketDelay(rtpHeader->header.timestamp,
557                       rtpHeader->header.sequenceNumber);
558
559     uint16_t round_trip_time = 0;
560     _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time,
561                         NULL, NULL, NULL);
562
563     std::vector<uint16_t> nack_list = audio_coding_->GetNackList(
564         round_trip_time);
565     if (!nack_list.empty()) {
566       // Can't use nack_list.data() since it's not supported by all
567       // compilers.
568       ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
569     }
570     return 0;
571 }
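// Note on the NACK handling above: the round-trip time reported by the
// RTP/RTCP module is handed to the ACM, which returns the sequence numbers it
// still considers missing; those are then re-requested via ResendPackets().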
572
573 bool Channel::OnRecoveredPacket(const uint8_t* rtp_packet,
574                                 int rtp_packet_length) {
575   RTPHeader header;
576   if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
577     WEBRTC_TRACE(kTraceDebug, webrtc::kTraceVoice, _channelId,
578                  "IncomingPacket invalid RTP header");
579     return false;
580   }
581   header.payload_type_frequency =
582       rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
583   if (header.payload_type_frequency < 0)
584     return false;
585   return ReceivePacket(rtp_packet, rtp_packet_length, header, false);
586 }
587
588 int32_t Channel::GetAudioFrame(int32_t id, AudioFrame& audioFrame)
589 {
590     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
591                  "Channel::GetAudioFrame(id=%d)", id);
592
593     // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
594     if (audio_coding_->PlayoutData10Ms(audioFrame.sample_rate_hz_,
595                                        &audioFrame) == -1)
596     {
597         WEBRTC_TRACE(kTraceError, kTraceVoice,
598                      VoEId(_instanceId,_channelId),
599                      "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
600         // In all likelihood, the audio in this frame is garbage. We return an
601         // error so that the audio mixer module doesn't add it to the mix. As
602         // a result, it won't be played out and the actions skipped here are
603         // irrelevant.
604         return -1;
605     }
606
607     if (_RxVadDetection)
608     {
609         UpdateRxVadDetection(audioFrame);
610     }
611
612     // Convert module ID to internal VoE channel ID
613     audioFrame.id_ = VoEChannelId(audioFrame.id_);
614     // Store speech type for dead-or-alive detection
615     _outputSpeechType = audioFrame.speech_type_;
616
617     ChannelState::State state = channel_state_.Get();
618
619     if (state.rx_apm_is_enabled) {
620       int err = rx_audioproc_->ProcessStream(&audioFrame);
621       if (err) {
622         LOG(LS_ERROR) << "ProcessStream() error: " << err;
623         assert(false);
624       }
625     }
626
627     float output_gain = 1.0f;
628     float left_pan =  1.0f;
629     float right_pan =  1.0f;
630     {
631       CriticalSectionScoped cs(&volume_settings_critsect_);
632       output_gain = _outputGain;
633       left_pan = _panLeft;
634       right_pan = _panRight;
635     }
636
637     // Output volume scaling
638     if (output_gain < 0.99f || output_gain > 1.01f)
639     {
640         AudioFrameOperations::ScaleWithSat(output_gain, audioFrame);
641     }
642
643     // Scale left and/or right channel(s) if stereo and master balance is
644     // active
645
646     if (left_pan != 1.0f || right_pan != 1.0f)
647     {
648         if (audioFrame.num_channels_ == 1)
649         {
650             // Emulate stereo mode since panning is active.
651             // The mono signal is copied to both left and right channels here.
652             AudioFrameOperations::MonoToStereo(&audioFrame);
653         }
654         // For true stereo mode (when we are receiving a stereo signal), no
655         // action is needed.
656
657         // Do the panning operation (the audio frame contains stereo at this
658         // stage)
659         AudioFrameOperations::Scale(left_pan, right_pan, audioFrame);
660     }
661
662     // Mix decoded PCM output with file if file mixing is enabled
663     if (state.output_file_playing)
664     {
665         MixAudioWithFile(audioFrame, audioFrame.sample_rate_hz_);
666     }
667
668     // Place channel in on-hold state (~muted) if on-hold is activated
669     if (state.output_is_on_hold)
670     {
671         AudioFrameOperations::Mute(audioFrame);
672     }
673
674     // External media
675     if (_outputExternalMedia)
676     {
677         CriticalSectionScoped cs(&_callbackCritSect);
678         const bool isStereo = (audioFrame.num_channels_ == 2);
679         if (_outputExternalMediaCallbackPtr)
680         {
681             _outputExternalMediaCallbackPtr->Process(
682                 _channelId,
683                 kPlaybackPerChannel,
684                 (int16_t*)audioFrame.data_,
685                 audioFrame.samples_per_channel_,
686                 audioFrame.sample_rate_hz_,
687                 isStereo);
688         }
689     }
690
691     // Record playout if enabled
692     {
693         CriticalSectionScoped cs(&_fileCritSect);
694
695         if (_outputFileRecording && _outputFileRecorderPtr)
696         {
697             _outputFileRecorderPtr->RecordAudioToFile(audioFrame);
698         }
699     }
700
701     // Measure audio level (0-9)
702     _outputAudioLevel.ComputeLevel(audioFrame);
703
704     return 0;
705 }
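// For reference, the per-10 ms playout processing order in GetAudioFrame() is:
//   ACM decode -> RX VAD -> receive-side APM -> output gain scaling ->
//   panning -> file mixing -> on-hold mute -> external media callback ->
//   file recording -> output level measurement.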
706
707 int32_t
708 Channel::NeededFrequency(int32_t id)
709 {
710     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
711                  "Channel::NeededFrequency(id=%d)", id);
712
713     int highestNeeded = 0;
714
715     // Determine highest needed receive frequency
716     int32_t receiveFrequency = audio_coding_->ReceiveFrequency();
717
718     // Return the bigger of playout and receive frequency in the ACM.
719     if (audio_coding_->PlayoutFrequency() > receiveFrequency)
720     {
721         highestNeeded = audio_coding_->PlayoutFrequency();
722     }
723     else
724     {
725         highestNeeded = receiveFrequency;
726     }
727
728     // Special case: if we're playing a file on the playout side, we take
729     // that frequency into consideration as well.
730     // This is not needed on the sending side, since the codec will
731     // limit the spectrum anyway.
732     if (channel_state_.Get().output_file_playing)
733     {
734         CriticalSectionScoped cs(&_fileCritSect);
735         if (_outputFilePlayerPtr)
736         {
737             if (_outputFilePlayerPtr->Frequency() > highestNeeded)
738             {
739                 highestNeeded = _outputFilePlayerPtr->Frequency();
740             }
741         }
742     }
743
744     return highestNeeded;
745 }
746
747 int32_t
748 Channel::CreateChannel(Channel*& channel,
749                        int32_t channelId,
750                        uint32_t instanceId,
751                        const Config& config)
752 {
753     WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId,channelId),
754                  "Channel::CreateChannel(channelId=%d, instanceId=%d)",
755         channelId, instanceId);
756
757     channel = new Channel(channelId, instanceId, config);
758     if (channel == NULL)
759     {
760         WEBRTC_TRACE(kTraceMemory, kTraceVoice,
761                      VoEId(instanceId,channelId),
762                      "Channel::CreateChannel() unable to allocate memory for"
763                      " channel");
764         return -1;
765     }
766     return 0;
767 }
768
769 void
770 Channel::PlayNotification(int32_t id, uint32_t durationMs)
771 {
772     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
773                  "Channel::PlayNotification(id=%d, durationMs=%d)",
774                  id, durationMs);
775
776     // Not implemented yet.
777 }
778
779 void
780 Channel::RecordNotification(int32_t id, uint32_t durationMs)
781 {
782     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
783                  "Channel::RecordNotification(id=%d, durationMs=%d)",
784                  id, durationMs);
785
786     // Not implemented yet.
787 }
788
789 void
790 Channel::PlayFileEnded(int32_t id)
791 {
792     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
793                  "Channel::PlayFileEnded(id=%d)", id);
794
795     if (id == _inputFilePlayerId)
796     {
797         channel_state_.SetInputFilePlaying(false);
798         WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
799                      VoEId(_instanceId,_channelId),
800                      "Channel::PlayFileEnded() => input file player module is"
801                      " shutdown");
802     }
803     else if (id == _outputFilePlayerId)
804     {
805         channel_state_.SetOutputFilePlaying(false);
806         WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
807                      VoEId(_instanceId,_channelId),
808                      "Channel::PlayFileEnded() => output file player module is"
809                      " shutdown");
810     }
811 }
812
813 void
814 Channel::RecordFileEnded(int32_t id)
815 {
816     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
817                  "Channel::RecordFileEnded(id=%d)", id);
818
819     assert(id == _outputFileRecorderId);
820
821     CriticalSectionScoped cs(&_fileCritSect);
822
823     _outputFileRecording = false;
824     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
825                  VoEId(_instanceId,_channelId),
826                  "Channel::RecordFileEnded() => output file recorder module is"
827                  " shutdown");
828 }
829
830 Channel::Channel(int32_t channelId,
831                  uint32_t instanceId,
832                  const Config& config) :
833     _fileCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
834     _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
835     volume_settings_critsect_(*CriticalSectionWrapper::CreateCriticalSection()),
836     _instanceId(instanceId),
837     _channelId(channelId),
838     rtp_header_parser_(RtpHeaderParser::Create()),
839     rtp_payload_registry_(
840         new RTPPayloadRegistry(channelId,
841                                RTPPayloadStrategy::CreateStrategy(true))),
842     rtp_receive_statistics_(ReceiveStatistics::Create(
843         Clock::GetRealTimeClock())),
844     rtp_receiver_(RtpReceiver::CreateAudioReceiver(
845         VoEModuleId(instanceId, channelId), Clock::GetRealTimeClock(), this,
846         this, this, rtp_payload_registry_.get())),
847     telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
848     audio_coding_(config.Get<AudioCodingModuleFactory>().Create(
849         VoEModuleId(instanceId, channelId))),
850     _rtpDumpIn(*RtpDump::CreateRtpDump()),
851     _rtpDumpOut(*RtpDump::CreateRtpDump()),
852     _outputAudioLevel(),
853     _externalTransport(false),
854     _audioLevel_dBov(0),
855     _inputFilePlayerPtr(NULL),
856     _outputFilePlayerPtr(NULL),
857     _outputFileRecorderPtr(NULL),
858     // Avoid conflicts with other channels by adding the offsets 1024 - 1026;
859     // we will never use as many as 1024 channels.
860     _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
861     _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
862     _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
863     _outputFileRecording(false),
864     _inbandDtmfQueue(VoEModuleId(instanceId, channelId)),
865     _inbandDtmfGenerator(VoEModuleId(instanceId, channelId)),
866     _outputExternalMedia(false),
867     _inputExternalMediaCallbackPtr(NULL),
868     _outputExternalMediaCallbackPtr(NULL),
869     _timeStamp(0), // This is just an offset; the RTP module will add its own random offset.
870     _sendTelephoneEventPayloadType(106),
871     jitter_buffer_playout_timestamp_(0),
872     playout_timestamp_rtp_(0),
873     playout_timestamp_rtcp_(0),
874     playout_delay_ms_(0),
875     _numberOfDiscardedPackets(0),
876     send_sequence_number_(0),
877     _engineStatisticsPtr(NULL),
878     _outputMixerPtr(NULL),
879     _transmitMixerPtr(NULL),
880     _moduleProcessThreadPtr(NULL),
881     _audioDeviceModulePtr(NULL),
882     _voiceEngineObserverPtr(NULL),
883     _callbackCritSectPtr(NULL),
884     _transportPtr(NULL),
885     rx_audioproc_(AudioProcessing::Create(VoEModuleId(instanceId, channelId))),
886     _rxVadObserverPtr(NULL),
887     _oldVadDecision(-1),
888     _sendFrameType(0),
889     _rtpObserverPtr(NULL),
890     _rtcpObserverPtr(NULL),
891     _externalPlayout(false),
892     _externalMixing(false),
893     _inputIsOnHold(false),
894     _mixFileWithMicrophone(false),
895     _rtpObserver(false),
896     _rtcpObserver(false),
897     _mute(false),
898     _panLeft(1.0f),
899     _panRight(1.0f),
900     _outputGain(1.0f),
901     _playOutbandDtmfEvent(false),
902     _playInbandDtmfEvent(false),
903     _lastLocalTimeStamp(0),
904     _lastRemoteTimeStamp(0),
905     _lastPayloadType(0),
906     _includeAudioLevelIndication(false),
907     _rtpPacketTimedOut(false),
908     _rtpPacketTimeOutIsEnabled(false),
909     _rtpTimeOutSeconds(0),
910     _connectionObserver(false),
911     _connectionObserverPtr(NULL),
912     _countAliveDetections(0),
913     _countDeadDetections(0),
914     _outputSpeechType(AudioFrame::kNormalSpeech),
915     vie_network_(NULL),
916     video_channel_(-1),
917     _average_jitter_buffer_delay_us(0),
918     least_required_delay_ms_(0),
919     _previousTimestamp(0),
920     _recPacketDelayMs(20),
921     _RxVadDetection(false),
922     _rxAgcIsEnabled(false),
923     _rxNsIsEnabled(false),
924     restored_packet_in_use_(false)
925 {
926     WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
927                  "Channel::Channel() - ctor");
928     _inbandDtmfQueue.ResetDtmf();
929     _inbandDtmfGenerator.Init();
930     _outputAudioLevel.Clear();
931
932     RtpRtcp::Configuration configuration;
933     configuration.id = VoEModuleId(instanceId, channelId);
934     configuration.audio = true;
935     configuration.outgoing_transport = this;
936     configuration.rtcp_feedback = this;
937     configuration.audio_messages = this;
938     configuration.receive_statistics = rtp_receive_statistics_.get();
939
940     _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
941
942     statistics_proxy_.reset(new StatisticsProxy(_rtpRtcpModule->SSRC()));
943     rtp_receive_statistics_->RegisterRtcpStatisticsCallback(
944         statistics_proxy_.get());
945 }
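// The RtpRtcp::Configuration above registers this Channel as the module's
// outgoing transport, RTCP feedback target and audio-message callback, so
// SendPacket()/SendRTCPPacket() and the On*() callbacks earlier in this file
// are all driven by the single _rtpRtcpModule instance created here.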
946
947 Channel::~Channel()
948 {
949     rtp_receive_statistics_->RegisterRtcpStatisticsCallback(NULL);
950     WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
951                  "Channel::~Channel() - dtor");
952
953     if (_outputExternalMedia)
954     {
955         DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
956     }
957     if (channel_state_.Get().input_external_media)
958     {
959         DeRegisterExternalMediaProcessing(kRecordingPerChannel);
960     }
961     StopSend();
962     StopPlayout();
963
964     {
965         CriticalSectionScoped cs(&_fileCritSect);
966         if (_inputFilePlayerPtr)
967         {
968             _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
969             _inputFilePlayerPtr->StopPlayingFile();
970             FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
971             _inputFilePlayerPtr = NULL;
972         }
973         if (_outputFilePlayerPtr)
974         {
975             _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
976             _outputFilePlayerPtr->StopPlayingFile();
977             FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
978             _outputFilePlayerPtr = NULL;
979         }
980         if (_outputFileRecorderPtr)
981         {
982             _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
983             _outputFileRecorderPtr->StopRecording();
984             FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
985             _outputFileRecorderPtr = NULL;
986         }
987     }
988
989     // The order to safely shutdown modules in a channel is:
990     // 1. De-register callbacks in modules
991     // 2. De-register modules in process thread
992     // 3. Destroy modules
993     if (audio_coding_->RegisterTransportCallback(NULL) == -1)
994     {
995         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
996                      VoEId(_instanceId,_channelId),
997                      "~Channel() failed to de-register transport callback"
998                      " (Audio coding module)");
999     }
1000     if (audio_coding_->RegisterVADCallback(NULL) == -1)
1001     {
1002         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1003                      VoEId(_instanceId,_channelId),
1004                      "~Channel() failed to de-register VAD callback"
1005                      " (Audio coding module)");
1006     }
1007     // De-register modules in process thread
1008     if (_moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()) == -1)
1009     {
1010         WEBRTC_TRACE(kTraceInfo, kTraceVoice,
1011                      VoEId(_instanceId,_channelId),
1012                      "~Channel() failed to deregister RTP/RTCP module");
1013     }
1014     // End of modules shutdown
1015
1016     // Delete other objects
1017     if (vie_network_) {
1018       vie_network_->Release();
1019       vie_network_ = NULL;
1020     }
1021     RtpDump::DestroyRtpDump(&_rtpDumpIn);
1022     RtpDump::DestroyRtpDump(&_rtpDumpOut);
1023     delete &_callbackCritSect;
1024     delete &_fileCritSect;
1025     delete &volume_settings_critsect_;
1026 }
1027
1028 int32_t
1029 Channel::Init()
1030 {
1031     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1032                  "Channel::Init()");
1033
1034     channel_state_.Reset();
1035
1036     // --- Initial sanity
1037
1038     if ((_engineStatisticsPtr == NULL) ||
1039         (_moduleProcessThreadPtr == NULL))
1040     {
1041         WEBRTC_TRACE(kTraceError, kTraceVoice,
1042                      VoEId(_instanceId,_channelId),
1043                      "Channel::Init() must call SetEngineInformation() first");
1044         return -1;
1045     }
1046
1047     // --- Add modules to process thread (for periodic scheduling)
1048
1049     const bool processThreadFail =
1050         ((_moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get()) != 0) ||
1051         false);
1052     if (processThreadFail)
1053     {
1054         _engineStatisticsPtr->SetLastError(
1055             VE_CANNOT_INIT_CHANNEL, kTraceError,
1056             "Channel::Init() modules not registered");
1057         return -1;
1058     }
1059     // --- ACM initialization
1060
1061     if ((audio_coding_->InitializeReceiver() == -1) ||
1062 #ifdef WEBRTC_CODEC_AVT
1063         // out-of-band Dtmf tones are played out by default
1064         (audio_coding_->SetDtmfPlayoutStatus(true) == -1) ||
1065 #endif
1066         (audio_coding_->InitializeSender() == -1))
1067     {
1068         _engineStatisticsPtr->SetLastError(
1069             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1070             "Channel::Init() unable to initialize the ACM - 1");
1071         return -1;
1072     }
1073
1074     // --- RTP/RTCP module initialization
1075
1076     // Ensure that RTCP is enabled by default for the created channel.
1077     // Note that the module will keep generating RTCP until it is explicitly
1078     // disabled by the user.
1079     // After StopListen (when no sockets exist), RTCP packets will no longer
1080     // be transmitted since the Transport object will then be invalid.
1081     telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
1082     // RTCP is enabled by default.
1083     if (_rtpRtcpModule->SetRTCPStatus(kRtcpCompound) == -1)
1084     {
1085         _engineStatisticsPtr->SetLastError(
1086             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1087             "Channel::Init() RTP/RTCP module not initialized");
1088         return -1;
1089     }
1090
1091      // --- Register all permanent callbacks
1092     const bool fail =
1093         (audio_coding_->RegisterTransportCallback(this) == -1) ||
1094         (audio_coding_->RegisterVADCallback(this) == -1);
1095
1096     if (fail)
1097     {
1098         _engineStatisticsPtr->SetLastError(
1099             VE_CANNOT_INIT_CHANNEL, kTraceError,
1100             "Channel::Init() callbacks not registered");
1101         return -1;
1102     }
1103
1104     // --- Register all supported codecs to the receiving side of the
1105     // RTP/RTCP module
1106
1107     CodecInst codec;
1108     const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
1109
1110     for (int idx = 0; idx < nSupportedCodecs; idx++)
1111     {
1112         // Open up the RTP/RTCP receiver for all supported codecs
1113         if ((audio_coding_->Codec(idx, &codec) == -1) ||
1114             (rtp_receiver_->RegisterReceivePayload(
1115                 codec.plname,
1116                 codec.pltype,
1117                 codec.plfreq,
1118                 codec.channels,
1119                 (codec.rate < 0) ? 0 : codec.rate) == -1))
1120         {
1121             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1122                          VoEId(_instanceId,_channelId),
1123                          "Channel::Init() unable to register %s (%d/%d/%d/%d) "
1124                          "to RTP/RTCP receiver",
1125                          codec.plname, codec.pltype, codec.plfreq,
1126                          codec.channels, codec.rate);
1127         }
1128         else
1129         {
1130             WEBRTC_TRACE(kTraceInfo, kTraceVoice,
1131                          VoEId(_instanceId,_channelId),
1132                          "Channel::Init() %s (%d/%d/%d/%d) has been added to "
1133                          "the RTP/RTCP receiver",
1134                          codec.plname, codec.pltype, codec.plfreq,
1135                          codec.channels, codec.rate);
1136         }
1137
1138         // Ensure that PCMU is used as default codec on the sending side
1139         if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1))
1140         {
1141             SetSendCodec(codec);
1142         }
1143
1144         // Register default PT for outband 'telephone-event'
1145         if (!STR_CASE_CMP(codec.plname, "telephone-event"))
1146         {
1147             if ((_rtpRtcpModule->RegisterSendPayload(codec) == -1) ||
1148                 (audio_coding_->RegisterReceiveCodec(codec) == -1))
1149             {
1150                 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1151                              VoEId(_instanceId,_channelId),
1152                              "Channel::Init() failed to register outband "
1153                              "'telephone-event' (%d/%d) correctly",
1154                              codec.pltype, codec.plfreq);
1155             }
1156         }
1157
1158         if (!STR_CASE_CMP(codec.plname, "CN"))
1159         {
1160             if ((audio_coding_->RegisterSendCodec(codec) == -1) ||
1161                 (audio_coding_->RegisterReceiveCodec(codec) == -1) ||
1162                 (_rtpRtcpModule->RegisterSendPayload(codec) == -1))
1163             {
1164                 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1165                              VoEId(_instanceId,_channelId),
1166                              "Channel::Init() failed to register CN (%d/%d) "
1167                              "correctly - 1",
1168                              codec.pltype, codec.plfreq);
1169             }
1170         }
1171 #ifdef WEBRTC_CODEC_RED
1172         // Register RED to the receiving side of the ACM.
1173         // We will not receive an OnInitializeDecoder() callback for RED.
1174         if (!STR_CASE_CMP(codec.plname, "RED"))
1175         {
1176             if (audio_coding_->RegisterReceiveCodec(codec) == -1)
1177             {
1178                 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1179                              VoEId(_instanceId,_channelId),
1180                              "Channel::Init() failed to register RED (%d/%d) "
1181                              "correctly",
1182                              codec.pltype, codec.plfreq);
1183             }
1184         }
1185 #endif
1186     }
1187
1188     if (rx_audioproc_->noise_suppression()->set_level(kDefaultNsMode) != 0) {
1189       LOG_FERR1(LS_ERROR, noise_suppression()->set_level, kDefaultNsMode);
1190       return -1;
1191     }
1192     if (rx_audioproc_->gain_control()->set_mode(kDefaultRxAgcMode) != 0) {
1193       LOG_FERR1(LS_ERROR, gain_control()->set_mode, kDefaultRxAgcMode);
1194       return -1;
1195     }
1196
1197     return 0;
1198 }
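// Typical bring-up order for a channel, as implied by the checks in Init()
// (illustrative sketch only):
//
//   Channel* channel = NULL;
//   Channel::CreateChannel(channel, channelId, instanceId, config);
//   channel->SetEngineInformation(...);  // required before Init().
//   channel->Init();
//   channel->StartReceiving();
//   channel->StartPlayout();
//   channel->StartSend();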
1199
1200 int32_t
1201 Channel::SetEngineInformation(Statistics& engineStatistics,
1202                               OutputMixer& outputMixer,
1203                               voe::TransmitMixer& transmitMixer,
1204                               ProcessThread& moduleProcessThread,
1205                               AudioDeviceModule& audioDeviceModule,
1206                               VoiceEngineObserver* voiceEngineObserver,
1207                               CriticalSectionWrapper* callbackCritSect)
1208 {
1209     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1210                  "Channel::SetEngineInformation()");
1211     _engineStatisticsPtr = &engineStatistics;
1212     _outputMixerPtr = &outputMixer;
1213     _transmitMixerPtr = &transmitMixer;
1214     _moduleProcessThreadPtr = &moduleProcessThread;
1215     _audioDeviceModulePtr = &audioDeviceModule;
1216     _voiceEngineObserverPtr = voiceEngineObserver;
1217     _callbackCritSectPtr = callbackCritSect;
1218     return 0;
1219 }
1220
1221 int32_t
1222 Channel::UpdateLocalTimeStamp()
1223 {
1224
1225     _timeStamp += _audioFrame.samples_per_channel_;
1226     return 0;
1227 }
1228
1229 int32_t
1230 Channel::StartPlayout()
1231 {
1232     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1233                  "Channel::StartPlayout()");
1234     if (channel_state_.Get().playing)
1235     {
1236         return 0;
1237     }
1238
1239     if (!_externalMixing) {
1240         // Add participant as candidates for mixing.
1241         if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0)
1242         {
1243             _engineStatisticsPtr->SetLastError(
1244                 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1245                 "StartPlayout() failed to add participant to mixer");
1246             return -1;
1247         }
1248     }
1249
1250     channel_state_.SetPlaying(true);
1251     if (RegisterFilePlayingToMixer() != 0)
1252         return -1;
1253
1254     return 0;
1255 }
1256
1257 int32_t
1258 Channel::StopPlayout()
1259 {
1260     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1261                  "Channel::StopPlayout()");
1262     if (!channel_state_.Get().playing)
1263     {
1264         return 0;
1265     }
1266
1267     if (!_externalMixing) {
1268         // Remove participant as candidates for mixing
1269         if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0)
1270         {
1271             _engineStatisticsPtr->SetLastError(
1272                 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1273                 "StopPlayout() failed to remove participant from mixer");
1274             return -1;
1275         }
1276     }
1277
1278     channel_state_.SetPlaying(false);
1279     _outputAudioLevel.Clear();
1280
1281     return 0;
1282 }
1283
1284 int32_t
1285 Channel::StartSend()
1286 {
1287     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1288                  "Channel::StartSend()");
1289     // Resume the previous sequence number which was reset by StopSend().
1290     // This needs to be done before |sending| is set to true.
1291     if (send_sequence_number_)
1292       SetInitSequenceNumber(send_sequence_number_);
1293
1294     if (channel_state_.Get().sending)
1295     {
1296       return 0;
1297     }
1298     channel_state_.SetSending(true);
1299
1300     if (_rtpRtcpModule->SetSendingStatus(true) != 0)
1301     {
1302         _engineStatisticsPtr->SetLastError(
1303             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1304             "StartSend() RTP/RTCP failed to start sending");
1305         CriticalSectionScoped cs(&_callbackCritSect);
1306         channel_state_.SetSending(false);
1307         return -1;
1308     }
1309
1310     return 0;
1311 }
1312
1313 int32_t
1314 Channel::StopSend()
1315 {
1316     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1317                  "Channel::StopSend()");
1318     if (!channel_state_.Get().sending)
1319     {
1320       return 0;
1321     }
1322     channel_state_.SetSending(false);
1323
1324     // Store the sequence number to be able to pick up the same sequence for
1325     // the next StartSend(). This is needed when restarting the device; otherwise
1326     // it might cause libSRTP to complain about packets being replayed.
1327     // TODO(xians): Remove this workaround after RtpRtcpModule's refactoring
1328     // CL is landed. See issue
1329     // https://code.google.com/p/webrtc/issues/detail?id=2111 .
1330     send_sequence_number_ = _rtpRtcpModule->SequenceNumber();
1331
1332     // Reset the sending SSRC and sequence number, and trigger direct
1333     // transmission of an RTCP BYE.
1334     if (_rtpRtcpModule->SetSendingStatus(false) == -1 ||
1335         _rtpRtcpModule->ResetSendDataCountersRTP() == -1)
1336     {
1337         _engineStatisticsPtr->SetLastError(
1338             VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1339             "StopSend() RTP/RTCP failed to stop sending");
1340     }
1341
1342     return 0;
1343 }
1344
1345 int32_t
1346 Channel::StartReceiving()
1347 {
1348     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1349                  "Channel::StartReceiving()");
1350     if (channel_state_.Get().receiving)
1351     {
1352         return 0;
1353     }
1354     channel_state_.SetReceiving(true);
1355     _numberOfDiscardedPackets = 0;
1356     return 0;
1357 }
1358
1359 int32_t
1360 Channel::StopReceiving()
1361 {
1362     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1363                  "Channel::StopReceiving()");
1364     if (!channel_state_.Get().receiving)
1365     {
1366         return 0;
1367     }
1368
1369     // Recover DTMF detection status.
1370     telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
1371     RegisterReceiveCodecsToRTPModule();
1372     channel_state_.SetReceiving(false);
1373     return 0;
1374 }
1375
1376 int32_t
1377 Channel::SetNetEQPlayoutMode(NetEqModes mode)
1378 {
1379     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1380                  "Channel::SetNetEQPlayoutMode()");
1381     AudioPlayoutMode playoutMode(voice);
1382     switch (mode)
1383     {
1384         case kNetEqDefault:
1385             playoutMode = voice;
1386             break;
1387         case kNetEqStreaming:
1388             playoutMode = streaming;
1389             break;
1390         case kNetEqFax:
1391             playoutMode = fax;
1392             break;
1393         case kNetEqOff:
1394             playoutMode = off;
1395             break;
1396     }
1397     if (audio_coding_->SetPlayoutMode(playoutMode) != 0)
1398     {
1399         _engineStatisticsPtr->SetLastError(
1400             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1401             "SetNetEQPlayoutMode() failed to set playout mode");
1402         return -1;
1403     }
1404     return 0;
1405 }
1406
1407 int32_t
1408 Channel::GetNetEQPlayoutMode(NetEqModes& mode)
1409 {
1410     const AudioPlayoutMode playoutMode = audio_coding_->PlayoutMode();
1411     switch (playoutMode)
1412     {
1413         case voice:
1414             mode = kNetEqDefault;
1415             break;
1416         case streaming:
1417             mode = kNetEqStreaming;
1418             break;
1419         case fax:
1420             mode = kNetEqFax;
1421             break;
1422         case off:
1423             mode = kNetEqOff;
1424     }
1425     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
1426                  VoEId(_instanceId,_channelId),
1427                  "Channel::GetNetEQPlayoutMode() => mode=%u", mode);
1428     return 0;
1429 }
1430
1431 int32_t
1432 Channel::SetOnHoldStatus(bool enable, OnHoldModes mode)
1433 {
1434     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1435                  "Channel::SetOnHoldStatus()");
1436     if (mode == kHoldSendAndPlay)
1437     {
1438         channel_state_.SetOutputIsOnHold(enable);
1439         _inputIsOnHold = enable;
1440     }
1441     else if (mode == kHoldPlayOnly)
1442     {
1443         channel_state_.SetOutputIsOnHold(enable);
1444     }
1445     if (mode == kHoldSendOnly)
1446     {
1447         _inputIsOnHold = enable;
1448     }
1449     return 0;
1450 }
1451
1452 int32_t
1453 Channel::GetOnHoldStatus(bool& enabled, OnHoldModes& mode)
1454 {
1455     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1456                  "Channel::GetOnHoldStatus()");
1457     bool output_is_on_hold = channel_state_.Get().output_is_on_hold;
1458     enabled = (output_is_on_hold || _inputIsOnHold);
1459     if (output_is_on_hold && _inputIsOnHold)
1460     {
1461         mode = kHoldSendAndPlay;
1462     }
1463     else if (output_is_on_hold && !_inputIsOnHold)
1464     {
1465         mode = kHoldPlayOnly;
1466     }
1467     else if (!output_is_on_hold && _inputIsOnHold)
1468     {
1469         mode = kHoldSendOnly;
1470     }
1471     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1472                  "Channel::GetOnHoldStatus() => enabled=%d, mode=%d",
1473                  enabled, mode);
1474     return 0;
1475 }
1476
1477 int32_t
1478 Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)
1479 {
1480     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1481                  "Channel::RegisterVoiceEngineObserver()");
1482     CriticalSectionScoped cs(&_callbackCritSect);
1483
1484     if (_voiceEngineObserverPtr)
1485     {
1486         _engineStatisticsPtr->SetLastError(
1487             VE_INVALID_OPERATION, kTraceError,
1488             "RegisterVoiceEngineObserver() observer already enabled");
1489         return -1;
1490     }
1491     _voiceEngineObserverPtr = &observer;
1492     return 0;
1493 }
1494
1495 int32_t
1496 Channel::DeRegisterVoiceEngineObserver()
1497 {
1498     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1499                  "Channel::DeRegisterVoiceEngineObserver()");
1500     CriticalSectionScoped cs(&_callbackCritSect);
1501
1502     if (!_voiceEngineObserverPtr)
1503     {
1504         _engineStatisticsPtr->SetLastError(
1505             VE_INVALID_OPERATION, kTraceWarning,
1506             "DeRegisterVoiceEngineObserver() observer already disabled");
1507         return 0;
1508     }
1509     _voiceEngineObserverPtr = NULL;
1510     return 0;
1511 }
1512
1513 int32_t
1514 Channel::GetSendCodec(CodecInst& codec)
1515 {
1516     return (audio_coding_->SendCodec(&codec));
1517 }
1518
1519 int32_t
1520 Channel::GetRecCodec(CodecInst& codec)
1521 {
1522     return (audio_coding_->ReceiveCodec(&codec));
1523 }
1524
1525 int32_t
1526 Channel::SetSendCodec(const CodecInst& codec)
1527 {
1528     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1529                  "Channel::SetSendCodec()");
1530
1531     if (audio_coding_->RegisterSendCodec(codec) != 0)
1532     {
1533         WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1534                      "SetSendCodec() failed to register codec to ACM");
1535         return -1;
1536     }
1537
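    // If the first attempt to register the payload fails, de-register any
    // existing mapping for this payload type and retry once.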
1538     if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
1539     {
1540         _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1541         if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
1542         {
1543             WEBRTC_TRACE(
1544                     kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1545                     "SetSendCodec() failed to register codec to"
1546                     " RTP/RTCP module");
1547             return -1;
1548         }
1549     }
1550
1551     if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0)
1552     {
1553         WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1554                      "SetSendCodec() failed to set audio packet size");
1555         return -1;
1556     }
1557
1558     return 0;
1559 }
1560
1561 int32_t
1562 Channel::SetVADStatus(bool enableVAD, ACMVADMode mode, bool disableDTX)
1563 {
1564     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1565                  "Channel::SetVADStatus(mode=%d)", mode);
1566     // To disable VAD, DTX must be disabled too
1567     disableDTX = ((enableVAD == false) ? true : disableDTX);
1568     if (audio_coding_->SetVAD(!disableDTX, enableVAD, mode) != 0)
1569     {
1570         _engineStatisticsPtr->SetLastError(
1571             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1572             "SetVADStatus() failed to set VAD");
1573         return -1;
1574     }
1575     return 0;
1576 }
1577
1578 int32_t
1579 Channel::GetVADStatus(bool& enabledVAD, ACMVADMode& mode, bool& disabledDTX)
1580 {
1581     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1582                  "Channel::GetVADStatus");
1583     if (audio_coding_->VAD(&disabledDTX, &enabledVAD, &mode) != 0)
1584     {
1585         _engineStatisticsPtr->SetLastError(
1586             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1587             "GetVADStatus() failed to get VAD status");
1588         return -1;
1589     }
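    // ACM reports whether DTX is enabled; invert it so the out-parameter
    // reflects "DTX disabled" as this API expects.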
1590     disabledDTX = !disabledDTX;
1591     return 0;
1592 }
1593
1594 int32_t
1595 Channel::SetRecPayloadType(const CodecInst& codec)
1596 {
1597     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1598                  "Channel::SetRecPayloadType()");
1599
1600     if (channel_state_.Get().playing)
1601     {
1602         _engineStatisticsPtr->SetLastError(
1603             VE_ALREADY_PLAYING, kTraceError,
1604             "SetRecPayloadType() unable to set PT while playing");
1605         return -1;
1606     }
1607     if (channel_state_.Get().receiving)
1608     {
1609         _engineStatisticsPtr->SetLastError(
1610             VE_ALREADY_LISTENING, kTraceError,
1611             "SetRecPayloadType() unable to set PT while listening");
1612         return -1;
1613     }
1614
1615     if (codec.pltype == -1)
1616     {
1617         // De-register the selected codec (RTP/RTCP module and ACM)
1618
1619         int8_t pltype(-1);
1620         CodecInst rxCodec = codec;
1621
1622         // Get payload type for the given codec
1623         rtp_payload_registry_->ReceivePayloadType(
1624             rxCodec.plname,
1625             rxCodec.plfreq,
1626             rxCodec.channels,
1627             (rxCodec.rate < 0) ? 0 : rxCodec.rate,
1628             &pltype);
1629         rxCodec.pltype = pltype;
1630
1631         if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0)
1632         {
1633             _engineStatisticsPtr->SetLastError(
1634                     VE_RTP_RTCP_MODULE_ERROR,
1635                     kTraceError,
1636                     "SetRecPayloadType() RTP/RTCP-module deregistration "
1637                     "failed");
1638             return -1;
1639         }
1640         if (audio_coding_->UnregisterReceiveCodec(rxCodec.pltype) != 0)
1641         {
1642             _engineStatisticsPtr->SetLastError(
1643                 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1644                 "SetRecPayloadType() ACM deregistration failed - 1");
1645             return -1;
1646         }
1647         return 0;
1648     }
1649
1650     if (rtp_receiver_->RegisterReceivePayload(
1651         codec.plname,
1652         codec.pltype,
1653         codec.plfreq,
1654         codec.channels,
1655         (codec.rate < 0) ? 0 : codec.rate) != 0)
1656     {
1657         // First attempt to register failed => de-register and try again
1658         rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
1659         if (rtp_receiver_->RegisterReceivePayload(
1660             codec.plname,
1661             codec.pltype,
1662             codec.plfreq,
1663             codec.channels,
1664             (codec.rate < 0) ? 0 : codec.rate) != 0)
1665         {
1666             _engineStatisticsPtr->SetLastError(
1667                 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1668                 "SetRecPayloadType() RTP/RTCP-module registration failed");
1669             return -1;
1670         }
1671     }
1672     if (audio_coding_->RegisterReceiveCodec(codec) != 0)
1673     {
1674         audio_coding_->UnregisterReceiveCodec(codec.pltype);
1675         if (audio_coding_->RegisterReceiveCodec(codec) != 0)
1676         {
1677             _engineStatisticsPtr->SetLastError(
1678                 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1679                 "SetRecPayloadType() ACM registration failed - 1");
1680             return -1;
1681         }
1682     }
1683     return 0;
1684 }
1685
1686 int32_t
1687 Channel::GetRecPayloadType(CodecInst& codec)
1688 {
1689     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1690                  "Channel::GetRecPayloadType()");
1691     int8_t payloadType(-1);
1692     if (rtp_payload_registry_->ReceivePayloadType(
1693         codec.plname,
1694         codec.plfreq,
1695         codec.channels,
1696         (codec.rate < 0) ? 0 : codec.rate,
1697         &payloadType) != 0)
1698     {
1699         _engineStatisticsPtr->SetLastError(
1700             VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1701             "GetRecPayloadType() failed to retrieve RX payload type");
1702         return -1;
1703     }
1704     codec.pltype = payloadType;
1705     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1706                  "Channel::GetRecPayloadType() => pltype=%u", codec.pltype);
1707     return 0;
1708 }
1709
1710 int32_t
1711 Channel::SetAMREncFormat(AmrMode mode)
1712 {
1713     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1714                  "Channel::SetAMREncFormat()");
1715
1716     // ACM doesn't support AMR
1717     return -1;
1718 }
1719
1720 int32_t
1721 Channel::SetAMRDecFormat(AmrMode mode)
1722 {
1723     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1724                  "Channel::SetAMRDecFormat()");
1725
1726     // ACM doesn't support AMR
1727     return -1;
1728 }
1729
1730 int32_t
1731 Channel::SetAMRWbEncFormat(AmrMode mode)
1732 {
1733     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1734                  "Channel::SetAMRWbEncFormat()");
1735
1736     // ACM doesn't support AMR
1737     return -1;
1738
1739 }
1740
1741 int32_t
1742 Channel::SetAMRWbDecFormat(AmrMode mode)
1743 {
1744     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1745                  "Channel::SetAMRWbDecFormat()");
1746
1747     // ACM doesn't support AMR
1748     return -1;
1749 }
1750
1751 int32_t
1752 Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency)
1753 {
1754     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1755                  "Channel::SetSendCNPayloadType()");
1756
1757     CodecInst codec;
1758     int32_t samplingFreqHz(-1);
1759     const int kMono = 1;
1760     if (frequency == kFreq32000Hz)
1761         samplingFreqHz = 32000;
1762     else if (frequency == kFreq16000Hz)
1763         samplingFreqHz = 16000;
1764
1765     if (audio_coding_->Codec("CN", &codec, samplingFreqHz, kMono) == -1)
1766     {
1767         _engineStatisticsPtr->SetLastError(
1768             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1769             "SetSendCNPayloadType() failed to retrieve default CN codec "
1770             "settings");
1771         return -1;
1772     }
1773
1774     // Modify the payload type (must be set to dynamic range)
1775     codec.pltype = type;
1776
1777     if (audio_coding_->RegisterSendCodec(codec) != 0)
1778     {
1779         _engineStatisticsPtr->SetLastError(
1780             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1781             "SetSendCNPayloadType() failed to register CN to ACM");
1782         return -1;
1783     }
1784
1785     if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
1786     {
1787         _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1788         if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
1789         {
1790             _engineStatisticsPtr->SetLastError(
1791                 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1792                 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
1793                 "module");
1794             return -1;
1795         }
1796     }
1797     return 0;
1798 }
1799
1800 int32_t
1801 Channel::SetISACInitTargetRate(int rateBps, bool useFixedFrameSize)
1802 {
1803     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1804                  "Channel::SetISACInitTargetRate()");
1805
1806     CodecInst sendCodec;
1807     if (audio_coding_->SendCodec(&sendCodec) == -1)
1808     {
1809         _engineStatisticsPtr->SetLastError(
1810             VE_CODEC_ERROR, kTraceError,
1811             "SetISACInitTargetRate() failed to retrieve send codec");
1812         return -1;
1813     }
1814     if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
1815     {
1816         // This API is only valid if iSAC is setup to run in channel-adaptive
1817         // mode.
1818         // We do not validate the adaptive mode here. It is done later in the
1819         // ConfigISACBandwidthEstimator() API.
1820         _engineStatisticsPtr->SetLastError(
1821             VE_CODEC_ERROR, kTraceError,
1822             "SetISACInitTargetRate() send codec is not iSAC");
1823         return -1;
1824     }
1825
1826     uint8_t initFrameSizeMsec(0);
1827     if (16000 == sendCodec.plfreq)
1828     {
1829         // Note that 0 is a valid value and corresponds to "use default".
1830         if ((rateBps != 0 &&
1831             rateBps < kVoiceEngineMinIsacInitTargetRateBpsWb) ||
1832             (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsWb))
1833         {
1834              _engineStatisticsPtr->SetLastError(
1835                 VE_INVALID_ARGUMENT, kTraceError,
1836                 "SetISACInitTargetRate() invalid target rate - 1");
1837             return -1;
1838         }
1839         // pacsize is in samples; at 16 kHz, pacsize/16 gives the frame size in ms (30 or 60 ms).
1840         initFrameSizeMsec = (uint8_t)(sendCodec.pacsize / 16);
1841     }
1842     else if (32000 == sendCodec.plfreq)
1843     {
1844         if ((rateBps != 0 &&
1845             rateBps < kVoiceEngineMinIsacInitTargetRateBpsSwb) ||
1846             (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsSwb))
1847         {
1848             _engineStatisticsPtr->SetLastError(
1849                 VE_INVALID_ARGUMENT, kTraceError,
1850                 "SetISACInitTargetRate() invalid target rate - 2");
1851             return -1;
1852         }
1853         initFrameSizeMsec = (uint8_t)(sendCodec.pacsize / 32); // 30 ms at 32 kHz
1854     }
1855
1856     if (audio_coding_->ConfigISACBandwidthEstimator(
1857         initFrameSizeMsec, rateBps, useFixedFrameSize) == -1)
1858     {
1859         _engineStatisticsPtr->SetLastError(
1860             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1861             "SetISACInitTargetRate() iSAC BWE config failed");
1862         return -1;
1863     }
1864
1865     return 0;
1866 }
1867
1868 int32_t
1869 Channel::SetISACMaxRate(int rateBps)
1870 {
1871     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1872                  "Channel::SetISACMaxRate()");
1873
1874     CodecInst sendCodec;
1875     if (audio_coding_->SendCodec(&sendCodec) == -1)
1876     {
1877         _engineStatisticsPtr->SetLastError(
1878             VE_CODEC_ERROR, kTraceError,
1879             "SetISACMaxRate() failed to retrieve send codec");
1880         return -1;
1881     }
1882     if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
1883     {
1884         // This API is only valid if iSAC is selected as sending codec.
1885         _engineStatisticsPtr->SetLastError(
1886             VE_CODEC_ERROR, kTraceError,
1887             "SetISACMaxRate() send codec is not iSAC");
1888         return -1;
1889     }
1890     if (16000 == sendCodec.plfreq)
1891     {
1892         if ((rateBps < kVoiceEngineMinIsacMaxRateBpsWb) ||
1893             (rateBps > kVoiceEngineMaxIsacMaxRateBpsWb))
1894         {
1895             _engineStatisticsPtr->SetLastError(
1896                 VE_INVALID_ARGUMENT, kTraceError,
1897                 "SetISACMaxRate() invalid max rate - 1");
1898             return -1;
1899         }
1900     }
1901     else if (32000 == sendCodec.plfreq)
1902     {
1903         if ((rateBps < kVoiceEngineMinIsacMaxRateBpsSwb) ||
1904             (rateBps > kVoiceEngineMaxIsacMaxRateBpsSwb))
1905         {
1906             _engineStatisticsPtr->SetLastError(
1907                 VE_INVALID_ARGUMENT, kTraceError,
1908                 "SetISACMaxRate() invalid max rate - 2");
1909             return -1;
1910         }
1911     }
1912     if (channel_state_.Get().sending)
1913     {
1914         _engineStatisticsPtr->SetLastError(
1915             VE_SENDING, kTraceError,
1916             "SetISACMaxRate() unable to set max rate while sending");
1917         return -1;
1918     }
1919
1920     // Set the maximum instantaneous rate of iSAC (works for both adaptive
1921     // and non-adaptive mode)
1922     if (audio_coding_->SetISACMaxRate(rateBps) == -1)
1923     {
1924         _engineStatisticsPtr->SetLastError(
1925             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1926             "SetISACMaxRate() failed to set max rate");
1927         return -1;
1928     }
1929
1930     return 0;
1931 }
1932
1933 int32_t
1934 Channel::SetISACMaxPayloadSize(int sizeBytes)
1935 {
1936     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1937                  "Channel::SetISACMaxPayloadSize()");
1938     CodecInst sendCodec;
1939     if (audio_coding_->SendCodec(&sendCodec) == -1)
1940     {
1941         _engineStatisticsPtr->SetLastError(
1942             VE_CODEC_ERROR, kTraceError,
1943             "SetISACMaxPayloadSize() failed to retrieve send codec");
1944         return -1;
1945     }
1946     if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
1947     {
1948         _engineStatisticsPtr->SetLastError(
1949             VE_CODEC_ERROR, kTraceError,
1950             "SetISACMaxPayloadSize() send codec is not iSAC");
1951         return -1;
1952     }
1953     if (16000 == sendCodec.plfreq)
1954     {
1955         if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesWb) ||
1956             (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesWb))
1957         {
1958             _engineStatisticsPtr->SetLastError(
1959                 VE_INVALID_ARGUMENT, kTraceError,
1960                 "SetISACMaxPayloadSize() invalid max payload - 1");
1961             return -1;
1962         }
1963     }
1964     else if (32000 == sendCodec.plfreq)
1965     {
1966         if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesSwb) ||
1967             (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesSwb))
1968         {
1969             _engineStatisticsPtr->SetLastError(
1970                 VE_INVALID_ARGUMENT, kTraceError,
1971                 "SetISACMaxPayloadSize() invalid max payload - 2");
1972             return -1;
1973         }
1974     }
1975     if (channel_state_.Get().sending)
1976     {
1977         _engineStatisticsPtr->SetLastError(
1978             VE_SENDING, kTraceError,
1979             "SetISACMaxPayloadSize() unable to set max payload size while sending");
1980         return -1;
1981     }
1982
1983     if (audio_coding_->SetISACMaxPayloadSize(sizeBytes) == -1)
1984     {
1985         _engineStatisticsPtr->SetLastError(
1986             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1987             "SetISACMaxPayloadSize() failed to set max payload size");
1988         return -1;
1989     }
1990     return 0;
1991 }
1992
1993 int32_t Channel::RegisterExternalTransport(Transport& transport)
1994 {
1995     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1996                "Channel::RegisterExternalTransport()");
1997
1998     CriticalSectionScoped cs(&_callbackCritSect);
1999
2000     if (_externalTransport)
2001     {
2002         _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION,
2003                                            kTraceError,
2004               "RegisterExternalTransport() external transport already enabled");
2005        return -1;
2006     }
2007     _externalTransport = true;
2008     _transportPtr = &transport;
2009     return 0;
2010 }
2011
2012 int32_t
2013 Channel::DeRegisterExternalTransport()
2014 {
2015     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2016                  "Channel::DeRegisterExternalTransport()");
2017
2018     CriticalSectionScoped cs(&_callbackCritSect);
2019
2020     if (!_transportPtr)
2021     {
2022         _engineStatisticsPtr->SetLastError(
2023             VE_INVALID_OPERATION, kTraceWarning,
2024             "DeRegisterExternalTransport() external transport already "
2025             "disabled");
2026         return 0;
2027     }
2028     _externalTransport = false;
2029     _transportPtr = NULL;
2030     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2031                  "DeRegisterExternalTransport() external transport is disabled");
2032     return 0;
2033 }
2034
2035 int32_t Channel::ReceivedRTPPacket(const int8_t* data, int32_t length,
2036                                    const PacketTime& packet_time) {
2037   WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2038                "Channel::ReceivedRTPPacket()");
2039
2040   // Store playout timestamp for the received RTP packet
2041   UpdatePlayoutTimestamp(false);
2042
2043   // Dump the RTP packet to a file (if RTP dump is enabled).
2044   if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
2045                             (uint16_t)length) == -1) {
2046     WEBRTC_TRACE(kTraceWarning, kTraceVoice,
2047                  VoEId(_instanceId,_channelId),
2048                  "Channel::ReceivedRTPPacket() RTP dump to input file failed");
2049   }
2050   const uint8_t* received_packet = reinterpret_cast<const uint8_t*>(data);
2051   RTPHeader header;
2052   if (!rtp_header_parser_->Parse(received_packet, length, &header)) {
2053     WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
2054                  "Incoming packet: invalid RTP header");
2055     return -1;
2056   }
2057   header.payload_type_frequency =
2058       rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
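  // A negative frequency means the payload type is not registered; drop the packet.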
2059   if (header.payload_type_frequency < 0)
2060     return -1;
2061   bool in_order = IsPacketInOrder(header);
2062   rtp_receive_statistics_->IncomingPacket(header, length,
2063       IsPacketRetransmitted(header, in_order));
2064   rtp_payload_registry_->SetIncomingPayloadType(header);
2065
2066   // Forward any packets to ViE bandwidth estimator, if enabled.
2067   {
2068     CriticalSectionScoped cs(&_callbackCritSect);
2069     if (vie_network_) {
2070       int64_t arrival_time_ms;
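      // packet_time.timestamp is in microseconds; round to the nearest millisecond.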
2071       if (packet_time.timestamp != -1) {
2072         arrival_time_ms = (packet_time.timestamp + 500) / 1000;
2073       } else {
2074         arrival_time_ms = TickTime::MillisecondTimestamp();
2075       }
2076       int payload_length = length - header.headerLength;
2077       vie_network_->ReceivedBWEPacket(video_channel_, arrival_time_ms,
2078                                       payload_length, header);
2079     }
2080   }
2081
2082   return ReceivePacket(received_packet, length, header, in_order) ? 0 : -1;
2083 }
2084
2085 bool Channel::ReceivePacket(const uint8_t* packet,
2086                             int packet_length,
2087                             const RTPHeader& header,
2088                             bool in_order) {
2089   if (rtp_payload_registry_->IsEncapsulated(header)) {
2090     return HandleEncapsulation(packet, packet_length, header);
2091   }
2092   const uint8_t* payload = packet + header.headerLength;
2093   int payload_length = packet_length - header.headerLength;
2094   assert(payload_length >= 0);
2095   PayloadUnion payload_specific;
2096   if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
2097                                                   &payload_specific)) {
2098     return false;
2099   }
2100   return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
2101                                           payload_specific, in_order);
2102 }
2103
2104 bool Channel::HandleEncapsulation(const uint8_t* packet,
2105                                   int packet_length,
2106                                   const RTPHeader& header) {
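  // Only RTX-encapsulated packets are handled here; any other encapsulation is rejected.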
2107   if (!rtp_payload_registry_->IsRtx(header))
2108     return false;
2109
2110   // Remove the RTX header and parse the original RTP header.
2111   if (packet_length < header.headerLength)
2112     return false;
2113   if (packet_length > kVoiceEngineMaxIpPacketSizeBytes)
2114     return false;
2115   if (restored_packet_in_use_) {
2116     WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
2117                  "Multiple RTX headers detected, dropping packet");
2118     return false;
2119   }
2120   uint8_t* restored_packet_ptr = restored_packet_;
2121   if (!rtp_payload_registry_->RestoreOriginalPacket(
2122       &restored_packet_ptr, packet, &packet_length, rtp_receiver_->SSRC(),
2123       header)) {
2124     WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
2125                  "Incoming RTX packet: invalid RTP header");
2126     return false;
2127   }
2128   restored_packet_in_use_ = true;
2129   bool ret = OnRecoveredPacket(restored_packet_ptr, packet_length);
2130   restored_packet_in_use_ = false;
2131   return ret;
2132 }
2133
2134 bool Channel::IsPacketInOrder(const RTPHeader& header) const {
2135   StreamStatistician* statistician =
2136       rtp_receive_statistics_->GetStatistician(header.ssrc);
2137   if (!statistician)
2138     return false;
2139   return statistician->IsPacketInOrder(header.sequenceNumber);
2140 }
2141
2142 bool Channel::IsPacketRetransmitted(const RTPHeader& header,
2143                                     bool in_order) const {
2144   // Retransmissions are handled separately if RTX is enabled.
2145   if (rtp_payload_registry_->RtxEnabled())
2146     return false;
2147   StreamStatistician* statistician =
2148       rtp_receive_statistics_->GetStatistician(header.ssrc);
2149   if (!statistician)
2150     return false;
2151   // Check if this is a retransmission, using the minimum RTT as a guide.
2152   uint16_t min_rtt = 0;
2153   _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
2154   return !in_order &&
2155       statistician->IsRetransmitOfOldPacket(header, min_rtt);
2156 }
2157
2158 int32_t Channel::ReceivedRTCPPacket(const int8_t* data, int32_t length) {
2159   WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2160                "Channel::ReceivedRTCPPacket()");
2161   // Store playout timestamp for the received RTCP packet
2162   UpdatePlayoutTimestamp(true);
2163
2164   // Dump the RTCP packet to a file (if RTP dump is enabled).
2165   if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
2166                             (uint16_t)length) == -1) {
2167     WEBRTC_TRACE(kTraceWarning, kTraceVoice,
2168                  VoEId(_instanceId,_channelId),
2169                  "Channel::ReceivedRTCPPacket() RTCP dump to input file failed");
2170   }
2171
2172   // Deliver RTCP packet to RTP/RTCP module for parsing
2173   if (_rtpRtcpModule->IncomingRtcpPacket((const uint8_t*)data,
2174                                          (uint16_t)length) == -1) {
2175     _engineStatisticsPtr->SetLastError(
2176         VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
2177         "Channel::ReceivedRTCPPacket() RTCP packet is invalid");
2178   }
2179   return 0;
2180 }
2181
2182 int Channel::StartPlayingFileLocally(const char* fileName,
2183                                      bool loop,
2184                                      FileFormats format,
2185                                      int startPosition,
2186                                      float volumeScaling,
2187                                      int stopPosition,
2188                                      const CodecInst* codecInst)
2189 {
2190     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2191                  "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
2192                  " format=%d, volumeScaling=%5.3f, startPosition=%d, "
2193                  "stopPosition=%d)", fileName, loop, format, volumeScaling,
2194                  startPosition, stopPosition);
2195
2196     if (channel_state_.Get().output_file_playing)
2197     {
2198         _engineStatisticsPtr->SetLastError(
2199             VE_ALREADY_PLAYING, kTraceError,
2200             "StartPlayingFileLocally() is already playing");
2201         return -1;
2202     }
2203
2204     {
2205         CriticalSectionScoped cs(&_fileCritSect);
2206
2207         if (_outputFilePlayerPtr)
2208         {
2209             _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2210             FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2211             _outputFilePlayerPtr = NULL;
2212         }
2213
2214         _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2215             _outputFilePlayerId, (const FileFormats)format);
2216
2217         if (_outputFilePlayerPtr == NULL)
2218         {
2219             _engineStatisticsPtr->SetLastError(
2220                 VE_INVALID_ARGUMENT, kTraceError,
2221                 "StartPlayingFileLocally() filePlayer format is not correct");
2222             return -1;
2223         }
2224
2225         const uint32_t notificationTime(0);
2226
2227         if (_outputFilePlayerPtr->StartPlayingFile(
2228                 fileName,
2229                 loop,
2230                 startPosition,
2231                 volumeScaling,
2232                 notificationTime,
2233                 stopPosition,
2234                 (const CodecInst*)codecInst) != 0)
2235         {
2236             _engineStatisticsPtr->SetLastError(
2237                 VE_BAD_FILE, kTraceError,
2238                 "StartPlayingFile() failed to start file playout");
2239             _outputFilePlayerPtr->StopPlayingFile();
2240             FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2241             _outputFilePlayerPtr = NULL;
2242             return -1;
2243         }
2244         _outputFilePlayerPtr->RegisterModuleFileCallback(this);
2245         channel_state_.SetOutputFilePlaying(true);
2246     }
2247
2248     if (RegisterFilePlayingToMixer() != 0)
2249         return -1;
2250
2251     return 0;
2252 }
2253
2254 int Channel::StartPlayingFileLocally(InStream* stream,
2255                                      FileFormats format,
2256                                      int startPosition,
2257                                      float volumeScaling,
2258                                      int stopPosition,
2259                                      const CodecInst* codecInst)
2260 {
2261     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2262                  "Channel::StartPlayingFileLocally(format=%d,"
2263                  " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2264                  format, volumeScaling, startPosition, stopPosition);
2265
2266     if (stream == NULL)
2267     {
2268         _engineStatisticsPtr->SetLastError(
2269             VE_BAD_FILE, kTraceError,
2270             "StartPlayingFileLocally() NULL as input stream");
2271         return -1;
2272     }
2273
2274
2275     if (channel_state_.Get().output_file_playing)
2276     {
2277         _engineStatisticsPtr->SetLastError(
2278             VE_ALREADY_PLAYING, kTraceError,
2279             "StartPlayingFileLocally() is already playing");
2280         return -1;
2281     }
2282
2283     {
2284       CriticalSectionScoped cs(&_fileCritSect);
2285
2286       // Destroy the old instance
2287       if (_outputFilePlayerPtr)
2288       {
2289           _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2290           FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2291           _outputFilePlayerPtr = NULL;
2292       }
2293
2294       // Create the instance
2295       _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2296           _outputFilePlayerId,
2297           (const FileFormats)format);
2298
2299       if (_outputFilePlayerPtr == NULL)
2300       {
2301           _engineStatisticsPtr->SetLastError(
2302               VE_INVALID_ARGUMENT, kTraceError,
2303               "StartPlayingFileLocally() filePlayer format is not correct");
2304           return -1;
2305       }
2306
2307       const uint32_t notificationTime(0);
2308
2309       if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2310                                                  volumeScaling,
2311                                                  notificationTime,
2312                                                  stopPosition, codecInst) != 0)
2313       {
2314           _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2315                                              "StartPlayingFile() failed to "
2316                                              "start file playout");
2317           _outputFilePlayerPtr->StopPlayingFile();
2318           FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2319           _outputFilePlayerPtr = NULL;
2320           return -1;
2321       }
2322       _outputFilePlayerPtr->RegisterModuleFileCallback(this);
2323       channel_state_.SetOutputFilePlaying(true);
2324     }
2325
2326     if (RegisterFilePlayingToMixer() != 0)
2327         return -1;
2328
2329     return 0;
2330 }
2331
2332 int Channel::StopPlayingFileLocally()
2333 {
2334     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2335                  "Channel::StopPlayingFileLocally()");
2336
2337     if (!channel_state_.Get().output_file_playing)
2338     {
2339         _engineStatisticsPtr->SetLastError(
2340             VE_INVALID_OPERATION, kTraceWarning,
2341             "StopPlayingFileLocally() is not playing");
2342         return 0;
2343     }
2344
2345     {
2346         CriticalSectionScoped cs(&_fileCritSect);
2347
2348         if (_outputFilePlayerPtr->StopPlayingFile() != 0)
2349         {
2350             _engineStatisticsPtr->SetLastError(
2351                 VE_STOP_RECORDING_FAILED, kTraceError,
2352                 "StopPlayingFile() could not stop playing");
2353             return -1;
2354         }
2355         _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2356         FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2357         _outputFilePlayerPtr = NULL;
2358         channel_state_.SetOutputFilePlaying(false);
2359     }
2360     // _fileCritSect cannot be taken while calling
2361     // SetAnonymousMixabilityStatus. Refer to comments in
2362     // StartPlayingFileLocally(const char* ...) for more details.
2363     if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0)
2364     {
2365         _engineStatisticsPtr->SetLastError(
2366             VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
2367             "StopPlayingFile() failed to stop participant from playing as"
2368             " file in the mixer");
2369         return -1;
2370     }
2371
2372     return 0;
2373 }
2374
2375 int Channel::IsPlayingFileLocally() const
2376 {
2377     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2378                  "Channel::IsPlayingFileLocally()");
2379
2380     return channel_state_.Get().output_file_playing;
2381 }
2382
2383 int Channel::RegisterFilePlayingToMixer()
2384 {
2385     // Return success without registering file playing with the mixer if:
2386     // 1. the file starts playing before playout has started on this channel, or
2387     // 2. playout starts while no file is playing on this channel.
2388     if (!channel_state_.Get().playing ||
2389         !channel_state_.Get().output_file_playing)
2390     {
2391         return 0;
2392     }
2393
2394     // |_fileCritSect| cannot be taken while calling
2395     // SetAnonymousMixabilityStatus() since as soon as the participant is added
2396     // frames can be pulled by the mixer. Since the frames are generated from
2397     // the file, _fileCritSect will be taken. This would result in a deadlock.
2398     if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0)
2399     {
2400         channel_state_.SetOutputFilePlaying(false);
2401         CriticalSectionScoped cs(&_fileCritSect);
2402         _engineStatisticsPtr->SetLastError(
2403             VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
2404             "StartPlayingFile() failed to add participant as file to mixer");
2405         _outputFilePlayerPtr->StopPlayingFile();
2406         FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2407         _outputFilePlayerPtr = NULL;
2408         return -1;
2409     }
2410
2411     return 0;
2412 }
2413
2414 int Channel::ScaleLocalFilePlayout(float scale)
2415 {
2416     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2417                  "Channel::ScaleLocalFilePlayout(scale=%5.3f)", scale);
2418
2419     CriticalSectionScoped cs(&_fileCritSect);
2420
2421     if (!channel_state_.Get().output_file_playing)
2422     {
2423         _engineStatisticsPtr->SetLastError(
2424             VE_INVALID_OPERATION, kTraceError,
2425             "ScaleLocalFilePlayout() is not playing");
2426         return -1;
2427     }
2428     if ((_outputFilePlayerPtr == NULL) ||
2429         (_outputFilePlayerPtr->SetAudioScaling(scale) != 0))
2430     {
2431         _engineStatisticsPtr->SetLastError(
2432             VE_BAD_ARGUMENT, kTraceError,
2433             "SetAudioScaling() failed to scale the playout");
2434         return -1;
2435     }
2436
2437     return 0;
2438 }
2439
2440 int Channel::GetLocalPlayoutPosition(int& positionMs)
2441 {
2442     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2443                  "Channel::GetLocalPlayoutPosition(position=?)");
2444
2445     uint32_t position;
2446
2447     CriticalSectionScoped cs(&_fileCritSect);
2448
2449     if (_outputFilePlayerPtr == NULL)
2450     {
2451         _engineStatisticsPtr->SetLastError(
2452             VE_INVALID_OPERATION, kTraceError,
2453             "GetLocalPlayoutPosition() filePlayer instance does not exist");
2454         return -1;
2455     }
2456
2457     if (_outputFilePlayerPtr->GetPlayoutPosition(position) != 0)
2458     {
2459         _engineStatisticsPtr->SetLastError(
2460             VE_BAD_FILE, kTraceError,
2461             "GetLocalPlayoutPosition() failed");
2462         return -1;
2463     }
2464     positionMs = position;
2465
2466     return 0;
2467 }
2468
2469 int Channel::StartPlayingFileAsMicrophone(const char* fileName,
2470                                           bool loop,
2471                                           FileFormats format,
2472                                           int startPosition,
2473                                           float volumeScaling,
2474                                           int stopPosition,
2475                                           const CodecInst* codecInst)
2476 {
2477     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2478                  "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
2479                  "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
2480                  "stopPosition=%d)", fileName, loop, format, volumeScaling,
2481                  startPosition, stopPosition);
2482
2483     CriticalSectionScoped cs(&_fileCritSect);
2484
2485     if (channel_state_.Get().input_file_playing)
2486     {
2487         _engineStatisticsPtr->SetLastError(
2488             VE_ALREADY_PLAYING, kTraceWarning,
2489             "StartPlayingFileAsMicrophone() filePlayer is playing");
2490         return 0;
2491     }
2492
2493     // Destroy the old instance
2494     if (_inputFilePlayerPtr)
2495     {
2496         _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2497         FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2498         _inputFilePlayerPtr = NULL;
2499     }
2500
2501     // Create the instance
2502     _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2503         _inputFilePlayerId, (const FileFormats)format);
2504
2505     if (_inputFilePlayerPtr == NULL)
2506     {
2507         _engineStatisticsPtr->SetLastError(
2508             VE_INVALID_ARGUMENT, kTraceError,
2509             "StartPlayingFileAsMicrophone() filePlayer format is not correct");
2510         return -1;
2511     }
2512
2513     const uint32_t notificationTime(0);
2514
2515     if (_inputFilePlayerPtr->StartPlayingFile(
2516         fileName,
2517         loop,
2518         startPosition,
2519         volumeScaling,
2520         notificationTime,
2521         stopPosition,
2522         (const CodecInst*)codecInst) != 0)
2523     {
2524         _engineStatisticsPtr->SetLastError(
2525             VE_BAD_FILE, kTraceError,
2526             "StartPlayingFile() failed to start file playout");
2527         _inputFilePlayerPtr->StopPlayingFile();
2528         FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2529         _inputFilePlayerPtr = NULL;
2530         return -1;
2531     }
2532     _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2533     channel_state_.SetInputFilePlaying(true);
2534
2535     return 0;
2536 }
2537
2538 int Channel::StartPlayingFileAsMicrophone(InStream* stream,
2539                                           FileFormats format,
2540                                           int startPosition,
2541                                           float volumeScaling,
2542                                           int stopPosition,
2543                                           const CodecInst* codecInst)
2544 {
2545     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2546                  "Channel::StartPlayingFileAsMicrophone(format=%d, "
2547                  "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2548                  format, volumeScaling, startPosition, stopPosition);
2549
2550     if (stream == NULL)
2551     {
2552         _engineStatisticsPtr->SetLastError(
2553             VE_BAD_FILE, kTraceError,
2554             "StartPlayingFileAsMicrophone() NULL as input stream");
2555         return -1;
2556     }
2557
2558     CriticalSectionScoped cs(&_fileCritSect);
2559
2560     if (channel_state_.Get().input_file_playing)
2561     {
2562         _engineStatisticsPtr->SetLastError(
2563             VE_ALREADY_PLAYING, kTraceWarning,
2564             "StartPlayingFileAsMicrophone() is playing");
2565         return 0;
2566     }
2567
2568     // Destroy the old instance
2569     if (_inputFilePlayerPtr)
2570     {
2571         _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2572         FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2573         _inputFilePlayerPtr = NULL;
2574     }
2575
2576     // Create the instance
2577     _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2578         _inputFilePlayerId, (const FileFormats)format);
2579
2580     if (_inputFilePlayerPtr == NULL)
2581     {
2582         _engineStatisticsPtr->SetLastError(
2583             VE_INVALID_ARGUMENT, kTraceError,
2584             "StartPlayingFileAsMicrophone() filePlayer format is not correct");
2585         return -1;
2586     }
2587
2588     const uint32_t notificationTime(0);
2589
2590     if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2591                                               volumeScaling, notificationTime,
2592                                               stopPosition, codecInst) != 0)
2593     {
2594         _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2595                                            "StartPlayingFile() failed to start "
2596                                            "file playout");
2597         _inputFilePlayerPtr->StopPlayingFile();
2598         FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2599         _inputFilePlayerPtr = NULL;
2600         return -1;
2601     }
2602
2603     _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2604     channel_state_.SetInputFilePlaying(true);
2605
2606     return 0;
2607 }
2608
2609 int Channel::StopPlayingFileAsMicrophone()
2610 {
2611     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2612                  "Channel::StopPlayingFileAsMicrophone()");
2613
2614     CriticalSectionScoped cs(&_fileCritSect);
2615
2616     if (!channel_state_.Get().input_file_playing)
2617     {
2618         _engineStatisticsPtr->SetLastError(
2619             VE_INVALID_OPERATION, kTraceWarning,
2620             "StopPlayingFileAsMicrophone() is not playing");
2621         return 0;
2622     }
2623
2624     if (_inputFilePlayerPtr->StopPlayingFile() != 0)
2625     {
2626         _engineStatisticsPtr->SetLastError(
2627             VE_STOP_RECORDING_FAILED, kTraceError,
2628             "StopPlayingFile() could not stop playing");
2629         return -1;
2630     }
2631     _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2632     FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2633     _inputFilePlayerPtr = NULL;
2634     channel_state_.SetInputFilePlaying(false);
2635
2636     return 0;
2637 }
2638
2639 int Channel::IsPlayingFileAsMicrophone() const
2640 {
2641     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2642                  "Channel::IsPlayingFileAsMicrophone()");
2643     return channel_state_.Get().input_file_playing;
2644 }
2645
2646 int Channel::ScaleFileAsMicrophonePlayout(float scale)
2647 {
2648     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2649                  "Channel::ScaleFileAsMicrophonePlayout(scale=%5.3f)", scale);
2650
2651     CriticalSectionScoped cs(&_fileCritSect);
2652
2653     if (!channel_state_.Get().input_file_playing)
2654     {
2655         _engineStatisticsPtr->SetLastError(
2656             VE_INVALID_OPERATION, kTraceError,
2657             "ScaleFileAsMicrophonePlayout() is not playing");
2658         return -1;
2659     }
2660
2661     if ((_inputFilePlayerPtr == NULL) ||
2662         (_inputFilePlayerPtr->SetAudioScaling(scale) != 0))
2663     {
2664         _engineStatisticsPtr->SetLastError(
2665             VE_BAD_ARGUMENT, kTraceError,
2666             "SetAudioScaling() failed to scale playout");
2667         return -1;
2668     }
2669
2670     return 0;
2671 }
2672
2673 int Channel::StartRecordingPlayout(const char* fileName,
2674                                    const CodecInst* codecInst)
2675 {
2676     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2677                  "Channel::StartRecordingPlayout(fileName=%s)", fileName);
2678
2679     if (_outputFileRecording)
2680     {
2681         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2682                      "StartRecordingPlayout() is already recording");
2683         return 0;
2684     }
2685
2686     FileFormats format;
2687     const uint32_t notificationTime(0); // Not supported in VoE
2688     CodecInst dummyCodec={100,"L16",16000,320,1,320000};
2689
2690     if ((codecInst != NULL) &&
2691       ((codecInst->channels < 1) || (codecInst->channels > 2)))
2692     {
2693         _engineStatisticsPtr->SetLastError(
2694             VE_BAD_ARGUMENT, kTraceError,
2695             "StartRecordingPlayout() invalid compression");
2696         return(-1);
2697     }
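    // No codec supplied: fall back to 16 kHz linear PCM (the dummy L16 codec above).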
2698     if(codecInst == NULL)
2699     {
2700         format = kFileFormatPcm16kHzFile;
2701         codecInst=&dummyCodec;
2702     }
2703     else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2704         (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2705         (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2706     {
2707         format = kFileFormatWavFile;
2708     }
2709     else
2710     {
2711         format = kFileFormatCompressedFile;
2712     }
2713
2714     CriticalSectionScoped cs(&_fileCritSect);
2715
2716     // Destroy the old instance
2717     if (_outputFileRecorderPtr)
2718     {
2719         _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2720         FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2721         _outputFileRecorderPtr = NULL;
2722     }
2723
2724     _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2725         _outputFileRecorderId, (const FileFormats)format);
2726     if (_outputFileRecorderPtr == NULL)
2727     {
2728         _engineStatisticsPtr->SetLastError(
2729             VE_INVALID_ARGUMENT, kTraceError,
2730             "StartRecordingPlayout() fileRecorder format is not correct");
2731         return -1;
2732     }
2733
2734     if (_outputFileRecorderPtr->StartRecordingAudioFile(
2735         fileName, (const CodecInst&)*codecInst, notificationTime) != 0)
2736     {
2737         _engineStatisticsPtr->SetLastError(
2738             VE_BAD_FILE, kTraceError,
2739             "StartRecordingAudioFile() failed to start file recording");
2740         _outputFileRecorderPtr->StopRecording();
2741         FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2742         _outputFileRecorderPtr = NULL;
2743         return -1;
2744     }
2745     _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2746     _outputFileRecording = true;
2747
2748     return 0;
2749 }
2750
2751 int Channel::StartRecordingPlayout(OutStream* stream,
2752                                    const CodecInst* codecInst)
2753 {
2754     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2755                  "Channel::StartRecordingPlayout()");
2756
2757     if (_outputFileRecording)
2758     {
2759         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2760                      "StartRecordingPlayout() is already recording");
2761         return 0;
2762     }
2763
2764     FileFormats format;
2765     const uint32_t notificationTime(0); // Not supported in VoE
2766     CodecInst dummyCodec={100,"L16",16000,320,1,320000};
2767
2768     if (codecInst != NULL && codecInst->channels != 1)
2769     {
2770         _engineStatisticsPtr->SetLastError(
2771             VE_BAD_ARGUMENT, kTraceError,
2772             "StartRecordingPlayout() invalid compression");
2773         return(-1);
2774     }
2775     if(codecInst == NULL)
2776     {
2777         format = kFileFormatPcm16kHzFile;
2778         codecInst=&dummyCodec;
2779     }
2780     else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2781         (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2782         (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2783     {
2784         format = kFileFormatWavFile;
2785     }
2786     else
2787     {
2788         format = kFileFormatCompressedFile;
2789     }
2790
2791     CriticalSectionScoped cs(&_fileCritSect);
2792
2793     // Destroy the old instance
2794     if (_outputFileRecorderPtr)
2795     {
2796         _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2797         FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2798         _outputFileRecorderPtr = NULL;
2799     }
2800
2801     _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2802         _outputFileRecorderId, (const FileFormats)format);
2803     if (_outputFileRecorderPtr == NULL)
2804     {
2805         _engineStatisticsPtr->SetLastError(
2806             VE_INVALID_ARGUMENT, kTraceError,
2807             "StartRecordingPlayout() fileRecorder format is not correct");
2808         return -1;
2809     }
2810
2811     if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
2812                                                         notificationTime) != 0)
2813     {
2814         _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2815                                            "StartRecordingPlayout() failed to "
2816                                            "start file recording");
2817         _outputFileRecorderPtr->StopRecording();
2818         FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2819         _outputFileRecorderPtr = NULL;
2820         return -1;
2821     }
2822
2823     _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2824     _outputFileRecording = true;
2825
2826     return 0;
2827 }
2828
2829 int Channel::StopRecordingPlayout()
2830 {
2831     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
2832                  "Channel::StopRecordingPlayout()");
2833
2834     if (!_outputFileRecording)
2835     {
2836         WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
2837                      "StopRecordingPlayout() is not recording");
2838         return -1;
2839     }
2840
2841
2842     CriticalSectionScoped cs(&_fileCritSect);
2843
2844     if (_outputFileRecorderPtr->StopRecording() != 0)
2845     {
2846         _engineStatisticsPtr->SetLastError(
2847             VE_STOP_RECORDING_FAILED, kTraceError,
2848             "StopRecording() could not stop recording");
2849         return(-1);
2850     }
2851     _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2852     FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2853     _outputFileRecorderPtr = NULL;
2854     _outputFileRecording = false;
2855
2856     return 0;
2857 }
2858
2859 void
2860 Channel::SetMixWithMicStatus(bool mix)
2861 {
2862     CriticalSectionScoped cs(&_fileCritSect);
2863     _mixFileWithMicrophone = mix;
2864 }
2865
2866 int
2867 Channel::GetSpeechOutputLevel(uint32_t& level) const
2868 {
2869     int8_t currentLevel = _outputAudioLevel.Level();
2870     level = static_cast<uint32_t>(currentLevel);
2871     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2872                VoEId(_instanceId,_channelId),
2873                "GetSpeechOutputLevel() => level=%u", level);
2874     return 0;
2875 }
2876
2877 int
2878 Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const
2879 {
2880     int16_t currentLevel = _outputAudioLevel.LevelFullRange();
2881     level = static_cast<uint32_t>(currentLevel);
2882     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2883                VoEId(_instanceId,_channelId),
2884                "GetSpeechOutputLevelFullRange() => level=%u", level);
2885     return 0;
2886 }
2887
2888 int
2889 Channel::SetMute(bool enable)
2890 {
2891     CriticalSectionScoped cs(&volume_settings_critsect_);
2892     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2893                "Channel::SetMute(enable=%d)", enable);
2894     _mute = enable;
2895     return 0;
2896 }
2897
2898 bool
2899 Channel::Mute() const
2900 {
2901     CriticalSectionScoped cs(&volume_settings_critsect_);
2902     return _mute;
2903 }
2904
2905 int
2906 Channel::SetOutputVolumePan(float left, float right)
2907 {
2908     CriticalSectionScoped cs(&volume_settings_critsect_);
2909     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2910                "Channel::SetOutputVolumePan()");
2911     _panLeft = left;
2912     _panRight = right;
2913     return 0;
2914 }
2915
2916 int
2917 Channel::GetOutputVolumePan(float& left, float& right) const
2918 {
2919     CriticalSectionScoped cs(&volume_settings_critsect_);
2920     left = _panLeft;
2921     right = _panRight;
2922     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2923                VoEId(_instanceId,_channelId),
2924                "GetOutputVolumePan() => left=%3.2f, right=%3.2f", left, right);
2925     return 0;
2926 }
2927
2928 int
2929 Channel::SetChannelOutputVolumeScaling(float scaling)
2930 {
2931     CriticalSectionScoped cs(&volume_settings_critsect_);
2932     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2933                "Channel::SetChannelOutputVolumeScaling()");
2934     _outputGain = scaling;
2935     return 0;
2936 }
2937
2938 int
2939 Channel::GetChannelOutputVolumeScaling(float& scaling) const
2940 {
2941     CriticalSectionScoped cs(&volume_settings_critsect_);
2942     scaling = _outputGain;
2943     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2944                VoEId(_instanceId,_channelId),
2945                "GetChannelOutputVolumeScaling() => scaling=%3.2f", scaling);
2946     return 0;
2947 }
2948
2949 int Channel::SendTelephoneEventOutband(unsigned char eventCode,
2950                                        int lengthMs, int attenuationDb,
2951                                        bool playDtmfEvent)
2952 {
2953     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2954                "Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)",
2955                playDtmfEvent);
2956
2957     _playOutbandDtmfEvent = playDtmfEvent;
2958
2959     if (_rtpRtcpModule->SendTelephoneEventOutband(eventCode, lengthMs,
2960                                                  attenuationDb) != 0)
2961     {
2962         _engineStatisticsPtr->SetLastError(
2963             VE_SEND_DTMF_FAILED,
2964             kTraceWarning,
2965             "SendTelephoneEventOutband() failed to send event");
2966         return -1;
2967     }
2968     return 0;
2969 }
2970
2971 int Channel::SendTelephoneEventInband(unsigned char eventCode,
2972                                          int lengthMs,
2973                                          int attenuationDb,
2974                                          bool playDtmfEvent)
2975 {
2976     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2977                "Channel::SendTelephoneEventInband(..., playDtmfEvent=%d)",
2978                playDtmfEvent);
2979
2980     _playInbandDtmfEvent = playDtmfEvent;
2981     _inbandDtmfQueue.AddDtmf(eventCode, lengthMs, attenuationDb);
2982
2983     return 0;
2984 }
2985
2986 int
2987 Channel::SetDtmfPlayoutStatus(bool enable)
2988 {
2989     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2990                "Channel::SetDtmfPlayoutStatus()");
2991     if (audio_coding_->SetDtmfPlayoutStatus(enable) != 0)
2992     {
2993         _engineStatisticsPtr->SetLastError(
2994             VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
2995             "SetDtmfPlayoutStatus() failed to set Dtmf playout");
2996         return -1;
2997     }
2998     return 0;
2999 }
3000
3001 bool
3002 Channel::DtmfPlayoutStatus() const
3003 {
3004     return audio_coding_->DtmfPlayoutStatus();
3005 }
3006
3007 int
3008 Channel::SetSendTelephoneEventPayloadType(unsigned char type)
3009 {
3010     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3011                "Channel::SetSendTelephoneEventPayloadType()");
3012     if (type > 127)
3013     {
3014         _engineStatisticsPtr->SetLastError(
3015             VE_INVALID_ARGUMENT, kTraceError,
3016             "SetSendTelephoneEventPayloadType() invalid type");
3017         return -1;
3018     }
3019     CodecInst codec = {};
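    // telephone-event (RFC 4733) is defined with an 8 kHz clock rate.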
3020     codec.plfreq = 8000;
3021     codec.pltype = type;
3022     memcpy(codec.plname, "telephone-event", 16);
3023     if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
3024     {
3025         _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
3026         if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
3027             _engineStatisticsPtr->SetLastError(
3028                 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3029                 "SetSendTelephoneEventPayloadType() failed to register send "
3030                 "payload type");
3031             return -1;
3032         }
3033     }
3034     _sendTelephoneEventPayloadType = type;
3035     return 0;
3036 }
3037
3038 int
3039 Channel::GetSendTelephoneEventPayloadType(unsigned char& type)
3040 {
3041     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3042                  "Channel::GetSendTelephoneEventPayloadType()");
3043     type = _sendTelephoneEventPayloadType;
3044     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3045                VoEId(_instanceId,_channelId),
3046                "GetSendTelephoneEventPayloadType() => type=%u", type);
3047     return 0;
3048 }
3049
3050 int
3051 Channel::UpdateRxVadDetection(AudioFrame& audioFrame)
3052 {
3053     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3054                  "Channel::UpdateRxVadDetection()");
3055
3056     int vadDecision =
3057         (audioFrame.vad_activity_ == AudioFrame::kVadActive) ? 1 : 0;
3059
3060     if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr)
3061     {
3062         OnRxVadDetected(vadDecision);
3063         _oldVadDecision = vadDecision;
3064     }
3065
3066     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3067                  "Channel::UpdateRxVadDetection() => vadDecision=%d",
3068                  vadDecision);
3069     return 0;
3070 }
3071
3072 int
3073 Channel::RegisterRxVadObserver(VoERxVadCallback &observer)
3074 {
3075     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3076                  "Channel::RegisterRxVadObserver()");
3077     CriticalSectionScoped cs(&_callbackCritSect);
3078
3079     if (_rxVadObserverPtr)
3080     {
3081         _engineStatisticsPtr->SetLastError(
3082             VE_INVALID_OPERATION, kTraceError,
3083             "RegisterRxVadObserver() observer already enabled");
3084         return -1;
3085     }
3086     _rxVadObserverPtr = &observer;
3087     _RxVadDetection = true;
3088     return 0;
3089 }
3090
3091 int
3092 Channel::DeRegisterRxVadObserver()
3093 {
3094     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3095                  "Channel::DeRegisterRxVadObserver()");
3096     CriticalSectionScoped cs(&_callbackCritSect);
3097
3098     if (!_rxVadObserverPtr)
3099     {
3100         _engineStatisticsPtr->SetLastError(
3101             VE_INVALID_OPERATION, kTraceWarning,
3102             "DeRegisterRxVadObserver() observer already disabled");
3103         return 0;
3104     }
3105     _rxVadObserverPtr = NULL;
3106     _RxVadDetection = false;
3107     return 0;
3108 }
3109
3110 int
3111 Channel::VoiceActivityIndicator(int &activity)
3112 {
3113     activity = _sendFrameType;
3114
3115     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3116                  "Channel::VoiceActivityIndicator(indicator=%d)", activity);
3117     return 0;
3118 }
3119
3120 #ifdef WEBRTC_VOICE_ENGINE_AGC
3121
3122 int
3123 Channel::SetRxAgcStatus(bool enable, AgcModes mode)
3124 {
3125     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3126                  "Channel::SetRxAgcStatus(enable=%d, mode=%d)",
3127                  (int)enable, (int)mode);
3128
3129     GainControl::Mode agcMode = kDefaultRxAgcMode;
3130     switch (mode)
3131     {
3132         case kAgcDefault:
3133             break;
3134         case kAgcUnchanged:
3135             agcMode = rx_audioproc_->gain_control()->mode();
3136             break;
3137         case kAgcFixedDigital:
3138             agcMode = GainControl::kFixedDigital;
3139             break;
3140         case kAgcAdaptiveDigital:
3141             agcMode = GainControl::kAdaptiveDigital;
3142             break;
3143         default:
3144             _engineStatisticsPtr->SetLastError(
3145                 VE_INVALID_ARGUMENT, kTraceError,
3146                 "SetRxAgcStatus() invalid Agc mode");
3147             return -1;
3148     }
3149
3150     if (rx_audioproc_->gain_control()->set_mode(agcMode) != 0)
3151     {
3152         _engineStatisticsPtr->SetLastError(
3153             VE_APM_ERROR, kTraceError,
3154             "SetRxAgcStatus() failed to set Agc mode");
3155         return -1;
3156     }
3157     if (rx_audioproc_->gain_control()->Enable(enable) != 0)
3158     {
3159         _engineStatisticsPtr->SetLastError(
3160             VE_APM_ERROR, kTraceError,
3161             "SetRxAgcStatus() failed to set Agc state");
3162         return -1;
3163     }
3164
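         // Receive-side APM processing only needs to run when at least one of
         // Rx AGC or Rx NS is enabled.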
3165     _rxAgcIsEnabled = enable;
3166     channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
3167
3168     return 0;
3169 }
3170
3171 int
3172 Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode)
3173 {
3174     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3175                      "Channel::GetRxAgcStatus(enable=?, mode=?)");
3176
3177     bool enable = rx_audioproc_->gain_control()->is_enabled();
3178     GainControl::Mode agcMode =
3179         rx_audioproc_->gain_control()->mode();
3180
3181     enabled = enable;
3182
3183     switch (agcMode)
3184     {
3185         case GainControl::kFixedDigital:
3186             mode = kAgcFixedDigital;
3187             break;
3188         case GainControl::kAdaptiveDigital:
3189             mode = kAgcAdaptiveDigital;
3190             break;
3191         default:
3192             _engineStatisticsPtr->SetLastError(
3193                 VE_APM_ERROR, kTraceError,
3194                 "GetRxAgcStatus() invalid Agc mode");
3195             return -1;
3196     }
3197
3198     return 0;
3199 }
3200
3201 int
3202 Channel::SetRxAgcConfig(AgcConfig config)
3203 {
3204     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3205                  "Channel::SetRxAgcConfig()");
3206
3207     if (rx_audioproc_->gain_control()->set_target_level_dbfs(
3208         config.targetLeveldBOv) != 0)
3209     {
3210         _engineStatisticsPtr->SetLastError(
3211             VE_APM_ERROR, kTraceError,
3212             "SetRxAgcConfig() failed to set target peak |level| "
3213             "(or envelope) of the Agc");
3214         return -1;
3215     }
3216     if (rx_audioproc_->gain_control()->set_compression_gain_db(
3217         config.digitalCompressionGaindB) != 0)
3218     {
3219         _engineStatisticsPtr->SetLastError(
3220             VE_APM_ERROR, kTraceError,
3221             "SetRxAgcConfig() failed to set the range in |gain| the"
3222             " digital compression stage may apply");
3223         return -1;
3224     }
3225     if (rx_audioproc_->gain_control()->enable_limiter(
3226         config.limiterEnable) != 0)
3227     {
3228         _engineStatisticsPtr->SetLastError(
3229             VE_APM_ERROR, kTraceError,
3230             "SetRxAgcConfig() failed to set hard limiter to the signal");
3231         return -1;
3232     }
3233
3234     return 0;
3235 }
3236
3237 int
3238 Channel::GetRxAgcConfig(AgcConfig& config)
3239 {
3240     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3241                  "Channel::GetRxAgcConfig(config=?)");
3242
3243     config.targetLeveldBOv =
3244         rx_audioproc_->gain_control()->target_level_dbfs();
3245     config.digitalCompressionGaindB =
3246         rx_audioproc_->gain_control()->compression_gain_db();
3247     config.limiterEnable =
3248         rx_audioproc_->gain_control()->is_limiter_enabled();
3249
3250     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3251                VoEId(_instanceId,_channelId), "GetRxAgcConfig() => "
3252                    "targetLeveldBOv=%u, digitalCompressionGaindB=%u,"
3253                    " limiterEnable=%d",
3254                    config.targetLeveldBOv,
3255                    config.digitalCompressionGaindB,
3256                    config.limiterEnable);
3257
3258     return 0;
3259 }
3260
3261 #endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
3262
3263 #ifdef WEBRTC_VOICE_ENGINE_NR
3264
3265 int
3266 Channel::SetRxNsStatus(bool enable, NsModes mode)
3267 {
3268     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3269                  "Channel::SetRxNsStatus(enable=%d, mode=%d)",
3270                  (int)enable, (int)mode);
3271
3272     NoiseSuppression::Level nsLevel = kDefaultNsMode;
3273     switch (mode)
3274     {
3275
3276         case kNsDefault:
3277             break;
3278         case kNsUnchanged:
3279             nsLevel = rx_audioproc_->noise_suppression()->level();
3280             break;
3281         case kNsConference:
3282             nsLevel = NoiseSuppression::kHigh;
3283             break;
3284         case kNsLowSuppression:
3285             nsLevel = NoiseSuppression::kLow;
3286             break;
3287         case kNsModerateSuppression:
3288             nsLevel = NoiseSuppression::kModerate;
3289             break;
3290         case kNsHighSuppression:
3291             nsLevel = NoiseSuppression::kHigh;
3292             break;
3293         case kNsVeryHighSuppression:
3294             nsLevel = NoiseSuppression::kVeryHigh;
3295             break;
3296     }
3297
3298     if (rx_audioproc_->noise_suppression()->set_level(nsLevel)
3299         != 0)
3300     {
3301         _engineStatisticsPtr->SetLastError(
3302             VE_APM_ERROR, kTraceError,
3303             "SetRxNsStatus() failed to set NS level");
3304         return -1;
3305     }
3306     if (rx_audioproc_->noise_suppression()->Enable(enable) != 0)
3307     {
3308         _engineStatisticsPtr->SetLastError(
3309             VE_APM_ERROR, kTraceError,
3310             "SetRxNsStatus() failed to set NS state");
3311         return -1;
3312     }
3313
3314     _rxNsIsEnabled = enable;
3315     channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
3316
3317     return 0;
3318 }
3319
3320 int
3321 Channel::GetRxNsStatus(bool& enabled, NsModes& mode)
3322 {
3323     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3324                  "Channel::GetRxNsStatus(enable=?, mode=?)");
3325
3326     bool enable =
3327         rx_audioproc_->noise_suppression()->is_enabled();
3328     NoiseSuppression::Level ncLevel =
3329         rx_audioproc_->noise_suppression()->level();
3330
3331     enabled = enable;
3332
3333     switch (ncLevel)
3334     {
3335         case NoiseSuppression::kLow:
3336             mode = kNsLowSuppression;
3337             break;
3338         case NoiseSuppression::kModerate:
3339             mode = kNsModerateSuppression;
3340             break;
3341         case NoiseSuppression::kHigh:
3342             mode = kNsHighSuppression;
3343             break;
3344         case NoiseSuppression::kVeryHigh:
3345             mode = kNsVeryHighSuppression;
3346             break;
3347     }
3348
3349     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3350                VoEId(_instanceId,_channelId),
3351                "GetRxNsStatus() => enabled=%d, mode=%d", enabled, mode);
3352     return 0;
3353 }
3354
3355 #endif // #ifdef WEBRTC_VOICE_ENGINE_NR
3356
3357 int
3358 Channel::RegisterRTPObserver(VoERTPObserver& observer)
3359 {
3360     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3361                  "Channel::RegisterRTPObserver()");
3362     CriticalSectionScoped cs(&_callbackCritSect);
3363
3364     if (_rtpObserverPtr)
3365     {
3366         _engineStatisticsPtr->SetLastError(
3367             VE_INVALID_OPERATION, kTraceError,
3368             "RegisterRTPObserver() observer already enabled");
3369         return -1;
3370     }
3371
3372     _rtpObserverPtr = &observer;
3373     _rtpObserver = true;
3374
3375     return 0;
3376 }
3377
3378 int
3379 Channel::DeRegisterRTPObserver()
3380 {
3381     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3382                  "Channel::DeRegisterRTPObserver()");
3383     CriticalSectionScoped cs(&_callbackCritSect);
3384
3385     if (!_rtpObserverPtr)
3386     {
3387         _engineStatisticsPtr->SetLastError(
3388             VE_INVALID_OPERATION, kTraceWarning,
3389             "DeRegisterRTPObserver() observer already disabled");
3390         return 0;
3391     }
3392
3393     _rtpObserver = false;
3394     _rtpObserverPtr = NULL;
3395
3396     return 0;
3397 }
3398
3399 int
3400 Channel::RegisterRTCPObserver(VoERTCPObserver& observer)
3401 {
3402     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3403                  "Channel::RegisterRTCPObserver()");
3404     CriticalSectionScoped cs(&_callbackCritSect);
3405
3406     if (_rtcpObserverPtr)
3407     {
3408         _engineStatisticsPtr->SetLastError(
3409             VE_INVALID_OPERATION, kTraceError,
3410             "RegisterRTCPObserver() observer already enabled");
3411         return -1;
3412     }
3413
3414     _rtcpObserverPtr = &observer;
3415     _rtcpObserver = true;
3416
3417     return 0;
3418 }
3419
3420 int
3421 Channel::DeRegisterRTCPObserver()
3422 {
3423     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3424                  "Channel::DeRegisterRTCPObserver()");
3425     CriticalSectionScoped cs(&_callbackCritSect);
3426
3427     if (!_rtcpObserverPtr)
3428     {
3429         _engineStatisticsPtr->SetLastError(
3430             VE_INVALID_OPERATION, kTraceWarning,
3431             "DeRegisterRTCPObserver() observer already disabled");
3432         return 0;
3433     }
3434
3435     _rtcpObserver = false;
3436     _rtcpObserverPtr = NULL;
3437
3438     return 0;
3439 }
3440
3441 int
3442 Channel::SetLocalSSRC(unsigned int ssrc)
3443 {
3444     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3445                  "Channel::SetLocalSSRC()");
3446     if (channel_state_.Get().sending)
3447     {
3448         _engineStatisticsPtr->SetLastError(
3449             VE_ALREADY_SENDING, kTraceError,
3450             "SetLocalSSRC() already sending");
3451         return -1;
3452     }
3453     if (_rtpRtcpModule->SetSSRC(ssrc) != 0)
3454     {
3455         _engineStatisticsPtr->SetLastError(
3456             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3457             "SetLocalSSRC() failed to set SSRC");
3458         return -1;
3459     }
3460     return 0;
3461 }
3462
3463 int
3464 Channel::GetLocalSSRC(unsigned int& ssrc)
3465 {
3466     ssrc = _rtpRtcpModule->SSRC();
3467     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3468                  VoEId(_instanceId,_channelId),
3469                  "GetLocalSSRC() => ssrc=%lu", ssrc);
3470     return 0;
3471 }
3472
3473 int
3474 Channel::GetRemoteSSRC(unsigned int& ssrc)
3475 {
3476     ssrc = rtp_receiver_->SSRC();
3477     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3478                  VoEId(_instanceId,_channelId),
3479                  "GetRemoteSSRC() => ssrc=%lu", ssrc);
3480     return 0;
3481 }
3482
3483 int
3484 Channel::GetRemoteCSRCs(unsigned int arrCSRC[15])
3485 {
3486     if (arrCSRC == NULL)
3487     {
3488         _engineStatisticsPtr->SetLastError(
3489             VE_INVALID_ARGUMENT, kTraceError,
3490             "GetRemoteCSRCs() invalid array argument");
3491         return -1;
3492     }
3493     uint32_t arrOfCSRC[kRtpCsrcSize];
3494     int32_t CSRCs(0);
3495     CSRCs = rtp_receiver_->CSRCs(arrOfCSRC);
3496     if (CSRCs > 0)
3497     {
3498         memcpy(arrCSRC, arrOfCSRC, CSRCs * sizeof(uint32_t));
3499         for (int i = 0; i < (int) CSRCs; i++)
3500         {
3501             WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3502                        VoEId(_instanceId, _channelId),
3503                        "GetRemoteCSRCs() => arrCSRC[%d]=%lu", i, arrCSRC[i]);
3504         }
3505     } else
3506     {
3507         WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3508                    VoEId(_instanceId, _channelId),
3509                    "GetRemoteCSRCs() => list is empty!");
3510     }
3511     return CSRCs;
3512 }
3513
3514 int Channel::SetSendAudioLevelIndicationStatus(bool enable, unsigned char id) {
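       // The audio-level RTP header extension (RFC 6464) carries per-packet
       // level measurements, so the level estimator in this channel's
       // AudioProcessing instance is enabled before registering the extension.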
3515   if (rtp_audioproc_.get() == NULL) {
3516     rtp_audioproc_.reset(AudioProcessing::Create(VoEModuleId(_instanceId,
3517                                                              _channelId)));
3518   }
3519
3520   if (rtp_audioproc_->level_estimator()->Enable(enable) !=
3521       AudioProcessing::kNoError) {
3522     _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceError,
3523         "Failed to enable AudioProcessing::level_estimator()");
3524     return -1;
3525   }
3526
3527   _includeAudioLevelIndication = enable;
3528
3529   return SetSendRtpHeaderExtension(enable, kRtpExtensionAudioLevel, id);
3530 }
3531
3532 int Channel::SetSendAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
3533   return SetSendRtpHeaderExtension(enable, kRtpExtensionAbsoluteSendTime, id);
3534 }
3535
3536 int Channel::SetReceiveAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
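       // Always clear any previous registration so that repeated calls with a
       // new id (or with enable == false) take effect.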
3537   rtp_header_parser_->DeregisterRtpHeaderExtension(
3538       kRtpExtensionAbsoluteSendTime);
3539   if (enable && !rtp_header_parser_->RegisterRtpHeaderExtension(
3540       kRtpExtensionAbsoluteSendTime, id)) {
3541     return -1;
3542   }
3543   return 0;
3544 }
3545
3546 int
3547 Channel::SetRTCPStatus(bool enable)
3548 {
3549     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3550                  "Channel::SetRTCPStatus()");
3551     if (_rtpRtcpModule->SetRTCPStatus(enable ?
3552         kRtcpCompound : kRtcpOff) != 0)
3553     {
3554         _engineStatisticsPtr->SetLastError(
3555             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3556             "SetRTCPStatus() failed to set RTCP status");
3557         return -1;
3558     }
3559     return 0;
3560 }
3561
3562 int
3563 Channel::GetRTCPStatus(bool& enabled)
3564 {
3565     RTCPMethod method = _rtpRtcpModule->RTCP();
3566     enabled = (method != kRtcpOff);
3567     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3568                  VoEId(_instanceId,_channelId),
3569                  "GetRTCPStatus() => enabled=%d", enabled);
3570     return 0;
3571 }
3572
3573 int
3574 Channel::SetRTCP_CNAME(const char cName[256])
3575 {
3576     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3577                  "Channel::SetRTCP_CNAME()");
3578     if (_rtpRtcpModule->SetCNAME(cName) != 0)
3579     {
3580         _engineStatisticsPtr->SetLastError(
3581             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3582             "SetRTCP_CNAME() failed to set RTCP CNAME");
3583         return -1;
3584     }
3585     return 0;
3586 }
3587
3588 int
3589 Channel::GetRTCP_CNAME(char cName[256])
3590 {
3591     if (_rtpRtcpModule->CNAME(cName) != 0)
3592     {
3593         _engineStatisticsPtr->SetLastError(
3594             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3595             "GetRTCP_CNAME() failed to retrieve RTCP CNAME");
3596         return -1;
3597     }
3598     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3599                  VoEId(_instanceId, _channelId),
3600                  "GetRTCP_CNAME() => cName=%s", cName);
3601     return 0;
3602 }
3603
3604 int
3605 Channel::GetRemoteRTCP_CNAME(char cName[256])
3606 {
3607     if (cName == NULL)
3608     {
3609         _engineStatisticsPtr->SetLastError(
3610             VE_INVALID_ARGUMENT, kTraceError,
3611             "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
3612         return -1;
3613     }
3614     char cname[RTCP_CNAME_SIZE];
3615     const uint32_t remoteSSRC = rtp_receiver_->SSRC();
3616     if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0)
3617     {
3618         _engineStatisticsPtr->SetLastError(
3619             VE_CANNOT_RETRIEVE_CNAME, kTraceError,
3620             "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
3621         return -1;
3622     }
3623     strcpy(cName, cname);
3624     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3625                  VoEId(_instanceId, _channelId),
3626                  "GetRemoteRTCP_CNAME() => cName=%s", cName);
3627     return 0;
3628 }
3629
3630 int
3631 Channel::GetRemoteRTCPData(
3632     unsigned int& NTPHigh,
3633     unsigned int& NTPLow,
3634     unsigned int& timestamp,
3635     unsigned int& playoutTimestamp,
3636     unsigned int* jitter,
3637     unsigned short* fractionLost)
3638 {
3639     // --- Information from sender info in received Sender Reports
3640
3641     RTCPSenderInfo senderInfo;
3642     if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0)
3643     {
3644         _engineStatisticsPtr->SetLastError(
3645             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3646             "GetRemoteRTCPData() failed to retrieve sender info for remote "
3647             "side");
3648         return -1;
3649     }
3650
3651     // We only utilize 12 out of 20 bytes in the sender info (ignores packet
3652     // and octet count)
3653     NTPHigh = senderInfo.NTPseconds;
3654     NTPLow = senderInfo.NTPfraction;
3655     timestamp = senderInfo.RTPtimeStamp;
3656
3657     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3658                  VoEId(_instanceId, _channelId),
3659                  "GetRemoteRTCPData() => NTPHigh=%lu, NTPLow=%lu, "
3660                  "timestamp=%lu",
3661                  NTPHigh, NTPLow, timestamp);
3662
3663     // --- Locally derived information
3664
3665     // This value is updated on each incoming RTCP packet (0 when no packet
3666     // has been received)
3667     playoutTimestamp = playout_timestamp_rtcp_;
3668
3669     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3670                  VoEId(_instanceId, _channelId),
3671                  "GetRemoteRTCPData() => playoutTimestamp=%lu",
3672                  playout_timestamp_rtcp_);
3673
3674     if (NULL != jitter || NULL != fractionLost)
3675     {
3676         // Get all RTCP receiver report blocks that have been received on this
3677         // channel. If we receive RTP packets from a remote source we know the
3678         // remote SSRC and use the report block for that source.
3679         // Otherwise use the first report block.
3680         std::vector<RTCPReportBlock> remote_stats;
3681         if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
3682             remote_stats.empty()) {
3683           WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3684                        VoEId(_instanceId, _channelId),
3685                        "GetRemoteRTCPData() failed to measure statistics due"
3686                        " to lack of received RTP and/or RTCP packets");
3687           return -1;
3688         }
3689
3690         uint32_t remoteSSRC = rtp_receiver_->SSRC();
3691         std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
3692         for (; it != remote_stats.end(); ++it) {
3693           if (it->remoteSSRC == remoteSSRC)
3694             break;
3695         }
3696
3697         if (it == remote_stats.end()) {
3698           // If we have not received any RTCP packets from this SSRC it probably
3699           // means that we have not received any RTP packets.
3700           // Use the first received report block instead.
3701           it = remote_stats.begin();
3702           remoteSSRC = it->remoteSSRC;
3703         }
3704
3705         if (jitter) {
3706           *jitter = it->jitter;
3707           WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3708                        VoEId(_instanceId, _channelId),
3709                        "GetRemoteRTCPData() => jitter = %lu", *jitter);
3710         }
3711
3712         if (fractionLost) {
3713           *fractionLost = it->fractionLost;
3714           WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3715                        VoEId(_instanceId, _channelId),
3716                        "GetRemoteRTCPData() => fractionLost = %lu",
3717                        *fractionLost);
3718         }
3719     }
3720     return 0;
3721 }
3722
3723 int
3724 Channel::SendApplicationDefinedRTCPPacket(unsigned char subType,
3725                                              unsigned int name,
3726                                              const char* data,
3727                                              unsigned short dataLengthInBytes)
3728 {
3729     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3730                  "Channel::SendApplicationDefinedRTCPPacket()");
3731     if (!channel_state_.Get().sending)
3732     {
3733         _engineStatisticsPtr->SetLastError(
3734             VE_NOT_SENDING, kTraceError,
3735             "SendApplicationDefinedRTCPPacket() not sending");
3736         return -1;
3737     }
3738     if (NULL == data)
3739     {
3740         _engineStatisticsPtr->SetLastError(
3741             VE_INVALID_ARGUMENT, kTraceError,
3742             "SendApplicationDefinedRTCPPacket() invalid data value");
3743         return -1;
3744     }
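         // RFC 3550 requires the application-dependent data in an RTCP APP
         // packet to be a multiple of 32 bits.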
3745     if (dataLengthInBytes % 4 != 0)
3746     {
3747         _engineStatisticsPtr->SetLastError(
3748             VE_INVALID_ARGUMENT, kTraceError,
3749             "SendApplicationDefinedRTCPPacket() invalid length value");
3750         return -1;
3751     }
3752     RTCPMethod status = _rtpRtcpModule->RTCP();
3753     if (status == kRtcpOff)
3754     {
3755         _engineStatisticsPtr->SetLastError(
3756             VE_RTCP_ERROR, kTraceError,
3757             "SendApplicationDefinedRTCPPacket() RTCP is disabled");
3758         return -1;
3759     }
3760
3761     // Create and schedule the RTCP APP packet for transmission
3762     if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
3763         subType,
3764         name,
3765         (const unsigned char*) data,
3766         dataLengthInBytes) != 0)
3767     {
3768         _engineStatisticsPtr->SetLastError(
3769             VE_SEND_ERROR, kTraceError,
3770             "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
3771         return -1;
3772     }
3773     return 0;
3774 }
3775
3776 int
3777 Channel::GetRTPStatistics(
3778         unsigned int& averageJitterMs,
3779         unsigned int& maxJitterMs,
3780         unsigned int& discardedPackets)
3781 {
3782     // The jitter statistics are updated for each received RTP packet, i.e.
3783     // they are based on received packets only.
3784     if (_rtpRtcpModule->RTCP() == kRtcpOff) {
3785       // If RTCP is off, there is no timed thread in the RTCP module regularly
3786       // generating new stats, trigger the update manually here instead.
3787       StreamStatistician* statistician =
3788           rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
3789       if (statistician) {
3790         // Don't use returned statistics, use data from proxy instead so that
3791         // max jitter can be fetched atomically.
3792         RtcpStatistics s;
3793         statistician->GetStatistics(&s, true);
3794       }
3795     }
3796
3797     ChannelStatistics stats = statistics_proxy_->GetStats();
3798     const int32_t playoutFrequency = audio_coding_->PlayoutFrequency();
3799     if (playoutFrequency > 0) {
3800       // Scale RTP statistics given the current playout frequency
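           // The reported jitter is in RTP timestamp units; dividing by
           // samples-per-millisecond converts it to milliseconds.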
3801       maxJitterMs = stats.max_jitter / (playoutFrequency / 1000);
3802       averageJitterMs = stats.rtcp.jitter / (playoutFrequency / 1000);
3803     }
3804
3805     discardedPackets = _numberOfDiscardedPackets;
3806
3807     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3808                VoEId(_instanceId, _channelId),
3809                "GetRTPStatistics() => averageJitterMs = %lu, maxJitterMs = %lu,"
3810                " discardedPackets = %lu)",
3811                averageJitterMs, maxJitterMs, discardedPackets);
3812     return 0;
3813 }
3814
3815 int Channel::GetRemoteRTCPSenderInfo(SenderInfo* sender_info) {
3816   if (sender_info == NULL) {
3817     _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
3818         "GetRemoteRTCPSenderInfo() invalid sender_info.");
3819     return -1;
3820   }
3821
3822   // Get the sender info from the latest received RTCP Sender Report.
3823   RTCPSenderInfo rtcp_sender_info;
3824   if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_sender_info) != 0) {
3825     _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3826         "GetRemoteRTCPSenderInfo() failed to read RTCP SR sender info.");
3827     return -1;
3828   }
3829
3830   sender_info->NTP_timestamp_high = rtcp_sender_info.NTPseconds;
3831   sender_info->NTP_timestamp_low = rtcp_sender_info.NTPfraction;
3832   sender_info->RTP_timestamp = rtcp_sender_info.RTPtimeStamp;
3833   sender_info->sender_packet_count = rtcp_sender_info.sendPacketCount;
3834   sender_info->sender_octet_count = rtcp_sender_info.sendOctetCount;
3835   return 0;
3836 }
3837
3838 int Channel::GetRemoteRTCPReportBlocks(
3839     std::vector<ReportBlock>* report_blocks) {
3840   if (report_blocks == NULL) {
3841     _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
3842       "GetRemoteRTCPReportBlocks() invalid report_blocks.");
3843     return -1;
3844   }
3845
3846   // Get the report blocks from the latest received RTCP Sender or Receiver
3847   // Report. Each element in the vector contains the sender's SSRC and a
3848   // report block according to RFC 3550.
3849   std::vector<RTCPReportBlock> rtcp_report_blocks;
3850   if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
3851     _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3852         "GetRemoteRTCPReportBlocks() failed to read RTCP SR/RR report block.");
3853     return -1;
3854   }
3855
3856   if (rtcp_report_blocks.empty())
3857     return 0;
3858
3859   std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
3860   for (; it != rtcp_report_blocks.end(); ++it) {
3861     ReportBlock report_block;
3862     report_block.sender_SSRC = it->remoteSSRC;
3863     report_block.source_SSRC = it->sourceSSRC;
3864     report_block.fraction_lost = it->fractionLost;
3865     report_block.cumulative_num_packets_lost = it->cumulativeLost;
3866     report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
3867     report_block.interarrival_jitter = it->jitter;
3868     report_block.last_SR_timestamp = it->lastSR;
3869     report_block.delay_since_last_SR = it->delaySinceLastSR;
3870     report_blocks->push_back(report_block);
3871   }
3872   return 0;
3873 }
3874
3875 int
3876 Channel::GetRTPStatistics(CallStatistics& stats)
3877 {
3878     // --- Part one of the final structure (four values)
3879
3880     // The jitter statistics are updated for each received RTP packet, i.e.
3881     // they are based on received packets only.
3882     RtcpStatistics statistics;
3883     StreamStatistician* statistician =
3884         rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
3885     if (!statistician || !statistician->GetStatistics(
3886         &statistics, _rtpRtcpModule->RTCP() == kRtcpOff)) {
3887       _engineStatisticsPtr->SetLastError(
3888           VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
3889           "GetRTPStatistics() failed to read RTP statistics from the "
3890           "RTP/RTCP module");
3891     }
3892
3893     stats.fractionLost = statistics.fraction_lost;
3894     stats.cumulativeLost = statistics.cumulative_lost;
3895     stats.extendedMax = statistics.extended_max_sequence_number;
3896     stats.jitterSamples = statistics.jitter;
3897
3898     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3899                  VoEId(_instanceId, _channelId),
3900                  "GetRTPStatistics() => fractionLost=%lu, cumulativeLost=%lu,"
3901                  " extendedMax=%lu, jitterSamples=%li)",
3902                  stats.fractionLost, stats.cumulativeLost, stats.extendedMax,
3903                  stats.jitterSamples);
3904
3905     // --- Part two of the final structure (one value)
3906
3907     uint16_t RTT(0);
3908     RTCPMethod method = _rtpRtcpModule->RTCP();
3909     if (method == kRtcpOff)
3910     {
3911         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3912                      VoEId(_instanceId, _channelId),
3913                      "GetRTPStatistics() RTCP is disabled => valid RTT "
3914                      "measurements cannot be retrieved");
3915     } else
3916     {
3917         // The remote SSRC will be zero if no RTP packet has been received.
3918         uint32_t remoteSSRC = rtp_receiver_->SSRC();
3919         if (remoteSSRC > 0)
3920         {
3921             uint16_t avgRTT(0);
3922             uint16_t maxRTT(0);
3923             uint16_t minRTT(0);
3924
3925             if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT, &maxRTT)
3926                 != 0)
3927             {
3928                 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3929                              VoEId(_instanceId, _channelId),
3930                              "GetRTPStatistics() failed to retrieve RTT from "
3931                              "the RTP/RTCP module");
3932             }
3933         } else
3934         {
3935             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3936                          VoEId(_instanceId, _channelId),
3937                          "GetRTPStatistics() failed to measure RTT since no "
3938                          "RTP packets have been received yet");
3939         }
3940     }
3941
3942     stats.rttMs = static_cast<int> (RTT);
3943
3944     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3945                  VoEId(_instanceId, _channelId),
3946                  "GetRTPStatistics() => rttMs=%d", stats.rttMs);
3947
3948     // --- Part three of the final structure (four values)
3949
3950     uint32_t bytesSent(0);
3951     uint32_t packetsSent(0);
3952     uint32_t bytesReceived(0);
3953     uint32_t packetsReceived(0);
3954
3955     if (statistician) {
3956       statistician->GetDataCounters(&bytesReceived, &packetsReceived);
3957     }
3958
3959     if (_rtpRtcpModule->DataCountersRTP(&bytesSent,
3960                                         &packetsSent) != 0)
3961     {
3962         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3963                      VoEId(_instanceId, _channelId),
3964                      "GetRTPStatistics() failed to retrieve RTP datacounters =>"
3965                      " output will not be complete");
3966     }
3967
3968     stats.bytesSent = bytesSent;
3969     stats.packetsSent = packetsSent;
3970     stats.bytesReceived = bytesReceived;
3971     stats.packetsReceived = packetsReceived;
3972
3973     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3974                  VoEId(_instanceId, _channelId),
3975                  "GetRTPStatistics() => bytesSent=%d, packetsSent=%d,"
3976                  " bytesReceived=%d, packetsReceived=%d)",
3977                  stats.bytesSent, stats.packetsSent, stats.bytesReceived,
3978                  stats.packetsReceived);
3979
3980     return 0;
3981 }
3982
3983 int Channel::SetFECStatus(bool enable, int redPayloadtype) {
3984   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3985                "Channel::SetFECStatus()");
3986
3987   if (enable) {
3988     if (redPayloadtype < 0 || redPayloadtype > 127) {
3989       _engineStatisticsPtr->SetLastError(
3990           VE_PLTYPE_ERROR, kTraceError,
3991           "SetFECStatus() invalid RED payload type");
3992       return -1;
3993     }
3994
3995     if (SetRedPayloadType(redPayloadtype) < 0) {
3996       _engineStatisticsPtr->SetLastError(
3997           VE_CODEC_ERROR, kTraceError,
3998           "SetFECStatus() failed to register the RED payload type");
3999       return -1;
4000     }
4001   }
4002
4003   if (audio_coding_->SetFECStatus(enable) != 0) {
4004     _engineStatisticsPtr->SetLastError(
4005         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4006         "SetFECStatus() failed to set FEC state in the ACM");
4007     return -1;
4008   }
4009   return 0;
4010 }
4011
4012 int
4013 Channel::GetFECStatus(bool& enabled, int& redPayloadtype)
4014 {
4015     enabled = audio_coding_->FECStatus();
4016     if (enabled)
4017     {
4018         int8_t payloadType(0);
4019         if (_rtpRtcpModule->SendREDPayloadType(payloadType) != 0)
4020         {
4021             _engineStatisticsPtr->SetLastError(
4022                 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4023                 "GetFECStatus() failed to retrieve RED PT from RTP/RTCP "
4024                 "module");
4025             return -1;
4026         }
             redPayloadtype = payloadType;
4027         WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4028                    VoEId(_instanceId, _channelId),
4029                    "GetFECStatus() => enabled=%d, redPayloadtype=%d",
4030                    enabled, redPayloadtype);
4031         return 0;
4032     }
4033     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4034                  VoEId(_instanceId, _channelId),
4035                  "GetFECStatus() => enabled=%d", enabled);
4036     return 0;
4037 }
4038
4039 void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
4040   // None of these functions can fail.
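       // Enabling NACK makes the RTP module store sent packets for
       // retransmission, raises the receive-side reordering threshold, and
       // lets the ACM build NACK lists for missing packets.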
4041   _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
4042   rtp_receive_statistics_->SetMaxReorderingThreshold(maxNumberOfPackets);
4043   rtp_receiver_->SetNACKStatus(enable ? kNackRtcp : kNackOff);
4044   if (enable)
4045     audio_coding_->EnableNack(maxNumberOfPackets);
4046   else
4047     audio_coding_->DisableNack();
4048 }
4049
4050 // Called when we are missing one or more packets.
4051 int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
4052   return _rtpRtcpModule->SendNACK(sequence_numbers, length);
4053 }
4054
4055 int
4056 Channel::StartRTPDump(const char fileNameUTF8[1024],
4057                       RTPDirections direction)
4058 {
4059     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4060                  "Channel::StartRTPDump()");
4061     if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
4062     {
4063         _engineStatisticsPtr->SetLastError(
4064             VE_INVALID_ARGUMENT, kTraceError,
4065             "StartRTPDump() invalid RTP direction");
4066         return -1;
4067     }
4068     RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4069         &_rtpDumpIn : &_rtpDumpOut;
4070     if (rtpDumpPtr == NULL)
4071     {
4072         assert(false);
4073         return -1;
4074     }
4075     if (rtpDumpPtr->IsActive())
4076     {
4077         rtpDumpPtr->Stop();
4078     }
4079     if (rtpDumpPtr->Start(fileNameUTF8) != 0)
4080     {
4081         _engineStatisticsPtr->SetLastError(
4082             VE_BAD_FILE, kTraceError,
4083             "StartRTPDump() failed to create file");
4084         return -1;
4085     }
4086     return 0;
4087 }
4088
4089 int
4090 Channel::StopRTPDump(RTPDirections direction)
4091 {
4092     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
4093                  "Channel::StopRTPDump()");
4094     if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
4095     {
4096         _engineStatisticsPtr->SetLastError(
4097             VE_INVALID_ARGUMENT, kTraceError,
4098             "StopRTPDump() invalid RTP direction");
4099         return -1;
4100     }
4101     RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4102         &_rtpDumpIn : &_rtpDumpOut;
4103     if (rtpDumpPtr == NULL)
4104     {
4105         assert(false);
4106         return -1;
4107     }
4108     if (!rtpDumpPtr->IsActive())
4109     {
4110         return 0;
4111     }
4112     return rtpDumpPtr->Stop();
4113 }
4114
4115 bool
4116 Channel::RTPDumpIsActive(RTPDirections direction)
4117 {
4118     if ((direction != kRtpIncoming) &&
4119         (direction != kRtpOutgoing))
4120     {
4121         _engineStatisticsPtr->SetLastError(
4122             VE_INVALID_ARGUMENT, kTraceError,
4123             "RTPDumpIsActive() invalid RTP direction");
4124         return false;
4125     }
4126     RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
4127         &_rtpDumpIn : &_rtpDumpOut;
4128     return rtpDumpPtr->IsActive();
4129 }
4130
4131 void Channel::SetVideoEngineBWETarget(ViENetwork* vie_network,
4132                                       int video_channel) {
4133   CriticalSectionScoped cs(&_callbackCritSect);
4134   if (vie_network_) {
4135     vie_network_->Release();
4136     vie_network_ = NULL;
4137   }
4138   video_channel_ = -1;
4139
4140   if (vie_network != NULL && video_channel != -1) {
4141     vie_network_ = vie_network;
4142     video_channel_ = video_channel;
4143   }
4144 }
4145
4146 uint32_t
4147 Channel::Demultiplex(const AudioFrame& audioFrame)
4148 {
4149     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4150                  "Channel::Demultiplex()");
4151     _audioFrame.CopyFrom(audioFrame);
4152     _audioFrame.id_ = _channelId;
4153     return 0;
4154 }
4155
4156 // TODO(xians): This method borrows quite some code from
4157 // TransmitMixer::GenerateAudioFrame(), refactor these two methods and reduce
4158 // code duplication.
4159 void Channel::Demultiplex(const int16_t* audio_data,
4160                           int sample_rate,
4161                           int number_of_frames,
4162                           int number_of_channels) {
4163   // The highest sample rate that WebRTC supports for mono audio is 96kHz.
4164   static const int kMaxNumberOfFrames = 960;
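       // 960 samples per channel corresponds to 10 ms of audio at 96 kHz.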
4165   assert(number_of_frames <= kMaxNumberOfFrames);
4166
4167   // Get the send codec information for doing resampling or downmixing later on.
4168   CodecInst codec;
4169   GetSendCodec(codec);
4170   assert(codec.channels == 1 || codec.channels == 2);
4171   int support_sample_rate = std::min(32000,
4172                                      std::min(sample_rate, codec.plfreq));
4173
4174   // Downmix the data to mono if needed.
4175   const int16_t* audio_ptr = audio_data;
4176   if (number_of_channels == 2 && codec.channels == 1) {
4177     if (!mono_recording_audio_.get())
4178       mono_recording_audio_.reset(new int16_t[kMaxNumberOfFrames]);
4179
4180     AudioFrameOperations::StereoToMono(audio_data, number_of_frames,
4181                                        mono_recording_audio_.get());
4182     audio_ptr = mono_recording_audio_.get();
4183   }
4184
4185   // Resample the data to the sample rate that the codec is using.
4186   if (input_resampler_.InitializeIfNeeded(sample_rate,
4187                                           support_sample_rate,
4188                                           codec.channels)) {
4189     WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
4190                  "Channel::Demultiplex() unable to resample");
4191     return;
4192   }
4193
4194   int out_length = input_resampler_.Resample(audio_ptr,
4195                                              number_of_frames * codec.channels,
4196                                              _audioFrame.data_,
4197                                              AudioFrame::kMaxDataSizeSamples);
4198   if (out_length == -1) {
4199     WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
4200                  "Channel::Demultiplex() resampling failed");
4201     return;
4202   }
4203
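       // The returned length counts samples across all channels, hence the
       // division by codec.channels below.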
4204   _audioFrame.samples_per_channel_ = out_length / codec.channels;
4205   _audioFrame.timestamp_ = -1;
4206   _audioFrame.sample_rate_hz_ = support_sample_rate;
4207   _audioFrame.speech_type_ = AudioFrame::kNormalSpeech;
4208   _audioFrame.vad_activity_ = AudioFrame::kVadUnknown;
4209   _audioFrame.num_channels_ = codec.channels;
4210   _audioFrame.id_ = _channelId;
4211 }
4212
4213 uint32_t
4214 Channel::PrepareEncodeAndSend(int mixingFrequency)
4215 {
4216     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4217                  "Channel::PrepareEncodeAndSend()");
4218
4219     if (_audioFrame.samples_per_channel_ == 0)
4220     {
4221         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4222                      "Channel::PrepareEncodeAndSend() invalid audio frame");
4223         return -1;
4224     }
4225
4226     if (channel_state_.Get().input_file_playing)
4227     {
4228         MixOrReplaceAudioWithFile(mixingFrequency);
4229     }
4230
4231     if (Mute())
4232     {
4233         AudioFrameOperations::Mute(_audioFrame);
4234     }
4235
4236     if (channel_state_.Get().input_external_media)
4237     {
4238         CriticalSectionScoped cs(&_callbackCritSect);
4239         const bool isStereo = (_audioFrame.num_channels_ == 2);
4240         if (_inputExternalMediaCallbackPtr)
4241         {
4242             _inputExternalMediaCallbackPtr->Process(
4243                 _channelId,
4244                 kRecordingPerChannel,
4245                (int16_t*)_audioFrame.data_,
4246                 _audioFrame.samples_per_channel_,
4247                 _audioFrame.sample_rate_hz_,
4248                 isStereo);
4249         }
4250     }
4251
4252     InsertInbandDtmfTone();
4253
4254     if (_includeAudioLevelIndication) {
4255       // Performs level analysis only; does not affect the signal.
4256       int err = rtp_audioproc_->ProcessStream(&_audioFrame);
4257       if (err) {
4258         LOG(LS_ERROR) << "ProcessStream() error: " << err;
4259         assert(false);
4260       }
4261     }
4262
4263     return 0;
4264 }
4265
4266 uint32_t
4267 Channel::EncodeAndSend()
4268 {
4269     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4270                  "Channel::EncodeAndSend()");
4271
4272     assert(_audioFrame.num_channels_ <= 2);
4273     if (_audioFrame.samples_per_channel_ == 0)
4274     {
4275         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4276                      "Channel::EncodeAndSend() invalid audio frame");
4277         return -1;
4278     }
4279
4280     _audioFrame.id_ = _channelId;
4281
4282     // --- Add 10ms of raw (PCM) audio data to the encoder.
4283
4284     // The ACM resamples internally.
4285     _audioFrame.timestamp_ = _timeStamp;
4286     if (audio_coding_->Add10MsData((AudioFrame&)_audioFrame) != 0)
4287     {
4288         WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
4289                      "Channel::EncodeAndSend() ACM encoding failed");
4290         return -1;
4291     }
4292
4293     _timeStamp += _audioFrame.samples_per_channel_;
4294
4295     // --- Encode if complete frame is ready
4296
4297     // This call will trigger AudioPacketizationCallback::SendData if encoding
4298     // is done and payload is ready for packetization and transmission.
4299     return audio_coding_->Process();
4300 }
4301
4302 int Channel::RegisterExternalMediaProcessing(
4303     ProcessingTypes type,
4304     VoEMediaProcess& processObject)
4305 {
4306     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4307                  "Channel::RegisterExternalMediaProcessing()");
4308
4309     CriticalSectionScoped cs(&_callbackCritSect);
4310
4311     if (kPlaybackPerChannel == type)
4312     {
4313         if (_outputExternalMediaCallbackPtr)
4314         {
4315             _engineStatisticsPtr->SetLastError(
4316                 VE_INVALID_OPERATION, kTraceError,
4317                 "Channel::RegisterExternalMediaProcessing() "
4318                 "output external media already enabled");
4319             return -1;
4320         }
4321         _outputExternalMediaCallbackPtr = &processObject;
4322         _outputExternalMedia = true;
4323     }
4324     else if (kRecordingPerChannel == type)
4325     {
4326         if (_inputExternalMediaCallbackPtr)
4327         {
4328             _engineStatisticsPtr->SetLastError(
4329                 VE_INVALID_OPERATION, kTraceError,
4330                 "Channel::RegisterExternalMediaProcessing() "
4331                 "input external media already enabled");
4332             return -1;
4333         }
4334         _inputExternalMediaCallbackPtr = &processObject;
4335         channel_state_.SetInputExternalMedia(true);
4336     }
4337     return 0;
4338 }
4339
4340 int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type)
4341 {
4342     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4343                  "Channel::DeRegisterExternalMediaProcessing()");
4344
4345     CriticalSectionScoped cs(&_callbackCritSect);
4346
4347     if (kPlaybackPerChannel == type)
4348     {
4349         if (!_outputExternalMediaCallbackPtr)
4350         {
4351             _engineStatisticsPtr->SetLastError(
4352                 VE_INVALID_OPERATION, kTraceWarning,
4353                 "Channel::DeRegisterExternalMediaProcessing() "
4354                 "output external media already disabled");
4355             return 0;
4356         }
4357         _outputExternalMedia = false;
4358         _outputExternalMediaCallbackPtr = NULL;
4359     }
4360     else if (kRecordingPerChannel == type)
4361     {
4362         if (!_inputExternalMediaCallbackPtr)
4363         {
4364             _engineStatisticsPtr->SetLastError(
4365                 VE_INVALID_OPERATION, kTraceWarning,
4366                 "Channel::DeRegisterExternalMediaProcessing() "
4367                 "input external media already disabled");
4368             return 0;
4369         }
4370         channel_state_.SetInputExternalMedia(false);
4371         _inputExternalMediaCallbackPtr = NULL;
4372     }
4373
4374     return 0;
4375 }
4376
4377 int Channel::SetExternalMixing(bool enabled) {
4378     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4379                  "Channel::SetExternalMixing(enabled=%d)", enabled);
4380
4381     if (channel_state_.Get().playing)
4382     {
4383         _engineStatisticsPtr->SetLastError(
4384             VE_INVALID_OPERATION, kTraceError,
4385             "Channel::SetExternalMixing() "
4386             "external mixing cannot be changed while playing.");
4387         return -1;
4388     }
4389
4390     _externalMixing = enabled;
4391
4392     return 0;
4393 }
4394
4395 int
4396 Channel::ResetRTCPStatistics()
4397 {
4398     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4399                  "Channel::ResetRTCPStatistics()");
4400     uint32_t remoteSSRC = rtp_receiver_->SSRC();
4402     return _rtpRtcpModule->ResetRTT(remoteSSRC);
4403 }
4404
4405 int
4406 Channel::GetRoundTripTimeSummary(StatVal& delaysMs) const
4407 {
4408     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4409                  "Channel::GetRoundTripTimeSummary()");
4410     // Override default module outputs for the case when RTCP is disabled.
4411     // This is done to stay backward compatible with older VoiceEngine
4412     // versions that did not use the RTP/RTCP module.
4413     if (!_rtpRtcpModule->RTCP())
4414     {
4415         delaysMs.min = -1;
4416         delaysMs.max = -1;
4417         delaysMs.average = -1;
4418         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4419                      "Channel::GetRoundTripTimeSummary() RTCP is disabled =>"
4420                      " valid RTT measurements cannot be retrieved");
4421         return 0;
4422     }
4423
4424     uint32_t remoteSSRC;
4425     uint16_t RTT;
4426     uint16_t avgRTT;
4427     uint16_t maxRTT;
4428     uint16_t minRTT;
4429     // The remote SSRC will be zero if no RTP packet has been received.
4430     remoteSSRC = rtp_receiver_->SSRC();
4431     if (remoteSSRC == 0)
4432     {
4433         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4434                      "Channel::GetRoundTripTimeSummary() unable to measure RTT"
4435                      " since no RTP packet has been received yet");
4436     }
4437
4438     // Retrieve RTT statistics from the RTP/RTCP module for the specified
4439     // channel and SSRC. The SSRC is required to parse out the correct source
4440     // in conference scenarios.
4441     if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT, &maxRTT) != 0)
4442     {
4443         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4444                      "GetRoundTripTimeSummary unable to retrieve RTT values"
4445                      " from the RTCP layer");
4446         delaysMs.min = -1; delaysMs.max = -1; delaysMs.average = -1;
4447     }
4448     else
4449     {
4450         delaysMs.min = minRTT;
4451         delaysMs.max = maxRTT;
4452         delaysMs.average = avgRTT;
4453     }
4454     return 0;
4455 }
4456
4457 int
4458 Channel::GetNetworkStatistics(NetworkStatistics& stats)
4459 {
4460     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4461                  "Channel::GetNetworkStatistics()");
4462     ACMNetworkStatistics acm_stats;
4463     int return_value = audio_coding_->NetworkStatistics(&acm_stats);
4464     if (return_value >= 0) {
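           // The memcpy below assumes NetworkStatistics and ACMNetworkStatistics
           // share the same memory layout.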
4465       memcpy(&stats, &acm_stats, sizeof(NetworkStatistics));
4466     }
4467     return return_value;
4468 }
4469
4470 void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const {
4471   audio_coding_->GetDecodingCallStatistics(stats);
4472 }
4473
4474 bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
4475                                int* playout_buffer_delay_ms) const {
4476   if (_average_jitter_buffer_delay_us == 0) {
4477     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4478                  "Channel::GetDelayEstimate() no valid estimate.");
4479     return false;
4480   }
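       // Round the microsecond average to the nearest millisecond before
       // adding the received-packet delay.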
4481   *jitter_buffer_delay_ms = (_average_jitter_buffer_delay_us + 500) / 1000 +
4482       _recPacketDelayMs;
4483   *playout_buffer_delay_ms = playout_delay_ms_;
4484   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4485                "Channel::GetDelayEstimate()");
4486   return true;
4487 }
4488
4489 int Channel::SetInitialPlayoutDelay(int delay_ms)
4490 {
4491   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4492                "Channel::SetInitialPlayoutDelay()");
4493   if ((delay_ms < kVoiceEngineMinMinPlayoutDelayMs) ||
4494       (delay_ms > kVoiceEngineMaxMinPlayoutDelayMs))
4495   {
4496     _engineStatisticsPtr->SetLastError(
4497         VE_INVALID_ARGUMENT, kTraceError,
4498         "SetInitialPlayoutDelay() invalid min delay");
4499     return -1;
4500   }
4501   if (audio_coding_->SetInitialPlayoutDelay(delay_ms) != 0)
4502   {
4503     _engineStatisticsPtr->SetLastError(
4504         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4505         "SetInitialPlayoutDelay() failed to set min playout delay");
4506     return -1;
4507   }
4508   return 0;
4509 }
4510
4511
4512 int
4513 Channel::SetMinimumPlayoutDelay(int delayMs)
4514 {
4515     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4516                  "Channel::SetMinimumPlayoutDelay()");
4517     if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
4518         (delayMs > kVoiceEngineMaxMinPlayoutDelayMs))
4519     {
4520         _engineStatisticsPtr->SetLastError(
4521             VE_INVALID_ARGUMENT, kTraceError,
4522             "SetMinimumPlayoutDelay() invalid min delay");
4523         return -1;
4524     }
4525     if (audio_coding_->SetMinimumPlayoutDelay(delayMs) != 0)
4526     {
4527         _engineStatisticsPtr->SetLastError(
4528             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4529             "SetMinimumPlayoutDelay() failed to set min playout delay");
4530         return -1;
4531     }
4532     return 0;
4533 }
4534
4535 void Channel::UpdatePlayoutTimestamp(bool rtcp) {
4536   uint32_t playout_timestamp = 0;
4537
4538   if (audio_coding_->PlayoutTimestamp(&playout_timestamp) == -1)  {
4539     WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4540                  "Channel::UpdatePlayoutTimestamp() failed to read playout"
4541                  " timestamp from the ACM");
4542     _engineStatisticsPtr->SetLastError(
4543         VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4544         "UpdatePlayoutTimestamp() failed to retrieve timestamp");
4545     return;
4546   }
4547
4548   uint16_t delay_ms = 0;
4549   if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
4550     WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4551                  "Channel::UpdatePlayoutTimestamp() failed to read playout"
4552                  " delay from the ADM");
4553     _engineStatisticsPtr->SetLastError(
4554         VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4555         "UpdatePlayoutTimestamp() failed to retrieve playout delay");
4556     return;
4557   }
4558
4559   int32_t playout_frequency = audio_coding_->PlayoutFrequency();
4560   CodecInst current_receive_codec;
4561   if (audio_coding_->ReceiveCodec(&current_receive_codec) == 0) {
4562     if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
4563       playout_frequency = 8000;
4564     } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
4565       playout_frequency = 48000;
4566     }
4567   }
4568
4569   jitter_buffer_playout_timestamp_ = playout_timestamp;
4570
4571   // Remove the playout delay.
4572   playout_timestamp -= (delay_ms * (playout_frequency / 1000));
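       // For example, with a 48 kHz clock a 60 ms device delay corresponds to
       // 60 * 48 = 2880 timestamp units.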
4573
4574   WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4575                "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %u",
4576                playout_timestamp);
4577
4578   if (rtcp) {
4579     playout_timestamp_rtcp_ = playout_timestamp;
4580   } else {
4581     playout_timestamp_rtp_ = playout_timestamp;
4582   }
4583   playout_delay_ms_ = delay_ms;
4584 }
4585
4586 int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
4587   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4588                "Channel::GetPlayoutTimestamp()");
4589   if (playout_timestamp_rtp_ == 0)  {
4590     _engineStatisticsPtr->SetLastError(
4591         VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4592         "GetPlayoutTimestamp() failed to retrieve timestamp");
4593     return -1;
4594   }
4595   timestamp = playout_timestamp_rtp_;
4596   WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4597                VoEId(_instanceId,_channelId),
4598                "GetPlayoutTimestamp() => timestamp=%u", timestamp);
4599   return 0;
4600 }
4601
4602 int
4603 Channel::SetInitTimestamp(unsigned int timestamp)
4604 {
4605     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4606                "Channel::SetInitTimestamp()");
4607     if (channel_state_.Get().sending)
4608     {
4609         _engineStatisticsPtr->SetLastError(
4610             VE_SENDING, kTraceError, "SetInitTimestamp() already sending");
4611         return -1;
4612     }
4613     if (_rtpRtcpModule->SetStartTimestamp(timestamp) != 0)
4614     {
4615         _engineStatisticsPtr->SetLastError(
4616             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4617             "SetInitTimestamp() failed to set timestamp");
4618         return -1;
4619     }
4620     return 0;
4621 }
4622
4623 int
4624 Channel::SetInitSequenceNumber(short sequenceNumber)
4625 {
4626     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4627                  "Channel::SetInitSequenceNumber()");
4628     if (channel_state_.Get().sending)
4629     {
4630         _engineStatisticsPtr->SetLastError(
4631             VE_SENDING, kTraceError,
4632             "SetInitSequenceNumber() already sending");
4633         return -1;
4634     }
4635     if (_rtpRtcpModule->SetSequenceNumber(sequenceNumber) != 0)
4636     {
4637         _engineStatisticsPtr->SetLastError(
4638             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4639             "SetInitSequenceNumber() failed to set sequence number");
4640         return -1;
4641     }
4642     return 0;
4643 }
4644
4645 int
4646 Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule, RtpReceiver** rtp_receiver) const
4647 {
4648     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4649                  "Channel::GetRtpRtcp()");
4650     *rtpRtcpModule = _rtpRtcpModule.get();
4651     *rtp_receiver = rtp_receiver_.get();
4652     return 0;
4653 }
4654
4655 // TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
4656 // a shared helper.
4657 int32_t
4658 Channel::MixOrReplaceAudioWithFile(int mixingFrequency)
4659 {
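         // Room for one 10 ms chunk of mono file audio at the mixing frequency.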
4660     scoped_array<int16_t> fileBuffer(new int16_t[640]);
4661     int fileSamples(0);
4662
4663     {
4664         CriticalSectionScoped cs(&_fileCritSect);
4665
4666         if (_inputFilePlayerPtr == NULL)
4667         {
4668             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4669                          VoEId(_instanceId, _channelId),
4670                          "Channel::MixOrReplaceAudioWithFile() fileplayer"
4671                              " doesn't exist");
4672             return -1;
4673         }
4674
4675         if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
4676                                                       fileSamples,
4677                                                       mixingFrequency) == -1)
4678         {
4679             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4680                          VoEId(_instanceId, _channelId),
4681                          "Channel::MixOrReplaceAudioWithFile() file mixing "
4682                          "failed");
4683             return -1;
4684         }
4685         if (fileSamples == 0)
4686         {
4687             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4688                          VoEId(_instanceId, _channelId),
4689                          "Channel::MixOrReplaceAudioWithFile() end of file reached");
4690             return 0;
4691         }
4692     }
4693
4694     assert(_audioFrame.samples_per_channel_ == fileSamples);
4695
4696     if (_mixFileWithMicrophone)
4697     {
4698         // Currently file stream is always mono.
4699         // TODO(xians): Change the code when FilePlayer supports real stereo.
4700         Utility::MixWithSat(_audioFrame.data_,
4701                             _audioFrame.num_channels_,
4702                             fileBuffer.get(),
4703                             1,
4704                             fileSamples);
4705     }
4706     else
4707     {
4708         // Replace ACM audio with file.
4709         // Currently file stream is always mono.
4710         // TODO(xians): Change the code when FilePlayer supports real stereo.
4711         _audioFrame.UpdateFrame(_channelId,
4712                                 -1,
4713                                 fileBuffer.get(),
4714                                 fileSamples,
4715                                 mixingFrequency,
4716                                 AudioFrame::kNormalSpeech,
4717                                 AudioFrame::kVadUnknown,
4718                                 1);
4719
4720     }
4721     return 0;
4722 }
4723
4724 int32_t
4725 Channel::MixAudioWithFile(AudioFrame& audioFrame,
4726                           int mixingFrequency)
4727 {
4728     assert(mixingFrequency <= 32000);
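         // 10 ms of mono file audio at up to 32 kHz (see the assert above) is
         // at most 320 samples, well within the 640-sample buffer below.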
4729
4730     scoped_array<int16_t> fileBuffer(new int16_t[640]);
4731     int fileSamples(0);
4732
4733     {
4734         CriticalSectionScoped cs(&_fileCritSect);
4735
4736         if (_outputFilePlayerPtr == NULL)
4737         {
4738             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4739                          VoEId(_instanceId, _channelId),
4740                          "Channel::MixAudioWithFile() output file player doesn't exist");
4741             return -1;
4742         }
4743
4744         // We should get the frequency we ask for.
4745         if (_outputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
4746                                                        fileSamples,
4747                                                        mixingFrequency) == -1)
4748         {
4749             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4750                          VoEId(_instanceId, _channelId),
4751                          "Channel::MixAudioWithFile() file mixing failed");
4752             return -1;
4753         }
4754     }
4755
4756     if (audioFrame.samples_per_channel_ == fileSamples)
4757     {
4758         // Currently file stream is always mono.
4759         // TODO(xians): Change the code when FilePlayer supports real stereo.
4760         Utility::MixWithSat(audioFrame.data_,
4761                             audioFrame.num_channels_,
4762                             fileBuffer.get(),
4763                             1,
4764                             fileSamples);
4765     }
4766     else
4767     {
4768         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4769             "Channel::MixAudioWithFile() samples_per_channel_(%d) != "
4770             "fileSamples(%d)",
4771             audioFrame.samples_per_channel_, fileSamples);
4772         return -1;
4773     }
4774
4775     return 0;
4776 }
4777
4778 int
4779 Channel::InsertInbandDtmfTone()
4780 {
4781     // Check if we should start a new tone.
4782     if (_inbandDtmfQueue.PendingDtmf() &&
4783         !_inbandDtmfGenerator.IsAddingTone() &&
4784         _inbandDtmfGenerator.DelaySinceLastTone() >
4785         kMinTelephoneEventSeparationMs)
4786     {
4787         int8_t eventCode(0);
4788         uint16_t lengthMs(0);
4789         uint8_t attenuationDb(0);
4790
4791         eventCode = _inbandDtmfQueue.NextDtmf(&lengthMs, &attenuationDb);
4792         _inbandDtmfGenerator.AddTone(eventCode, lengthMs, attenuationDb);
4793         if (_playInbandDtmfEvent)
4794         {
4795             // Add tone to output mixer using a reduced length to minimize
4796             // risk of echo.
4797             _outputMixerPtr->PlayDtmfTone(eventCode, lengthMs - 80,
4798                                           attenuationDb);
4799         }
4800     }
4801
4802     if (_inbandDtmfGenerator.IsAddingTone())
4803     {
4804         uint16_t frequency(0);
4805         _inbandDtmfGenerator.GetSampleRate(frequency);
4806
4807         if (frequency != _audioFrame.sample_rate_hz_)
4808         {
4809             // Update sample rate of Dtmf tone since the mixing frequency
4810             // has changed.
4811             _inbandDtmfGenerator.SetSampleRate(
4812                 (uint16_t) (_audioFrame.sample_rate_hz_));
4813             // Reset the tone to be added taking the new sample rate into
4814             // account.
4815             _inbandDtmfGenerator.ResetTone();
4816         }
4817
4818         int16_t toneBuffer[320];
4819         uint16_t toneSamples(0);
4820         // Get 10ms tone segment and set time since last tone to zero
4821         if (_inbandDtmfGenerator.Get10msTone(toneBuffer, toneSamples) == -1)
4822         {
4823             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4824                        VoEId(_instanceId, _channelId),
4825                        "Channel::InsertInbandDtmfTone() inserting Dtmf failed");
4826             return -1;
4827         }
4828
4829         // Replace mixed audio with DTMF tone.
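             // The frame data is interleaved, so the same mono tone sample is
             // written to every channel of the frame.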
4830         for (int sample = 0;
4831             sample < _audioFrame.samples_per_channel_;
4832             sample++)
4833         {
4834             for (int channel = 0;
4835                 channel < _audioFrame.num_channels_;
4836                 channel++)
4837             {
4838                 const int index = sample * _audioFrame.num_channels_ + channel;
4839                 _audioFrame.data_[index] = toneBuffer[sample];
4840             }
4841         }
4842
4843         assert(_audioFrame.samples_per_channel_ == toneSamples);
4844     } else
4845     {
4846         // Add 10ms to "delay-since-last-tone" counter
4847         _inbandDtmfGenerator.UpdateDelaySinceLastTone();
4848     }
4849     return 0;
4850 }
4851
4852 void
4853 Channel::ResetDeadOrAliveCounters()
4854 {
4855     _countDeadDetections = 0;
4856     _countAliveDetections = 0;
4857 }
4858
4859 void
4860 Channel::UpdateDeadOrAliveCounters(bool alive)
4861 {
4862     if (alive)
4863         _countAliveDetections++;
4864     else
4865         _countDeadDetections++;
4866 }
4867
4868 int
4869 Channel::GetDeadOrAliveCounters(int& countDead, int& countAlive) const
4870 {
         // Report the detections accumulated by UpdateDeadOrAliveCounters()
         // since the last call to ResetDeadOrAliveCounters().
         countDead = static_cast<int>(_countDeadDetections);
         countAlive = static_cast<int>(_countAliveDetections);
4871     return 0;
4872 }
4873
4874 int32_t
4875 Channel::SendPacketRaw(const void *data, int len, bool RTCP)
4876 {
4877     CriticalSectionScoped cs(&_callbackCritSect);
4878     if (_transportPtr == NULL)
4879     {
4880         return -1;
4881     }
4882     if (!RTCP)
4883     {
4884         return _transportPtr->SendPacket(_channelId, data, len);
4885     }
4886     else
4887     {
4888         return _transportPtr->SendRTCPPacket(_channelId, data, len);
4889     }
4890 }
4891
4892 // Called for incoming RTP packets after successful RTP header parsing.
4893 void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
4894                                 uint16_t sequence_number) {
4895   WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4896                "Channel::UpdatePacketDelay(timestamp=%u, sequenceNumber=%u)",
4897                rtp_timestamp, sequence_number);
4898
4899   // Get frequency of last received payload
4900   int rtp_receive_frequency = audio_coding_->ReceiveFrequency();
4901
4902   CodecInst current_receive_codec;
4903   if (audio_coding_->ReceiveCodec(&current_receive_codec) != 0) {
4904     return;
4905   }
4906
4907   // Update the least required delay.
4908   least_required_delay_ms_ = audio_coding_->LeastRequiredDelayMs();
4909
4910   if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
4911     // Even though the actual sampling rate for G.722 audio is
4912     // 16,000 Hz, the RTP clock rate for the G722 payload format is
4913     // 8,000 Hz because that value was erroneously assigned in
4914     // RFC 1890 and must remain unchanged for backward compatibility.
4915     rtp_receive_frequency = 8000;
4916   } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
4917     // We are resampling Opus internally to 32,000 Hz until all our
4918     // DSP routines can operate at 48,000 Hz, but the RTP clock
4919     // rate for the Opus payload format is standardized to 48,000 Hz,
4920     // because that is the maximum supported decoding sampling rate.
4921     rtp_receive_frequency = 48000;
4922   }
4923
4924   // |jitter_buffer_playout_timestamp_| is updated in UpdatePlayoutTimestamp()
4925   // for every incoming packet.
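       // (rtp_receive_frequency / 1000) is the number of timestamp ticks per
       // millisecond, e.g. 16 for a 16 kHz clock.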
4926   uint32_t timestamp_diff_ms = (rtp_timestamp -
4927       jitter_buffer_playout_timestamp_) / (rtp_receive_frequency / 1000);
4928   if (!IsNewerTimestamp(rtp_timestamp, jitter_buffer_playout_timestamp_) ||
4929       timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
4930     // If |jitter_buffer_playout_timestamp_| is newer than the incoming RTP
4931     // timestamp, the unsigned subtraction wraps, so the difference is reset
4932     // to zero. This can happen when a network glitch causes a packet to
4933     // arrive late, and during long comfort noise periods with clock drift.
4934     timestamp_diff_ms = 0;
4935   }
4936
4937   uint16_t packet_delay_ms = (rtp_timestamp - _previousTimestamp) /
4938       (rtp_receive_frequency / 1000);
4939
4940   _previousTimestamp = rtp_timestamp;
4941
4942   if (timestamp_diff_ms == 0) return;
4943
4944   if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
4945     _recPacketDelayMs = packet_delay_ms;
4946   }
4947
4948   if (_average_jitter_buffer_delay_us == 0) {
4949     _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000;
4950     return;
4951   }
4952
4953   // Filter the average delay with an exponential filter (alpha = 7/8).
4954   // The average is kept in microseconds (1000x the millisecond value) to
4955   // reduce rounding error; GetDelayEstimate() converts it back to
4956   // milliseconds.
4957   _average_jitter_buffer_delay_us = (_average_jitter_buffer_delay_us * 7 +
4958       1000 * timestamp_diff_ms + 500) / 8;
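       // For example, with a previous average of 40000 us and a new difference
       // of 48 ms: (40000 * 7 + 48000 + 500) / 8 = 41062 us.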
4959 }
4960
4961 void
4962 Channel::RegisterReceiveCodecsToRTPModule()
4963 {
4964     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4965                  "Channel::RegisterReceiveCodecsToRTPModule()");
4966
4967
4968     CodecInst codec;
4969     const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
4970
4971     for (int idx = 0; idx < nSupportedCodecs; idx++)
4972     {
4973         // Open up the RTP/RTCP receiver for all supported codecs
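             // A negative codec.rate (used for codecs with an adaptive or
             // unspecified rate) is registered as a rate of 0.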
4974         if ((audio_coding_->Codec(idx, &codec) == -1) ||
4975             (rtp_receiver_->RegisterReceivePayload(
4976                 codec.plname,
4977                 codec.pltype,
4978                 codec.plfreq,
4979                 codec.channels,
4980                 (codec.rate < 0) ? 0 : codec.rate) == -1))
4981         {
4982             WEBRTC_TRACE(
4983                          kTraceWarning,
4984                          kTraceVoice,
4985                          VoEId(_instanceId, _channelId),
4986                          "Channel::RegisterReceiveCodecsToRTPModule() unable"
4987                          " to register %s (%d/%d/%d/%d) to RTP/RTCP receiver",
4988                          codec.plname, codec.pltype, codec.plfreq,
4989                          codec.channels, codec.rate);
4990         }
4991         else
4992         {
4993             WEBRTC_TRACE(
4994                          kTraceInfo,
4995                          kTraceVoice,
4996                          VoEId(_instanceId, _channelId),
4997                          "Channel::RegisterReceiveCodecsToRTPModule() %s "
4998                          "(%d/%d/%d/%d) has been added to the RTP/RTCP "
4999                          "receiver",
5000                          codec.plname, codec.pltype, codec.plfreq,
5001                          codec.channels, codec.rate);
5002         }
5003     }
5004 }
5005
5006 int Channel::SetSecondarySendCodec(const CodecInst& codec,
5007                                    int red_payload_type) {
5008   // Sanity check for payload type.
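       // RTP payload types are 7-bit values, so only 0-127 are valid.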
5009   if (red_payload_type < 0 || red_payload_type > 127) {
5010     _engineStatisticsPtr->SetLastError(
5011         VE_PLTYPE_ERROR, kTraceError,
5012         "SetSecondarySendCodec() invalid RED payload type");
5013     return -1;
5014   }
5015
5016   if (SetRedPayloadType(red_payload_type) < 0) {
5017     _engineStatisticsPtr->SetLastError(
5018         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5019         "SetSecondarySendCodec() Failed to register RED in ACM");
5020     return -1;
5021   }
5022   if (audio_coding_->RegisterSecondarySendCodec(codec) < 0) {
5023     _engineStatisticsPtr->SetLastError(
5024         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5025         "SetSecondarySendCodec() Failed to register secondary send codec in "
5026         "ACM");
5027     return -1;
5028   }
5029
5030   return 0;
5031 }
5032
5033 void Channel::RemoveSecondarySendCodec() {
5034   audio_coding_->UnregisterSecondarySendCodec();
5035 }
5036
5037 int Channel::GetSecondarySendCodec(CodecInst* codec) {
5038   if (audio_coding_->SecondarySendCodec(codec) < 0) {
5039     _engineStatisticsPtr->SetLastError(
5040         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5041         "GetSecondarySendCodec() Failed to get secondary send codec from ACM");
5042     return -1;
5043   }
5044   return 0;
5045 }
5046
5047 // Assuming this method is called with valid payload type.
5048 int Channel::SetRedPayloadType(int red_payload_type) {
5049   CodecInst codec;
5050   bool found_red = false;
5051
5052   // Get default RED settings from the ACM database
5053   const int num_codecs = AudioCodingModule::NumberOfCodecs();
5054   for (int idx = 0; idx < num_codecs; idx++) {
5055     audio_coding_->Codec(idx, &codec);
5056     if (!STR_CASE_CMP(codec.plname, "RED")) {
5057       found_red = true;
5058       break;
5059     }
5060   }
5061
5062   if (!found_red) {
5063     _engineStatisticsPtr->SetLastError(
5064         VE_CODEC_ERROR, kTraceError,
5065         "SetRedPayloadType() RED is not supported");
5066     return -1;
5067   }
5068
5069   codec.pltype = red_payload_type;
5070   if (audio_coding_->RegisterSendCodec(codec) < 0) {
5071     _engineStatisticsPtr->SetLastError(
5072         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
5073         "SetRedPayloadType() RED registration in ACM module failed");
5074     return -1;
5075   }
5076
5077   if (_rtpRtcpModule->SetSendREDPayloadType(red_payload_type) != 0) {
5078     _engineStatisticsPtr->SetLastError(
5079         VE_RTP_RTCP_MODULE_ERROR, kTraceError,
5080         "SetRedPayloadType() RED registration in RTP/RTCP module failed");
5081     return -1;
5082   }
5083   return 0;
5084 }
5085
5086 int Channel::SetSendRtpHeaderExtension(bool enable, RTPExtensionType type,
5087                                        unsigned char id) {
5088   int error = 0;
5089   _rtpRtcpModule->DeregisterSendRtpHeaderExtension(type);
5090   if (enable) {
5091     error = _rtpRtcpModule->RegisterSendRtpHeaderExtension(type, id);
5092   }
5093   return error;
5094 }
5095 }  // namespace voe
5096 }  // namespace webrtc