[platform/framework/web/crosswalk.git] / src / third_party / webrtc / voice_engine / channel.cc
1 /*
2  *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3  *
4  *  Use of this source code is governed by a BSD-style license
5  *  that can be found in the LICENSE file in the root of the source
6  *  tree. An additional intellectual property rights grant can be found
7  *  in the file PATENTS.  All contributing project authors may
8  *  be found in the AUTHORS file in the root of the source tree.
9  */
10
11 #include "webrtc/voice_engine/channel.h"
12
13 #include "webrtc/common.h"
14 #include "webrtc/modules/audio_device/include/audio_device.h"
15 #include "webrtc/modules/audio_processing/include/audio_processing.h"
16 #include "webrtc/modules/interface/module_common_types.h"
17 #include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
18 #include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
19 #include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
20 #include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
21 #include "webrtc/modules/utility/interface/audio_frame_operations.h"
22 #include "webrtc/modules/utility/interface/process_thread.h"
23 #include "webrtc/modules/utility/interface/rtp_dump.h"
24 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
25 #include "webrtc/system_wrappers/interface/logging.h"
26 #include "webrtc/system_wrappers/interface/trace.h"
27 #include "webrtc/video_engine/include/vie_network.h"
28 #include "webrtc/voice_engine/include/voe_base.h"
29 #include "webrtc/voice_engine/include/voe_external_media.h"
30 #include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
31 #include "webrtc/voice_engine/output_mixer.h"
32 #include "webrtc/voice_engine/statistics.h"
33 #include "webrtc/voice_engine/transmit_mixer.h"
34 #include "webrtc/voice_engine/utility.h"
35
36 #if defined(_WIN32)
37 #include <Qos.h>
38 #endif
39
40 namespace webrtc {
41 namespace voe {
42
43 // Extend the default RTCP statistics struct with max_jitter, defined as the
44 // maximum jitter value seen in an RTCP report block.
45 struct ChannelStatistics : public RtcpStatistics {
46   ChannelStatistics() : rtcp(), max_jitter(0) {}
47
48   RtcpStatistics rtcp;
49   uint32_t max_jitter;
50 };
51
52 // Statistics callback, called at each generation of a new RTCP report block.
53 class StatisticsProxy : public RtcpStatisticsCallback {
54  public:
55   StatisticsProxy(uint32_t ssrc)
56    : stats_lock_(CriticalSectionWrapper::CreateCriticalSection()),
57      ssrc_(ssrc) {}
58   virtual ~StatisticsProxy() {}
59
60   virtual void StatisticsUpdated(const RtcpStatistics& statistics,
61                                  uint32_t ssrc) OVERRIDE {
62     if (ssrc != ssrc_)
63       return;
64
65     CriticalSectionScoped cs(stats_lock_.get());
66     stats_.rtcp = statistics;
67     if (statistics.jitter > stats_.max_jitter) {
68       stats_.max_jitter = statistics.jitter;
69     }
70   }
71
72   void ResetStatistics() {
73     CriticalSectionScoped cs(stats_lock_.get());
74     stats_ = ChannelStatistics();
75   }
76
77   ChannelStatistics GetStats() {
78     CriticalSectionScoped cs(stats_lock_.get());
79     return stats_;
80   }
81
82  private:
83   // StatisticsUpdated calls are triggered from threads in the RTP module,
84   // while GetStats calls can be triggered from the public voice engine API,
85   // hence synchronization is needed.
86   scoped_ptr<CriticalSectionWrapper> stats_lock_;
87   const uint32_t ssrc_;
88   ChannelStatistics stats_;
89 };
90
91 int32_t
92 Channel::SendData(FrameType frameType,
93                   uint8_t   payloadType,
94                   uint32_t  timeStamp,
95                   const uint8_t*  payloadData,
96                   uint16_t  payloadSize,
97                   const RTPFragmentationHeader* fragmentation)
98 {
99     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
100                  "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
101                  " payloadSize=%u, fragmentation=0x%x)",
102                  frameType, payloadType, timeStamp, payloadSize, fragmentation);
103
104     if (_includeAudioLevelIndication)
105     {
106         // Store current audio level in the RTP/RTCP module.
107         // The level will be used in combination with voice-activity state
108         // (frameType) to add an RTP header extension
109         _rtpRtcpModule->SetAudioLevel(rms_level_.RMS());
110     }
111
112     // Push data from ACM to RTP/RTCP-module to deliver audio frame for
113     // packetization.
114     // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
115     if (_rtpRtcpModule->SendOutgoingData((FrameType&)frameType,
116                                         payloadType,
117                                         timeStamp,
118                                         // Leaving the time when this frame was
119                                         // received from the capture device as
120                                         // undefined for voice for now.
121                                         -1,
122                                         payloadData,
123                                         payloadSize,
124                                         fragmentation) == -1)
125     {
126         _engineStatisticsPtr->SetLastError(
127             VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
128             "Channel::SendData() failed to send data to RTP/RTCP module");
129         return -1;
130     }
131
132     _lastLocalTimeStamp = timeStamp;
133     _lastPayloadType = payloadType;
134
135     return 0;
136 }
137
138 int32_t
139 Channel::InFrameType(int16_t frameType)
140 {
141     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
142                  "Channel::InFrameType(frameType=%d)", frameType);
143
144     CriticalSectionScoped cs(&_callbackCritSect);
145     // 1 indicates speech
146     _sendFrameType = (frameType == 1) ? 1 : 0;
147     return 0;
148 }
149
150 int32_t
151 Channel::OnRxVadDetected(int vadDecision)
152 {
153     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
154                  "Channel::OnRxVadDetected(vadDecision=%d)", vadDecision);
155
156     CriticalSectionScoped cs(&_callbackCritSect);
157     if (_rxVadObserverPtr)
158     {
159         _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
160     }
161
162     return 0;
163 }
164
165 int
166 Channel::SendPacket(int channel, const void *data, int len)
167 {
168     channel = VoEChannelId(channel);
169     assert(channel == _channelId);
170
171     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
172                  "Channel::SendPacket(channel=%d, len=%d)", channel, len);
173
174     CriticalSectionScoped cs(&_callbackCritSect);
175
176     if (_transportPtr == NULL)
177     {
178         WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
179                      "Channel::SendPacket() failed to send RTP packet due to"
180                      " invalid transport object");
181         return -1;
182     }
183
184     uint8_t* bufferToSendPtr = (uint8_t*)data;
185     int32_t bufferLength = len;
186
187     // Dump the RTP packet to a file (if RTP dump is enabled).
188     if (_rtpDumpOut.DumpPacket((const uint8_t*)data, len) == -1)
189     {
190         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
191                      VoEId(_instanceId,_channelId),
192                      "Channel::SendPacket() RTP dump to output file failed");
193     }
194
195     int n = _transportPtr->SendPacket(channel, bufferToSendPtr,
196                                       bufferLength);
197     if (n < 0) {
198       std::string transport_name =
199           _externalTransport ? "external transport" : "WebRtc sockets";
200       WEBRTC_TRACE(kTraceError, kTraceVoice,
201                    VoEId(_instanceId,_channelId),
202                    "Channel::SendPacket() RTP transmission using %s failed",
203                    transport_name.c_str());
204       return -1;
205     }
206     return n;
207 }
208
209 int
210 Channel::SendRTCPPacket(int channel, const void *data, int len)
211 {
212     channel = VoEChannelId(channel);
213     assert(channel == _channelId);
214
215     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
216                  "Channel::SendRTCPPacket(channel=%d, len=%d)", channel, len);
217
218     CriticalSectionScoped cs(&_callbackCritSect);
219     if (_transportPtr == NULL)
220     {
221         WEBRTC_TRACE(kTraceError, kTraceVoice,
222                      VoEId(_instanceId,_channelId),
223                      "Channel::SendRTCPPacket() failed to send RTCP packet"
224                      " due to invalid transport object");
225         return -1;
226     }
227
228     uint8_t* bufferToSendPtr = (uint8_t*)data;
229     int32_t bufferLength = len;
230
231     // Dump the RTCP packet to a file (if RTP dump is enabled).
232     if (_rtpDumpOut.DumpPacket((const uint8_t*)data, len) == -1)
233     {
234         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
235                      VoEId(_instanceId,_channelId),
236                      "Channel::SendRTCPPacket() RTCP dump to output file failed");
237     }
238
239     int n = _transportPtr->SendRTCPPacket(channel,
240                                           bufferToSendPtr,
241                                           bufferLength);
242     if (n < 0) {
243       std::string transport_name =
244           _externalTransport ? "external transport" : "WebRtc sockets";
245       WEBRTC_TRACE(kTraceInfo, kTraceVoice,
246                    VoEId(_instanceId,_channelId),
247                    "Channel::SendRTCPPacket() transmission using %s failed",
248                    transport_name.c_str());
249       return -1;
250     }
251     return n;
252 }
253
254 void
255 Channel::OnPlayTelephoneEvent(int32_t id,
256                               uint8_t event,
257                               uint16_t lengthMs,
258                               uint8_t volume)
259 {
260     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
261                  "Channel::OnPlayTelephoneEvent(id=%d, event=%u, lengthMs=%u,"
262                  " volume=%u)", id, event, lengthMs, volume);
263
264     if (!_playOutbandDtmfEvent || (event > 15))
265     {
266         // Ignore callback since feedback is disabled or event is not a
267         // Dtmf tone event.
268         return;
269     }
270
271     assert(_outputMixerPtr != NULL);
272
273     // Start playing out the Dtmf tone (if playout is enabled).
274     // Reduce the tone length by 80 ms to reduce the risk of echo.
275     _outputMixerPtr->PlayDtmfTone(event, lengthMs - 80, volume);
276 }
277
278 void
279 Channel::OnIncomingSSRCChanged(int32_t id, uint32_t ssrc)
280 {
281     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
282                  "Channel::OnIncomingSSRCChanged(id=%d, SSRC=%d)",
283                  id, ssrc);
284
285     int32_t channel = VoEChannelId(id);
286     assert(channel == _channelId);
287
288     // Update ssrc so that NTP for AV sync can be updated.
289     _rtpRtcpModule->SetRemoteSSRC(ssrc);
290
291     if (_rtpObserver)
292     {
293         CriticalSectionScoped cs(&_callbackCritSect);
294
295         if (_rtpObserverPtr)
296         {
297             // Send new SSRC to registered observer using callback
298             _rtpObserverPtr->OnIncomingSSRCChanged(channel, ssrc);
299         }
300     }
301 }
302
303 void Channel::OnIncomingCSRCChanged(int32_t id,
304                                     uint32_t CSRC,
305                                     bool added)
306 {
307     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
308                  "Channel::OnIncomingCSRCChanged(id=%d, CSRC=%d, added=%d)",
309                  id, CSRC, added);
310
311     int32_t channel = VoEChannelId(id);
312     assert(channel == _channelId);
313
314     if (_rtpObserver)
315     {
316         CriticalSectionScoped cs(&_callbackCritSect);
317
318         if (_rtpObserverPtr)
319         {
320             _rtpObserverPtr->OnIncomingCSRCChanged(channel, CSRC, added);
321         }
322     }
323 }
324
325 void Channel::ResetStatistics(uint32_t ssrc) {
326   StreamStatistician* statistician =
327       rtp_receive_statistics_->GetStatistician(ssrc);
328   if (statistician) {
329     statistician->ResetStatistics();
330   }
331   statistics_proxy_->ResetStatistics();
332 }
333
334 void
335 Channel::OnApplicationDataReceived(int32_t id,
336                                    uint8_t subType,
337                                    uint32_t name,
338                                    uint16_t length,
339                                    const uint8_t* data)
340 {
341     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
342                  "Channel::OnApplicationDataReceived(id=%d, subType=%u,"
343                  " name=%u, length=%u)",
344                  id, subType, name, length);
345
346     int32_t channel = VoEChannelId(id);
347     assert(channel == _channelId);
348
349     if (_rtcpObserver)
350     {
351         CriticalSectionScoped cs(&_callbackCritSect);
352
353         if (_rtcpObserverPtr)
354         {
355             _rtcpObserverPtr->OnApplicationDataReceived(channel,
356                                                         subType,
357                                                         name,
358                                                         data,
359                                                         length);
360         }
361     }
362 }
363
364 int32_t
365 Channel::OnInitializeDecoder(
366     int32_t id,
367     int8_t payloadType,
368     const char payloadName[RTP_PAYLOAD_NAME_SIZE],
369     int frequency,
370     uint8_t channels,
371     uint32_t rate)
372 {
373     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
374                  "Channel::OnInitializeDecoder(id=%d, payloadType=%d, "
375                  "payloadName=%s, frequency=%u, channels=%u, rate=%u)",
376                  id, payloadType, payloadName, frequency, channels, rate);
377
378     assert(VoEChannelId(id) == _channelId);
379
380     CodecInst receiveCodec = {0};
381     CodecInst dummyCodec = {0};
382
383     receiveCodec.pltype = payloadType;
384     receiveCodec.plfreq = frequency;
385     receiveCodec.channels = channels;
386     receiveCodec.rate = rate;
387     strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
388
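    // Look up the matching codec in the ACM to obtain a valid packet size
    // (pacsize) for the receive codec.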
389     audio_coding_->Codec(payloadName, &dummyCodec, frequency, channels);
390     receiveCodec.pacsize = dummyCodec.pacsize;
391
392     // Register the new codec to the ACM
393     if (audio_coding_->RegisterReceiveCodec(receiveCodec) == -1)
394     {
395         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
396                      VoEId(_instanceId, _channelId),
397                      "Channel::OnInitializeDecoder() invalid codec ("
398                      "pt=%d, name=%s) received - 1", payloadType, payloadName);
399         _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
400         return -1;
401     }
402
403     return 0;
404 }
405
406 void
407 Channel::OnPacketTimeout(int32_t id)
408 {
409     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
410                  "Channel::OnPacketTimeout(id=%d)", id);
411
412     CriticalSectionScoped cs(_callbackCritSectPtr);
413     if (_voiceEngineObserverPtr)
414     {
415         if (channel_state_.Get().receiving || _externalTransport)
416         {
417             int32_t channel = VoEChannelId(id);
418             assert(channel == _channelId);
419             // Ensure that next OnReceivedPacket() callback will trigger
420             // a VE_PACKET_RECEIPT_RESTARTED callback.
421             _rtpPacketTimedOut = true;
422             // Deliver callback to the observer
423             WEBRTC_TRACE(kTraceInfo, kTraceVoice,
424                          VoEId(_instanceId,_channelId),
425                          "Channel::OnPacketTimeout() => "
426                          "CallbackOnError(VE_RECEIVE_PACKET_TIMEOUT)");
427             _voiceEngineObserverPtr->CallbackOnError(channel,
428                                                      VE_RECEIVE_PACKET_TIMEOUT);
429         }
430     }
431 }
432
433 void
434 Channel::OnReceivedPacket(int32_t id,
435                           RtpRtcpPacketType packetType)
436 {
437     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
438                  "Channel::OnReceivedPacket(id=%d, packetType=%d)",
439                  id, packetType);
440
441     assert(VoEChannelId(id) == _channelId);
442
443     // Notify only for the case when we have restarted an RTP session.
444     if (_rtpPacketTimedOut && (kPacketRtp == packetType))
445     {
446         CriticalSectionScoped cs(_callbackCritSectPtr);
447         if (_voiceEngineObserverPtr)
448         {
449             int32_t channel = VoEChannelId(id);
450             assert(channel == _channelId);
451             // Reset timeout mechanism
452             _rtpPacketTimedOut = false;
453             // Deliver callback to the observer
454             WEBRTC_TRACE(kTraceInfo, kTraceVoice,
455                          VoEId(_instanceId,_channelId),
456                          "Channel::OnReceivedPacket() =>"
457                          " CallbackOnError(VE_PACKET_RECEIPT_RESTARTED)");
458             _voiceEngineObserverPtr->CallbackOnError(
459                 channel,
460                 VE_PACKET_RECEIPT_RESTARTED);
461         }
462     }
463 }
464
465 void
466 Channel::OnPeriodicDeadOrAlive(int32_t id,
467                                RTPAliveType alive)
468 {
469     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
470                  "Channel::OnPeriodicDeadOrAlive(id=%d, alive=%d)", id, alive);
471
472     {
473         CriticalSectionScoped cs(&_callbackCritSect);
474         if (!_connectionObserver)
475             return;
476     }
477
478     int32_t channel = VoEChannelId(id);
479     assert(channel == _channelId);
480
481     // Use Alive as default to limit risk of false Dead detections
482     bool isAlive(true);
483
484     // Always mark the connection as Dead when the module reports kRtpDead
485     if (kRtpDead == alive)
486     {
487         isAlive = false;
488     }
489
490     // It is possible that the connection is alive even if no RTP packet has
491     // been received for a long time since the other side might use VAD/DTX
492     // and a low SID-packet update rate.
493     if ((kRtpNoRtp == alive) && channel_state_.Get().playing)
494     {
495         // Detect Alive for all NetEQ states except for the case when we are
496         // in PLC_CNG state.
497         // PLC_CNG <=> background noise only due to long expand or error.
498         // Note that the case where the other side stops sending during CNG
499         // state will be detected as Alive. Dead is not set until RTCP
500         // packets have been missing for at least twelve seconds (handled
501         // internally by the RTP/RTCP module).
502         isAlive = (_outputSpeechType != AudioFrame::kPLCCNG);
503     }
504
505     // Send callback to the registered observer
506     if (_connectionObserver)
507     {
508         CriticalSectionScoped cs(&_callbackCritSect);
509         if (_connectionObserverPtr)
510         {
511             _connectionObserverPtr->OnPeriodicDeadOrAlive(channel, isAlive);
512         }
513     }
514 }
515
516 int32_t
517 Channel::OnReceivedPayloadData(const uint8_t* payloadData,
518                                uint16_t payloadSize,
519                                const WebRtcRTPHeader* rtpHeader)
520 {
521     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
522                  "Channel::OnReceivedPayloadData(payloadSize=%d,"
523                  " payloadType=%u, audioChannel=%u)",
524                  payloadSize,
525                  rtpHeader->header.payloadType,
526                  rtpHeader->type.Audio.channel);
527
528     _lastRemoteTimeStamp = rtpHeader->header.timestamp;
529
530     if (!channel_state_.Get().playing)
531     {
532         // Avoid inserting into NetEQ when we are not playing. Count the
533         // packet as discarded.
534         WEBRTC_TRACE(kTraceStream, kTraceVoice,
535                      VoEId(_instanceId, _channelId),
536                      "received packet is discarded since playing is not"
537                      " activated");
538         _numberOfDiscardedPackets++;
539         return 0;
540     }
541
542     // Push the incoming payload (parsed and ready for decoding) into the ACM
543     if (audio_coding_->IncomingPacket(payloadData,
544                                       payloadSize,
545                                       *rtpHeader) != 0)
546     {
547         _engineStatisticsPtr->SetLastError(
548             VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
549             "Channel::OnReceivedPayloadData() unable to push data to the ACM");
550         return -1;
551     }
552
553     // Update the packet delay.
554     UpdatePacketDelay(rtpHeader->header.timestamp,
555                       rtpHeader->header.sequenceNumber);
556
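    // Query the RTP/RTCP module for the current round-trip time; the ACM
    // uses it when building the NACK list below.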
557     uint16_t round_trip_time = 0;
558     _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time,
559                         NULL, NULL, NULL);
560
561     std::vector<uint16_t> nack_list = audio_coding_->GetNackList(
562         round_trip_time);
563     if (!nack_list.empty()) {
564       // Can't use nack_list.data() since it's not supported by all
565       // compilers.
566       ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
567     }
568     return 0;
569 }
570
571 bool Channel::OnRecoveredPacket(const uint8_t* rtp_packet,
572                                 int rtp_packet_length) {
573   RTPHeader header;
574   if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
575     WEBRTC_TRACE(kTraceDebug, webrtc::kTraceVoice, _channelId,
576                  "IncomingPacket invalid RTP header");
577     return false;
578   }
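  // Look up the payload clock rate; an unknown payload type means the packet
  // cannot be handled.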
579   header.payload_type_frequency =
580       rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
581   if (header.payload_type_frequency < 0)
582     return false;
583   return ReceivePacket(rtp_packet, rtp_packet_length, header, false);
584 }
585
586 int32_t Channel::GetAudioFrame(int32_t id, AudioFrame& audioFrame)
587 {
588     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
589                  "Channel::GetAudioFrame(id=%d)", id);
590
591     // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
592     if (audio_coding_->PlayoutData10Ms(audioFrame.sample_rate_hz_,
593                                        &audioFrame) == -1)
594     {
595         WEBRTC_TRACE(kTraceError, kTraceVoice,
596                      VoEId(_instanceId,_channelId),
597                      "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
598         // In all likelihood, the audio in this frame is garbage. We return an
599         // error so that the audio mixer module doesn't add it to the mix. As
600         // a result, it won't be played out and the actions skipped here are
601         // irrelevant.
602         return -1;
603     }
604
605     if (_RxVadDetection)
606     {
607         UpdateRxVadDetection(audioFrame);
608     }
609
610     // Convert module ID to internal VoE channel ID
611     audioFrame.id_ = VoEChannelId(audioFrame.id_);
612     // Store speech type for dead-or-alive detection
613     _outputSpeechType = audioFrame.speech_type_;
614
615     ChannelState::State state = channel_state_.Get();
616
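    // Run the decoded frame through the receive-side audio processing module
    // (receive-side AGC/NS) when it is enabled.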
617     if (state.rx_apm_is_enabled) {
618       int err = rx_audioproc_->ProcessStream(&audioFrame);
619       if (err) {
620         LOG(LS_ERROR) << "ProcessStream() error: " << err;
621         assert(false);
622       }
623     }
624
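    // Take a snapshot of the output gain and panning settings under the
    // volume lock.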
625     float output_gain = 1.0f;
626     float left_pan =  1.0f;
627     float right_pan =  1.0f;
628     {
629       CriticalSectionScoped cs(&volume_settings_critsect_);
630       output_gain = _outputGain;
631       left_pan = _panLeft;
632       right_pan= _panRight;
633     }
634
635     // Output volume scaling
636     if (output_gain < 0.99f || output_gain > 1.01f)
637     {
638         AudioFrameOperations::ScaleWithSat(output_gain, audioFrame);
639     }
640
641     // Scale left and/or right channel(s) if stereo and master balance is
642     // active
643
644     if (left_pan != 1.0f || right_pan != 1.0f)
645     {
646         if (audioFrame.num_channels_ == 1)
647         {
648             // Emulate stereo mode since panning is active.
649             // The mono signal is copied to both left and right channels here.
650             AudioFrameOperations::MonoToStereo(&audioFrame);
651         }
652         // For true stereo mode (when we are receiving a stereo signal), no
653         // action is needed.
654
655         // Do the panning operation (the audio frame contains stereo at this
656         // stage)
657         AudioFrameOperations::Scale(left_pan, right_pan, audioFrame);
658     }
659
660     // Mix decoded PCM output with file if file mixing is enabled
661     if (state.output_file_playing)
662     {
663         MixAudioWithFile(audioFrame, audioFrame.sample_rate_hz_);
664     }
665
666     // External media
667     if (_outputExternalMedia)
668     {
669         CriticalSectionScoped cs(&_callbackCritSect);
670         const bool isStereo = (audioFrame.num_channels_ == 2);
671         if (_outputExternalMediaCallbackPtr)
672         {
673             _outputExternalMediaCallbackPtr->Process(
674                 _channelId,
675                 kPlaybackPerChannel,
676                 (int16_t*)audioFrame.data_,
677                 audioFrame.samples_per_channel_,
678                 audioFrame.sample_rate_hz_,
679                 isStereo);
680         }
681     }
682
683     // Record playout if enabled
684     {
685         CriticalSectionScoped cs(&_fileCritSect);
686
687         if (_outputFileRecording && _outputFileRecorderPtr)
688         {
689             _outputFileRecorderPtr->RecordAudioToFile(audioFrame);
690         }
691     }
692
693     // Measure audio level (0-9)
694     _outputAudioLevel.ComputeLevel(audioFrame);
695
696     return 0;
697 }
698
699 int32_t
700 Channel::NeededFrequency(int32_t id)
701 {
702     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
703                  "Channel::NeededFrequency(id=%d)", id);
704
705     int highestNeeded = 0;
706
707     // Determine highest needed receive frequency
708     int32_t receiveFrequency = audio_coding_->ReceiveFrequency();
709
710     // Return the bigger of playout and receive frequency in the ACM.
711     if (audio_coding_->PlayoutFrequency() > receiveFrequency)
712     {
713         highestNeeded = audio_coding_->PlayoutFrequency();
714     }
715     else
716     {
717         highestNeeded = receiveFrequency;
718     }
719
720     // Special case: if we're playing a file on the playout side, we
721     // take that frequency into consideration as well.
722     // This is not needed on the sending side, since the codec will
723     // limit the spectrum anyway.
724     if (channel_state_.Get().output_file_playing)
725     {
726         CriticalSectionScoped cs(&_fileCritSect);
727         if (_outputFilePlayerPtr)
728         {
729             if (_outputFilePlayerPtr->Frequency() > highestNeeded)
730             {
731                 highestNeeded = _outputFilePlayerPtr->Frequency();
732             }
733         }
734     }
735
736     return highestNeeded;
737 }
738
739 int32_t
740 Channel::CreateChannel(Channel*& channel,
741                        int32_t channelId,
742                        uint32_t instanceId,
743                        const Config& config)
744 {
745     WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId,channelId),
746                  "Channel::CreateChannel(channelId=%d, instanceId=%d)",
747         channelId, instanceId);
748
749     channel = new Channel(channelId, instanceId, config);
750     if (channel == NULL)
751     {
752         WEBRTC_TRACE(kTraceMemory, kTraceVoice,
753                      VoEId(instanceId,channelId),
754                      "Channel::CreateChannel() unable to allocate memory for"
755                      " channel");
756         return -1;
757     }
758     return 0;
759 }
760
761 void
762 Channel::PlayNotification(int32_t id, uint32_t durationMs)
763 {
764     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
765                  "Channel::PlayNotification(id=%d, durationMs=%d)",
766                  id, durationMs);
767
768     // Not implemented yet.
769 }
770
771 void
772 Channel::RecordNotification(int32_t id, uint32_t durationMs)
773 {
774     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
775                  "Channel::RecordNotification(id=%d, durationMs=%d)",
776                  id, durationMs);
777
778     // Not implemented yet.
779 }
780
781 void
782 Channel::PlayFileEnded(int32_t id)
783 {
784     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
785                  "Channel::PlayFileEnded(id=%d)", id);
786
787     if (id == _inputFilePlayerId)
788     {
789         channel_state_.SetInputFilePlaying(false);
790         WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
791                      VoEId(_instanceId,_channelId),
792                      "Channel::PlayFileEnded() => input file player module is"
793                      " shutdown");
794     }
795     else if (id == _outputFilePlayerId)
796     {
797         channel_state_.SetOutputFilePlaying(false);
798         WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
799                      VoEId(_instanceId,_channelId),
800                      "Channel::PlayFileEnded() => output file player module is"
801                      " shutdown");
802     }
803 }
804
805 void
806 Channel::RecordFileEnded(int32_t id)
807 {
808     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
809                  "Channel::RecordFileEnded(id=%d)", id);
810
811     assert(id == _outputFileRecorderId);
812
813     CriticalSectionScoped cs(&_fileCritSect);
814
815     _outputFileRecording = false;
816     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
817                  VoEId(_instanceId,_channelId),
818                  "Channel::RecordFileEnded() => output file recorder module is"
819                  " shutdown");
820 }
821
822 Channel::Channel(int32_t channelId,
823                  uint32_t instanceId,
824                  const Config& config) :
825     _fileCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
826     _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
827     volume_settings_critsect_(*CriticalSectionWrapper::CreateCriticalSection()),
828     _instanceId(instanceId),
829     _channelId(channelId),
830     rtp_header_parser_(RtpHeaderParser::Create()),
831     rtp_payload_registry_(
832         new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(true))),
833     rtp_receive_statistics_(ReceiveStatistics::Create(
834         Clock::GetRealTimeClock())),
835     rtp_receiver_(RtpReceiver::CreateAudioReceiver(
836         VoEModuleId(instanceId, channelId), Clock::GetRealTimeClock(), this,
837         this, this, rtp_payload_registry_.get())),
838     telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
839     audio_coding_(AudioCodingModule::Create(
840         VoEModuleId(instanceId, channelId))),
841     _rtpDumpIn(*RtpDump::CreateRtpDump()),
842     _rtpDumpOut(*RtpDump::CreateRtpDump()),
843     _outputAudioLevel(),
844     _externalTransport(false),
845     _audioLevel_dBov(0),
846     _inputFilePlayerPtr(NULL),
847     _outputFilePlayerPtr(NULL),
848     _outputFileRecorderPtr(NULL),
849     // Avoid conflicts with other channels by adding 1024 - 1026;
850     // we won't use as many as 1024 channels.
851     _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
852     _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
853     _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
854     _outputFileRecording(false),
855     _inbandDtmfQueue(VoEModuleId(instanceId, channelId)),
856     _inbandDtmfGenerator(VoEModuleId(instanceId, channelId)),
857     _outputExternalMedia(false),
858     _inputExternalMediaCallbackPtr(NULL),
859     _outputExternalMediaCallbackPtr(NULL),
860     _timeStamp(0), // This is just an offset; the RTP module will add its own random offset
861     _sendTelephoneEventPayloadType(106),
862     jitter_buffer_playout_timestamp_(0),
863     playout_timestamp_rtp_(0),
864     playout_timestamp_rtcp_(0),
865     playout_delay_ms_(0),
866     _numberOfDiscardedPackets(0),
867     send_sequence_number_(0),
868     _engineStatisticsPtr(NULL),
869     _outputMixerPtr(NULL),
870     _transmitMixerPtr(NULL),
871     _moduleProcessThreadPtr(NULL),
872     _audioDeviceModulePtr(NULL),
873     _voiceEngineObserverPtr(NULL),
874     _callbackCritSectPtr(NULL),
875     _transportPtr(NULL),
876     _rxVadObserverPtr(NULL),
877     _oldVadDecision(-1),
878     _sendFrameType(0),
879     _rtpObserverPtr(NULL),
880     _rtcpObserverPtr(NULL),
881     _externalPlayout(false),
882     _externalMixing(false),
883     _mixFileWithMicrophone(false),
884     _rtpObserver(false),
885     _rtcpObserver(false),
886     _mute(false),
887     _panLeft(1.0f),
888     _panRight(1.0f),
889     _outputGain(1.0f),
890     _playOutbandDtmfEvent(false),
891     _playInbandDtmfEvent(false),
892     _lastLocalTimeStamp(0),
893     _lastRemoteTimeStamp(0),
894     _lastPayloadType(0),
895     _includeAudioLevelIndication(false),
896     _rtpPacketTimedOut(false),
897     _rtpPacketTimeOutIsEnabled(false),
898     _rtpTimeOutSeconds(0),
899     _connectionObserver(false),
900     _connectionObserverPtr(NULL),
901     _outputSpeechType(AudioFrame::kNormalSpeech),
902     vie_network_(NULL),
903     video_channel_(-1),
904     _average_jitter_buffer_delay_us(0),
905     least_required_delay_ms_(0),
906     _previousTimestamp(0),
907     _recPacketDelayMs(20),
908     _RxVadDetection(false),
909     _rxAgcIsEnabled(false),
910     _rxNsIsEnabled(false),
911     restored_packet_in_use_(false)
912 {
913     WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
914                  "Channel::Channel() - ctor");
915     _inbandDtmfQueue.ResetDtmf();
916     _inbandDtmfGenerator.Init();
917     _outputAudioLevel.Clear();
918
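    // Configure and create the RTP/RTCP module; this channel acts as the
    // outgoing transport, RTCP feedback target and audio message sink.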
919     RtpRtcp::Configuration configuration;
920     configuration.id = VoEModuleId(instanceId, channelId);
921     configuration.audio = true;
922     configuration.outgoing_transport = this;
923     configuration.rtcp_feedback = this;
924     configuration.audio_messages = this;
925     configuration.receive_statistics = rtp_receive_statistics_.get();
926
927     _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
928
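    // Register a statistics proxy for this channel's SSRC; each new RTCP
    // report block updates it, tracking the maximum observed jitter.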
929     statistics_proxy_.reset(new StatisticsProxy(_rtpRtcpModule->SSRC()));
930     rtp_receive_statistics_->RegisterRtcpStatisticsCallback(
931         statistics_proxy_.get());
932
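    // Create the receive-side audio processing module; the experimental AGC
    // is explicitly disabled for it.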
933     Config audioproc_config;
934     audioproc_config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
935     rx_audioproc_.reset(AudioProcessing::Create(audioproc_config));
936 }
937
938 Channel::~Channel()
939 {
940     rtp_receive_statistics_->RegisterRtcpStatisticsCallback(NULL);
941     WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
942                  "Channel::~Channel() - dtor");
943
944     if (_outputExternalMedia)
945     {
946         DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
947     }
948     if (channel_state_.Get().input_external_media)
949     {
950         DeRegisterExternalMediaProcessing(kRecordingPerChannel);
951     }
952     StopSend();
953     StopPlayout();
954
955     {
956         CriticalSectionScoped cs(&_fileCritSect);
957         if (_inputFilePlayerPtr)
958         {
959             _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
960             _inputFilePlayerPtr->StopPlayingFile();
961             FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
962             _inputFilePlayerPtr = NULL;
963         }
964         if (_outputFilePlayerPtr)
965         {
966             _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
967             _outputFilePlayerPtr->StopPlayingFile();
968             FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
969             _outputFilePlayerPtr = NULL;
970         }
971         if (_outputFileRecorderPtr)
972         {
973             _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
974             _outputFileRecorderPtr->StopRecording();
975             FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
976             _outputFileRecorderPtr = NULL;
977         }
978     }
979
980     // The order to safely shut down modules in a channel is:
981     // 1. De-register callbacks in modules
982     // 2. De-register modules in process thread
983     // 3. Destroy modules
984     if (audio_coding_->RegisterTransportCallback(NULL) == -1)
985     {
986         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
987                      VoEId(_instanceId,_channelId),
988                      "~Channel() failed to de-register transport callback"
989                      " (Audio coding module)");
990     }
991     if (audio_coding_->RegisterVADCallback(NULL) == -1)
992     {
993         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
994                      VoEId(_instanceId,_channelId),
995                      "~Channel() failed to de-register VAD callback"
996                      " (Audio coding module)");
997     }
998     // De-register modules in process thread
999     if (_moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()) == -1)
1000     {
1001         WEBRTC_TRACE(kTraceInfo, kTraceVoice,
1002                      VoEId(_instanceId,_channelId),
1003                      "~Channel() failed to deregister RTP/RTCP module");
1004     }
1005     // End of modules shutdown
1006
1007     // Delete other objects
1008     if (vie_network_) {
1009       vie_network_->Release();
1010       vie_network_ = NULL;
1011     }
1012     RtpDump::DestroyRtpDump(&_rtpDumpIn);
1013     RtpDump::DestroyRtpDump(&_rtpDumpOut);
1014     delete &_callbackCritSect;
1015     delete &_fileCritSect;
1016     delete &volume_settings_critsect_;
1017 }
1018
1019 int32_t
1020 Channel::Init()
1021 {
1022     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1023                  "Channel::Init()");
1024
1025     channel_state_.Reset();
1026
1027     // --- Initial sanity
1028
1029     if ((_engineStatisticsPtr == NULL) ||
1030         (_moduleProcessThreadPtr == NULL))
1031     {
1032         WEBRTC_TRACE(kTraceError, kTraceVoice,
1033                      VoEId(_instanceId,_channelId),
1034                      "Channel::Init() must call SetEngineInformation() first");
1035         return -1;
1036     }
1037
1038     // --- Add modules to the process thread (for periodic scheduling)
1039
1040     const bool processThreadFail =
1041         (_moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get()) != 0);
1043     if (processThreadFail)
1044     {
1045         _engineStatisticsPtr->SetLastError(
1046             VE_CANNOT_INIT_CHANNEL, kTraceError,
1047             "Channel::Init() modules not registered");
1048         return -1;
1049     }
1050     // --- ACM initialization
1051
1052     if ((audio_coding_->InitializeReceiver() == -1) ||
1053 #ifdef WEBRTC_CODEC_AVT
1054         // out-of-band Dtmf tones are played out by default
1055         (audio_coding_->SetDtmfPlayoutStatus(true) == -1) ||
1056 #endif
1057         (audio_coding_->InitializeSender() == -1))
1058     {
1059         _engineStatisticsPtr->SetLastError(
1060             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1061             "Channel::Init() unable to initialize the ACM - 1");
1062         return -1;
1063     }
1064
1065     // --- RTP/RTCP module initialization
1066
1067     // Ensure that RTCP is enabled by default for the created channel.
1068     // Note that the module will keep generating RTCP until it is explicitly
1069     // disabled by the user.
1070     // After StopListen (when no sockets exist), RTCP packets will no longer
1071     // be transmitted since the Transport object will then be invalid.
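    // Forward received telephone events (DTMF) to the audio decoder by
    // default.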
1072     telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
1073     // RTCP is enabled by default.
1074     if (_rtpRtcpModule->SetRTCPStatus(kRtcpCompound) == -1)
1075     {
1076         _engineStatisticsPtr->SetLastError(
1077             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1078             "Channel::Init() RTP/RTCP module not initialized");
1079         return -1;
1080     }
1081
1082      // --- Register all permanent callbacks
1083     const bool fail =
1084         (audio_coding_->RegisterTransportCallback(this) == -1) ||
1085         (audio_coding_->RegisterVADCallback(this) == -1);
1086
1087     if (fail)
1088     {
1089         _engineStatisticsPtr->SetLastError(
1090             VE_CANNOT_INIT_CHANNEL, kTraceError,
1091             "Channel::Init() callbacks not registered");
1092         return -1;
1093     }
1094
1095     // --- Register all supported codecs to the receiving side of the
1096     // RTP/RTCP module
1097
1098     CodecInst codec;
1099     const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
1100
1101     for (int idx = 0; idx < nSupportedCodecs; idx++)
1102     {
1103         // Open up the RTP/RTCP receiver for all supported codecs
1104         if ((audio_coding_->Codec(idx, &codec) == -1) ||
1105             (rtp_receiver_->RegisterReceivePayload(
1106                 codec.plname,
1107                 codec.pltype,
1108                 codec.plfreq,
1109                 codec.channels,
1110                 (codec.rate < 0) ? 0 : codec.rate) == -1))
1111         {
1112             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1113                          VoEId(_instanceId,_channelId),
1114                          "Channel::Init() unable to register %s (%d/%d/%d/%d) "
1115                          "to RTP/RTCP receiver",
1116                          codec.plname, codec.pltype, codec.plfreq,
1117                          codec.channels, codec.rate);
1118         }
1119         else
1120         {
1121             WEBRTC_TRACE(kTraceInfo, kTraceVoice,
1122                          VoEId(_instanceId,_channelId),
1123                          "Channel::Init() %s (%d/%d/%d/%d) has been added to "
1124                          "the RTP/RTCP receiver",
1125                          codec.plname, codec.pltype, codec.plfreq,
1126                          codec.channels, codec.rate);
1127         }
1128
1129         // Ensure that PCMU is used as the default codec on the sending side
1130         if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1))
1131         {
1132             SetSendCodec(codec);
1133         }
1134
1135         // Register the default payload type for out-of-band 'telephone-event'
1136         if (!STR_CASE_CMP(codec.plname, "telephone-event"))
1137         {
1138             if ((_rtpRtcpModule->RegisterSendPayload(codec) == -1) ||
1139                 (audio_coding_->RegisterReceiveCodec(codec) == -1))
1140             {
1141                 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1142                              VoEId(_instanceId,_channelId),
1143                              "Channel::Init() failed to register outband "
1144                              "'telephone-event' (%d/%d) correctly",
1145                              codec.pltype, codec.plfreq);
1146             }
1147         }
1148
1149         if (!STR_CASE_CMP(codec.plname, "CN"))
1150         {
1151             if ((audio_coding_->RegisterSendCodec(codec) == -1) ||
1152                 (audio_coding_->RegisterReceiveCodec(codec) == -1) ||
1153                 (_rtpRtcpModule->RegisterSendPayload(codec) == -1))
1154             {
1155                 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1156                              VoEId(_instanceId,_channelId),
1157                              "Channel::Init() failed to register CN (%d/%d) "
1158                              "correctly - 1",
1159                              codec.pltype, codec.plfreq);
1160             }
1161         }
1162 #ifdef WEBRTC_CODEC_RED
1163         // Register RED to the receiving side of the ACM.
1164         // We will not receive an OnInitializeDecoder() callback for RED.
1165         if (!STR_CASE_CMP(codec.plname, "RED"))
1166         {
1167             if (audio_coding_->RegisterReceiveCodec(codec) == -1)
1168             {
1169                 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1170                              VoEId(_instanceId,_channelId),
1171                              "Channel::Init() failed to register RED (%d/%d) "
1172                              "correctly",
1173                              codec.pltype, codec.plfreq);
1174             }
1175         }
1176 #endif
1177     }
1178
1179     if (rx_audioproc_->noise_suppression()->set_level(kDefaultNsMode) != 0) {
1180       LOG_FERR1(LS_ERROR, noise_suppression()->set_level, kDefaultNsMode);
1181       return -1;
1182     }
1183     if (rx_audioproc_->gain_control()->set_mode(kDefaultRxAgcMode) != 0) {
1184       LOG_FERR1(LS_ERROR, gain_control()->set_mode, kDefaultRxAgcMode);
1185       return -1;
1186     }
1187
1188     return 0;
1189 }
1190
1191 int32_t
1192 Channel::SetEngineInformation(Statistics& engineStatistics,
1193                               OutputMixer& outputMixer,
1194                               voe::TransmitMixer& transmitMixer,
1195                               ProcessThread& moduleProcessThread,
1196                               AudioDeviceModule& audioDeviceModule,
1197                               VoiceEngineObserver* voiceEngineObserver,
1198                               CriticalSectionWrapper* callbackCritSect)
1199 {
1200     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1201                  "Channel::SetEngineInformation()");
1202     _engineStatisticsPtr = &engineStatistics;
1203     _outputMixerPtr = &outputMixer;
1204     _transmitMixerPtr = &transmitMixer;
1205     _moduleProcessThreadPtr = &moduleProcessThread;
1206     _audioDeviceModulePtr = &audioDeviceModule;
1207     _voiceEngineObserverPtr = voiceEngineObserver;
1208     _callbackCritSectPtr = callbackCritSect;
1209     return 0;
1210 }
1211
1212 int32_t
1213 Channel::UpdateLocalTimeStamp()
1214 {
1215
1216     _timeStamp += _audioFrame.samples_per_channel_;
1217     return 0;
1218 }
1219
1220 int32_t
1221 Channel::StartPlayout()
1222 {
1223     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1224                  "Channel::StartPlayout()");
1225     if (channel_state_.Get().playing)
1226     {
1227         return 0;
1228     }
1229
1230     if (!_externalMixing) {
1231         // Add this participant as a candidate for mixing.
1232         if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0)
1233         {
1234             _engineStatisticsPtr->SetLastError(
1235                 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1236                 "StartPlayout() failed to add participant to mixer");
1237             return -1;
1238         }
1239     }
1240
1241     channel_state_.SetPlaying(true);
1242     if (RegisterFilePlayingToMixer() != 0)
1243         return -1;
1244
1245     return 0;
1246 }
1247
1248 int32_t
1249 Channel::StopPlayout()
1250 {
1251     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1252                  "Channel::StopPlayout()");
1253     if (!channel_state_.Get().playing)
1254     {
1255         return 0;
1256     }
1257
1258     if (!_externalMixing) {
1259         // Remove this participant as a candidate for mixing.
1260         if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0)
1261         {
1262             _engineStatisticsPtr->SetLastError(
1263                 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1264                 "StopPlayout() failed to remove participant from mixer");
1265             return -1;
1266         }
1267     }
1268
1269     channel_state_.SetPlaying(false);
1270     _outputAudioLevel.Clear();
1271
1272     return 0;
1273 }
1274
1275 int32_t
1276 Channel::StartSend()
1277 {
1278     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1279                  "Channel::StartSend()");
1280     // Resume the previous sequence number which was reset by StopSend().
1281     // This needs to be done before |sending| is set to true.
1282     if (send_sequence_number_)
1283       SetInitSequenceNumber(send_sequence_number_);
1284
1285     if (channel_state_.Get().sending)
1286     {
1287       return 0;
1288     }
1289     channel_state_.SetSending(true);
1290
1291     if (_rtpRtcpModule->SetSendingStatus(true) != 0)
1292     {
1293         _engineStatisticsPtr->SetLastError(
1294             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1295             "StartSend() RTP/RTCP failed to start sending");
1296         CriticalSectionScoped cs(&_callbackCritSect);
1297         channel_state_.SetSending(false);
1298         return -1;
1299     }
1300
1301     return 0;
1302 }
1303
1304 int32_t
1305 Channel::StopSend()
1306 {
1307     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1308                  "Channel::StopSend()");
1309     if (!channel_state_.Get().sending)
1310     {
1311       return 0;
1312     }
1313     channel_state_.SetSending(false);
1314
1315     // Store the sequence number to be able to pick up the same sequence for
1316     // the next StartSend(). This is needed when restarting the device;
1317     // otherwise libSRTP might complain about packets being replayed.
1318     // TODO(xians): Remove this workaround after RtpRtcpModule's refactoring
1319     // CL is landed. See issue
1320     // https://code.google.com/p/webrtc/issues/detail?id=2111 .
1321     send_sequence_number_ = _rtpRtcpModule->SequenceNumber();
1322
1323     // Reset the sending SSRC and sequence number and trigger direct
1324     // transmission of an RTCP BYE.
1325     if (_rtpRtcpModule->SetSendingStatus(false) == -1 ||
1326         _rtpRtcpModule->ResetSendDataCountersRTP() == -1)
1327     {
1328         _engineStatisticsPtr->SetLastError(
1329             VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1330             "StopSend() RTP/RTCP failed to stop sending");
1331     }
1332
1333     return 0;
1334 }
1335
1336 int32_t
1337 Channel::StartReceiving()
1338 {
1339     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1340                  "Channel::StartReceiving()");
1341     if (channel_state_.Get().receiving)
1342     {
1343         return 0;
1344     }
1345     channel_state_.SetReceiving(true);
1346     _numberOfDiscardedPackets = 0;
1347     return 0;
1348 }
1349
1350 int32_t
1351 Channel::StopReceiving()
1352 {
1353     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1354                  "Channel::StopReceiving()");
1355     if (!channel_state_.Get().receiving)
1356     {
1357         return 0;
1358     }
1359
1360     channel_state_.SetReceiving(false);
1361     return 0;
1362 }
1363
1364 int32_t
1365 Channel::SetNetEQPlayoutMode(NetEqModes mode)
1366 {
1367     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1368                  "Channel::SetNetEQPlayoutMode()");
1369     AudioPlayoutMode playoutMode(voice);
1370     switch (mode)
1371     {
1372         case kNetEqDefault:
1373             playoutMode = voice;
1374             break;
1375         case kNetEqStreaming:
1376             playoutMode = streaming;
1377             break;
1378         case kNetEqFax:
1379             playoutMode = fax;
1380             break;
1381         case kNetEqOff:
1382             playoutMode = off;
1383             break;
1384     }
1385     if (audio_coding_->SetPlayoutMode(playoutMode) != 0)
1386     {
1387         _engineStatisticsPtr->SetLastError(
1388             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1389             "SetNetEQPlayoutMode() failed to set playout mode");
1390         return -1;
1391     }
1392     return 0;
1393 }
1394
1395 int32_t
1396 Channel::GetNetEQPlayoutMode(NetEqModes& mode)
1397 {
1398     const AudioPlayoutMode playoutMode = audio_coding_->PlayoutMode();
1399     switch (playoutMode)
1400     {
1401         case voice:
1402             mode = kNetEqDefault;
1403             break;
1404         case streaming:
1405             mode = kNetEqStreaming;
1406             break;
1407         case fax:
1408             mode = kNetEqFax;
1409             break;
1410         case off:
1411             mode = kNetEqOff;
1412     }
1413     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
1414                  VoEId(_instanceId,_channelId),
1415                  "Channel::GetNetEQPlayoutMode() => mode=%u", mode);
1416     return 0;
1417 }
1418
1419 int32_t
1420 Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)
1421 {
1422     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1423                  "Channel::RegisterVoiceEngineObserver()");
1424     CriticalSectionScoped cs(&_callbackCritSect);
1425
1426     if (_voiceEngineObserverPtr)
1427     {
1428         _engineStatisticsPtr->SetLastError(
1429             VE_INVALID_OPERATION, kTraceError,
1430             "RegisterVoiceEngineObserver() observer already enabled");
1431         return -1;
1432     }
1433     _voiceEngineObserverPtr = &observer;
1434     return 0;
1435 }
1436
1437 int32_t
1438 Channel::DeRegisterVoiceEngineObserver()
1439 {
1440     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1441                  "Channel::DeRegisterVoiceEngineObserver()");
1442     CriticalSectionScoped cs(&_callbackCritSect);
1443
1444     if (!_voiceEngineObserverPtr)
1445     {
1446         _engineStatisticsPtr->SetLastError(
1447             VE_INVALID_OPERATION, kTraceWarning,
1448             "DeRegisterVoiceEngineObserver() observer already disabled");
1449         return 0;
1450     }
1451     _voiceEngineObserverPtr = NULL;
1452     return 0;
1453 }
1454
1455 int32_t
1456 Channel::GetSendCodec(CodecInst& codec)
1457 {
1458     return (audio_coding_->SendCodec(&codec));
1459 }
1460
1461 int32_t
1462 Channel::GetRecCodec(CodecInst& codec)
1463 {
1464     return (audio_coding_->ReceiveCodec(&codec));
1465 }
1466
1467 int32_t
1468 Channel::SetSendCodec(const CodecInst& codec)
1469 {
1470     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1471                  "Channel::SetSendCodec()");
1472
1473     if (audio_coding_->RegisterSendCodec(codec) != 0)
1474     {
1475         WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1476                      "SetSendCodec() failed to register codec to ACM");
1477         return -1;
1478     }
1479
1480     if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
1481     {
1482         _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1483         if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
1484         {
1485             WEBRTC_TRACE(
1486                     kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1487                     "SetSendCodec() failed to register codec to"
1488                     " RTP/RTCP module");
1489             return -1;
1490         }
1491     }
1492
1493     if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0)
1494     {
1495         WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1496                      "SetSendCodec() failed to set audio packet size");
1497         return -1;
1498     }
1499
1500     return 0;
1501 }
1502
1503 int32_t
1504 Channel::SetVADStatus(bool enableVAD, ACMVADMode mode, bool disableDTX)
1505 {
1506     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1507                  "Channel::SetVADStatus(mode=%d)", mode);
1508     // To disable VAD, DTX must be disabled too
1509     disableDTX = ((enableVAD == false) ? true : disableDTX);
1510     if (audio_coding_->SetVAD(!disableDTX, enableVAD, mode) != 0)
1511     {
1512         _engineStatisticsPtr->SetLastError(
1513             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1514             "SetVADStatus() failed to set VAD");
1515         return -1;
1516     }
1517     return 0;
1518 }
1519
1520 int32_t
1521 Channel::GetVADStatus(bool& enabledVAD, ACMVADMode& mode, bool& disabledDTX)
1522 {
1523     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1524                  "Channel::GetVADStatus");
1525     if (audio_coding_->VAD(&disabledDTX, &enabledVAD, &mode) != 0)
1526     {
1527         _engineStatisticsPtr->SetLastError(
1528             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1529             "GetVADStatus() failed to get VAD status");
1530         return -1;
1531     }
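         // The ACM reports whether DTX is enabled; invert it to match the disabledDTX out-parameter.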
1532     disabledDTX = !disabledDTX;
1533     return 0;
1534 }
1535
1536 int32_t
1537 Channel::SetRecPayloadType(const CodecInst& codec)
1538 {
1539     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1540                  "Channel::SetRecPayloadType()");
1541
1542     if (channel_state_.Get().playing)
1543     {
1544         _engineStatisticsPtr->SetLastError(
1545             VE_ALREADY_PLAYING, kTraceError,
1546             "SetRecPayloadType() unable to set PT while playing");
1547         return -1;
1548     }
1549     if (channel_state_.Get().receiving)
1550     {
1551         _engineStatisticsPtr->SetLastError(
1552             VE_ALREADY_LISTENING, kTraceError,
1553             "SetRecPayloadType() unable to set PT while listening");
1554         return -1;
1555     }
1556
1557     if (codec.pltype == -1)
1558     {
1559         // De-register the selected codec (RTP/RTCP module and ACM)
1560
1561         int8_t pltype(-1);
1562         CodecInst rxCodec = codec;
1563
1564         // Get payload type for the given codec
1565         rtp_payload_registry_->ReceivePayloadType(
1566             rxCodec.plname,
1567             rxCodec.plfreq,
1568             rxCodec.channels,
1569             (rxCodec.rate < 0) ? 0 : rxCodec.rate,
1570             &pltype);
1571         rxCodec.pltype = pltype;
1572
1573         if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0)
1574         {
1575             _engineStatisticsPtr->SetLastError(
1576                     VE_RTP_RTCP_MODULE_ERROR,
1577                     kTraceError,
1578                     "SetRecPayloadType() RTP/RTCP-module deregistration "
1579                     "failed");
1580             return -1;
1581         }
1582         if (audio_coding_->UnregisterReceiveCodec(rxCodec.pltype) != 0)
1583         {
1584             _engineStatisticsPtr->SetLastError(
1585                 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1586                 "SetRecPayloadType() ACM deregistration failed - 1");
1587             return -1;
1588         }
1589         return 0;
1590     }
1591
1592     if (rtp_receiver_->RegisterReceivePayload(
1593         codec.plname,
1594         codec.pltype,
1595         codec.plfreq,
1596         codec.channels,
1597         (codec.rate < 0) ? 0 : codec.rate) != 0)
1598     {
1599         // First attempt to register failed => de-register and try again
1600         rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
1601         if (rtp_receiver_->RegisterReceivePayload(
1602             codec.plname,
1603             codec.pltype,
1604             codec.plfreq,
1605             codec.channels,
1606             (codec.rate < 0) ? 0 : codec.rate) != 0)
1607         {
1608             _engineStatisticsPtr->SetLastError(
1609                 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1610                 "SetRecPayloadType() RTP/RTCP-module registration failed");
1611             return -1;
1612         }
1613     }
1614     if (audio_coding_->RegisterReceiveCodec(codec) != 0)
1615     {
1616         audio_coding_->UnregisterReceiveCodec(codec.pltype);
1617         if (audio_coding_->RegisterReceiveCodec(codec) != 0)
1618         {
1619             _engineStatisticsPtr->SetLastError(
1620                 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1621                 "SetRecPayloadType() ACM registration failed - 1");
1622             return -1;
1623         }
1624     }
1625     return 0;
1626 }
1627
1628 int32_t
1629 Channel::GetRecPayloadType(CodecInst& codec)
1630 {
1631     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1632                  "Channel::GetRecPayloadType()");
1633     int8_t payloadType(-1);
1634     if (rtp_payload_registry_->ReceivePayloadType(
1635         codec.plname,
1636         codec.plfreq,
1637         codec.channels,
1638         (codec.rate < 0) ? 0 : codec.rate,
1639         &payloadType) != 0)
1640     {
1641         _engineStatisticsPtr->SetLastError(
1642             VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1643             "GetRecPayloadType() failed to retrieve RX payload type");
1644         return -1;
1645     }
1646     codec.pltype = payloadType;
1647     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1648                  "Channel::GetRecPayloadType() => pltype=%u", codec.pltype);
1649     return 0;
1650 }
1651
1652 int32_t
1653 Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency)
1654 {
1655     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1656                  "Channel::SetSendCNPayloadType()");
1657
1658     CodecInst codec;
1659     int32_t samplingFreqHz(-1);
1660     const int kMono = 1;
1661     if (frequency == kFreq32000Hz)
1662         samplingFreqHz = 32000;
1663     else if (frequency == kFreq16000Hz)
1664         samplingFreqHz = 16000;
1665
1666     if (audio_coding_->Codec("CN", &codec, samplingFreqHz, kMono) == -1)
1667     {
1668         _engineStatisticsPtr->SetLastError(
1669             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1670             "SetSendCNPayloadType() failed to retrieve default CN codec "
1671             "settings");
1672         return -1;
1673     }
1674
1675     // Modify the payload type (must be set to dynamic range)
1676     codec.pltype = type;
1677
1678     if (audio_coding_->RegisterSendCodec(codec) != 0)
1679     {
1680         _engineStatisticsPtr->SetLastError(
1681             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1682             "SetSendCNPayloadType() failed to register CN to ACM");
1683         return -1;
1684     }
1685
1686     if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
1687     {
1688         _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1689         if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
1690         {
1691             _engineStatisticsPtr->SetLastError(
1692                 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1693                 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
1694                 "module");
1695             return -1;
1696         }
1697     }
1698     return 0;
1699 }
1700
1701 int32_t Channel::RegisterExternalTransport(Transport& transport)
1702 {
1703     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1704                "Channel::RegisterExternalTransport()");
1705
1706     CriticalSectionScoped cs(&_callbackCritSect);
1707
1708     if (_externalTransport)
1709     {
1710         _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION,
1711                                            kTraceError,
1712               "RegisterExternalTransport() external transport already enabled");
1713         return -1;
1714     }
1715     _externalTransport = true;
1716     _transportPtr = &transport;
1717     return 0;
1718 }
1719
1720 int32_t
1721 Channel::DeRegisterExternalTransport()
1722 {
1723     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1724                  "Channel::DeRegisterExternalTransport()");
1725
1726     CriticalSectionScoped cs(&_callbackCritSect);
1727
1728     if (!_transportPtr)
1729     {
1730         _engineStatisticsPtr->SetLastError(
1731             VE_INVALID_OPERATION, kTraceWarning,
1732             "DeRegisterExternalTransport() external transport already "
1733             "disabled");
1734         return 0;
1735     }
1736     _externalTransport = false;
1737     _transportPtr = NULL;
1738     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1739                  "DeRegisterExternalTransport() all transport is disabled");
1740     return 0;
1741 }
1742
1743 int32_t Channel::ReceivedRTPPacket(const int8_t* data, int32_t length,
1744                                    const PacketTime& packet_time) {
1745   WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
1746                "Channel::ReceivedRTPPacket()");
1747
1748   // Store playout timestamp for the received RTP packet
1749   UpdatePlayoutTimestamp(false);
1750
1751   // Dump the RTP packet to a file (if RTP dump is enabled).
1752   if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
1753                             (uint16_t)length) == -1) {
1754     WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1755                  VoEId(_instanceId,_channelId),
1756                  "Channel::ReceivedRTPPacket() RTP dump to input file failed");
1757   }
1758   const uint8_t* received_packet = reinterpret_cast<const uint8_t*>(data);
1759   RTPHeader header;
1760   if (!rtp_header_parser_->Parse(received_packet, length, &header)) {
1761     WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1762                  "Incoming packet: invalid RTP header");
1763     return -1;
1764   }
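       // Look up the codec clock rate for this payload type; the receive statistics use it (e.g. for jitter).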
1765   header.payload_type_frequency =
1766       rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
1767   if (header.payload_type_frequency < 0)
1768     return -1;
1769   bool in_order = IsPacketInOrder(header);
1770   rtp_receive_statistics_->IncomingPacket(header, length,
1771       IsPacketRetransmitted(header, in_order));
1772   rtp_payload_registry_->SetIncomingPayloadType(header);
1773
1774   // Forward any packets to ViE bandwidth estimator, if enabled.
1775   {
1776     CriticalSectionScoped cs(&_callbackCritSect);
1777     if (vie_network_) {
1778       int64_t arrival_time_ms;
1779       if (packet_time.timestamp != -1) {
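             // packet_time.timestamp is in microseconds; round to the nearest millisecond.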
1780         arrival_time_ms = (packet_time.timestamp + 500) / 1000;
1781       } else {
1782         arrival_time_ms = TickTime::MillisecondTimestamp();
1783       }
1784       int payload_length = length - header.headerLength;
1785       vie_network_->ReceivedBWEPacket(video_channel_, arrival_time_ms,
1786                                       payload_length, header);
1787     }
1788   }
1789
1790   return ReceivePacket(received_packet, length, header, in_order) ? 0 : -1;
1791 }
1792
1793 bool Channel::ReceivePacket(const uint8_t* packet,
1794                             int packet_length,
1795                             const RTPHeader& header,
1796                             bool in_order) {
1797   if (rtp_payload_registry_->IsEncapsulated(header)) {
1798     return HandleEncapsulation(packet, packet_length, header);
1799   }
1800   const uint8_t* payload = packet + header.headerLength;
1801   int payload_length = packet_length - header.headerLength;
1802   assert(payload_length >= 0);
1803   PayloadUnion payload_specific;
1804   if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
1805                                                   &payload_specific)) {
1806     return false;
1807   }
1808   return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
1809                                           payload_specific, in_order);
1810 }
1811
1812 bool Channel::HandleEncapsulation(const uint8_t* packet,
1813                                   int packet_length,
1814                                   const RTPHeader& header) {
1815   if (!rtp_payload_registry_->IsRtx(header))
1816     return false;
1817
1818   // Remove the RTX header and parse the original RTP header.
1819   if (packet_length < header.headerLength)
1820     return false;
1821   if (packet_length > kVoiceEngineMaxIpPacketSizeBytes)
1822     return false;
1823   if (restored_packet_in_use_) {
1824     WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1825                  "Multiple RTX headers detected, dropping packet");
1826     return false;
1827   }
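       // |restored_packet_| is a single reusable buffer; the in-use flag above drops nested RTX packets.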
1828   uint8_t* restored_packet_ptr = restored_packet_;
1829   if (!rtp_payload_registry_->RestoreOriginalPacket(
1830       &restored_packet_ptr, packet, &packet_length, rtp_receiver_->SSRC(),
1831       header)) {
1832     WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1833                  "Incoming RTX packet: invalid RTP header");
1834     return false;
1835   }
1836   restored_packet_in_use_ = true;
1837   bool ret = OnRecoveredPacket(restored_packet_ptr, packet_length);
1838   restored_packet_in_use_ = false;
1839   return ret;
1840 }
1841
1842 bool Channel::IsPacketInOrder(const RTPHeader& header) const {
1843   StreamStatistician* statistician =
1844       rtp_receive_statistics_->GetStatistician(header.ssrc);
1845   if (!statistician)
1846     return false;
1847   return statistician->IsPacketInOrder(header.sequenceNumber);
1848 }
1849
1850 bool Channel::IsPacketRetransmitted(const RTPHeader& header,
1851                                     bool in_order) const {
1852   // Retransmissions are handled separately if RTX is enabled.
1853   if (rtp_payload_registry_->RtxEnabled())
1854     return false;
1855   StreamStatistician* statistician =
1856       rtp_receive_statistics_->GetStatistician(header.ssrc);
1857   if (!statistician)
1858     return false;
1859   // Check if this is a retransmission.
1860   uint16_t min_rtt = 0;
1861   _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
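       // Out-of-order packets arriving more than roughly one RTT late are treated as retransmissions.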
1862   return !in_order &&
1863       statistician->IsRetransmitOfOldPacket(header, min_rtt);
1864 }
1865
1866 int32_t Channel::ReceivedRTCPPacket(const int8_t* data, int32_t length) {
1867   WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
1868                "Channel::ReceivedRTCPPacket()");
1869   // Store playout timestamp for the received RTCP packet
1870   UpdatePlayoutTimestamp(true);
1871
1872   // Dump the RTCP packet to a file (if RTP dump is enabled).
1873   if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
1874                             (uint16_t)length) == -1) {
1875     WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1876                  VoEId(_instanceId,_channelId),
1877                  "Channel::ReceivedRTCPPacket() RTCP dump to input file failed");
1878   }
1879
1880   // Deliver RTCP packet to RTP/RTCP module for parsing
1881   if (_rtpRtcpModule->IncomingRtcpPacket((const uint8_t*)data,
1882                                          (uint16_t)length) == -1) {
1883     _engineStatisticsPtr->SetLastError(
1884         VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
1885         "Channel::ReceivedRTCPPacket() RTCP packet is invalid");
1886   }
1887   return 0;
1888 }
1889
1890 int Channel::StartPlayingFileLocally(const char* fileName,
1891                                      bool loop,
1892                                      FileFormats format,
1893                                      int startPosition,
1894                                      float volumeScaling,
1895                                      int stopPosition,
1896                                      const CodecInst* codecInst)
1897 {
1898     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1899                  "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
1900                  " format=%d, volumeScaling=%5.3f, startPosition=%d, "
1901                  "stopPosition=%d)", fileName, loop, format, volumeScaling,
1902                  startPosition, stopPosition);
1903
1904     if (channel_state_.Get().output_file_playing)
1905     {
1906         _engineStatisticsPtr->SetLastError(
1907             VE_ALREADY_PLAYING, kTraceError,
1908             "StartPlayingFileLocally() is already playing");
1909         return -1;
1910     }
1911
1912     {
1913         CriticalSectionScoped cs(&_fileCritSect);
1914
1915         if (_outputFilePlayerPtr)
1916         {
1917             _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1918             FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1919             _outputFilePlayerPtr = NULL;
1920         }
1921
1922         _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
1923             _outputFilePlayerId, (const FileFormats)format);
1924
1925         if (_outputFilePlayerPtr == NULL)
1926         {
1927             _engineStatisticsPtr->SetLastError(
1928                 VE_INVALID_ARGUMENT, kTraceError,
1929                 "StartPlayingFileLocally() filePlayer format is not correct");
1930             return -1;
1931         }
1932
1933         const uint32_t notificationTime(0);
1934
1935         if (_outputFilePlayerPtr->StartPlayingFile(
1936                 fileName,
1937                 loop,
1938                 startPosition,
1939                 volumeScaling,
1940                 notificationTime,
1941                 stopPosition,
1942                 (const CodecInst*)codecInst) != 0)
1943         {
1944             _engineStatisticsPtr->SetLastError(
1945                 VE_BAD_FILE, kTraceError,
1946                 "StartPlayingFile() failed to start file playout");
1947             _outputFilePlayerPtr->StopPlayingFile();
1948             FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1949             _outputFilePlayerPtr = NULL;
1950             return -1;
1951         }
1952         _outputFilePlayerPtr->RegisterModuleFileCallback(this);
1953         channel_state_.SetOutputFilePlaying(true);
1954     }
1955
1956     if (RegisterFilePlayingToMixer() != 0)
1957         return -1;
1958
1959     return 0;
1960 }
1961
1962 int Channel::StartPlayingFileLocally(InStream* stream,
1963                                      FileFormats format,
1964                                      int startPosition,
1965                                      float volumeScaling,
1966                                      int stopPosition,
1967                                      const CodecInst* codecInst)
1968 {
1969     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1970                  "Channel::StartPlayingFileLocally(format=%d,"
1971                  " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
1972                  format, volumeScaling, startPosition, stopPosition);
1973
1974     if (stream == NULL)
1975     {
1976         _engineStatisticsPtr->SetLastError(
1977             VE_BAD_FILE, kTraceError,
1978             "StartPlayingFileLocally() NULL as input stream");
1979         return -1;
1980     }
1981
1982
1983     if (channel_state_.Get().output_file_playing)
1984     {
1985         _engineStatisticsPtr->SetLastError(
1986             VE_ALREADY_PLAYING, kTraceError,
1987             "StartPlayingFileLocally() is already playing");
1988         return -1;
1989     }
1990
1991     {
1992       CriticalSectionScoped cs(&_fileCritSect);
1993
1994       // Destroy the old instance
1995       if (_outputFilePlayerPtr)
1996       {
1997           _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1998           FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1999           _outputFilePlayerPtr = NULL;
2000       }
2001
2002       // Create the instance
2003       _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2004           _outputFilePlayerId,
2005           (const FileFormats)format);
2006
2007       if (_outputFilePlayerPtr == NULL)
2008       {
2009           _engineStatisticsPtr->SetLastError(
2010               VE_INVALID_ARGUMENT, kTraceError,
2011               "StartPlayingFileLocally() filePlayer format is not correct");
2012           return -1;
2013       }
2014
2015       const uint32_t notificationTime(0);
2016
2017       if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2018                                                  volumeScaling,
2019                                                  notificationTime,
2020                                                  stopPosition, codecInst) != 0)
2021       {
2022           _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2023                                              "StartPlayingFile() failed to "
2024                                              "start file playout");
2025           _outputFilePlayerPtr->StopPlayingFile();
2026           FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2027           _outputFilePlayerPtr = NULL;
2028           return -1;
2029       }
2030       _outputFilePlayerPtr->RegisterModuleFileCallback(this);
2031       channel_state_.SetOutputFilePlaying(true);
2032     }
2033
2034     if (RegisterFilePlayingToMixer() != 0)
2035         return -1;
2036
2037     return 0;
2038 }
2039
2040 int Channel::StopPlayingFileLocally()
2041 {
2042     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2043                  "Channel::StopPlayingFileLocally()");
2044
2045     if (!channel_state_.Get().output_file_playing)
2046     {
2047         _engineStatisticsPtr->SetLastError(
2048             VE_INVALID_OPERATION, kTraceWarning,
2049             "StopPlayingFileLocally() is not playing");
2050         return 0;
2051     }
2052
2053     {
2054         CriticalSectionScoped cs(&_fileCritSect);
2055
2056         if (_outputFilePlayerPtr->StopPlayingFile() != 0)
2057         {
2058             _engineStatisticsPtr->SetLastError(
2059                 VE_STOP_RECORDING_FAILED, kTraceError,
2060                 "StopPlayingFile() could not stop playing");
2061             return -1;
2062         }
2063         _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2064         FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2065         _outputFilePlayerPtr = NULL;
2066         channel_state_.SetOutputFilePlaying(false);
2067     }
2068     // _fileCritSect cannot be taken while calling
2069     // SetAnonymousMixabilityStatus(). Refer to the comments in
2070     // RegisterFilePlayingToMixer() for more details.
2071     if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0)
2072     {
2073         _engineStatisticsPtr->SetLastError(
2074             VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
2075             "StopPlayingFile() failed to stop participant from playing as"
2076             " file in the mixer");
2077         return -1;
2078     }
2079
2080     return 0;
2081 }
2082
2083 int Channel::IsPlayingFileLocally() const
2084 {
2085     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2086                  "Channel::IsPlayingFileLocally()");
2087
2088     return channel_state_.Get().output_file_playing;
2089 }
2090
2091 int Channel::RegisterFilePlayingToMixer()
2092 {
2093     // Return success without registering file playing to the mixer if:
2094     // 1. a file is played before playout has started on this channel, or
2095     // 2. playout is started without a file playing on this channel.
2096     if (!channel_state_.Get().playing ||
2097         !channel_state_.Get().output_file_playing)
2098     {
2099         return 0;
2100     }
2101
2102     // |_fileCritSect| cannot be taken while calling
2103     // SetAnonymousMixabilityStatus() since as soon as the participant is added
2104     // frames can be pulled by the mixer. Since the frames are generated from
2105     // the file, _fileCritSect will be taken. This would result in a deadlock.
2106     if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0)
2107     {
2108         channel_state_.SetOutputFilePlaying(false);
2109         CriticalSectionScoped cs(&_fileCritSect);
2110         _engineStatisticsPtr->SetLastError(
2111             VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
2112             "StartPlayingFile() failed to add participant as file to mixer");
2113         _outputFilePlayerPtr->StopPlayingFile();
2114         FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2115         _outputFilePlayerPtr = NULL;
2116         return -1;
2117     }
2118
2119     return 0;
2120 }
2121
2122 int Channel::ScaleLocalFilePlayout(float scale)
2123 {
2124     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2125                  "Channel::ScaleLocalFilePlayout(scale=%5.3f)", scale);
2126
2127     CriticalSectionScoped cs(&_fileCritSect);
2128
2129     if (!channel_state_.Get().output_file_playing)
2130     {
2131         _engineStatisticsPtr->SetLastError(
2132             VE_INVALID_OPERATION, kTraceError,
2133             "ScaleLocalFilePlayout() is not playing");
2134         return -1;
2135     }
2136     if ((_outputFilePlayerPtr == NULL) ||
2137         (_outputFilePlayerPtr->SetAudioScaling(scale) != 0))
2138     {
2139         _engineStatisticsPtr->SetLastError(
2140             VE_BAD_ARGUMENT, kTraceError,
2141             "SetAudioScaling() failed to scale the playout");
2142         return -1;
2143     }
2144
2145     return 0;
2146 }
2147
2148 int Channel::GetLocalPlayoutPosition(int& positionMs)
2149 {
2150     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2151                  "Channel::GetLocalPlayoutPosition(position=?)");
2152
2153     uint32_t position;
2154
2155     CriticalSectionScoped cs(&_fileCritSect);
2156
2157     if (_outputFilePlayerPtr == NULL)
2158     {
2159         _engineStatisticsPtr->SetLastError(
2160             VE_INVALID_OPERATION, kTraceError,
2161             "GetLocalPlayoutPosition() filePlayer instance does not exist");
2162         return -1;
2163     }
2164
2165     if (_outputFilePlayerPtr->GetPlayoutPosition(position) != 0)
2166     {
2167         _engineStatisticsPtr->SetLastError(
2168             VE_BAD_FILE, kTraceError,
2169             "GetLocalPlayoutPosition() failed");
2170         return -1;
2171     }
2172     positionMs = position;
2173
2174     return 0;
2175 }
2176
2177 int Channel::StartPlayingFileAsMicrophone(const char* fileName,
2178                                           bool loop,
2179                                           FileFormats format,
2180                                           int startPosition,
2181                                           float volumeScaling,
2182                                           int stopPosition,
2183                                           const CodecInst* codecInst)
2184 {
2185     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2186                  "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
2187                  "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
2188                  "stopPosition=%d)", fileName, loop, format, volumeScaling,
2189                  startPosition, stopPosition);
2190
2191     CriticalSectionScoped cs(&_fileCritSect);
2192
2193     if (channel_state_.Get().input_file_playing)
2194     {
2195         _engineStatisticsPtr->SetLastError(
2196             VE_ALREADY_PLAYING, kTraceWarning,
2197             "StartPlayingFileAsMicrophone() filePlayer is already playing");
2198         return 0;
2199     }
2200
2201     // Destroy the old instance
2202     if (_inputFilePlayerPtr)
2203     {
2204         _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2205         FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2206         _inputFilePlayerPtr = NULL;
2207     }
2208
2209     // Create the instance
2210     _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2211         _inputFilePlayerId, (const FileFormats)format);
2212
2213     if (_inputFilePlayerPtr == NULL)
2214     {
2215         _engineStatisticsPtr->SetLastError(
2216             VE_INVALID_ARGUMENT, kTraceError,
2217             "StartPlayingFileAsMicrophone() filePlayer format is not correct");
2218         return -1;
2219     }
2220
2221     const uint32_t notificationTime(0);
2222
2223     if (_inputFilePlayerPtr->StartPlayingFile(
2224         fileName,
2225         loop,
2226         startPosition,
2227         volumeScaling,
2228         notificationTime,
2229         stopPosition,
2230         (const CodecInst*)codecInst) != 0)
2231     {
2232         _engineStatisticsPtr->SetLastError(
2233             VE_BAD_FILE, kTraceError,
2234             "StartPlayingFile() failed to start file playout");
2235         _inputFilePlayerPtr->StopPlayingFile();
2236         FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2237         _inputFilePlayerPtr = NULL;
2238         return -1;
2239     }
2240     _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2241     channel_state_.SetInputFilePlaying(true);
2242
2243     return 0;
2244 }
2245
2246 int Channel::StartPlayingFileAsMicrophone(InStream* stream,
2247                                           FileFormats format,
2248                                           int startPosition,
2249                                           float volumeScaling,
2250                                           int stopPosition,
2251                                           const CodecInst* codecInst)
2252 {
2253     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2254                  "Channel::StartPlayingFileAsMicrophone(format=%d, "
2255                  "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2256                  format, volumeScaling, startPosition, stopPosition);
2257
2258     if (stream == NULL)
2259     {
2260         _engineStatisticsPtr->SetLastError(
2261             VE_BAD_FILE, kTraceError,
2262             "StartPlayingFileAsMicrophone() NULL as input stream");
2263         return -1;
2264     }
2265
2266     CriticalSectionScoped cs(&_fileCritSect);
2267
2268     if (channel_state_.Get().input_file_playing)
2269     {
2270         _engineStatisticsPtr->SetLastError(
2271             VE_ALREADY_PLAYING, kTraceWarning,
2272             "StartPlayingFileAsMicrophone() is playing");
2273         return 0;
2274     }
2275
2276     // Destroy the old instance
2277     if (_inputFilePlayerPtr)
2278     {
2279         _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2280         FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2281         _inputFilePlayerPtr = NULL;
2282     }
2283
2284     // Create the instance
2285     _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2286         _inputFilePlayerId, (const FileFormats)format);
2287
2288     if (_inputFilePlayerPtr == NULL)
2289     {
2290         _engineStatisticsPtr->SetLastError(
2291             VE_INVALID_ARGUMENT, kTraceError,
2292             "StartPlayingFileAsMicrophone() filePlayer format is not correct");
2293         return -1;
2294     }
2295
2296     const uint32_t notificationTime(0);
2297
2298     if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2299                                               volumeScaling, notificationTime,
2300                                               stopPosition, codecInst) != 0)
2301     {
2302         _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2303                                            "StartPlayingFile() failed to start "
2304                                            "file playout");
2305         _inputFilePlayerPtr->StopPlayingFile();
2306         FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2307         _inputFilePlayerPtr = NULL;
2308         return -1;
2309     }
2310
2311     _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2312     channel_state_.SetInputFilePlaying(true);
2313
2314     return 0;
2315 }
2316
2317 int Channel::StopPlayingFileAsMicrophone()
2318 {
2319     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2320                  "Channel::StopPlayingFileAsMicrophone()");
2321
2322     CriticalSectionScoped cs(&_fileCritSect);
2323
2324     if (!channel_state_.Get().input_file_playing)
2325     {
2326         _engineStatisticsPtr->SetLastError(
2327             VE_INVALID_OPERATION, kTraceWarning,
2328             "StopPlayingFileAsMicrophone() is not playing");
2329         return 0;
2330     }
2331
2332     if (_inputFilePlayerPtr->StopPlayingFile() != 0)
2333     {
2334         _engineStatisticsPtr->SetLastError(
2335             VE_STOP_RECORDING_FAILED, kTraceError,
2336             "StopPlayingFile() could not stop playing");
2337         return -1;
2338     }
2339     _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2340     FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2341     _inputFilePlayerPtr = NULL;
2342     channel_state_.SetInputFilePlaying(false);
2343
2344     return 0;
2345 }
2346
2347 int Channel::IsPlayingFileAsMicrophone() const
2348 {
2349     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2350                  "Channel::IsPlayingFileAsMicrophone()");
2351     return channel_state_.Get().input_file_playing;
2352 }
2353
2354 int Channel::ScaleFileAsMicrophonePlayout(float scale)
2355 {
2356     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2357                  "Channel::ScaleFileAsMicrophonePlayout(scale=%5.3f)", scale);
2358
2359     CriticalSectionScoped cs(&_fileCritSect);
2360
2361     if (!channel_state_.Get().input_file_playing)
2362     {
2363         _engineStatisticsPtr->SetLastError(
2364             VE_INVALID_OPERATION, kTraceError,
2365             "ScaleFileAsMicrophonePlayout() is not playing");
2366         return -1;
2367     }
2368
2369     if ((_inputFilePlayerPtr == NULL) ||
2370         (_inputFilePlayerPtr->SetAudioScaling(scale) != 0))
2371     {
2372         _engineStatisticsPtr->SetLastError(
2373             VE_BAD_ARGUMENT, kTraceError,
2374             "SetAudioScaling() failed to scale playout");
2375         return -1;
2376     }
2377
2378     return 0;
2379 }
2380
2381 int Channel::StartRecordingPlayout(const char* fileName,
2382                                    const CodecInst* codecInst)
2383 {
2384     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2385                  "Channel::StartRecordingPlayout(fileName=%s)", fileName);
2386
2387     if (_outputFileRecording)
2388     {
2389         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2390                      "StartRecordingPlayout() is already recording");
2391         return 0;
2392     }
2393
2394     FileFormats format;
2395     const uint32_t notificationTime(0); // Not supported in VoE
2396     CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
2397
2398     if ((codecInst != NULL) &&
2399       ((codecInst->channels < 1) || (codecInst->channels > 2)))
2400     {
2401         _engineStatisticsPtr->SetLastError(
2402             VE_BAD_ARGUMENT, kTraceError,
2403             "StartRecordingPlayout() invalid compression");
2404         return(-1);
2405     }
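         // Pick a file format: no codec means 16 kHz PCM, L16/PCMU/PCMA are written as WAV,
         // anything else as a compressed file.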
2406     if (codecInst == NULL)
2407     {
2408         format = kFileFormatPcm16kHzFile;
2409         codecInst = &dummyCodec;
2410     }
2411     else if ((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2412         (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2413         (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2414     {
2415         format = kFileFormatWavFile;
2416     }
2417     else
2418     {
2419         format = kFileFormatCompressedFile;
2420     }
2421
2422     CriticalSectionScoped cs(&_fileCritSect);
2423
2424     // Destroy the old instance
2425     if (_outputFileRecorderPtr)
2426     {
2427         _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2428         FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2429         _outputFileRecorderPtr = NULL;
2430     }
2431
2432     _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2433         _outputFileRecorderId, (const FileFormats)format);
2434     if (_outputFileRecorderPtr == NULL)
2435     {
2436         _engineStatisticsPtr->SetLastError(
2437             VE_INVALID_ARGUMENT, kTraceError,
2438             "StartRecordingPlayout() fileRecorder format is not correct");
2439         return -1;
2440     }
2441
2442     if (_outputFileRecorderPtr->StartRecordingAudioFile(
2443         fileName, (const CodecInst&)*codecInst, notificationTime) != 0)
2444     {
2445         _engineStatisticsPtr->SetLastError(
2446             VE_BAD_FILE, kTraceError,
2447             "StartRecordingAudioFile() failed to start file recording");
2448         _outputFileRecorderPtr->StopRecording();
2449         FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2450         _outputFileRecorderPtr = NULL;
2451         return -1;
2452     }
2453     _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2454     _outputFileRecording = true;
2455
2456     return 0;
2457 }
2458
2459 int Channel::StartRecordingPlayout(OutStream* stream,
2460                                    const CodecInst* codecInst)
2461 {
2462     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2463                  "Channel::StartRecordingPlayout()");
2464
2465     if (_outputFileRecording)
2466     {
2467         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2468                      "StartRecordingPlayout() is already recording");
2469         return 0;
2470     }
2471
2472     FileFormats format;
2473     const uint32_t notificationTime(0); // Not supported in VoE
2474     CodecInst dummyCodec = {100, "L16", 16000, 320, 1, 320000};
2475
2476     if (codecInst != NULL && codecInst->channels != 1)
2477     {
2478         _engineStatisticsPtr->SetLastError(
2479             VE_BAD_ARGUMENT, kTraceError,
2480             "StartRecordingPlayout() invalid compression");
2481         return(-1);
2482     }
2483     if (codecInst == NULL)
2484     {
2485         format = kFileFormatPcm16kHzFile;
2486         codecInst = &dummyCodec;
2487     }
2488     else if ((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2489         (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2490         (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2491     {
2492         format = kFileFormatWavFile;
2493     }
2494     else
2495     {
2496         format = kFileFormatCompressedFile;
2497     }
2498
2499     CriticalSectionScoped cs(&_fileCritSect);
2500
2501     // Destroy the old instance
2502     if (_outputFileRecorderPtr)
2503     {
2504         _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2505         FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2506         _outputFileRecorderPtr = NULL;
2507     }
2508
2509     _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2510         _outputFileRecorderId, (const FileFormats)format);
2511     if (_outputFileRecorderPtr == NULL)
2512     {
2513         _engineStatisticsPtr->SetLastError(
2514             VE_INVALID_ARGUMENT, kTraceError,
2515             "StartRecordingPlayout() fileRecorder format is not correct");
2516         return -1;
2517     }
2518
2519     if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
2520                                                         notificationTime) != 0)
2521     {
2522         _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2523                                            "StartRecordingPlayout() failed to "
2524                                            "start file recording");
2525         _outputFileRecorderPtr->StopRecording();
2526         FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2527         _outputFileRecorderPtr = NULL;
2528         return -1;
2529     }
2530
2531     _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2532     _outputFileRecording = true;
2533
2534     return 0;
2535 }
2536
2537 int Channel::StopRecordingPlayout()
2538 {
2539     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
2540                  "Channel::StopRecordingPlayout()");
2541
2542     if (!_outputFileRecording)
2543     {
2544         WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
2545                      "StopRecordingPlayout() is not recording");
2546         return -1;
2547     }
2548
2549
2550     CriticalSectionScoped cs(&_fileCritSect);
2551
2552     if (_outputFileRecorderPtr->StopRecording() != 0)
2553     {
2554         _engineStatisticsPtr->SetLastError(
2555             VE_STOP_RECORDING_FAILED, kTraceError,
2556             "StopRecording() could not stop recording");
2557         return(-1);
2558     }
2559     _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2560     FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2561     _outputFileRecorderPtr = NULL;
2562     _outputFileRecording = false;
2563
2564     return 0;
2565 }
2566
2567 void
2568 Channel::SetMixWithMicStatus(bool mix)
2569 {
2570     CriticalSectionScoped cs(&_fileCritSect);
2571     _mixFileWithMicrophone = mix;
2572 }
2573
2574 int
2575 Channel::GetSpeechOutputLevel(uint32_t& level) const
2576 {
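         // Level() reports a coarse 0..9 level; GetSpeechOutputLevelFullRange() returns the linear 0..32767 value.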
2577     int8_t currentLevel = _outputAudioLevel.Level();
2578     level = static_cast<uint32_t>(currentLevel);
2579     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2580                VoEId(_instanceId,_channelId),
2581                "GetSpeechOutputLevel() => level=%u", level);
2582     return 0;
2583 }
2584
2585 int
2586 Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const
2587 {
2588     int16_t currentLevel = _outputAudioLevel.LevelFullRange();
2589     level = static_cast<uint32_t>(currentLevel);
2590     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2591                VoEId(_instanceId,_channelId),
2592                "GetSpeechOutputLevelFullRange() => level=%u", level);
2593     return 0;
2594 }
2595
2596 int
2597 Channel::SetMute(bool enable)
2598 {
2599     CriticalSectionScoped cs(&volume_settings_critsect_);
2600     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2601                "Channel::SetMute(enable=%d)", enable);
2602     _mute = enable;
2603     return 0;
2604 }
2605
2606 bool
2607 Channel::Mute() const
2608 {
2609     CriticalSectionScoped cs(&volume_settings_critsect_);
2610     return _mute;
2611 }
2612
2613 int
2614 Channel::SetOutputVolumePan(float left, float right)
2615 {
2616     CriticalSectionScoped cs(&volume_settings_critsect_);
2617     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2618                "Channel::SetOutputVolumePan()");
2619     _panLeft = left;
2620     _panRight = right;
2621     return 0;
2622 }
2623
2624 int
2625 Channel::GetOutputVolumePan(float& left, float& right) const
2626 {
2627     CriticalSectionScoped cs(&volume_settings_critsect_);
2628     left = _panLeft;
2629     right = _panRight;
2630     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2631                VoEId(_instanceId,_channelId),
2632                "GetOutputVolumePan() => left=%3.2f, right=%3.2f", left, right);
2633     return 0;
2634 }
2635
2636 int
2637 Channel::SetChannelOutputVolumeScaling(float scaling)
2638 {
2639     CriticalSectionScoped cs(&volume_settings_critsect_);
2640     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2641                "Channel::SetChannelOutputVolumeScaling()");
2642     _outputGain = scaling;
2643     return 0;
2644 }
2645
2646 int
2647 Channel::GetChannelOutputVolumeScaling(float& scaling) const
2648 {
2649     CriticalSectionScoped cs(&volume_settings_critsect_);
2650     scaling = _outputGain;
2651     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2652                VoEId(_instanceId,_channelId),
2653                "GetChannelOutputVolumeScaling() => scaling=%3.2f", scaling);
2654     return 0;
2655 }
2656
2657 int Channel::SendTelephoneEventOutband(unsigned char eventCode,
2658                                        int lengthMs, int attenuationDb,
2659                                        bool playDtmfEvent)
2660 {
2661     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2662                "Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)",
2663                playDtmfEvent);
2664
2665     _playOutbandDtmfEvent = playDtmfEvent;
2666
2667     if (_rtpRtcpModule->SendTelephoneEventOutband(eventCode, lengthMs,
2668                                                  attenuationDb) != 0)
2669     {
2670         _engineStatisticsPtr->SetLastError(
2671             VE_SEND_DTMF_FAILED,
2672             kTraceWarning,
2673             "SendTelephoneEventOutband() failed to send event");
2674         return -1;
2675     }
2676     return 0;
2677 }
2678
2679 int Channel::SendTelephoneEventInband(unsigned char eventCode,
2680                                          int lengthMs,
2681                                          int attenuationDb,
2682                                          bool playDtmfEvent)
2683 {
2684     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2685                "Channel::SendTelephoneEventInband(..., playDtmfEvent=%d)",
2686                playDtmfEvent);
2687
2688     _playInbandDtmfEvent = playDtmfEvent;
2689     _inbandDtmfQueue.AddDtmf(eventCode, lengthMs, attenuationDb);
2690
2691     return 0;
2692 }
2693
2694 int
2695 Channel::SetDtmfPlayoutStatus(bool enable)
2696 {
2697     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2698                "Channel::SetDtmfPlayoutStatus()");
2699     if (audio_coding_->SetDtmfPlayoutStatus(enable) != 0)
2700     {
2701         _engineStatisticsPtr->SetLastError(
2702             VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
2703             "SetDtmfPlayoutStatus() failed to set Dtmf playout");
2704         return -1;
2705     }
2706     return 0;
2707 }
2708
2709 bool
2710 Channel::DtmfPlayoutStatus() const
2711 {
2712     return audio_coding_->DtmfPlayoutStatus();
2713 }
2714
2715 int
2716 Channel::SetSendTelephoneEventPayloadType(unsigned char type)
2717 {
2718     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2719                "Channel::SetSendTelephoneEventPayloadType()");
2720     if (type > 127)
2721     {
2722         _engineStatisticsPtr->SetLastError(
2723             VE_INVALID_ARGUMENT, kTraceError,
2724             "SetSendTelephoneEventPayloadType() invalid type");
2725         return -1;
2726     }
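         // Build a "telephone-event" codec entry (8 kHz clock rate) and register it as the send payload for DTMF.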
2727     CodecInst codec = {};
2728     codec.plfreq = 8000;
2729     codec.pltype = type;
2730     memcpy(codec.plname, "telephone-event", 16);
2731     if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
2732     {
2733         _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
2734         if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
2735             _engineStatisticsPtr->SetLastError(
2736                 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2737                 "SetSendTelephoneEventPayloadType() failed to register send"
2738                 " payload type");
2739             return -1;
2740         }
2741     }
2742     _sendTelephoneEventPayloadType = type;
2743     return 0;
2744 }
2745
2746 int
2747 Channel::GetSendTelephoneEventPayloadType(unsigned char& type)
2748 {
2749     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2750                  "Channel::GetSendTelephoneEventPayloadType()");
2751     type = _sendTelephoneEventPayloadType;
2752     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2753                VoEId(_instanceId,_channelId),
2754                "GetSendTelephoneEventPayloadType() => type=%u", type);
2755     return 0;
2756 }
2757
2758 int
2759 Channel::UpdateRxVadDetection(AudioFrame& audioFrame)
2760 {
2761     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2762                  "Channel::UpdateRxVadDetection()");
2763
2764     int vadDecision = 1;
2765
2766     vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive) ? 1 : 0;
2767
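         // Only notify the registered RX VAD observer when the decision changes.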
2768     if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr)
2769     {
2770         OnRxVadDetected(vadDecision);
2771         _oldVadDecision = vadDecision;
2772     }
2773
2774     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2775                  "Channel::UpdateRxVadDetection() => vadDecision=%d",
2776                  vadDecision);
2777     return 0;
2778 }
2779
2780 int
2781 Channel::RegisterRxVadObserver(VoERxVadCallback &observer)
2782 {
2783     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2784                  "Channel::RegisterRxVadObserver()");
2785     CriticalSectionScoped cs(&_callbackCritSect);
2786
2787     if (_rxVadObserverPtr)
2788     {
2789         _engineStatisticsPtr->SetLastError(
2790             VE_INVALID_OPERATION, kTraceError,
2791             "RegisterRxVadObserver() observer already enabled");
2792         return -1;
2793     }
2794     _rxVadObserverPtr = &observer;
2795     _RxVadDetection = true;
2796     return 0;
2797 }
2798
2799 int
2800 Channel::DeRegisterRxVadObserver()
2801 {
2802     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2803                  "Channel::DeRegisterRxVadObserver()");
2804     CriticalSectionScoped cs(&_callbackCritSect);
2805
2806     if (!_rxVadObserverPtr)
2807     {
2808         _engineStatisticsPtr->SetLastError(
2809             VE_INVALID_OPERATION, kTraceWarning,
2810             "DeRegisterRxVadObserver() observer already disabled");
2811         return 0;
2812     }
2813     _rxVadObserverPtr = NULL;
2814     _RxVadDetection = false;
2815     return 0;
2816 }
2817
2818 int
2819 Channel::VoiceActivityIndicator(int &activity)
2820 {
2821     activity = _sendFrameType;
2822
2823     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2824                  "Channel::VoiceActivityIndicator(indicator=%d)", activity);
2825     return 0;
2826 }
2827
2828 #ifdef WEBRTC_VOICE_ENGINE_AGC
2829
2830 int
2831 Channel::SetRxAgcStatus(bool enable, AgcModes mode)
2832 {
2833     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2834                  "Channel::SetRxAgcStatus(enable=%d, mode=%d)",
2835                  (int)enable, (int)mode);
2836
2837     GainControl::Mode agcMode = kDefaultRxAgcMode;
2838     switch (mode)
2839     {
2840         case kAgcDefault:
2841             break;
2842         case kAgcUnchanged:
2843             agcMode = rx_audioproc_->gain_control()->mode();
2844             break;
2845         case kAgcFixedDigital:
2846             agcMode = GainControl::kFixedDigital;
2847             break;
2848         case kAgcAdaptiveDigital:
2849             agcMode = GainControl::kAdaptiveDigital;
2850             break;
2851         default:
2852             _engineStatisticsPtr->SetLastError(
2853                 VE_INVALID_ARGUMENT, kTraceError,
2854                 "SetRxAgcStatus() invalid Agc mode");
2855             return -1;
2856     }
2857
2858     if (rx_audioproc_->gain_control()->set_mode(agcMode) != 0)
2859     {
2860         _engineStatisticsPtr->SetLastError(
2861             VE_APM_ERROR, kTraceError,
2862             "SetRxAgcStatus() failed to set Agc mode");
2863         return -1;
2864     }
2865     if (rx_audioproc_->gain_control()->Enable(enable) != 0)
2866     {
2867         _engineStatisticsPtr->SetLastError(
2868             VE_APM_ERROR, kTraceError,
2869             "SetRxAgcStatus() failed to set Agc state");
2870         return -1;
2871     }
2872
2873     _rxAgcIsEnabled = enable;
2874     channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
2875
2876     return 0;
2877 }
2878
2879 int
2880 Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode)
2881 {
2882     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2883                      "Channel::GetRxAgcStatus(enable=?, mode=?)");
2884
2885     bool enable = rx_audioproc_->gain_control()->is_enabled();
2886     GainControl::Mode agcMode =
2887         rx_audioproc_->gain_control()->mode();
2888
2889     enabled = enable;
2890
2891     switch (agcMode)
2892     {
2893         case GainControl::kFixedDigital:
2894             mode = kAgcFixedDigital;
2895             break;
2896         case GainControl::kAdaptiveDigital:
2897             mode = kAgcAdaptiveDigital;
2898             break;
2899         default:
2900             _engineStatisticsPtr->SetLastError(
2901                 VE_APM_ERROR, kTraceError,
2902                 "GetRxAgcStatus() invalid Agc mode");
2903             return -1;
2904     }
2905
2906     return 0;
2907 }
2908
2909 int
2910 Channel::SetRxAgcConfig(AgcConfig config)
2911 {
2912     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2913                  "Channel::SetRxAgcConfig()");
2914
2915     if (rx_audioproc_->gain_control()->set_target_level_dbfs(
2916         config.targetLeveldBOv) != 0)
2917     {
2918         _engineStatisticsPtr->SetLastError(
2919             VE_APM_ERROR, kTraceError,
2920             "SetRxAgcConfig() failed to set target peak |level|"
2921             " (or envelope) of the Agc");
2922         return -1;
2923     }
2924     if (rx_audioproc_->gain_control()->set_compression_gain_db(
2925         config.digitalCompressionGaindB) != 0)
2926     {
2927         _engineStatisticsPtr->SetLastError(
2928             VE_APM_ERROR, kTraceError,
2929             "SetRxAgcConfig() failed to set the range in |gain| the"
2930             " digital compression stage may apply");
2931         return -1;
2932     }
2933     if (rx_audioproc_->gain_control()->enable_limiter(
2934         config.limiterEnable) != 0)
2935     {
2936         _engineStatisticsPtr->SetLastError(
2937             VE_APM_ERROR, kTraceError,
2938             "SetRxAgcConfig() failed to set the hard limiter state");
2939         return -1;
2940     }
2941
2942     return 0;
2943 }
2944
2945 int
2946 Channel::GetRxAgcConfig(AgcConfig& config)
2947 {
2948     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2949                  "Channel::GetRxAgcConfig(config=?)");
2950
2951     config.targetLeveldBOv =
2952         rx_audioproc_->gain_control()->target_level_dbfs();
2953     config.digitalCompressionGaindB =
2954         rx_audioproc_->gain_control()->compression_gain_db();
2955     config.limiterEnable =
2956         rx_audioproc_->gain_control()->is_limiter_enabled();
2957
2958     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2959                VoEId(_instanceId,_channelId), "GetRxAgcConfig() => "
2960                    "targetLeveldBOv=%u, digitalCompressionGaindB=%u,"
2961                    " limiterEnable=%d",
2962                    config.targetLeveldBOv,
2963                    config.digitalCompressionGaindB,
2964                    config.limiterEnable);
2965
2966     return 0;
2967 }
2968
2969 #endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
2970
2971 #ifdef WEBRTC_VOICE_ENGINE_NR
2972
2973 int
2974 Channel::SetRxNsStatus(bool enable, NsModes mode)
2975 {
2976     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2977                  "Channel::SetRxNsStatus(enable=%d, mode=%d)",
2978                  (int)enable, (int)mode);
2979
2980     NoiseSuppression::Level nsLevel = kDefaultNsMode;
2981     switch (mode)
2982     {
2983
2984         case kNsDefault:
2985             break;
2986         case kNsUnchanged:
2987             nsLevel = rx_audioproc_->noise_suppression()->level();
2988             break;
2989         case kNsConference:
2990             nsLevel = NoiseSuppression::kHigh;
2991             break;
2992         case kNsLowSuppression:
2993             nsLevel = NoiseSuppression::kLow;
2994             break;
2995         case kNsModerateSuppression:
2996             nsLevel = NoiseSuppression::kModerate;
2997             break;
2998         case kNsHighSuppression:
2999             nsLevel = NoiseSuppression::kHigh;
3000             break;
3001         case kNsVeryHighSuppression:
3002             nsLevel = NoiseSuppression::kVeryHigh;
3003             break;
3004     }
3005
3006     if (rx_audioproc_->noise_suppression()->set_level(nsLevel)
3007         != 0)
3008     {
3009         _engineStatisticsPtr->SetLastError(
3010             VE_APM_ERROR, kTraceError,
3011             "SetRxNsStatus() failed to set NS level");
3012         return -1;
3013     }
3014     if (rx_audioproc_->noise_suppression()->Enable(enable) != 0)
3015     {
3016         _engineStatisticsPtr->SetLastError(
3017             VE_APM_ERROR, kTraceError,
3018             "SetRxNsStatus() failed to set NS state");
3019         return -1;
3020     }
3021
3022     _rxNsIsEnabled = enable;
3023     channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
3024
3025     return 0;
3026 }
3027
3028 int
3029 Channel::GetRxNsStatus(bool& enabled, NsModes& mode)
3030 {
3031     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3032                  "Channel::GetRxNsStatus(enable=?, mode=?)");
3033
3034     bool enable =
3035         rx_audioproc_->noise_suppression()->is_enabled();
3036     NoiseSuppression::Level ncLevel =
3037         rx_audioproc_->noise_suppression()->level();
3038
3039     enabled = enable;
3040
3041     switch (ncLevel)
3042     {
3043         case NoiseSuppression::kLow:
3044             mode = kNsLowSuppression;
3045             break;
3046         case NoiseSuppression::kModerate:
3047             mode = kNsModerateSuppression;
3048             break;
3049         case NoiseSuppression::kHigh:
3050             mode = kNsHighSuppression;
3051             break;
3052         case NoiseSuppression::kVeryHigh:
3053             mode = kNsVeryHighSuppression;
3054             break;
3055     }
3056
3057     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3058                VoEId(_instanceId,_channelId),
3059                "GetRxNsStatus() => enabled=%d, mode=%d", enabled, mode);
3060     return 0;
3061 }
3062
3063 #endif // #ifdef WEBRTC_VOICE_ENGINE_NR
3064
3065 int
3066 Channel::RegisterRTPObserver(VoERTPObserver& observer)
3067 {
3068     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3069                  "Channel::RegisterRTPObserver()");
3070     CriticalSectionScoped cs(&_callbackCritSect);
3071
3072     if (_rtpObserverPtr)
3073     {
3074         _engineStatisticsPtr->SetLastError(
3075             VE_INVALID_OPERATION, kTraceError,
3076             "RegisterRTPObserver() observer already enabled");
3077         return -1;
3078     }
3079
3080     _rtpObserverPtr = &observer;
3081     _rtpObserver = true;
3082
3083     return 0;
3084 }
3085
3086 int
3087 Channel::DeRegisterRTPObserver()
3088 {
3089     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3090                  "Channel::DeRegisterRTPObserver()");
3091     CriticalSectionScoped cs(&_callbackCritSect);
3092
3093     if (!_rtpObserverPtr)
3094     {
3095         _engineStatisticsPtr->SetLastError(
3096             VE_INVALID_OPERATION, kTraceWarning,
3097             "DeRegisterRTPObserver() observer already disabled");
3098         return 0;
3099     }
3100
3101     _rtpObserver = false;
3102     _rtpObserverPtr = NULL;
3103
3104     return 0;
3105 }
3106
3107 int
3108 Channel::RegisterRTCPObserver(VoERTCPObserver& observer)
3109 {
3110     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3111                  "Channel::RegisterRTCPObserver()");
3112     CriticalSectionScoped cs(&_callbackCritSect);
3113
3114     if (_rtcpObserverPtr)
3115     {
3116         _engineStatisticsPtr->SetLastError(
3117             VE_INVALID_OPERATION, kTraceError,
3118             "RegisterRTCPObserver() observer already enabled");
3119         return -1;
3120     }
3121
3122     _rtcpObserverPtr = &observer;
3123     _rtcpObserver = true;
3124
3125     return 0;
3126 }
3127
3128 int
3129 Channel::DeRegisterRTCPObserver()
3130 {
3131     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3132                  "Channel::DeRegisterRTCPObserver()");
3133     CriticalSectionScoped cs(&_callbackCritSect);
3134
3135     if (!_rtcpObserverPtr)
3136     {
3137         _engineStatisticsPtr->SetLastError(
3138             VE_INVALID_OPERATION, kTraceWarning,
3139             "DeRegisterRTCPObserver() observer already disabled");
3140         return 0;
3141     }
3142
3143     _rtcpObserver = false;
3144     _rtcpObserverPtr = NULL;
3145
3146     return 0;
3147 }
3148
3149 int
3150 Channel::SetLocalSSRC(unsigned int ssrc)
3151 {
3152     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3153                  "Channel::SetLocalSSRC()");
3154     if (channel_state_.Get().sending)
3155     {
3156         _engineStatisticsPtr->SetLastError(
3157             VE_ALREADY_SENDING, kTraceError,
3158             "SetLocalSSRC() already sending");
3159         return -1;
3160     }
3161     if (_rtpRtcpModule->SetSSRC(ssrc) != 0)
3162     {
3163         _engineStatisticsPtr->SetLastError(
3164             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3165             "SetLocalSSRC() failed to set SSRC");
3166         return -1;
3167     }
3168     return 0;
3169 }
3170
3171 int
3172 Channel::GetLocalSSRC(unsigned int& ssrc)
3173 {
3174     ssrc = _rtpRtcpModule->SSRC();
3175     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3176                  VoEId(_instanceId,_channelId),
3177                  "GetLocalSSRC() => ssrc=%lu", ssrc);
3178     return 0;
3179 }
3180
3181 int
3182 Channel::GetRemoteSSRC(unsigned int& ssrc)
3183 {
3184     ssrc = rtp_receiver_->SSRC();
3185     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3186                  VoEId(_instanceId,_channelId),
3187                  "GetRemoteSSRC() => ssrc=%lu", ssrc);
3188     return 0;
3189 }
3190
3191 int
3192 Channel::GetRemoteCSRCs(unsigned int arrCSRC[15])
3193 {
3194     if (arrCSRC == NULL)
3195     {
3196         _engineStatisticsPtr->SetLastError(
3197             VE_INVALID_ARGUMENT, kTraceError,
3198             "GetRemoteCSRCs() invalid array argument");
3199         return -1;
3200     }
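         // An RTP header carries at most 15 CSRCs (4-bit CC field);
         // kRtpCsrcSize matches that bound and the size of the caller's array.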
3201     uint32_t arrOfCSRC[kRtpCsrcSize];
3202     int32_t CSRCs(0);
3203     CSRCs = rtp_receiver_->CSRCs(arrOfCSRC);
3204     if (CSRCs > 0)
3205     {
3206         memcpy(arrCSRC, arrOfCSRC, CSRCs * sizeof(uint32_t));
3207         for (int i = 0; i < (int) CSRCs; i++)
3208         {
3209             WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3210                        VoEId(_instanceId, _channelId),
3211                        "GetRemoteCSRCs() => arrCSRC[%d]=%lu", i, arrCSRC[i]);
3212         }
3213     } else
3214     {
3215         WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3216                    VoEId(_instanceId, _channelId),
3217                    "GetRemoteCSRCs() => list is empty!");
3218     }
3219     return CSRCs;
3220 }
3221
3222 int Channel::SetSendAudioLevelIndicationStatus(bool enable, unsigned char id) {
3223   _includeAudioLevelIndication = enable;
3224   return SetSendRtpHeaderExtension(enable, kRtpExtensionAudioLevel, id);
3225 }
3226
3227 int Channel::SetReceiveAudioLevelIndicationStatus(bool enable,
3228                                                   unsigned char id) {
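       // Remove any previous registration first so the extension can be
       // re-registered with a new id, or simply left disabled.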
3229   rtp_header_parser_->DeregisterRtpHeaderExtension(
3230       kRtpExtensionAudioLevel);
3231   if (enable && !rtp_header_parser_->RegisterRtpHeaderExtension(
3232           kRtpExtensionAudioLevel, id)) {
3233     return -1;
3234   }
3235   return 0;
3236 }
3237
3238 int Channel::SetSendAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
3239   return SetSendRtpHeaderExtension(enable, kRtpExtensionAbsoluteSendTime, id);
3240 }
3241
3242 int Channel::SetReceiveAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
3243   rtp_header_parser_->DeregisterRtpHeaderExtension(
3244       kRtpExtensionAbsoluteSendTime);
3245   if (enable && !rtp_header_parser_->RegisterRtpHeaderExtension(
3246       kRtpExtensionAbsoluteSendTime, id)) {
3247     return -1;
3248   }
3249   return 0;
3250 }
3251
3252 int
3253 Channel::SetRTCPStatus(bool enable)
3254 {
3255     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3256                  "Channel::SetRTCPStatus()");
3257     if (_rtpRtcpModule->SetRTCPStatus(enable ?
3258         kRtcpCompound : kRtcpOff) != 0)
3259     {
3260         _engineStatisticsPtr->SetLastError(
3261             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3262             "SetRTCPStatus() failed to set RTCP status");
3263         return -1;
3264     }
3265     return 0;
3266 }
3267
3268 int
3269 Channel::GetRTCPStatus(bool& enabled)
3270 {
3271     RTCPMethod method = _rtpRtcpModule->RTCP();
3272     enabled = (method != kRtcpOff);
3273     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3274                  VoEId(_instanceId,_channelId),
3275                  "GetRTCPStatus() => enabled=%d", enabled);
3276     return 0;
3277 }
3278
3279 int
3280 Channel::SetRTCP_CNAME(const char cName[256])
3281 {
3282     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3283                  "Channel::SetRTCP_CNAME()");
3284     if (_rtpRtcpModule->SetCNAME(cName) != 0)
3285     {
3286         _engineStatisticsPtr->SetLastError(
3287             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3288             "SetRTCP_CNAME() failed to set RTCP CNAME");
3289         return -1;
3290     }
3291     return 0;
3292 }
3293
3294 int
3295 Channel::GetRTCP_CNAME(char cName[256])
3296 {
3297     if (_rtpRtcpModule->CNAME(cName) != 0)
3298     {
3299         _engineStatisticsPtr->SetLastError(
3300             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3301             "GetRTCP_CNAME() failed to retrieve RTCP CNAME");
3302         return -1;
3303     }
3304     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3305                  VoEId(_instanceId, _channelId),
3306                  "GetRTCP_CNAME() => cName=%s", cName);
3307     return 0;
3308 }
3309
3310 int
3311 Channel::GetRemoteRTCP_CNAME(char cName[256])
3312 {
3313     if (cName == NULL)
3314     {
3315         _engineStatisticsPtr->SetLastError(
3316             VE_INVALID_ARGUMENT, kTraceError,
3317             "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
3318         return -1;
3319     }
3320     char cname[RTCP_CNAME_SIZE];
3321     const uint32_t remoteSSRC = rtp_receiver_->SSRC();
3322     if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0)
3323     {
3324         _engineStatisticsPtr->SetLastError(
3325             VE_CANNOT_RETRIEVE_CNAME, kTraceError,
3326             "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
3327         return -1;
3328     }
3329     strcpy(cName, cname);
3330     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3331                  VoEId(_instanceId, _channelId),
3332                  "GetRemoteRTCP_CNAME() => cName=%s", cName);
3333     return 0;
3334 }
3335
3336 int
3337 Channel::GetRemoteRTCPData(
3338     unsigned int& NTPHigh,
3339     unsigned int& NTPLow,
3340     unsigned int& timestamp,
3341     unsigned int& playoutTimestamp,
3342     unsigned int* jitter,
3343     unsigned short* fractionLost)
3344 {
3345     // --- Information from sender info in received Sender Reports
3346
3347     RTCPSenderInfo senderInfo;
3348     if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0)
3349     {
3350         _engineStatisticsPtr->SetLastError(
3351             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3352             "GetRemoteRTCPData() failed to retrieve sender info for remote "
3353             "side");
3354         return -1;
3355     }
3356
3357     // We only utilize 12 out of 20 bytes in the sender info (ignores packet
3358     // and octet count)
3359     NTPHigh = senderInfo.NTPseconds;
3360     NTPLow = senderInfo.NTPfraction;
3361     timestamp = senderInfo.RTPtimeStamp;
3362
3363     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3364                  VoEId(_instanceId, _channelId),
3365                  "GetRemoteRTCPData() => NTPHigh=%lu, NTPLow=%lu, "
3366                  "timestamp=%lu",
3367                  NTPHigh, NTPLow, timestamp);
3368
3369     // --- Locally derived information
3370
3371     // This value is updated on each incoming RTCP packet (0 when no packet
3372     // has been received)
3373     playoutTimestamp = playout_timestamp_rtcp_;
3374
3375     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3376                  VoEId(_instanceId, _channelId),
3377                  "GetRemoteRTCPData() => playoutTimestamp=%lu",
3378                  playout_timestamp_rtcp_);
3379
3380     if (NULL != jitter || NULL != fractionLost)
3381     {
3382         // Get all RTCP receiver report blocks that have been received on this
3383         // channel. If we receive RTP packets from a remote source we know the
3384         // remote SSRC and use the report block from that source.
3385         // Otherwise use the first report block.
3386         std::vector<RTCPReportBlock> remote_stats;
3387         if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
3388             remote_stats.empty()) {
3389           WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3390                        VoEId(_instanceId, _channelId),
3391                        "GetRemoteRTCPData() failed to measure statistics due"
3392                        " to lack of received RTP and/or RTCP packets");
3393           return -1;
3394         }
3395
3396         uint32_t remoteSSRC = rtp_receiver_->SSRC();
3397         std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
3398         for (; it != remote_stats.end(); ++it) {
3399           if (it->remoteSSRC == remoteSSRC)
3400             break;
3401         }
3402
3403         if (it == remote_stats.end()) {
3404           // If we have not received any RTCP packets from this SSRC it probably
3405           // means that we have not received any RTP packets.
3406           // Use the first received report block instead.
3407           it = remote_stats.begin();
3408           remoteSSRC = it->remoteSSRC;
3409         }
3410
3411         if (jitter) {
3412           *jitter = it->jitter;
3413           WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3414                        VoEId(_instanceId, _channelId),
3415                        "GetRemoteRTCPData() => jitter = %lu", *jitter);
3416         }
3417
3418         if (fractionLost) {
3419           *fractionLost = it->fractionLost;
3420           WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3421                        VoEId(_instanceId, _channelId),
3422                        "GetRemoteRTCPData() => fractionLost = %lu",
3423                        *fractionLost);
3424         }
3425     }
3426     return 0;
3427 }
3428
3429 int
3430 Channel::SendApplicationDefinedRTCPPacket(unsigned char subType,
3431                                              unsigned int name,
3432                                              const char* data,
3433                                              unsigned short dataLengthInBytes)
3434 {
3435     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3436                  "Channel::SendApplicationDefinedRTCPPacket()");
3437     if (!channel_state_.Get().sending)
3438     {
3439         _engineStatisticsPtr->SetLastError(
3440             VE_NOT_SENDING, kTraceError,
3441             "SendApplicationDefinedRTCPPacket() not sending");
3442         return -1;
3443     }
3444     if (NULL == data)
3445     {
3446         _engineStatisticsPtr->SetLastError(
3447             VE_INVALID_ARGUMENT, kTraceError,
3448             "SendApplicationDefinedRTCPPacket() invalid data value");
3449         return -1;
3450     }
3451     if (dataLengthInBytes % 4 != 0)
3452     {
3453         _engineStatisticsPtr->SetLastError(
3454             VE_INVALID_ARGUMENT, kTraceError,
3455             "SendApplicationDefinedRTCPPacket() invalid length value");
3456         return -1;
3457     }
3458     RTCPMethod status = _rtpRtcpModule->RTCP();
3459     if (status == kRtcpOff)
3460     {
3461         _engineStatisticsPtr->SetLastError(
3462             VE_RTCP_ERROR, kTraceError,
3463             "SendApplicationDefinedRTCPPacket() RTCP is disabled");
3464         return -1;
3465     }
3466
3467     // Create and schedule the RTCP APP packet for transmission
3468     if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
3469         subType,
3470         name,
3471         (const unsigned char*) data,
3472         dataLengthInBytes) != 0)
3473     {
3474         _engineStatisticsPtr->SetLastError(
3475             VE_SEND_ERROR, kTraceError,
3476             "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
3477         return -1;
3478     }
3479     return 0;
3480 }
3481
3482 int
3483 Channel::GetRTPStatistics(
3484         unsigned int& averageJitterMs,
3485         unsigned int& maxJitterMs,
3486         unsigned int& discardedPackets)
3487 {
3488     // The jitter statistics are updated for each received RTP packet and
3489     // are based on received packets.
3490     if (_rtpRtcpModule->RTCP() == kRtcpOff) {
3491       // If RTCP is off, there is no timed thread in the RTCP module regularly
3492       // generating new stats, trigger the update manually here instead.
3493       StreamStatistician* statistician =
3494           rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
3495       if (statistician) {
3496         // Don't use returned statistics, use data from proxy instead so that
3497         // max jitter can be fetched atomically.
3498         RtcpStatistics s;
3499         statistician->GetStatistics(&s, true);
3500       }
3501     }
3502
3503     ChannelStatistics stats = statistics_proxy_->GetStats();
3504     const int32_t playoutFrequency = audio_coding_->PlayoutFrequency();
3505     if (playoutFrequency > 0) {
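           // RTCP interarrival jitter is expressed in RTP timestamp units
           // (RFC 3550); dividing by samples-per-millisecond yields ms.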
3506       // Scale RTP statistics given the current playout frequency
3507       maxJitterMs = stats.max_jitter / (playoutFrequency / 1000);
3508       averageJitterMs = stats.rtcp.jitter / (playoutFrequency / 1000);
3509     }
3510
3511     discardedPackets = _numberOfDiscardedPackets;
3512
3513     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3514                VoEId(_instanceId, _channelId),
3515                "GetRTPStatistics() => averageJitterMs = %lu, maxJitterMs = %lu,"
3516                " discardedPackets = %lu)",
3517                averageJitterMs, maxJitterMs, discardedPackets);
3518     return 0;
3519 }
3520
3521 int Channel::GetRemoteRTCPSenderInfo(SenderInfo* sender_info) {
3522   if (sender_info == NULL) {
3523     _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
3524         "GetRemoteRTCPSenderInfo() invalid sender_info.");
3525     return -1;
3526   }
3527
3528   // Get the sender info from the latest received RTCP Sender Report.
3529   RTCPSenderInfo rtcp_sender_info;
3530   if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_sender_info) != 0) {
3531     _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3532         "GetRemoteRTCPSenderInfo() failed to read RTCP SR sender info.");
3533     return -1;
3534   }
3535
3536   sender_info->NTP_timestamp_high = rtcp_sender_info.NTPseconds;
3537   sender_info->NTP_timestamp_low = rtcp_sender_info.NTPfraction;
3538   sender_info->RTP_timestamp = rtcp_sender_info.RTPtimeStamp;
3539   sender_info->sender_packet_count = rtcp_sender_info.sendPacketCount;
3540   sender_info->sender_octet_count = rtcp_sender_info.sendOctetCount;
3541   return 0;
3542 }
3543
3544 int Channel::GetRemoteRTCPReportBlocks(
3545     std::vector<ReportBlock>* report_blocks) {
3546   if (report_blocks == NULL) {
3547     _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
3548       "GetRemoteRTCPReportBlocks() invalid report_blocks.");
3549     return -1;
3550   }
3551
3552   // Get the report blocks from the latest received RTCP Sender or Receiver
3553   // Report. Each element in the vector contains the sender's SSRC and a
3554   // report block according to RFC 3550.
3555   std::vector<RTCPReportBlock> rtcp_report_blocks;
3556   if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
3557     _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3558         "GetRemoteRTCPReportBlocks() failed to read RTCP SR/RR report block.");
3559     return -1;
3560   }
3561
3562   if (rtcp_report_blocks.empty())
3563     return 0;
3564
3565   std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
3566   for (; it != rtcp_report_blocks.end(); ++it) {
3567     ReportBlock report_block;
3568     report_block.sender_SSRC = it->remoteSSRC;
3569     report_block.source_SSRC = it->sourceSSRC;
3570     report_block.fraction_lost = it->fractionLost;
3571     report_block.cumulative_num_packets_lost = it->cumulativeLost;
3572     report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
3573     report_block.interarrival_jitter = it->jitter;
3574     report_block.last_SR_timestamp = it->lastSR;
3575     report_block.delay_since_last_SR = it->delaySinceLastSR;
3576     report_blocks->push_back(report_block);
3577   }
3578   return 0;
3579 }
3580
3581 int
3582 Channel::GetRTPStatistics(CallStatistics& stats)
3583 {
3584     // --- Part one of the final structure (four values)
3585
3586     // The jitter statistics are updated for each received RTP packet and
3587     // are based on received packets.
3588     RtcpStatistics statistics;
3589     StreamStatistician* statistician =
3590         rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
3591     if (!statistician || !statistician->GetStatistics(
3592         &statistics, _rtpRtcpModule->RTCP() == kRtcpOff)) {
3593       _engineStatisticsPtr->SetLastError(
3594           VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
3595           "GetRTPStatistics() failed to read RTP statistics from the "
3596           "RTP/RTCP module");
3597     }
3598
3599     stats.fractionLost = statistics.fraction_lost;
3600     stats.cumulativeLost = statistics.cumulative_lost;
3601     stats.extendedMax = statistics.extended_max_sequence_number;
3602     stats.jitterSamples = statistics.jitter;
3603
3604     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3605                  VoEId(_instanceId, _channelId),
3606                  "GetRTPStatistics() => fractionLost=%lu, cumulativeLost=%lu,"
3607                  " extendedMax=%lu, jitterSamples=%li)",
3608                  stats.fractionLost, stats.cumulativeLost, stats.extendedMax,
3609                  stats.jitterSamples);
3610
3611     // --- Part two of the final structure (one value)
3612
3613     uint16_t RTT(0);
3614     RTCPMethod method = _rtpRtcpModule->RTCP();
3615     if (method == kRtcpOff)
3616     {
3617         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3618                      VoEId(_instanceId, _channelId),
3619                      "GetRTPStatistics() RTCP is disabled => valid RTT "
3620                      "measurements cannot be retrieved");
3621     } else
3622     {
3623         // The remote SSRC will be zero if no RTP packet has been received.
3624         uint32_t remoteSSRC = rtp_receiver_->SSRC();
3625         if (remoteSSRC > 0)
3626         {
3627             uint16_t avgRTT(0);
3628             uint16_t maxRTT(0);
3629             uint16_t minRTT(0);
3630
3631             if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT, &maxRTT)
3632                 != 0)
3633             {
3634                 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3635                              VoEId(_instanceId, _channelId),
3636                              "GetRTPStatistics() failed to retrieve RTT from "
3637                              "the RTP/RTCP module");
3638             }
3639         } else
3640         {
3641             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3642                          VoEId(_instanceId, _channelId),
3643                          "GetRTPStatistics() failed to measure RTT since no "
3644                          "RTP packets have been received yet");
3645         }
3646     }
3647
3648     stats.rttMs = static_cast<int> (RTT);
3649
3650     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3651                  VoEId(_instanceId, _channelId),
3652                  "GetRTPStatistics() => rttMs=%d", stats.rttMs);
3653
3654     // --- Part three of the final structure (four values)
3655
3656     uint32_t bytesSent(0);
3657     uint32_t packetsSent(0);
3658     uint32_t bytesReceived(0);
3659     uint32_t packetsReceived(0);
3660
3661     if (statistician) {
3662       statistician->GetDataCounters(&bytesReceived, &packetsReceived);
3663     }
3664
3665     if (_rtpRtcpModule->DataCountersRTP(&bytesSent,
3666                                         &packetsSent) != 0)
3667     {
3668         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3669                      VoEId(_instanceId, _channelId),
3670                      "GetRTPStatistics() failed to retrieve RTP datacounters =>"
3671                      " output will not be complete");
3672     }
3673
3674     stats.bytesSent = bytesSent;
3675     stats.packetsSent = packetsSent;
3676     stats.bytesReceived = bytesReceived;
3677     stats.packetsReceived = packetsReceived;
3678
3679     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3680                  VoEId(_instanceId, _channelId),
3681                  "GetRTPStatistics() => bytesSent=%d, packetsSent=%d,"
3682                  " bytesReceived=%d, packetsReceived=%d)",
3683                  stats.bytesSent, stats.packetsSent, stats.bytesReceived,
3684                  stats.packetsReceived);
3685
3686     return 0;
3687 }
3688
3689 int Channel::SetFECStatus(bool enable, int redPayloadtype) {
3690   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3691                "Channel::SetFECStatus()");
3692
3693   if (enable) {
3694     if (redPayloadtype < 0 || redPayloadtype > 127) {
3695       _engineStatisticsPtr->SetLastError(
3696           VE_PLTYPE_ERROR, kTraceError,
3697           "SetFECStatus() invalid RED payload type");
3698       return -1;
3699     }
3700
3701     if (SetRedPayloadType(redPayloadtype) < 0) {
3702       _engineStatisticsPtr->SetLastError(
3703           VE_CODEC_ERROR, kTraceError,
3704           "SetFECStatus() failed to register RED in the ACM");
3705       return -1;
3706     }
3707   }
3708
3709   if (audio_coding_->SetFECStatus(enable) != 0) {
3710     _engineStatisticsPtr->SetLastError(
3711         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3712         "SetFECStatus() failed to set FEC state in the ACM");
3713     return -1;
3714   }
3715   return 0;
3716 }
3717
3718 int
3719 Channel::GetFECStatus(bool& enabled, int& redPayloadtype)
3720 {
3721     enabled = audio_coding_->FECStatus();
3722     if (enabled)
3723     {
3724         int8_t payloadType(0);
3725         if (_rtpRtcpModule->SendREDPayloadType(payloadType) != 0)
3726         {
3727             _engineStatisticsPtr->SetLastError(
3728                 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3729                 "GetFECStatus() failed to retrieve RED PT from RTP/RTCP "
3730                 "module");
3731             return -1;
3732         }
             redPayloadtype = payloadType;
3733         WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3734                    VoEId(_instanceId, _channelId),
3735                    "GetFECStatus() => enabled=%d, redPayloadtype=%d",
3736                    enabled, redPayloadtype);
3737         return 0;
3738     }
3739     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3740                  VoEId(_instanceId, _channelId),
3741                  "GetFECStatus() => enabled=%d", enabled);
3742     return 0;
3743 }
3744
3745 void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
3746   // None of these functions can fail.
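       // |maxNumberOfPackets| sizes the send-side retransmission history, the
       // receive-side reordering threshold and the ACM's NACK list.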
3747   _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
3748   rtp_receive_statistics_->SetMaxReorderingThreshold(maxNumberOfPackets);
3749   rtp_receiver_->SetNACKStatus(enable ? kNackRtcp : kNackOff);
3750   if (enable)
3751     audio_coding_->EnableNack(maxNumberOfPackets);
3752   else
3753     audio_coding_->DisableNack();
3754 }
3755
3756 // Called when we are missing one or more packets.
3757 int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
3758   return _rtpRtcpModule->SendNACK(sequence_numbers, length);
3759 }
3760
3761 int
3762 Channel::StartRTPDump(const char fileNameUTF8[1024],
3763                       RTPDirections direction)
3764 {
3765     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3766                  "Channel::StartRTPDump()");
3767     if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
3768     {
3769         _engineStatisticsPtr->SetLastError(
3770             VE_INVALID_ARGUMENT, kTraceError,
3771             "StartRTPDump() invalid RTP direction");
3772         return -1;
3773     }
3774     RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
3775         &_rtpDumpIn : &_rtpDumpOut;
3776     if (rtpDumpPtr == NULL)
3777     {
3778         assert(false);
3779         return -1;
3780     }
3781     if (rtpDumpPtr->IsActive())
3782     {
3783         rtpDumpPtr->Stop();
3784     }
3785     if (rtpDumpPtr->Start(fileNameUTF8) != 0)
3786     {
3787         _engineStatisticsPtr->SetLastError(
3788             VE_BAD_FILE, kTraceError,
3789             "StartRTPDump() failed to create file");
3790         return -1;
3791     }
3792     return 0;
3793 }
3794
3795 int
3796 Channel::StopRTPDump(RTPDirections direction)
3797 {
3798     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3799                  "Channel::StopRTPDump()");
3800     if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
3801     {
3802         _engineStatisticsPtr->SetLastError(
3803             VE_INVALID_ARGUMENT, kTraceError,
3804             "StopRTPDump() invalid RTP direction");
3805         return -1;
3806     }
3807     RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
3808         &_rtpDumpIn : &_rtpDumpOut;
3809     if (rtpDumpPtr == NULL)
3810     {
3811         assert(false);
3812         return -1;
3813     }
3814     if (!rtpDumpPtr->IsActive())
3815     {
3816         return 0;
3817     }
3818     return rtpDumpPtr->Stop();
3819 }
3820
3821 bool
3822 Channel::RTPDumpIsActive(RTPDirections direction)
3823 {
3824     if ((direction != kRtpIncoming) &&
3825         (direction != kRtpOutgoing))
3826     {
3827         _engineStatisticsPtr->SetLastError(
3828             VE_INVALID_ARGUMENT, kTraceError,
3829             "RTPDumpIsActive() invalid RTP direction");
3830         return false;
3831     }
3832     RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
3833         &_rtpDumpIn : &_rtpDumpOut;
3834     return rtpDumpPtr->IsActive();
3835 }
3836
3837 void Channel::SetVideoEngineBWETarget(ViENetwork* vie_network,
3838                                       int video_channel) {
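       // Release any previously registered ViENetwork/channel pair before
       // (optionally) storing the new bandwidth-estimation target.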
3839   CriticalSectionScoped cs(&_callbackCritSect);
3840   if (vie_network_) {
3841     vie_network_->Release();
3842     vie_network_ = NULL;
3843   }
3844   video_channel_ = -1;
3845
3846   if (vie_network != NULL && video_channel != -1) {
3847     vie_network_ = vie_network;
3848     video_channel_ = video_channel;
3849   }
3850 }
3851
3852 uint32_t
3853 Channel::Demultiplex(const AudioFrame& audioFrame)
3854 {
3855     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3856                  "Channel::Demultiplex()");
3857     _audioFrame.CopyFrom(audioFrame);
3858     _audioFrame.id_ = _channelId;
3859     return 0;
3860 }
3861
3862 void Channel::Demultiplex(const int16_t* audio_data,
3863                           int sample_rate,
3864                           int number_of_frames,
3865                           int number_of_channels) {
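       // Down-mix and resample the raw capture buffer to the send codec's
       // channel count and sample rate; the result is written to |_audioFrame|.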
3866   CodecInst codec;
3867   GetSendCodec(codec);
3868
3869   if (!mono_recording_audio_.get()) {
3870     // Temporary space for DownConvertToCodecFormat.
3871     mono_recording_audio_.reset(new int16_t[kMaxMonoDataSizeSamples]);
3872   }
3873   DownConvertToCodecFormat(audio_data,
3874                            number_of_frames,
3875                            number_of_channels,
3876                            sample_rate,
3877                            codec.channels,
3878                            codec.plfreq,
3879                            mono_recording_audio_.get(),
3880                            &input_resampler_,
3881                            &_audioFrame);
3882 }
3883
3884 uint32_t
3885 Channel::PrepareEncodeAndSend(int mixingFrequency)
3886 {
3887     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3888                  "Channel::PrepareEncodeAndSend()");
3889
3890     if (_audioFrame.samples_per_channel_ == 0)
3891     {
3892         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
3893                      "Channel::PrepareEncodeAndSend() invalid audio frame");
3894         return -1;
3895     }
3896
3897     if (channel_state_.Get().input_file_playing)
3898     {
3899         MixOrReplaceAudioWithFile(mixingFrequency);
3900     }
3901
3902     if (Mute())
3903     {
3904         AudioFrameOperations::Mute(_audioFrame);
3905     }
3906
3907     if (channel_state_.Get().input_external_media)
3908     {
3909         CriticalSectionScoped cs(&_callbackCritSect);
3910         const bool isStereo = (_audioFrame.num_channels_ == 2);
3911         if (_inputExternalMediaCallbackPtr)
3912         {
3913             _inputExternalMediaCallbackPtr->Process(
3914                 _channelId,
3915                 kRecordingPerChannel,
3916                (int16_t*)_audioFrame.data_,
3917                 _audioFrame.samples_per_channel_,
3918                 _audioFrame.sample_rate_hz_,
3919                 isStereo);
3920         }
3921     }
3922
3923     InsertInbandDtmfTone();
3924
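         // Measure the frame's RMS level so it can be reported in the
         // audio-level RTP header extension.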
3925     if (_includeAudioLevelIndication) {
3926       int length = _audioFrame.samples_per_channel_ * _audioFrame.num_channels_;
3927       rms_level_.Process(_audioFrame.data_, length);
3928     }
3929
3930     return 0;
3931 }
3932
3933 uint32_t
3934 Channel::EncodeAndSend()
3935 {
3936     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3937                  "Channel::EncodeAndSend()");
3938
3939     assert(_audioFrame.num_channels_ <= 2);
3940     if (_audioFrame.samples_per_channel_ == 0)
3941     {
3942         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
3943                      "Channel::EncodeAndSend() invalid audio frame");
3944         return -1;
3945     }
3946
3947     _audioFrame.id_ = _channelId;
3948
3949     // --- Add 10ms of raw (PCM) audio data to the encoder @ 32kHz.
3950
3951     // The ACM resamples internally.
3952     _audioFrame.timestamp_ = _timeStamp;
3953     if (audio_coding_->Add10MsData((AudioFrame&)_audioFrame) != 0)
3954     {
3955         WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
3956                      "Channel::EncodeAndSend() ACM encoding failed");
3957         return -1;
3958     }
3959
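         // Advance the send timestamp by the number of samples in this 10 ms
         // frame.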
3960     _timeStamp += _audioFrame.samples_per_channel_;
3961
3962     // --- Encode if complete frame is ready
3963
3964     // This call will trigger AudioPacketizationCallback::SendData if encoding
3965     // is done and payload is ready for packetization and transmission.
3966     return audio_coding_->Process();
3967 }
3968
3969 int Channel::RegisterExternalMediaProcessing(
3970     ProcessingTypes type,
3971     VoEMediaProcess& processObject)
3972 {
3973     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3974                  "Channel::RegisterExternalMediaProcessing()");
3975
3976     CriticalSectionScoped cs(&_callbackCritSect);
3977
3978     if (kPlaybackPerChannel == type)
3979     {
3980         if (_outputExternalMediaCallbackPtr)
3981         {
3982             _engineStatisticsPtr->SetLastError(
3983                 VE_INVALID_OPERATION, kTraceError,
3984                 "Channel::RegisterExternalMediaProcessing() "
3985                 "output external media already enabled");
3986             return -1;
3987         }
3988         _outputExternalMediaCallbackPtr = &processObject;
3989         _outputExternalMedia = true;
3990     }
3991     else if (kRecordingPerChannel == type)
3992     {
3993         if (_inputExternalMediaCallbackPtr)
3994         {
3995             _engineStatisticsPtr->SetLastError(
3996                 VE_INVALID_OPERATION, kTraceError,
3997                 "Channel::RegisterExternalMediaProcessing() "
3998                 "input external media already enabled");
3999             return -1;
4000         }
4001         _inputExternalMediaCallbackPtr = &processObject;
4002         channel_state_.SetInputExternalMedia(true);
4003     }
4004     return 0;
4005 }
4006
4007 int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type)
4008 {
4009     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4010                  "Channel::DeRegisterExternalMediaProcessing()");
4011
4012     CriticalSectionScoped cs(&_callbackCritSect);
4013
4014     if (kPlaybackPerChannel == type)
4015     {
4016         if (!_outputExternalMediaCallbackPtr)
4017         {
4018             _engineStatisticsPtr->SetLastError(
4019                 VE_INVALID_OPERATION, kTraceWarning,
4020                 "Channel::DeRegisterExternalMediaProcessing() "
4021                 "output external media already disabled");
4022             return 0;
4023         }
4024         _outputExternalMedia = false;
4025         _outputExternalMediaCallbackPtr = NULL;
4026     }
4027     else if (kRecordingPerChannel == type)
4028     {
4029         if (!_inputExternalMediaCallbackPtr)
4030         {
4031             _engineStatisticsPtr->SetLastError(
4032                 VE_INVALID_OPERATION, kTraceWarning,
4033                 "Channel::DeRegisterExternalMediaProcessing() "
4034                 "input external media already disabled");
4035             return 0;
4036         }
4037         channel_state_.SetInputExternalMedia(false);
4038         _inputExternalMediaCallbackPtr = NULL;
4039     }
4040
4041     return 0;
4042 }
4043
4044 int Channel::SetExternalMixing(bool enabled) {
4045     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4046                  "Channel::SetExternalMixing(enabled=%d)", enabled);
4047
4048     if (channel_state_.Get().playing)
4049     {
4050         _engineStatisticsPtr->SetLastError(
4051             VE_INVALID_OPERATION, kTraceError,
4052             "Channel::SetExternalMixing() "
4053             "external mixing cannot be changed while playing.");
4054         return -1;
4055     }
4056
4057     _externalMixing = enabled;
4058
4059     return 0;
4060 }
4061
4062 int
4063 Channel::GetNetworkStatistics(NetworkStatistics& stats)
4064 {
4065     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4066                  "Channel::GetNetworkStatistics()");
4067     ACMNetworkStatistics acm_stats;
4068     int return_value = audio_coding_->NetworkStatistics(&acm_stats);
4069     if (return_value >= 0) {
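           // The raw copy assumes NetworkStatistics and ACMNetworkStatistics
           // share the same layout.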
4070       memcpy(&stats, &acm_stats, sizeof(NetworkStatistics));
4071     }
4072     return return_value;
4073 }
4074
4075 void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const {
4076   audio_coding_->GetDecodingCallStatistics(stats);
4077 }
4078
4079 bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
4080                                int* playout_buffer_delay_ms) const {
4081   if (_average_jitter_buffer_delay_us == 0) {
4082     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4083                  "Channel::GetDelayEstimate() no valid estimate.");
4084     return false;
4085   }
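       // _average_jitter_buffer_delay_us is stored in microseconds; adding 500
       // rounds to the nearest millisecond before the fixed packet delay is
       // added.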
4086   *jitter_buffer_delay_ms = (_average_jitter_buffer_delay_us + 500) / 1000 +
4087       _recPacketDelayMs;
4088   *playout_buffer_delay_ms = playout_delay_ms_;
4089   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4090                "Channel::GetDelayEstimate()");
4091   return true;
4092 }
4093
4094 int Channel::SetInitialPlayoutDelay(int delay_ms)
4095 {
4096   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4097                "Channel::SetInitialPlayoutDelay()");
4098   if ((delay_ms < kVoiceEngineMinMinPlayoutDelayMs) ||
4099       (delay_ms > kVoiceEngineMaxMinPlayoutDelayMs))
4100   {
4101     _engineStatisticsPtr->SetLastError(
4102         VE_INVALID_ARGUMENT, kTraceError,
4103         "SetInitialPlayoutDelay() invalid min delay");
4104     return -1;
4105   }
4106   if (audio_coding_->SetInitialPlayoutDelay(delay_ms) != 0)
4107   {
4108     _engineStatisticsPtr->SetLastError(
4109         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4110         "SetInitialPlayoutDelay() failed to set min playout delay");
4111     return -1;
4112   }
4113   return 0;
4114 }
4115
4116
4117 int
4118 Channel::SetMinimumPlayoutDelay(int delayMs)
4119 {
4120     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4121                  "Channel::SetMinimumPlayoutDelay()");
4122     if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
4123         (delayMs > kVoiceEngineMaxMinPlayoutDelayMs))
4124     {
4125         _engineStatisticsPtr->SetLastError(
4126             VE_INVALID_ARGUMENT, kTraceError,
4127             "SetMinimumPlayoutDelay() invalid min delay");
4128         return -1;
4129     }
4130     if (audio_coding_->SetMinimumPlayoutDelay(delayMs) != 0)
4131     {
4132         _engineStatisticsPtr->SetLastError(
4133             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4134             "SetMinimumPlayoutDelay() failed to set min playout delay");
4135         return -1;
4136     }
4137     return 0;
4138 }
4139
4140 void Channel::UpdatePlayoutTimestamp(bool rtcp) {
4141   uint32_t playout_timestamp = 0;
4142
4143   if (audio_coding_->PlayoutTimestamp(&playout_timestamp) == -1)  {
4144     WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4145                  "Channel::UpdatePlayoutTimestamp() failed to read playout"
4146                  " timestamp from the ACM");
4147     _engineStatisticsPtr->SetLastError(
4148         VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4149         "UpdatePlayoutTimestamp() failed to retrieve timestamp");
4150     return;
4151   }
4152
4153   uint16_t delay_ms = 0;
4154   if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
4155     WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4156                  "Channel::UpdatePlayoutTimestamp() failed to read playout"
4157                  " delay from the ADM");
4158     _engineStatisticsPtr->SetLastError(
4159         VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4160         "UpdatePlayoutTimestamp() failed to retrieve playout delay");
4161     return;
4162   }
4163
4164   int32_t playout_frequency = audio_coding_->PlayoutFrequency();
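       // As in UpdatePacketDelay(): the RTP clock rate is 8 kHz for G.722 and
       // 48 kHz for Opus, regardless of the codec's internal sampling rate.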
4165   CodecInst current_receive_codec;
4166   if (audio_coding_->ReceiveCodec(&current_receive_codec) == 0) {
4167     if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
4168       playout_frequency = 8000;
4169     } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
4170       playout_frequency = 48000;
4171     }
4172   }
4173
4174   jitter_buffer_playout_timestamp_ = playout_timestamp;
4175
4176   // Remove the playout delay.
4177   playout_timestamp -= (delay_ms * (playout_frequency / 1000));
4178
4179   WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4180                "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu",
4181                playout_timestamp);
4182
4183   if (rtcp) {
4184     playout_timestamp_rtcp_ = playout_timestamp;
4185   } else {
4186     playout_timestamp_rtp_ = playout_timestamp;
4187   }
4188   playout_delay_ms_ = delay_ms;
4189 }
4190
4191 int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
4192   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4193                "Channel::GetPlayoutTimestamp()");
4194   if (playout_timestamp_rtp_ == 0)  {
4195     _engineStatisticsPtr->SetLastError(
4196         VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4197         "GetPlayoutTimestamp() failed to retrieve timestamp");
4198     return -1;
4199   }
4200   timestamp = playout_timestamp_rtp_;
4201   WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4202                VoEId(_instanceId,_channelId),
4203                "GetPlayoutTimestamp() => timestamp=%u", timestamp);
4204   return 0;
4205 }
4206
4207 int
4208 Channel::SetInitTimestamp(unsigned int timestamp)
4209 {
4210     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4211                "Channel::SetInitTimestamp()");
4212     if (channel_state_.Get().sending)
4213     {
4214         _engineStatisticsPtr->SetLastError(
4215             VE_SENDING, kTraceError, "SetInitTimestamp() already sending");
4216         return -1;
4217     }
4218     if (_rtpRtcpModule->SetStartTimestamp(timestamp) != 0)
4219     {
4220         _engineStatisticsPtr->SetLastError(
4221             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4222             "SetInitTimestamp() failed to set timestamp");
4223         return -1;
4224     }
4225     return 0;
4226 }
4227
4228 int
4229 Channel::SetInitSequenceNumber(short sequenceNumber)
4230 {
4231     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4232                  "Channel::SetInitSequenceNumber()");
4233     if (channel_state_.Get().sending)
4234     {
4235         _engineStatisticsPtr->SetLastError(
4236             VE_SENDING, kTraceError,
4237             "SetInitSequenceNumber() already sending");
4238         return -1;
4239     }
4240     if (_rtpRtcpModule->SetSequenceNumber(sequenceNumber) != 0)
4241     {
4242         _engineStatisticsPtr->SetLastError(
4243             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4244             "SetInitSequenceNumber() failed to set sequence number");
4245         return -1;
4246     }
4247     return 0;
4248 }
4249
4250 int
4251 Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule, RtpReceiver** rtp_receiver) const
4252 {
4253     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4254                  "Channel::GetRtpRtcp()");
4255     *rtpRtcpModule = _rtpRtcpModule.get();
4256     *rtp_receiver = rtp_receiver_.get();
4257     return 0;
4258 }
4259
4260 // TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
4261 // a shared helper.
4262 int32_t
4263 Channel::MixOrReplaceAudioWithFile(int mixingFrequency)
4264 {
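         // 640 samples covers 10 ms of mono audio at sample rates up to 64 kHz.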
4265     scoped_ptr<int16_t[]> fileBuffer(new int16_t[640]);
4266     int fileSamples(0);
4267
4268     {
4269         CriticalSectionScoped cs(&_fileCritSect);
4270
4271         if (_inputFilePlayerPtr == NULL)
4272         {
4273             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4274                          VoEId(_instanceId, _channelId),
4275                          "Channel::MixOrReplaceAudioWithFile() fileplayer"
4276                              " doesn't exist");
4277             return -1;
4278         }
4279
4280         if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
4281                                                       fileSamples,
4282                                                       mixingFrequency) == -1)
4283         {
4284             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4285                          VoEId(_instanceId, _channelId),
4286                          "Channel::MixOrReplaceAudioWithFile() file mixing "
4287                          "failed");
4288             return -1;
4289         }
4290         if (fileSamples == 0)
4291         {
4292             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4293                          VoEId(_instanceId, _channelId),
4294                          "Channel::MixOrReplaceAudioWithFile() file is ended");
4295             return 0;
4296         }
4297     }
4298
4299     assert(_audioFrame.samples_per_channel_ == fileSamples);
4300
4301     if (_mixFileWithMicrophone)
4302     {
4303         // Currently file stream is always mono.
4304         // TODO(xians): Change the code when FilePlayer supports real stereo.
4305         MixWithSat(_audioFrame.data_,
4306                    _audioFrame.num_channels_,
4307                    fileBuffer.get(),
4308                    1,
4309                    fileSamples);
4310     }
4311     else
4312     {
4313         // Replace ACM audio with file.
4314         // Currently file stream is always mono.
4315         // TODO(xians): Change the code when FilePlayer supports real stereo.
4316         _audioFrame.UpdateFrame(_channelId,
4317                                 -1,
4318                                 fileBuffer.get(),
4319                                 fileSamples,
4320                                 mixingFrequency,
4321                                 AudioFrame::kNormalSpeech,
4322                                 AudioFrame::kVadUnknown,
4323                                 1);
4324
4325     }
4326     return 0;
4327 }
4328
4329 int32_t
4330 Channel::MixAudioWithFile(AudioFrame& audioFrame,
4331                           int mixingFrequency)
4332 {
4333     assert(mixingFrequency <= 32000);
4334
4335     scoped_ptr<int16_t[]> fileBuffer(new int16_t[640]);
4336     int fileSamples(0);
4337
4338     {
4339         CriticalSectionScoped cs(&_fileCritSect);
4340
4341         if (_outputFilePlayerPtr == NULL)
4342         {
4343             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4344                          VoEId(_instanceId, _channelId),
4345                          "Channel::MixAudioWithFile() file mixing failed");
4346             return -1;
4347         }
4348
4349         // We should get the frequency we ask for.
4350         if (_outputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
4351                                                        fileSamples,
4352                                                        mixingFrequency) == -1)
4353         {
4354             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4355                          VoEId(_instanceId, _channelId),
4356                          "Channel::MixAudioWithFile() file mixing failed");
4357             return -1;
4358         }
4359     }
4360
4361     if (audioFrame.samples_per_channel_ == fileSamples)
4362     {
4363         // Currently file stream is always mono.
4364         // TODO(xians): Change the code when FilePlayer supports real stereo.
4365         MixWithSat(audioFrame.data_,
4366                    audioFrame.num_channels_,
4367                    fileBuffer.get(),
4368                    1,
4369                    fileSamples);
4370     }
4371     else
4372     {
4373         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4374             "Channel::MixAudioWithFile() samples_per_channel_(%d) != "
4375             "fileSamples(%d)",
4376             audioFrame.samples_per_channel_, fileSamples);
4377         return -1;
4378     }
4379
4380     return 0;
4381 }
4382
4383 int
4384 Channel::InsertInbandDtmfTone()
4385 {
4386     // Check if we should start a new tone.
4387     if (_inbandDtmfQueue.PendingDtmf() &&
4388         !_inbandDtmfGenerator.IsAddingTone() &&
4389         _inbandDtmfGenerator.DelaySinceLastTone() >
4390         kMinTelephoneEventSeparationMs)
4391     {
4392         int8_t eventCode(0);
4393         uint16_t lengthMs(0);
4394         uint8_t attenuationDb(0);
4395
4396         eventCode = _inbandDtmfQueue.NextDtmf(&lengthMs, &attenuationDb);
4397         _inbandDtmfGenerator.AddTone(eventCode, lengthMs, attenuationDb);
4398         if (_playInbandDtmfEvent)
4399         {
4400             // Add tone to output mixer using a reduced length to minimize
4401             // risk of echo.
4402             _outputMixerPtr->PlayDtmfTone(eventCode, lengthMs - 80,
4403                                           attenuationDb);
4404         }
4405     }
4406
4407     if (_inbandDtmfGenerator.IsAddingTone())
4408     {
4409         uint16_t frequency(0);
4410         _inbandDtmfGenerator.GetSampleRate(frequency);
4411
4412         if (frequency != _audioFrame.sample_rate_hz_)
4413         {
4414             // Update sample rate of Dtmf tone since the mixing frequency
4415             // has changed.
4416             _inbandDtmfGenerator.SetSampleRate(
4417                 (uint16_t) (_audioFrame.sample_rate_hz_));
4418             // Reset the tone to be added taking the new sample rate into
4419             // account.
4420             _inbandDtmfGenerator.ResetTone();
4421         }
4422
4423         int16_t toneBuffer[320];
4424         uint16_t toneSamples(0);
4425         // Get 10ms tone segment and set time since last tone to zero
4426         if (_inbandDtmfGenerator.Get10msTone(toneBuffer, toneSamples) == -1)
4427         {
4428             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4429                        VoEId(_instanceId, _channelId),
4430                        "Channel::InsertInbandDtmfTone() inserting Dtmf failed");
4431             return -1;
4432         }
4433
4434         // Replace mixed audio with DTMF tone.
4435         for (int sample = 0;
4436             sample < _audioFrame.samples_per_channel_;
4437             sample++)
4438         {
4439             for (int channel = 0;
4440                 channel < _audioFrame.num_channels_;
4441                 channel++)
4442             {
4443                 const int index = sample * _audioFrame.num_channels_ + channel;
4444                 _audioFrame.data_[index] = toneBuffer[sample];
4445             }
4446         }
4447
4448         assert(_audioFrame.samples_per_channel_ == toneSamples);
4449     } else
4450     {
4451         // Add 10ms to "delay-since-last-tone" counter
4452         _inbandDtmfGenerator.UpdateDelaySinceLastTone();
4453     }
4454     return 0;
4455 }
4456
4457 int32_t
4458 Channel::SendPacketRaw(const void *data, int len, bool RTCP)
4459 {
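         // Hand the packet to the external transport registered on this
         // channel; RTP and RTCP use separate transport callbacks.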
4460     CriticalSectionScoped cs(&_callbackCritSect);
4461     if (_transportPtr == NULL)
4462     {
4463         return -1;
4464     }
4465     if (!RTCP)
4466     {
4467         return _transportPtr->SendPacket(_channelId, data, len);
4468     }
4469     else
4470     {
4471         return _transportPtr->SendRTCPPacket(_channelId, data, len);
4472     }
4473 }
4474
4475 // Called for incoming RTP packets after successful RTP header parsing.
4476 void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
4477                                 uint16_t sequence_number) {
4478   WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4479                "Channel::UpdatePacketDelay(timestamp=%lu, sequenceNumber=%u)",
4480                rtp_timestamp, sequence_number);
4481
4482   // Get frequency of last received payload
4483   int rtp_receive_frequency = audio_coding_->ReceiveFrequency();
4484
4485   CodecInst current_receive_codec;
4486   if (audio_coding_->ReceiveCodec(&current_receive_codec) != 0) {
4487     return;
4488   }
4489
4490   // Update the least required delay.
4491   least_required_delay_ms_ = audio_coding_->LeastRequiredDelayMs();
4492
4493   if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
4494     // Even though the actual sampling rate for G.722 audio is
4495     // 16,000 Hz, the RTP clock rate for the G722 payload format is
4496     // 8,000 Hz because that value was erroneously assigned in
4497     // RFC 1890 and must remain unchanged for backward compatibility.
4498     rtp_receive_frequency = 8000;
4499   } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
4500     // We are resampling Opus internally to 32,000 Hz until all our
4501     // DSP routines can operate at 48,000 Hz, but the RTP clock
4502     // rate for the Opus payload format is standardized to 48,000 Hz,
4503     // because that is the maximum supported decoding sampling rate.
4504     rtp_receive_frequency = 48000;
4505   }
4506
4507   // |jitter_buffer_playout_timestamp_| updated in UpdatePlayoutTimestamp for
4508   // every incoming packet.
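       // The gap between the arriving timestamp and the timestamp currently
       // being played out approximates the amount of buffered audio, in ms.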
4509   uint32_t timestamp_diff_ms = (rtp_timestamp -
4510       jitter_buffer_playout_timestamp_) / (rtp_receive_frequency / 1000);
4511   if (!IsNewerTimestamp(rtp_timestamp, jitter_buffer_playout_timestamp_) ||
4512       timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
4513     // If |jitter_buffer_playout_timestamp_| is newer than the incoming RTP
4514     // timestamp, the resulting difference is negative, but is set to zero.
4515     // This can happen when a network glitch causes a packet to arrive late,
4516     // and during long comfort noise periods with clock drift.
4517     timestamp_diff_ms = 0;
4518   }
4519
4520   uint16_t packet_delay_ms = (rtp_timestamp - _previousTimestamp) /
4521       (rtp_receive_frequency / 1000);
4522
4523   _previousTimestamp = rtp_timestamp;
4524
4525   if (timestamp_diff_ms == 0) return;
4526
4527   if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
4528     _recPacketDelayMs = packet_delay_ms;
4529   }
4530
4531   if (_average_jitter_buffer_delay_us == 0) {
4532     _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000;
4533     return;
4534   }
4535
4536   // Filter the average delay with an exponential filter (alpha = 7/8). The
4537   // value is kept in microseconds, i.e. 1000 x the millisecond estimate, to
4538   // reduce rounding error; GetDelayEstimate() converts it back to
4539   // milliseconds.
4540   _average_jitter_buffer_delay_us = (_average_jitter_buffer_delay_us * 7 +
4541       1000 * timestamp_diff_ms + 500) / 8;
4542 }
4543
void Channel::RegisterReceiveCodecsToRTPModule() {
  WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
               "Channel::RegisterReceiveCodecsToRTPModule()");

  CodecInst codec;
  const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();

  for (int idx = 0; idx < nSupportedCodecs; idx++) {
    // Open up the RTP/RTCP receiver for all supported codecs
    if ((audio_coding_->Codec(idx, &codec) == -1) ||
        (rtp_receiver_->RegisterReceivePayload(
            codec.plname,
            codec.pltype,
            codec.plfreq,
            codec.channels,
            (codec.rate < 0) ? 0 : codec.rate) == -1)) {
      WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::RegisterReceiveCodecsToRTPModule() unable"
                   " to register %s (%d/%d/%d/%d) to RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq,
                   codec.channels, codec.rate);
    } else {
      WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
                   "Channel::RegisterReceiveCodecsToRTPModule() %s "
                   "(%d/%d/%d/%d) has been added to the RTP/RTCP receiver",
                   codec.plname, codec.pltype, codec.plfreq,
                   codec.channels, codec.rate);
    }
  }
}

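// Usage sketch (illustrative values only, not taken from this file): a caller
// registers a secondary encoder together with the RED payload type that will
// carry it, e.g.
//   CodecInst secondary = my_secondary_codec_settings;  // hypothetical value
//   channel->SetSecondarySendCodec(secondary, 127);     // 127: example RED PT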
int Channel::SetSecondarySendCodec(const CodecInst& codec,
                                   int red_payload_type) {
  // Sanity check for the RED payload type (RTP payload types are 7 bits).
  if (red_payload_type < 0 || red_payload_type > 127) {
    _engineStatisticsPtr->SetLastError(
        VE_PLTYPE_ERROR, kTraceError,
        "SetSecondarySendCodec() invalid RED payload type");
    return -1;
  }

  if (SetRedPayloadType(red_payload_type) < 0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "SetSecondarySendCodec() Failed to register RED in ACM");
    return -1;
  }
  if (audio_coding_->RegisterSecondarySendCodec(codec) < 0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "SetSecondarySendCodec() Failed to register secondary send codec in "
        "ACM");
    return -1;
  }

  return 0;
}

void Channel::RemoveSecondarySendCodec() {
  audio_coding_->UnregisterSecondarySendCodec();
}

int Channel::GetSecondarySendCodec(CodecInst* codec) {
  if (audio_coding_->SecondarySendCodec(codec) < 0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "GetSecondarySendCodec() Failed to get secondary send codec from ACM");
    return -1;
  }
  return 0;
}

// Assuming this method is called with a valid payload type.
int Channel::SetRedPayloadType(int red_payload_type) {
  CodecInst codec;
  bool found_red = false;

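  // RED (RFC 2198) bundles the primary encoding and redundant copies of
  // earlier frames into a single payload; the loop below looks up the default
  // RED entry in the ACM codec database.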
  // Get default RED settings from the ACM database
  const int num_codecs = AudioCodingModule::NumberOfCodecs();
  for (int idx = 0; idx < num_codecs; idx++) {
    audio_coding_->Codec(idx, &codec);
    if (!STR_CASE_CMP(codec.plname, "RED")) {
      found_red = true;
      break;
    }
  }

  if (!found_red) {
    _engineStatisticsPtr->SetLastError(
        VE_CODEC_ERROR, kTraceError,
        "SetRedPayloadType() RED is not supported");
    return -1;
  }

  codec.pltype = red_payload_type;
  if (audio_coding_->RegisterSendCodec(codec) < 0) {
    _engineStatisticsPtr->SetLastError(
        VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
        "SetRedPayloadType() RED registration in ACM module failed");
    return -1;
  }

  if (_rtpRtcpModule->SetSendREDPayloadType(red_payload_type) != 0) {
    _engineStatisticsPtr->SetLastError(
        VE_RTP_RTCP_MODULE_ERROR, kTraceError,
        "SetRedPayloadType() RED registration in RTP/RTCP module failed");
    return -1;
  }
  return 0;
}

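// Shared helper for the RTP header-extension setters: the extension |type| is
// always deregistered first and, when |enable| is true, re-registered with the
// given |id|, so a repeated enable call simply replaces the previous mapping.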
int Channel::SetSendRtpHeaderExtension(bool enable, RTPExtensionType type,
                                       unsigned char id) {
  int error = 0;
  _rtpRtcpModule->DeregisterSendRtpHeaderExtension(type);
  if (enable) {
    error = _rtpRtcpModule->RegisterSendRtpHeaderExtension(type, id);
  }
  return error;
}
}  // namespace voe
}  // namespace webrtc