1 /*
2  *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3  *
4  *  Use of this source code is governed by a BSD-style license
5  *  that can be found in the LICENSE file in the root of the source
6  *  tree. An additional intellectual property rights grant can be found
7  *  in the file PATENTS.  All contributing project authors may
8  *  be found in the AUTHORS file in the root of the source tree.
9  */
10 
11 #include "webrtc/voice_engine/channel.h"
12 
13 #include "webrtc/base/timeutils.h"
14 #include "webrtc/common.h"
15 #include "webrtc/modules/audio_device/include/audio_device.h"
16 #include "webrtc/modules/audio_processing/include/audio_processing.h"
17 #include "webrtc/modules/interface/module_common_types.h"
18 #include "webrtc/modules/rtp_rtcp/interface/receive_statistics.h"
19 #include "webrtc/modules/rtp_rtcp/interface/remote_ntp_time_estimator.h"
20 #include "webrtc/modules/rtp_rtcp/interface/rtp_payload_registry.h"
21 #include "webrtc/modules/rtp_rtcp/interface/rtp_receiver.h"
22 #include "webrtc/modules/rtp_rtcp/source/rtp_receiver_strategy.h"
23 #include "webrtc/modules/utility/interface/audio_frame_operations.h"
24 #include "webrtc/modules/utility/interface/process_thread.h"
25 #include "webrtc/modules/utility/interface/rtp_dump.h"
26 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
27 #include "webrtc/system_wrappers/interface/logging.h"
28 #include "webrtc/system_wrappers/interface/trace.h"
29 #include "webrtc/video_engine/include/vie_network.h"
30 #include "webrtc/voice_engine/include/voe_base.h"
31 #include "webrtc/voice_engine/include/voe_external_media.h"
32 #include "webrtc/voice_engine/include/voe_rtp_rtcp.h"
33 #include "webrtc/voice_engine/output_mixer.h"
34 #include "webrtc/voice_engine/statistics.h"
35 #include "webrtc/voice_engine/transmit_mixer.h"
36 #include "webrtc/voice_engine/utility.h"
37 
38 #if defined(_WIN32)
39 #include <Qos.h>
40 #endif
41 
42 namespace webrtc {
43 namespace voe {
44 
45 // Extend the default RTCP statistics struct with max_jitter, defined as the
46 // maximum jitter value seen in an RTCP report block.
47 struct ChannelStatistics : public RtcpStatistics {
48   ChannelStatistics() : rtcp(), max_jitter(0) {}
49 
50   RtcpStatistics rtcp;
51   uint32_t max_jitter;
52 };
53 
54 // Statistics callback, called at each generation of a new RTCP report block.
55 class StatisticsProxy : public RtcpStatisticsCallback {
56  public:
57   StatisticsProxy(uint32_t ssrc)
58    : stats_lock_(CriticalSectionWrapper::CreateCriticalSection()),
59      ssrc_(ssrc) {}
60   virtual ~StatisticsProxy() {}
61 
62   virtual void StatisticsUpdated(const RtcpStatistics& statistics,
63                                  uint32_t ssrc) OVERRIDE {
64     if (ssrc != ssrc_)
65       return;
66 
67     CriticalSectionScoped cs(stats_lock_.get());
68     stats_.rtcp = statistics;
69     if (statistics.jitter > stats_.max_jitter) {
70       stats_.max_jitter = statistics.jitter;
71     }
72   }
73 
74   void ResetStatistics() {
75     CriticalSectionScoped cs(stats_lock_.get());
76     stats_ = ChannelStatistics();
77   }
78 
79   ChannelStatistics GetStats() {
80     CriticalSectionScoped cs(stats_lock_.get());
81     return stats_;
82   }
83 
84  private:
85   // StatisticsUpdated calls are triggered from threads in the RTP module,
86   // while GetStats calls can be triggered from the public voice engine API,
87   // hence synchronization is needed.
88   scoped_ptr<CriticalSectionWrapper> stats_lock_;
89   const uint32_t ssrc_;
90   ChannelStatistics stats_;
91 };
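// Usage sketch (illustrative only; |local_ssrc| is a placeholder). The proxy
// is registered with the receive-statistics module, as done in the Channel
// constructor further down, and later polled for the aggregated values:
//
//   StatisticsProxy proxy(local_ssrc);
//   rtp_receive_statistics_->RegisterRtcpStatisticsCallback(&proxy);
//   ...
//   ChannelStatistics stats = proxy.GetStats();
//   uint32_t worst_jitter_seen = stats.max_jitter;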
92 
93 class VoEBitrateObserver : public BitrateObserver {
94  public:
95   explicit VoEBitrateObserver(Channel* owner)
96       : owner_(owner) {}
97   virtual ~VoEBitrateObserver() {}
98 
99   // Implements BitrateObserver.
100   virtual void OnNetworkChanged(const uint32_t bitrate_bps,
101                                 const uint8_t fraction_lost,
102                                 const uint32_t rtt) OVERRIDE {
103     // |fraction_lost| has a scale of 0 - 255.
104     owner_->OnNetworkChanged(bitrate_bps, fraction_lost, rtt);
105   }
106 
107  private:
108   Channel* owner_;
109 };
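// Wiring note (a sketch mirroring the constructor below; |channel| is a
// placeholder): the observer only forwards bandwidth-estimator updates to its
// owning channel, so creating it is a one-liner:
//
//   scoped_ptr<VoEBitrateObserver> bitrate_observer(
//       new VoEBitrateObserver(channel));
//   // When the bandwidth estimator invokes OnNetworkChanged(), the call is
//   // forwarded to channel->OnNetworkChanged(bitrate_bps, fraction_lost, rtt).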
110 
111 int32_t
112 Channel::SendData(FrameType frameType,
113                   uint8_t   payloadType,
114                   uint32_t  timeStamp,
115                   const uint8_t*  payloadData,
116                   uint16_t  payloadSize,
117                   const RTPFragmentationHeader* fragmentation)
118 {
119     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
120                  "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
121                  " payloadSize=%u, fragmentation=0x%x)",
122                  frameType, payloadType, timeStamp, payloadSize, fragmentation);
123 
124     if (_includeAudioLevelIndication)
125     {
126         // Store current audio level in the RTP/RTCP module.
127         // The level will be used in combination with voice-activity state
128         // (frameType) to add an RTP header extension
129         _rtpRtcpModule->SetAudioLevel(rms_level_.RMS());
130     }
131 
132     // Push data from ACM to RTP/RTCP-module to deliver audio frame for
133     // packetization.
134     // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
135     if (_rtpRtcpModule->SendOutgoingData((FrameType&)frameType,
136                                         payloadType,
137                                         timeStamp,
138                                         // Leaving the time when this frame was
139                                         // received from the capture device as
140                                         // undefined for voice for now.
141                                         -1,
142                                         payloadData,
143                                         payloadSize,
144                                         fragmentation) == -1)
145     {
146         _engineStatisticsPtr->SetLastError(
147             VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
148             "Channel::SendData() failed to send data to RTP/RTCP module");
149         return -1;
150     }
151 
152     _lastLocalTimeStamp = timeStamp;
153     _lastPayloadType = payloadType;
154 
155     return 0;
156 }
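// Send-path note (descriptive): the ACM invokes Channel::SendData() once per
// encoded frame; RtpRtcp::SendOutgoingData() packetizes the payload, and the
// RTP/RTCP module then calls back into this class (registered as its outgoing
// Transport in the constructor) via Channel::SendPacket() below to put the
// packet on the wire.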
157 
158 int32_t
159 Channel::InFrameType(int16_t frameType)
160 {
161     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
162                  "Channel::InFrameType(frameType=%d)", frameType);
163 
164     CriticalSectionScoped cs(&_callbackCritSect);
165     // 1 indicates speech
166     _sendFrameType = (frameType == 1) ? 1 : 0;
167     return 0;
168 }
169 
170 int32_t
171 Channel::OnRxVadDetected(int vadDecision)
172 {
173     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
174                  "Channel::OnRxVadDetected(vadDecision=%d)", vadDecision);
175 
176     CriticalSectionScoped cs(&_callbackCritSect);
177     if (_rxVadObserverPtr)
178     {
179         _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
180     }
181 
182     return 0;
183 }
184 
185 int
186 Channel::SendPacket(int channel, const void *data, int len)
187 {
188     channel = VoEChannelId(channel);
189     assert(channel == _channelId);
190 
191     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
192                  "Channel::SendPacket(channel=%d, len=%d)", channel, len);
193 
194     CriticalSectionScoped cs(&_callbackCritSect);
195 
196     if (_transportPtr == NULL)
197     {
198         WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
199                      "Channel::SendPacket() failed to send RTP packet due to"
200                      " invalid transport object");
201         return -1;
202     }
203 
204     uint8_t* bufferToSendPtr = (uint8_t*)data;
205     int32_t bufferLength = len;
206 
207     // Dump the RTP packet to a file (if RTP dump is enabled).
208     if (_rtpDumpOut.DumpPacket((const uint8_t*)data, len) == -1)
209     {
210         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
211                      VoEId(_instanceId,_channelId),
212                      "Channel::SendPacket() RTP dump to output file failed");
213     }
214 
215     int n = _transportPtr->SendPacket(channel, bufferToSendPtr,
216                                       bufferLength);
217     if (n < 0) {
218       std::string transport_name =
219           _externalTransport ? "external transport" : "WebRtc sockets";
220       WEBRTC_TRACE(kTraceError, kTraceVoice,
221                    VoEId(_instanceId,_channelId),
222                    "Channel::SendPacket() RTP transmission using %s failed",
223                    transport_name.c_str());
224       return -1;
225     }
226     return n;
227 }
228 
229 int
230 Channel::SendRTCPPacket(int channel, const void *data, int len)
231 {
232     channel = VoEChannelId(channel);
233     assert(channel == _channelId);
234 
235     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
236                  "Channel::SendRTCPPacket(channel=%d, len=%d)", channel, len);
237 
238     CriticalSectionScoped cs(&_callbackCritSect);
239     if (_transportPtr == NULL)
240     {
241         WEBRTC_TRACE(kTraceError, kTraceVoice,
242                      VoEId(_instanceId,_channelId),
243                      "Channel::SendRTCPPacket() failed to send RTCP packet"
244                      " due to invalid transport object");
245         return -1;
246     }
247 
248     uint8_t* bufferToSendPtr = (uint8_t*)data;
249     int32_t bufferLength = len;
250 
251     // Dump the RTCP packet to a file (if RTP dump is enabled).
252     if (_rtpDumpOut.DumpPacket((const uint8_t*)data, len) == -1)
253     {
254         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
255                      VoEId(_instanceId,_channelId),
256                      "Channel::SendRTCPPacket() RTCP dump to output file failed");
257     }
258 
259     int n = _transportPtr->SendRTCPPacket(channel,
260                                           bufferToSendPtr,
261                                           bufferLength);
262     if (n < 0) {
263       std::string transport_name =
264           _externalTransport ? "external transport" : "WebRtc sockets";
265       WEBRTC_TRACE(kTraceInfo, kTraceVoice,
266                    VoEId(_instanceId,_channelId),
267                    "Channel::SendRTCPPacket() transmission using %s failed",
268                    transport_name.c_str());
269       return -1;
270     }
271     return n;
272 }
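// Sketch of an external transport (illustrative; the class name and the
// SendOverMySocket() helper are hypothetical). When an external transport is
// registered, _transportPtr above points at an object like this and the
// WebRtc sockets are bypassed:
//
//   class MyTransport : public Transport {
//    public:
//     virtual int SendPacket(int channel, const void* data, int len) {
//       return SendOverMySocket(data, len);
//     }
//     virtual int SendRTCPPacket(int channel, const void* data, int len) {
//       return SendOverMySocket(data, len);
//     }
//   };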
273 
274 void
275 Channel::OnPlayTelephoneEvent(int32_t id,
276                               uint8_t event,
277                               uint16_t lengthMs,
278                               uint8_t volume)
279 {
280     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
281                  "Channel::OnPlayTelephoneEvent(id=%d, event=%u, lengthMs=%u,"
282                  " volume=%u)", id, event, lengthMs, volume);
283 
284     if (!_playOutbandDtmfEvent || (event > 15))
285     {
286         // Ignore callback since feedback is disabled or event is not a
287         // Dtmf tone event.
288         return;
289     }
290 
291     assert(_outputMixerPtr != NULL);
292 
293     // Start playing out the Dtmf tone (if playout is enabled).
294     // Reduce the length of the tone by 80 ms to reduce the risk of echo.
295     _outputMixerPtr->PlayDtmfTone(event, lengthMs - 80, volume);
296 }
297 
298 void
299 Channel::OnIncomingSSRCChanged(int32_t id, uint32_t ssrc)
300 {
301     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
302                  "Channel::OnIncomingSSRCChanged(id=%d, SSRC=%d)",
303                  id, ssrc);
304 
305     // Update ssrc so that NTP for AV sync can be updated.
306     _rtpRtcpModule->SetRemoteSSRC(ssrc);
307 }
308 
309 void Channel::OnIncomingCSRCChanged(int32_t id,
310                                     uint32_t CSRC,
311                                     bool added)
312 {
313     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
314                  "Channel::OnIncomingCSRCChanged(id=%d, CSRC=%d, added=%d)",
315                  id, CSRC, added);
316 }
317 
318 void Channel::ResetStatistics(uint32_t ssrc) {
319   StreamStatistician* statistician =
320       rtp_receive_statistics_->GetStatistician(ssrc);
321   if (statistician) {
322     statistician->ResetStatistics();
323   }
324   statistics_proxy_->ResetStatistics();
325 }
326 
327 void
328 Channel::OnApplicationDataReceived(int32_t id,
329                                    uint8_t subType,
330                                    uint32_t name,
331                                    uint16_t length,
332                                    const uint8_t* data)
333 {
334     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
335                  "Channel::OnApplicationDataReceived(id=%d, subType=%u,"
336                  " name=%u, length=%u)",
337                  id, subType, name, length);
338 
339     int32_t channel = VoEChannelId(id);
340     assert(channel == _channelId);
341 
342     if (_rtcpObserver)
343     {
344         CriticalSectionScoped cs(&_callbackCritSect);
345 
346         if (_rtcpObserverPtr)
347         {
348             _rtcpObserverPtr->OnApplicationDataReceived(channel,
349                                                         subType,
350                                                         name,
351                                                         data,
352                                                         length);
353         }
354     }
355 }
356 
357 int32_t
358 Channel::OnInitializeDecoder(
359     int32_t id,
360     int8_t payloadType,
361     const char payloadName[RTP_PAYLOAD_NAME_SIZE],
362     int frequency,
363     uint8_t channels,
364     uint32_t rate)
365 {
366     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
367                  "Channel::OnInitializeDecoder(id=%d, payloadType=%d, "
368                  "payloadName=%s, frequency=%u, channels=%u, rate=%u)",
369                  id, payloadType, payloadName, frequency, channels, rate);
370 
371     assert(VoEChannelId(id) == _channelId);
372 
373     CodecInst receiveCodec = {0};
374     CodecInst dummyCodec = {0};
375 
376     receiveCodec.pltype = payloadType;
377     receiveCodec.plfreq = frequency;
378     receiveCodec.channels = channels;
379     receiveCodec.rate = rate;
380     strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
381 
382     audio_coding_->Codec(payloadName, &dummyCodec, frequency, channels);
383     receiveCodec.pacsize = dummyCodec.pacsize;
384 
385     // Register the new codec to the ACM
386     if (audio_coding_->RegisterReceiveCodec(receiveCodec) == -1)
387     {
388         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
389                      VoEId(_instanceId, _channelId),
390                      "Channel::OnInitializeDecoder() invalid codec ("
391                      "pt=%d, name=%s) received - 1", payloadType, payloadName);
392         _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
393         return -1;
394     }
395 
396     return 0;
397 }
398 
399 void
400 Channel::OnPacketTimeout(int32_t id)
401 {
402     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
403                  "Channel::OnPacketTimeout(id=%d)", id);
404 
405     CriticalSectionScoped cs(_callbackCritSectPtr);
406     if (_voiceEngineObserverPtr)
407     {
408         if (channel_state_.Get().receiving || _externalTransport)
409         {
410             int32_t channel = VoEChannelId(id);
411             assert(channel == _channelId);
412             // Ensure that next OnReceivedPacket() callback will trigger
413             // a VE_PACKET_RECEIPT_RESTARTED callback.
414             _rtpPacketTimedOut = true;
415             // Deliver callback to the observer
416             WEBRTC_TRACE(kTraceInfo, kTraceVoice,
417                          VoEId(_instanceId,_channelId),
418                          "Channel::OnPacketTimeout() => "
419                          "CallbackOnError(VE_RECEIVE_PACKET_TIMEOUT)");
420             _voiceEngineObserverPtr->CallbackOnError(channel,
421                                                      VE_RECEIVE_PACKET_TIMEOUT);
422         }
423     }
424 }
425 
426 void
427 Channel::OnReceivedPacket(int32_t id,
428                           RtpRtcpPacketType packetType)
429 {
430     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
431                  "Channel::OnReceivedPacket(id=%d, packetType=%d)",
432                  id, packetType);
433 
434     assert(VoEChannelId(id) == _channelId);
435 
436     // Notify only for the case when we have restarted an RTP session.
437     if (_rtpPacketTimedOut && (kPacketRtp == packetType))
438     {
439         CriticalSectionScoped cs(_callbackCritSectPtr);
440         if (_voiceEngineObserverPtr)
441         {
442             int32_t channel = VoEChannelId(id);
443             assert(channel == _channelId);
444             // Reset timeout mechanism
445             _rtpPacketTimedOut = false;
446             // Deliver callback to the observer
447             WEBRTC_TRACE(kTraceInfo, kTraceVoice,
448                          VoEId(_instanceId,_channelId),
449                          "Channel::OnReceivedPacket() =>"
450                          " CallbackOnError(VE_PACKET_RECEIPT_RESTARTED)");
451             _voiceEngineObserverPtr->CallbackOnError(
452                 channel,
453                 VE_PACKET_RECEIPT_RESTARTED);
454         }
455     }
456 }
457 
458 void
459 Channel::OnPeriodicDeadOrAlive(int32_t id,
460                                RTPAliveType alive)
461 {
462     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
463                  "Channel::OnPeriodicDeadOrAlive(id=%d, alive=%d)", id, alive);
464 
465     {
466         CriticalSectionScoped cs(&_callbackCritSect);
467         if (!_connectionObserver)
468             return;
469     }
470 
471     int32_t channel = VoEChannelId(id);
472     assert(channel == _channelId);
473 
474     // Use Alive as default to limit risk of false Dead detections
475     bool isAlive(true);
476 
477     // Always mark the connection as Dead when the module reports kRtpDead
478     if (kRtpDead == alive)
479     {
480         isAlive = false;
481     }
482 
483     // It is possible that the connection is alive even if no RTP packet has
484     // been received for a long time since the other side might use VAD/DTX
485     // and a low SID-packet update rate.
486     if ((kRtpNoRtp == alive) && channel_state_.Get().playing)
487     {
488         // Detect Alive for all NetEQ states except for the case when we are
489         // in PLC_CNG state.
490         // PLC_CNG <=> background noise only due to long expand or error.
491         // Note that the case where the other side stops sending during CNG
492         // state will be detected as Alive. Dead is not set until after
493         // missing RTCP packets for at least twelve seconds (handled
494         // internally by the RTP/RTCP module).
495         isAlive = (_outputSpeechType != AudioFrame::kPLCCNG);
496     }
497 
498     // Send callback to the registered observer
499     if (_connectionObserver)
500     {
501         CriticalSectionScoped cs(&_callbackCritSect);
502         if (_connectionObserverPtr)
503         {
504             _connectionObserverPtr->OnPeriodicDeadOrAlive(channel, isAlive);
505         }
506     }
507 }
508 
509 int32_t
510 Channel::OnReceivedPayloadData(const uint8_t* payloadData,
511                                uint16_t payloadSize,
512                                const WebRtcRTPHeader* rtpHeader)
513 {
514     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
515                  "Channel::OnReceivedPayloadData(payloadSize=%d,"
516                  " payloadType=%u, audioChannel=%u)",
517                  payloadSize,
518                  rtpHeader->header.payloadType,
519                  rtpHeader->type.Audio.channel);
520 
521     if (!channel_state_.Get().playing)
522     {
523         // Avoid inserting into NetEQ when we are not playing. Count the
524         // packet as discarded.
525         WEBRTC_TRACE(kTraceStream, kTraceVoice,
526                      VoEId(_instanceId, _channelId),
527                      "received packet is discarded since playing is not"
528                      " activated");
529         _numberOfDiscardedPackets++;
530         return 0;
531     }
532 
533     // Push the incoming payload (parsed and ready for decoding) into the ACM
534     if (audio_coding_->IncomingPacket(payloadData,
535                                       payloadSize,
536                                       *rtpHeader) != 0)
537     {
538         _engineStatisticsPtr->SetLastError(
539             VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
540             "Channel::OnReceivedPayloadData() unable to push data to the ACM");
541         return -1;
542     }
543 
544     // Update the packet delay.
545     UpdatePacketDelay(rtpHeader->header.timestamp,
546                       rtpHeader->header.sequenceNumber);
547 
548     uint16_t round_trip_time = 0;
549     _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), &round_trip_time,
550                         NULL, NULL, NULL);
551 
552     std::vector<uint16_t> nack_list = audio_coding_->GetNackList(
553         round_trip_time);
554     if (!nack_list.empty()) {
555       // Can't use nack_list.data() since it's not supported by all
556       // compilers.
557       ResendPackets(&(nack_list[0]), static_cast<int>(nack_list.size()));
558     }
559     return 0;
560 }
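// NACK note (descriptive): the block above fetches the current round-trip
// time from the RTP/RTCP module, asks the ACM which sequence numbers are
// still missing given that RTT, and hands them to ResendPackets(), which
// requests retransmission from the sender (the retransmission mechanism
// itself is not part of this excerpt).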
561 
562 bool Channel::OnRecoveredPacket(const uint8_t* rtp_packet,
563                                 int rtp_packet_length) {
564   RTPHeader header;
565   if (!rtp_header_parser_->Parse(rtp_packet, rtp_packet_length, &header)) {
566     WEBRTC_TRACE(kTraceDebug, webrtc::kTraceVoice, _channelId,
567                  "IncomingPacket invalid RTP header");
568     return false;
569   }
570   header.payload_type_frequency =
571       rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
572   if (header.payload_type_frequency < 0)
573     return false;
574   return ReceivePacket(rtp_packet, rtp_packet_length, header, false);
575 }
576 
577 int32_t Channel::GetAudioFrame(int32_t id, AudioFrame& audioFrame)
578 {
579     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
580                  "Channel::GetAudioFrame(id=%d)", id);
581 
582     // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
583     if (audio_coding_->PlayoutData10Ms(audioFrame.sample_rate_hz_,
584                                        &audioFrame) == -1)
585     {
586         WEBRTC_TRACE(kTraceError, kTraceVoice,
587                      VoEId(_instanceId,_channelId),
588                      "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
589         // In all likelihood, the audio in this frame is garbage. We return an
590         // error so that the audio mixer module doesn't add it to the mix. As
591         // a result, it won't be played out and the actions skipped here are
592         // irrelevant.
593         return -1;
594     }
595 
596     if (_RxVadDetection)
597     {
598         UpdateRxVadDetection(audioFrame);
599     }
600 
601     // Convert module ID to internal VoE channel ID
602     audioFrame.id_ = VoEChannelId(audioFrame.id_);
603     // Store speech type for dead-or-alive detection
604     _outputSpeechType = audioFrame.speech_type_;
605 
606     ChannelState::State state = channel_state_.Get();
607 
608     if (state.rx_apm_is_enabled) {
609       int err = rx_audioproc_->ProcessStream(&audioFrame);
610       if (err) {
611         LOG(LS_ERROR) << "ProcessStream() error: " << err;
612         assert(false);
613       }
614     }
615 
616     float output_gain = 1.0f;
617     float left_pan = 1.0f;
618     float right_pan = 1.0f;
619     {
620       CriticalSectionScoped cs(&volume_settings_critsect_);
621       output_gain = _outputGain;
622       left_pan = _panLeft;
623       right_pan = _panRight;
624     }
625 
626     // Output volume scaling
627     if (output_gain < 0.99f || output_gain > 1.01f)
628     {
629         AudioFrameOperations::ScaleWithSat(output_gain, audioFrame);
630     }
631 
632     // Scale left and/or right channel(s) if stereo and master balance is
633     // active
634 
635     if (left_pan != 1.0f || right_pan != 1.0f)
636     {
637         if (audioFrame.num_channels_ == 1)
638         {
639             // Emulate stereo mode since panning is active.
640             // The mono signal is copied to both left and right channels here.
641             AudioFrameOperations::MonoToStereo(&audioFrame);
642         }
643         // For true stereo mode (when we are receiving a stereo signal), no
644         // action is needed.
645 
646         // Do the panning operation (the audio frame contains stereo at this
647         // stage)
648         AudioFrameOperations::Scale(left_pan, right_pan, audioFrame);
649     }
650 
651     // Mix decoded PCM output with file if file mixing is enabled
652     if (state.output_file_playing)
653     {
654         MixAudioWithFile(audioFrame, audioFrame.sample_rate_hz_);
655     }
656 
657     // External media
658     if (_outputExternalMedia)
659     {
660         CriticalSectionScoped cs(&_callbackCritSect);
661         const bool isStereo = (audioFrame.num_channels_ == 2);
662         if (_outputExternalMediaCallbackPtr)
663         {
664             _outputExternalMediaCallbackPtr->Process(
665                 _channelId,
666                 kPlaybackPerChannel,
667                 (int16_t*)audioFrame.data_,
668                 audioFrame.samples_per_channel_,
669                 audioFrame.sample_rate_hz_,
670                 isStereo);
671         }
672     }
673 
674     // Record playout if enabled
675     {
676         CriticalSectionScoped cs(&_fileCritSect);
677 
678         if (_outputFileRecording && _outputFileRecorderPtr)
679         {
680             _outputFileRecorderPtr->RecordAudioToFile(audioFrame);
681         }
682     }
683 
684     // Measure audio level (0-9)
685     _outputAudioLevel.ComputeLevel(audioFrame);
686 
687     if (capture_start_rtp_time_stamp_ < 0 && audioFrame.timestamp_ != 0) {
688       // The first frame with a valid rtp timestamp.
689       capture_start_rtp_time_stamp_ = audioFrame.timestamp_;
690     }
691 
692     if (capture_start_rtp_time_stamp_ >= 0) {
693       // audioFrame.timestamp_ should be valid from now on.
694 
695       // Compute elapsed time.
696       int64_t unwrap_timestamp =
697           rtp_ts_wraparound_handler_->Unwrap(audioFrame.timestamp_);
698       audioFrame.elapsed_time_ms_ =
699           (unwrap_timestamp - capture_start_rtp_time_stamp_) /
700           (GetPlayoutFrequency() / 1000);
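      // Worked example (illustrative): with a 16 kHz playout clock, an
      // unwrapped timestamp 1600 ticks past |capture_start_rtp_time_stamp_|
      // gives 1600 / (16000 / 1000) = 100 ms of elapsed time.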
701 
702       // Compute ntp time.
703       audioFrame.ntp_time_ms_ = ntp_estimator_->Estimate(audioFrame.timestamp_);
704       // |ntp_time_ms_| won't be valid until at least 2 RTCP SRs are received.
705       if (audioFrame.ntp_time_ms_ > 0) {
706         // Compute |capture_start_ntp_time_ms_| so that
707         // |capture_start_ntp_time_ms_| + |elapsed_time_ms_| == |ntp_time_ms_|
708         CriticalSectionScoped lock(ts_stats_lock_.get());
709         capture_start_ntp_time_ms_ =
710             audioFrame.ntp_time_ms_ - audioFrame.elapsed_time_ms_;
711       }
712     }
713 
714     return 0;
715 }
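// Playout-path summary (descriptive): GetAudioFrame() above decodes 10 ms of
// audio from the ACM, optionally runs RX VAD and the receive-side APM,
// applies output gain and panning, mixes in any file playout, feeds the
// external-media and file-recording hooks, measures the output level, and
// finally derives elapsed and NTP capture times used for A/V sync.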
716 
717 int32_t
718 Channel::NeededFrequency(int32_t id)
719 {
720     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
721                  "Channel::NeededFrequency(id=%d)", id);
722 
723     int highestNeeded = 0;
724 
725     // Determine highest needed receive frequency
726     int32_t receiveFrequency = audio_coding_->ReceiveFrequency();
727 
728     // Return the bigger of playout and receive frequency in the ACM.
729     if (audio_coding_->PlayoutFrequency() > receiveFrequency)
730     {
731         highestNeeded = audio_coding_->PlayoutFrequency();
732     }
733     else
734     {
735         highestNeeded = receiveFrequency;
736     }
737 
738     // Special case: if we're playing a file on the playout side,
739     // we take that frequency into consideration as well.
740     // This is not needed on the sending side, since the codec will
741     // limit the spectrum anyway.
742     if (channel_state_.Get().output_file_playing)
743     {
744         CriticalSectionScoped cs(&_fileCritSect);
745         if (_outputFilePlayerPtr)
746         {
747             if (_outputFilePlayerPtr->Frequency() > highestNeeded)
748             {
749                 highestNeeded = _outputFilePlayerPtr->Frequency();
750             }
751         }
752     }
753 
754     return highestNeeded;
755 }
756 
757 int32_t
758 Channel::CreateChannel(Channel*& channel,
759                        int32_t channelId,
760                        uint32_t instanceId,
761                        const Config& config)
762 {
763     WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId,channelId),
764                  "Channel::CreateChannel(channelId=%d, instanceId=%d)",
765         channelId, instanceId);
766 
767     channel = new Channel(channelId, instanceId, config);
768     if (channel == NULL)
769     {
770         WEBRTC_TRACE(kTraceMemory, kTraceVoice,
771                      VoEId(instanceId,channelId),
772                      "Channel::CreateChannel() unable to allocate memory for"
773                      " channel");
774         return -1;
775     }
776     return 0;
777 }
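// Usage sketch (illustrative; the id and config values are placeholders):
//
//   Channel* channel = NULL;
//   if (Channel::CreateChannel(channel, channelId, instanceId, config) != 0) {
//     // Allocation failed; |channel| is still NULL.
//   }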
778 
779 void
780 Channel::PlayNotification(int32_t id, uint32_t durationMs)
781 {
782     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
783                  "Channel::PlayNotification(id=%d, durationMs=%d)",
784                  id, durationMs);
785 
786     // Not implemented yet.
787 }
788 
789 void
790 Channel::RecordNotification(int32_t id, uint32_t durationMs)
791 {
792     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
793                  "Channel::RecordNotification(id=%d, durationMs=%d)",
794                  id, durationMs);
795 
796     // Not implemented yet.
797 }
798 
799 void
800 Channel::PlayFileEnded(int32_t id)
801 {
802     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
803                  "Channel::PlayFileEnded(id=%d)", id);
804 
805     if (id == _inputFilePlayerId)
806     {
807         channel_state_.SetInputFilePlaying(false);
808         WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
809                      VoEId(_instanceId,_channelId),
810                      "Channel::PlayFileEnded() => input file player module is"
811                      " shutdown");
812     }
813     else if (id == _outputFilePlayerId)
814     {
815         channel_state_.SetOutputFilePlaying(false);
816         WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
817                      VoEId(_instanceId,_channelId),
818                      "Channel::PlayFileEnded() => output file player module is"
819                      " shutdown");
820     }
821 }
822 
823 void
824 Channel::RecordFileEnded(int32_t id)
825 {
826     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
827                  "Channel::RecordFileEnded(id=%d)", id);
828 
829     assert(id == _outputFileRecorderId);
830 
831     CriticalSectionScoped cs(&_fileCritSect);
832 
833     _outputFileRecording = false;
834     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
835                  VoEId(_instanceId,_channelId),
836                  "Channel::RecordFileEnded() => output file recorder module is"
837                  " shutdown");
838 }
839 
840 Channel::Channel(int32_t channelId,
841                  uint32_t instanceId,
842                  const Config& config) :
843     _fileCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
844     _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
845     volume_settings_critsect_(*CriticalSectionWrapper::CreateCriticalSection()),
846     _instanceId(instanceId),
847     _channelId(channelId),
848     rtp_header_parser_(RtpHeaderParser::Create()),
849     rtp_payload_registry_(
850         new RTPPayloadRegistry(RTPPayloadStrategy::CreateStrategy(true))),
851     rtp_receive_statistics_(ReceiveStatistics::Create(
852         Clock::GetRealTimeClock())),
853     rtp_receiver_(RtpReceiver::CreateAudioReceiver(
854         VoEModuleId(instanceId, channelId), Clock::GetRealTimeClock(), this,
855         this, this, rtp_payload_registry_.get())),
856     telephone_event_handler_(rtp_receiver_->GetTelephoneEventHandler()),
857     audio_coding_(AudioCodingModule::Create(
858         VoEModuleId(instanceId, channelId))),
859     _rtpDumpIn(*RtpDump::CreateRtpDump()),
860     _rtpDumpOut(*RtpDump::CreateRtpDump()),
861     _outputAudioLevel(),
862     _externalTransport(false),
863     _audioLevel_dBov(0),
864     _inputFilePlayerPtr(NULL),
865     _outputFilePlayerPtr(NULL),
866     _outputFileRecorderPtr(NULL),
867     // Avoid conflicts with other channels by adding 1024 - 1026;
868     // we won't use as many as 1024 channels.
869     _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
870     _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
871     _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
872     _outputFileRecording(false),
873     _inbandDtmfQueue(VoEModuleId(instanceId, channelId)),
874     _inbandDtmfGenerator(VoEModuleId(instanceId, channelId)),
875     _outputExternalMedia(false),
876     _inputExternalMediaCallbackPtr(NULL),
877     _outputExternalMediaCallbackPtr(NULL),
878     _timeStamp(0), // This is just an offset; the RTP module will add its own random offset
879     _sendTelephoneEventPayloadType(106),
880     ntp_estimator_(new RemoteNtpTimeEstimator(Clock::GetRealTimeClock())),
881     jitter_buffer_playout_timestamp_(0),
882     playout_timestamp_rtp_(0),
883     playout_timestamp_rtcp_(0),
884     playout_delay_ms_(0),
885     _numberOfDiscardedPackets(0),
886     send_sequence_number_(0),
887     ts_stats_lock_(CriticalSectionWrapper::CreateCriticalSection()),
888     rtp_ts_wraparound_handler_(new rtc::TimestampWrapAroundHandler()),
889     capture_start_rtp_time_stamp_(-1),
890     capture_start_ntp_time_ms_(-1),
891     _engineStatisticsPtr(NULL),
892     _outputMixerPtr(NULL),
893     _transmitMixerPtr(NULL),
894     _moduleProcessThreadPtr(NULL),
895     _audioDeviceModulePtr(NULL),
896     _voiceEngineObserverPtr(NULL),
897     _callbackCritSectPtr(NULL),
898     _transportPtr(NULL),
899     _rxVadObserverPtr(NULL),
900     _oldVadDecision(-1),
901     _sendFrameType(0),
902     _rtcpObserverPtr(NULL),
903     _externalPlayout(false),
904     _externalMixing(false),
905     _mixFileWithMicrophone(false),
906     _rtcpObserver(false),
907     _mute(false),
908     _panLeft(1.0f),
909     _panRight(1.0f),
910     _outputGain(1.0f),
911     _playOutbandDtmfEvent(false),
912     _playInbandDtmfEvent(false),
913     _lastLocalTimeStamp(0),
914     _lastPayloadType(0),
915     _includeAudioLevelIndication(false),
916     _rtpPacketTimedOut(false),
917     _rtpPacketTimeOutIsEnabled(false),
918     _rtpTimeOutSeconds(0),
919     _connectionObserver(false),
920     _connectionObserverPtr(NULL),
921     _outputSpeechType(AudioFrame::kNormalSpeech),
922     vie_network_(NULL),
923     video_channel_(-1),
924     _average_jitter_buffer_delay_us(0),
925     least_required_delay_ms_(0),
926     _previousTimestamp(0),
927     _recPacketDelayMs(20),
928     _RxVadDetection(false),
929     _rxAgcIsEnabled(false),
930     _rxNsIsEnabled(false),
931     restored_packet_in_use_(false),
932     bitrate_controller_(
933         BitrateController::CreateBitrateController(Clock::GetRealTimeClock(),
934                                                    true)),
935     rtcp_bandwidth_observer_(
936         bitrate_controller_->CreateRtcpBandwidthObserver()),
937     send_bitrate_observer_(new VoEBitrateObserver(this))
938 {
939     WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
940                  "Channel::Channel() - ctor");
941     _inbandDtmfQueue.ResetDtmf();
942     _inbandDtmfGenerator.Init();
943     _outputAudioLevel.Clear();
944 
945     RtpRtcp::Configuration configuration;
946     configuration.id = VoEModuleId(instanceId, channelId);
947     configuration.audio = true;
948     configuration.outgoing_transport = this;
949     configuration.rtcp_feedback = this;
950     configuration.audio_messages = this;
951     configuration.receive_statistics = rtp_receive_statistics_.get();
952     configuration.bandwidth_callback = rtcp_bandwidth_observer_.get();
953 
954     _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
955 
956     statistics_proxy_.reset(new StatisticsProxy(_rtpRtcpModule->SSRC()));
957     rtp_receive_statistics_->RegisterRtcpStatisticsCallback(
958         statistics_proxy_.get());
959 
960     Config audioproc_config;
961     audioproc_config.Set<ExperimentalAgc>(new ExperimentalAgc(false));
962     rx_audioproc_.reset(AudioProcessing::Create(audioproc_config));
963 }
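// Construction note (descriptive): the channel registers itself as the
// RTP/RTCP module's outgoing transport, RTCP feedback sink and audio-messages
// callback, routes receive statistics into the StatisticsProxy, and creates a
// dedicated receive-side AudioProcessing instance with the experimental AGC
// disabled.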
964 
965 Channel::~Channel()
966 {
967     rtp_receive_statistics_->RegisterRtcpStatisticsCallback(NULL);
968     WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
969                  "Channel::~Channel() - dtor");
970 
971     if (_outputExternalMedia)
972     {
973         DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
974     }
975     if (channel_state_.Get().input_external_media)
976     {
977         DeRegisterExternalMediaProcessing(kRecordingPerChannel);
978     }
979     StopSend();
980     StopPlayout();
981 
982     {
983         CriticalSectionScoped cs(&_fileCritSect);
984         if (_inputFilePlayerPtr)
985         {
986             _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
987             _inputFilePlayerPtr->StopPlayingFile();
988             FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
989             _inputFilePlayerPtr = NULL;
990         }
991         if (_outputFilePlayerPtr)
992         {
993             _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
994             _outputFilePlayerPtr->StopPlayingFile();
995             FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
996             _outputFilePlayerPtr = NULL;
997         }
998         if (_outputFileRecorderPtr)
999         {
1000             _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
1001             _outputFileRecorderPtr->StopRecording();
1002             FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
1003             _outputFileRecorderPtr = NULL;
1004         }
1005     }
1006 
1007     // The order to safely shutdown modules in a channel is:
1008     // 1. De-register callbacks in modules
1009     // 2. De-register modules in process thread
1010     // 3. Destroy modules
1011     if (audio_coding_->RegisterTransportCallback(NULL) == -1)
1012     {
1013         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1014                      VoEId(_instanceId,_channelId),
1015                      "~Channel() failed to de-register transport callback"
1016                      " (Audio coding module)");
1017     }
1018     if (audio_coding_->RegisterVADCallback(NULL) == -1)
1019     {
1020         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1021                      VoEId(_instanceId,_channelId),
1022                      "~Channel() failed to de-register VAD callback"
1023                      " (Audio coding module)");
1024     }
1025     // De-register modules in process thread
1026     if (_moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()) == -1)
1027     {
1028         WEBRTC_TRACE(kTraceInfo, kTraceVoice,
1029                      VoEId(_instanceId,_channelId),
1030                      "~Channel() failed to deregister RTP/RTCP module");
1031     }
1032     // End of modules shutdown
1033 
1034     // Delete other objects
1035     if (vie_network_) {
1036       vie_network_->Release();
1037       vie_network_ = NULL;
1038     }
1039     RtpDump::DestroyRtpDump(&_rtpDumpIn);
1040     RtpDump::DestroyRtpDump(&_rtpDumpOut);
1041     delete &_callbackCritSect;
1042     delete &_fileCritSect;
1043     delete &volume_settings_critsect_;
1044 }
1045 
1046 int32_t
1047 Channel::Init()
1048 {
1049     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1050                  "Channel::Init()");
1051 
1052     channel_state_.Reset();
1053 
1054     // --- Initial sanity
1055 
1056     if ((_engineStatisticsPtr == NULL) ||
1057         (_moduleProcessThreadPtr == NULL))
1058     {
1059         WEBRTC_TRACE(kTraceError, kTraceVoice,
1060                      VoEId(_instanceId,_channelId),
1061                      "Channel::Init() must call SetEngineInformation() first");
1062         return -1;
1063     }
1064 
1065     // --- Add modules to process thread (for periodic scheduling)
1066 
1067     const bool processThreadFail =
1068         ((_moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get()) != 0) ||
1069         false);
1070     if (processThreadFail)
1071     {
1072         _engineStatisticsPtr->SetLastError(
1073             VE_CANNOT_INIT_CHANNEL, kTraceError,
1074             "Channel::Init() modules not registered");
1075         return -1;
1076     }
1077     // --- ACM initialization
1078 
1079     if ((audio_coding_->InitializeReceiver() == -1) ||
1080 #ifdef WEBRTC_CODEC_AVT
1081         // out-of-band Dtmf tones are played out by default
1082         (audio_coding_->SetDtmfPlayoutStatus(true) == -1) ||
1083 #endif
1084         (audio_coding_->InitializeSender() == -1))
1085     {
1086         _engineStatisticsPtr->SetLastError(
1087             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1088             "Channel::Init() unable to initialize the ACM - 1");
1089         return -1;
1090     }
1091 
1092     // --- RTP/RTCP module initialization
1093 
1094     // Ensure that RTCP is enabled by default for the created channel.
1095     // Note that the module will keep generating RTCP until it is explicitly
1096     // disabled by the user.
1097     // After StopListen (when no sockets exist), RTCP packets will no longer
1098     // be transmitted since the Transport object will then be invalid.
1099     telephone_event_handler_->SetTelephoneEventForwardToDecoder(true);
1100     // RTCP is enabled by default.
1101     if (_rtpRtcpModule->SetRTCPStatus(kRtcpCompound) == -1)
1102     {
1103         _engineStatisticsPtr->SetLastError(
1104             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1105             "Channel::Init() RTP/RTCP module not initialized");
1106         return -1;
1107     }
1108 
1109      // --- Register all permanent callbacks
1110     const bool fail =
1111         (audio_coding_->RegisterTransportCallback(this) == -1) ||
1112         (audio_coding_->RegisterVADCallback(this) == -1);
1113 
1114     if (fail)
1115     {
1116         _engineStatisticsPtr->SetLastError(
1117             VE_CANNOT_INIT_CHANNEL, kTraceError,
1118             "Channel::Init() callbacks not registered");
1119         return -1;
1120     }
1121 
1122     // --- Register all supported codecs to the receiving side of the
1123     // RTP/RTCP module
1124 
1125     CodecInst codec;
1126     const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
1127 
1128     for (int idx = 0; idx < nSupportedCodecs; idx++)
1129     {
1130         // Open up the RTP/RTCP receiver for all supported codecs
1131         if ((audio_coding_->Codec(idx, &codec) == -1) ||
1132             (rtp_receiver_->RegisterReceivePayload(
1133                 codec.plname,
1134                 codec.pltype,
1135                 codec.plfreq,
1136                 codec.channels,
1137                 (codec.rate < 0) ? 0 : codec.rate) == -1))
1138         {
1139             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1140                          VoEId(_instanceId,_channelId),
1141                          "Channel::Init() unable to register %s (%d/%d/%d/%d) "
1142                          "to RTP/RTCP receiver",
1143                          codec.plname, codec.pltype, codec.plfreq,
1144                          codec.channels, codec.rate);
1145         }
1146         else
1147         {
1148             WEBRTC_TRACE(kTraceInfo, kTraceVoice,
1149                          VoEId(_instanceId,_channelId),
1150                          "Channel::Init() %s (%d/%d/%d/%d) has been added to "
1151                          "the RTP/RTCP receiver",
1152                          codec.plname, codec.pltype, codec.plfreq,
1153                          codec.channels, codec.rate);
1154         }
1155 
1156         // Ensure that PCMU is used as default codec on the sending side
1157         if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1))
1158         {
1159             SetSendCodec(codec);
1160         }
1161 
1162         // Register default PT for outband 'telephone-event'
1163         if (!STR_CASE_CMP(codec.plname, "telephone-event"))
1164         {
1165             if ((_rtpRtcpModule->RegisterSendPayload(codec) == -1) ||
1166                 (audio_coding_->RegisterReceiveCodec(codec) == -1))
1167             {
1168                 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1169                              VoEId(_instanceId,_channelId),
1170                              "Channel::Init() failed to register outband "
1171                              "'telephone-event' (%d/%d) correctly",
1172                              codec.pltype, codec.plfreq);
1173             }
1174         }
1175 
1176         if (!STR_CASE_CMP(codec.plname, "CN"))
1177         {
1178             if ((audio_coding_->RegisterSendCodec(codec) == -1) ||
1179                 (audio_coding_->RegisterReceiveCodec(codec) == -1) ||
1180                 (_rtpRtcpModule->RegisterSendPayload(codec) == -1))
1181             {
1182                 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1183                              VoEId(_instanceId,_channelId),
1184                              "Channel::Init() failed to register CN (%d/%d) "
1185                              "correctly - 1",
1186                              codec.pltype, codec.plfreq);
1187             }
1188         }
1189 #ifdef WEBRTC_CODEC_RED
1190         // Register RED to the receiving side of the ACM.
1191         // We will not receive an OnInitializeDecoder() callback for RED.
1192         if (!STR_CASE_CMP(codec.plname, "RED"))
1193         {
1194             if (audio_coding_->RegisterReceiveCodec(codec) == -1)
1195             {
1196                 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1197                              VoEId(_instanceId,_channelId),
1198                              "Channel::Init() failed to register RED (%d/%d) "
1199                              "correctly",
1200                              codec.pltype, codec.plfreq);
1201             }
1202         }
1203 #endif
1204     }
1205 
1206     if (rx_audioproc_->noise_suppression()->set_level(kDefaultNsMode) != 0) {
1207       LOG_FERR1(LS_ERROR, noise_suppression()->set_level, kDefaultNsMode);
1208       return -1;
1209     }
1210     if (rx_audioproc_->gain_control()->set_mode(kDefaultRxAgcMode) != 0) {
1211       LOG_FERR1(LS_ERROR, gain_control()->set_mode, kDefaultRxAgcMode);
1212       return -1;
1213     }
1214 
1215     return 0;
1216 }
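// Typical bring-up order (a sketch based on the sanity check in Init() above;
// variable names are placeholders and error handling is omitted):
//
//   Channel* channel = NULL;
//   Channel::CreateChannel(channel, channelId, instanceId, config);
//   channel->SetEngineInformation(stats, output_mixer, transmit_mixer,
//                                 process_thread, adm, observer, crit_sect);
//   channel->Init();  // Fails unless SetEngineInformation() was called first.
//   channel->StartReceiving();
//   channel->StartPlayout();
//   channel->StartSend();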
1217 
1218 int32_t
1219 Channel::SetEngineInformation(Statistics& engineStatistics,
1220                               OutputMixer& outputMixer,
1221                               voe::TransmitMixer& transmitMixer,
1222                               ProcessThread& moduleProcessThread,
1223                               AudioDeviceModule& audioDeviceModule,
1224                               VoiceEngineObserver* voiceEngineObserver,
1225                               CriticalSectionWrapper* callbackCritSect)
1226 {
1227     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1228                  "Channel::SetEngineInformation()");
1229     _engineStatisticsPtr = &engineStatistics;
1230     _outputMixerPtr = &outputMixer;
1231     _transmitMixerPtr = &transmitMixer;
1232     _moduleProcessThreadPtr = &moduleProcessThread;
1233     _audioDeviceModulePtr = &audioDeviceModule;
1234     _voiceEngineObserverPtr = voiceEngineObserver;
1235     _callbackCritSectPtr = callbackCritSect;
1236     return 0;
1237 }
1238 
1239 int32_t
1240 Channel::UpdateLocalTimeStamp()
1241 {
1242 
1243     _timeStamp += _audioFrame.samples_per_channel_;
1244     return 0;
1245 }
1246 
1247 int32_t
1248 Channel::StartPlayout()
1249 {
1250     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1251                  "Channel::StartPlayout()");
1252     if (channel_state_.Get().playing)
1253     {
1254         return 0;
1255     }
1256 
1257     if (!_externalMixing) {
1258         // Add participant as a candidate for mixing.
1259         if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0)
1260         {
1261             _engineStatisticsPtr->SetLastError(
1262                 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1263                 "StartPlayout() failed to add participant to mixer");
1264             return -1;
1265         }
1266     }
1267 
1268     channel_state_.SetPlaying(true);
1269     if (RegisterFilePlayingToMixer() != 0)
1270         return -1;
1271 
1272     return 0;
1273 }
1274 
1275 int32_t
1276 Channel::StopPlayout()
1277 {
1278     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1279                  "Channel::StopPlayout()");
1280     if (!channel_state_.Get().playing)
1281     {
1282         return 0;
1283     }
1284 
1285     if (!_externalMixing) {
1286         // Remove participant as a candidate for mixing.
1287         if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0)
1288         {
1289             _engineStatisticsPtr->SetLastError(
1290                 VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
1291                 "StopPlayout() failed to remove participant from mixer");
1292             return -1;
1293         }
1294     }
1295 
1296     channel_state_.SetPlaying(false);
1297     _outputAudioLevel.Clear();
1298 
1299     return 0;
1300 }
1301 
1302 int32_t
1303 Channel::StartSend()
1304 {
1305     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1306                  "Channel::StartSend()");
1307     // Resume the previous sequence number which was reset by StopSend().
1308     // This needs to be done before |sending| is set to true.
1309     if (send_sequence_number_)
1310       SetInitSequenceNumber(send_sequence_number_);
1311 
1312     if (channel_state_.Get().sending)
1313     {
1314       return 0;
1315     }
1316     channel_state_.SetSending(true);
1317 
1318     if (_rtpRtcpModule->SetSendingStatus(true) != 0)
1319     {
1320         _engineStatisticsPtr->SetLastError(
1321             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1322             "StartSend() RTP/RTCP failed to start sending");
1323         CriticalSectionScoped cs(&_callbackCritSect);
1324         channel_state_.SetSending(false);
1325         return -1;
1326     }
1327 
1328     return 0;
1329 }
1330 
1331 int32_t
1332 Channel::StopSend()
1333 {
1334     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1335                  "Channel::StopSend()");
1336     if (!channel_state_.Get().sending)
1337     {
1338       return 0;
1339     }
1340     channel_state_.SetSending(false);
1341 
1342     // Store the sequence number to be able to pick up the same sequence for
1343     // the next StartSend(). This is needed when restarting the device; otherwise
1344     // it might cause libSRTP to complain about packets being replayed.
1345     // TODO(xians): Remove this workaround after RtpRtcpModule's refactoring
1346     // CL is landed. See issue
1347     // https://code.google.com/p/webrtc/issues/detail?id=2111 .
1348     send_sequence_number_ = _rtpRtcpModule->SequenceNumber();
1349 
1350     // Reset the sending SSRC and sequence number, and trigger direct
1351     // transmission of an RTCP BYE.
1352     if (_rtpRtcpModule->SetSendingStatus(false) == -1 ||
1353         _rtpRtcpModule->ResetSendDataCountersRTP() == -1)
1354     {
1355         _engineStatisticsPtr->SetLastError(
1356             VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1357             "StopSend() RTP/RTCP failed to stop sending");
1358     }
1359 
1360     return 0;
1361 }
1362 
1363 int32_t
1364 Channel::StartReceiving()
1365 {
1366     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1367                  "Channel::StartReceiving()");
1368     if (channel_state_.Get().receiving)
1369     {
1370         return 0;
1371     }
1372     channel_state_.SetReceiving(true);
1373     _numberOfDiscardedPackets = 0;
1374     return 0;
1375 }
1376 
1377 int32_t
1378 Channel::StopReceiving()
1379 {
1380     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1381                  "Channel::StopReceiving()");
1382     if (!channel_state_.Get().receiving)
1383     {
1384         return 0;
1385     }
1386 
1387     channel_state_.SetReceiving(false);
1388     return 0;
1389 }
1390 
1391 int32_t
1392 Channel::SetNetEQPlayoutMode(NetEqModes mode)
1393 {
1394     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1395                  "Channel::SetNetEQPlayoutMode()");
1396     AudioPlayoutMode playoutMode(voice);
1397     switch (mode)
1398     {
1399         case kNetEqDefault:
1400             playoutMode = voice;
1401             break;
1402         case kNetEqStreaming:
1403             playoutMode = streaming;
1404             break;
1405         case kNetEqFax:
1406             playoutMode = fax;
1407             break;
1408         case kNetEqOff:
1409             playoutMode = off;
1410             break;
1411     }
1412     if (audio_coding_->SetPlayoutMode(playoutMode) != 0)
1413     {
1414         _engineStatisticsPtr->SetLastError(
1415             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1416             "SetNetEQPlayoutMode() failed to set playout mode");
1417         return -1;
1418     }
1419     return 0;
1420 }
1421 
1422 int32_t
1423 Channel::GetNetEQPlayoutMode(NetEqModes& mode)
1424 {
1425     const AudioPlayoutMode playoutMode = audio_coding_->PlayoutMode();
1426     switch (playoutMode)
1427     {
1428         case voice:
1429             mode = kNetEqDefault;
1430             break;
1431         case streaming:
1432             mode = kNetEqStreaming;
1433             break;
1434         case fax:
1435             mode = kNetEqFax;
1436             break;
1437         case off:
1438             mode = kNetEqOff;
1439     }
1440     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
1441                  VoEId(_instanceId,_channelId),
1442                  "Channel::GetNetEQPlayoutMode() => mode=%u", mode);
1443     return 0;
1444 }
1445 
1446 int32_t
1447 Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)
1448 {
1449     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1450                  "Channel::RegisterVoiceEngineObserver()");
1451     CriticalSectionScoped cs(&_callbackCritSect);
1452 
1453     if (_voiceEngineObserverPtr)
1454     {
1455         _engineStatisticsPtr->SetLastError(
1456             VE_INVALID_OPERATION, kTraceError,
1457             "RegisterVoiceEngineObserver() observer already enabled");
1458         return -1;
1459     }
1460     _voiceEngineObserverPtr = &observer;
1461     return 0;
1462 }
1463 
1464 int32_t
1465 Channel::DeRegisterVoiceEngineObserver()
1466 {
1467     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1468                  "Channel::DeRegisterVoiceEngineObserver()");
1469     CriticalSectionScoped cs(&_callbackCritSect);
1470 
1471     if (!_voiceEngineObserverPtr)
1472     {
1473         _engineStatisticsPtr->SetLastError(
1474             VE_INVALID_OPERATION, kTraceWarning,
1475             "DeRegisterVoiceEngineObserver() observer already disabled");
1476         return 0;
1477     }
1478     _voiceEngineObserverPtr = NULL;
1479     return 0;
1480 }
1481 
1482 int32_t
1483 Channel::GetSendCodec(CodecInst& codec)
1484 {
1485     return (audio_coding_->SendCodec(&codec));
1486 }
1487 
1488 int32_t
1489 Channel::GetRecCodec(CodecInst& codec)
1490 {
1491     return (audio_coding_->ReceiveCodec(&codec));
1492 }
1493 
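// SetSendCodec() registers |codec| with the ACM and the RTP/RTCP module
// (retrying once after a de-registration if the first attempt fails), sets the
// audio packet size, and hooks up the send bitrate observer.
//
// Illustrative caller-side sketch (the codec values are hypothetical examples,
// not taken from this file; CodecInst field order is
// {pltype, plname, plfreq, pacsize, channels, rate}):
//
//   webrtc::CodecInst codec = {103, "ISAC", 16000, 480, 1, 32000};
//   if (channel->SetSendCodec(codec) != 0) {
//     // Query the engine statistics for the last error.
//   }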
1494 int32_t
1495 Channel::SetSendCodec(const CodecInst& codec)
1496 {
1497     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1498                  "Channel::SetSendCodec()");
1499 
1500     if (audio_coding_->RegisterSendCodec(codec) != 0)
1501     {
1502         WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1503                      "SetSendCodec() failed to register codec to ACM");
1504         return -1;
1505     }
1506 
1507     if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
1508     {
1509         _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1510         if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
1511         {
1512             WEBRTC_TRACE(
1513                     kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1514                     "SetSendCodec() failed to register codec to"
1515                     " RTP/RTCP module");
1516             return -1;
1517         }
1518     }
1519 
1520     if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0)
1521     {
1522         WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
1523                      "SetSendCodec() failed to set audio packet size");
1524         return -1;
1525     }
1526 
1527     bitrate_controller_->SetBitrateObserver(send_bitrate_observer_.get(),
1528                                             codec.rate, 0, 0);
1529 
1530     return 0;
1531 }
1532 
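// OnNetworkChanged() receives bandwidth-estimation updates (bitrate, loss,
// RTT); the send bitrate observer registered in SetSendCodec() above is
// presumably the source of these callbacks. |fraction_lost| uses the RTCP
// 0-255 scale and is converted below to a 0-100 percentage for the ACM,
// e.g. fraction_lost = 64  =>  100 * 64 / 255 = 25 (integer division).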
1533 void
1534 Channel::OnNetworkChanged(const uint32_t bitrate_bps,
1535                           const uint8_t fraction_lost,  // 0 - 255.
1536                           const uint32_t rtt) {
1537   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1538       "Channel::OnNetworkChanged(bitrate_bps=%d, fraction_lost=%d, rtt=%d)",
1539       bitrate_bps, fraction_lost, rtt);
1540   // Normalize the loss rate to a 0 - 100 percentage.
1541   if (audio_coding_->SetPacketLossRate(100 * fraction_lost / 255) != 0) {
1542     _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR,
1543         kTraceError, "OnNetworkChanged() failed to set packet loss rate");
1544     assert(false);  // This should not happen.
1545   }
1546 }
1547 
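// SetVADStatus() enables/disables VAD and DTX on the ACM. Disabling VAD forces
// DTX off as well. Note the argument order of the ACM call below:
// SetVAD(enable_dtx, enable_vad, mode).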
1548 int32_t
1549 Channel::SetVADStatus(bool enableVAD, ACMVADMode mode, bool disableDTX)
1550 {
1551     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1552                  "Channel::SetVADStatus(mode=%d)", mode);
1553     // To disable VAD, DTX must be disabled too
1554     disableDTX = ((enableVAD == false) ? true : disableDTX);
1555     if (audio_coding_->SetVAD(!disableDTX, enableVAD, mode) != 0)
1556     {
1557         _engineStatisticsPtr->SetLastError(
1558             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1559             "SetVADStatus() failed to set VAD");
1560         return -1;
1561     }
1562     return 0;
1563 }
1564 
1565 int32_t
1566 Channel::GetVADStatus(bool& enabledVAD, ACMVADMode& mode, bool& disabledDTX)
1567 {
1568     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1569                  "Channel::GetVADStatus");
1570     if (audio_coding_->VAD(&disabledDTX, &enabledVAD, &mode) != 0)
1571     {
1572         _engineStatisticsPtr->SetLastError(
1573             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1574             "GetVADStatus() failed to get VAD status");
1575         return -1;
1576     }
1577     disabledDTX = !disabledDTX;
1578     return 0;
1579 }
1580 
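// SetRecPayloadType() maps a receive payload type; it is only allowed while
// the channel is neither playing nor listening. A |codec.pltype| of -1 means
// "de-register": the currently registered payload type for the codec is looked
// up and removed from both the RTP receiver and the ACM. Otherwise the codec
// is registered with both, retrying once after a de-registration if the first
// attempt fails.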
1581 int32_t
1582 Channel::SetRecPayloadType(const CodecInst& codec)
1583 {
1584     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1585                  "Channel::SetRecPayloadType()");
1586 
1587     if (channel_state_.Get().playing)
1588     {
1589         _engineStatisticsPtr->SetLastError(
1590             VE_ALREADY_PLAYING, kTraceError,
1591             "SetRecPayloadType() unable to set PT while playing");
1592         return -1;
1593     }
1594     if (channel_state_.Get().receiving)
1595     {
1596         _engineStatisticsPtr->SetLastError(
1597             VE_ALREADY_LISTENING, kTraceError,
1598             "SetRecPayloadType() unable to set PT while listening");
1599         return -1;
1600     }
1601 
1602     if (codec.pltype == -1)
1603     {
1604         // De-register the selected codec (RTP/RTCP module and ACM)
1605 
1606         int8_t pltype(-1);
1607         CodecInst rxCodec = codec;
1608 
1609         // Get payload type for the given codec
1610         rtp_payload_registry_->ReceivePayloadType(
1611             rxCodec.plname,
1612             rxCodec.plfreq,
1613             rxCodec.channels,
1614             (rxCodec.rate < 0) ? 0 : rxCodec.rate,
1615             &pltype);
1616         rxCodec.pltype = pltype;
1617 
1618         if (rtp_receiver_->DeRegisterReceivePayload(pltype) != 0)
1619         {
1620             _engineStatisticsPtr->SetLastError(
1621                     VE_RTP_RTCP_MODULE_ERROR,
1622                     kTraceError,
1623                     "SetRecPayloadType() RTP/RTCP-module deregistration "
1624                     "failed");
1625             return -1;
1626         }
1627         if (audio_coding_->UnregisterReceiveCodec(rxCodec.pltype) != 0)
1628         {
1629             _engineStatisticsPtr->SetLastError(
1630                 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1631                 "SetRecPayloadType() ACM deregistration failed - 1");
1632             return -1;
1633         }
1634         return 0;
1635     }
1636 
1637     if (rtp_receiver_->RegisterReceivePayload(
1638         codec.plname,
1639         codec.pltype,
1640         codec.plfreq,
1641         codec.channels,
1642         (codec.rate < 0) ? 0 : codec.rate) != 0)
1643     {
1644         // First attempt to register failed => de-register and try again
1645         rtp_receiver_->DeRegisterReceivePayload(codec.pltype);
1646         if (rtp_receiver_->RegisterReceivePayload(
1647             codec.plname,
1648             codec.pltype,
1649             codec.plfreq,
1650             codec.channels,
1651             (codec.rate < 0) ? 0 : codec.rate) != 0)
1652         {
1653             _engineStatisticsPtr->SetLastError(
1654                 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1655                 "SetRecPayloadType() RTP/RTCP-module registration failed");
1656             return -1;
1657         }
1658     }
1659     if (audio_coding_->RegisterReceiveCodec(codec) != 0)
1660     {
1661         audio_coding_->UnregisterReceiveCodec(codec.pltype);
1662         if (audio_coding_->RegisterReceiveCodec(codec) != 0)
1663         {
1664             _engineStatisticsPtr->SetLastError(
1665                 VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1666                 "SetRecPayloadType() ACM registration failed - 1");
1667             return -1;
1668         }
1669     }
1670     return 0;
1671 }
1672 
1673 int32_t
1674 Channel::GetRecPayloadType(CodecInst& codec)
1675 {
1676     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1677                  "Channel::GetRecPayloadType()");
1678     int8_t payloadType(-1);
1679     if (rtp_payload_registry_->ReceivePayloadType(
1680         codec.plname,
1681         codec.plfreq,
1682         codec.channels,
1683         (codec.rate < 0) ? 0 : codec.rate,
1684         &payloadType) != 0)
1685     {
1686         _engineStatisticsPtr->SetLastError(
1687             VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
1688             "GetRecPayloadType() failed to retrieve RX payload type");
1689         return -1;
1690     }
1691     codec.pltype = payloadType;
1692     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1693                  "Channel::GetRecPayloadType() => pltype=%u", codec.pltype);
1694     return 0;
1695 }
1696 
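// SetSendCNPayloadType() re-registers the comfort-noise (CN) codec under a
// dynamic payload type. Only kFreq16000Hz and kFreq32000Hz are mapped below;
// for any other frequency samplingFreqHz stays -1 and the Codec() lookup is
// expected to fail with VE_AUDIO_CODING_MODULE_ERROR.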
1697 int32_t
1698 Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency)
1699 {
1700     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1701                  "Channel::SetSendCNPayloadType()");
1702 
1703     CodecInst codec;
1704     int32_t samplingFreqHz(-1);
1705     const int kMono = 1;
1706     if (frequency == kFreq32000Hz)
1707         samplingFreqHz = 32000;
1708     else if (frequency == kFreq16000Hz)
1709         samplingFreqHz = 16000;
1710 
1711     if (audio_coding_->Codec("CN", &codec, samplingFreqHz, kMono) == -1)
1712     {
1713         _engineStatisticsPtr->SetLastError(
1714             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1715             "SetSendCNPayloadType() failed to retrieve default CN codec "
1716             "settings");
1717         return -1;
1718     }
1719 
1720     // Modify the payload type (it must be in the dynamic payload type range).
1721     codec.pltype = type;
1722 
1723     if (audio_coding_->RegisterSendCodec(codec) != 0)
1724     {
1725         _engineStatisticsPtr->SetLastError(
1726             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
1727             "SetSendCNPayloadType() failed to register CN to ACM");
1728         return -1;
1729     }
1730 
1731     if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
1732     {
1733         _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
1734         if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
1735         {
1736             _engineStatisticsPtr->SetLastError(
1737                 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
1738                 "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
1739                 "module");
1740             return -1;
1741         }
1742     }
1743     return 0;
1744 }
1745 
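// RegisterExternalTransport() routes outgoing RTP/RTCP through a
// caller-provided webrtc::Transport instead of the built-in transport.
// Caller-side sketch (signatures approximate for this era of the API; treat
// the helper names as hypothetical, not a verbatim copy of transport.h):
//
//   class MyTransport : public webrtc::Transport {
//    public:
//     virtual int SendPacket(int channel, const void* data, int len) {
//       return ForwardRtp(data, len) ? len : -1;    // ForwardRtp: hypothetical
//     }
//     virtual int SendRTCPPacket(int channel, const void* data, int len) {
//       return ForwardRtcp(data, len) ? len : -1;   // ForwardRtcp: hypothetical
//     }
//   };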
1746 int32_t Channel::RegisterExternalTransport(Transport& transport)
1747 {
1748     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
1749                "Channel::RegisterExternalTransport()");
1750 
1751     CriticalSectionScoped cs(&_callbackCritSect);
1752 
1753     if (_externalTransport)
1754     {
1755         _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION,
1756                                            kTraceError,
1757               "RegisterExternalTransport() external transport already enabled");
1758        return -1;
1759     }
1760     _externalTransport = true;
1761     _transportPtr = &transport;
1762     return 0;
1763 }
1764 
1765 int32_t
1766 Channel::DeRegisterExternalTransport()
1767 {
1768     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1769                  "Channel::DeRegisterExternalTransport()");
1770 
1771     CriticalSectionScoped cs(&_callbackCritSect);
1772 
1773     if (!_transportPtr)
1774     {
1775         _engineStatisticsPtr->SetLastError(
1776             VE_INVALID_OPERATION, kTraceWarning,
1777             "DeRegisterExternalTransport() external transport already "
1778             "disabled");
1779         return 0;
1780     }
1781     _externalTransport = false;
1782     _transportPtr = NULL;
1783     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1784                  "DeRegisterExternalTransport() external transport is now disabled");
1785     return 0;
1786 }
1787 
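// ReceivedRTPPacket() is the entry point for incoming RTP when packets are fed
// into the channel (e.g. via an external transport). It updates the playout
// timestamp, optionally dumps the packet, parses the RTP header, updates the
// receive statistics, forwards the packet to the ViE bandwidth estimator when
// one is registered, and finally hands it to ReceivePacket().
// |packet_time.timestamp| appears to be in microseconds (it is rounded to
// milliseconds below); -1 means "use the current time".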
1788 int32_t Channel::ReceivedRTPPacket(const int8_t* data, int32_t length,
1789                                    const PacketTime& packet_time) {
1790   WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
1791                "Channel::ReceivedRTPPacket()");
1792 
1793   // Store playout timestamp for the received RTP packet
1794   UpdatePlayoutTimestamp(false);
1795 
1796   // Dump the RTP packet to a file (if RTP dump is enabled).
1797   if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
1798                             (uint16_t)length) == -1) {
1799     WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1800                  VoEId(_instanceId,_channelId),
1801                  "Channel::ReceivedRTPPacket() RTP dump to input file failed");
1802   }
1803   const uint8_t* received_packet = reinterpret_cast<const uint8_t*>(data);
1804   RTPHeader header;
1805   if (!rtp_header_parser_->Parse(received_packet, length, &header)) {
1806     WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1807                  "Incoming packet: invalid RTP header");
1808     return -1;
1809   }
1810   header.payload_type_frequency =
1811       rtp_payload_registry_->GetPayloadTypeFrequency(header.payloadType);
1812   if (header.payload_type_frequency < 0)
1813     return -1;
1814   bool in_order = IsPacketInOrder(header);
1815   rtp_receive_statistics_->IncomingPacket(header, length,
1816       IsPacketRetransmitted(header, in_order));
1817   rtp_payload_registry_->SetIncomingPayloadType(header);
1818 
1819   // Forward any packets to ViE bandwidth estimator, if enabled.
1820   {
1821     CriticalSectionScoped cs(&_callbackCritSect);
1822     if (vie_network_) {
1823       int64_t arrival_time_ms;
1824       if (packet_time.timestamp != -1) {
1825         arrival_time_ms = (packet_time.timestamp + 500) / 1000;
1826       } else {
1827         arrival_time_ms = TickTime::MillisecondTimestamp();
1828       }
1829       int payload_length = length - header.headerLength;
1830       vie_network_->ReceivedBWEPacket(video_channel_, arrival_time_ms,
1831                                       payload_length, header);
1832     }
1833   }
1834 
1835   return ReceivePacket(received_packet, length, header, in_order) ? 0 : -1;
1836 }
1837 
1838 bool Channel::ReceivePacket(const uint8_t* packet,
1839                             int packet_length,
1840                             const RTPHeader& header,
1841                             bool in_order) {
1842   if (rtp_payload_registry_->IsEncapsulated(header)) {
1843     return HandleEncapsulation(packet, packet_length, header);
1844   }
1845   const uint8_t* payload = packet + header.headerLength;
1846   int payload_length = packet_length - header.headerLength;
1847   assert(payload_length >= 0);
1848   PayloadUnion payload_specific;
1849   if (!rtp_payload_registry_->GetPayloadSpecifics(header.payloadType,
1850                                                   &payload_specific)) {
1851     return false;
1852   }
1853   return rtp_receiver_->IncomingRtpPacket(header, payload, payload_length,
1854                                           payload_specific, in_order);
1855 }
1856 
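// HandleEncapsulation() currently handles RTX only: it strips the RTX header,
// restores the original RTP packet, and re-injects it via OnRecoveredPacket().
// |restored_packet_in_use_| guards against recursive RTX-in-RTX packets.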
1857 bool Channel::HandleEncapsulation(const uint8_t* packet,
1858                                   int packet_length,
1859                                   const RTPHeader& header) {
1860   if (!rtp_payload_registry_->IsRtx(header))
1861     return false;
1862 
1863   // Remove the RTX header and parse the original RTP header.
1864   if (packet_length < header.headerLength)
1865     return false;
1866   if (packet_length > kVoiceEngineMaxIpPacketSizeBytes)
1867     return false;
1868   if (restored_packet_in_use_) {
1869     WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1870                  "Multiple RTX headers detected, dropping packet");
1871     return false;
1872   }
1873   uint8_t* restored_packet_ptr = restored_packet_;
1874   if (!rtp_payload_registry_->RestoreOriginalPacket(
1875       &restored_packet_ptr, packet, &packet_length, rtp_receiver_->SSRC(),
1876       header)) {
1877     WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVoice, _channelId,
1878                  "Incoming RTX packet: invalid RTP header");
1879     return false;
1880   }
1881   restored_packet_in_use_ = true;
1882   bool ret = OnRecoveredPacket(restored_packet_ptr, packet_length);
1883   restored_packet_in_use_ = false;
1884   return ret;
1885 }
1886 
1887 bool Channel::IsPacketInOrder(const RTPHeader& header) const {
1888   StreamStatistician* statistician =
1889       rtp_receive_statistics_->GetStatistician(header.ssrc);
1890   if (!statistician)
1891     return false;
1892   return statistician->IsPacketInOrder(header.sequenceNumber);
1893 }
1894 
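// IsPacketRetransmitted() is a heuristic used only when RTX is disabled: a
// packet that arrives out of order and looks older than the minimum observed
// RTT permits is flagged as a retransmission for the receive statistics (see
// the IncomingPacket() call in ReceivedRTPPacket()).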
1895 bool Channel::IsPacketRetransmitted(const RTPHeader& header,
1896                                     bool in_order) const {
1897   // Retransmissions are handled separately if RTX is enabled.
1898   if (rtp_payload_registry_->RtxEnabled())
1899     return false;
1900   StreamStatistician* statistician =
1901       rtp_receive_statistics_->GetStatistician(header.ssrc);
1902   if (!statistician)
1903     return false;
1904   // Check if this is a retransmission.
1905   uint16_t min_rtt = 0;
1906   _rtpRtcpModule->RTT(rtp_receiver_->SSRC(), NULL, NULL, &min_rtt, NULL);
1907   return !in_order &&
1908       statistician->IsRetransmitOfOldPacket(header, min_rtt);
1909 }
1910 
1911 int32_t Channel::ReceivedRTCPPacket(const int8_t* data, int32_t length) {
1912   WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
1913                "Channel::ReceivedRTCPPacket()");
1914   // Store playout timestamp for the received RTCP packet
1915   UpdatePlayoutTimestamp(true);
1916 
1917   // Dump the RTCP packet to a file (if RTP dump is enabled).
1918   if (_rtpDumpIn.DumpPacket((const uint8_t*)data,
1919                             (uint16_t)length) == -1) {
1920     WEBRTC_TRACE(kTraceWarning, kTraceVoice,
1921                  VoEId(_instanceId,_channelId),
1922                  "Channel::ReceivedRTCPPacket() RTCP dump to input file failed");
1923   }
1924 
1925   // Deliver RTCP packet to RTP/RTCP module for parsing
1926   if (_rtpRtcpModule->IncomingRtcpPacket((const uint8_t*)data,
1927                                          (uint16_t)length) == -1) {
1928     _engineStatisticsPtr->SetLastError(
1929         VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
1930         "Channel::ReceivedRTCPPacket() RTCP packet is invalid");
1931   }
1932 
1933   ntp_estimator_->UpdateRtcpTimestamp(rtp_receiver_->SSRC(),
1934                                       _rtpRtcpModule.get());
1935   return 0;
1936 }
1937 
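// StartPlayingFileLocally() creates a FilePlayer for |fileName| and adds this
// channel as an anonymous participant to the output mixer. Note the lock
// ordering: the mixer registration happens outside |_fileCritSect| (see
// RegisterFilePlayingToMixer() below for why).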
1938 int Channel::StartPlayingFileLocally(const char* fileName,
1939                                      bool loop,
1940                                      FileFormats format,
1941                                      int startPosition,
1942                                      float volumeScaling,
1943                                      int stopPosition,
1944                                      const CodecInst* codecInst)
1945 {
1946     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
1947                  "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
1948                  " format=%d, volumeScaling=%5.3f, startPosition=%d, "
1949                  "stopPosition=%d)", fileName, loop, format, volumeScaling,
1950                  startPosition, stopPosition);
1951 
1952     if (channel_state_.Get().output_file_playing)
1953     {
1954         _engineStatisticsPtr->SetLastError(
1955             VE_ALREADY_PLAYING, kTraceError,
1956             "StartPlayingFileLocally() is already playing");
1957         return -1;
1958     }
1959 
1960     {
1961         CriticalSectionScoped cs(&_fileCritSect);
1962 
1963         if (_outputFilePlayerPtr)
1964         {
1965             _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
1966             FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1967             _outputFilePlayerPtr = NULL;
1968         }
1969 
1970         _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
1971             _outputFilePlayerId, (const FileFormats)format);
1972 
1973         if (_outputFilePlayerPtr == NULL)
1974         {
1975             _engineStatisticsPtr->SetLastError(
1976                 VE_INVALID_ARGUMENT, kTraceError,
1977                 "StartPlayingFileLocally() filePlayer format is not correct");
1978             return -1;
1979         }
1980 
1981         const uint32_t notificationTime(0);
1982 
1983         if (_outputFilePlayerPtr->StartPlayingFile(
1984                 fileName,
1985                 loop,
1986                 startPosition,
1987                 volumeScaling,
1988                 notificationTime,
1989                 stopPosition,
1990                 (const CodecInst*)codecInst) != 0)
1991         {
1992             _engineStatisticsPtr->SetLastError(
1993                 VE_BAD_FILE, kTraceError,
1994                 "StartPlayingFile() failed to start file playout");
1995             _outputFilePlayerPtr->StopPlayingFile();
1996             FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
1997             _outputFilePlayerPtr = NULL;
1998             return -1;
1999         }
2000         _outputFilePlayerPtr->RegisterModuleFileCallback(this);
2001         channel_state_.SetOutputFilePlaying(true);
2002     }
2003 
2004     if (RegisterFilePlayingToMixer() != 0)
2005         return -1;
2006 
2007     return 0;
2008 }
2009 
2010 int Channel::StartPlayingFileLocally(InStream* stream,
2011                                      FileFormats format,
2012                                      int startPosition,
2013                                      float volumeScaling,
2014                                      int stopPosition,
2015                                      const CodecInst* codecInst)
2016 {
2017     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2018                  "Channel::StartPlayingFileLocally(format=%d,"
2019                  " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2020                  format, volumeScaling, startPosition, stopPosition);
2021 
2022     if(stream == NULL)
2023     {
2024         _engineStatisticsPtr->SetLastError(
2025             VE_BAD_FILE, kTraceError,
2026             "StartPlayingFileLocally() NULL as input stream");
2027         return -1;
2028     }
2029 
2030 
2031     if (channel_state_.Get().output_file_playing)
2032     {
2033         _engineStatisticsPtr->SetLastError(
2034             VE_ALREADY_PLAYING, kTraceError,
2035             "StartPlayingFileLocally() is already playing");
2036         return -1;
2037     }
2038 
2039     {
2040       CriticalSectionScoped cs(&_fileCritSect);
2041 
2042       // Destroy the old instance
2043       if (_outputFilePlayerPtr)
2044       {
2045           _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2046           FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2047           _outputFilePlayerPtr = NULL;
2048       }
2049 
2050       // Create the instance
2051       _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2052           _outputFilePlayerId,
2053           (const FileFormats)format);
2054 
2055       if (_outputFilePlayerPtr == NULL)
2056       {
2057           _engineStatisticsPtr->SetLastError(
2058               VE_INVALID_ARGUMENT, kTraceError,
2059               "StartPlayingFileLocally() filePlayer format is not correct");
2060           return -1;
2061       }
2062 
2063       const uint32_t notificationTime(0);
2064 
2065       if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2066                                                  volumeScaling,
2067                                                  notificationTime,
2068                                                  stopPosition, codecInst) != 0)
2069       {
2070           _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2071                                              "StartPlayingFile() failed to "
2072                                              "start file playout");
2073           _outputFilePlayerPtr->StopPlayingFile();
2074           FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2075           _outputFilePlayerPtr = NULL;
2076           return -1;
2077       }
2078       _outputFilePlayerPtr->RegisterModuleFileCallback(this);
2079       channel_state_.SetOutputFilePlaying(true);
2080     }
2081 
2082     if (RegisterFilePlayingToMixer() != 0)
2083         return -1;
2084 
2085     return 0;
2086 }
2087 
2088 int Channel::StopPlayingFileLocally()
2089 {
2090     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2091                  "Channel::StopPlayingFileLocally()");
2092 
2093     if (!channel_state_.Get().output_file_playing)
2094     {
2095         _engineStatisticsPtr->SetLastError(
2096             VE_INVALID_OPERATION, kTraceWarning,
2097             "StopPlayingFileLocally() is not playing");
2098         return 0;
2099     }
2100 
2101     {
2102         CriticalSectionScoped cs(&_fileCritSect);
2103 
2104         if (_outputFilePlayerPtr->StopPlayingFile() != 0)
2105         {
2106             _engineStatisticsPtr->SetLastError(
2107                 VE_STOP_RECORDING_FAILED, kTraceError,
2108                 "StopPlayingFile() could not stop playing");
2109             return -1;
2110         }
2111         _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2112         FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2113         _outputFilePlayerPtr = NULL;
2114         channel_state_.SetOutputFilePlaying(false);
2115     }
2116     // _fileCritSect cannot be taken while calling
2117     // SetAnonymousMixabilityStatus. Refer to comments in
2118     // StartPlayingFileLocally(const char* ...) for more details.
2119     if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0)
2120     {
2121         _engineStatisticsPtr->SetLastError(
2122             VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
2123             "StopPlayingFile() failed to stop participant from playing as a"
2124             " file in the mixer");
2125         return -1;
2126     }
2127 
2128     return 0;
2129 }
2130 
2131 int Channel::IsPlayingFileLocally() const
2132 {
2133     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2134                  "Channel::IsPlayingFileLocally()");
2135 
2136     return channel_state_.Get().output_file_playing;
2137 }
2138 
2139 int Channel::RegisterFilePlayingToMixer()
2140 {
2141     // Return success without registering file playing to the mixer if:
2142     // 1. the file starts playing before playout has started on this channel.
2143     // 2. playout starts without file playing on this channel.
2144     if (!channel_state_.Get().playing ||
2145         !channel_state_.Get().output_file_playing)
2146     {
2147         return 0;
2148     }
2149 
2150     // |_fileCritSect| cannot be taken while calling
2151     // SetAnonymousMixabilityStatus() since as soon as the participant is added
2152     // frames can be pulled by the mixer. Since the frames are generated from
2153     // the file, _fileCritSect will be taken. This would result in a deadlock.
2154     if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0)
2155     {
2156         channel_state_.SetOutputFilePlaying(false);
2157         CriticalSectionScoped cs(&_fileCritSect);
2158         _engineStatisticsPtr->SetLastError(
2159             VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
2160             "StartPlayingFile() failed to add participant as file to mixer");
2161         _outputFilePlayerPtr->StopPlayingFile();
2162         FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
2163         _outputFilePlayerPtr = NULL;
2164         return -1;
2165     }
2166 
2167     return 0;
2168 }
2169 
2170 int Channel::StartPlayingFileAsMicrophone(const char* fileName,
2171                                           bool loop,
2172                                           FileFormats format,
2173                                           int startPosition,
2174                                           float volumeScaling,
2175                                           int stopPosition,
2176                                           const CodecInst* codecInst)
2177 {
2178     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2179                  "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
2180                  "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
2181                  "stopPosition=%d)", fileName, loop, format, volumeScaling,
2182                  startPosition, stopPosition);
2183 
2184     CriticalSectionScoped cs(&_fileCritSect);
2185 
2186     if (channel_state_.Get().input_file_playing)
2187     {
2188         _engineStatisticsPtr->SetLastError(
2189             VE_ALREADY_PLAYING, kTraceWarning,
2190             "StartPlayingFileAsMicrophone() filePlayer is playing");
2191         return 0;
2192     }
2193 
2194     // Destroy the old instance
2195     if (_inputFilePlayerPtr)
2196     {
2197         _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2198         FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2199         _inputFilePlayerPtr = NULL;
2200     }
2201 
2202     // Create the instance
2203     _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2204         _inputFilePlayerId, (const FileFormats)format);
2205 
2206     if (_inputFilePlayerPtr == NULL)
2207     {
2208         _engineStatisticsPtr->SetLastError(
2209             VE_INVALID_ARGUMENT, kTraceError,
2210             "StartPlayingFileAsMicrophone() filePlayer format is not correct");
2211         return -1;
2212     }
2213 
2214     const uint32_t notificationTime(0);
2215 
2216     if (_inputFilePlayerPtr->StartPlayingFile(
2217         fileName,
2218         loop,
2219         startPosition,
2220         volumeScaling,
2221         notificationTime,
2222         stopPosition,
2223         (const CodecInst*)codecInst) != 0)
2224     {
2225         _engineStatisticsPtr->SetLastError(
2226             VE_BAD_FILE, kTraceError,
2227             "StartPlayingFile() failed to start file playout");
2228         _inputFilePlayerPtr->StopPlayingFile();
2229         FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2230         _inputFilePlayerPtr = NULL;
2231         return -1;
2232     }
2233     _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2234     channel_state_.SetInputFilePlaying(true);
2235 
2236     return 0;
2237 }
2238 
2239 int Channel::StartPlayingFileAsMicrophone(InStream* stream,
2240                                           FileFormats format,
2241                                           int startPosition,
2242                                           float volumeScaling,
2243                                           int stopPosition,
2244                                           const CodecInst* codecInst)
2245 {
2246     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2247                  "Channel::StartPlayingFileAsMicrophone(format=%d, "
2248                  "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
2249                  format, volumeScaling, startPosition, stopPosition);
2250 
2251     if(stream == NULL)
2252     {
2253         _engineStatisticsPtr->SetLastError(
2254             VE_BAD_FILE, kTraceError,
2255             "StartPlayingFileAsMicrophone NULL as input stream");
2256         return -1;
2257     }
2258 
2259     CriticalSectionScoped cs(&_fileCritSect);
2260 
2261     if (channel_state_.Get().input_file_playing)
2262     {
2263         _engineStatisticsPtr->SetLastError(
2264             VE_ALREADY_PLAYING, kTraceWarning,
2265             "StartPlayingFileAsMicrophone() is playing");
2266         return 0;
2267     }
2268 
2269     // Destroy the old instance
2270     if (_inputFilePlayerPtr)
2271     {
2272         _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2273         FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2274         _inputFilePlayerPtr = NULL;
2275     }
2276 
2277     // Create the instance
2278     _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
2279         _inputFilePlayerId, (const FileFormats)format);
2280 
2281     if (_inputFilePlayerPtr == NULL)
2282     {
2283         _engineStatisticsPtr->SetLastError(
2284             VE_INVALID_ARGUMENT, kTraceError,
2285             "StartPlayingFileAsMicrophone() filePlayer format is not correct");
2286         return -1;
2287     }
2288 
2289     const uint32_t notificationTime(0);
2290 
2291     if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
2292                                               volumeScaling, notificationTime,
2293                                               stopPosition, codecInst) != 0)
2294     {
2295         _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2296                                            "StartPlayingFile() failed to start "
2297                                            "file playout");
2298         _inputFilePlayerPtr->StopPlayingFile();
2299         FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2300         _inputFilePlayerPtr = NULL;
2301         return -1;
2302     }
2303 
2304     _inputFilePlayerPtr->RegisterModuleFileCallback(this);
2305     channel_state_.SetInputFilePlaying(true);
2306 
2307     return 0;
2308 }
2309 
2310 int Channel::StopPlayingFileAsMicrophone()
2311 {
2312     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2313                  "Channel::StopPlayingFileAsMicrophone()");
2314 
2315     CriticalSectionScoped cs(&_fileCritSect);
2316 
2317     if (!channel_state_.Get().input_file_playing)
2318     {
2319         _engineStatisticsPtr->SetLastError(
2320             VE_INVALID_OPERATION, kTraceWarning,
2321             "StopPlayingFileAsMicrophone() is not playing");
2322         return 0;
2323     }
2324 
2325     if (_inputFilePlayerPtr->StopPlayingFile() != 0)
2326     {
2327         _engineStatisticsPtr->SetLastError(
2328             VE_STOP_RECORDING_FAILED, kTraceError,
2329             "StopPlayingFile() could not stop playing");
2330         return -1;
2331     }
2332     _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
2333     FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
2334     _inputFilePlayerPtr = NULL;
2335     channel_state_.SetInputFilePlaying(false);
2336 
2337     return 0;
2338 }
2339 
2340 int Channel::IsPlayingFileAsMicrophone() const
2341 {
2342     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2343                  "Channel::IsPlayingFileAsMicrophone()");
2344     return channel_state_.Get().input_file_playing;
2345 }
2346 
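// StartRecordingPlayout() records this channel's playout to |fileName|. The
// file format is derived from |codecInst|: NULL selects 16 kHz PCM, the
// L16/PCMU/PCMA codecs select a WAV container, anything else a compressed
// file.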
2347 int Channel::StartRecordingPlayout(const char* fileName,
2348                                    const CodecInst* codecInst)
2349 {
2350     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2351                  "Channel::StartRecordingPlayout(fileName=%s)", fileName);
2352 
2353     if (_outputFileRecording)
2354     {
2355         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2356                      "StartRecordingPlayout() is already recording");
2357         return 0;
2358     }
2359 
2360     FileFormats format;
2361     const uint32_t notificationTime(0); // Not supported in VoE
2362     CodecInst dummyCodec={100,"L16",16000,320,1,320000};
2363 
2364     if ((codecInst != NULL) &&
2365       ((codecInst->channels < 1) || (codecInst->channels > 2)))
2366     {
2367         _engineStatisticsPtr->SetLastError(
2368             VE_BAD_ARGUMENT, kTraceError,
2369             "StartRecordingPlayout() invalid compression");
2370         return(-1);
2371     }
2372     if(codecInst == NULL)
2373     {
2374         format = kFileFormatPcm16kHzFile;
2375         codecInst=&dummyCodec;
2376     }
2377     else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2378         (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2379         (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2380     {
2381         format = kFileFormatWavFile;
2382     }
2383     else
2384     {
2385         format = kFileFormatCompressedFile;
2386     }
2387 
2388     CriticalSectionScoped cs(&_fileCritSect);
2389 
2390     // Destroy the old instance
2391     if (_outputFileRecorderPtr)
2392     {
2393         _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2394         FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2395         _outputFileRecorderPtr = NULL;
2396     }
2397 
2398     _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2399         _outputFileRecorderId, (const FileFormats)format);
2400     if (_outputFileRecorderPtr == NULL)
2401     {
2402         _engineStatisticsPtr->SetLastError(
2403             VE_INVALID_ARGUMENT, kTraceError,
2404             "StartRecordingPlayout() fileRecorder format is not correct");
2405         return -1;
2406     }
2407 
2408     if (_outputFileRecorderPtr->StartRecordingAudioFile(
2409         fileName, (const CodecInst&)*codecInst, notificationTime) != 0)
2410     {
2411         _engineStatisticsPtr->SetLastError(
2412             VE_BAD_FILE, kTraceError,
2413             "StartRecordingAudioFile() failed to start file recording");
2414         _outputFileRecorderPtr->StopRecording();
2415         FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2416         _outputFileRecorderPtr = NULL;
2417         return -1;
2418     }
2419     _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2420     _outputFileRecording = true;
2421 
2422     return 0;
2423 }
2424 
2425 int Channel::StartRecordingPlayout(OutStream* stream,
2426                                    const CodecInst* codecInst)
2427 {
2428     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2429                  "Channel::StartRecordingPlayout()");
2430 
2431     if (_outputFileRecording)
2432     {
2433         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
2434                      "StartRecordingPlayout() is already recording");
2435         return 0;
2436     }
2437 
2438     FileFormats format;
2439     const uint32_t notificationTime(0); // Not supported in VoE
2440     CodecInst dummyCodec={100,"L16",16000,320,1,320000};
2441 
2442     if (codecInst != NULL && codecInst->channels != 1)
2443     {
2444         _engineStatisticsPtr->SetLastError(
2445             VE_BAD_ARGUMENT, kTraceError,
2446             "StartRecordingPlayout() invalid compression");
2447         return(-1);
2448     }
2449     if(codecInst == NULL)
2450     {
2451         format = kFileFormatPcm16kHzFile;
2452         codecInst=&dummyCodec;
2453     }
2454     else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
2455         (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
2456         (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
2457     {
2458         format = kFileFormatWavFile;
2459     }
2460     else
2461     {
2462         format = kFileFormatCompressedFile;
2463     }
2464 
2465     CriticalSectionScoped cs(&_fileCritSect);
2466 
2467     // Destroy the old instance
2468     if (_outputFileRecorderPtr)
2469     {
2470         _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2471         FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2472         _outputFileRecorderPtr = NULL;
2473     }
2474 
2475     _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
2476         _outputFileRecorderId, (const FileFormats)format);
2477     if (_outputFileRecorderPtr == NULL)
2478     {
2479         _engineStatisticsPtr->SetLastError(
2480             VE_INVALID_ARGUMENT, kTraceError,
2481             "StartRecordingPlayout() fileRecorder format is not correct");
2482         return -1;
2483     }
2484 
2485     if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
2486                                                         notificationTime) != 0)
2487     {
2488         _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
2489                                            "StartRecordingPlayout() failed to "
2490                                            "start file recording");
2491         _outputFileRecorderPtr->StopRecording();
2492         FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2493         _outputFileRecorderPtr = NULL;
2494         return -1;
2495     }
2496 
2497     _outputFileRecorderPtr->RegisterModuleFileCallback(this);
2498     _outputFileRecording = true;
2499 
2500     return 0;
2501 }
2502 
2503 int Channel::StopRecordingPlayout()
2504 {
2505     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
2506                  "Channel::StopRecordingPlayout()");
2507 
2508     if (!_outputFileRecording)
2509     {
2510         WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
2511                      "StopRecordingPlayout() is not recording");
2512         return -1;
2513     }
2514 
2515 
2516     CriticalSectionScoped cs(&_fileCritSect);
2517 
2518     if (_outputFileRecorderPtr->StopRecording() != 0)
2519     {
2520         _engineStatisticsPtr->SetLastError(
2521             VE_STOP_RECORDING_FAILED, kTraceError,
2522             "StopRecording() could not stop recording");
2523         return(-1);
2524     }
2525     _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
2526     FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
2527     _outputFileRecorderPtr = NULL;
2528     _outputFileRecording = false;
2529 
2530     return 0;
2531 }
2532 
2533 void
2534 Channel::SetMixWithMicStatus(bool mix)
2535 {
2536     CriticalSectionScoped cs(&_fileCritSect);
2537     _mixFileWithMicrophone=mix;
2538 }
2539 
2540 int
2541 Channel::GetSpeechOutputLevel(uint32_t& level) const
2542 {
2543     int8_t currentLevel = _outputAudioLevel.Level();
2544     level = static_cast<int32_t> (currentLevel);
2545     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2546                VoEId(_instanceId,_channelId),
2547                "GetSpeechOutputLevel() => level=%u", level);
2548     return 0;
2549 }
2550 
2551 int
2552 Channel::GetSpeechOutputLevelFullRange(uint32_t& level) const
2553 {
2554     int16_t currentLevel = _outputAudioLevel.LevelFullRange();
2555     level = static_cast<int32_t> (currentLevel);
2556     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2557                VoEId(_instanceId,_channelId),
2558                "GetSpeechOutputLevelFullRange() => level=%u", level);
2559     return 0;
2560 }
2561 
2562 int
2563 Channel::SetMute(bool enable)
2564 {
2565     CriticalSectionScoped cs(&volume_settings_critsect_);
2566     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2567                "Channel::SetMute(enable=%d)", enable);
2568     _mute = enable;
2569     return 0;
2570 }
2571 
2572 bool
2573 Channel::Mute() const
2574 {
2575     CriticalSectionScoped cs(&volume_settings_critsect_);
2576     return _mute;
2577 }
2578 
2579 int
2580 Channel::SetOutputVolumePan(float left, float right)
2581 {
2582     CriticalSectionScoped cs(&volume_settings_critsect_);
2583     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2584                "Channel::SetOutputVolumePan()");
2585     _panLeft = left;
2586     _panRight = right;
2587     return 0;
2588 }
2589 
2590 int
2591 Channel::GetOutputVolumePan(float& left, float& right) const
2592 {
2593     CriticalSectionScoped cs(&volume_settings_critsect_);
2594     left = _panLeft;
2595     right = _panRight;
2596     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2597                VoEId(_instanceId,_channelId),
2598                "GetOutputVolumePan() => left=%3.2f, right=%3.2f", left, right);
2599     return 0;
2600 }
2601 
2602 int
2603 Channel::SetChannelOutputVolumeScaling(float scaling)
2604 {
2605     CriticalSectionScoped cs(&volume_settings_critsect_);
2606     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2607                "Channel::SetChannelOutputVolumeScaling()");
2608     _outputGain = scaling;
2609     return 0;
2610 }
2611 
2612 int
2613 Channel::GetChannelOutputVolumeScaling(float& scaling) const
2614 {
2615     CriticalSectionScoped cs(&volume_settings_critsect_);
2616     scaling = _outputGain;
2617     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2618                VoEId(_instanceId,_channelId),
2619                "GetChannelOutputVolumeScaling() => scaling=%3.2f", scaling);
2620     return 0;
2621 }
2622 
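// SendTelephoneEventOutband() sends DTMF as RTP telephone-event packets
// (RFC 4733 style) via the RTP/RTCP module; SendTelephoneEventInband() below
// instead queues the tone for in-band generation in the send audio path.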
2623 int Channel::SendTelephoneEventOutband(unsigned char eventCode,
2624                                        int lengthMs, int attenuationDb,
2625                                        bool playDtmfEvent)
2626 {
2627     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2628                "Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)",
2629                playDtmfEvent);
2630 
2631     _playOutbandDtmfEvent = playDtmfEvent;
2632 
2633     if (_rtpRtcpModule->SendTelephoneEventOutband(eventCode, lengthMs,
2634                                                  attenuationDb) != 0)
2635     {
2636         _engineStatisticsPtr->SetLastError(
2637             VE_SEND_DTMF_FAILED,
2638             kTraceWarning,
2639             "SendTelephoneEventOutband() failed to send event");
2640         return -1;
2641     }
2642     return 0;
2643 }
2644 
2645 int Channel::SendTelephoneEventInband(unsigned char eventCode,
2646                                          int lengthMs,
2647                                          int attenuationDb,
2648                                          bool playDtmfEvent)
2649 {
2650     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
2651                "Channel::SendTelephoneEventInband(..., playDtmfEvent=%d)",
2652                playDtmfEvent);
2653 
2654     _playInbandDtmfEvent = playDtmfEvent;
2655     _inbandDtmfQueue.AddDtmf(eventCode, lengthMs, attenuationDb);
2656 
2657     return 0;
2658 }
2659 
2660 int
2661 Channel::SetDtmfPlayoutStatus(bool enable)
2662 {
2663     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2664                "Channel::SetDtmfPlayoutStatus()");
2665     if (audio_coding_->SetDtmfPlayoutStatus(enable) != 0)
2666     {
2667         _engineStatisticsPtr->SetLastError(
2668             VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
2669             "SetDtmfPlayoutStatus() failed to set Dtmf playout");
2670         return -1;
2671     }
2672     return 0;
2673 }
2674 
2675 bool
2676 Channel::DtmfPlayoutStatus() const
2677 {
2678     return audio_coding_->DtmfPlayoutStatus();
2679 }
2680 
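// SetSendTelephoneEventPayloadType() registers the "telephone-event" payload
// (8 kHz clock) under a dynamic payload type; values above 127 are rejected.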
2681 int
2682 Channel::SetSendTelephoneEventPayloadType(unsigned char type)
2683 {
2684     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2685                "Channel::SetSendTelephoneEventPayloadType()");
2686     if (type > 127)
2687     {
2688         _engineStatisticsPtr->SetLastError(
2689             VE_INVALID_ARGUMENT, kTraceError,
2690             "SetSendTelephoneEventPayloadType() invalid type");
2691         return -1;
2692     }
2693     CodecInst codec = {};
2694     codec.plfreq = 8000;
2695     codec.pltype = type;
2696     memcpy(codec.plname, "telephone-event", 16);
2697     if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
2698     {
2699         _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
2700         if (_rtpRtcpModule->RegisterSendPayload(codec) != 0) {
2701             _engineStatisticsPtr->SetLastError(
2702                 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
2703                 "SetSendTelephoneEventPayloadType() failed to register send"
2704                 " payload type");
2705             return -1;
2706         }
2707     }
2708     _sendTelephoneEventPayloadType = type;
2709     return 0;
2710 }
2711 
2712 int
2713 Channel::GetSendTelephoneEventPayloadType(unsigned char& type)
2714 {
2715     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2716                  "Channel::GetSendTelephoneEventPayloadType()");
2717     type = _sendTelephoneEventPayloadType;
2718     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2719                VoEId(_instanceId,_channelId),
2720                "GetSendTelephoneEventPayloadType() => type=%u", type);
2721     return 0;
2722 }
2723 
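// UpdateRxVadDetection() derives a binary voice-activity decision from the
// decoded frame and notifies the registered VoERxVadCallback only when the
// decision changes.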
2724 int
2725 Channel::UpdateRxVadDetection(AudioFrame& audioFrame)
2726 {
2727     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2728                  "Channel::UpdateRxVadDetection()");
2729 
2730     int vadDecision = 1;
2731 
2732     vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive)? 1 : 0;
2733 
2734     if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr)
2735     {
2736         OnRxVadDetected(vadDecision);
2737         _oldVadDecision = vadDecision;
2738     }
2739 
2740     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
2741                  "Channel::UpdateRxVadDetection() => vadDecision=%d",
2742                  vadDecision);
2743     return 0;
2744 }
2745 
2746 int
2747 Channel::RegisterRxVadObserver(VoERxVadCallback &observer)
2748 {
2749     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2750                  "Channel::RegisterRxVadObserver()");
2751     CriticalSectionScoped cs(&_callbackCritSect);
2752 
2753     if (_rxVadObserverPtr)
2754     {
2755         _engineStatisticsPtr->SetLastError(
2756             VE_INVALID_OPERATION, kTraceError,
2757             "RegisterRxVadObserver() observer already enabled");
2758         return -1;
2759     }
2760     _rxVadObserverPtr = &observer;
2761     _RxVadDetection = true;
2762     return 0;
2763 }
2764 
2765 int
2766 Channel::DeRegisterRxVadObserver()
2767 {
2768     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2769                  "Channel::DeRegisterRxVadObserver()");
2770     CriticalSectionScoped cs(&_callbackCritSect);
2771 
2772     if (!_rxVadObserverPtr)
2773     {
2774         _engineStatisticsPtr->SetLastError(
2775             VE_INVALID_OPERATION, kTraceWarning,
2776             "DeRegisterRxVadObserver() observer already disabled");
2777         return 0;
2778     }
2779     _rxVadObserverPtr = NULL;
2780     _RxVadDetection = false;
2781     return 0;
2782 }
2783 
2784 int
2785 Channel::VoiceActivityIndicator(int &activity)
2786 {
2787     activity = _sendFrameType;
2788 
2789     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2790                  "Channel::VoiceActivityIndicator(indicator=%d)", activity);
2791     return 0;
2792 }
2793 
2794 #ifdef WEBRTC_VOICE_ENGINE_AGC
2795 
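// SetRxAgcStatus() configures receive-side (far-end) AGC on |rx_audioproc_|,
// the channel's own AudioProcessing instance. kAgcUnchanged keeps the current
// mode; the combined Rx AGC/NS state is mirrored into |channel_state_|
// (presumably consumed by the playout path to decide whether to run the
// Rx APM at all).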
2796 int
2797 Channel::SetRxAgcStatus(bool enable, AgcModes mode)
2798 {
2799     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2800                  "Channel::SetRxAgcStatus(enable=%d, mode=%d)",
2801                  (int)enable, (int)mode);
2802 
2803     GainControl::Mode agcMode = kDefaultRxAgcMode;
2804     switch (mode)
2805     {
2806         case kAgcDefault:
2807             break;
2808         case kAgcUnchanged:
2809             agcMode = rx_audioproc_->gain_control()->mode();
2810             break;
2811         case kAgcFixedDigital:
2812             agcMode = GainControl::kFixedDigital;
2813             break;
2814         case kAgcAdaptiveDigital:
2815             agcMode = GainControl::kAdaptiveDigital;
2816             break;
2817         default:
2818             _engineStatisticsPtr->SetLastError(
2819                 VE_INVALID_ARGUMENT, kTraceError,
2820                 "SetRxAgcStatus() invalid Agc mode");
2821             return -1;
2822     }
2823 
2824     if (rx_audioproc_->gain_control()->set_mode(agcMode) != 0)
2825     {
2826         _engineStatisticsPtr->SetLastError(
2827             VE_APM_ERROR, kTraceError,
2828             "SetRxAgcStatus() failed to set Agc mode");
2829         return -1;
2830     }
2831     if (rx_audioproc_->gain_control()->Enable(enable) != 0)
2832     {
2833         _engineStatisticsPtr->SetLastError(
2834             VE_APM_ERROR, kTraceError,
2835             "SetRxAgcStatus() failed to set Agc state");
2836         return -1;
2837     }
2838 
2839     _rxAgcIsEnabled = enable;
2840     channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
2841 
2842     return 0;
2843 }
2844 
2845 int
2846 Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode)
2847 {
2848     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2849                      "Channel::GetRxAgcStatus(enable=?, mode=?)");
2850 
2851     bool enable = rx_audioproc_->gain_control()->is_enabled();
2852     GainControl::Mode agcMode =
2853         rx_audioproc_->gain_control()->mode();
2854 
2855     enabled = enable;
2856 
2857     switch (agcMode)
2858     {
2859         case GainControl::kFixedDigital:
2860             mode = kAgcFixedDigital;
2861             break;
2862         case GainControl::kAdaptiveDigital:
2863             mode = kAgcAdaptiveDigital;
2864             break;
2865         default:
2866             _engineStatisticsPtr->SetLastError(
2867                 VE_APM_ERROR, kTraceError,
2868                 "GetRxAgcStatus() invalid Agc mode");
2869             return -1;
2870     }
2871 
2872     return 0;
2873 }
2874 
2875 int
2876 Channel::SetRxAgcConfig(AgcConfig config)
2877 {
2878     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2879                  "Channel::SetRxAgcConfig()");
2880 
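    // Note (illustrative): AgcConfig::targetLeveldBOv is given in dB below full
    // scale, so a value of 3 asks the receive-side AGC to aim for roughly -3 dBFS.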
2881     if (rx_audioproc_->gain_control()->set_target_level_dbfs(
2882         config.targetLeveldBOv) != 0)
2883     {
2884         _engineStatisticsPtr->SetLastError(
2885             VE_APM_ERROR, kTraceError,
2886             "SetRxAgcConfig() failed to set target peak |level| "
2887             "(or envelope) of the Agc");
2888         return -1;
2889     }
2890     if (rx_audioproc_->gain_control()->set_compression_gain_db(
2891         config.digitalCompressionGaindB) != 0)
2892     {
2893         _engineStatisticsPtr->SetLastError(
2894             VE_APM_ERROR, kTraceError,
2895             "SetRxAgcConfig() failed to set the range in |gain| the"
2896             " digital compression stage may apply");
2897         return -1;
2898     }
2899     if (rx_audioproc_->gain_control()->enable_limiter(
2900         config.limiterEnable) != 0)
2901     {
2902         _engineStatisticsPtr->SetLastError(
2903             VE_APM_ERROR, kTraceError,
2904             "SetRxAgcConfig() failed to set hard limiter to the signal");
2905         return -1;
2906     }
2907 
2908     return 0;
2909 }
2910 
2911 int
2912 Channel::GetRxAgcConfig(AgcConfig& config)
2913 {
2914     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2915                  "Channel::GetRxAgcConfig(config=%?)");
2916 
2917     config.targetLeveldBOv =
2918         rx_audioproc_->gain_control()->target_level_dbfs();
2919     config.digitalCompressionGaindB =
2920         rx_audioproc_->gain_control()->compression_gain_db();
2921     config.limiterEnable =
2922         rx_audioproc_->gain_control()->is_limiter_enabled();
2923 
2924     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
2925                VoEId(_instanceId,_channelId), "GetRxAgcConfig() => "
2926                    "targetLeveldBOv=%u, digitalCompressionGaindB=%u,"
2927                    " limiterEnable=%d",
2928                    config.targetLeveldBOv,
2929                    config.digitalCompressionGaindB,
2930                    config.limiterEnable);
2931 
2932     return 0;
2933 }
2934 
2935 #endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
2936 
2937 #ifdef WEBRTC_VOICE_ENGINE_NR
2938 
2939 int
2940 Channel::SetRxNsStatus(bool enable, NsModes mode)
2941 {
2942     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2943                  "Channel::SetRxNsStatus(enable=%d, mode=%d)",
2944                  (int)enable, (int)mode);
2945 
2946     NoiseSuppression::Level nsLevel = kDefaultNsMode;
2947     switch (mode)
2948     {
2949 
2950         case kNsDefault:
2951             break;
2952         case kNsUnchanged:
2953             nsLevel = rx_audioproc_->noise_suppression()->level();
2954             break;
2955         case kNsConference:
2956             nsLevel = NoiseSuppression::kHigh;
2957             break;
2958         case kNsLowSuppression:
2959             nsLevel = NoiseSuppression::kLow;
2960             break;
2961         case kNsModerateSuppression:
2962             nsLevel = NoiseSuppression::kModerate;
2963             break;
2964         case kNsHighSuppression:
2965             nsLevel = NoiseSuppression::kHigh;
2966             break;
2967         case kNsVeryHighSuppression:
2968             nsLevel = NoiseSuppression::kVeryHigh;
2969             break;
2970     }
2971 
2972     if (rx_audioproc_->noise_suppression()->set_level(nsLevel)
2973         != 0)
2974     {
2975         _engineStatisticsPtr->SetLastError(
2976             VE_APM_ERROR, kTraceError,
2977             "SetRxNsStatus() failed to set NS level");
2978         return -1;
2979     }
2980     if (rx_audioproc_->noise_suppression()->Enable(enable) != 0)
2981     {
2982         _engineStatisticsPtr->SetLastError(
2983             VE_APM_ERROR, kTraceError,
2984             "SetRxNsStatus() failed to set NS state");
2985         return -1;
2986     }
2987 
2988     _rxNsIsEnabled = enable;
2989     channel_state_.SetRxApmIsEnabled(_rxAgcIsEnabled || _rxNsIsEnabled);
2990 
2991     return 0;
2992 }
2993 
2994 int
2995 Channel::GetRxNsStatus(bool& enabled, NsModes& mode)
2996 {
2997     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
2998                  "Channel::GetRxNsStatus(enable=?, mode=?)");
2999 
3000     bool enable =
3001         rx_audioproc_->noise_suppression()->is_enabled();
3002     NoiseSuppression::Level ncLevel =
3003         rx_audioproc_->noise_suppression()->level();
3004 
3005     enabled = enable;
3006 
3007     switch (ncLevel)
3008     {
3009         case NoiseSuppression::kLow:
3010             mode = kNsLowSuppression;
3011             break;
3012         case NoiseSuppression::kModerate:
3013             mode = kNsModerateSuppression;
3014             break;
3015         case NoiseSuppression::kHigh:
3016             mode = kNsHighSuppression;
3017             break;
3018         case NoiseSuppression::kVeryHigh:
3019             mode = kNsVeryHighSuppression;
3020             break;
3021     }
3022 
3023     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3024                VoEId(_instanceId,_channelId),
3025                "GetRxNsStatus() => enabled=%d, mode=%d", enabled, mode);
3026     return 0;
3027 }
3028 
3029 #endif // #ifdef WEBRTC_VOICE_ENGINE_NR
3030 
3031 int
3032 Channel::RegisterRTCPObserver(VoERTCPObserver& observer)
3033 {
3034     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3035                  "Channel::RegisterRTCPObserver()");
3036     CriticalSectionScoped cs(&_callbackCritSect);
3037 
3038     if (_rtcpObserverPtr)
3039     {
3040         _engineStatisticsPtr->SetLastError(
3041             VE_INVALID_OPERATION, kTraceError,
3042             "RegisterRTCPObserver() observer already enabled");
3043         return -1;
3044     }
3045 
3046     _rtcpObserverPtr = &observer;
3047     _rtcpObserver = true;
3048 
3049     return 0;
3050 }
3051 
3052 int
3053 Channel::DeRegisterRTCPObserver()
3054 {
3055     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3056                  "Channel::DeRegisterRTCPObserver()");
3057     CriticalSectionScoped cs(&_callbackCritSect);
3058 
3059     if (!_rtcpObserverPtr)
3060     {
3061         _engineStatisticsPtr->SetLastError(
3062             VE_INVALID_OPERATION, kTraceWarning,
3063             "DeRegisterRTCPObserver() observer already disabled");
3064         return 0;
3065     }
3066 
3067     _rtcpObserver = false;
3068     _rtcpObserverPtr = NULL;
3069 
3070     return 0;
3071 }
3072 
3073 int
3074 Channel::SetLocalSSRC(unsigned int ssrc)
3075 {
3076     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3077                  "Channel::SetLocalSSRC()");
3078     if (channel_state_.Get().sending)
3079     {
3080         _engineStatisticsPtr->SetLastError(
3081             VE_ALREADY_SENDING, kTraceError,
3082             "SetLocalSSRC() already sending");
3083         return -1;
3084     }
3085     _rtpRtcpModule->SetSSRC(ssrc);
3086     return 0;
3087 }
3088 
3089 int
3090 Channel::GetLocalSSRC(unsigned int& ssrc)
3091 {
3092     ssrc = _rtpRtcpModule->SSRC();
3093     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3094                  VoEId(_instanceId,_channelId),
3095                  "GetLocalSSRC() => ssrc=%lu", ssrc);
3096     return 0;
3097 }
3098 
3099 int
3100 Channel::GetRemoteSSRC(unsigned int& ssrc)
3101 {
3102     ssrc = rtp_receiver_->SSRC();
3103     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3104                  VoEId(_instanceId,_channelId),
3105                  "GetRemoteSSRC() => ssrc=%lu", ssrc);
3106     return 0;
3107 }
3108 
3109 int Channel::SetSendAudioLevelIndicationStatus(bool enable, unsigned char id) {
3110   _includeAudioLevelIndication = enable;
3111   return SetSendRtpHeaderExtension(enable, kRtpExtensionAudioLevel, id);
3112 }
3113 
3114 int Channel::SetReceiveAudioLevelIndicationStatus(bool enable,
3115                                                   unsigned char id) {
3116   rtp_header_parser_->DeregisterRtpHeaderExtension(
3117       kRtpExtensionAudioLevel);
3118   if (enable && !rtp_header_parser_->RegisterRtpHeaderExtension(
3119           kRtpExtensionAudioLevel, id)) {
3120     return -1;
3121   }
3122   return 0;
3123 }
3124 
3125 int Channel::SetSendAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
3126   return SetSendRtpHeaderExtension(enable, kRtpExtensionAbsoluteSendTime, id);
3127 }
3128 
3129 int Channel::SetReceiveAbsoluteSenderTimeStatus(bool enable, unsigned char id) {
3130   rtp_header_parser_->DeregisterRtpHeaderExtension(
3131       kRtpExtensionAbsoluteSendTime);
3132   if (enable && !rtp_header_parser_->RegisterRtpHeaderExtension(
3133       kRtpExtensionAbsoluteSendTime, id)) {
3134     return -1;
3135   }
3136   return 0;
3137 }
3138 
3139 int
3140 Channel::SetRTCPStatus(bool enable)
3141 {
3142     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3143                  "Channel::SetRTCPStatus()");
3144     if (_rtpRtcpModule->SetRTCPStatus(enable ?
3145         kRtcpCompound : kRtcpOff) != 0)
3146     {
3147         _engineStatisticsPtr->SetLastError(
3148             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3149             "SetRTCPStatus() failed to set RTCP status");
3150         return -1;
3151     }
3152     return 0;
3153 }
3154 
3155 int
3156 Channel::GetRTCPStatus(bool& enabled)
3157 {
3158     RTCPMethod method = _rtpRtcpModule->RTCP();
3159     enabled = (method != kRtcpOff);
3160     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3161                  VoEId(_instanceId,_channelId),
3162                  "GetRTCPStatus() => enabled=%d", enabled);
3163     return 0;
3164 }
3165 
3166 int
3167 Channel::SetRTCP_CNAME(const char cName[256])
3168 {
3169     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3170                  "Channel::SetRTCP_CNAME()");
3171     if (_rtpRtcpModule->SetCNAME(cName) != 0)
3172     {
3173         _engineStatisticsPtr->SetLastError(
3174             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3175             "SetRTCP_CNAME() failed to set RTCP CNAME");
3176         return -1;
3177     }
3178     return 0;
3179 }
3180 
3181 int
3182 Channel::GetRTCP_CNAME(char cName[256])
3183 {
3184     if (_rtpRtcpModule->CNAME(cName) != 0)
3185     {
3186         _engineStatisticsPtr->SetLastError(
3187             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3188             "GetRTCP_CNAME() failed to retrieve RTCP CNAME");
3189         return -1;
3190     }
3191     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3192                  VoEId(_instanceId, _channelId),
3193                  "GetRTCP_CNAME() => cName=%s", cName);
3194     return 0;
3195 }
3196 
3197 int
3198 Channel::GetRemoteRTCP_CNAME(char cName[256])
3199 {
3200     if (cName == NULL)
3201     {
3202         _engineStatisticsPtr->SetLastError(
3203             VE_INVALID_ARGUMENT, kTraceError,
3204             "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
3205         return -1;
3206     }
3207     char cname[RTCP_CNAME_SIZE];
3208     const uint32_t remoteSSRC = rtp_receiver_->SSRC();
3209     if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0)
3210     {
3211         _engineStatisticsPtr->SetLastError(
3212             VE_CANNOT_RETRIEVE_CNAME, kTraceError,
3213             "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
3214         return -1;
3215     }
3216     strcpy(cName, cname);
3217     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3218                  VoEId(_instanceId, _channelId),
3219                  "GetRemoteRTCP_CNAME() => cName=%s", cName);
3220     return 0;
3221 }
3222 
3223 int
3224 Channel::GetRemoteRTCPData(
3225     unsigned int& NTPHigh,
3226     unsigned int& NTPLow,
3227     unsigned int& timestamp,
3228     unsigned int& playoutTimestamp,
3229     unsigned int* jitter,
3230     unsigned short* fractionLost)
3231 {
3232     // --- Information from sender info in received Sender Reports
3233 
3234     RTCPSenderInfo senderInfo;
3235     if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0)
3236     {
3237         _engineStatisticsPtr->SetLastError(
3238             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3239             "GetRemoteRTCPData() failed to retrieve sender info for remote "
3240             "side");
3241         return -1;
3242     }
3243 
3244     // We only use 12 of the 20 bytes in the sender info (the packet and
3245     // octet counts are ignored).
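    // Illustrative detail: NTPseconds/NTPfraction together form the 64-bit NTP
    // timestamp from the sender report; the fraction is in units of 2^-32 s,
    // so a fraction of 0x80000000 corresponds to about 0.5 s.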
3246     NTPHigh = senderInfo.NTPseconds;
3247     NTPLow = senderInfo.NTPfraction;
3248     timestamp = senderInfo.RTPtimeStamp;
3249 
3250     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3251                  VoEId(_instanceId, _channelId),
3252                  "GetRemoteRTCPData() => NTPHigh=%lu, NTPLow=%lu, "
3253                  "timestamp=%lu",
3254                  NTPHigh, NTPLow, timestamp);
3255 
3256     // --- Locally derived information
3257 
3258     // This value is updated on each incoming RTCP packet (0 when no packet
3259     // has been received)
3260     playoutTimestamp = playout_timestamp_rtcp_;
3261 
3262     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3263                  VoEId(_instanceId, _channelId),
3264                  "GetRemoteRTCPData() => playoutTimestamp=%lu",
3265                  playout_timestamp_rtcp_);
3266 
3267     if (NULL != jitter || NULL != fractionLost)
3268     {
3269         // Get all RTCP receiver report blocks that have been received on this
3270         // channel. If we receive RTP packets from a remote source we know the
3271         // remote SSRC and use the report block from that source.
3272         // Otherwise use the first report block.
3273         std::vector<RTCPReportBlock> remote_stats;
3274         if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
3275             remote_stats.empty()) {
3276           WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3277                        VoEId(_instanceId, _channelId),
3278                        "GetRemoteRTCPData() failed to measure statistics due"
3279                        " to lack of received RTP and/or RTCP packets");
3280           return -1;
3281         }
3282 
3283         uint32_t remoteSSRC = rtp_receiver_->SSRC();
3284         std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
3285         for (; it != remote_stats.end(); ++it) {
3286           if (it->remoteSSRC == remoteSSRC)
3287             break;
3288         }
3289 
3290         if (it == remote_stats.end()) {
3291           // If we have not received any RTCP packets from this SSRC it probably
3292           // means that we have not received any RTP packets.
3293           // Use the first received report block instead.
3294           it = remote_stats.begin();
3295           remoteSSRC = it->remoteSSRC;
3296         }
3297 
3298         if (jitter) {
3299           *jitter = it->jitter;
3300           WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3301                        VoEId(_instanceId, _channelId),
3302                        "GetRemoteRTCPData() => jitter = %lu", *jitter);
3303         }
3304 
3305         if (fractionLost) {
3306           *fractionLost = it->fractionLost;
3307           WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3308                        VoEId(_instanceId, _channelId),
3309                        "GetRemoteRTCPData() => fractionLost = %lu",
3310                        *fractionLost);
3311         }
3312     }
3313     return 0;
3314 }
3315 
3316 int
3317 Channel::SendApplicationDefinedRTCPPacket(unsigned char subType,
3318                                              unsigned int name,
3319                                              const char* data,
3320                                              unsigned short dataLengthInBytes)
3321 {
3322     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3323                  "Channel::SendApplicationDefinedRTCPPacket()");
3324     if (!channel_state_.Get().sending)
3325     {
3326         _engineStatisticsPtr->SetLastError(
3327             VE_NOT_SENDING, kTraceError,
3328             "SendApplicationDefinedRTCPPacket() not sending");
3329         return -1;
3330     }
3331     if (NULL == data)
3332     {
3333         _engineStatisticsPtr->SetLastError(
3334             VE_INVALID_ARGUMENT, kTraceError,
3335             "SendApplicationDefinedRTCPPacket() invalid data value");
3336         return -1;
3337     }
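    // The APP payload must be 32-bit aligned (a multiple of 4 bytes), as
    // required for application-defined RTCP packets by RFC 3550.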
3338     if (dataLengthInBytes % 4 != 0)
3339     {
3340         _engineStatisticsPtr->SetLastError(
3341             VE_INVALID_ARGUMENT, kTraceError,
3342             "SendApplicationDefinedRTCPPacket() invalid length value");
3343         return -1;
3344     }
3345     RTCPMethod status = _rtpRtcpModule->RTCP();
3346     if (status == kRtcpOff)
3347     {
3348         _engineStatisticsPtr->SetLastError(
3349             VE_RTCP_ERROR, kTraceError,
3350             "SendApplicationDefinedRTCPPacket() RTCP is disabled");
3351         return -1;
3352     }
3353 
3354     // Create and schedule the RTCP APP packet for transmission
3355     if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
3356         subType,
3357         name,
3358         (const unsigned char*) data,
3359         dataLengthInBytes) != 0)
3360     {
3361         _engineStatisticsPtr->SetLastError(
3362             VE_SEND_ERROR, kTraceError,
3363             "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
3364         return -1;
3365     }
3366     return 0;
3367 }
3368 
3369 int
3370 Channel::GetRTPStatistics(
3371         unsigned int& averageJitterMs,
3372         unsigned int& maxJitterMs,
3373         unsigned int& discardedPackets)
3374 {
3375     // The jitter statistics are updated for each received RTP packet, i.e.
3376     // they are based on received packets only.
3377     if (_rtpRtcpModule->RTCP() == kRtcpOff) {
3378       // If RTCP is off, there is no timed thread in the RTCP module regularly
3379       // generating new stats, trigger the update manually here instead.
3380       StreamStatistician* statistician =
3381           rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
3382       if (statistician) {
3383         // Don't use returned statistics, use data from proxy instead so that
3384         // max jitter can be fetched atomically.
3385         RtcpStatistics s;
3386         statistician->GetStatistics(&s, true);
3387       }
3388     }
3389 
3390     ChannelStatistics stats = statistics_proxy_->GetStats();
3391     const int32_t playoutFrequency = audio_coding_->PlayoutFrequency();
3392     if (playoutFrequency > 0) {
3393       // Scale RTP statistics given the current playout frequency
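      // Illustrative example: jitter is reported in RTP timestamp units, so at
      // a 16 kHz playout clock a jitter of 320 units maps to
      // 320 / (16000 / 1000) = 20 ms.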
3394       maxJitterMs = stats.max_jitter / (playoutFrequency / 1000);
3395       averageJitterMs = stats.rtcp.jitter / (playoutFrequency / 1000);
3396     }
3397 
3398     discardedPackets = _numberOfDiscardedPackets;
3399 
3400     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3401                VoEId(_instanceId, _channelId),
3402                "GetRTPStatistics() => averageJitterMs = %lu, maxJitterMs = %lu,"
3403                " discardedPackets = %lu)",
3404                averageJitterMs, maxJitterMs, discardedPackets);
3405     return 0;
3406 }
3407 
3408 int Channel::GetRemoteRTCPReportBlocks(
3409     std::vector<ReportBlock>* report_blocks) {
3410   if (report_blocks == NULL) {
3411     _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
3412       "GetRemoteRTCPReportBlocks() invalid report_blocks.");
3413     return -1;
3414   }
3415 
3416   // Get the report blocks from the latest received RTCP Sender or Receiver
3417   // Report. Each element in the vector contains the sender's SSRC and a
3418   // report block according to RFC 3550.
3419   std::vector<RTCPReportBlock> rtcp_report_blocks;
3420   if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
3421     _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3422         "GetRemoteRTCPReportBlocks() failed to read RTCP SR/RR report block.");
3423     return -1;
3424   }
3425 
3426   if (rtcp_report_blocks.empty())
3427     return 0;
3428 
3429   std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
3430   for (; it != rtcp_report_blocks.end(); ++it) {
3431     ReportBlock report_block;
3432     report_block.sender_SSRC = it->remoteSSRC;
3433     report_block.source_SSRC = it->sourceSSRC;
3434     report_block.fraction_lost = it->fractionLost;
3435     report_block.cumulative_num_packets_lost = it->cumulativeLost;
3436     report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
3437     report_block.interarrival_jitter = it->jitter;
3438     report_block.last_SR_timestamp = it->lastSR;
3439     report_block.delay_since_last_SR = it->delaySinceLastSR;
3440     report_blocks->push_back(report_block);
3441   }
3442   return 0;
3443 }
3444 
3445 int
3446 Channel::GetRTPStatistics(CallStatistics& stats)
3447 {
3448     // --- RtcpStatistics
3449 
3450     // The jitter statistics are updated for each received RTP packet, i.e.
3451     // they are based on received packets only.
3452     RtcpStatistics statistics;
3453     StreamStatistician* statistician =
3454         rtp_receive_statistics_->GetStatistician(rtp_receiver_->SSRC());
3455     if (!statistician || !statistician->GetStatistics(
3456         &statistics, _rtpRtcpModule->RTCP() == kRtcpOff)) {
3457       _engineStatisticsPtr->SetLastError(
3458           VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
3459           "GetRTPStatistics() failed to read RTP statistics from the "
3460           "RTP/RTCP module");
3461     }
3462 
3463     stats.fractionLost = statistics.fraction_lost;
3464     stats.cumulativeLost = statistics.cumulative_lost;
3465     stats.extendedMax = statistics.extended_max_sequence_number;
3466     stats.jitterSamples = statistics.jitter;
3467 
3468     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3469                  VoEId(_instanceId, _channelId),
3470                  "GetRTPStatistics() => fractionLost=%lu, cumulativeLost=%lu,"
3471                  " extendedMax=%lu, jitterSamples=%li)",
3472                  stats.fractionLost, stats.cumulativeLost, stats.extendedMax,
3473                  stats.jitterSamples);
3474 
3475     // --- RTT
3476 
3477     uint16_t RTT(0);
3478     RTCPMethod method = _rtpRtcpModule->RTCP();
3479     if (method == kRtcpOff)
3480     {
3481         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3482                      VoEId(_instanceId, _channelId),
3483                      "GetRTPStatistics() RTCP is disabled => valid RTT "
3484                      "measurements cannot be retrieved");
3485     } else
3486     {
3487         // The remote SSRC will be zero if no RTP packet has been received.
3488         uint32_t remoteSSRC = rtp_receiver_->SSRC();
3489         if (remoteSSRC > 0)
3490         {
3491             uint16_t avgRTT(0);
3492             uint16_t maxRTT(0);
3493             uint16_t minRTT(0);
3494 
3495             if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT, &maxRTT)
3496                 != 0)
3497             {
3498                 WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3499                              VoEId(_instanceId, _channelId),
3500                              "GetRTPStatistics() failed to retrieve RTT from "
3501                              "the RTP/RTCP module");
3502             }
3503         } else
3504         {
3505             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3506                          VoEId(_instanceId, _channelId),
3507                          "GetRTPStatistics() failed to measure RTT since no "
3508                          "RTP packets have been received yet");
3509         }
3510     }
3511 
3512     stats.rttMs = static_cast<int> (RTT);
3513 
3514     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3515                  VoEId(_instanceId, _channelId),
3516                  "GetRTPStatistics() => rttMs=%d", stats.rttMs);
3517 
3518     // --- Data counters
3519 
3520     uint32_t bytesSent(0);
3521     uint32_t packetsSent(0);
3522     uint32_t bytesReceived(0);
3523     uint32_t packetsReceived(0);
3524 
3525     if (statistician) {
3526       statistician->GetDataCounters(&bytesReceived, &packetsReceived);
3527     }
3528 
3529     if (_rtpRtcpModule->DataCountersRTP(&bytesSent,
3530                                         &packetsSent) != 0)
3531     {
3532         WEBRTC_TRACE(kTraceWarning, kTraceVoice,
3533                      VoEId(_instanceId, _channelId),
3534                      "GetRTPStatistics() failed to retrieve RTP datacounters =>"
3535                      " output will not be complete");
3536     }
3537 
3538     stats.bytesSent = bytesSent;
3539     stats.packetsSent = packetsSent;
3540     stats.bytesReceived = bytesReceived;
3541     stats.packetsReceived = packetsReceived;
3542 
3543     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3544                  VoEId(_instanceId, _channelId),
3545                  "GetRTPStatistics() => bytesSent=%d, packetsSent=%d,"
3546                  " bytesReceived=%d, packetsReceived=%d)",
3547                  stats.bytesSent, stats.packetsSent, stats.bytesReceived,
3548                  stats.packetsReceived);
3549 
3550     // --- Timestamps
3551     {
3552       CriticalSectionScoped lock(ts_stats_lock_.get());
3553       stats.capture_start_ntp_time_ms_ = capture_start_ntp_time_ms_;
3554     }
3555     return 0;
3556 }
3557 
3558 int Channel::SetREDStatus(bool enable, int redPayloadtype) {
3559   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3560                "Channel::SetREDStatus()");
3561 
3562   if (enable) {
3563     if (redPayloadtype < 0 || redPayloadtype > 127) {
3564       _engineStatisticsPtr->SetLastError(
3565           VE_PLTYPE_ERROR, kTraceError,
3566           "SetREDStatus() invalid RED payload type");
3567       return -1;
3568     }
3569 
3570     if (SetRedPayloadType(redPayloadtype) < 0) {
3571       _engineStatisticsPtr->SetLastError(
3572           VE_CODEC_ERROR, kTraceError,
3573           "SetREDStatus() failed to register the RED payload type in the ACM");
3574       return -1;
3575     }
3576   }
3577 
3578   if (audio_coding_->SetREDStatus(enable) != 0) {
3579     _engineStatisticsPtr->SetLastError(
3580         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3581         "SetREDStatus() failed to set RED state in the ACM");
3582     return -1;
3583   }
3584   return 0;
3585 }
3586 
3587 int
3588 Channel::GetREDStatus(bool& enabled, int& redPayloadtype)
3589 {
3590     enabled = audio_coding_->REDStatus();
3591     if (enabled)
3592     {
3593         int8_t payloadType(0);
3594         if (_rtpRtcpModule->SendREDPayloadType(payloadType) != 0)
3595         {
3596             _engineStatisticsPtr->SetLastError(
3597                 VE_RTP_RTCP_MODULE_ERROR, kTraceError,
3598                 "GetREDStatus() failed to retrieve RED PT from RTP/RTCP "
3599                 "module");
3600             return -1;
3601         }
        redPayloadtype = payloadType;
3602         WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3603                    VoEId(_instanceId, _channelId),
3604                    "GetREDStatus() => enabled=%d, redPayloadtype=%d",
3605                    enabled, redPayloadtype);
3606         return 0;
3607     }
3608     WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3609                  VoEId(_instanceId, _channelId),
3610                  "GetREDStatus() => enabled=%d", enabled);
3611     return 0;
3612 }
3613 
3614 int Channel::SetCodecFECStatus(bool enable) {
3615   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3616                "Channel::SetCodecFECStatus()");
3617 
3618   if (audio_coding_->SetCodecFEC(enable) != 0) {
3619     _engineStatisticsPtr->SetLastError(
3620         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
3621         "SetCodecFECStatus() failed to set FEC state");
3622     return -1;
3623   }
3624   return 0;
3625 }
3626 
3627 bool Channel::GetCodecFECStatus() {
3628   bool enabled = audio_coding_->CodecFEC();
3629   WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
3630                VoEId(_instanceId, _channelId),
3631                "GetCodecFECStatus() => enabled=%d", enabled);
3632   return enabled;
3633 }
3634 
3635 void Channel::SetNACKStatus(bool enable, int maxNumberOfPackets) {
3636   // None of these functions can fail.
3637   _rtpRtcpModule->SetStorePacketsStatus(enable, maxNumberOfPackets);
3638   rtp_receive_statistics_->SetMaxReorderingThreshold(maxNumberOfPackets);
3639   rtp_receiver_->SetNACKStatus(enable ? kNackRtcp : kNackOff);
3640   if (enable)
3641     audio_coding_->EnableNack(maxNumberOfPackets);
3642   else
3643     audio_coding_->DisableNack();
3644 }
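// Illustrative usage (hypothetical caller, not part of this file): NACK is
// typically enabled with a bounded resend history, e.g.
//   channel->SetNACKStatus(true, 250);  // keep ~250 sent packets available for resend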
3645 
3646 // Called when we are missing one or more packets.
3647 int Channel::ResendPackets(const uint16_t* sequence_numbers, int length) {
3648   return _rtpRtcpModule->SendNACK(sequence_numbers, length);
3649 }
3650 
3651 int
3652 Channel::StartRTPDump(const char fileNameUTF8[1024],
3653                       RTPDirections direction)
3654 {
3655     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3656                  "Channel::StartRTPDump()");
3657     if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
3658     {
3659         _engineStatisticsPtr->SetLastError(
3660             VE_INVALID_ARGUMENT, kTraceError,
3661             "StartRTPDump() invalid RTP direction");
3662         return -1;
3663     }
3664     RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
3665         &_rtpDumpIn : &_rtpDumpOut;
3666     if (rtpDumpPtr == NULL)
3667     {
3668         assert(false);
3669         return -1;
3670     }
3671     if (rtpDumpPtr->IsActive())
3672     {
3673         rtpDumpPtr->Stop();
3674     }
3675     if (rtpDumpPtr->Start(fileNameUTF8) != 0)
3676     {
3677         _engineStatisticsPtr->SetLastError(
3678             VE_BAD_FILE, kTraceError,
3679             "StartRTPDump() failed to create file");
3680         return -1;
3681     }
3682     return 0;
3683 }
3684 
3685 int
3686 Channel::StopRTPDump(RTPDirections direction)
3687 {
3688     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
3689                  "Channel::StopRTPDump()");
3690     if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
3691     {
3692         _engineStatisticsPtr->SetLastError(
3693             VE_INVALID_ARGUMENT, kTraceError,
3694             "StopRTPDump() invalid RTP direction");
3695         return -1;
3696     }
3697     RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
3698         &_rtpDumpIn : &_rtpDumpOut;
3699     if (rtpDumpPtr == NULL)
3700     {
3701         assert(false);
3702         return -1;
3703     }
3704     if (!rtpDumpPtr->IsActive())
3705     {
3706         return 0;
3707     }
3708     return rtpDumpPtr->Stop();
3709 }
3710 
3711 bool
3712 Channel::RTPDumpIsActive(RTPDirections direction)
3713 {
3714     if ((direction != kRtpIncoming) &&
3715         (direction != kRtpOutgoing))
3716     {
3717         _engineStatisticsPtr->SetLastError(
3718             VE_INVALID_ARGUMENT, kTraceError,
3719             "RTPDumpIsActive() invalid RTP direction");
3720         return false;
3721     }
3722     RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
3723         &_rtpDumpIn : &_rtpDumpOut;
3724     return rtpDumpPtr->IsActive();
3725 }
3726 
3727 void Channel::SetVideoEngineBWETarget(ViENetwork* vie_network,
3728                                       int video_channel) {
3729   CriticalSectionScoped cs(&_callbackCritSect);
3730   if (vie_network_) {
3731     vie_network_->Release();
3732     vie_network_ = NULL;
3733   }
3734   video_channel_ = -1;
3735 
3736   if (vie_network != NULL && video_channel != -1) {
3737     vie_network_ = vie_network;
3738     video_channel_ = video_channel;
3739   }
3740 }
3741 
3742 uint32_t
3743 Channel::Demultiplex(const AudioFrame& audioFrame)
3744 {
3745     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3746                  "Channel::Demultiplex()");
3747     _audioFrame.CopyFrom(audioFrame);
3748     _audioFrame.id_ = _channelId;
3749     return 0;
3750 }
3751 
3752 void Channel::Demultiplex(const int16_t* audio_data,
3753                           int sample_rate,
3754                           int number_of_frames,
3755                           int number_of_channels) {
3756   CodecInst codec;
3757   GetSendCodec(codec);
3758 
3759   if (!mono_recording_audio_.get()) {
3760     // Temporary space for DownConvertToCodecFormat.
3761     mono_recording_audio_.reset(new int16_t[kMaxMonoDataSizeSamples]);
3762   }
3763   DownConvertToCodecFormat(audio_data,
3764                            number_of_frames,
3765                            number_of_channels,
3766                            sample_rate,
3767                            codec.channels,
3768                            codec.plfreq,
3769                            mono_recording_audio_.get(),
3770                            &input_resampler_,
3771                            &_audioFrame);
3772 }
3773 
3774 uint32_t
3775 Channel::PrepareEncodeAndSend(int mixingFrequency)
3776 {
3777     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3778                  "Channel::PrepareEncodeAndSend()");
3779 
3780     if (_audioFrame.samples_per_channel_ == 0)
3781     {
3782         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
3783                      "Channel::PrepareEncodeAndSend() invalid audio frame");
3784         return -1;
3785     }
3786 
3787     if (channel_state_.Get().input_file_playing)
3788     {
3789         MixOrReplaceAudioWithFile(mixingFrequency);
3790     }
3791 
3792     bool is_muted = Mute();  // Cache locally as Mute() takes a lock.
3793     if (is_muted) {
3794       AudioFrameOperations::Mute(_audioFrame);
3795     }
3796 
3797     if (channel_state_.Get().input_external_media)
3798     {
3799         CriticalSectionScoped cs(&_callbackCritSect);
3800         const bool isStereo = (_audioFrame.num_channels_ == 2);
3801         if (_inputExternalMediaCallbackPtr)
3802         {
3803             _inputExternalMediaCallbackPtr->Process(
3804                 _channelId,
3805                 kRecordingPerChannel,
3806                (int16_t*)_audioFrame.data_,
3807                 _audioFrame.samples_per_channel_,
3808                 _audioFrame.sample_rate_hz_,
3809                 isStereo);
3810         }
3811     }
3812 
3813     InsertInbandDtmfTone();
3814 
3815     if (_includeAudioLevelIndication) {
3816       int length = _audioFrame.samples_per_channel_ * _audioFrame.num_channels_;
3817       if (is_muted) {
3818         rms_level_.ProcessMuted(length);
3819       } else {
3820         rms_level_.Process(_audioFrame.data_, length);
3821       }
3822     }
3823 
3824     return 0;
3825 }
3826 
3827 uint32_t
3828 Channel::EncodeAndSend()
3829 {
3830     WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
3831                  "Channel::EncodeAndSend()");
3832 
3833     assert(_audioFrame.num_channels_ <= 2);
3834     if (_audioFrame.samples_per_channel_ == 0)
3835     {
3836         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
3837                      "Channel::EncodeAndSend() invalid audio frame");
3838         return -1;
3839     }
3840 
3841     _audioFrame.id_ = _channelId;
3842 
3843     // --- Add 10ms of raw (PCM) audio data to the encoder @ 32kHz.
3844 
3845     // The ACM resamples internally.
3846     _audioFrame.timestamp_ = _timeStamp;
3847     if (audio_coding_->Add10MsData((AudioFrame&)_audioFrame) != 0)
3848     {
3849         WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
3850                      "Channel::EncodeAndSend() ACM encoding failed");
3851         return -1;
3852     }
3853 
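    // The RTP timestamp advances by one 10 ms frame of samples, e.g. 160
    // samples at 16 kHz or 480 samples at 48 kHz.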
3854     _timeStamp += _audioFrame.samples_per_channel_;
3855 
3856     // --- Encode if complete frame is ready
3857 
3858     // This call will trigger AudioPacketizationCallback::SendData if encoding
3859     // is done and payload is ready for packetization and transmission.
3860     return audio_coding_->Process();
3861 }
3862 
3863 int Channel::RegisterExternalMediaProcessing(
3864     ProcessingTypes type,
3865     VoEMediaProcess& processObject)
3866 {
3867     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3868                  "Channel::RegisterExternalMediaProcessing()");
3869 
3870     CriticalSectionScoped cs(&_callbackCritSect);
3871 
3872     if (kPlaybackPerChannel == type)
3873     {
3874         if (_outputExternalMediaCallbackPtr)
3875         {
3876             _engineStatisticsPtr->SetLastError(
3877                 VE_INVALID_OPERATION, kTraceError,
3878                 "Channel::RegisterExternalMediaProcessing() "
3879                 "output external media already enabled");
3880             return -1;
3881         }
3882         _outputExternalMediaCallbackPtr = &processObject;
3883         _outputExternalMedia = true;
3884     }
3885     else if (kRecordingPerChannel == type)
3886     {
3887         if (_inputExternalMediaCallbackPtr)
3888         {
3889             _engineStatisticsPtr->SetLastError(
3890                 VE_INVALID_OPERATION, kTraceError,
3891                 "Channel::RegisterExternalMediaProcessing() "
3892                 "input external media already enabled");
3893             return -1;
3894         }
3895         _inputExternalMediaCallbackPtr = &processObject;
3896         channel_state_.SetInputExternalMedia(true);
3897     }
3898     return 0;
3899 }
3900 
3901 int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type)
3902 {
3903     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3904                  "Channel::DeRegisterExternalMediaProcessing()");
3905 
3906     CriticalSectionScoped cs(&_callbackCritSect);
3907 
3908     if (kPlaybackPerChannel == type)
3909     {
3910         if (!_outputExternalMediaCallbackPtr)
3911         {
3912             _engineStatisticsPtr->SetLastError(
3913                 VE_INVALID_OPERATION, kTraceWarning,
3914                 "Channel::DeRegisterExternalMediaProcessing() "
3915                 "output external media already disabled");
3916             return 0;
3917         }
3918         _outputExternalMedia = false;
3919         _outputExternalMediaCallbackPtr = NULL;
3920     }
3921     else if (kRecordingPerChannel == type)
3922     {
3923         if (!_inputExternalMediaCallbackPtr)
3924         {
3925             _engineStatisticsPtr->SetLastError(
3926                 VE_INVALID_OPERATION, kTraceWarning,
3927                 "Channel::DeRegisterExternalMediaProcessing() "
3928                 "input external media already disabled");
3929             return 0;
3930         }
3931         channel_state_.SetInputExternalMedia(false);
3932         _inputExternalMediaCallbackPtr = NULL;
3933     }
3934 
3935     return 0;
3936 }
3937 
3938 int Channel::SetExternalMixing(bool enabled) {
3939     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3940                  "Channel::SetExternalMixing(enabled=%d)", enabled);
3941 
3942     if (channel_state_.Get().playing)
3943     {
3944         _engineStatisticsPtr->SetLastError(
3945             VE_INVALID_OPERATION, kTraceError,
3946             "Channel::SetExternalMixing() "
3947             "external mixing cannot be changed while playing.");
3948         return -1;
3949     }
3950 
3951     _externalMixing = enabled;
3952 
3953     return 0;
3954 }
3955 
3956 int
3957 Channel::GetNetworkStatistics(NetworkStatistics& stats)
3958 {
3959     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3960                  "Channel::GetNetworkStatistics()");
3961     ACMNetworkStatistics acm_stats;
3962     int return_value = audio_coding_->NetworkStatistics(&acm_stats);
3963     if (return_value >= 0) {
3964       memcpy(&stats, &acm_stats, sizeof(NetworkStatistics));
3965     }
3966     return return_value;
3967 }
3968 
3969 void Channel::GetDecodingCallStatistics(AudioDecodingCallStats* stats) const {
3970   audio_coding_->GetDecodingCallStatistics(stats);
3971 }
3972 
3973 bool Channel::GetDelayEstimate(int* jitter_buffer_delay_ms,
3974                                int* playout_buffer_delay_ms) const {
3975   if (_average_jitter_buffer_delay_us == 0) {
3976     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3977                  "Channel::GetDelayEstimate() no valid estimate.");
3978     return false;
3979   }
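  // Convert the averaged jitter-buffer delay from microseconds to milliseconds
  // with rounding (+500 before dividing by 1000), then add the inter-packet
  // receive delay.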
3980   *jitter_buffer_delay_ms = (_average_jitter_buffer_delay_us + 500) / 1000 +
3981       _recPacketDelayMs;
3982   *playout_buffer_delay_ms = playout_delay_ms_;
3983   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3984                "Channel::GetDelayEstimate()");
3985   return true;
3986 }
3987 
3988 int Channel::SetInitialPlayoutDelay(int delay_ms)
3989 {
3990   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
3991                "Channel::SetInitialPlayoutDelay()");
3992   if ((delay_ms < kVoiceEngineMinMinPlayoutDelayMs) ||
3993       (delay_ms > kVoiceEngineMaxMinPlayoutDelayMs))
3994   {
3995     _engineStatisticsPtr->SetLastError(
3996         VE_INVALID_ARGUMENT, kTraceError,
3997         "SetInitialPlayoutDelay() invalid min delay");
3998     return -1;
3999   }
4000   if (audio_coding_->SetInitialPlayoutDelay(delay_ms) != 0)
4001   {
4002     _engineStatisticsPtr->SetLastError(
4003         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4004         "SetInitialPlayoutDelay() failed to set min playout delay");
4005     return -1;
4006   }
4007   return 0;
4008 }
4009 
4010 
4011 int
4012 Channel::SetMinimumPlayoutDelay(int delayMs)
4013 {
4014     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4015                  "Channel::SetMinimumPlayoutDelay()");
4016     if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
4017         (delayMs > kVoiceEngineMaxMinPlayoutDelayMs))
4018     {
4019         _engineStatisticsPtr->SetLastError(
4020             VE_INVALID_ARGUMENT, kTraceError,
4021             "SetMinimumPlayoutDelay() invalid min delay");
4022         return -1;
4023     }
4024     if (audio_coding_->SetMinimumPlayoutDelay(delayMs) != 0)
4025     {
4026         _engineStatisticsPtr->SetLastError(
4027             VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4028             "SetMinimumPlayoutDelay() failed to set min playout delay");
4029         return -1;
4030     }
4031     return 0;
4032 }
4033 
4034 void Channel::UpdatePlayoutTimestamp(bool rtcp) {
4035   uint32_t playout_timestamp = 0;
4036 
4037   if (audio_coding_->PlayoutTimestamp(&playout_timestamp) == -1)  {
4038     WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4039                  "Channel::UpdatePlayoutTimestamp() failed to read playout"
4040                  " timestamp from the ACM");
4041     _engineStatisticsPtr->SetLastError(
4042         VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4043         "UpdatePlayoutTimestamp() failed to retrieve timestamp");
4044     return;
4045   }
4046 
4047   uint16_t delay_ms = 0;
4048   if (_audioDeviceModulePtr->PlayoutDelay(&delay_ms) == -1) {
4049     WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4050                  "Channel::UpdatePlayoutTimestamp() failed to read playout"
4051                  " delay from the ADM");
4052     _engineStatisticsPtr->SetLastError(
4053         VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4054         "UpdatePlayoutTimestamp() failed to retrieve playout delay");
4055     return;
4056   }
4057 
4058   jitter_buffer_playout_timestamp_ = playout_timestamp;
4059 
4060   // Remove the playout delay.
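  // Illustrative: with a 48 kHz playout clock and delay_ms = 40, this rewinds
  // the timestamp by 40 * (48000 / 1000) = 1920 ticks.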
4061   playout_timestamp -= (delay_ms * (GetPlayoutFrequency() / 1000));
4062 
4063   WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4064                "Channel::UpdatePlayoutTimestamp() => playoutTimestamp = %lu",
4065                playout_timestamp);
4066 
4067   if (rtcp) {
4068     playout_timestamp_rtcp_ = playout_timestamp;
4069   } else {
4070     playout_timestamp_rtp_ = playout_timestamp;
4071   }
4072   playout_delay_ms_ = delay_ms;
4073 }
4074 
4075 int Channel::GetPlayoutTimestamp(unsigned int& timestamp) {
4076   WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4077                "Channel::GetPlayoutTimestamp()");
4078   if (playout_timestamp_rtp_ == 0)  {
4079     _engineStatisticsPtr->SetLastError(
4080         VE_CANNOT_RETRIEVE_VALUE, kTraceError,
4081         "GetPlayoutTimestamp() failed to retrieve timestamp");
4082     return -1;
4083   }
4084   timestamp = playout_timestamp_rtp_;
4085   WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
4086                VoEId(_instanceId,_channelId),
4087                "GetPlayoutTimestamp() => timestamp=%u", timestamp);
4088   return 0;
4089 }
4090 
4091 int
4092 Channel::SetInitTimestamp(unsigned int timestamp)
4093 {
4094     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4095                "Channel::SetInitTimestamp()");
4096     if (channel_state_.Get().sending)
4097     {
4098         _engineStatisticsPtr->SetLastError(
4099             VE_SENDING, kTraceError, "SetInitTimestamp() already sending");
4100         return -1;
4101     }
4102     if (_rtpRtcpModule->SetStartTimestamp(timestamp) != 0)
4103     {
4104         _engineStatisticsPtr->SetLastError(
4105             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4106             "SetInitTimestamp() failed to set timestamp");
4107         return -1;
4108     }
4109     return 0;
4110 }
4111 
4112 int
4113 Channel::SetInitSequenceNumber(short sequenceNumber)
4114 {
4115     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4116                  "Channel::SetInitSequenceNumber()");
4117     if (channel_state_.Get().sending)
4118     {
4119         _engineStatisticsPtr->SetLastError(
4120             VE_SENDING, kTraceError,
4121             "SetInitSequenceNumber() already sending");
4122         return -1;
4123     }
4124     if (_rtpRtcpModule->SetSequenceNumber(sequenceNumber) != 0)
4125     {
4126         _engineStatisticsPtr->SetLastError(
4127             VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4128             "SetInitSequenceNumber() failed to set sequence number");
4129         return -1;
4130     }
4131     return 0;
4132 }
4133 
4134 int
4135 Channel::GetRtpRtcp(RtpRtcp** rtpRtcpModule, RtpReceiver** rtp_receiver) const
4136 {
4137     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4138                  "Channel::GetRtpRtcp()");
4139     *rtpRtcpModule = _rtpRtcpModule.get();
4140     *rtp_receiver = rtp_receiver_.get();
4141     return 0;
4142 }
4143 
4144 // TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
4145 // a shared helper.
4146 int32_t
4147 Channel::MixOrReplaceAudioWithFile(int mixingFrequency)
4148 {
4149     scoped_ptr<int16_t[]> fileBuffer(new int16_t[640]);
4150     int fileSamples(0);
4151 
4152     {
4153         CriticalSectionScoped cs(&_fileCritSect);
4154 
4155         if (_inputFilePlayerPtr == NULL)
4156         {
4157             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4158                          VoEId(_instanceId, _channelId),
4159                          "Channel::MixOrReplaceAudioWithFile() file player"
4160                              " doesn't exist");
4161             return -1;
4162         }
4163 
4164         if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
4165                                                       fileSamples,
4166                                                       mixingFrequency) == -1)
4167         {
4168             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4169                          VoEId(_instanceId, _channelId),
4170                          "Channel::MixOrReplaceAudioWithFile() file mixing "
4171                          "failed");
4172             return -1;
4173         }
4174         if (fileSamples == 0)
4175         {
4176             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4177                          VoEId(_instanceId, _channelId),
4178                          "Channel::MixOrReplaceAudioWithFile() file is ended");
4179             return 0;
4180         }
4181     }
4182 
4183     assert(_audioFrame.samples_per_channel_ == fileSamples);
4184 
4185     if (_mixFileWithMicrophone)
4186     {
4187         // Currently file stream is always mono.
4188         // TODO(xians): Change the code when FilePlayer supports real stereo.
4189         MixWithSat(_audioFrame.data_,
4190                    _audioFrame.num_channels_,
4191                    fileBuffer.get(),
4192                    1,
4193                    fileSamples);
4194     }
4195     else
4196     {
4197         // Replace ACM audio with file.
4198         // Currently file stream is always mono.
4199         // TODO(xians): Change the code when FilePlayer supports real stereo.
4200         _audioFrame.UpdateFrame(_channelId,
4201                                 -1,
4202                                 fileBuffer.get(),
4203                                 fileSamples,
4204                                 mixingFrequency,
4205                                 AudioFrame::kNormalSpeech,
4206                                 AudioFrame::kVadUnknown,
4207                                 1);
4208 
4209     }
4210     return 0;
4211 }
4212 
4213 int32_t
4214 Channel::MixAudioWithFile(AudioFrame& audioFrame,
4215                           int mixingFrequency)
4216 {
4217     assert(mixingFrequency <= 32000);
4218 
4219     scoped_ptr<int16_t[]> fileBuffer(new int16_t[640]);
4220     int fileSamples(0);
4221 
4222     {
4223         CriticalSectionScoped cs(&_fileCritSect);
4224 
4225         if (_outputFilePlayerPtr == NULL)
4226         {
4227             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4228                          VoEId(_instanceId, _channelId),
4229                          "Channel::MixAudioWithFile() file mixing failed");
4230             return -1;
4231         }
4232 
4233         // We should get the frequency we ask for.
4234         if (_outputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
4235                                                        fileSamples,
4236                                                        mixingFrequency) == -1)
4237         {
4238             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4239                          VoEId(_instanceId, _channelId),
4240                          "Channel::MixAudioWithFile() file mixing failed");
4241             return -1;
4242         }
4243     }
4244 
4245     if (audioFrame.samples_per_channel_ == fileSamples)
4246     {
4247         // Currently file stream is always mono.
4248         // TODO(xians): Change the code when FilePlayer supports real stereo.
4249         MixWithSat(audioFrame.data_,
4250                    audioFrame.num_channels_,
4251                    fileBuffer.get(),
4252                    1,
4253                    fileSamples);
4254     }
4255     else
4256     {
4257         WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
4258             "Channel::MixAudioWithFile() samples_per_channel_(%d) != "
4259             "fileSamples(%d)",
4260             audioFrame.samples_per_channel_, fileSamples);
4261         return -1;
4262     }
4263 
4264     return 0;
4265 }
4266 
4267 int
4268 Channel::InsertInbandDtmfTone()
4269 {
4270     // Check if we should start a new tone.
4271     if (_inbandDtmfQueue.PendingDtmf() &&
4272         !_inbandDtmfGenerator.IsAddingTone() &&
4273         _inbandDtmfGenerator.DelaySinceLastTone() >
4274         kMinTelephoneEventSeparationMs)
4275     {
4276         int8_t eventCode(0);
4277         uint16_t lengthMs(0);
4278         uint8_t attenuationDb(0);
4279 
4280         eventCode = _inbandDtmfQueue.NextDtmf(&lengthMs, &attenuationDb);
4281         _inbandDtmfGenerator.AddTone(eventCode, lengthMs, attenuationDb);
4282         if (_playInbandDtmfEvent)
4283         {
4284             // Add tone to output mixer using a reduced length to minimize
4285             // risk of echo.
4286             _outputMixerPtr->PlayDtmfTone(eventCode, lengthMs - 80,
4287                                           attenuationDb);
4288         }
4289     }
4290 
4291     if (_inbandDtmfGenerator.IsAddingTone())
4292     {
4293         uint16_t frequency(0);
4294         _inbandDtmfGenerator.GetSampleRate(frequency);
4295 
4296         if (frequency != _audioFrame.sample_rate_hz_)
4297         {
4298             // Update sample rate of Dtmf tone since the mixing frequency
4299             // has changed.
4300             _inbandDtmfGenerator.SetSampleRate(
4301                 (uint16_t) (_audioFrame.sample_rate_hz_));
4302             // Reset the tone to be added taking the new sample rate into
4303             // account.
4304             _inbandDtmfGenerator.ResetTone();
4305         }
4306 
4307         int16_t toneBuffer[320];
4308         uint16_t toneSamples(0);
4309         // Get 10ms tone segment and set time since last tone to zero
4310         if (_inbandDtmfGenerator.Get10msTone(toneBuffer, toneSamples) == -1)
4311         {
4312             WEBRTC_TRACE(kTraceWarning, kTraceVoice,
4313                        VoEId(_instanceId, _channelId),
4314                        "Channel::InsertInbandDtmfTone() inserting Dtmf failed");
4315             return -1;
4316         }
4317 
4318         // Replace mixed audio with DTMF tone.
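        // The mono tone sample is written to every channel of the frame.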
4319         for (int sample = 0;
4320             sample < _audioFrame.samples_per_channel_;
4321             sample++)
4322         {
4323             for (int channel = 0;
4324                 channel < _audioFrame.num_channels_;
4325                 channel++)
4326             {
4327                 const int index = sample * _audioFrame.num_channels_ + channel;
4328                 _audioFrame.data_[index] = toneBuffer[sample];
4329             }
4330         }
4331 
4332         assert(_audioFrame.samples_per_channel_ == toneSamples);
4333     } else
4334     {
4335         // Add 10ms to "delay-since-last-tone" counter
4336         _inbandDtmfGenerator.UpdateDelaySinceLastTone();
4337     }
4338     return 0;
4339 }
4340 
4341 int32_t
4342 Channel::SendPacketRaw(const void *data, int len, bool RTCP)
4343 {
4344     CriticalSectionScoped cs(&_callbackCritSect);
4345     if (_transportPtr == NULL)
4346     {
4347         return -1;
4348     }
4349     if (!RTCP)
4350     {
4351         return _transportPtr->SendPacket(_channelId, data, len);
4352     }
4353     else
4354     {
4355         return _transportPtr->SendRTCPPacket(_channelId, data, len);
4356     }
4357 }
4358 
4359 // Called for incoming RTP packets after successful RTP header parsing.
4360 void Channel::UpdatePacketDelay(uint32_t rtp_timestamp,
4361                                 uint16_t sequence_number) {
4362   WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
4363                "Channel::UpdatePacketDelay(timestamp=%u, sequenceNumber=%u)",
4364                rtp_timestamp, sequence_number);
4365 
4366   // Get frequency of last received payload
4367   int rtp_receive_frequency = GetPlayoutFrequency();
4368 
4369   // Update the least required delay.
4370   least_required_delay_ms_ = audio_coding_->LeastRequiredDelayMs();
4371 
4372   // |jitter_buffer_playout_timestamp_| is updated in UpdatePlayoutTimestamp()
4373   // for every incoming packet.
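  // Convert the RTP timestamp difference to milliseconds: the RTP clock runs
  // at rtp_receive_frequency Hz, i.e. rtp_receive_frequency / 1000 ticks/ms.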
4374   uint32_t timestamp_diff_ms = (rtp_timestamp -
4375       jitter_buffer_playout_timestamp_) / (rtp_receive_frequency / 1000);
4376   if (!IsNewerTimestamp(rtp_timestamp, jitter_buffer_playout_timestamp_) ||
4377       timestamp_diff_ms > (2 * kVoiceEngineMaxMinPlayoutDelayMs)) {
4378     // If |jitter_buffer_playout_timestamp_| is newer than the incoming RTP
4379     // timestamp, the difference would be negative (it wraps as unsigned), so it
4380     // is clamped to zero. This can happen when a network glitch delays a packet,
4381     // and during long comfort noise periods with clock drift.
4382     timestamp_diff_ms = 0;
4383   }
4384 
4385   uint16_t packet_delay_ms = (rtp_timestamp - _previousTimestamp) /
4386       (rtp_receive_frequency / 1000);
4387 
4388   _previousTimestamp = rtp_timestamp;
4389 
4390   if (timestamp_diff_ms == 0) return;
4391 
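  // Only treat inter-packet spacings in the 10-60 ms range as a plausible
  // packetization interval; other values leave _recPacketDelayMs unchanged.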
4392   if (packet_delay_ms >= 10 && packet_delay_ms <= 60) {
4393     _recPacketDelayMs = packet_delay_ms;
4394   }
4395 
4396   if (_average_jitter_buffer_delay_us == 0) {
4397     _average_jitter_buffer_delay_us = timestamp_diff_ms * 1000;
4398     return;
4399   }
4400 
4401   // Filter the average delay value using an exponential filter (alpha is
4402   // 7/8). The value is stored scaled by 1000, i.e. in microseconds, which
4403   // reduces the risk of rounding error; GetDelayEstimate() compensates for
4404   // the scaling later.
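  // Example: with a previous average of 40000 us and a new diff of 48 ms,
  // (40000 * 7 + 48000 + 500) / 8 = 41062 us.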
4405   _average_jitter_buffer_delay_us = (_average_jitter_buffer_delay_us * 7 +
4406       1000 * timestamp_diff_ms + 500) / 8;
4407 }
4408 
4409 void
4410 Channel::RegisterReceiveCodecsToRTPModule()
4411 {
4412     WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
4413                  "Channel::RegisterReceiveCodecsToRTPModule()");
4414 
4415 
4416     CodecInst codec;
4417     const uint8_t nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
4418 
4419     for (int idx = 0; idx < nSupportedCodecs; idx++)
4420     {
4421         // Open up the RTP/RTCP receiver for all supported codecs
4422         if ((audio_coding_->Codec(idx, &codec) == -1) ||
4423             (rtp_receiver_->RegisterReceivePayload(
4424                 codec.plname,
4425                 codec.pltype,
4426                 codec.plfreq,
4427                 codec.channels,
4428                 (codec.rate < 0) ? 0 : codec.rate) == -1))
4429         {
4430             WEBRTC_TRACE(
4431                          kTraceWarning,
4432                          kTraceVoice,
4433                          VoEId(_instanceId, _channelId),
4434                          "Channel::RegisterReceiveCodecsToRTPModule() unable"
4435                          " to register %s (%d/%d/%d/%d) to RTP/RTCP receiver",
4436                          codec.plname, codec.pltype, codec.plfreq,
4437                          codec.channels, codec.rate);
4438         }
4439         else
4440         {
4441             WEBRTC_TRACE(
4442                          kTraceInfo,
4443                          kTraceVoice,
4444                          VoEId(_instanceId, _channelId),
4445                          "Channel::RegisterReceiveCodecsToRTPModule() %s "
4446                          "(%d/%d/%d/%d) has been added to the RTP/RTCP "
4447                          "receiver",
4448                          codec.plname, codec.pltype, codec.plfreq,
4449                          codec.channels, codec.rate);
4450         }
4451     }
4452 }
4453 
4454 int Channel::SetSecondarySendCodec(const CodecInst& codec,
4455                                    int red_payload_type) {
4456   // Sanity check for payload type.
4457   if (red_payload_type < 0 || red_payload_type > 127) {
4458     _engineStatisticsPtr->SetLastError(
4459         VE_PLTYPE_ERROR, kTraceError,
4460         "SetSecondarySendCodec() invalid RED payload type");
4461     return -1;
4462   }
4463 
4464   if (SetRedPayloadType(red_payload_type) < 0) {
4465     _engineStatisticsPtr->SetLastError(
4466         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4467         "SetSecondarySendCodec() Failed to register RED ACM");
4468     return -1;
4469   }
4470   if (audio_coding_->RegisterSecondarySendCodec(codec) < 0) {
4471     _engineStatisticsPtr->SetLastError(
4472         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4473         "SetSecondarySendCodec() Failed to register secondary send codec in "
4474         "ACM");
4475     return -1;
4476   }
4477 
4478   return 0;
4479 }
4480 
4481 void Channel::RemoveSecondarySendCodec() {
4482   audio_coding_->UnregisterSecondarySendCodec();
4483 }
4484 
4485 int Channel::GetSecondarySendCodec(CodecInst* codec) {
4486   if (audio_coding_->SecondarySendCodec(codec) < 0) {
4487     _engineStatisticsPtr->SetLastError(
4488         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4489         "GetSecondarySendCodec() Failed to get secondary send codec from ACM");
4490     return -1;
4491   }
4492   return 0;
4493 }
4494 
4495 // Assumes that this method is called with a valid payload type.
4496 int Channel::SetRedPayloadType(int red_payload_type) {
4497   CodecInst codec;
4498   bool found_red = false;
4499 
4500   // Get default RED settings from the ACM database
4501   const int num_codecs = AudioCodingModule::NumberOfCodecs();
4502   for (int idx = 0; idx < num_codecs; idx++) {
4503     audio_coding_->Codec(idx, &codec);
4504     if (!STR_CASE_CMP(codec.plname, "RED")) {
4505       found_red = true;
4506       break;
4507     }
4508   }
4509 
4510   if (!found_red) {
4511     _engineStatisticsPtr->SetLastError(
4512         VE_CODEC_ERROR, kTraceError,
4513         "SetRedPayloadType() RED is not supported");
4514     return -1;
4515   }
4516 
4517   codec.pltype = red_payload_type;
4518   if (audio_coding_->RegisterSendCodec(codec) < 0) {
4519     _engineStatisticsPtr->SetLastError(
4520         VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
4521         "SetRedPayloadType() RED registration in ACM module failed");
4522     return -1;
4523   }
4524 
4525   if (_rtpRtcpModule->SetSendREDPayloadType(red_payload_type) != 0) {
4526     _engineStatisticsPtr->SetLastError(
4527         VE_RTP_RTCP_MODULE_ERROR, kTraceError,
4528         "SetRedPayloadType() RED registration in RTP/RTCP module failed");
4529     return -1;
4530   }
4531   return 0;
4532 }
4533 
4534 int Channel::SetSendRtpHeaderExtension(bool enable, RTPExtensionType type,
4535                                        unsigned char id) {
4536   int error = 0;
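  // Deregister first so that enabling again re-registers the extension with
  // the (possibly changed) id.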
4537   _rtpRtcpModule->DeregisterSendRtpHeaderExtension(type);
4538   if (enable) {
4539     error = _rtpRtcpModule->RegisterSendRtpHeaderExtension(type, id);
4540   }
4541   return error;
4542 }
4543 
4544 int32_t Channel::GetPlayoutFrequency() {
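  // Used e.g. by UpdatePacketDelay() to convert RTP timestamps to milliseconds;
  // for G.722 and Opus the RTP clock rate differs from the ACM playout rate.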
4545   int32_t playout_frequency = audio_coding_->PlayoutFrequency();
4546   CodecInst current_receive_codec;
4547   if (audio_coding_->ReceiveCodec(&current_receive_codec) == 0) {
4548     if (STR_CASE_CMP("G722", current_receive_codec.plname) == 0) {
4549       // Even though the actual sampling rate for G.722 audio is
4550       // 16,000 Hz, the RTP clock rate for the G722 payload format is
4551       // 8,000 Hz because that value was erroneously assigned in
4552       // RFC 1890 and must remain unchanged for backward compatibility.
4553       playout_frequency = 8000;
4554     } else if (STR_CASE_CMP("opus", current_receive_codec.plname) == 0) {
4555       // We are resampling Opus internally to 32,000 Hz until all our
4556       // DSP routines can operate at 48,000 Hz, but the RTP clock
4557       // rate for the Opus payload format is standardized to 48,000 Hz,
4558       // because that is the maximum supported decoding sampling rate.
4559       playout_frequency = 48000;
4560     }
4561   }
4562   return playout_frequency;
4563 }
4564 
4565 }  // namespace voe
4566 }  // namespace webrtc
4567