/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/voice_engine/voe_audio_processing_impl.h"

#include "webrtc/modules/audio_processing/include/audio_processing.h"
#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/logging.h"
#include "webrtc/system_wrappers/interface/trace.h"
#include "webrtc/voice_engine/channel.h"
#include "webrtc/voice_engine/include/voe_errors.h"
#include "webrtc/voice_engine/transmit_mixer.h"
#include "webrtc/voice_engine/voice_engine_impl.h"

// TODO(andrew): move to a common place.
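// The two init-check macros below bail out of the calling function when the
// VoiceEngine has not been initialized, recording VE_NOT_INITED as the last
// error. The first variant returns -1 (the error value used by this API);
// the second returns false and is used by the bool-returning queries.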
#define WEBRTC_VOICE_INIT_CHECK()                        \
  do {                                                   \
    if (!_shared->statistics().Initialized()) {          \
      _shared->SetLastError(VE_NOT_INITED, kTraceError); \
      return -1;                                         \
    }                                                    \
  } while (0)

#define WEBRTC_VOICE_INIT_CHECK_BOOL()                   \
  do {                                                   \
    if (!_shared->statistics().Initialized()) {          \
      _shared->SetLastError(VE_NOT_INITED, kTraceError); \
      return false;                                      \
    }                                                    \
  } while (0)

namespace webrtc {

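// AECM (the mobile-optimized echo controller) is the default echo control
// mode on Android and iOS; the full AEC is the default elsewhere.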
#if defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)
static const EcModes kDefaultEcMode = kEcAecm;
#else
static const EcModes kDefaultEcMode = kEcAec;
#endif

VoEAudioProcessing* VoEAudioProcessing::GetInterface(VoiceEngine* voiceEngine) {
#ifndef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
  return NULL;
#else
  if (NULL == voiceEngine) {
    return NULL;
  }
  VoiceEngineImpl* s = static_cast<VoiceEngineImpl*>(voiceEngine);
  s->AddRef();
  return s;
#endif
}
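
// Usage sketch (illustrative only; assumes a VoiceEngine instance created
// elsewhere, e.g. via VoiceEngine::Create(), and that this sub-API is
// compiled in):
//
//   VoEAudioProcessing* audio = VoEAudioProcessing::GetInterface(voe);
//   if (audio != NULL) {
//     audio->SetNsStatus(true, kNsHighSuppression);
//     audio->Release();  // Balances the AddRef() taken in GetInterface().
//   }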

#ifdef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
VoEAudioProcessingImpl::VoEAudioProcessingImpl(voe::SharedData* shared)
    : _isAecMode(kDefaultEcMode == kEcAec),
      _shared(shared) {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "VoEAudioProcessingImpl::VoEAudioProcessingImpl() - ctor");
}

VoEAudioProcessingImpl::~VoEAudioProcessingImpl() {
  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "VoEAudioProcessingImpl::~VoEAudioProcessingImpl() - dtor");
}

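// Maps the VoE NsModes value onto an AudioProcessing NoiseSuppression level
// and then enables or disables noise suppression on the shared APM instance.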
int VoEAudioProcessingImpl::SetNsStatus(bool enable, NsModes mode) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "SetNsStatus(enable=%d, mode=%d)", enable, mode);
#ifdef WEBRTC_VOICE_ENGINE_NR
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  NoiseSuppression::Level nsLevel = kDefaultNsMode;
  switch (mode) {
    case kNsDefault:
      nsLevel = kDefaultNsMode;
      break;
    case kNsUnchanged:
      nsLevel = _shared->audio_processing()->noise_suppression()->level();
      break;
    case kNsConference:
      nsLevel = NoiseSuppression::kHigh;
      break;
    case kNsLowSuppression:
      nsLevel = NoiseSuppression::kLow;
      break;
    case kNsModerateSuppression:
      nsLevel = NoiseSuppression::kModerate;
      break;
    case kNsHighSuppression:
      nsLevel = NoiseSuppression::kHigh;
      break;
    case kNsVeryHighSuppression:
      nsLevel = NoiseSuppression::kVeryHigh;
      break;
  }

  if (_shared->audio_processing()->noise_suppression()->
          set_level(nsLevel) != 0) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "SetNsStatus() failed to set Ns mode");
    return -1;
  }
  if (_shared->audio_processing()->noise_suppression()->Enable(enable) != 0) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "SetNsStatus() failed to set Ns state");
    return -1;
  }

  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "SetNsStatus() Ns is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::GetNsStatus(bool& enabled, NsModes& mode) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetNsStatus(enabled=?, mode=?)");
#ifdef WEBRTC_VOICE_ENGINE_NR
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  enabled = _shared->audio_processing()->noise_suppression()->is_enabled();
  NoiseSuppression::Level nsLevel =
      _shared->audio_processing()->noise_suppression()->level();

  switch (nsLevel) {
    case NoiseSuppression::kLow:
      mode = kNsLowSuppression;
      break;
    case NoiseSuppression::kModerate:
      mode = kNsModerateSuppression;
      break;
    case NoiseSuppression::kHigh:
      mode = kNsHighSuppression;
      break;
    case NoiseSuppression::kVeryHigh:
      mode = kNsVeryHighSuppression;
      break;
  }

  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetNsStatus() => enabled=%d, mode=%d", enabled, mode);
  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "GetNsStatus() Ns is not supported");
  return -1;
#endif
}

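// Configures the APM gain control. kAgcAdaptiveAnalog is rejected on mobile
// builds, and for the adaptive modes the audio device module's AGC is toggled
// as well so that the APM keeps receiving updated microphone levels.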
int VoEAudioProcessingImpl::SetAgcStatus(bool enable, AgcModes mode) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "SetAgcStatus(enable=%d, mode=%d)", enable, mode);
#ifdef WEBRTC_VOICE_ENGINE_AGC
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

#if defined(WEBRTC_IOS) || defined(ATA) || defined(WEBRTC_ANDROID)
  if (mode == kAgcAdaptiveAnalog) {
    _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
        "SetAgcStatus() invalid Agc mode for mobile device");
    return -1;
  }
#endif

  GainControl::Mode agcMode = kDefaultAgcMode;
  switch (mode) {
    case kAgcDefault:
      agcMode = kDefaultAgcMode;
      break;
    case kAgcUnchanged:
      agcMode = _shared->audio_processing()->gain_control()->mode();
      break;
    case kAgcFixedDigital:
      agcMode = GainControl::kFixedDigital;
      break;
    case kAgcAdaptiveAnalog:
      agcMode = GainControl::kAdaptiveAnalog;
      break;
    case kAgcAdaptiveDigital:
      agcMode = GainControl::kAdaptiveDigital;
      break;
  }

  if (_shared->audio_processing()->gain_control()->set_mode(agcMode) != 0) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "SetAgcStatus() failed to set Agc mode");
    return -1;
  }
  if (_shared->audio_processing()->gain_control()->Enable(enable) != 0) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "SetAgcStatus() failed to set Agc state");
    return -1;
  }

  if (agcMode != GainControl::kFixedDigital) {
    // Set Agc state in the ADM when adaptive Agc mode has been selected.
    // Note that we also enable the ADM Agc when Adaptive Digital mode is
    // used since we want to be able to provide the APM with updated mic
    // levels when the user modifies the mic level manually.
    if (_shared->audio_device()->SetAGC(enable) != 0) {
      _shared->SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR,
          kTraceWarning, "SetAgcStatus() failed to set Agc mode");
    }
  }

  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "SetAgcStatus() Agc is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::GetAgcStatus(bool& enabled, AgcModes& mode) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetAgcStatus(enabled=?, mode=?)");
#ifdef WEBRTC_VOICE_ENGINE_AGC
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  enabled = _shared->audio_processing()->gain_control()->is_enabled();
  GainControl::Mode agcMode =
    _shared->audio_processing()->gain_control()->mode();

  switch (agcMode) {
    case GainControl::kFixedDigital:
      mode = kAgcFixedDigital;
      break;
    case GainControl::kAdaptiveAnalog:
      mode = kAgcAdaptiveAnalog;
      break;
    case GainControl::kAdaptiveDigital:
      mode = kAgcAdaptiveDigital;
      break;
  }

  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetAgcStatus() => enabled=%d, mode=%d", enabled, mode);
  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "GetAgcStatus() Agc is not supported");
  return -1;
#endif
}

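// Applies an AgcConfig to the APM gain control: targetLeveldBOv sets the
// target peak level, digitalCompressionGaindB the compression gain, and
// limiterEnable the hard limiter.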
int VoEAudioProcessingImpl::SetAgcConfig(AgcConfig config) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "SetAgcConfig()");
#ifdef WEBRTC_VOICE_ENGINE_AGC
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  if (_shared->audio_processing()->gain_control()->set_target_level_dbfs(
      config.targetLeveldBOv) != 0) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "SetAgcConfig() failed to set target peak |level|"
        " (or envelope) of the Agc");
    return -1;
  }
  if (_shared->audio_processing()->gain_control()->set_compression_gain_db(
        config.digitalCompressionGaindB) != 0) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "SetAgcConfig() failed to set the range in |gain| "
        "the digital compression stage may apply");
    return -1;
  }
  if (_shared->audio_processing()->gain_control()->enable_limiter(
        config.limiterEnable) != 0) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "SetAgcConfig() failed to set hard limiter to the signal");
    return -1;
  }

  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "SetAgcConfig() Agc is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::GetAgcConfig(AgcConfig& config) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetAgcConfig(config=?)");
#ifdef WEBRTC_VOICE_ENGINE_AGC
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  config.targetLeveldBOv =
    _shared->audio_processing()->gain_control()->target_level_dbfs();
  config.digitalCompressionGaindB =
    _shared->audio_processing()->gain_control()->compression_gain_db();
  config.limiterEnable =
    _shared->audio_processing()->gain_control()->is_limiter_enabled();

  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetAgcConfig() => targetLeveldBOv=%u, "
                  "digitalCompressionGaindB=%u, limiterEnable=%d",
               config.targetLeveldBOv,
               config.digitalCompressionGaindB,
               config.limiterEnable);

  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "GetAgcConfig() Agc is not supported");
  return -1;
#endif
}

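// The Rx* variants below configure receive-side (far-end) processing for a
// single channel; they look the channel up in the channel manager and
// forward the call to the corresponding voe::Channel object.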
int VoEAudioProcessingImpl::SetRxNsStatus(int channel,
                                          bool enable,
                                          NsModes mode) {
  LOG_API3(channel, enable, mode);
#ifdef WEBRTC_VOICE_ENGINE_NR
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
  voe::Channel* channelPtr = ch.channel();
  if (channelPtr == NULL) {
    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
        "SetRxNsStatus() failed to locate channel");
    return -1;
  }
  return channelPtr->SetRxNsStatus(enable, mode);
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "SetRxNsStatus() NS is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::GetRxNsStatus(int channel,
                                          bool& enabled,
                                          NsModes& mode) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetRxNsStatus(channel=%d, enable=?, mode=?)", channel);
#ifdef WEBRTC_VOICE_ENGINE_NR
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
  voe::Channel* channelPtr = ch.channel();
  if (channelPtr == NULL) {
    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
        "GetRxNsStatus() failed to locate channel");
    return -1;
  }
  return channelPtr->GetRxNsStatus(enabled, mode);
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "GetRxNsStatus() NS is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::SetRxAgcStatus(int channel,
                                           bool enable,
                                           AgcModes mode) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "SetRxAgcStatus(channel=%d, enable=%d, mode=%d)",
               channel, (int)enable, (int)mode);
#ifdef WEBRTC_VOICE_ENGINE_AGC
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
  voe::Channel* channelPtr = ch.channel();
  if (channelPtr == NULL) {
    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
        "SetRxAgcStatus() failed to locate channel");
    return -1;
  }
  return channelPtr->SetRxAgcStatus(enable, mode);
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "SetRxAgcStatus() Agc is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::GetRxAgcStatus(int channel,
                                           bool& enabled,
                                           AgcModes& mode) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetRxAgcStatus(channel=%d, enable=?, mode=?)", channel);
#ifdef WEBRTC_VOICE_ENGINE_AGC
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
  voe::Channel* channelPtr = ch.channel();
  if (channelPtr == NULL) {
    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
        "GetRxAgcStatus() failed to locate channel");
    return -1;
  }
  return channelPtr->GetRxAgcStatus(enabled, mode);
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "GetRxAgcStatus() Agc is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::SetRxAgcConfig(int channel,
                                           AgcConfig config) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "SetRxAgcConfig(channel=%d)", channel);
#ifdef WEBRTC_VOICE_ENGINE_AGC
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
  voe::Channel* channelPtr = ch.channel();
  if (channelPtr == NULL) {
    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
        "SetRxAgcConfig() failed to locate channel");
    return -1;
  }
  return channelPtr->SetRxAgcConfig(config);
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "SetRxAgcConfig() Agc is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::GetRxAgcConfig(int channel, AgcConfig& config) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetRxAgcConfig(channel=%d)", channel);
#ifdef WEBRTC_VOICE_ENGINE_AGC
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
  voe::Channel* channelPtr = ch.channel();
  if (channelPtr == NULL) {
    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
        "GetRxAgcConfig() failed to locate channel");
    return -1;
  }
  return channelPtr->GetRxAgcConfig(config);
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "GetRxAgcConfig() Agc is not supported");
  return -1;
#endif
}

bool VoEAudioProcessing::DriftCompensationSupported() {
#if defined(WEBRTC_DRIFT_COMPENSATION_SUPPORTED)
  return true;
#else
  return false;
#endif
}

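// Drift compensation is only available when the build defines
// WEBRTC_DRIFT_COMPENSATION_SUPPORTED; EnableDriftCompensation() fails with
// VE_APM_ERROR on other platforms.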
int VoEAudioProcessingImpl::EnableDriftCompensation(bool enable) {
  LOG_API1(enable);
  WEBRTC_VOICE_INIT_CHECK();

  if (!DriftCompensationSupported()) {
    _shared->SetLastError(VE_APM_ERROR, kTraceWarning,
        "Drift compensation is not supported on this platform.");
    return -1;
  }

  EchoCancellation* aec = _shared->audio_processing()->echo_cancellation();
  if (aec->enable_drift_compensation(enable) != 0) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "aec->enable_drift_compensation() failed");
    return -1;
  }
  return 0;
}

bool VoEAudioProcessingImpl::DriftCompensationEnabled() {
  LOG_API0();
  WEBRTC_VOICE_INIT_CHECK_BOOL();

  EchoCancellation* aec = _shared->audio_processing()->echo_cancellation();
  return aec->is_drift_compensation_enabled();
}

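// AEC and AECM are mutually exclusive: enabling one first disables the other.
// kEcConference selects the full AEC with high suppression; the other AEC
// modes use moderate suppression. _isAecMode tracks which engine is active
// so that kEcUnchanged keeps the current selection.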
int VoEAudioProcessingImpl::SetEcStatus(bool enable, EcModes mode) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "SetEcStatus(enable=%d, mode=%d)", enable, mode);
#ifdef WEBRTC_VOICE_ENGINE_ECHO
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  // AEC mode
  if ((mode == kEcDefault) ||
      (mode == kEcConference) ||
      (mode == kEcAec) ||
      ((mode == kEcUnchanged) &&
       (_isAecMode == true))) {
    if (enable) {
      // Disable the AECM before enabling the AEC.
      if (_shared->audio_processing()->echo_control_mobile()->is_enabled()) {
        _shared->SetLastError(VE_APM_ERROR, kTraceWarning,
            "SetEcStatus() disable AECM before enabling AEC");
        if (_shared->audio_processing()->echo_control_mobile()->
            Enable(false) != 0) {
          _shared->SetLastError(VE_APM_ERROR, kTraceError,
              "SetEcStatus() failed to disable AECM");
          return -1;
        }
      }
    }
    if (_shared->audio_processing()->echo_cancellation()->Enable(enable) != 0) {
      _shared->SetLastError(VE_APM_ERROR, kTraceError,
          "SetEcStatus() failed to set AEC state");
      return -1;
    }
    if (mode == kEcConference) {
      if (_shared->audio_processing()->echo_cancellation()->
          set_suppression_level(EchoCancellation::kHighSuppression) != 0) {
        _shared->SetLastError(VE_APM_ERROR, kTraceError,
            "SetEcStatus() failed to set aggressiveness to high");
        return -1;
      }
    } else {
      if (_shared->audio_processing()->echo_cancellation()->
          set_suppression_level(
            EchoCancellation::kModerateSuppression) != 0) {
        _shared->SetLastError(VE_APM_ERROR, kTraceError,
            "SetEcStatus() failed to set aggressiveness to moderate");
        return -1;
      }
    }

    _isAecMode = true;
  } else if ((mode == kEcAecm) ||
             ((mode == kEcUnchanged) &&
              (_isAecMode == false))) {
    if (enable) {
      // Disable the AEC before enabling the AECM.
      if (_shared->audio_processing()->echo_cancellation()->is_enabled()) {
        _shared->SetLastError(VE_APM_ERROR, kTraceWarning,
            "SetEcStatus() disable AEC before enabling AECM");
        if (_shared->audio_processing()->echo_cancellation()->
            Enable(false) != 0) {
          _shared->SetLastError(VE_APM_ERROR, kTraceError,
              "SetEcStatus() failed to disable AEC");
          return -1;
        }
      }
    }
    if (_shared->audio_processing()->echo_control_mobile()->
        Enable(enable) != 0) {
      _shared->SetLastError(VE_APM_ERROR, kTraceError,
          "SetEcStatus() failed to set AECM state");
      return -1;
    }
    _isAecMode = false;
  } else {
    _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
        "SetEcStatus() invalid EC mode");
    return -1;
  }

  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "SetEcStatus() EC is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::GetEcStatus(bool& enabled, EcModes& mode) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetEcStatus()");
#ifdef WEBRTC_VOICE_ENGINE_ECHO
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  if (_isAecMode == true) {
    mode = kEcAec;
    enabled = _shared->audio_processing()->echo_cancellation()->is_enabled();
  } else {
    mode = kEcAecm;
    enabled = _shared->audio_processing()->echo_control_mobile()->
              is_enabled();
  }

  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetEcStatus() => enabled=%i, mode=%i",
               enabled, (int)mode);
  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "GetEcStatus() EC is not supported");
  return -1;
#endif
}

void VoEAudioProcessingImpl::SetDelayOffsetMs(int offset) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "SetDelayOffsetMs(offset = %d)", offset);
  _shared->audio_processing()->set_delay_offset_ms(offset);
}

int VoEAudioProcessingImpl::DelayOffsetMs() {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "DelayOffsetMs()");
  return _shared->audio_processing()->delay_offset_ms();
}

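// Maps the VoE AecmModes value onto an EchoControlMobile routing mode and
// updates the comfort-noise setting of the mobile echo controller.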
int VoEAudioProcessingImpl::SetAecmMode(AecmModes mode, bool enableCNG) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "SetAECMMode(mode = %d)", mode);
#ifdef WEBRTC_VOICE_ENGINE_ECHO
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  EchoControlMobile::RoutingMode aecmMode(
      EchoControlMobile::kQuietEarpieceOrHeadset);

  switch (mode) {
    case kAecmQuietEarpieceOrHeadset:
      aecmMode = EchoControlMobile::kQuietEarpieceOrHeadset;
      break;
    case kAecmEarpiece:
      aecmMode = EchoControlMobile::kEarpiece;
      break;
    case kAecmLoudEarpiece:
      aecmMode = EchoControlMobile::kLoudEarpiece;
      break;
    case kAecmSpeakerphone:
      aecmMode = EchoControlMobile::kSpeakerphone;
      break;
    case kAecmLoudSpeakerphone:
      aecmMode = EchoControlMobile::kLoudSpeakerphone;
      break;
  }

  if (_shared->audio_processing()->echo_control_mobile()->
      set_routing_mode(aecmMode) != 0) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "SetAECMMode() failed to set AECM routing mode");
    return -1;
  }
  if (_shared->audio_processing()->echo_control_mobile()->
      enable_comfort_noise(enableCNG) != 0) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "SetAECMMode() failed to set comfort noise state for AECM");
    return -1;
  }

  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "SetAECMMode() EC is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::GetAecmMode(AecmModes& mode, bool& enabledCNG) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetAECMMode(mode=?)");
#ifdef WEBRTC_VOICE_ENGINE_ECHO
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  enabledCNG = false;

  EchoControlMobile::RoutingMode aecmMode =
      _shared->audio_processing()->echo_control_mobile()->routing_mode();
  enabledCNG = _shared->audio_processing()->echo_control_mobile()->
      is_comfort_noise_enabled();

  switch (aecmMode) {
    case EchoControlMobile::kQuietEarpieceOrHeadset:
      mode = kAecmQuietEarpieceOrHeadset;
      break;
    case EchoControlMobile::kEarpiece:
      mode = kAecmEarpiece;
      break;
    case EchoControlMobile::kLoudEarpiece:
      mode = kAecmLoudEarpiece;
      break;
    case EchoControlMobile::kSpeakerphone:
      mode = kAecmSpeakerphone;
      break;
    case EchoControlMobile::kLoudSpeakerphone:
      mode = kAecmLoudSpeakerphone;
      break;
  }

  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "GetAECMMode() EC is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::EnableHighPassFilter(bool enable) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "EnableHighPassFilter(%d)", enable);
  if (_shared->audio_processing()->high_pass_filter()->Enable(enable) !=
      AudioProcessing::kNoError) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "HighPassFilter::Enable() failed.");
    return -1;
  }

  return 0;
}

bool VoEAudioProcessingImpl::IsHighPassFilterEnabled() {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "IsHighPassFilterEnabled()");
  return _shared->audio_processing()->high_pass_filter()->is_enabled();
}

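// The Rx VAD observer receives voice-activity decisions for the stream
// received on the given channel; registration is forwarded to the channel.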
int VoEAudioProcessingImpl::RegisterRxVadObserver(
  int channel,
  VoERxVadCallback& observer) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "RegisterRxVadObserver()");
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }
  voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
  voe::Channel* channelPtr = ch.channel();
  if (channelPtr == NULL) {
    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
        "RegisterRxVadObserver() failed to locate channel");
    return -1;
  }
  return channelPtr->RegisterRxVadObserver(observer);
}

int VoEAudioProcessingImpl::DeRegisterRxVadObserver(int channel) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "DeRegisterRxVadObserver()");
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }
  voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
  voe::Channel* channelPtr = ch.channel();
  if (channelPtr == NULL) {
    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
        "DeRegisterRxVadObserver() failed to locate channel");
    return -1;
  }

  return channelPtr->DeRegisterRxVadObserver();
}

int VoEAudioProcessingImpl::VoiceActivityIndicator(int channel) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "VoiceActivityIndicator(channel=%d)", channel);
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  voe::ChannelOwner ch = _shared->channel_manager().GetChannel(channel);
  voe::Channel* channelPtr = ch.channel();
  if (channelPtr == NULL) {
    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
        "VoiceActivityIndicator() failed to locate channel");
    return -1;
  }
  int activity(-1);
  channelPtr->VoiceActivityIndicator(activity);

  return activity;
}

int VoEAudioProcessingImpl::SetEcMetricsStatus(bool enable) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "SetEcMetricsStatus(enable=%d)", enable);
#ifdef WEBRTC_VOICE_ENGINE_ECHO
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  if ((_shared->audio_processing()->echo_cancellation()->enable_metrics(enable)
       != 0) ||
      (_shared->audio_processing()->echo_cancellation()->enable_delay_logging(
         enable) != 0)) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "SetEcMetricsStatus() unable to set EC metrics mode");
    return -1;
  }
  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "SetEcMetricsStatus() EC is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::GetEcMetricsStatus(bool& enabled) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetEcMetricsStatus(enabled=?)");
#ifdef WEBRTC_VOICE_ENGINE_ECHO
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  bool echo_mode =
    _shared->audio_processing()->echo_cancellation()->are_metrics_enabled();
  bool delay_mode = _shared->audio_processing()->echo_cancellation()->
      is_delay_logging_enabled();

  if (echo_mode != delay_mode) {
    _shared->SetLastError(VE_APM_ERROR, kTraceError,
        "GetEcMetricsStatus() delay logging and echo mode are not the same");
    return -1;
  }

  enabled = echo_mode;

  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetEcMetricsStatus() => enabled=%d", enabled);
  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "GetEcMetricsStatus() EC is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::GetEchoMetrics(int& ERL,
                                           int& ERLE,
                                           int& RERL,
                                           int& A_NLP) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetEchoMetrics(ERL=?, ERLE=?, RERL=?, A_NLP=?)");
#ifdef WEBRTC_VOICE_ENGINE_ECHO
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }
  if (!_shared->audio_processing()->echo_cancellation()->is_enabled()) {
    _shared->SetLastError(VE_APM_ERROR, kTraceWarning,
        "GetEchoMetrics() AudioProcessingModule AEC is not enabled");
    return -1;
  }

  // Get Echo Metrics from Audio Processing Module.
  EchoCancellation::Metrics echoMetrics;
  if (_shared->audio_processing()->echo_cancellation()->GetMetrics(
          &echoMetrics)) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_shared->instance_id(), -1),
                 "GetEchoMetrics(), AudioProcessingModule metrics error");
    return -1;
  }

  // Echo quality metrics.
  ERL = echoMetrics.echo_return_loss.instant;
  ERLE = echoMetrics.echo_return_loss_enhancement.instant;
  RERL = echoMetrics.residual_echo_return_loss.instant;
  A_NLP = echoMetrics.a_nlp.instant;

  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetEchoMetrics() => ERL=%d, ERLE=%d, RERL=%d, A_NLP=%d",
               ERL, ERLE, RERL, A_NLP);
  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "GetEchoMetrics() EC is not supported");
  return -1;
#endif
}

int VoEAudioProcessingImpl::GetEcDelayMetrics(int& delay_median,
                                              int& delay_std) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetEcDelayMetrics(median=?, std=?)");
#ifdef WEBRTC_VOICE_ENGINE_ECHO
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }
  if (!_shared->audio_processing()->echo_cancellation()->is_enabled()) {
    _shared->SetLastError(VE_APM_ERROR, kTraceWarning,
        "GetEcDelayMetrics() AudioProcessingModule AEC is not enabled");
    return -1;
  }

  int median = 0;
  int std = 0;
  // Get delay-logging values from Audio Processing Module.
  if (_shared->audio_processing()->echo_cancellation()->GetDelayMetrics(
        &median, &std)) {
    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_shared->instance_id(), -1),
                 "GetEcDelayMetrics(), AudioProcessingModule delay-logging "
                 "error");
    return -1;
  }

  // EC delay-logging metrics
  delay_median = median;
  delay_std = std;

  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetEcDelayMetrics() => delay_median=%d, delay_std=%d",
               delay_median, delay_std);
  return 0;
#else
  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
      "GetEcDelayMetrics() EC is not supported");
  return -1;
#endif
}

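// The debug-recording calls below simply forward to the shared APM instance;
// the two Start overloads accept either a UTF-8 file name or an open FILE*.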
int VoEAudioProcessingImpl::StartDebugRecording(const char* fileNameUTF8) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "StartDebugRecording()");
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  return _shared->audio_processing()->StartDebugRecording(fileNameUTF8);
}

int VoEAudioProcessingImpl::StartDebugRecording(FILE* file_handle) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "StartDebugRecording()");
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  return _shared->audio_processing()->StartDebugRecording(file_handle);
}

int VoEAudioProcessingImpl::StopDebugRecording() {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "StopDebugRecording()");
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  return _shared->audio_processing()->StopDebugRecording();
}

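// Typing detection piggybacks on the APM voice-activity detector: enabling it
// turns the VAD on at very low likelihood, and the parameter tuning and the
// TimeSinceLastTyping() query are forwarded to the transmit mixer. It is only
// available when WEBRTC_VOICE_ENGINE_TYPING_DETECTION is defined.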
int VoEAudioProcessingImpl::SetTypingDetectionStatus(bool enable) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "SetTypingDetectionStatus()");
#if !defined(WEBRTC_VOICE_ENGINE_TYPING_DETECTION)
  NOT_SUPPORTED(_shared->statistics());
#else
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  // Just use the VAD state to determine if we should enable typing detection
  // or not.
  if (_shared->audio_processing()->voice_detection()->Enable(enable)) {
    _shared->SetLastError(VE_APM_ERROR, kTraceWarning,
        "SetTypingDetectionStatus() failed to set VAD state");
    return -1;
  }
  if (_shared->audio_processing()->voice_detection()->set_likelihood(
          VoiceDetection::kVeryLowLikelihood)) {
    _shared->SetLastError(VE_APM_ERROR, kTraceWarning,
        "SetTypingDetectionStatus() failed to set VAD likelihood to low");
    return -1;
  }

  return 0;
#endif
}

int VoEAudioProcessingImpl::GetTypingDetectionStatus(bool& enabled) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "GetTypingDetectionStatus()");
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }

  // Just use the VAD state to determine if typing detection is enabled.
  enabled = _shared->audio_processing()->voice_detection()->is_enabled();

  return 0;
}

int VoEAudioProcessingImpl::TimeSinceLastTyping(int& seconds) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "TimeSinceLastTyping()");
#if !defined(WEBRTC_VOICE_ENGINE_TYPING_DETECTION)
  NOT_SUPPORTED(_shared->statistics());
#else
  if (!_shared->statistics().Initialized()) {
    _shared->SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }
  // Check if typing detection is enabled.
  bool enabled = _shared->audio_processing()->voice_detection()->is_enabled();
  if (enabled) {
    _shared->transmit_mixer()->TimeSinceLastTyping(seconds);
    return 0;
  } else {
    _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
        "SetTypingDetectionStatus is not enabled");
    return -1;
  }
#endif
}

int VoEAudioProcessingImpl::SetTypingDetectionParameters(int timeWindow,
                                                         int costPerTyping,
                                                         int reportingThreshold,
                                                         int penaltyDecay,
                                                         int typeEventDelay) {
  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
               "SetTypingDetectionParameters()");
#if !defined(WEBRTC_VOICE_ENGINE_TYPING_DETECTION)
  NOT_SUPPORTED(_shared->statistics());
#else
  if (!_shared->statistics().Initialized()) {
    _shared->statistics().SetLastError(VE_NOT_INITED, kTraceError);
    return -1;
  }
  return (_shared->transmit_mixer()->SetTypingDetectionParameters(timeWindow,
      costPerTyping, reportingThreshold, penaltyDecay, typeEventDelay));
#endif
}

void VoEAudioProcessingImpl::EnableStereoChannelSwapping(bool enable) {
  LOG_API1(enable);
  _shared->transmit_mixer()->EnableStereoChannelSwapping(enable);
}

bool VoEAudioProcessingImpl::IsStereoChannelSwappingEnabled() {
  LOG_API0();
  return _shared->transmit_mixer()->IsStereoChannelSwappingEnabled();
}

#endif  // #ifdef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API

}  // namespace webrtc