/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

/*
 * Android audio device implementation (JNI/AudioTrack usage)
 */

// TODO(xians): Break out attach and detach current thread to JVM to
// separate functions.

#include "webrtc/modules/audio_device/android/audio_track_jni.h"

#include <android/log.h>
#include <stdlib.h>

#include "webrtc/modules/audio_device/audio_device_config.h"
#include "webrtc/modules/audio_device/audio_device_utility.h"

#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"

namespace webrtc {

JavaVM* AudioTrackJni::globalJvm = NULL;
JNIEnv* AudioTrackJni::globalJNIEnv = NULL;
jobject AudioTrackJni::globalContext = NULL;
jclass AudioTrackJni::globalScClass = NULL;
int32_t AudioTrackJni::SetAndroidAudioDeviceObjects(void* javaVM, void* env,
                                                    void* context) {
  assert(env);
  globalJvm = reinterpret_cast<JavaVM*>(javaVM);
  globalJNIEnv = reinterpret_cast<JNIEnv*>(env);
  // Get java class type (note path to class package).
  jclass javaScClassLocal = globalJNIEnv->FindClass(
      "org/webrtc/voiceengine/WebRtcAudioTrack");
  if (!javaScClassLocal) {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
                 "%s: could not find java class", __FUNCTION__);
    return -1;  // exception thrown
  }

  // Create a global reference to the class (to tell JNI that we are
  // referencing it after this function has returned).
  globalScClass = reinterpret_cast<jclass>(
      globalJNIEnv->NewGlobalRef(javaScClassLocal));
  if (!globalScClass) {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
                 "%s: could not create reference", __FUNCTION__);
    return -1;
  }

  globalContext = globalJNIEnv->NewGlobalRef(
      reinterpret_cast<jobject>(context));
  if (!globalContext) {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
                 "%s: could not create context reference", __FUNCTION__);
    return -1;
  }

  // Delete the local class ref; we only use the global ref.
  globalJNIEnv->DeleteLocalRef(javaScClassLocal);
  return 0;
}
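
// A minimal, hypothetical sketch (the names `jvm` and `appContext` are
// illustrative, not part of this file) of how a host application might call
// the function above from its JNI_OnLoad hook:
//
//   jint JNI_OnLoad(JavaVM* jvm, void* /*reserved*/) {
//     JNIEnv* env = NULL;
//     if (jvm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_4) !=
//         JNI_OK) {
//       return -1;
//     }
//     // appContext: a global ref to an android.content.Context jobject
//     // that the application obtained from Java.
//     webrtc::AudioTrackJni::SetAndroidAudioDeviceObjects(jvm, env,
//                                                         appContext);
//     return JNI_VERSION_1_4;
//   }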

void AudioTrackJni::ClearAndroidAudioDeviceObjects() {
  WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1,
               "%s: clearing the saved Android audio device objects",
               __FUNCTION__);

  globalJvm = NULL;
  if (!globalJNIEnv) {
    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, -1,
                 "%s: saved env already NULL", __FUNCTION__);
    return;
  }

  globalJNIEnv->DeleteGlobalRef(globalContext);
  globalContext = reinterpret_cast<jobject>(NULL);

  globalJNIEnv->DeleteGlobalRef(globalScClass);
  globalScClass = reinterpret_cast<jclass>(NULL);

  globalJNIEnv = reinterpret_cast<JNIEnv*>(NULL);
}
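
// The methods below all repeat the same GetEnv / AttachCurrentThread /
// DetachCurrentThread dance. A minimal RAII sketch of how that boilerplate
// could be factored out, per the TODO at the top of this file (the class
// name is hypothetical and nothing in this file uses it yet; kept for
// reference only):
class ScopedJvmAttachment {
 public:
  explicit ScopedJvmAttachment(JavaVM* jvm)
      : jvm_(jvm), env_(NULL), attached_(false) {
    // Reuse the env if this thread is already attached, otherwise attach.
    if (jvm_->GetEnv(reinterpret_cast<void**>(&env_), JNI_VERSION_1_4) !=
        JNI_OK) {
      attached_ = (jvm_->AttachCurrentThread(&env_, NULL) == JNI_OK);
    }
  }
  ~ScopedJvmAttachment() {
    // Only detach a thread that this object attached itself.
    if (attached_) {
      jvm_->DetachCurrentThread();
    }
  }
  JNIEnv* env() const { return env_; }  // NULL if attachment failed.

 private:
  JavaVM* jvm_;
  JNIEnv* env_;
  bool attached_;
};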

AudioTrackJni::AudioTrackJni(const int32_t id)
    : _javaVM(NULL),
      _jniEnvPlay(NULL),
      _javaScClass(0),
      _javaScObj(0),
      _javaPlayBuffer(0),
      _javaDirectPlayBuffer(NULL),
      _javaMidPlayAudio(0),
      _ptrAudioBuffer(NULL),
      _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
      _id(id),
      _initialized(false),
      _timeEventPlay(*EventWrapper::Create()),
      _playStartStopEvent(*EventWrapper::Create()),
      _ptrThreadPlay(NULL),
      _playThreadID(0),
      _playThreadIsInitialized(false),
      _shutdownPlayThread(false),
      _playoutDeviceIsSpecified(false),
      _playing(false),
      _playIsInitialized(false),
      _speakerIsInitialized(false),
      _startPlay(false),
      _playWarning(0),
      _playError(0),
      _delayPlayout(0),
      _samplingFreqOut((N_PLAY_SAMPLES_PER_SEC / 1000)),
      _maxSpeakerVolume(0) {
}

AudioTrackJni::~AudioTrackJni() {
  WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
               "%s destroyed", __FUNCTION__);

  Terminate();

  delete &_playStartStopEvent;
  delete &_timeEventPlay;
  delete &_critSect;
}

int32_t AudioTrackJni::Init() {
  CriticalSectionScoped lock(&_critSect);
  if (_initialized)
  {
    return 0;
  }

  _playWarning = 0;
  _playError = 0;

  // Init Java member variables and set up the JNI interface to the
  // WebRtcAudioTrack java class.
  if (InitJavaResources() != 0)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "%s: Failed to init Java resources", __FUNCTION__);
    return -1;
  }

  // Check the sample rate to be used for playback
  // and the max playout volume.
  if (InitSampleRate() != 0)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "%s: Failed to init samplerate", __FUNCTION__);
    return -1;
  }

  const char* threadName = "jni_audio_render_thread";
  _ptrThreadPlay = ThreadWrapper::CreateThread(PlayThreadFunc, this,
                                               kRealtimePriority, threadName);
  if (_ptrThreadPlay == NULL)
  {
    WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
                 " failed to create the play audio thread");
    return -1;
  }

  unsigned int threadID = 0;
  if (!_ptrThreadPlay->Start(threadID))
  {
    WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
                 " failed to start the play audio thread");
    delete _ptrThreadPlay;
    _ptrThreadPlay = NULL;
    return -1;
  }
  _playThreadID = threadID;

  _initialized = true;

  return 0;
}

int32_t AudioTrackJni::Terminate() {
  CriticalSectionScoped lock(&_critSect);
  if (!_initialized)
  {
    return 0;
  }

  StopPlayout();
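
  // Shutdown hand-shake with the playout thread: flag it to shut down and
  // wake it up; it must detach itself from the JVM on its own stack and
  // then signal _playStartStopEvent back (see PlayThreadProcess()).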
  _shutdownPlayThread = true;
  _timeEventPlay.Set();  // Release the play thread from its waiting state
  if (_ptrThreadPlay)
  {
    // First, the thread must detach itself from the Java VM
    _critSect.Leave();
    if (kEventSignaled != _playStartStopEvent.Wait(5000))
    {
      WEBRTC_TRACE(
          kTraceError,
          kTraceAudioDevice,
          _id,
          "%s: Playout thread shutdown timed out, cannot "
          "terminate thread",
          __FUNCTION__);
      // If we close the thread anyway, the app will crash
      return -1;
    }
    _playStartStopEvent.Reset();
    _critSect.Enter();

    // Close down the play thread
    ThreadWrapper* tmpThread = _ptrThreadPlay;
    _ptrThreadPlay = NULL;
    _critSect.Leave();
    tmpThread->SetNotAlive();
    _timeEventPlay.Set();
    if (tmpThread->Stop())
    {
      delete tmpThread;
      _jniEnvPlay = NULL;
    }
    else
    {
      WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                   " failed to close down the play audio thread");
    }
    _critSect.Enter();

    _playThreadIsInitialized = false;
  }
  _speakerIsInitialized = false;
  _playoutDeviceIsSpecified = false;

  // Get the JNI env for this thread; attach it to the JVM if needed.
  JNIEnv* env;
  bool isAttached = false;
  if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
  {
    // Try to attach the thread and get the env
    jint res = _javaVM->AttachCurrentThread(&env, NULL);
    if ((res < 0) || !env)
    {
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                   "%s: Could not attach thread to JVM (%d, %p)",
                   __FUNCTION__, res, env);
      return -1;
    }
    isAttached = true;
  }

  // Make method IDs and buffer pointers unusable
  _javaMidPlayAudio = 0;
  _javaDirectPlayBuffer = NULL;

  // Delete the references to the java buffers; this allows the
  // garbage collector to delete them
  env->DeleteGlobalRef(_javaPlayBuffer);
  _javaPlayBuffer = 0;

  // Delete the references to the java object and class; this allows the
  // garbage collector to delete them
  env->DeleteGlobalRef(_javaScObj);
  _javaScObj = 0;
  _javaScClass = 0;

  // Detach this thread if it was attached
  if (isAttached)
  {
    if (_javaVM->DetachCurrentThread() < 0)
    {
      WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                   "%s: Could not detach thread from JVM", __FUNCTION__);
    }
  }

  _initialized = false;

  return 0;
}

int32_t AudioTrackJni::PlayoutDeviceName(uint16_t index,
                                         char name[kAdmMaxDeviceNameSize],
                                         char guid[kAdmMaxGuidSize]) {
  if (0 != index)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Device index is out of range [0,0]");
    return -1;
  }

  // Return empty string
  memset(name, 0, kAdmMaxDeviceNameSize);

  if (guid)
  {
    memset(guid, 0, kAdmMaxGuidSize);
  }

  return 0;
}

int32_t AudioTrackJni::SetPlayoutDevice(uint16_t index) {
  if (_playIsInitialized)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Playout already initialized");
    return -1;
  }

  if (0 != index)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Device index is out of range [0,0]");
    return -1;
  }

  // Do nothing but set a flag; this is to have consistent behavior
  // with other platforms.
  _playoutDeviceIsSpecified = true;

  return 0;
}

int32_t AudioTrackJni::SetPlayoutDevice(
    AudioDeviceModule::WindowsDeviceType device) {
  WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
               " API call not supported on this platform");
  return -1;
}

int32_t AudioTrackJni::PlayoutIsAvailable(bool& available) {  // NOLINT
  available = false;

  // Try to initialize the playout side
  int32_t res = InitPlayout();

  // Cancel effect of initialization
  StopPlayout();

  if (res != -1)
  {
    available = true;
  }

  return res;
}

int32_t AudioTrackJni::InitPlayout() {
  CriticalSectionScoped lock(&_critSect);

  if (!_initialized)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Not initialized");
    return -1;
  }

  if (_playing)
  {
    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                 " Playout already started");
    return -1;
  }

  if (!_playoutDeviceIsSpecified)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Playout device is not specified");
    return -1;
  }

  if (_playIsInitialized)
  {
    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
                 " Playout already initialized");
    return 0;
  }

  // Initialize the speaker
  if (InitSpeaker() == -1)
  {
    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                 " InitSpeaker() failed");
  }

  // Get the JNI env for this thread; attach it to the JVM if needed.
  JNIEnv* env;
  bool isAttached = false;
  if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
  {
    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
                 "attaching");

    // Try to attach the thread and get the env
    jint res = _javaVM->AttachCurrentThread(&env, NULL);
    if ((res < 0) || !env)
    {
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                   " Could not attach thread to JVM (%d, %p)", res, env);
      return -1;
    }
    isAttached = true;
  }

  // Get the method ID
  jmethodID initPlaybackID = env->GetMethodID(_javaScClass, "InitPlayback",
                                              "(I)I");

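  // _samplingFreqOut is stored in kHz, so 44.1 kHz cannot be represented
  // exactly; the special value 44 is used to mean 44100 Hz here.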
  int samplingFreq = 44100;
  if (_samplingFreqOut != 44)
  {
    samplingFreq = _samplingFreqOut * 1000;
  }

  int retVal = -1;

  // Call java sc object method
  jint res = env->CallIntMethod(_javaScObj, initPlaybackID, samplingFreq);
  if (res < 0)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "InitPlayback failed (%d)", res);
  }
  else
  {
    // Set the audio device buffer sampling rate. Use samplingFreq (in Hz)
    // so that the 44.1 kHz case is not truncated to 44000 Hz.
    _ptrAudioBuffer->SetPlayoutSampleRate(samplingFreq);
    _playIsInitialized = true;
    retVal = 0;
  }

  // Detach this thread if it was attached
  if (isAttached)
  {
    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
                 "detaching");
    if (_javaVM->DetachCurrentThread() < 0)
    {
      WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                   " Could not detach thread from JVM");
    }
  }

  return retVal;
}

int32_t AudioTrackJni::StartPlayout() {
  CriticalSectionScoped lock(&_critSect);

  if (!_playIsInitialized)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Playout not initialized");
    return -1;
  }

  if (_playing)
  {
    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
                 " Playout already started");
    return 0;
  }

  // Get the JNI env for this thread; attach it to the JVM if needed.
  JNIEnv* env;
  bool isAttached = false;
  if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
  {
    // Try to attach the thread and get the env
    jint res = _javaVM->AttachCurrentThread(&env, NULL);
    if ((res < 0) || !env)
    {
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                   " Could not attach thread to JVM (%d, %p)", res, env);
      return -1;
    }
    isAttached = true;
  }

  // Get the method ID
  jmethodID startPlaybackID = env->GetMethodID(_javaScClass, "StartPlayback",
                                               "()I");

  // Call java sc object method
  jint res = env->CallIntMethod(_javaScObj, startPlaybackID);
  if (res < 0)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "StartPlayback failed (%d)", res);
    return -1;
  }

  _playWarning = 0;
  _playError = 0;

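  // Start hand-shake with the playout thread: PlayThreadProcess() picks up
  // _startPlay, flips _playing to true, and acknowledges by signaling
  // _playStartStopEvent.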
  // Signal to playout thread that we want to start
  _startPlay = true;
  _timeEventPlay.Set();  // Release thread from waiting state
  _critSect.Leave();
  // Wait for thread to init
  if (kEventSignaled != _playStartStopEvent.Wait(5000))
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Timeout or error starting");
  }
  _playStartStopEvent.Reset();
  _critSect.Enter();

  // Detach this thread if it was attached
  if (isAttached)
  {
    if (_javaVM->DetachCurrentThread() < 0)
    {
      WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                   " Could not detach thread from JVM");
    }
  }

  return 0;
}

int32_t AudioTrackJni::StopPlayout() {
  CriticalSectionScoped lock(&_critSect);

  if (!_playIsInitialized)
  {
    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
                 " Playout is not initialized");
    return 0;
  }

  // Get the JNI env for this thread; attach it to the JVM if needed.
  JNIEnv* env;
  bool isAttached = false;
  if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
  {
    // Try to attach the thread and get the env
    jint res = _javaVM->AttachCurrentThread(&env, NULL);
    if ((res < 0) || !env)
    {
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                   " Could not attach thread to JVM (%d, %p)", res, env);
      return -1;
    }
    isAttached = true;
  }

  // Get the method ID
  jmethodID stopPlaybackID = env->GetMethodID(_javaScClass, "StopPlayback",
                                              "()I");

  // Call java sc object method
  jint res = env->CallIntMethod(_javaScObj, stopPlaybackID);
  if (res < 0)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "StopPlayback failed (%d)", res);
  }

  _playIsInitialized = false;
  _playing = false;
  _playWarning = 0;
  _playError = 0;

  // Detach this thread if it was attached
  if (isAttached)
  {
    if (_javaVM->DetachCurrentThread() < 0)
    {
      WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                   " Could not detach thread from JVM");
    }
  }

  return 0;
}

int32_t AudioTrackJni::InitSpeaker() {
  CriticalSectionScoped lock(&_critSect);

  if (_playing)
  {
    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                 " Playout already started");
    return -1;
  }

  if (!_playoutDeviceIsSpecified)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Playout device is not specified");
    return -1;
  }

  // Nothing needs to be done here; we use a flag to have consistent
  // behavior with other platforms.
  _speakerIsInitialized = true;

  return 0;
}

int32_t AudioTrackJni::SpeakerVolumeIsAvailable(bool& available) {  // NOLINT
  available = true;  // We assume we are always able to set/get the volume
  return 0;
}

int32_t AudioTrackJni::SetSpeakerVolume(uint32_t volume) {
  if (!_speakerIsInitialized)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Speaker not initialized");
    return -1;
  }
  if (!globalContext)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Context is not set");
    return -1;
  }

  // Get the JNI env for this thread; attach it to the JVM if needed.
  JNIEnv* env;
  bool isAttached = false;
  if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
  {
    // Try to attach the thread and get the env
    jint res = _javaVM->AttachCurrentThread(&env, NULL);
    if ((res < 0) || !env)
    {
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                   " Could not attach thread to JVM (%d, %p)", res, env);
      return -1;
    }
    isAttached = true;
  }

  // Get the method ID
  jmethodID setPlayoutVolumeID = env->GetMethodID(_javaScClass,
                                                  "SetPlayoutVolume", "(I)I");

  // Call java sc object method
  jint res = env->CallIntMethod(_javaScObj, setPlayoutVolumeID,
                                static_cast<int>(volume));
  if (res < 0)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "SetPlayoutVolume failed (%d)", res);
    return -1;
  }

  // Detach this thread if it was attached
  if (isAttached)
  {
    if (_javaVM->DetachCurrentThread() < 0)
    {
      WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                   " Could not detach thread from JVM");
    }
  }

  return 0;
}

int32_t AudioTrackJni::SpeakerVolume(uint32_t& volume) const {  // NOLINT
  if (!_speakerIsInitialized)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Speaker not initialized");
    return -1;
  }
  if (!globalContext)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Context is not set");
    return -1;
  }

  // Get the JNI env for this thread; attach it to the JVM if needed.
  JNIEnv* env;
  bool isAttached = false;
  if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
  {
    // Try to attach the thread and get the env
    jint res = _javaVM->AttachCurrentThread(&env, NULL);
    if ((res < 0) || !env)
    {
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                   " Could not attach thread to JVM (%d, %p)", res, env);
      return -1;
    }
    isAttached = true;
  }

  // Get the method ID
  jmethodID getPlayoutVolumeID = env->GetMethodID(_javaScClass,
                                                  "GetPlayoutVolume", "()I");

  // Call java sc object method
  jint level = env->CallIntMethod(_javaScObj, getPlayoutVolumeID);
  if (level < 0)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "GetPlayoutVolume failed (%d)", level);
    return -1;
  }

  // Detach this thread if it was attached
  if (isAttached)
  {
    if (_javaVM->DetachCurrentThread() < 0)
    {
      WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                   " Could not detach thread from JVM");
    }
  }

  volume = static_cast<uint32_t>(level);

  return 0;
}

int32_t AudioTrackJni::MaxSpeakerVolume(uint32_t& maxVolume) const {  // NOLINT
  if (!_speakerIsInitialized)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Speaker not initialized");
    return -1;
  }

  maxVolume = _maxSpeakerVolume;

  return 0;
}

int32_t AudioTrackJni::MinSpeakerVolume(uint32_t& minVolume) const {  // NOLINT
  if (!_speakerIsInitialized)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Speaker not initialized");
    return -1;
  }
  minVolume = 0;
  return 0;
}

int32_t AudioTrackJni::SpeakerVolumeStepSize(
    uint16_t& stepSize) const {  // NOLINT
  if (!_speakerIsInitialized)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Speaker not initialized");
    return -1;
  }

  stepSize = 1;

  return 0;
}

int32_t AudioTrackJni::SpeakerMuteIsAvailable(bool& available) {  // NOLINT
  available = false;  // Speaker mute not supported on Android
  return 0;
}

int32_t AudioTrackJni::SetSpeakerMute(bool enable) {
  WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
               " API call not supported on this platform");
  return -1;
}

int32_t AudioTrackJni::SpeakerMute(bool& /*enabled*/) const {
  WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
               " API call not supported on this platform");
  return -1;
}

int32_t AudioTrackJni::StereoPlayoutIsAvailable(bool& available) {  // NOLINT
  available = false;  // Stereo playout not supported on Android
  return 0;
}

int32_t AudioTrackJni::SetStereoPlayout(bool enable) {
  if (enable)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Enabling not available");
    return -1;
  }

  return 0;
}

int32_t AudioTrackJni::StereoPlayout(bool& enabled) const {  // NOLINT
  enabled = false;
  return 0;
}

int32_t AudioTrackJni::SetPlayoutBuffer(
    const AudioDeviceModule::BufferType type,
    uint16_t sizeMS) {
  WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
               " API call not supported on this platform");
  return -1;
}

int32_t AudioTrackJni::PlayoutBuffer(
    AudioDeviceModule::BufferType& type,  // NOLINT
    uint16_t& sizeMS) const {  // NOLINT
  type = AudioDeviceModule::kAdaptiveBufferSize;
  sizeMS = _delayPlayout;  // Set to current playout delay

  return 0;
}

int32_t AudioTrackJni::PlayoutDelay(uint16_t& delayMS) const {  // NOLINT
  delayMS = _delayPlayout;
  return 0;
}

void AudioTrackJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
  CriticalSectionScoped lock(&_critSect);
  _ptrAudioBuffer = audioBuffer;
  // Inform the AudioBuffer about default settings for this implementation.
  _ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC);
  _ptrAudioBuffer->SetPlayoutChannels(N_PLAY_CHANNELS);
}

int32_t AudioTrackJni::SetPlayoutSampleRate(const uint32_t samplesPerSec) {
  if (samplesPerSec > 48000 || samplesPerSec < 8000)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Invalid sample rate");
    return -1;
  }

  // Set the playout sample rate to use
  if (samplesPerSec == 44100)
  {
    _samplingFreqOut = 44;
  }
  else
  {
    _samplingFreqOut = samplesPerSec / 1000;
  }

  // Update the AudioDeviceBuffer
  _ptrAudioBuffer->SetPlayoutSampleRate(samplesPerSec);

  return 0;
}

bool AudioTrackJni::PlayoutWarning() const {
  return (_playWarning > 0);
}

bool AudioTrackJni::PlayoutError() const {
  return (_playError > 0);
}

void AudioTrackJni::ClearPlayoutWarning() {
  _playWarning = 0;
}

void AudioTrackJni::ClearPlayoutError() {
  _playError = 0;
}

int32_t AudioTrackJni::SetLoudspeakerStatus(bool enable) {
  if (!globalContext)
  {
    WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
                 " Context is not set");
    return -1;
  }

  // Get the JNI env for this thread; attach it to the JVM if needed.
  JNIEnv* env;
  bool isAttached = false;
  if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
  {
    // Try to attach the thread and get the env
    jint res = _javaVM->AttachCurrentThread(&env, NULL);
    if ((res < 0) || !env)
    {
      WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
                   " Could not attach thread to JVM (%d, %p)", res, env);
      return -1;
    }
    isAttached = true;
  }

  // Get the method ID
  jmethodID setPlayoutSpeakerID = env->GetMethodID(_javaScClass,
                                                   "SetPlayoutSpeaker",
                                                   "(Z)I");

  // Call java sc object method
  jint res = env->CallIntMethod(_javaScObj, setPlayoutSpeakerID, enable);
  if (res < 0)
  {
    WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
                 " SetPlayoutSpeaker failed (%d)", res);
    return -1;
  }

  _loudSpeakerOn = enable;

  // Detach this thread if it was attached
  if (isAttached)
  {
    if (_javaVM->DetachCurrentThread() < 0)
    {
      WEBRTC_TRACE(kTraceWarning, kTraceUtility, -1,
                   " Could not detach thread from JVM");
    }
  }

  return 0;
}

int32_t AudioTrackJni::GetLoudspeakerStatus(bool& enabled) const {  // NOLINT
  enabled = _loudSpeakerOn;
  return 0;
}

int32_t AudioTrackJni::InitJavaResources() {
  // TODO: Check if we have already created the java object.
  _javaVM = globalJvm;
  _javaScClass = globalScClass;

  // Use the jvm that has been set
  if (!_javaVM)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "%s: Not a valid Java VM pointer", __FUNCTION__);
    return -1;
  }

  // Get the JNI env for this thread; attach it to the JVM if needed.
  JNIEnv* env;
  bool isAttached = false;
  if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
  {
    // Try to attach the thread and get the env
    jint res = _javaVM->AttachCurrentThread(&env, NULL);
    if ((res < 0) || !env)
    {
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                   "%s: Could not attach thread to JVM (%d, %p)",
                   __FUNCTION__, res, env);
      return -1;
    }
    isAttached = true;
  }

  WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
               "get method id");

  // Get the method ID for the void(void) constructor
  jmethodID cid = env->GetMethodID(_javaScClass, "<init>", "()V");
  if (cid == NULL)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "%s: could not get constructor ID", __FUNCTION__);
    return -1; /* exception thrown */
  }

  WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
               "%s: constructing object", __FUNCTION__);

  // Construct the object
  jobject javaScObjLocal = env->NewObject(_javaScClass, cid);
  if (!javaScObjLocal)
  {
    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                 "%s: could not create Java sc object", __FUNCTION__);
    return -1;
  }

  // Create a reference to the object (to tell JNI that we are referencing it
  // after this function has returned).
  _javaScObj = env->NewGlobalRef(javaScObjLocal);
  if (!_javaScObj)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "%s: could not create Java sc object reference",
                 __FUNCTION__);
    return -1;
  }

  // Delete local object ref, we only use the global ref.
  env->DeleteLocalRef(javaScObjLocal);

  //////////////////////
  // AUDIO MANAGEMENT

  // This is not mandatory functionality
  if (globalContext) {
    jfieldID context_id = env->GetFieldID(globalScClass,
                                          "_context",
                                          "Landroid/content/Context;");
    if (!context_id) {
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                   "%s: could not get _context id", __FUNCTION__);
      return -1;
    }

    env->SetObjectField(_javaScObj, context_id, globalContext);
    jobject javaContext = env->GetObjectField(_javaScObj, context_id);
    if (!javaContext) {
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                   "%s: could not set or get _context", __FUNCTION__);
      return -1;
    }
  }
  else {
    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                 "%s: did not set Context - some functionality is not "
                 "supported",
                 __FUNCTION__);
  }

  /////////////
  // PLAYOUT

  // Get play buffer field ID.
  jfieldID fidPlayBuffer = env->GetFieldID(_javaScClass, "_playBuffer",
                                           "Ljava/nio/ByteBuffer;");
  if (!fidPlayBuffer)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "%s: could not get play buffer fid", __FUNCTION__);
    return -1;
  }

  // Get play buffer object.
  jobject javaPlayBufferLocal =
      env->GetObjectField(_javaScObj, fidPlayBuffer);
  if (!javaPlayBufferLocal)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "%s: could not get play buffer", __FUNCTION__);
    return -1;
  }

  // Create a global reference to the object (to tell JNI that we are
  // referencing it after this function has returned).
  // NOTE: we are referencing it only through the direct buffer (see below).
  _javaPlayBuffer = env->NewGlobalRef(javaPlayBufferLocal);
  if (!_javaPlayBuffer)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "%s: could not get play buffer reference", __FUNCTION__);
    return -1;
  }

  // Delete local object ref, we only use the global ref.
  env->DeleteLocalRef(javaPlayBufferLocal);

  // Get direct buffer.
  _javaDirectPlayBuffer = env->GetDirectBufferAddress(_javaPlayBuffer);
  if (!_javaDirectPlayBuffer)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "%s: could not get direct play buffer", __FUNCTION__);
    return -1;
  }

  // Get the play audio method ID.
  _javaMidPlayAudio = env->GetMethodID(_javaScClass, "PlayAudio", "(I)I");
  if (!_javaMidPlayAudio)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "%s: could not get play audio mid", __FUNCTION__);
    return -1;
  }

  // Detach this thread if it was attached.
  if (isAttached)
  {
    if (_javaVM->DetachCurrentThread() < 0)
    {
      WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                   "%s: Could not detach thread from JVM", __FUNCTION__);
    }
  }

  return 0;
}
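
// Summary of the Java-side contract assumed above (WebRtcAudioTrack):
// a no-arg constructor, fields `_context` (android.content.Context) and
// `_playBuffer` (a direct java.nio.ByteBuffer), and methods
// InitPlayback(int)I, StartPlayback()I, StopPlayback()I,
// SetPlayoutVolume(int)I, GetPlayoutVolume()I, SetPlayoutSpeaker(boolean)I
// and PlayAudio(int)I.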

int32_t AudioTrackJni::InitSampleRate() {
  int samplingFreq = 44100;
  jint res = 0;

  // Get the JNI env for this thread; attach it to the JVM if needed.
  JNIEnv* env;
  bool isAttached = false;
  if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
  {
    // Try to attach the thread and get the env
    jint res = _javaVM->AttachCurrentThread(&env, NULL);
    if ((res < 0) || !env)
    {
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                   "%s: Could not attach thread to JVM (%d, %p)",
                   __FUNCTION__, res, env);
      return -1;
    }
    isAttached = true;
  }

  // Get the method ID
  jmethodID initPlaybackID = env->GetMethodID(_javaScClass, "InitPlayback",
                                              "(I)I");

  if (_samplingFreqOut > 0)
  {
    // Read the configured sampling rate
    samplingFreq = 44100;
    if (_samplingFreqOut != 44)
    {
      samplingFreq = _samplingFreqOut * 1000;
    }
    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
                 " Trying configured playback sampling rate %d",
                 samplingFreq);
  }
  else
  {
    // Set the preferred sampling frequency
    if (samplingFreq == 8000)
    {
      // Try 16000
      samplingFreq = 16000;
    }
    // else use same as recording
  }

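  // Probe rates in falling order of preference: if InitPlayback rejects
  // the current rate, fall back 44100 -> 16000 -> 8000 before giving up.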
  bool keepTrying = true;
  while (keepTrying)
  {
    // Call java sc object method
    res = env->CallIntMethod(_javaScObj, initPlaybackID, samplingFreq);
    if (res < 0)
    {
      switch (samplingFreq)
      {
        case 44100:
          samplingFreq = 16000;
          break;
        case 16000:
          samplingFreq = 8000;
          break;
        default:  // error
          WEBRTC_TRACE(kTraceError,
                       kTraceAudioDevice, _id,
                       "InitPlayback failed (%d)", res);
          return -1;
      }
    }
    else
    {
      keepTrying = false;
    }
  }

  // Store the max playout volume (InitPlayback returns it on success)
  _maxSpeakerVolume = static_cast<uint32_t>(res);
  if (_maxSpeakerVolume < 1)
  {
    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                 " Did not get valid max speaker volume value (%d)",
                 _maxSpeakerVolume);
  }

  // Set the playback sample rate to use
  if (samplingFreq == 44100)
  {
    _samplingFreqOut = 44;
  }
  else
  {
    _samplingFreqOut = samplingFreq / 1000;
  }

  WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
               "Playback sample rate set to (%d)", _samplingFreqOut);

  // Get the method ID
  jmethodID stopPlaybackID = env->GetMethodID(_javaScClass, "StopPlayback",
                                              "()I");

  // Call java sc object method
  res = env->CallIntMethod(_javaScObj, stopPlaybackID);
  if (res < 0)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "StopPlayback failed (%d)", res);
  }

  // Detach this thread if it was attached
  if (isAttached)
  {
    if (_javaVM->DetachCurrentThread() < 0)
    {
      WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                   "%s: Could not detach thread from JVM", __FUNCTION__);
    }
  }

  return 0;
}

bool AudioTrackJni::PlayThreadFunc(void* pThis)
{
  return (static_cast<AudioTrackJni*>(pThis)->PlayThreadProcess());
}

bool AudioTrackJni::PlayThreadProcess()
{
  if (!_playThreadIsInitialized)
  {
    // Do once when thread is started

    // Attach this thread to the JVM and get the JNI env for this thread
    jint res = _javaVM->AttachCurrentThread(&_jniEnvPlay, NULL);
    if ((res < 0) || !_jniEnvPlay)
    {
      WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,
                   _id,
                   "Could not attach playout thread to JVM (%d, %p)",
                   res, _jniEnvPlay);
      return false;  // Close down thread
    }

    _playThreadIsInitialized = true;
  }

  if (!_playing)
  {
    switch (_timeEventPlay.Wait(1000))
    {
      case kEventSignaled:
        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice,
                     _id, "Playout thread event signal");
        _timeEventPlay.Reset();
        break;
      case kEventError:
        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice,
                     _id, "Playout thread event error");
        return true;
      case kEventTimeout:
        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice,
                     _id, "Playout thread event timeout");
        return true;
    }
  }

  Lock();

  if (_startPlay)
  {
    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
                 "_startPlay true, performing initial actions");
    _startPlay = false;
    _playing = true;
    _playWarning = 0;
    _playError = 0;
    _playStartStopEvent.Set();
    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
                 "Sent signal");
  }

  if (_playing)
  {
    int8_t playBuffer[2 * 480];  // Max 10 ms @ 48 kHz / 16 bit
    uint32_t samplesToPlay = _samplingFreqOut * 10;
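    // _samplingFreqOut is in kHz, so this is the sample count for 10 ms
    // of mono 16-bit audio; it never exceeds the 480-sample buffer above.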

    // Ask for new PCM data to be played out using the AudioDeviceBuffer;
    // ensure that this callback is executed without taking the
    // audio-thread lock.
    UnLock();
    uint32_t nSamples =
        _ptrAudioBuffer->RequestPlayoutData(samplesToPlay);
    Lock();

    // Check again since play may have stopped during the unlocked period
    if (!_playing)
    {
      UnLock();
      return true;
    }

    nSamples = _ptrAudioBuffer->GetPlayoutData(playBuffer);
    if (nSamples != samplesToPlay)
    {
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                   " invalid number of output samples(%d)", nSamples);
      _playWarning = 1;
    }

    // Copy data to our direct buffer (held by the java sc object)
    // TODO: Give _javaDirectPlayBuffer directly to VoE?
    memcpy(_javaDirectPlayBuffer, playBuffer, nSamples * 2);

    UnLock();

    // Call the java sc object method to process data in the direct buffer.
    // Will block until the data has been put in the OS playout buffer
    // (see the java sc class).
    jint res = _jniEnvPlay->CallIntMethod(_javaScObj, _javaMidPlayAudio,
                                          2 * nSamples);
    if (res < 0)
    {
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                   "PlayAudio failed (%d)", res);
      _playWarning = 1;
    }
    else if (res > 0)
    {
      // We are not recording and have got a delay value from playback:
      // res is the delay in samples and _samplingFreqOut is in kHz, so
      // the division yields milliseconds.
      _delayPlayout = res / _samplingFreqOut;
    }
    Lock();

  }  // _playing

  if (_shutdownPlayThread)
  {
    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
                 "Detaching thread from Java VM");

    // Detach thread from the Java VM
    if (_javaVM->DetachCurrentThread() < 0)
    {
      WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,
                   _id, "Could not detach playout thread from JVM");
      _shutdownPlayThread = false;
      // If we say OK (i.e. set the event) and close the thread anyway,
      // the app will crash.
    }
    else
    {
      _jniEnvPlay = NULL;
      _shutdownPlayThread = false;
      _playStartStopEvent.Set();  // Signal to Terminate() that we are done
      WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
                   "Sent signal");
    }
  }

  UnLock();
  return true;
}

}  // namespace webrtc