/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

/*
 * Android audio device implementation (JNI/AudioRecord usage)
 */

// TODO(xians): Break out attach and detach current thread to JVM to
// separate functions.
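
// A minimal sketch of such a helper (hypothetical; nothing below uses it):
// an RAII wrapper would collapse the GetEnv()/AttachCurrentThread()/
// DetachCurrentThread() pattern that is repeated in almost every method of
// this file.
//
//   class ScopedJvmAttach {
//    public:
//     explicit ScopedJvmAttach(JavaVM* jvm)
//         : jvm_(jvm), env_(NULL), attached_(false) {
//       if (jvm_->GetEnv(reinterpret_cast<void**>(&env_),
//                        JNI_VERSION_1_4) != JNI_OK) {
//         attached_ = (jvm_->AttachCurrentThread(&env_, NULL) >= 0);
//       }
//     }
//     ~ScopedJvmAttach() {
//       if (attached_) {
//         jvm_->DetachCurrentThread();
//       }
//     }
//     JNIEnv* env() const { return env_; }  // NULL if the attach failed.
//    private:
//     JavaVM* jvm_;
//     JNIEnv* env_;
//     bool attached_;
//   };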

#include "webrtc/modules/audio_device/android/audio_record_jni.h"

#include <android/log.h>
#include <assert.h>
#include <stdlib.h>

#include "webrtc/modules/audio_device/android/audio_common.h"
#include "webrtc/modules/audio_device/audio_device_config.h"
#include "webrtc/modules/audio_device/audio_device_utility.h"

#include "webrtc/system_wrappers/interface/event_wrapper.h"
#include "webrtc/system_wrappers/interface/thread_wrapper.h"
#include "webrtc/system_wrappers/interface/trace.h"

namespace webrtc {

JavaVM* AudioRecordJni::globalJvm = NULL;
JNIEnv* AudioRecordJni::globalJNIEnv = NULL;
jobject AudioRecordJni::globalContext = NULL;
jclass AudioRecordJni::globalScClass = NULL;

int32_t AudioRecordJni::SetAndroidAudioDeviceObjects(void* javaVM, void* env,
                                                     void* context) {
  assert(env);
  globalJvm = reinterpret_cast<JavaVM*>(javaVM);
  globalJNIEnv = reinterpret_cast<JNIEnv*>(env);
  // Get java class type (note path to class packet).
  jclass javaScClassLocal = globalJNIEnv->FindClass(
      "org/webrtc/voiceengine/WebRtcAudioRecord");
  if (!javaScClassLocal) {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
                 "%s: could not find java class", __FUNCTION__);
    return -1;  // exception thrown
  }

  // Create a global reference to the class (to tell JNI that we are
  // referencing it after this function has returned).
  globalScClass = reinterpret_cast<jclass>(
      globalJNIEnv->NewGlobalRef(javaScClassLocal));
  if (!globalScClass) {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
                 "%s: could not create reference", __FUNCTION__);
    return -1;
  }

  globalContext = globalJNIEnv->NewGlobalRef(
      reinterpret_cast<jobject>(context));
  if (!globalContext) {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
                 "%s: could not create context reference", __FUNCTION__);
    return -1;
  }

  // Delete local class ref, we only use the global ref.
  globalJNIEnv->DeleteLocalRef(javaScClassLocal);

  return 0;
}
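
// Note: SetAndroidAudioDeviceObjects() must have been called (typically
// from JNI_OnLoad) before Init() is used, since InitJavaResources() reads
// globalJvm and globalScClass. A wiring sketch (hypothetical caller; the
// Context jobject would be handed in from a Java entry point):
//
//   extern "C" jint JNI_OnLoad(JavaVM* vm, void* reserved) {
//     JNIEnv* env = NULL;
//     if (vm->GetEnv(reinterpret_cast<void**>(&env),
//                    JNI_VERSION_1_4) != JNI_OK) {
//       return -1;
//     }
//     webrtc::AudioRecordJni::SetAndroidAudioDeviceObjects(vm, env, context);
//     return JNI_VERSION_1_4;
//   }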

void AudioRecordJni::ClearAndroidAudioDeviceObjects() {
  WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1,
               "%s: clearing global Java references", __FUNCTION__);

  globalJvm = NULL;
  if (!globalJNIEnv) {
    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, -1,
                 "%s: saved env already NULL", __FUNCTION__);
    return;
  }

  globalJNIEnv->DeleteGlobalRef(globalContext);
  globalContext = reinterpret_cast<jobject>(NULL);

  globalJNIEnv->DeleteGlobalRef(globalScClass);
  globalScClass = reinterpret_cast<jclass>(NULL);

  globalJNIEnv = reinterpret_cast<JNIEnv*>(NULL);
}

AudioRecordJni::AudioRecordJni(
    const int32_t id, PlayoutDelayProvider* delay_provider)
    : _javaVM(NULL),
      _jniEnvRec(NULL),
      _javaScClass(0),
      _javaScObj(0),
      _javaRecBuffer(0),
      _javaDirectRecBuffer(NULL),
      _javaMidRecAudio(0),
      _ptrAudioBuffer(NULL),
      _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
      _id(id),
      _delay_provider(delay_provider),
      _initialized(false),
      _timeEventRec(*EventWrapper::Create()),
      _recStartStopEvent(*EventWrapper::Create()),
      _ptrThreadRec(NULL),
      _recThreadID(0),
      _recThreadIsInitialized(false),
      _shutdownRecThread(false),
      _recordingDeviceIsSpecified(false),
      _recording(false),
      _recIsInitialized(false),
      _micIsInitialized(false),
      _startRec(false),
      _recWarning(0),
      _recError(0),
      _delayRecording(0),
      _AGC(false),
      _samplingFreqIn((N_REC_SAMPLES_PER_SEC / 1000)),
      _recAudioSource(1) {  // 1 is AudioSource.MIC which is our default
  memset(_recBuffer, 0, sizeof(_recBuffer));
}

AudioRecordJni::~AudioRecordJni() {
  WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
               "%s destroyed", __FUNCTION__);

  Terminate();

  delete &_recStartStopEvent;
  delete &_timeEventRec;
  delete &_critSect;
}

int32_t AudioRecordJni::Init() {
  CriticalSectionScoped lock(&_critSect);

  if (_initialized)
  {
    return 0;
  }

  _recWarning = 0;
  _recError = 0;

  // Init Java member variables and set up the JNI interface to the
  // WebRtcAudioRecord java class.
  if (InitJavaResources() != 0)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "%s: Failed to init Java resources", __FUNCTION__);
    return -1;
  }

  // Check the sample rate to be used for recording.
  if (InitSampleRate() != 0)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "%s: Failed to init sample rate", __FUNCTION__);
    return -1;
  }

  const char* threadName = "jni_audio_capture_thread";
  _ptrThreadRec = ThreadWrapper::CreateThread(RecThreadFunc, this,
                                              kRealtimePriority, threadName);
  if (_ptrThreadRec == NULL)
  {
    WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
                 " failed to create the rec audio thread");
    return -1;
  }

  unsigned int threadID(0);
  if (!_ptrThreadRec->Start(threadID))
  {
    WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
                 " failed to start the rec audio thread");
    delete _ptrThreadRec;
    _ptrThreadRec = NULL;
    return -1;
  }
  _recThreadID = threadID;
  _initialized = true;

  return 0;
}

int32_t AudioRecordJni::Terminate() {
  CriticalSectionScoped lock(&_critSect);

  if (!_initialized)
  {
    return 0;
  }

  StopRecording();
  _shutdownRecThread = true;
  _timeEventRec.Set();  // Release rec thread from waiting state
  if (_ptrThreadRec)
  {
    // First, the thread must detach itself from the Java VM.
    _critSect.Leave();
    if (kEventSignaled != _recStartStopEvent.Wait(5000))
    {
      WEBRTC_TRACE(
          kTraceError,
          kTraceAudioDevice,
          _id,
          "%s: Recording thread shutdown timed out, cannot "
          "terminate thread",
          __FUNCTION__);
      // If we close the thread anyway, the app will crash.
      return -1;
    }
    _recStartStopEvent.Reset();
    _critSect.Enter();

    // Close down rec thread.
    ThreadWrapper* tmpThread = _ptrThreadRec;
    _ptrThreadRec = NULL;
    _critSect.Leave();
    tmpThread->SetNotAlive();
    // Release again, we might have returned to waiting state.
    _timeEventRec.Set();
    if (tmpThread->Stop())
    {
      delete tmpThread;
      _jniEnvRec = NULL;
    }
    else
    {
      WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                   " failed to close down the rec audio thread");
    }
    _critSect.Enter();

    _recThreadIsInitialized = false;
  }
  _micIsInitialized = false;
  _recordingDeviceIsSpecified = false;

  // Get the JNI env for this thread; attach the thread to the JVM if
  // needed.
  JNIEnv* env;
  bool isAttached = false;

  if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
  {
    // Try to attach the thread and get the env.
    jint res = _javaVM->AttachCurrentThread(&env, NULL);
    if ((res < 0) || !env)
    {
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                   "%s: Could not attach thread to JVM (%d, %p)",
                   __FUNCTION__, res, env);
      return -1;
    }
    isAttached = true;
  }

  // Make method IDs and buffer pointers unusable.
  _javaMidRecAudio = 0;
  _javaDirectRecBuffer = NULL;

  // Delete the reference to the java buffer, this allows the
  // garbage collector to delete it.
  env->DeleteGlobalRef(_javaRecBuffer);
  _javaRecBuffer = 0;

  // Delete the references to the java object and class, this allows the
  // garbage collector to delete them.
  env->DeleteGlobalRef(_javaScObj);
  _javaScObj = 0;
  _javaScClass = 0;

  // Detach this thread if it was attached.
  if (isAttached)
  {
    if (_javaVM->DetachCurrentThread() < 0)
    {
      WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                   "%s: Could not detach thread from JVM", __FUNCTION__);
    }
  }

  _initialized = false;

  return 0;
}

int32_t AudioRecordJni::RecordingDeviceName(uint16_t index,
                                            char name[kAdmMaxDeviceNameSize],
                                            char guid[kAdmMaxGuidSize]) {
  if (0 != index)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Device index is out of range [0,0]");
    return -1;
  }

  // Return empty string.
  memset(name, 0, kAdmMaxDeviceNameSize);

  if (guid)
  {
    memset(guid, 0, kAdmMaxGuidSize);
  }

  return 0;
}

int32_t AudioRecordJni::SetRecordingDevice(uint16_t index) {
  if (_recIsInitialized)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Recording already initialized");
    return -1;
  }

  // The recording device index is used to specify the recording audio
  // source; allow any value.
  _recAudioSource = index;
  _recordingDeviceIsSpecified = true;

  return 0;
}

int32_t AudioRecordJni::SetRecordingDevice(
    AudioDeviceModule::WindowsDeviceType device) {
  WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
               " API call not supported on this platform");
  return -1;
}

int32_t AudioRecordJni::RecordingIsAvailable(bool& available) {  // NOLINT
  available = false;

  // Try to initialize the recording side.
  int32_t res = InitRecording();

  // Cancel effect of initialization.
  StopRecording();

  if (res != -1)
  {
    available = true;
  }

  return res;
}

int32_t AudioRecordJni::InitRecording() {
  CriticalSectionScoped lock(&_critSect);

  if (!_initialized)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Not initialized");
    return -1;
  }

  if (_recording)
  {
    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                 " Recording already started");
    return -1;
  }

  if (!_recordingDeviceIsSpecified)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Recording device is not specified");
    return -1;
  }

  if (_recIsInitialized)
  {
    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
                 " Recording already initialized");
    return 0;
  }

  // Initialize the microphone.
  if (InitMicrophone() == -1)
  {
    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                 " InitMicrophone() failed");
  }

  // Get the JNI env for this thread; attach the thread to the JVM if
  // needed.
  JNIEnv* env;
  bool isAttached = false;

  if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
  {
    // Try to attach the thread and get the env.
    jint res = _javaVM->AttachCurrentThread(&env, NULL);
    if ((res < 0) || !env)
    {
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                   " Could not attach thread to JVM (%d, %p)", res, env);
      return -1;
    }
    isAttached = true;
  }

  // Get the method ID.
  jmethodID initRecordingID = env->GetMethodID(_javaScClass, "InitRecording",
                                               "(II)I");

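  // _samplingFreqIn is stored in kHz; 44 is the truncated encoding of
  // 44100 Hz (44100 / 1000), hence the special case below.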
  int samplingFreq = 44100;
  if (_samplingFreqIn != 44)
  {
    samplingFreq = _samplingFreqIn * 1000;
  }

  int retVal = -1;

  // Call the java sc object method.
  jint res = env->CallIntMethod(_javaScObj, initRecordingID, _recAudioSource,
                                samplingFreq);
  if (res < 0)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "InitRecording failed (%d)", res);
  }
  else
  {
    // Set the audio device buffer sampling rate.
    _ptrAudioBuffer->SetRecordingSampleRate(_samplingFreqIn * 1000);

    // The init rec function returns a fixed delay.
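    // (res is in samples and _samplingFreqIn is in samples per ms, so the
    // stored value is in ms.)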
    _delayRecording = res / _samplingFreqIn;

    _recIsInitialized = true;
    retVal = 0;
  }

  // Detach this thread if it was attached.
  if (isAttached)
  {
    if (_javaVM->DetachCurrentThread() < 0)
    {
      WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                   " Could not detach thread from JVM");
    }
  }

  return retVal;
}

int32_t AudioRecordJni::StartRecording() {
  CriticalSectionScoped lock(&_critSect);

  if (!_recIsInitialized)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Recording not initialized");
    return -1;
  }

  if (_recording)
  {
    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
                 " Recording already started");
    return 0;
  }

  // Get the JNI env for this thread; attach the thread to the JVM if
  // needed.
  JNIEnv* env;
  bool isAttached = false;

  if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
  {
    // Try to attach the thread and get the env.
    jint res = _javaVM->AttachCurrentThread(&env, NULL);
    if ((res < 0) || !env)
    {
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                   " Could not attach thread to JVM (%d, %p)", res, env);
      return -1;
    }
    isAttached = true;
  }

  // Get the method ID.
  jmethodID startRecordingID = env->GetMethodID(_javaScClass,
                                                "StartRecording", "()I");

  // Call the java sc object method.
  jint res = env->CallIntMethod(_javaScObj, startRecordingID);
  if (res < 0)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "StartRecording failed (%d)", res);
    return -1;
  }

  _recWarning = 0;
  _recError = 0;

  // Signal to recording thread that we want to start.
  _startRec = true;
  _timeEventRec.Set();  // Release thread from waiting state
  _critSect.Leave();
  // Wait for thread to init.
  if (kEventSignaled != _recStartStopEvent.Wait(5000))
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Timeout or error starting");
  }
  _recStartStopEvent.Reset();
  _critSect.Enter();

  // Detach this thread if it was attached.
  if (isAttached)
  {
    if (_javaVM->DetachCurrentThread() < 0)
    {
      WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                   " Could not detach thread from JVM");
    }
  }

  return 0;
}

int32_t AudioRecordJni::StopRecording() {
  CriticalSectionScoped lock(&_critSect);

  if (!_recIsInitialized)
  {
    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
                 " Recording is not initialized");
    return 0;
  }

  // Make sure we don't start recording (it's asynchronous); we are
  // assumed to hold the lock here.
  _startRec = false;

  // Get the JNI env for this thread; attach the thread to the JVM if
  // needed.
  JNIEnv* env;
  bool isAttached = false;

  if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
  {
    // Try to attach the thread and get the env.
    jint res = _javaVM->AttachCurrentThread(&env, NULL);
    if ((res < 0) || !env)
    {
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                   " Could not attach thread to JVM (%d, %p)", res, env);
      return -1;
    }
    isAttached = true;
  }

  // Get the method ID.
  jmethodID stopRecordingID = env->GetMethodID(_javaScClass, "StopRecording",
                                               "()I");

  // Call the java sc object method.
  jint res = env->CallIntMethod(_javaScObj, stopRecordingID);
  if (res < 0)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "StopRecording failed (%d)", res);
  }

  _recIsInitialized = false;
  _recording = false;
  _recWarning = 0;
  _recError = 0;

  // Detach this thread if it was attached.
  if (isAttached)
  {
    if (_javaVM->DetachCurrentThread() < 0)
    {
      WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                   " Could not detach thread from JVM");
    }
  }

  return 0;
}

int32_t AudioRecordJni::SetAGC(bool enable) {
  _AGC = enable;
  return 0;
}

int32_t AudioRecordJni::InitMicrophone() {
  CriticalSectionScoped lock(&_critSect);

  if (_recording)
  {
    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                 " Recording already started");
    return -1;
  }

  if (!_recordingDeviceIsSpecified)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Recording device is not specified");
    return -1;
  }

  // Nothing needs to be done here, we use a flag to have consistent
  // behavior with other platforms.
  _micIsInitialized = true;

  return 0;
}

int32_t AudioRecordJni::MicrophoneVolumeIsAvailable(
    bool& available) {  // NOLINT
  available = false;  // Mic volume not supported on Android
  return 0;
}

int32_t AudioRecordJni::SetMicrophoneVolume(uint32_t /*volume*/) {
  WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
               " API call not supported on this platform");
  return -1;
}

int32_t AudioRecordJni::MicrophoneVolume(uint32_t& volume) const {  // NOLINT
  WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
               " API call not supported on this platform");
  return -1;
}

int32_t AudioRecordJni::MaxMicrophoneVolume(
    uint32_t& maxVolume) const {  // NOLINT
  WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
               " API call not supported on this platform");
  return -1;
}

int32_t AudioRecordJni::MinMicrophoneVolume(
    uint32_t& minVolume) const {  // NOLINT
  WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
               " API call not supported on this platform");
  return -1;
}

int32_t AudioRecordJni::MicrophoneVolumeStepSize(
    uint16_t& stepSize) const {
  WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
               " API call not supported on this platform");
  return -1;
}

int32_t AudioRecordJni::MicrophoneMuteIsAvailable(bool& available) {  // NOLINT
  available = false;  // Mic mute not supported on Android
  return 0;
}

int32_t AudioRecordJni::SetMicrophoneMute(bool enable) {
  WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
               " API call not supported on this platform");
  return -1;
}

int32_t AudioRecordJni::MicrophoneMute(bool& enabled) const {  // NOLINT
  WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
               " API call not supported on this platform");
  return -1;
}

int32_t AudioRecordJni::MicrophoneBoostIsAvailable(bool& available) {  // NOLINT
  available = false;  // Mic boost not supported on Android
  return 0;
}

int32_t AudioRecordJni::SetMicrophoneBoost(bool enable) {
  if (!_micIsInitialized)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Microphone not initialized");
    return -1;
  }

  if (enable)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Enabling not available");
    return -1;
  }

  return 0;
}

int32_t AudioRecordJni::MicrophoneBoost(bool& enabled) const {  // NOLINT
  if (!_micIsInitialized)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Microphone not initialized");
    return -1;
  }

  enabled = false;

  return 0;
}

int32_t AudioRecordJni::StereoRecordingIsAvailable(bool& available) {  // NOLINT
  available = false;  // Stereo recording not supported on Android
  return 0;
}

int32_t AudioRecordJni::SetStereoRecording(bool enable) {
  if (enable)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Enabling not available");
    return -1;
  }

  return 0;
}

int32_t AudioRecordJni::StereoRecording(bool& enabled) const {  // NOLINT
  enabled = false;
  return 0;
}

int32_t AudioRecordJni::RecordingDelay(uint16_t& delayMS) const {  // NOLINT
  delayMS = _delayRecording;
  return 0;
}

bool AudioRecordJni::RecordingWarning() const {
  return (_recWarning > 0);
}

bool AudioRecordJni::RecordingError() const {
  return (_recError > 0);
}

void AudioRecordJni::ClearRecordingWarning() {
  _recWarning = 0;
}

void AudioRecordJni::ClearRecordingError() {
  _recError = 0;
}

void AudioRecordJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
  CriticalSectionScoped lock(&_critSect);
  _ptrAudioBuffer = audioBuffer;
  // Inform the AudioBuffer about default settings for this implementation.
  _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);
  _ptrAudioBuffer->SetRecordingChannels(N_REC_CHANNELS);
}

int32_t AudioRecordJni::SetRecordingSampleRate(const uint32_t samplesPerSec) {
  if (samplesPerSec > 48000 || samplesPerSec < 8000)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 " Invalid sample rate");
    return -1;
  }

  // Set the recording sample rate to use.
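  // (Stored in kHz; 44100 Hz is truncated to 44.)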
  if (samplesPerSec == 44100)
  {
    _samplingFreqIn = 44;
  }
  else
  {
    _samplingFreqIn = samplesPerSec / 1000;
  }

  // Update the AudioDeviceBuffer.
  _ptrAudioBuffer->SetRecordingSampleRate(samplesPerSec);

  return 0;
}

int32_t AudioRecordJni::InitJavaResources() {
  // todo: Check if we already have created the java object.
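  // The lookups below define the Java-side API this class binds to on
  // org.webrtc.voiceengine.WebRtcAudioRecord: a void constructor, the
  // fields _context (android.content.Context) and _recBuffer (a direct
  // java.nio.ByteBuffer), and the methods InitRecording(II)I,
  // StartRecording()I, StopRecording()I and RecordAudio(I)I.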
  _javaVM = globalJvm;
  _javaScClass = globalScClass;

  // Use the JVM that has been set.
  if (!_javaVM)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "%s: Not a valid Java VM pointer", __FUNCTION__);
    return -1;
  }

  // Get the JNI env for this thread; attach the thread to the JVM if
  // needed.
  JNIEnv* env;
  bool isAttached = false;

  if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
  {
    // Try to attach the thread and get the env.
    jint res = _javaVM->AttachCurrentThread(&env, NULL);
    if ((res < 0) || !env)
    {
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                   "%s: Could not attach thread to JVM (%d, %p)",
                   __FUNCTION__, res, env);
      return -1;
    }
    isAttached = true;
  }

  WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
               "get method id");

  // Get the method ID for the void(void) constructor.
  jmethodID cid = env->GetMethodID(_javaScClass, "<init>", "()V");
  if (cid == NULL)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "%s: could not get constructor ID", __FUNCTION__);
    return -1; /* exception thrown */
  }

  WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
               "%s: construct object", __FUNCTION__);

  // Construct the object.
  jobject javaScObjLocal = env->NewObject(_javaScClass, cid);
  if (!javaScObjLocal)
  {
    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                 "%s: could not create Java sc object", __FUNCTION__);
    return -1;
  }

  // Create a global reference to the object (to tell JNI that we are
  // referencing it after this function has returned).
  _javaScObj = env->NewGlobalRef(javaScObjLocal);
  if (!_javaScObj)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "%s: could not create Java sc object reference",
                 __FUNCTION__);
    return -1;
  }

  // Delete local object ref, we only use the global ref.
  env->DeleteLocalRef(javaScObjLocal);

  //////////////////////
  // AUDIO MANAGEMENT

  // This is not mandatory functionality.
  if (globalContext) {
    jfieldID context_id = env->GetFieldID(globalScClass,
                                          "_context",
                                          "Landroid/content/Context;");
    if (!context_id) {
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                   "%s: could not get _context id", __FUNCTION__);
      return -1;
    }

    env->SetObjectField(_javaScObj, context_id, globalContext);
    jobject javaContext = env->GetObjectField(_javaScObj, context_id);
    if (!javaContext) {
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                   "%s: could not set or get _context", __FUNCTION__);
      return -1;
    }
  }
  else {
    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                 "%s: did not set Context - some functionality is not "
                 "supported",
                 __FUNCTION__);
  }

  // Get rec buffer field ID.
  jfieldID fidRecBuffer = env->GetFieldID(_javaScClass, "_recBuffer",
                                          "Ljava/nio/ByteBuffer;");
  if (!fidRecBuffer)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "%s: could not get rec buffer fid", __FUNCTION__);
    return -1;
  }

  // Get rec buffer object.
  jobject javaRecBufferLocal = env->GetObjectField(_javaScObj, fidRecBuffer);
  if (!javaRecBufferLocal)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "%s: could not get rec buffer", __FUNCTION__);
    return -1;
  }

  // Create a global reference to the object (to tell JNI that we are
  // referencing it after this function has returned).
  // NOTE: we are referencing it only through the direct buffer (see below).
  _javaRecBuffer = env->NewGlobalRef(javaRecBufferLocal);
  if (!_javaRecBuffer)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "%s: could not get rec buffer reference", __FUNCTION__);
    return -1;
  }

  // Delete local object ref, we only use the global ref.
  env->DeleteLocalRef(javaRecBufferLocal);

  // Get direct buffer.
  _javaDirectRecBuffer = env->GetDirectBufferAddress(_javaRecBuffer);
  if (!_javaDirectRecBuffer)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "%s: could not get direct rec buffer", __FUNCTION__);
    return -1;
  }

  // Get the rec audio method ID.
  _javaMidRecAudio = env->GetMethodID(_javaScClass, "RecordAudio", "(I)I");
  if (!_javaMidRecAudio)
  {
    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                 "%s: could not get rec audio mid", __FUNCTION__);
    return -1;
  }

  // Detach this thread if it was attached.
  if (isAttached)
  {
    if (_javaVM->DetachCurrentThread() < 0)
    {
      WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                   "%s: Could not detach thread from JVM", __FUNCTION__);
    }
  }

  return 0;
}

int32_t AudioRecordJni::InitSampleRate() {
  int samplingFreq = 44100;
  jint res = 0;

  // Get the JNI env for this thread; attach the thread to the JVM if
  // needed.
  JNIEnv* env;
  bool isAttached = false;

  if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
  {
    // Try to attach the thread and get the env.
    res = _javaVM->AttachCurrentThread(&env, NULL);
    if ((res < 0) || !env)
    {
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                   "%s: Could not attach thread to JVM (%d, %p)",
                   __FUNCTION__, res, env);
      return -1;
    }
    isAttached = true;
  }

  if (_samplingFreqIn > 0)
  {
    // Read the configured sampling rate.
    samplingFreq = 44100;
    if (_samplingFreqIn != 44)
    {
      samplingFreq = _samplingFreqIn * 1000;
    }
    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
                 " Trying configured recording sampling rate %d",
                 samplingFreq);
  }

  // Get the method ID.
  jmethodID initRecordingID = env->GetMethodID(_javaScClass, "InitRecording",
                                               "(II)I");

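  // Probe for a workable rate: retry InitRecording with 44100 -> 16000 ->
  // 8000 Hz until one succeeds, and fail if 8000 Hz is also rejected.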
  bool keepTrying = true;
  while (keepTrying)
  {
    // Call the java sc object method.
    res = env->CallIntMethod(_javaScObj, initRecordingID, _recAudioSource,
                             samplingFreq);
    if (res < 0)
    {
      switch (samplingFreq)
      {
        case 44100:
          samplingFreq = 16000;
          break;
        case 16000:
          samplingFreq = 8000;
          break;
        default:  // error
          WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                       "%s: InitRecording failed (%d)", __FUNCTION__, res);
          return -1;
      }
    }
    else
    {
      keepTrying = false;
    }
  }

  // Set the recording sample rate to use.
  if (samplingFreq == 44100)
  {
    _samplingFreqIn = 44;
  }
  else
  {
    _samplingFreqIn = samplingFreq / 1000;
  }

  WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
               "Recording sample rate set to (%d)", _samplingFreqIn);

  // Get the method ID.
  jmethodID stopRecordingID = env->GetMethodID(_javaScClass, "StopRecording",
                                               "()I");

  // Call the java sc object method.
  res = env->CallIntMethod(_javaScObj, stopRecordingID);
  if (res < 0)
  {
    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                 "StopRecording failed (%d)", res);
  }

  // Detach this thread if it was attached.
  if (isAttached)
  {
    if (_javaVM->DetachCurrentThread() < 0)
    {
      WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                   "%s: Could not detach thread from JVM", __FUNCTION__);
    }
  }

  return 0;
}

bool AudioRecordJni::RecThreadFunc(void* pThis)
{
  return (static_cast<AudioRecordJni*>(pThis)->RecThreadProcess());
}

bool AudioRecordJni::RecThreadProcess()
{
  if (!_recThreadIsInitialized)
  {
    // Done once when the thread is started: attach this thread to the JVM
    // and get its JNI env.
    jint res = _javaVM->AttachCurrentThread(&_jniEnvRec, NULL);
    if ((res < 0) || !_jniEnvRec)
    {
      WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,
                   _id, "Could not attach rec thread to JVM (%d, %p)",
                   res, _jniEnvRec);
      return false;  // Close down thread
    }

    _recThreadIsInitialized = true;
  }

  // Just sleep if rec has not started.
  if (!_recording)
  {
    switch (_timeEventRec.Wait(1000))
    {
      case kEventSignaled:
        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice,
                     _id, "Recording thread event signal");
        _timeEventRec.Reset();
        break;
      case kEventError:
        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice,
                     _id, "Recording thread event error");
        return true;
      case kEventTimeout:
        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice,
                     _id, "Recording thread event timeout");
        return true;
    }
  }

  Lock();

  if (_startRec)
  {
    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
                 "_startRec true, performing initial actions");
    _startRec = false;
    _recording = true;
    _recWarning = 0;
    _recError = 0;
    _recStartStopEvent.Set();
  }

  if (_recording)
  {
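    // _samplingFreqIn is in kHz (= samples per ms), so this is one 10 ms
    // frame of audio.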
    uint32_t samplesToRec = _samplingFreqIn * 10;

    // Call the java sc object method to record data into the direct buffer.
    // It will block until data has been recorded (see the java sc class),
    // therefore we must release the lock.
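    // (The argument 2 * samplesToRec is a byte count: 16-bit PCM uses two
    // bytes per sample.)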
    UnLock();
    jint recDelayInSamples = _jniEnvRec->CallIntMethod(_javaScObj,
                                                       _javaMidRecAudio,
                                                       2 * samplesToRec);
    if (recDelayInSamples < 0)
    {
      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                   "RecordAudio failed");
      _recWarning = 1;
    }
    else
    {
      _delayRecording = recDelayInSamples / _samplingFreqIn;
    }
    Lock();

    // Check again since recording may have stopped during the Java call.
    if (_recording)
    {
      // WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
      //              "total delay is %d", msPlayDelay + _delayRecording);

      // Copy data from the direct buffer (held by the java sc object).
      // todo: Give _javaDirectRecBuffer directly to VoE?
      // todo: Check count <= 480?
      memcpy(_recBuffer, _javaDirectRecBuffer, 2 * samplesToRec);

      // Store the recorded buffer (no action will be taken if the
      // number of recorded samples is not a full buffer).
      _ptrAudioBuffer->SetRecordedBuffer(_recBuffer, samplesToRec);

      // Store VQE delay values.
      _ptrAudioBuffer->SetVQEData(_delay_provider->PlayoutDelayMs(),
                                  _delayRecording, 0);

      // Deliver the recorded samples at the specified sample rate, mic
      // level, etc. to the observer using a callback.
      UnLock();
      _ptrAudioBuffer->DeliverRecordedData();
      Lock();
    }

  }  // _recording

  if (_shutdownRecThread)
  {
    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
                 "Detaching rec thread from Java VM");

    // Detach the thread from the Java VM.
    if (_javaVM->DetachCurrentThread() < 0)
    {
      WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,
                   _id, "Could not detach recording thread from JVM");
      _shutdownRecThread = false;
      // If we say OK (i.e. set the event) and close the thread anyway,
      // the app will crash.
    }
    else
    {
      _jniEnvRec = NULL;
      _shutdownRecThread = false;
      _recStartStopEvent.Set();  // Signal to Terminate() that we are done

      WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
                   "Sent signal rec");
    }
  }

  UnLock();
  return true;
}

}  // namespace webrtc