/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

/* This is a JNI example where we use native methods to play sounds
 * using OpenSL ES. See the corresponding Java source file located at:
 *
 *   src/com/example/nativeaudio/NativeAudio/NativeAudio.java
 */
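
/* A sketch of the Java-side declarations this file assumes; the method names and
 * parameter types are inferred from the JNI entry points below, so treat this as
 * illustrative rather than authoritative:
 *
 *   public static native void createEngine();
 *   public static native void createBufferQueueAudioPlayer();
 *   public static native boolean createUriAudioPlayer(String uri);
 *   public static native void setPlayingUriAudioPlayer(boolean isPlaying);
 *   public static native boolean selectClip(int which, int count);
 *   public static native boolean createAssetAudioPlayer(AssetManager assetManager, String filename);
 *   public static native boolean createAudioRecorder();
 *   public static native void startRecording();
 *   public static native void shutdown();
 *
 * plus similar setters for looping, mute/solo, volume, stereo position, and reverb.
 */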

#include <assert.h>
#include <jni.h>
#include <string.h>

// for __android_log_print(ANDROID_LOG_INFO, "YourApp", "formatted message");
// #include <android/log.h>

// for native audio
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>

// for native asset manager
#include <sys/types.h>
#include <android/asset_manager.h>
#include <android/asset_manager_jni.h>

// pre-recorded sound clips, both are 8 kHz mono 16-bit signed little endian

static const char hello[] =
#include "hello_clip.h"
;

static const char android[] =
#include "android_clip.h"
;

// engine interfaces
static SLObjectItf engineObject = NULL;
static SLEngineItf engineEngine;

// output mix interfaces
static SLObjectItf outputMixObject = NULL;
static SLEnvironmentalReverbItf outputMixEnvironmentalReverb = NULL;

// buffer queue player interfaces
static SLObjectItf bqPlayerObject = NULL;
static SLPlayItf bqPlayerPlay;
static SLAndroidSimpleBufferQueueItf bqPlayerBufferQueue;
static SLEffectSendItf bqPlayerEffectSend;
static SLMuteSoloItf bqPlayerMuteSolo;
static SLVolumeItf bqPlayerVolume;

// aux effect on the output mix, used by the buffer queue player
static const SLEnvironmentalReverbSettings reverbSettings =
        SL_I3DL2_ENVIRONMENT_PRESET_STONECORRIDOR;

// URI player interfaces
static SLObjectItf uriPlayerObject = NULL;
static SLPlayItf uriPlayerPlay;
static SLSeekItf uriPlayerSeek;
static SLMuteSoloItf uriPlayerMuteSolo;
static SLVolumeItf uriPlayerVolume;

// file descriptor player interfaces
static SLObjectItf fdPlayerObject = NULL;
static SLPlayItf fdPlayerPlay;
static SLSeekItf fdPlayerSeek;
static SLMuteSoloItf fdPlayerMuteSolo;
static SLVolumeItf fdPlayerVolume;

// recorder interfaces
static SLObjectItf recorderObject = NULL;
static SLRecordItf recorderRecord;
static SLAndroidSimpleBufferQueueItf recorderBufferQueue;

// synthesized sawtooth clip
#define SAWTOOTH_FRAMES 8000
static short sawtoothBuffer[SAWTOOTH_FRAMES];

// 5 seconds of recorded audio at 16 kHz mono, 16-bit signed little endian
#define RECORDER_FRAMES (16000 * 5)
static short recorderBuffer[RECORDER_FRAMES];
static unsigned recorderSize = 0;
static SLmilliHertz recorderSR;

// pointer and size of the next player buffer to enqueue, and number of remaining buffers
static short *nextBuffer;
static unsigned nextSize;
static int nextCount;


// synthesize a mono sawtooth wave and place it into a buffer (called automatically on load)
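// each 100-frame period steps down by 660 per frame, spanning roughly the full 16-bit range,
// so at the 8 kHz playback rate this produces an approximately 80 Hz sawtooth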
__attribute__((constructor)) static void onDlOpen(void)
{
    unsigned i;
    for (i = 0; i < SAWTOOTH_FRAMES; ++i) {
        sawtoothBuffer[i] = 32768 - ((i % 100) * 660);
    }
}


// this callback handler is called every time a buffer finishes playing
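// note: buffer queue callbacks are invoked on an internal OpenSL ES thread,
// so they should be non-blocking and must not assume they run on the UI thread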
void bqPlayerCallback(SLAndroidSimpleBufferQueueItf bq, void *context)
{
    assert(bq == bqPlayerBufferQueue);
    assert(NULL == context);
    // for streaming playback, replace this test by logic to find and fill the next buffer
    if (--nextCount > 0 && NULL != nextBuffer && 0 != nextSize) {
        SLresult result;
        // enqueue another buffer
        result = (*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, nextBuffer, nextSize);
        // the most likely other result is SL_RESULT_BUFFER_INSUFFICIENT,
        // which for this code example would indicate a programming error
        assert(SL_RESULT_SUCCESS == result);
        (void)result;
    }
}


// this callback handler is called every time a buffer finishes recording
void bqRecorderCallback(SLAndroidSimpleBufferQueueItf bq, void *context)
{
    assert(bq == recorderBufferQueue);
    assert(NULL == context);
    // for streaming recording, here we would call Enqueue to give recorder the next buffer to fill
    // but instead, this is a one-time buffer so we stop recording
    SLresult result;
    result = (*recorderRecord)->SetRecordState(recorderRecord, SL_RECORDSTATE_STOPPED);
    if (SL_RESULT_SUCCESS == result) {
        recorderSize = RECORDER_FRAMES * sizeof(short);
        recorderSR = SL_SAMPLINGRATE_16;
    }
}


// create the engine and output mix objects
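// OpenSL ES objects are created in an unrealized state; Realize(SL_BOOLEAN_FALSE) allocates
// their resources synchronously, and only then can interfaces be obtained with GetInterface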
void Java_com_example_nativeaudio_NativeAudio_createEngine(JNIEnv* env, jclass clazz)
{
    SLresult result;

    // create engine
    result = slCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // realize the engine
    result = (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // get the engine interface, which is needed in order to create other objects
    result = (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // create output mix, with environmental reverb specified as a non-required interface
    const SLInterfaceID ids[1] = {SL_IID_ENVIRONMENTALREVERB};
    const SLboolean req[1] = {SL_BOOLEAN_FALSE};
    result = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 1, ids, req);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // realize the output mix
    result = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // get the environmental reverb interface
    // this could fail if the environmental reverb effect is not available,
    // either because the feature is not present, the CPU load is excessive, or
    // the required MODIFY_AUDIO_SETTINGS permission was not requested and granted
    result = (*outputMixObject)->GetInterface(outputMixObject, SL_IID_ENVIRONMENTALREVERB,
            &outputMixEnvironmentalReverb);
    if (SL_RESULT_SUCCESS == result) {
        result = (*outputMixEnvironmentalReverb)->SetEnvironmentalReverbProperties(
                outputMixEnvironmentalReverb, &reverbSettings);
        (void)result;
    }
    // ignore unsuccessful result codes for environmental reverb, as it is optional for this example

}


// create buffer queue audio player
void Java_com_example_nativeaudio_NativeAudio_createBufferQueueAudioPlayer(JNIEnv* env,
        jclass clazz)
{
    SLresult result;

    // configure audio source
    SLDataLocator_AndroidSimpleBufferQueue loc_bufq = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
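    // SLDataFormat_PCM fields, in order: format type, channel count, sample rate (expressed in
    // milliHertz, so SL_SAMPLINGRATE_8 is 8000000), bits per sample, container size,
    // channel mask, and byte order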
    SLDataFormat_PCM format_pcm = {SL_DATAFORMAT_PCM, 1, SL_SAMPLINGRATE_8,
        SL_PCMSAMPLEFORMAT_FIXED_16, SL_PCMSAMPLEFORMAT_FIXED_16,
        SL_SPEAKER_FRONT_CENTER, SL_BYTEORDER_LITTLEENDIAN};
    SLDataSource audioSrc = {&loc_bufq, &format_pcm};

    // configure audio sink
    SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
    SLDataSink audioSnk = {&loc_outmix, NULL};

    // create audio player
    const SLInterfaceID ids[3] = {SL_IID_BUFFERQUEUE, SL_IID_EFFECTSEND,
            /*SL_IID_MUTESOLO,*/ SL_IID_VOLUME};
    const SLboolean req[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE,
            /*SL_BOOLEAN_TRUE,*/ SL_BOOLEAN_TRUE};
    result = (*engineEngine)->CreateAudioPlayer(engineEngine, &bqPlayerObject, &audioSrc, &audioSnk,
            3, ids, req);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // realize the player
    result = (*bqPlayerObject)->Realize(bqPlayerObject, SL_BOOLEAN_FALSE);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // get the play interface
    result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_PLAY, &bqPlayerPlay);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // get the buffer queue interface
    result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_BUFFERQUEUE,
            &bqPlayerBufferQueue);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // register callback on the buffer queue
    result = (*bqPlayerBufferQueue)->RegisterCallback(bqPlayerBufferQueue, bqPlayerCallback, NULL);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // get the effect send interface
    result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_EFFECTSEND,
            &bqPlayerEffectSend);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

#if 0   // mute/solo is not supported for sources that are known to be mono, as this one is
    // get the mute/solo interface
    result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_MUTESOLO, &bqPlayerMuteSolo);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;
#endif

    // get the volume interface
    result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_VOLUME, &bqPlayerVolume);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // set the player's state to playing
    result = (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_PLAYING);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;
}


// create URI audio player
jboolean Java_com_example_nativeaudio_NativeAudio_createUriAudioPlayer(JNIEnv* env, jclass clazz,
        jstring uri)
{
    SLresult result;

    // convert Java string to UTF-8
    const char *utf8 = (*env)->GetStringUTFChars(env, uri, NULL);
    assert(NULL != utf8);

    // configure audio source
    // (requires the INTERNET permission depending on the uri parameter)
    SLDataLocator_URI loc_uri = {SL_DATALOCATOR_URI, (SLchar *) utf8};
    SLDataFormat_MIME format_mime = {SL_DATAFORMAT_MIME, NULL, SL_CONTAINERTYPE_UNSPECIFIED};
    SLDataSource audioSrc = {&loc_uri, &format_mime};

    // configure audio sink
    SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
    SLDataSink audioSnk = {&loc_outmix, NULL};

    // create audio player
    const SLInterfaceID ids[3] = {SL_IID_SEEK, SL_IID_MUTESOLO, SL_IID_VOLUME};
    const SLboolean req[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
    result = (*engineEngine)->CreateAudioPlayer(engineEngine, &uriPlayerObject, &audioSrc,
            &audioSnk, 3, ids, req);
    // note that an invalid URI is not detected here, but during prepare/prefetch on Android,
    // or possibly during Realize on other platforms
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // release the Java string and UTF-8
    (*env)->ReleaseStringUTFChars(env, uri, utf8);

    // realize the player
    result = (*uriPlayerObject)->Realize(uriPlayerObject, SL_BOOLEAN_FALSE);
    // this will always succeed on Android, but we check result for portability to other platforms
    if (SL_RESULT_SUCCESS != result) {
        (*uriPlayerObject)->Destroy(uriPlayerObject);
        uriPlayerObject = NULL;
        return JNI_FALSE;
    }

    // get the play interface
    result = (*uriPlayerObject)->GetInterface(uriPlayerObject, SL_IID_PLAY, &uriPlayerPlay);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // get the seek interface
    result = (*uriPlayerObject)->GetInterface(uriPlayerObject, SL_IID_SEEK, &uriPlayerSeek);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // get the mute/solo interface
    result = (*uriPlayerObject)->GetInterface(uriPlayerObject, SL_IID_MUTESOLO, &uriPlayerMuteSolo);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // get the volume interface
    result = (*uriPlayerObject)->GetInterface(uriPlayerObject, SL_IID_VOLUME, &uriPlayerVolume);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    return JNI_TRUE;
}


// set the playing state for the URI audio player
// to PLAYING (true) or PAUSED (false)
void Java_com_example_nativeaudio_NativeAudio_setPlayingUriAudioPlayer(JNIEnv* env,
        jclass clazz, jboolean isPlaying)
{
    SLresult result;

    // make sure the URI audio player was created
    if (NULL != uriPlayerPlay) {

        // set the player's state
        result = (*uriPlayerPlay)->SetPlayState(uriPlayerPlay, isPlaying ?
                SL_PLAYSTATE_PLAYING : SL_PLAYSTATE_PAUSED);
        assert(SL_RESULT_SUCCESS == result);
        (void)result;
    }

}


// set the whole file looping state for the URI audio player
void Java_com_example_nativeaudio_NativeAudio_setLoopingUriAudioPlayer(JNIEnv* env,
        jclass clazz, jboolean isLooping)
{
    SLresult result;

    // make sure the URI audio player was created
    if (NULL != uriPlayerSeek) {

        // set the looping state
        result = (*uriPlayerSeek)->SetLoop(uriPlayerSeek, (SLboolean) isLooping, 0,
                SL_TIME_UNKNOWN);
        assert(SL_RESULT_SUCCESS == result);
        (void)result;
    }

}


// expose the mute/solo APIs to Java for one of the 3 players

static SLMuteSoloItf getMuteSolo()
{
    if (uriPlayerMuteSolo != NULL)
        return uriPlayerMuteSolo;
    else if (fdPlayerMuteSolo != NULL)
        return fdPlayerMuteSolo;
    else
        return bqPlayerMuteSolo;
}

void Java_com_example_nativeaudio_NativeAudio_setChannelMuteUriAudioPlayer(JNIEnv* env,
        jclass clazz, jint chan, jboolean mute)
{
    SLresult result;
    SLMuteSoloItf muteSoloItf = getMuteSolo();
    if (NULL != muteSoloItf) {
        result = (*muteSoloItf)->SetChannelMute(muteSoloItf, chan, mute);
        assert(SL_RESULT_SUCCESS == result);
        (void)result;
    }
}

void Java_com_example_nativeaudio_NativeAudio_setChannelSoloUriAudioPlayer(JNIEnv* env,
        jclass clazz, jint chan, jboolean solo)
{
    SLresult result;
    SLMuteSoloItf muteSoloItf = getMuteSolo();
    if (NULL != muteSoloItf) {
        result = (*muteSoloItf)->SetChannelSolo(muteSoloItf, chan, solo);
        assert(SL_RESULT_SUCCESS == result);
        (void)result;
    }
}

int Java_com_example_nativeaudio_NativeAudio_getNumChannelsUriAudioPlayer(JNIEnv* env, jclass clazz)
{
    SLuint8 numChannels;
    SLresult result;
    SLMuteSoloItf muteSoloItf = getMuteSolo();
    if (NULL != muteSoloItf) {
        result = (*muteSoloItf)->GetNumChannels(muteSoloItf, &numChannels);
        if (SL_RESULT_PRECONDITIONS_VIOLATED == result) {
            // channel count is not yet known
            numChannels = 0;
        } else {
            assert(SL_RESULT_SUCCESS == result);
        }
    } else {
        numChannels = 0;
    }
    return numChannels;
}

// expose the volume APIs to Java for one of the 3 players

static SLVolumeItf getVolume()
{
    if (uriPlayerVolume != NULL)
        return uriPlayerVolume;
    else if (fdPlayerVolume != NULL)
        return fdPlayerVolume;
    else
        return bqPlayerVolume;
}

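// volume is expressed in millibels (1/100 dB); 0 mB is nominal level and negative values attenuate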
void Java_com_example_nativeaudio_NativeAudio_setVolumeUriAudioPlayer(JNIEnv* env, jclass clazz,
        jint millibel)
{
    SLresult result;
    SLVolumeItf volumeItf = getVolume();
    if (NULL != volumeItf) {
        result = (*volumeItf)->SetVolumeLevel(volumeItf, millibel);
        assert(SL_RESULT_SUCCESS == result);
        (void)result;
    }
}

void Java_com_example_nativeaudio_NativeAudio_setMuteUriAudioPlayer(JNIEnv* env, jclass clazz,
        jboolean mute)
{
    SLresult result;
    SLVolumeItf volumeItf = getVolume();
    if (NULL != volumeItf) {
        result = (*volumeItf)->SetMute(volumeItf, mute);
        assert(SL_RESULT_SUCCESS == result);
        (void)result;
    }
}

void Java_com_example_nativeaudio_NativeAudio_enableStereoPositionUriAudioPlayer(JNIEnv* env,
        jclass clazz, jboolean enable)
{
    SLresult result;
    SLVolumeItf volumeItf = getVolume();
    if (NULL != volumeItf) {
        result = (*volumeItf)->EnableStereoPosition(volumeItf, enable);
        assert(SL_RESULT_SUCCESS == result);
        (void)result;
    }
}

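// stereo position is expressed in permille of full range: -1000 is full left, 0 is center,
// and 1000 is full right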
void Java_com_example_nativeaudio_NativeAudio_setStereoPositionUriAudioPlayer(JNIEnv* env,
        jclass clazz, jint permille)
{
    SLresult result;
    SLVolumeItf volumeItf = getVolume();
    if (NULL != volumeItf) {
        result = (*volumeItf)->SetStereoPosition(volumeItf, permille);
        assert(SL_RESULT_SUCCESS == result);
        (void)result;
    }
}

// enable reverb on the buffer queue player
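// the last EnableEffectSend parameter is the initial send level in millibels; 0 mB sends at nominal level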
jboolean Java_com_example_nativeaudio_NativeAudio_enableReverb(JNIEnv* env, jclass clazz,
        jboolean enabled)
{
    SLresult result;

    // we might not have been able to add environmental reverb to the output mix
    if (NULL == outputMixEnvironmentalReverb) {
        return JNI_FALSE;
    }

    result = (*bqPlayerEffectSend)->EnableEffectSend(bqPlayerEffectSend,
            outputMixEnvironmentalReverb, (SLboolean) enabled, (SLmillibel) 0);
    // and even if environmental reverb was present, it might no longer be available
    if (SL_RESULT_SUCCESS != result) {
        return JNI_FALSE;
    }

    return JNI_TRUE;
}


// select the desired clip and play count, and enqueue the first buffer if idle
jboolean Java_com_example_nativeaudio_NativeAudio_selectClip(JNIEnv* env, jclass clazz, jint which,
        jint count)
{
    switch (which) {
    case 0:     // CLIP_NONE
        nextBuffer = (short *) NULL;
        nextSize = 0;
        break;
    case 1:     // CLIP_HELLO
        nextBuffer = (short *) hello;
        nextSize = sizeof(hello);
        break;
    case 2:     // CLIP_ANDROID
        nextBuffer = (short *) android;
        nextSize = sizeof(android);
        break;
    case 3:     // CLIP_SAWTOOTH
        nextBuffer = sawtoothBuffer;
        nextSize = sizeof(sawtoothBuffer);
        break;
    case 4:     // CLIP_PLAYBACK
        // we recorded at 16 kHz, but are playing buffers at 8 kHz, so do a primitive down-sample
        if (recorderSR == SL_SAMPLINGRATE_16) {
            unsigned i;
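            // recorderSize and i are byte offsets; i >> 1 is the source frame index and
            // i >> 2 the destination, so the loop keeps every other 16-bit frame in place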
            for (i = 0; i < recorderSize; i += 2 * sizeof(short)) {
                recorderBuffer[i >> 2] = recorderBuffer[i >> 1];
            }
            recorderSR = SL_SAMPLINGRATE_8;
            recorderSize >>= 1;
        }
        nextBuffer = recorderBuffer;
        nextSize = recorderSize;
        break;
    default:
        nextBuffer = NULL;
        nextSize = 0;
        break;
    }
    nextCount = count;
    if (nextSize > 0) {
        // here we only enqueue one buffer because it is a long clip,
        // but for streaming playback we would typically enqueue at least 2 buffers to start
        SLresult result;
        result = (*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, nextBuffer, nextSize);
        if (SL_RESULT_SUCCESS != result) {
            return JNI_FALSE;
        }
    }

    return JNI_TRUE;
}


// create asset audio player
jboolean Java_com_example_nativeaudio_NativeAudio_createAssetAudioPlayer(JNIEnv* env, jclass clazz,
        jobject assetManager, jstring filename)
{
    SLresult result;

    // convert Java string to UTF-8
    const char *utf8 = (*env)->GetStringUTFChars(env, filename, NULL);
    assert(NULL != utf8);

    // use asset manager to open asset by filename
    AAssetManager* mgr = AAssetManager_fromJava(env, assetManager);
    assert(NULL != mgr);
    AAsset* asset = AAssetManager_open(mgr, utf8, AASSET_MODE_UNKNOWN);

    // release the Java string and UTF-8
    (*env)->ReleaseStringUTFChars(env, filename, utf8);

    // the asset might not be found
    if (NULL == asset) {
        return JNI_FALSE;
    }

    // open asset as file descriptor
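    // AAsset_openFileDescriptor returns a new file descriptor, typically onto the containing APK;
    // start and length give the asset's offset and size within that file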
    off_t start, length;
    int fd = AAsset_openFileDescriptor(asset, &start, &length);
    assert(0 <= fd);
    AAsset_close(asset);

    // configure audio source
    SLDataLocator_AndroidFD loc_fd = {SL_DATALOCATOR_ANDROIDFD, fd, start, length};
    SLDataFormat_MIME format_mime = {SL_DATAFORMAT_MIME, NULL, SL_CONTAINERTYPE_UNSPECIFIED};
    SLDataSource audioSrc = {&loc_fd, &format_mime};

    // configure audio sink
    SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
    SLDataSink audioSnk = {&loc_outmix, NULL};

    // create audio player
    const SLInterfaceID ids[3] = {SL_IID_SEEK, SL_IID_MUTESOLO, SL_IID_VOLUME};
    const SLboolean req[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
    result = (*engineEngine)->CreateAudioPlayer(engineEngine, &fdPlayerObject, &audioSrc, &audioSnk,
            3, ids, req);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // realize the player
    result = (*fdPlayerObject)->Realize(fdPlayerObject, SL_BOOLEAN_FALSE);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // get the play interface
    result = (*fdPlayerObject)->GetInterface(fdPlayerObject, SL_IID_PLAY, &fdPlayerPlay);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // get the seek interface
    result = (*fdPlayerObject)->GetInterface(fdPlayerObject, SL_IID_SEEK, &fdPlayerSeek);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // get the mute/solo interface
    result = (*fdPlayerObject)->GetInterface(fdPlayerObject, SL_IID_MUTESOLO, &fdPlayerMuteSolo);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // get the volume interface
    result = (*fdPlayerObject)->GetInterface(fdPlayerObject, SL_IID_VOLUME, &fdPlayerVolume);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // enable whole file looping
    result = (*fdPlayerSeek)->SetLoop(fdPlayerSeek, SL_BOOLEAN_TRUE, 0, SL_TIME_UNKNOWN);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    return JNI_TRUE;
}


// set the playing state for the asset audio player
void Java_com_example_nativeaudio_NativeAudio_setPlayingAssetAudioPlayer(JNIEnv* env,
        jclass clazz, jboolean isPlaying)
{
    SLresult result;

    // make sure the asset audio player was created
    if (NULL != fdPlayerPlay) {

        // set the player's state
        result = (*fdPlayerPlay)->SetPlayState(fdPlayerPlay, isPlaying ?
                SL_PLAYSTATE_PLAYING : SL_PLAYSTATE_PAUSED);
        assert(SL_RESULT_SUCCESS == result);
        (void)result;
    }

}


// create audio recorder
jboolean Java_com_example_nativeaudio_NativeAudio_createAudioRecorder(JNIEnv* env, jclass clazz)
{
    SLresult result;

    // configure audio source
    SLDataLocator_IODevice loc_dev = {SL_DATALOCATOR_IODEVICE, SL_IODEVICE_AUDIOINPUT,
            SL_DEFAULTDEVICEID_AUDIOINPUT, NULL};
    SLDataSource audioSrc = {&loc_dev, NULL};

    // configure audio sink
    SLDataLocator_AndroidSimpleBufferQueue loc_bq = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
    SLDataFormat_PCM format_pcm = {SL_DATAFORMAT_PCM, 1, SL_SAMPLINGRATE_16,
        SL_PCMSAMPLEFORMAT_FIXED_16, SL_PCMSAMPLEFORMAT_FIXED_16,
        SL_SPEAKER_FRONT_CENTER, SL_BYTEORDER_LITTLEENDIAN};
    SLDataSink audioSnk = {&loc_bq, &format_pcm};

    // create audio recorder
    // (requires the RECORD_AUDIO permission)
    const SLInterfaceID id[1] = {SL_IID_ANDROIDSIMPLEBUFFERQUEUE};
    const SLboolean req[1] = {SL_BOOLEAN_TRUE};
    result = (*engineEngine)->CreateAudioRecorder(engineEngine, &recorderObject, &audioSrc,
            &audioSnk, 1, id, req);
    if (SL_RESULT_SUCCESS != result) {
        return JNI_FALSE;
    }

    // realize the audio recorder
    result = (*recorderObject)->Realize(recorderObject, SL_BOOLEAN_FALSE);
    if (SL_RESULT_SUCCESS != result) {
        return JNI_FALSE;
    }

    // get the record interface
    result = (*recorderObject)->GetInterface(recorderObject, SL_IID_RECORD, &recorderRecord);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // get the buffer queue interface
    result = (*recorderObject)->GetInterface(recorderObject, SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
            &recorderBufferQueue);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // register callback on the buffer queue
    result = (*recorderBufferQueue)->RegisterCallback(recorderBufferQueue, bqRecorderCallback,
            NULL);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    return JNI_TRUE;
}


// set the recording state for the audio recorder
void Java_com_example_nativeaudio_NativeAudio_startRecording(JNIEnv* env, jclass clazz)
{
    SLresult result;

    // in case already recording, stop recording and clear buffer queue
    result = (*recorderRecord)->SetRecordState(recorderRecord, SL_RECORDSTATE_STOPPED);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;
    result = (*recorderBufferQueue)->Clear(recorderBufferQueue);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // the buffer is not valid for playback yet
    recorderSize = 0;

    // enqueue an empty buffer to be filled by the recorder
    // (for streaming recording, we would enqueue at least 2 empty buffers to start things off)
    result = (*recorderBufferQueue)->Enqueue(recorderBufferQueue, recorderBuffer,
            RECORDER_FRAMES * sizeof(short));
    // the most likely other result is SL_RESULT_BUFFER_INSUFFICIENT,
    // which for this code example would indicate a programming error
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // start recording
    result = (*recorderRecord)->SetRecordState(recorderRecord, SL_RECORDSTATE_RECORDING);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;
}


// shut down the native audio system
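// Destroy() releases an object regardless of its state; afterwards none of its interfaces may be
// used, which is why each interface pointer is also set to NULL below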
void Java_com_example_nativeaudio_NativeAudio_shutdown(JNIEnv* env, jclass clazz)
{

    // destroy buffer queue audio player object, and invalidate all associated interfaces
    if (bqPlayerObject != NULL) {
        (*bqPlayerObject)->Destroy(bqPlayerObject);
        bqPlayerObject = NULL;
        bqPlayerPlay = NULL;
        bqPlayerBufferQueue = NULL;
        bqPlayerEffectSend = NULL;
        bqPlayerMuteSolo = NULL;
        bqPlayerVolume = NULL;
    }

    // destroy file descriptor audio player object, and invalidate all associated interfaces
    if (fdPlayerObject != NULL) {
        (*fdPlayerObject)->Destroy(fdPlayerObject);
        fdPlayerObject = NULL;
        fdPlayerPlay = NULL;
        fdPlayerSeek = NULL;
        fdPlayerMuteSolo = NULL;
        fdPlayerVolume = NULL;
    }

    // destroy URI audio player object, and invalidate all associated interfaces
    if (uriPlayerObject != NULL) {
        (*uriPlayerObject)->Destroy(uriPlayerObject);
        uriPlayerObject = NULL;
        uriPlayerPlay = NULL;
        uriPlayerSeek = NULL;
        uriPlayerMuteSolo = NULL;
        uriPlayerVolume = NULL;
    }

    // destroy audio recorder object, and invalidate all associated interfaces
    if (recorderObject != NULL) {
        (*recorderObject)->Destroy(recorderObject);
        recorderObject = NULL;
        recorderRecord = NULL;
        recorderBufferQueue = NULL;
    }

    // destroy output mix object, and invalidate all associated interfaces
    if (outputMixObject != NULL) {
        (*outputMixObject)->Destroy(outputMixObject);
        outputMixObject = NULL;
        outputMixEnvironmentalReverb = NULL;
    }

    // destroy engine object, and invalidate all associated interfaces
    if (engineObject != NULL) {
        (*engineObject)->Destroy(engineObject);
        engineObject = NULL;
        engineEngine = NULL;
    }

}