1 /*
2 * Copyright (C) 2008 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16 //#define LOG_NDEBUG 0
17
18 #define LOG_TAG "AudioTrack-JNI"
19
20 #include "android_media_AudioTrack.h"
21
22 #include <nativehelper/JNIHelp.h>
23 #include <nativehelper/ScopedUtfChars.h>
24 #include "core_jni_helpers.h"
25
26 #include <utils/Log.h>
27 #include <media/AudioParameter.h>
28 #include <media/AudioSystem.h>
29 #include <media/AudioTrack.h>
30
31 #include <android-base/macros.h>
32 #include <binder/MemoryHeapBase.h>
33 #include <binder/MemoryBase.h>
34
35 #include "android_media_AudioAttributes.h"
36 #include "android_media_AudioErrors.h"
37 #include "android_media_AudioFormat.h"
38 #include "android_media_AudioTrackCallback.h"
39 #include "android_media_DeviceCallback.h"
40 #include "android_media_MediaMetricsJNI.h"
41 #include "android_media_PlaybackParams.h"
42 #include "android_media_VolumeShaper.h"
43
44 #include <cinttypes>
45
46 // ----------------------------------------------------------------------------
47
48 using namespace android;
49
50 using ::android::media::VolumeShaper;
51
52 // ----------------------------------------------------------------------------
53 static const char* const kClassPathName = "android/media/AudioTrack";
54
55 struct audio_track_fields_t {
56 // these fields provide access from C++ to the...
57 jmethodID postNativeEventInJava; //... event post callback method
58 jfieldID nativeTrackInJavaObj; // stores in Java the native AudioTrack object
59 jfieldID jniData; // stores in Java additional resources used by the native AudioTrack
60 jfieldID fieldStreamType; // ... mStreamType field in the AudioTrack Java object
61 };
62 static audio_track_fields_t javaAudioTrackFields;
63 static PlaybackParams::fields_t gPlaybackParamsFields;
64 static VolumeShaperHelper::fields_t gVolumeShaperFields;
65
66 struct audiotrack_callback_cookie {
67 jclass audioTrack_class;
68 jobject audioTrack_ref;
69 bool busy;
70 Condition cond;
71 bool isOffload;
72 };
73
74 // keep these values in sync with AudioTrack.java
75 #define MODE_STATIC 0
76 #define MODE_STREAM 1
77
78 // ----------------------------------------------------------------------------
79 class AudioTrackJniStorage {
80 public:
81 sp<MemoryHeapBase> mMemHeap;
82 sp<MemoryBase> mMemBase;
83 audiotrack_callback_cookie mCallbackData{};
84 sp<JNIDeviceCallback> mDeviceCallback;
85 sp<JNIAudioTrackCallback> mAudioTrackCallback;
86
allocSharedMem(int sizeInBytes)87 bool allocSharedMem(int sizeInBytes) {
88 mMemHeap = new MemoryHeapBase(sizeInBytes, 0, "AudioTrack Heap Base");
89 if (mMemHeap->getHeapID() < 0) {
90 return false;
91 }
92 mMemBase = new MemoryBase(mMemHeap, 0, sizeInBytes);
93 return true;
94 }
95 };
96
97 class TunerConfigurationHelper {
98 JNIEnv *const mEnv;
99 jobject const mTunerConfiguration;
100
101 struct Ids {
IdsTunerConfigurationHelper::Ids102 Ids(JNIEnv *env)
103 : mClass(FindClassOrDie(env, "android/media/AudioTrack$TunerConfiguration")),
104 mContentId(GetFieldIDOrDie(env, mClass, "mContentId", "I")),
105 mSyncId(GetFieldIDOrDie(env, mClass, "mSyncId", "I")) {}
106 const jclass mClass;
107 const jfieldID mContentId;
108 const jfieldID mSyncId;
109 };
110
getIds(JNIEnv * env)111 static const Ids &getIds(JNIEnv *env) {
112 // Meyer's singleton, initializes first time control passes through
113 // declaration in a block and is thread-safe per ISO/IEC 14882:2011 6.7.4.
114 static Ids ids(env);
115 return ids;
116 }
117
118 public:
TunerConfigurationHelper(JNIEnv * env,jobject tunerConfiguration)119 TunerConfigurationHelper(JNIEnv *env, jobject tunerConfiguration)
120 : mEnv(env), mTunerConfiguration(tunerConfiguration) {}
121
getContentId() const122 int32_t getContentId() const {
123 if (mEnv == nullptr || mTunerConfiguration == nullptr) return 0;
124 const Ids &ids = getIds(mEnv);
125 return (int32_t)mEnv->GetIntField(mTunerConfiguration, ids.mContentId);
126 }
127
getSyncId() const128 int32_t getSyncId() const {
129 if (mEnv == nullptr || mTunerConfiguration == nullptr) return 0;
130 const Ids &ids = getIds(mEnv);
131 return (int32_t)mEnv->GetIntField(mTunerConfiguration, ids.mSyncId);
132 }
133
134 // optional check to confirm class and field ids can be found.
initCheckOrDie(JNIEnv * env)135 static void initCheckOrDie(JNIEnv *env) { (void)getIds(env); }
136 };
137
138 static Mutex sLock;
139 static SortedVector <audiotrack_callback_cookie *> sAudioTrackCallBackCookies;
140
141 // ----------------------------------------------------------------------------
142 #define DEFAULT_OUTPUT_SAMPLE_RATE 44100
143
144 #define AUDIOTRACK_ERROR_SETUP_AUDIOSYSTEM (-16)
145 #define AUDIOTRACK_ERROR_SETUP_INVALIDCHANNELMASK (-17)
146 #define AUDIOTRACK_ERROR_SETUP_INVALIDFORMAT (-18)
147 #define AUDIOTRACK_ERROR_SETUP_INVALIDSTREAMTYPE (-19)
148 #define AUDIOTRACK_ERROR_SETUP_NATIVEINITFAILED (-20)
149
150 // ----------------------------------------------------------------------------
audioCallback(int event,void * user,void * info)151 static void audioCallback(int event, void* user, void *info) {
152
153 audiotrack_callback_cookie *callbackInfo = (audiotrack_callback_cookie *)user;
154 {
155 Mutex::Autolock l(sLock);
156 if (sAudioTrackCallBackCookies.indexOf(callbackInfo) < 0) {
157 return;
158 }
159 callbackInfo->busy = true;
160 }
161
162 // used as default argument when event callback doesn't have any, or number of
163 // frames for EVENT_CAN_WRITE_MORE_DATA
164 int arg = 0;
165 bool postEvent = false;
166 switch (event) {
167 // Offload only events
168 case AudioTrack::EVENT_CAN_WRITE_MORE_DATA:
169 // this event will read the info return parameter of the callback:
170 // for JNI offload, use the returned size to indicate:
171 // 1/ no data is returned through callback, as it's all done through write()
172 // 2/ do not wait as AudioTrack does when it receives 0 bytes
173 if (callbackInfo->isOffload) {
174 AudioTrack::Buffer* pBuffer = (AudioTrack::Buffer*) info;
175 const size_t availableForWrite = pBuffer->size;
176 arg = availableForWrite > INT32_MAX ? INT32_MAX : (int) availableForWrite;
177 pBuffer->size = 0;
178 }
179 FALLTHROUGH_INTENDED;
180 case AudioTrack::EVENT_STREAM_END:
181 case AudioTrack::EVENT_NEW_IAUDIOTRACK: // a.k.a. tear down
182 if (callbackInfo->isOffload) {
183 postEvent = true;
184 }
185 break;
186
187 // PCM and offload events
188 case AudioTrack::EVENT_MARKER:
189 case AudioTrack::EVENT_NEW_POS:
190 postEvent = true;
191 break;
192 default:
193 // event will not be posted
194 break;
195 }
196
197 if (postEvent) {
198 JNIEnv *env = AndroidRuntime::getJNIEnv();
199 if (env != NULL) {
200 env->CallStaticVoidMethod(
201 callbackInfo->audioTrack_class,
202 javaAudioTrackFields.postNativeEventInJava,
203 callbackInfo->audioTrack_ref, event, arg, 0, NULL);
204 if (env->ExceptionCheck()) {
205 env->ExceptionDescribe();
206 env->ExceptionClear();
207 }
208 }
209 }
210
211 {
212 Mutex::Autolock l(sLock);
213 callbackInfo->busy = false;
214 callbackInfo->cond.broadcast();
215 }
216 }
217
218
219 // ----------------------------------------------------------------------------
getAudioTrack(JNIEnv * env,jobject thiz)220 static sp<AudioTrack> getAudioTrack(JNIEnv* env, jobject thiz)
221 {
222 Mutex::Autolock l(sLock);
223 AudioTrack* const at =
224 (AudioTrack*)env->GetLongField(thiz, javaAudioTrackFields.nativeTrackInJavaObj);
225 return sp<AudioTrack>(at);
226 }
227
setAudioTrack(JNIEnv * env,jobject thiz,const sp<AudioTrack> & at)228 static sp<AudioTrack> setAudioTrack(JNIEnv* env, jobject thiz, const sp<AudioTrack>& at)
229 {
230 Mutex::Autolock l(sLock);
231 sp<AudioTrack> old =
232 (AudioTrack*)env->GetLongField(thiz, javaAudioTrackFields.nativeTrackInJavaObj);
233 if (at.get()) {
234 at->incStrong((void*)setAudioTrack);
235 }
236 if (old != 0) {
237 old->decStrong((void*)setAudioTrack);
238 }
239 env->SetLongField(thiz, javaAudioTrackFields.nativeTrackInJavaObj, (jlong)at.get());
240 return old;
241 }
242
243 // ----------------------------------------------------------------------------
android_media_AudioTrack_getAudioTrack(JNIEnv * env,jobject audioTrackObj)244 sp<AudioTrack> android_media_AudioTrack_getAudioTrack(JNIEnv* env, jobject audioTrackObj) {
245 return getAudioTrack(env, audioTrackObj);
246 }
247
248 // ----------------------------------------------------------------------------
android_media_AudioTrack_setup(JNIEnv * env,jobject thiz,jobject weak_this,jobject jaa,jintArray jSampleRate,jint channelPositionMask,jint channelIndexMask,jint audioFormat,jint buffSizeInBytes,jint memoryMode,jintArray jSession,jlong nativeAudioTrack,jboolean offload,jint encapsulationMode,jobject tunerConfiguration,jstring opPackageName)249 static jint android_media_AudioTrack_setup(JNIEnv *env, jobject thiz, jobject weak_this,
250 jobject jaa, jintArray jSampleRate,
251 jint channelPositionMask, jint channelIndexMask,
252 jint audioFormat, jint buffSizeInBytes, jint memoryMode,
253 jintArray jSession, jlong nativeAudioTrack,
254 jboolean offload, jint encapsulationMode,
255 jobject tunerConfiguration, jstring opPackageName) {
256 ALOGV("sampleRates=%p, channel mask=%x, index mask=%x, audioFormat(Java)=%d, buffSize=%d,"
257 " nativeAudioTrack=0x%" PRIX64 ", offload=%d encapsulationMode=%d tuner=%p",
258 jSampleRate, channelPositionMask, channelIndexMask, audioFormat, buffSizeInBytes,
259 nativeAudioTrack, offload, encapsulationMode, tunerConfiguration);
260
261 if (jSession == NULL) {
262 ALOGE("Error creating AudioTrack: invalid session ID pointer");
263 return (jint) AUDIO_JAVA_ERROR;
264 }
265
266 const TunerConfigurationHelper tunerHelper(env, tunerConfiguration);
267
268 jint* nSession = (jint *) env->GetPrimitiveArrayCritical(jSession, NULL);
269 if (nSession == NULL) {
270 ALOGE("Error creating AudioTrack: Error retrieving session id pointer");
271 return (jint) AUDIO_JAVA_ERROR;
272 }
273 audio_session_t sessionId = (audio_session_t) nSession[0];
274 env->ReleasePrimitiveArrayCritical(jSession, nSession, 0);
275 nSession = NULL;
276
277 AudioTrackJniStorage* lpJniStorage = NULL;
278
279 jclass clazz = env->GetObjectClass(thiz);
280 if (clazz == NULL) {
281 ALOGE("Can't find %s when setting up callback.", kClassPathName);
282 return (jint) AUDIOTRACK_ERROR_SETUP_NATIVEINITFAILED;
283 }
284
285 // if we pass in an existing *Native* AudioTrack, we don't need to create/initialize one.
286 sp<AudioTrack> lpTrack;
287 if (nativeAudioTrack == 0) {
288 if (jaa == 0) {
289 ALOGE("Error creating AudioTrack: invalid audio attributes");
290 return (jint) AUDIO_JAVA_ERROR;
291 }
292
293 if (jSampleRate == 0) {
294 ALOGE("Error creating AudioTrack: invalid sample rates");
295 return (jint) AUDIO_JAVA_ERROR;
296 }
297
298 int* sampleRates = env->GetIntArrayElements(jSampleRate, NULL);
299 int sampleRateInHertz = sampleRates[0];
300 env->ReleaseIntArrayElements(jSampleRate, sampleRates, JNI_ABORT);
301
302 // Invalid channel representations are caught by !audio_is_output_channel() below.
303 audio_channel_mask_t nativeChannelMask = nativeChannelMaskFromJavaChannelMasks(
304 channelPositionMask, channelIndexMask);
305 if (!audio_is_output_channel(nativeChannelMask)) {
306 ALOGE("Error creating AudioTrack: invalid native channel mask %#x.", nativeChannelMask);
307 return (jint) AUDIOTRACK_ERROR_SETUP_INVALIDCHANNELMASK;
308 }
309
310 uint32_t channelCount = audio_channel_count_from_out_mask(nativeChannelMask);
311
312 // check the format.
313 // This function was called from Java, so we compare the format against the Java constants
314 audio_format_t format = audioFormatToNative(audioFormat);
315 if (format == AUDIO_FORMAT_INVALID) {
316 ALOGE("Error creating AudioTrack: unsupported audio format %d.", audioFormat);
317 return (jint) AUDIOTRACK_ERROR_SETUP_INVALIDFORMAT;
318 }
319
320 // compute the frame count
321 size_t frameCount;
322 if (audio_has_proportional_frames(format)) {
323 const size_t bytesPerSample = audio_bytes_per_sample(format);
324 frameCount = buffSizeInBytes / (channelCount * bytesPerSample);
325 } else {
326 frameCount = buffSizeInBytes;
327 }
328
329 // create the native AudioTrack object
330 ScopedUtfChars opPackageNameStr(env, opPackageName);
331 // TODO b/182469354: make consistent with AudioRecord
332 AttributionSourceState attributionSource;
333 attributionSource.packageName = std::string(opPackageNameStr.c_str());
334 attributionSource.token = sp<BBinder>::make();
335 lpTrack = new AudioTrack(attributionSource);
336
337 // read the AudioAttributes values
338 auto paa = JNIAudioAttributeHelper::makeUnique();
339 jint jStatus = JNIAudioAttributeHelper::nativeFromJava(env, jaa, paa.get());
340 if (jStatus != (jint)AUDIO_JAVA_SUCCESS) {
341 return jStatus;
342 }
343 ALOGV("AudioTrack_setup for usage=%d content=%d flags=0x%#x tags=%s",
344 paa->usage, paa->content_type, paa->flags, paa->tags);
345
346 // initialize the callback information:
347 // this data will be passed with every AudioTrack callback
348 lpJniStorage = new AudioTrackJniStorage();
349 lpJniStorage->mCallbackData.audioTrack_class = (jclass)env->NewGlobalRef(clazz);
350 // we use a weak reference so the AudioTrack object can be garbage collected.
351 lpJniStorage->mCallbackData.audioTrack_ref = env->NewGlobalRef(weak_this);
352 lpJniStorage->mCallbackData.isOffload = offload;
353 lpJniStorage->mCallbackData.busy = false;
354
355 audio_offload_info_t offloadInfo;
356 if (offload == JNI_TRUE) {
357 offloadInfo = AUDIO_INFO_INITIALIZER;
358 offloadInfo.format = format;
359 offloadInfo.sample_rate = sampleRateInHertz;
360 offloadInfo.channel_mask = nativeChannelMask;
361 offloadInfo.has_video = false;
362 offloadInfo.stream_type = AUDIO_STREAM_MUSIC; //required for offload
363 }
364
365 if (encapsulationMode != 0) {
366 offloadInfo = AUDIO_INFO_INITIALIZER;
367 offloadInfo.format = format;
368 offloadInfo.sample_rate = sampleRateInHertz;
369 offloadInfo.channel_mask = nativeChannelMask;
370 offloadInfo.stream_type = AUDIO_STREAM_MUSIC;
371 offloadInfo.encapsulation_mode =
372 static_cast<audio_encapsulation_mode_t>(encapsulationMode);
373 offloadInfo.content_id = tunerHelper.getContentId();
374 offloadInfo.sync_id = tunerHelper.getSyncId();
375 }
376
377 // initialize the native AudioTrack object
378 status_t status = NO_ERROR;
379 switch (memoryMode) {
380 case MODE_STREAM:
381 status = lpTrack->set(AUDIO_STREAM_DEFAULT, // stream type, but more info conveyed
382 // in paa (last argument)
383 sampleRateInHertz,
384 format, // word length, PCM
385 nativeChannelMask, offload ? 0 : frameCount,
386 offload ? AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD
387 : AUDIO_OUTPUT_FLAG_NONE,
388 audioCallback,
389 &(lpJniStorage->mCallbackData), // callback, callback data (user)
390 0, // notificationFrames == 0 since not using EVENT_MORE_DATA
391 // to feed the AudioTrack
392 0, // shared mem
393 true, // thread can call Java
394 sessionId, // audio session ID
395 offload ? AudioTrack::TRANSFER_SYNC_NOTIF_CALLBACK
396 : AudioTrack::TRANSFER_SYNC,
397 (offload || encapsulationMode) ? &offloadInfo : NULL,
398 AttributionSourceState(), // default uid, pid values
399 paa.get());
400 break;
401
402 case MODE_STATIC:
403 // AudioTrack is using shared memory
404
405 if (!lpJniStorage->allocSharedMem(buffSizeInBytes)) {
406 ALOGE("Error creating AudioTrack in static mode: error creating mem heap base");
407 goto native_init_failure;
408 }
409
410 status = lpTrack->set(AUDIO_STREAM_DEFAULT, // stream type, but more info conveyed
411 // in paa (last argument)
412 sampleRateInHertz,
413 format, // word length, PCM
414 nativeChannelMask, frameCount, AUDIO_OUTPUT_FLAG_NONE,
415 audioCallback,
416 &(lpJniStorage->mCallbackData), // callback, callback data (user)
417 0, // notificationFrames == 0 since not using EVENT_MORE_DATA
418 // to feed the AudioTrack
419 lpJniStorage->mMemBase, // shared mem
420 true, // thread can call Java
421 sessionId, // audio session ID
422 AudioTrack::TRANSFER_SHARED,
423 NULL, // default offloadInfo
424 AttributionSourceState(), // default uid, pid values
425 paa.get());
426 break;
427
428 default:
429 ALOGE("Unknown mode %d", memoryMode);
430 goto native_init_failure;
431 }
432
433 if (status != NO_ERROR) {
434 ALOGE("Error %d initializing AudioTrack", status);
435 goto native_init_failure;
436 }
437 // Set caller name so it can be logged in destructor.
438 // MediaMetricsConstants.h: AMEDIAMETRICS_PROP_CALLERNAME_VALUE_JAVA
439 lpTrack->setCallerName("java");
440 } else { // end if (nativeAudioTrack == 0)
441 lpTrack = (AudioTrack*)nativeAudioTrack;
442 // TODO: We need to find out which members of the Java AudioTrack might
443 // need to be initialized from the Native AudioTrack
444 // these are directly returned from getters:
445 // mSampleRate
446 // mAudioFormat
447 // mStreamType
448 // mChannelConfiguration
449 // mChannelCount
450 // mState (?)
451 // mPlayState (?)
452 // these may be used internally (Java AudioTrack.audioParamCheck():
453 // mChannelMask
454 // mChannelIndexMask
455 // mDataLoadMode
456
457 // initialize the callback information:
458 // this data will be passed with every AudioTrack callback
459 lpJniStorage = new AudioTrackJniStorage();
460 lpJniStorage->mCallbackData.audioTrack_class = (jclass)env->NewGlobalRef(clazz);
461 // we use a weak reference so the AudioTrack object can be garbage collected.
462 lpJniStorage->mCallbackData.audioTrack_ref = env->NewGlobalRef(weak_this);
463 lpJniStorage->mCallbackData.busy = false;
464 }
465 lpJniStorage->mAudioTrackCallback =
466 new JNIAudioTrackCallback(env, thiz, lpJniStorage->mCallbackData.audioTrack_ref,
467 javaAudioTrackFields.postNativeEventInJava);
468 lpTrack->setAudioTrackCallback(lpJniStorage->mAudioTrackCallback);
469
470 nSession = (jint *) env->GetPrimitiveArrayCritical(jSession, NULL);
471 if (nSession == NULL) {
472 ALOGE("Error creating AudioTrack: Error retrieving session id pointer");
473 goto native_init_failure;
474 }
475 // read the audio session ID back from AudioTrack in case we create a new session
476 nSession[0] = lpTrack->getSessionId();
477 env->ReleasePrimitiveArrayCritical(jSession, nSession, 0);
478 nSession = NULL;
479
480 {
481 const jint elements[1] = { (jint) lpTrack->getSampleRate() };
482 env->SetIntArrayRegion(jSampleRate, 0, 1, elements);
483 }
484
485 { // scope for the lock
486 Mutex::Autolock l(sLock);
487 sAudioTrackCallBackCookies.add(&lpJniStorage->mCallbackData);
488 }
489 // save our newly created C++ AudioTrack in the "nativeTrackInJavaObj" field
490 // of the Java object (in mNativeTrackInJavaObj)
491 setAudioTrack(env, thiz, lpTrack);
492
493 // save the JNI resources so we can free them later
494 //ALOGV("storing lpJniStorage: %x\n", (long)lpJniStorage);
495 env->SetLongField(thiz, javaAudioTrackFields.jniData, (jlong)lpJniStorage);
496
497 // since we had audio attributes, the stream type was derived from them during the
498 // creation of the native AudioTrack: push the same value to the Java object
499 env->SetIntField(thiz, javaAudioTrackFields.fieldStreamType, (jint) lpTrack->streamType());
500
501 return (jint) AUDIO_JAVA_SUCCESS;
502
503 // failures:
504 native_init_failure:
505 if (nSession != NULL) {
506 env->ReleasePrimitiveArrayCritical(jSession, nSession, 0);
507 }
508 env->DeleteGlobalRef(lpJniStorage->mCallbackData.audioTrack_class);
509 env->DeleteGlobalRef(lpJniStorage->mCallbackData.audioTrack_ref);
510 delete lpJniStorage;
511 env->SetLongField(thiz, javaAudioTrackFields.jniData, 0);
512
513 // lpTrack goes out of scope, so reference count drops to zero
514 return (jint) AUDIOTRACK_ERROR_SETUP_NATIVEINITFAILED;
515 }
516
517 // ----------------------------------------------------------------------------
518 static jboolean
android_media_AudioTrack_is_direct_output_supported(JNIEnv * env,jobject thiz,jint encoding,jint sampleRate,jint channelMask,jint channelIndexMask,jint contentType,jint usage,jint flags)519 android_media_AudioTrack_is_direct_output_supported(JNIEnv *env, jobject thiz,
520 jint encoding, jint sampleRate,
521 jint channelMask, jint channelIndexMask,
522 jint contentType, jint usage, jint flags) {
523 audio_config_base_t config = {};
524 audio_attributes_t attributes = {};
525 config.format = static_cast<audio_format_t>(audioFormatToNative(encoding));
526 config.sample_rate = static_cast<uint32_t>(sampleRate);
527 config.channel_mask = nativeChannelMaskFromJavaChannelMasks(channelMask, channelIndexMask);
528 attributes.content_type = static_cast<audio_content_type_t>(contentType);
529 attributes.usage = static_cast<audio_usage_t>(usage);
530 attributes.flags = static_cast<audio_flags_mask_t>(flags);
531 // ignore source and tags attributes as they don't affect querying whether output is supported
532 return AudioTrack::isDirectOutputSupported(config, attributes);
533 }
534
535 // ----------------------------------------------------------------------------
536 static void
android_media_AudioTrack_start(JNIEnv * env,jobject thiz)537 android_media_AudioTrack_start(JNIEnv *env, jobject thiz)
538 {
539 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
540 if (lpTrack == NULL) {
541 jniThrowException(env, "java/lang/IllegalStateException",
542 "Unable to retrieve AudioTrack pointer for start()");
543 return;
544 }
545
546 lpTrack->start();
547 }
548
549
550 // ----------------------------------------------------------------------------
551 static void
android_media_AudioTrack_stop(JNIEnv * env,jobject thiz)552 android_media_AudioTrack_stop(JNIEnv *env, jobject thiz)
553 {
554 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
555 if (lpTrack == NULL) {
556 jniThrowException(env, "java/lang/IllegalStateException",
557 "Unable to retrieve AudioTrack pointer for stop()");
558 return;
559 }
560
561 lpTrack->stop();
562 }
563
564
565 // ----------------------------------------------------------------------------
566 static void
android_media_AudioTrack_pause(JNIEnv * env,jobject thiz)567 android_media_AudioTrack_pause(JNIEnv *env, jobject thiz)
568 {
569 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
570 if (lpTrack == NULL) {
571 jniThrowException(env, "java/lang/IllegalStateException",
572 "Unable to retrieve AudioTrack pointer for pause()");
573 return;
574 }
575
576 lpTrack->pause();
577 }
578
579
580 // ----------------------------------------------------------------------------
581 static void
android_media_AudioTrack_flush(JNIEnv * env,jobject thiz)582 android_media_AudioTrack_flush(JNIEnv *env, jobject thiz)
583 {
584 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
585 if (lpTrack == NULL) {
586 jniThrowException(env, "java/lang/IllegalStateException",
587 "Unable to retrieve AudioTrack pointer for flush()");
588 return;
589 }
590
591 lpTrack->flush();
592 }
593
594 // ----------------------------------------------------------------------------
595 static void
android_media_AudioTrack_set_volume(JNIEnv * env,jobject thiz,jfloat leftVol,jfloat rightVol)596 android_media_AudioTrack_set_volume(JNIEnv *env, jobject thiz, jfloat leftVol, jfloat rightVol )
597 {
598 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
599 if (lpTrack == NULL) {
600 jniThrowException(env, "java/lang/IllegalStateException",
601 "Unable to retrieve AudioTrack pointer for setVolume()");
602 return;
603 }
604
605 lpTrack->setVolume(leftVol, rightVol);
606 }
607
608 // ----------------------------------------------------------------------------
609
610 #define CALLBACK_COND_WAIT_TIMEOUT_MS 1000
android_media_AudioTrack_release(JNIEnv * env,jobject thiz)611 static void android_media_AudioTrack_release(JNIEnv *env, jobject thiz) {
612 sp<AudioTrack> lpTrack = setAudioTrack(env, thiz, 0);
613 if (lpTrack == NULL) {
614 return;
615 }
616 //ALOGV("deleting lpTrack: %x\n", (int)lpTrack);
617
618 // delete the JNI data
619 AudioTrackJniStorage* pJniStorage = (AudioTrackJniStorage *)env->GetLongField(
620 thiz, javaAudioTrackFields.jniData);
621 // reset the native resources in the Java object so any attempt to access
622 // them after a call to release fails.
623 env->SetLongField(thiz, javaAudioTrackFields.jniData, 0);
624
625 if (pJniStorage) {
626 Mutex::Autolock l(sLock);
627 audiotrack_callback_cookie *lpCookie = &pJniStorage->mCallbackData;
628 //ALOGV("deleting pJniStorage: %x\n", (int)pJniStorage);
629 while (lpCookie->busy) {
630 if (lpCookie->cond.waitRelative(sLock,
631 milliseconds(CALLBACK_COND_WAIT_TIMEOUT_MS)) !=
632 NO_ERROR) {
633 break;
634 }
635 }
636 sAudioTrackCallBackCookies.remove(lpCookie);
637 // delete global refs created in native_setup
638 env->DeleteGlobalRef(lpCookie->audioTrack_class);
639 env->DeleteGlobalRef(lpCookie->audioTrack_ref);
640 delete pJniStorage;
641 }
642 }
643
644
645 // ----------------------------------------------------------------------------
android_media_AudioTrack_finalize(JNIEnv * env,jobject thiz)646 static void android_media_AudioTrack_finalize(JNIEnv *env, jobject thiz) {
647 //ALOGV("android_media_AudioTrack_finalize jobject: %x\n", (int)thiz);
648 android_media_AudioTrack_release(env, thiz);
649 }
650
651 // overloaded JNI array helper functions (same as in android_media_AudioRecord)
652 static inline
envGetArrayElements(JNIEnv * env,jbyteArray array,jboolean * isCopy)653 jbyte *envGetArrayElements(JNIEnv *env, jbyteArray array, jboolean *isCopy) {
654 return env->GetByteArrayElements(array, isCopy);
655 }
656
657 static inline
envReleaseArrayElements(JNIEnv * env,jbyteArray array,jbyte * elems,jint mode)658 void envReleaseArrayElements(JNIEnv *env, jbyteArray array, jbyte *elems, jint mode) {
659 env->ReleaseByteArrayElements(array, elems, mode);
660 }
661
662 static inline
envGetArrayElements(JNIEnv * env,jshortArray array,jboolean * isCopy)663 jshort *envGetArrayElements(JNIEnv *env, jshortArray array, jboolean *isCopy) {
664 return env->GetShortArrayElements(array, isCopy);
665 }
666
667 static inline
envReleaseArrayElements(JNIEnv * env,jshortArray array,jshort * elems,jint mode)668 void envReleaseArrayElements(JNIEnv *env, jshortArray array, jshort *elems, jint mode) {
669 env->ReleaseShortArrayElements(array, elems, mode);
670 }
671
672 static inline
envGetArrayElements(JNIEnv * env,jfloatArray array,jboolean * isCopy)673 jfloat *envGetArrayElements(JNIEnv *env, jfloatArray array, jboolean *isCopy) {
674 return env->GetFloatArrayElements(array, isCopy);
675 }
676
677 static inline
envReleaseArrayElements(JNIEnv * env,jfloatArray array,jfloat * elems,jint mode)678 void envReleaseArrayElements(JNIEnv *env, jfloatArray array, jfloat *elems, jint mode) {
679 env->ReleaseFloatArrayElements(array, elems, mode);
680 }
681
682 static inline
interpretWriteSizeError(ssize_t writeSize)683 jint interpretWriteSizeError(ssize_t writeSize) {
684 if (writeSize == WOULD_BLOCK) {
685 return (jint)0;
686 } else if (writeSize == NO_INIT) {
687 return AUDIO_JAVA_DEAD_OBJECT;
688 } else {
689 ALOGE("Error %zd during AudioTrack native read", writeSize);
690 return nativeToJavaStatus(writeSize);
691 }
692 }
693
694 // ----------------------------------------------------------------------------
695 template <typename T>
writeToTrack(const sp<AudioTrack> & track,jint audioFormat,const T * data,jint offsetInSamples,jint sizeInSamples,bool blocking)696 static jint writeToTrack(const sp<AudioTrack>& track, jint audioFormat, const T *data,
697 jint offsetInSamples, jint sizeInSamples, bool blocking) {
698 // give the data to the native AudioTrack object (the data starts at the offset)
699 ssize_t written = 0;
700 // regular write() or copy the data to the AudioTrack's shared memory?
701 size_t sizeInBytes = sizeInSamples * sizeof(T);
702 if (track->sharedBuffer() == 0) {
703 written = track->write(data + offsetInSamples, sizeInBytes, blocking);
704 // for compatibility with earlier behavior of write(), return 0 in this case
705 if (written == (ssize_t) WOULD_BLOCK) {
706 written = 0;
707 }
708 } else {
709 // writing to shared memory, check for capacity
710 if ((size_t)sizeInBytes > track->sharedBuffer()->size()) {
711 sizeInBytes = track->sharedBuffer()->size();
712 }
713 memcpy(track->sharedBuffer()->unsecurePointer(), data + offsetInSamples, sizeInBytes);
714 written = sizeInBytes;
715 }
716 if (written >= 0) {
717 return written / sizeof(T);
718 }
719 return interpretWriteSizeError(written);
720 }
721
722 // ----------------------------------------------------------------------------
723 template <typename T>
android_media_AudioTrack_writeArray(JNIEnv * env,jobject thiz,T javaAudioData,jint offsetInSamples,jint sizeInSamples,jint javaAudioFormat,jboolean isWriteBlocking)724 static jint android_media_AudioTrack_writeArray(JNIEnv *env, jobject thiz,
725 T javaAudioData,
726 jint offsetInSamples, jint sizeInSamples,
727 jint javaAudioFormat,
728 jboolean isWriteBlocking) {
729 //ALOGV("android_media_AudioTrack_writeArray(offset=%d, sizeInSamples=%d) called",
730 // offsetInSamples, sizeInSamples);
731 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
732 if (lpTrack == NULL) {
733 jniThrowException(env, "java/lang/IllegalStateException",
734 "Unable to retrieve AudioTrack pointer for write()");
735 return (jint)AUDIO_JAVA_INVALID_OPERATION;
736 }
737
738 if (javaAudioData == NULL) {
739 ALOGE("NULL java array of audio data to play");
740 return (jint)AUDIO_JAVA_BAD_VALUE;
741 }
742
743 // NOTE: We may use GetPrimitiveArrayCritical() when the JNI implementation changes in such
744 // a way that it becomes much more efficient. When doing so, we will have to prevent the
745 // AudioSystem callback to be called while in critical section (in case of media server
746 // process crash for instance)
747
748 // get the pointer for the audio data from the java array
749 auto cAudioData = envGetArrayElements(env, javaAudioData, NULL);
750 if (cAudioData == NULL) {
751 ALOGE("Error retrieving source of audio data to play");
752 return (jint)AUDIO_JAVA_BAD_VALUE; // out of memory or no data to load
753 }
754
755 jint samplesWritten = writeToTrack(lpTrack, javaAudioFormat, cAudioData,
756 offsetInSamples, sizeInSamples, isWriteBlocking == JNI_TRUE /* blocking */);
757
758 envReleaseArrayElements(env, javaAudioData, cAudioData, 0);
759
760 //ALOGV("write wrote %d (tried %d) samples in the native AudioTrack with offset %d",
761 // (int)samplesWritten, (int)(sizeInSamples), (int)offsetInSamples);
762 return samplesWritten;
763 }
764
765 // ----------------------------------------------------------------------------
android_media_AudioTrack_write_native_bytes(JNIEnv * env,jobject thiz,jobject javaByteBuffer,jint byteOffset,jint sizeInBytes,jint javaAudioFormat,jboolean isWriteBlocking)766 static jint android_media_AudioTrack_write_native_bytes(JNIEnv *env, jobject thiz,
767 jobject javaByteBuffer, jint byteOffset, jint sizeInBytes,
768 jint javaAudioFormat, jboolean isWriteBlocking) {
769 //ALOGV("android_media_AudioTrack_write_native_bytes(offset=%d, sizeInBytes=%d) called",
770 // offsetInBytes, sizeInBytes);
771 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
772 if (lpTrack == NULL) {
773 jniThrowException(env, "java/lang/IllegalStateException",
774 "Unable to retrieve AudioTrack pointer for write()");
775 return (jint)AUDIO_JAVA_INVALID_OPERATION;
776 }
777
778 const jbyte* bytes =
779 reinterpret_cast<const jbyte*>(env->GetDirectBufferAddress(javaByteBuffer));
780 if (bytes == NULL) {
781 ALOGE("Error retrieving source of audio data to play, can't play");
782 return (jint)AUDIO_JAVA_BAD_VALUE;
783 }
784
785 jint written = writeToTrack(lpTrack, javaAudioFormat, bytes, byteOffset,
786 sizeInBytes, isWriteBlocking == JNI_TRUE /* blocking */);
787
788 return written;
789 }
790
791 // ----------------------------------------------------------------------------
android_media_AudioTrack_get_buffer_size_frames(JNIEnv * env,jobject thiz)792 static jint android_media_AudioTrack_get_buffer_size_frames(JNIEnv *env, jobject thiz) {
793 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
794 if (lpTrack == NULL) {
795 jniThrowException(env, "java/lang/IllegalStateException",
796 "Unable to retrieve AudioTrack pointer for getBufferSizeInFrames()");
797 return (jint)AUDIO_JAVA_ERROR;
798 }
799
800 ssize_t result = lpTrack->getBufferSizeInFrames();
801 if (result < 0) {
802 jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
803 "Internal error detected in getBufferSizeInFrames() = %zd", result);
804 return (jint)AUDIO_JAVA_ERROR;
805 }
806 return (jint)result;
807 }
808
809 // ----------------------------------------------------------------------------
android_media_AudioTrack_set_buffer_size_frames(JNIEnv * env,jobject thiz,jint bufferSizeInFrames)810 static jint android_media_AudioTrack_set_buffer_size_frames(JNIEnv *env,
811 jobject thiz, jint bufferSizeInFrames) {
812 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
813 if (lpTrack == NULL) {
814 jniThrowException(env, "java/lang/IllegalStateException",
815 "Unable to retrieve AudioTrack pointer for setBufferSizeInFrames()");
816 return (jint)AUDIO_JAVA_ERROR;
817 }
818 // Value will be coerced into the valid range.
819 // But internal values are unsigned, size_t, so we need to clip
820 // against zero here where it is signed.
821 if (bufferSizeInFrames < 0) {
822 bufferSizeInFrames = 0;
823 }
824 ssize_t result = lpTrack->setBufferSizeInFrames(bufferSizeInFrames);
825 if (result < 0) {
826 jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
827 "Internal error detected in setBufferSizeInFrames() = %zd", result);
828 return (jint)AUDIO_JAVA_ERROR;
829 }
830 return (jint)result;
831 }
832
833 // ----------------------------------------------------------------------------
android_media_AudioTrack_get_buffer_capacity_frames(JNIEnv * env,jobject thiz)834 static jint android_media_AudioTrack_get_buffer_capacity_frames(JNIEnv *env, jobject thiz) {
835 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
836 if (lpTrack == NULL) {
837 jniThrowException(env, "java/lang/IllegalStateException",
838 "Unable to retrieve AudioTrack pointer for getBufferCapacityInFrames()");
839 return (jint)AUDIO_JAVA_ERROR;
840 }
841
842 return lpTrack->frameCount();
843 }
844
845 // ----------------------------------------------------------------------------
android_media_AudioTrack_set_playback_rate(JNIEnv * env,jobject thiz,jint sampleRateInHz)846 static jint android_media_AudioTrack_set_playback_rate(JNIEnv *env, jobject thiz,
847 jint sampleRateInHz) {
848 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
849 if (lpTrack == NULL) {
850 jniThrowException(env, "java/lang/IllegalStateException",
851 "Unable to retrieve AudioTrack pointer for setSampleRate()");
852 return (jint)AUDIO_JAVA_ERROR;
853 }
854 return nativeToJavaStatus(lpTrack->setSampleRate(sampleRateInHz));
855 }
856
857
858 // ----------------------------------------------------------------------------
android_media_AudioTrack_get_playback_rate(JNIEnv * env,jobject thiz)859 static jint android_media_AudioTrack_get_playback_rate(JNIEnv *env, jobject thiz) {
860 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
861 if (lpTrack == NULL) {
862 jniThrowException(env, "java/lang/IllegalStateException",
863 "Unable to retrieve AudioTrack pointer for getSampleRate()");
864 return (jint)AUDIO_JAVA_ERROR;
865 }
866 return (jint) lpTrack->getSampleRate();
867 }
868
869
870 // ----------------------------------------------------------------------------
android_media_AudioTrack_set_playback_params(JNIEnv * env,jobject thiz,jobject params)871 static void android_media_AudioTrack_set_playback_params(JNIEnv *env, jobject thiz,
872 jobject params) {
873 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
874 if (lpTrack == NULL) {
875 jniThrowException(env, "java/lang/IllegalStateException",
876 "AudioTrack not initialized");
877 return;
878 }
879
880 PlaybackParams pbp;
881 pbp.fillFromJobject(env, gPlaybackParamsFields, params);
882
883 ALOGV("setPlaybackParams: %d:%f %d:%f %d:%u %d:%u",
884 pbp.speedSet, pbp.audioRate.mSpeed,
885 pbp.pitchSet, pbp.audioRate.mPitch,
886 pbp.audioFallbackModeSet, pbp.audioRate.mFallbackMode,
887 pbp.audioStretchModeSet, pbp.audioRate.mStretchMode);
888
889 // to simulate partially set params, we do a read-modify-write.
890 // TODO: pass in the valid set mask into AudioTrack.
891 AudioPlaybackRate rate = lpTrack->getPlaybackRate();
892 bool updatedRate = false;
893 if (pbp.speedSet) {
894 rate.mSpeed = pbp.audioRate.mSpeed;
895 updatedRate = true;
896 }
897 if (pbp.pitchSet) {
898 rate.mPitch = pbp.audioRate.mPitch;
899 updatedRate = true;
900 }
901 if (pbp.audioFallbackModeSet) {
902 rate.mFallbackMode = pbp.audioRate.mFallbackMode;
903 updatedRate = true;
904 }
905 if (pbp.audioStretchModeSet) {
906 rate.mStretchMode = pbp.audioRate.mStretchMode;
907 updatedRate = true;
908 }
909 if (updatedRate) {
910 if (lpTrack->setPlaybackRate(rate) != OK) {
911 jniThrowException(env, "java/lang/IllegalArgumentException",
912 "arguments out of range");
913 }
914 }
915 }
916
917
918 // ----------------------------------------------------------------------------
android_media_AudioTrack_get_playback_params(JNIEnv * env,jobject thiz,jobject params)919 static jobject android_media_AudioTrack_get_playback_params(JNIEnv *env, jobject thiz,
920 jobject params) {
921 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
922 if (lpTrack == NULL) {
923 jniThrowException(env, "java/lang/IllegalStateException",
924 "AudioTrack not initialized");
925 return NULL;
926 }
927
928 PlaybackParams pbs;
929 pbs.audioRate = lpTrack->getPlaybackRate();
930 pbs.speedSet = true;
931 pbs.pitchSet = true;
932 pbs.audioFallbackModeSet = true;
933 pbs.audioStretchModeSet = true;
934 return pbs.asJobject(env, gPlaybackParamsFields);
935 }
936
937
938 // ----------------------------------------------------------------------------
android_media_AudioTrack_set_marker_pos(JNIEnv * env,jobject thiz,jint markerPos)939 static jint android_media_AudioTrack_set_marker_pos(JNIEnv *env, jobject thiz,
940 jint markerPos) {
941 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
942 if (lpTrack == NULL) {
943 jniThrowException(env, "java/lang/IllegalStateException",
944 "Unable to retrieve AudioTrack pointer for setMarkerPosition()");
945 return (jint)AUDIO_JAVA_ERROR;
946 }
947 return nativeToJavaStatus( lpTrack->setMarkerPosition(markerPos) );
948 }
949
950
951 // ----------------------------------------------------------------------------
android_media_AudioTrack_get_marker_pos(JNIEnv * env,jobject thiz)952 static jint android_media_AudioTrack_get_marker_pos(JNIEnv *env, jobject thiz) {
953 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
954 uint32_t markerPos = 0;
955
956 if (lpTrack == NULL) {
957 jniThrowException(env, "java/lang/IllegalStateException",
958 "Unable to retrieve AudioTrack pointer for getMarkerPosition()");
959 return (jint)AUDIO_JAVA_ERROR;
960 }
961 lpTrack->getMarkerPosition(&markerPos);
962 return (jint)markerPos;
963 }
964
965
966 // ----------------------------------------------------------------------------
android_media_AudioTrack_set_pos_update_period(JNIEnv * env,jobject thiz,jint period)967 static jint android_media_AudioTrack_set_pos_update_period(JNIEnv *env, jobject thiz,
968 jint period) {
969 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
970 if (lpTrack == NULL) {
971 jniThrowException(env, "java/lang/IllegalStateException",
972 "Unable to retrieve AudioTrack pointer for setPositionUpdatePeriod()");
973 return (jint)AUDIO_JAVA_ERROR;
974 }
975 return nativeToJavaStatus( lpTrack->setPositionUpdatePeriod(period) );
976 }
977
978
979 // ----------------------------------------------------------------------------
android_media_AudioTrack_get_pos_update_period(JNIEnv * env,jobject thiz)980 static jint android_media_AudioTrack_get_pos_update_period(JNIEnv *env, jobject thiz) {
981 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
982 uint32_t period = 0;
983
984 if (lpTrack == NULL) {
985 jniThrowException(env, "java/lang/IllegalStateException",
986 "Unable to retrieve AudioTrack pointer for getPositionUpdatePeriod()");
987 return (jint)AUDIO_JAVA_ERROR;
988 }
989 lpTrack->getPositionUpdatePeriod(&period);
990 return (jint)period;
991 }
992
993
994 // ----------------------------------------------------------------------------
android_media_AudioTrack_set_position(JNIEnv * env,jobject thiz,jint position)995 static jint android_media_AudioTrack_set_position(JNIEnv *env, jobject thiz,
996 jint position) {
997 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
998 if (lpTrack == NULL) {
999 jniThrowException(env, "java/lang/IllegalStateException",
1000 "Unable to retrieve AudioTrack pointer for setPosition()");
1001 return (jint)AUDIO_JAVA_ERROR;
1002 }
1003 return nativeToJavaStatus( lpTrack->setPosition(position) );
1004 }
1005
1006
1007 // ----------------------------------------------------------------------------
android_media_AudioTrack_get_position(JNIEnv * env,jobject thiz)1008 static jint android_media_AudioTrack_get_position(JNIEnv *env, jobject thiz) {
1009 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1010 uint32_t position = 0;
1011
1012 if (lpTrack == NULL) {
1013 jniThrowException(env, "java/lang/IllegalStateException",
1014 "Unable to retrieve AudioTrack pointer for getPosition()");
1015 return (jint)AUDIO_JAVA_ERROR;
1016 }
1017 lpTrack->getPosition(&position);
1018 return (jint)position;
1019 }
1020
1021
1022 // ----------------------------------------------------------------------------
android_media_AudioTrack_get_latency(JNIEnv * env,jobject thiz)1023 static jint android_media_AudioTrack_get_latency(JNIEnv *env, jobject thiz) {
1024 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1025
1026 if (lpTrack == NULL) {
1027 jniThrowException(env, "java/lang/IllegalStateException",
1028 "Unable to retrieve AudioTrack pointer for latency()");
1029 return (jint)AUDIO_JAVA_ERROR;
1030 }
1031 return (jint)lpTrack->latency();
1032 }
1033
1034 // ----------------------------------------------------------------------------
android_media_AudioTrack_get_underrun_count(JNIEnv * env,jobject thiz)1035 static jint android_media_AudioTrack_get_underrun_count(JNIEnv *env, jobject thiz) {
1036 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1037
1038 if (lpTrack == NULL) {
1039 jniThrowException(env, "java/lang/IllegalStateException",
1040 "Unable to retrieve AudioTrack pointer for getUnderrunCount()");
1041 return (jint)AUDIO_JAVA_ERROR;
1042 }
1043 return (jint)lpTrack->getUnderrunCount();
1044 }
1045
1046 // ----------------------------------------------------------------------------
android_media_AudioTrack_get_flags(JNIEnv * env,jobject thiz)1047 static jint android_media_AudioTrack_get_flags(JNIEnv *env, jobject thiz) {
1048 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1049
1050 if (lpTrack == NULL) {
1051 jniThrowException(env, "java/lang/IllegalStateException",
1052 "Unable to retrieve AudioTrack pointer for getFlags()");
1053 return (jint)AUDIO_JAVA_ERROR;
1054 }
1055 return (jint)lpTrack->getFlags();
1056 }
1057
1058 // ----------------------------------------------------------------------------
android_media_AudioTrack_get_timestamp(JNIEnv * env,jobject thiz,jlongArray jTimestamp)1059 static jint android_media_AudioTrack_get_timestamp(JNIEnv *env, jobject thiz, jlongArray jTimestamp) {
1060 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1061
1062 if (lpTrack == NULL) {
1063 ALOGE("Unable to retrieve AudioTrack pointer for getTimestamp()");
1064 return (jint)AUDIO_JAVA_ERROR;
1065 }
1066 AudioTimestamp timestamp;
1067 status_t status = lpTrack->getTimestamp(timestamp);
1068 if (status == OK) {
1069 jlong* nTimestamp = (jlong *) env->GetPrimitiveArrayCritical(jTimestamp, NULL);
1070 if (nTimestamp == NULL) {
1071 ALOGE("Unable to get array for getTimestamp()");
1072 return (jint)AUDIO_JAVA_ERROR;
1073 }
1074 nTimestamp[0] = (jlong) timestamp.mPosition;
1075 nTimestamp[1] = (jlong) ((timestamp.mTime.tv_sec * 1000000000LL) + timestamp.mTime.tv_nsec);
1076 env->ReleasePrimitiveArrayCritical(jTimestamp, nTimestamp, 0);
1077 }
1078 return (jint) nativeToJavaStatus(status);
1079 }
1080
1081 // ----------------------------------------------------------------------------
1082 static jobject
android_media_AudioTrack_native_getMetrics(JNIEnv * env,jobject thiz)1083 android_media_AudioTrack_native_getMetrics(JNIEnv *env, jobject thiz)
1084 {
1085 ALOGD("android_media_AudioTrack_native_getMetrics");
1086
1087 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1088
1089 if (lpTrack == NULL) {
1090 ALOGE("Unable to retrieve AudioTrack pointer for getMetrics()");
1091 jniThrowException(env, "java/lang/IllegalStateException", NULL);
1092 return (jobject) NULL;
1093 }
1094
1095 // get what we have for the metrics from the track
1096 mediametrics::Item *item = NULL;
1097
1098 status_t err = lpTrack->getMetrics(item);
1099 if (err != OK) {
1100 ALOGE("getMetrics failed");
1101 jniThrowException(env, "java/lang/IllegalStateException", NULL);
1102 return (jobject) NULL;
1103 }
1104
1105 jobject mybundle = MediaMetricsJNI::writeMetricsToBundle(env, item, NULL /* mybundle */);
1106
1107 // housekeeping
1108 delete item;
1109 item = NULL;
1110
1111 return mybundle;
1112 }
1113
1114
1115 // ----------------------------------------------------------------------------
android_media_AudioTrack_set_loop(JNIEnv * env,jobject thiz,jint loopStart,jint loopEnd,jint loopCount)1116 static jint android_media_AudioTrack_set_loop(JNIEnv *env, jobject thiz,
1117 jint loopStart, jint loopEnd, jint loopCount) {
1118 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1119 if (lpTrack == NULL) {
1120 jniThrowException(env, "java/lang/IllegalStateException",
1121 "Unable to retrieve AudioTrack pointer for setLoop()");
1122 return (jint)AUDIO_JAVA_ERROR;
1123 }
1124 return nativeToJavaStatus( lpTrack->setLoop(loopStart, loopEnd, loopCount) );
1125 }
1126
1127
1128 // ----------------------------------------------------------------------------
android_media_AudioTrack_reload(JNIEnv * env,jobject thiz)1129 static jint android_media_AudioTrack_reload(JNIEnv *env, jobject thiz) {
1130 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1131 if (lpTrack == NULL) {
1132 jniThrowException(env, "java/lang/IllegalStateException",
1133 "Unable to retrieve AudioTrack pointer for reload()");
1134 return (jint)AUDIO_JAVA_ERROR;
1135 }
1136 return nativeToJavaStatus( lpTrack->reload() );
1137 }
1138
1139
1140 // ----------------------------------------------------------------------------
android_media_AudioTrack_get_output_sample_rate(JNIEnv * env,jobject thiz,jint javaStreamType)1141 static jint android_media_AudioTrack_get_output_sample_rate(JNIEnv *env, jobject thiz,
1142 jint javaStreamType) {
1143 uint32_t afSamplingRate;
1144 // convert the stream type from Java to native value
1145 // FIXME: code duplication with android_media_AudioTrack_setup()
1146 audio_stream_type_t nativeStreamType;
1147 switch (javaStreamType) {
1148 case AUDIO_STREAM_VOICE_CALL:
1149 case AUDIO_STREAM_SYSTEM:
1150 case AUDIO_STREAM_RING:
1151 case AUDIO_STREAM_MUSIC:
1152 case AUDIO_STREAM_ALARM:
1153 case AUDIO_STREAM_NOTIFICATION:
1154 case AUDIO_STREAM_BLUETOOTH_SCO:
1155 case AUDIO_STREAM_DTMF:
1156 nativeStreamType = (audio_stream_type_t) javaStreamType;
1157 break;
1158 default:
1159 nativeStreamType = AUDIO_STREAM_DEFAULT;
1160 break;
1161 }
1162
1163 status_t status = AudioSystem::getOutputSamplingRate(&afSamplingRate, nativeStreamType);
1164 if (status != NO_ERROR) {
1165 ALOGE("Error %d in AudioSystem::getOutputSamplingRate() for stream type %d "
1166 "in AudioTrack JNI", status, nativeStreamType);
1167 return DEFAULT_OUTPUT_SAMPLE_RATE;
1168 } else {
1169 return afSamplingRate;
1170 }
1171 }
1172
1173
1174 // ----------------------------------------------------------------------------
1175 // returns the minimum required size for the successful creation of a streaming AudioTrack
1176 // returns -1 if there was an error querying the hardware.
android_media_AudioTrack_get_min_buff_size(JNIEnv * env,jobject thiz,jint sampleRateInHertz,jint channelCount,jint audioFormat)1177 static jint android_media_AudioTrack_get_min_buff_size(JNIEnv *env, jobject thiz,
1178 jint sampleRateInHertz, jint channelCount, jint audioFormat) {
1179
1180 size_t frameCount;
1181 const status_t status = AudioTrack::getMinFrameCount(&frameCount, AUDIO_STREAM_DEFAULT,
1182 sampleRateInHertz);
1183 if (status != NO_ERROR) {
1184 ALOGE("AudioTrack::getMinFrameCount() for sample rate %d failed with status %d",
1185 sampleRateInHertz, status);
1186 return -1;
1187 }
1188 const audio_format_t format = audioFormatToNative(audioFormat);
1189 if (audio_has_proportional_frames(format)) {
1190 const size_t bytesPerSample = audio_bytes_per_sample(format);
1191 return frameCount * channelCount * bytesPerSample;
1192 } else {
1193 return frameCount;
1194 }
1195 }
1196
1197 // ----------------------------------------------------------------------------
1198 static jint
android_media_AudioTrack_setAuxEffectSendLevel(JNIEnv * env,jobject thiz,jfloat level)1199 android_media_AudioTrack_setAuxEffectSendLevel(JNIEnv *env, jobject thiz, jfloat level )
1200 {
1201 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1202 if (lpTrack == NULL ) {
1203 jniThrowException(env, "java/lang/IllegalStateException",
1204 "Unable to retrieve AudioTrack pointer for setAuxEffectSendLevel()");
1205 return -1;
1206 }
1207
1208 status_t status = lpTrack->setAuxEffectSendLevel(level);
1209 if (status != NO_ERROR) {
1210 ALOGE("AudioTrack::setAuxEffectSendLevel() for level %g failed with status %d",
1211 level, status);
1212 }
1213 return (jint) status;
1214 }
1215
1216 // ----------------------------------------------------------------------------
android_media_AudioTrack_attachAuxEffect(JNIEnv * env,jobject thiz,jint effectId)1217 static jint android_media_AudioTrack_attachAuxEffect(JNIEnv *env, jobject thiz,
1218 jint effectId) {
1219 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1220 if (lpTrack == NULL) {
1221 jniThrowException(env, "java/lang/IllegalStateException",
1222 "Unable to retrieve AudioTrack pointer for attachAuxEffect()");
1223 return (jint)AUDIO_JAVA_ERROR;
1224 }
1225 return nativeToJavaStatus( lpTrack->attachAuxEffect(effectId) );
1226 }
1227
android_media_AudioTrack_setOutputDevice(JNIEnv * env,jobject thiz,jint device_id)1228 static jboolean android_media_AudioTrack_setOutputDevice(
1229 JNIEnv *env, jobject thiz, jint device_id) {
1230
1231 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1232 if (lpTrack == 0) {
1233 return false;
1234 }
1235 return lpTrack->setOutputDevice(device_id) == NO_ERROR;
1236 }
1237
android_media_AudioTrack_getRoutedDeviceId(JNIEnv * env,jobject thiz)1238 static jint android_media_AudioTrack_getRoutedDeviceId(
1239 JNIEnv *env, jobject thiz) {
1240
1241 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1242 if (lpTrack == NULL) {
1243 return 0;
1244 }
1245 return (jint)lpTrack->getRoutedDeviceId();
1246 }
1247
android_media_AudioTrack_enableDeviceCallback(JNIEnv * env,jobject thiz)1248 static void android_media_AudioTrack_enableDeviceCallback(
1249 JNIEnv *env, jobject thiz) {
1250
1251 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1252 if (lpTrack == NULL) {
1253 return;
1254 }
1255 AudioTrackJniStorage* pJniStorage = (AudioTrackJniStorage *)env->GetLongField(
1256 thiz, javaAudioTrackFields.jniData);
1257 if (pJniStorage == NULL || pJniStorage->mDeviceCallback != 0) {
1258 return;
1259 }
1260 pJniStorage->mDeviceCallback =
1261 new JNIDeviceCallback(env, thiz, pJniStorage->mCallbackData.audioTrack_ref,
1262 javaAudioTrackFields.postNativeEventInJava);
1263 lpTrack->addAudioDeviceCallback(pJniStorage->mDeviceCallback);
1264 }
1265
android_media_AudioTrack_disableDeviceCallback(JNIEnv * env,jobject thiz)1266 static void android_media_AudioTrack_disableDeviceCallback(
1267 JNIEnv *env, jobject thiz) {
1268
1269 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1270 if (lpTrack == NULL) {
1271 return;
1272 }
1273 AudioTrackJniStorage* pJniStorage = (AudioTrackJniStorage *)env->GetLongField(
1274 thiz, javaAudioTrackFields.jniData);
1275 if (pJniStorage == NULL || pJniStorage->mDeviceCallback == 0) {
1276 return;
1277 }
1278 lpTrack->removeAudioDeviceCallback(pJniStorage->mDeviceCallback);
1279 pJniStorage->mDeviceCallback.clear();
1280 }
1281
1282 // Pass through the arguments to the AudioFlinger track implementation.
android_media_AudioTrack_apply_volume_shaper(JNIEnv * env,jobject thiz,jobject jconfig,jobject joperation)1283 static jint android_media_AudioTrack_apply_volume_shaper(JNIEnv *env, jobject thiz,
1284 jobject jconfig, jobject joperation) {
1285 // NOTE: hard code here to prevent platform issues. Must match VolumeShaper.java
1286 const int VOLUME_SHAPER_INVALID_OPERATION = -38;
1287
1288 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1289 if (lpTrack == nullptr) {
1290 return (jint)VOLUME_SHAPER_INVALID_OPERATION;
1291 }
1292
1293 sp<VolumeShaper::Configuration> configuration;
1294 sp<VolumeShaper::Operation> operation;
1295 if (jconfig != nullptr) {
1296 configuration = VolumeShaperHelper::convertJobjectToConfiguration(
1297 env, gVolumeShaperFields, jconfig);
1298 ALOGV("applyVolumeShaper configuration: %s", configuration->toString().c_str());
1299 }
1300 if (joperation != nullptr) {
1301 operation = VolumeShaperHelper::convertJobjectToOperation(
1302 env, gVolumeShaperFields, joperation);
1303 ALOGV("applyVolumeShaper operation: %s", operation->toString().c_str());
1304 }
1305 VolumeShaper::Status status = lpTrack->applyVolumeShaper(configuration, operation);
1306 if (status == INVALID_OPERATION) {
1307 status = VOLUME_SHAPER_INVALID_OPERATION;
1308 }
1309 return (jint)status; // if status < 0 an error, else a VolumeShaper id
1310 }
1311
1312 // Pass through the arguments to the AudioFlinger track implementation.
android_media_AudioTrack_get_volume_shaper_state(JNIEnv * env,jobject thiz,jint id)1313 static jobject android_media_AudioTrack_get_volume_shaper_state(JNIEnv *env, jobject thiz,
1314 jint id) {
1315 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1316 if (lpTrack == nullptr) {
1317 return (jobject)nullptr;
1318 }
1319
1320 sp<VolumeShaper::State> state = lpTrack->getVolumeShaperState((int)id);
1321 if (state.get() == nullptr) {
1322 return (jobject)nullptr;
1323 }
1324 return VolumeShaperHelper::convertStateToJobject(env, gVolumeShaperFields, state);
1325 }
1326
android_media_AudioTrack_setPresentation(JNIEnv * env,jobject thiz,jint presentationId,jint programId)1327 static int android_media_AudioTrack_setPresentation(
1328 JNIEnv *env, jobject thiz, jint presentationId, jint programId) {
1329 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1330 if (lpTrack == NULL) {
1331 jniThrowException(env, "java/lang/IllegalStateException",
1332 "AudioTrack not initialized");
1333 return (jint)AUDIO_JAVA_ERROR;
1334 }
1335
1336 return (jint)lpTrack->selectPresentation((int)presentationId, (int)programId);
1337 }
1338
1339 // ----------------------------------------------------------------------------
android_media_AudioTrack_get_port_id(JNIEnv * env,jobject thiz)1340 static jint android_media_AudioTrack_get_port_id(JNIEnv *env, jobject thiz) {
1341 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1342 if (lpTrack == NULL) {
1343 jniThrowException(env, "java/lang/IllegalStateException",
1344 "AudioTrack not initialized");
1345 return (jint)AUDIO_PORT_HANDLE_NONE;
1346 }
1347 return (jint)lpTrack->getPortId();
1348 }
1349
1350 // ----------------------------------------------------------------------------
android_media_AudioTrack_set_delay_padding(JNIEnv * env,jobject thiz,jint delayInFrames,jint paddingInFrames)1351 static void android_media_AudioTrack_set_delay_padding(JNIEnv *env, jobject thiz,
1352 jint delayInFrames, jint paddingInFrames) {
1353 sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
1354 if (lpTrack == NULL) {
1355 jniThrowException(env, "java/lang/IllegalStateException",
1356 "AudioTrack not initialized");
1357 return;
1358 }
1359 AudioParameter param = AudioParameter();
1360 param.addInt(String8(AUDIO_OFFLOAD_CODEC_DELAY_SAMPLES), (int) delayInFrames);
1361 param.addInt(String8(AUDIO_OFFLOAD_CODEC_PADDING_SAMPLES), (int) paddingInFrames);
1362 lpTrack->setParameters(param.toString());
1363 }
1364
static jint android_media_AudioTrack_setAudioDescriptionMixLeveldB(JNIEnv *env, jobject thiz,
                                                                   jfloat level) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == nullptr) {
        jniThrowException(env, "java/lang/IllegalStateException", "AudioTrack not initialized");
        return (jint)AUDIO_JAVA_ERROR;
    }

    return nativeToJavaStatus(lpTrack->setAudioDescriptionMixLevel(level));
}

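// Reads back the audio description mix level. The value travels through a single-element
// float array so the jint return slot can carry the status code; the array element is
// written under Get/ReleasePrimitiveArrayCritical with mode 0 so the result reaches Java.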
static jint android_media_AudioTrack_getAudioDescriptionMixLeveldB(JNIEnv *env, jobject thiz,
                                                                   jfloatArray level) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == nullptr) {
        ALOGE("%s: AudioTrack not initialized", __func__);
        return (jint)AUDIO_JAVA_ERROR;
    }
    jfloat *nativeLevel = (jfloat *)env->GetPrimitiveArrayCritical(level, NULL);
    if (nativeLevel == nullptr) {
        ALOGE("%s: Cannot retrieve level pointer", __func__);
        return (jint)AUDIO_JAVA_ERROR;
    }

    status_t status = lpTrack->getAudioDescriptionMixLevel(reinterpret_cast<float *>(nativeLevel));
    env->ReleasePrimitiveArrayCritical(level, nativeLevel, 0 /* mode */);

    return nativeToJavaStatus(status);
}

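// Dual mono mode selects how a stream carrying two independent mono programs is rendered
// to the left/right outputs. The Java constant is cast straight to audio_dual_mono_mode_t,
// so the Java and native enum values must stay in sync.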
static jint android_media_AudioTrack_setDualMonoMode(JNIEnv *env, jobject thiz, jint dualMonoMode) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == nullptr) {
        jniThrowException(env, "java/lang/IllegalStateException", "AudioTrack not initialized");
        return (jint)AUDIO_JAVA_ERROR;
    }

    return nativeToJavaStatus(
            lpTrack->setDualMonoMode(static_cast<audio_dual_mono_mode_t>(dualMonoMode)));
}

static jint android_media_AudioTrack_getDualMonoMode(JNIEnv *env, jobject thiz,
                                                     jintArray dualMonoMode) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == nullptr) {
        ALOGE("%s: AudioTrack not initialized", __func__);
        return (jint)AUDIO_JAVA_ERROR;
    }
    jint *nativeDualMonoMode = (jint *)env->GetPrimitiveArrayCritical(dualMonoMode, NULL);
    if (nativeDualMonoMode == nullptr) {
        ALOGE("%s: Cannot retrieve dualMonoMode pointer", __func__);
        return (jint)AUDIO_JAVA_ERROR;
    }

    status_t status = lpTrack->getDualMonoMode(
            reinterpret_cast<audio_dual_mono_mode_t *>(nativeDualMonoMode));
    env->ReleasePrimitiveArrayCritical(dualMonoMode, nativeDualMonoMode, 0 /* mode */);

    return nativeToJavaStatus(status);
}

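// Associates an opaque diagnostics/metrics identifier (the "log session id") with the
// track; passing null clears any previously set value.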
static void android_media_AudioTrack_setLogSessionId(JNIEnv *env, jobject thiz,
                                                     jstring jlogSessionId) {
    sp<AudioTrack> track = getAudioTrack(env, thiz);
    if (track == nullptr) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for setLogSessionId()");
        return; // do not dereference a null track below.
    }
    if (jlogSessionId == nullptr) {
        ALOGV("%s: logSessionId nullptr", __func__);
        track->setLogSessionId(nullptr);
        return;
    }
    ScopedUtfChars logSessionId(env, jlogSessionId);
    ALOGV("%s: logSessionId '%s'", __func__, logSessionId.c_str());
    track->setLogSessionId(logSessionId.c_str());
}

static void android_media_AudioTrack_setPlayerIId(JNIEnv *env, jobject thiz, jint playerIId) {
    sp<AudioTrack> track = getAudioTrack(env, thiz);
    if (track == nullptr) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for setPlayerIId()");
        return; // do not dereference a null track below.
    }
    ALOGV("%s: playerIId %d", __func__, playerIId);
    track->setPlayerIId(playerIId);
}

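// The start threshold is the number of frames that must be buffered before a streaming
// track actually begins playback. The getter/setter below treat any non-positive native
// result as an internal error and surface it as an IllegalStateException.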
static jint android_media_AudioTrack_getStartThresholdInFrames(JNIEnv *env, jobject thiz) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == nullptr) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for getStartThresholdInFrames()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    const ssize_t result = lpTrack->getStartThresholdInFrames();
    if (result <= 0) {
        jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
                             "Internal error detected in getStartThresholdInFrames() = %zd",
                             result);
        return (jint)AUDIO_JAVA_ERROR;
    }
    return (jint)result; // this should be a positive value.
}

static jint android_media_AudioTrack_setStartThresholdInFrames(JNIEnv *env, jobject thiz,
                                                               jint startThresholdInFrames) {
    sp<AudioTrack> lpTrack = getAudioTrack(env, thiz);
    if (lpTrack == nullptr) {
        jniThrowException(env, "java/lang/IllegalStateException",
                          "Unable to retrieve AudioTrack pointer for setStartThresholdInFrames()");
        return (jint)AUDIO_JAVA_ERROR;
    }
    // non-positive values of startThresholdInFrames are not allowed by the Java layer.
    const ssize_t result = lpTrack->setStartThresholdInFrames(startThresholdInFrames);
    if (result <= 0) {
        jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
                             "Internal error detected in setStartThresholdInFrames() = %zd",
                             result);
        return (jint)AUDIO_JAVA_ERROR;
    }
    return (jint)result; // this should be a positive value.
}

// ----------------------------------------------------------------------------
// ----------------------------------------------------------------------------
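// The table below binds each Java native method to its JNI implementation. For orientation,
// here is a sketch of how a few of these are expected to be declared on the Java side
// (illustrative only; the authoritative declarations live in android/media/AudioTrack.java
// and the exact modifiers may differ):
//
//     private native int native_setPresentation(int presentationId, int programId);
//     private native int native_set_dual_mono_mode(int dualMonoMode);
//     private native int native_get_dual_mono_mode(int[] dualMonoMode);
//
// Each JNI signature string must match its Java declaration, e.g. "(II)I" for
// native_setPresentation.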
static const JNINativeMethod gMethods[] = {
        // name, signature, funcPtr
        {"native_is_direct_output_supported", "(IIIIIII)Z",
         (void *)android_media_AudioTrack_is_direct_output_supported},
        {"native_start", "()V", (void *)android_media_AudioTrack_start},
        {"native_stop", "()V", (void *)android_media_AudioTrack_stop},
        {"native_pause", "()V", (void *)android_media_AudioTrack_pause},
        {"native_flush", "()V", (void *)android_media_AudioTrack_flush},
        {"native_setup",
         "(Ljava/lang/Object;Ljava/lang/Object;[IIIIII[IJZILjava/lang/Object;Ljava/lang/String;)I",
         (void *)android_media_AudioTrack_setup},
        {"native_finalize", "()V", (void *)android_media_AudioTrack_finalize},
        {"native_release", "()V", (void *)android_media_AudioTrack_release},
        {"native_write_byte", "([BIIIZ)I", (void *)android_media_AudioTrack_writeArray<jbyteArray>},
        {"native_write_native_bytes", "(Ljava/nio/ByteBuffer;IIIZ)I",
         (void *)android_media_AudioTrack_write_native_bytes},
        {"native_write_short", "([SIIIZ)I",
         (void *)android_media_AudioTrack_writeArray<jshortArray>},
        {"native_write_float", "([FIIIZ)I",
         (void *)android_media_AudioTrack_writeArray<jfloatArray>},
        {"native_setVolume", "(FF)V", (void *)android_media_AudioTrack_set_volume},
        {"native_get_buffer_size_frames", "()I",
         (void *)android_media_AudioTrack_get_buffer_size_frames},
        {"native_set_buffer_size_frames", "(I)I",
         (void *)android_media_AudioTrack_set_buffer_size_frames},
        {"native_get_buffer_capacity_frames", "()I",
         (void *)android_media_AudioTrack_get_buffer_capacity_frames},
        {"native_set_playback_rate", "(I)I", (void *)android_media_AudioTrack_set_playback_rate},
        {"native_get_playback_rate", "()I", (void *)android_media_AudioTrack_get_playback_rate},
        {"native_set_playback_params", "(Landroid/media/PlaybackParams;)V",
         (void *)android_media_AudioTrack_set_playback_params},
        {"native_get_playback_params", "()Landroid/media/PlaybackParams;",
         (void *)android_media_AudioTrack_get_playback_params},
        {"native_set_marker_pos", "(I)I", (void *)android_media_AudioTrack_set_marker_pos},
        {"native_get_marker_pos", "()I", (void *)android_media_AudioTrack_get_marker_pos},
        {"native_set_pos_update_period", "(I)I",
         (void *)android_media_AudioTrack_set_pos_update_period},
        {"native_get_pos_update_period", "()I",
         (void *)android_media_AudioTrack_get_pos_update_period},
        {"native_set_position", "(I)I", (void *)android_media_AudioTrack_set_position},
        {"native_get_position", "()I", (void *)android_media_AudioTrack_get_position},
        {"native_get_latency", "()I", (void *)android_media_AudioTrack_get_latency},
        {"native_get_underrun_count", "()I", (void *)android_media_AudioTrack_get_underrun_count},
        {"native_get_flags", "()I", (void *)android_media_AudioTrack_get_flags},
        {"native_get_timestamp", "([J)I", (void *)android_media_AudioTrack_get_timestamp},
        {"native_getMetrics", "()Landroid/os/PersistableBundle;",
         (void *)android_media_AudioTrack_native_getMetrics},
        {"native_set_loop", "(III)I", (void *)android_media_AudioTrack_set_loop},
        {"native_reload_static", "()I", (void *)android_media_AudioTrack_reload},
        {"native_get_output_sample_rate", "(I)I",
         (void *)android_media_AudioTrack_get_output_sample_rate},
        {"native_get_min_buff_size", "(III)I", (void *)android_media_AudioTrack_get_min_buff_size},
        {"native_setAuxEffectSendLevel", "(F)I",
         (void *)android_media_AudioTrack_setAuxEffectSendLevel},
        {"native_attachAuxEffect", "(I)I", (void *)android_media_AudioTrack_attachAuxEffect},
        {"native_setOutputDevice", "(I)Z", (void *)android_media_AudioTrack_setOutputDevice},
        {"native_getRoutedDeviceId", "()I", (void *)android_media_AudioTrack_getRoutedDeviceId},
        {"native_enableDeviceCallback", "()V",
         (void *)android_media_AudioTrack_enableDeviceCallback},
        {"native_disableDeviceCallback", "()V",
         (void *)android_media_AudioTrack_disableDeviceCallback},
        {"native_applyVolumeShaper",
         "(Landroid/media/VolumeShaper$Configuration;Landroid/media/VolumeShaper$Operation;)I",
         (void *)android_media_AudioTrack_apply_volume_shaper},
        {"native_getVolumeShaperState", "(I)Landroid/media/VolumeShaper$State;",
         (void *)android_media_AudioTrack_get_volume_shaper_state},
        {"native_setPresentation", "(II)I", (void *)android_media_AudioTrack_setPresentation},
        {"native_getPortId", "()I", (void *)android_media_AudioTrack_get_port_id},
        {"native_set_delay_padding", "(II)V", (void *)android_media_AudioTrack_set_delay_padding},
        {"native_set_audio_description_mix_level_db", "(F)I",
         (void *)android_media_AudioTrack_setAudioDescriptionMixLeveldB},
        {"native_get_audio_description_mix_level_db", "([F)I",
         (void *)android_media_AudioTrack_getAudioDescriptionMixLeveldB},
        {"native_set_dual_mono_mode", "(I)I", (void *)android_media_AudioTrack_setDualMonoMode},
        {"native_get_dual_mono_mode", "([I)I", (void *)android_media_AudioTrack_getDualMonoMode},
        {"native_setLogSessionId", "(Ljava/lang/String;)V",
         (void *)android_media_AudioTrack_setLogSessionId},
        {"native_setPlayerIId", "(I)V", (void *)android_media_AudioTrack_setPlayerIId},
        {"native_setStartThresholdInFrames", "(I)I",
         (void *)android_media_AudioTrack_setStartThresholdInFrames},
        {"native_getStartThresholdInFrames", "()I",
         (void *)android_media_AudioTrack_getStartThresholdInFrames},
};

// field names found in android/media/AudioTrack.java
#define JAVA_POSTEVENT_CALLBACK_NAME            "postEventFromNative"
#define JAVA_NATIVETRACKINJAVAOBJ_FIELD_NAME    "mNativeTrackInJavaObj"
#define JAVA_JNIDATA_FIELD_NAME                 "mJniData"
#define JAVA_STREAMTYPE_FIELD_NAME              "mStreamType"

// ----------------------------------------------------------------------------
// preconditions:
//    theClass is valid
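// Illustrative (hypothetical) usage, assuming the caller already holds a valid jclass for
// android.media.AudioFormat:
//
//     int pcm16;
//     if (!android_media_getIntConstantFromClass(env, audioFormatClass,
//             "android/media/AudioFormat", "ENCODING_PCM_16BIT", &pcm16)) {
//         // the constant could not be resolved; bail out
//     }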
bool android_media_getIntConstantFromClass(JNIEnv* pEnv, jclass theClass, const char* className,
                                           const char* constName, int* constVal) {
    jfieldID javaConst = NULL;
    javaConst = pEnv->GetStaticFieldID(theClass, constName, "I");
    if (javaConst != NULL) {
        *constVal = pEnv->GetStaticIntField(theClass, javaConst);
        return true;
    } else {
        ALOGE("Can't find %s.%s", className, constName);
        return false;
    }
}

// ----------------------------------------------------------------------------
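// Registers the native methods above and caches the Java method/field IDs used for
// callbacks. This is expected to run once during process start-up, as part of the
// platform's core JNI registration, before any AudioTrack is created from Java.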
int register_android_media_AudioTrack(JNIEnv *env)
{
    // must be first
    int res = RegisterMethodsOrDie(env, kClassPathName, gMethods, NELEM(gMethods));

    javaAudioTrackFields.nativeTrackInJavaObj = NULL;
    javaAudioTrackFields.postNativeEventInJava = NULL;

    // Get the AudioTrack class
    jclass audioTrackClass = FindClassOrDie(env, kClassPathName);

    // Get the postEvent method
    javaAudioTrackFields.postNativeEventInJava = GetStaticMethodIDOrDie(env,
            audioTrackClass, JAVA_POSTEVENT_CALLBACK_NAME,
            "(Ljava/lang/Object;IIILjava/lang/Object;)V");

    // Get the field IDs used by the native code
    //      nativeTrackInJavaObj
    javaAudioTrackFields.nativeTrackInJavaObj = GetFieldIDOrDie(env,
            audioTrackClass, JAVA_NATIVETRACKINJAVAOBJ_FIELD_NAME, "J");
    //      jniData
    javaAudioTrackFields.jniData = GetFieldIDOrDie(env,
            audioTrackClass, JAVA_JNIDATA_FIELD_NAME, "J");
    //      fieldStreamType
    javaAudioTrackFields.fieldStreamType = GetFieldIDOrDie(env,
            audioTrackClass, JAVA_STREAMTYPE_FIELD_NAME, "I");

    env->DeleteLocalRef(audioTrackClass);

    // initialize PlaybackParams field info
    gPlaybackParamsFields.init(env);

    gVolumeShaperFields.init(env);

    // optional check that the TunerConfiguration class and fields exist.
    TunerConfigurationHelper::initCheckOrDie(env);

    return res;
}


// ----------------------------------------------------------------------------