/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#ifndef SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_TRACK_JNI_H_
#define SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_TRACK_JNI_H_

#include <jni.h>
#include <memory>

#include "absl/types/optional.h"
#include "modules/audio_device/audio_device_buffer.h"
#include "modules/audio_device/include/audio_device_defines.h"
#include "rtc_base/thread_checker.h"
#include "sdk/android/src/jni/audio_device/audio_common.h"
#include "sdk/android/src/jni/audio_device/audio_device_module.h"

namespace webrtc {

namespace jni {

// Implements 16-bit mono PCM audio output support for Android using the Java
// AudioTrack interface. Most of the work is done by its Java counterpart in
// WebRtcAudioTrack.java. This class is created and lives on a thread in
// C++-land, but decoded audio buffers are requested on a high-priority
// thread managed by the Java class.
//
// An instance can be created on any thread, but must then be used on one and
// the same thread. All public methods must also be called on that thread. A
// thread checker will RTC_DCHECK if any method is called on an invalid
// thread.
//
// This class uses AttachCurrentThreadIfNeeded to attach to a Java VM if
// needed. Additional thread checking guarantees that no other (possibly
// non-attached) thread is used.
class AudioTrackJni : public AudioOutput {
 public:
  static ScopedJavaLocalRef<jobject> CreateJavaWebRtcAudioTrack(
      JNIEnv* env,
      const JavaRef<jobject>& j_context,
      const JavaRef<jobject>& j_audio_manager);

  AudioTrackJni(JNIEnv* env,
                const AudioParameters& audio_parameters,
                const JavaRef<jobject>& j_webrtc_audio_track);
  ~AudioTrackJni() override;

  int32_t Init() override;
  int32_t Terminate() override;

  int32_t InitPlayout() override;
  bool PlayoutIsInitialized() const override;

  int32_t StartPlayout() override;
  int32_t StopPlayout() override;
  bool Playing() const override;

  bool SpeakerVolumeIsAvailable() override;
  int SetSpeakerVolume(uint32_t volume) override;
  absl::optional<uint32_t> SpeakerVolume() const override;
  absl::optional<uint32_t> MaxSpeakerVolume() const override;
  absl::optional<uint32_t> MinSpeakerVolume() const override;
  int GetPlayoutUnderrunCount() override;

  void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) override;

  // Called from the Java side so we can cache the address of the Java-managed
  // |byte_buffer| in |direct_buffer_address_|. The size of the buffer
  // is also stored in |direct_buffer_capacity_in_bytes_|.
  // Called on the same thread as the creating thread.
  void CacheDirectBufferAddress(JNIEnv* env,
                                const JavaParamRef<jobject>& byte_buffer);

  // Called periodically by the Java-based WebRtcAudioTrack object when
  // playout has started. Each call indicates that |length| new bytes should
  // be written to the memory area |direct_buffer_address_| for playout.
  // This method is called on a high-priority thread from Java. The name of
  // the thread is 'AudioTrackThread'.
  void GetPlayoutData(JNIEnv* env, size_t length);

 private:
  // Stores the thread ID in the constructor.
  rtc::ThreadChecker thread_checker_;

  // Stores the thread ID in the first call to GetPlayoutData() from the
  // high-priority thread in Java. Detached during construction of this
  // object.
  rtc::ThreadChecker thread_checker_java_;

  // Wraps the Java-specific parts of the AudioTrackJni class.
  JNIEnv* env_ = nullptr;
  ScopedJavaGlobalRef<jobject> j_audio_track_;

  // Contains audio parameters provided to this class at construction by the
  // AudioManager.
  const AudioParameters audio_parameters_;

  // Cached copy of the address of the direct audio buffer owned by
  // |j_audio_track_|.
  void* direct_buffer_address_;

  // Number of bytes in the direct audio buffer owned by |j_audio_track_|.
  size_t direct_buffer_capacity_in_bytes_;

  // Number of audio frames per audio buffer. Each audio frame corresponds to
  // one sample of PCM mono data at 16 bits per sample. Hence, each audio
  // frame contains 2 bytes (given that the Java layer only supports mono).
  // Example: 480 for 48000 Hz or 441 for 44100 Hz.
  size_t frames_per_buffer_;

  bool initialized_;

  bool playing_;

  // Raw pointer handle provided to us in AttachAudioBuffer(), which is called
  // during AudioDeviceModule::Create(). The AudioDeviceBuffer is owned by, and
  // a member of, the AudioDeviceModuleImpl instance and therefore outlives
  // this object.
  AudioDeviceBuffer* audio_device_buffer_;
};

}  // namespace jni

}  // namespace webrtc

#endif  // SDK_ANDROID_SRC_JNI_AUDIO_DEVICE_AUDIO_TRACK_JNI_H_
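
// ---------------------------------------------------------------------------
// Usage sketch (illustrative only; not part of the WebRTC sources). It shows
// the call sequence this header implies for the creating thread. The helper
// name StartPlayoutSketch, the JNI handles |j_context| and |j_audio_manager|,
// and the |audio_buffer| pointer are assumptions made for the example; in
// production this wiring is performed by AudioDeviceModuleImpl, which also
// owns the AudioDeviceBuffer passed to AttachAudioBuffer(). The sketch also
// assumes the AudioParameters(sample_rate, channels, frames_per_buffer)
// constructor from audio_device_defines.h.
//
//   void StartPlayoutSketch(JNIEnv* env,
//                           const webrtc::JavaRef<jobject>& j_context,
//                           const webrtc::JavaRef<jobject>& j_audio_manager,
//                           webrtc::AudioDeviceBuffer* audio_buffer) {
//     using webrtc::jni::AudioTrackJni;
//     // Create the Java WebRtcAudioTrack peer for this output.
//     webrtc::ScopedJavaLocalRef<jobject> j_track =
//         AudioTrackJni::CreateJavaWebRtcAudioTrack(env, j_context,
//                                                   j_audio_manager);
//     // 48 kHz, mono, 10 ms buffers (480 frames), matching the 16-bit mono
//     // PCM format described in the class comment above.
//     webrtc::AudioParameters params(48000, 1, 480);
//     AudioTrackJni output(env, params, j_track);
//     output.AttachAudioBuffer(audio_buffer);  // Normally done by the ADM.
//     output.Init();
//     output.InitPlayout();
//     output.StartPlayout();
//     // Decoded audio is now pulled via GetPlayoutData() on the Java-managed
//     // 'AudioTrackThread' until playout is stopped.
//     output.StopPlayout();
//     output.Terminate();
//   }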