/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */
10
11 #include "sdk/android/src/jni/audio_device/audio_track_jni.h"
12
13 #include <utility>
14
15 #include "rtc_base/arraysize.h"
16 #include "rtc_base/checks.h"
17 #include "rtc_base/format_macros.h"
18 #include "rtc_base/logging.h"
19 #include "rtc_base/platform_thread.h"
20 #include "sdk/android/generated_java_audio_device_module_native_jni/WebRtcAudioTrack_jni.h"
21 #include "sdk/android/src/jni/jni_helpers.h"
22 #include "system_wrappers/include/field_trial.h"
23 #include "system_wrappers/include/metrics.h"
24
25 namespace webrtc {
26
27 namespace jni {
28
CreateJavaWebRtcAudioTrack(JNIEnv * env,const JavaRef<jobject> & j_context,const JavaRef<jobject> & j_audio_manager)29 ScopedJavaLocalRef<jobject> AudioTrackJni::CreateJavaWebRtcAudioTrack(
30 JNIEnv* env,
31 const JavaRef<jobject>& j_context,
32 const JavaRef<jobject>& j_audio_manager) {
33 return Java_WebRtcAudioTrack_Constructor(env, j_context, j_audio_manager);
34 }
35
AudioTrackJni(JNIEnv * env,const AudioParameters & audio_parameters,const JavaRef<jobject> & j_webrtc_audio_track)36 AudioTrackJni::AudioTrackJni(JNIEnv* env,
37 const AudioParameters& audio_parameters,
38 const JavaRef<jobject>& j_webrtc_audio_track)
39 : j_audio_track_(env, j_webrtc_audio_track),
40 audio_parameters_(audio_parameters),
41 direct_buffer_address_(nullptr),
42 direct_buffer_capacity_in_bytes_(0),
43 frames_per_buffer_(0),
44 initialized_(false),
45 playing_(false),
46 audio_device_buffer_(nullptr) {
47 RTC_LOG(INFO) << "ctor";
48 RTC_DCHECK(audio_parameters_.is_valid());
49 Java_WebRtcAudioTrack_setNativeAudioTrack(env, j_audio_track_,
50 jni::jlongFromPointer(this));
51 // Detach from this thread since construction is allowed to happen on a
52 // different thread.
53 thread_checker_.Detach();
54 thread_checker_java_.Detach();
55 }
56
~AudioTrackJni()57 AudioTrackJni::~AudioTrackJni() {
58 RTC_LOG(INFO) << "dtor";
59 RTC_DCHECK(thread_checker_.IsCurrent());
60 Terminate();
61 }
62
Init()63 int32_t AudioTrackJni::Init() {
64 RTC_LOG(INFO) << "Init";
65 env_ = AttachCurrentThreadIfNeeded();
66 RTC_DCHECK(thread_checker_.IsCurrent());
67 return 0;
68 }
69
Terminate()70 int32_t AudioTrackJni::Terminate() {
71 RTC_LOG(INFO) << "Terminate";
72 RTC_DCHECK(thread_checker_.IsCurrent());
73 StopPlayout();
74 thread_checker_.Detach();
75 return 0;
76 }
77
InitPlayout()78 int32_t AudioTrackJni::InitPlayout() {
79 RTC_LOG(INFO) << "InitPlayout";
80 RTC_DCHECK(thread_checker_.IsCurrent());
81 if (initialized_) {
82 // Already initialized.
83 return 0;
84 }
85 RTC_DCHECK(!playing_);
86 double buffer_size_factor =
87 strtod(webrtc::field_trial::FindFullName(
88 "WebRTC-AudioDevicePlayoutBufferSizeFactor")
89 .c_str(),
90 nullptr);
91 if (buffer_size_factor == 0)
92 buffer_size_factor = 1.0;
93 int requested_buffer_size_bytes = Java_WebRtcAudioTrack_initPlayout(
94 env_, j_audio_track_, audio_parameters_.sample_rate(),
95 static_cast<int>(audio_parameters_.channels()), buffer_size_factor);
96 if (requested_buffer_size_bytes < 0) {
97 RTC_LOG(LS_ERROR) << "InitPlayout failed";
98 return -1;
99 }
100 // Update UMA histograms for both the requested and actual buffer size.
101 // To avoid division by zero, we assume the sample rate is 48k if an invalid
102 // value is found.
103 const int sample_rate = audio_parameters_.sample_rate() <= 0
104 ? 48000
105 : audio_parameters_.sample_rate();
106 // This calculation assumes that audio is mono.
107 const int requested_buffer_size_ms =
108 (requested_buffer_size_bytes * 1000) / (2 * sample_rate);
109 RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeRequestedAudioBufferSizeMs",
110 requested_buffer_size_ms, 0, 1000, 100);
111 int actual_buffer_size_frames =
112 Java_WebRtcAudioTrack_getBufferSizeInFrames(env_, j_audio_track_);
113 if (actual_buffer_size_frames >= 0) {
114 const int actual_buffer_size_ms =
115 actual_buffer_size_frames * 1000 / sample_rate;
116 RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeAudioBufferSizeMs",
117 actual_buffer_size_ms, 0, 1000, 100);
118 }
119
120 initialized_ = true;
121 return 0;
122 }
123
PlayoutIsInitialized() const124 bool AudioTrackJni::PlayoutIsInitialized() const {
125 return initialized_;
126 }
127
StartPlayout()128 int32_t AudioTrackJni::StartPlayout() {
129 RTC_LOG(INFO) << "StartPlayout";
130 RTC_DCHECK(thread_checker_.IsCurrent());
131 if (playing_) {
132 // Already playing.
133 return 0;
134 }
135 if (!initialized_) {
136 RTC_DLOG(LS_WARNING)
137 << "Playout can not start since InitPlayout must succeed first";
138 return 0;
139 }
140 if (!Java_WebRtcAudioTrack_startPlayout(env_, j_audio_track_)) {
141 RTC_LOG(LS_ERROR) << "StartPlayout failed";
142 return -1;
143 }
144 playing_ = true;
145 return 0;
146 }
147
StopPlayout()148 int32_t AudioTrackJni::StopPlayout() {
149 RTC_LOG(INFO) << "StopPlayout";
150 RTC_DCHECK(thread_checker_.IsCurrent());
151 if (!initialized_ || !playing_) {
152 return 0;
153 }
154 if (!Java_WebRtcAudioTrack_stopPlayout(env_, j_audio_track_)) {
155 RTC_LOG(LS_ERROR) << "StopPlayout failed";
156 return -1;
157 }
158 // If we don't detach here, we will hit a RTC_DCHECK next time StartPlayout()
159 // is called since it will create a new Java thread.
160 thread_checker_java_.Detach();
161 initialized_ = false;
162 playing_ = false;
163 direct_buffer_address_ = nullptr;
164 return 0;
165 }
166
Playing() const167 bool AudioTrackJni::Playing() const {
168 return playing_;
169 }
170
SpeakerVolumeIsAvailable()171 bool AudioTrackJni::SpeakerVolumeIsAvailable() {
172 return true;
173 }
174
SetSpeakerVolume(uint32_t volume)175 int AudioTrackJni::SetSpeakerVolume(uint32_t volume) {
176 RTC_LOG(INFO) << "SetSpeakerVolume(" << volume << ")";
177 RTC_DCHECK(thread_checker_.IsCurrent());
178 return Java_WebRtcAudioTrack_setStreamVolume(env_, j_audio_track_,
179 static_cast<int>(volume))
180 ? 0
181 : -1;
182 }
183
MaxSpeakerVolume() const184 absl::optional<uint32_t> AudioTrackJni::MaxSpeakerVolume() const {
185 RTC_DCHECK(thread_checker_.IsCurrent());
186 return Java_WebRtcAudioTrack_getStreamMaxVolume(env_, j_audio_track_);
187 }
188
MinSpeakerVolume() const189 absl::optional<uint32_t> AudioTrackJni::MinSpeakerVolume() const {
190 RTC_DCHECK(thread_checker_.IsCurrent());
191 return 0;
192 }
193
SpeakerVolume() const194 absl::optional<uint32_t> AudioTrackJni::SpeakerVolume() const {
195 RTC_DCHECK(thread_checker_.IsCurrent());
196 const uint32_t volume =
197 Java_WebRtcAudioTrack_getStreamVolume(env_, j_audio_track_);
198 RTC_LOG(INFO) << "SpeakerVolume: " << volume;
199 return volume;
200 }
201
GetPlayoutUnderrunCount()202 int AudioTrackJni::GetPlayoutUnderrunCount() {
203 return Java_WebRtcAudioTrack_GetPlayoutUnderrunCount(env_, j_audio_track_);
204 }
205
206 // TODO(henrika): possibly add stereo support.
AttachAudioBuffer(AudioDeviceBuffer * audioBuffer)207 void AudioTrackJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
208 RTC_LOG(INFO) << "AttachAudioBuffer";
209 RTC_DCHECK(thread_checker_.IsCurrent());
210 audio_device_buffer_ = audioBuffer;
211 const int sample_rate_hz = audio_parameters_.sample_rate();
212 RTC_LOG(INFO) << "SetPlayoutSampleRate(" << sample_rate_hz << ")";
213 audio_device_buffer_->SetPlayoutSampleRate(sample_rate_hz);
214 const size_t channels = audio_parameters_.channels();
215 RTC_LOG(INFO) << "SetPlayoutChannels(" << channels << ")";
216 audio_device_buffer_->SetPlayoutChannels(channels);
217 }
218
CacheDirectBufferAddress(JNIEnv * env,const JavaParamRef<jobject> & byte_buffer)219 void AudioTrackJni::CacheDirectBufferAddress(
220 JNIEnv* env,
221 const JavaParamRef<jobject>& byte_buffer) {
222 RTC_LOG(INFO) << "OnCacheDirectBufferAddress";
223 RTC_DCHECK(thread_checker_.IsCurrent());
224 RTC_DCHECK(!direct_buffer_address_);
225 direct_buffer_address_ = env->GetDirectBufferAddress(byte_buffer.obj());
226 jlong capacity = env->GetDirectBufferCapacity(byte_buffer.obj());
227 RTC_LOG(INFO) << "direct buffer capacity: " << capacity;
228 direct_buffer_capacity_in_bytes_ = static_cast<size_t>(capacity);
229 const size_t bytes_per_frame = audio_parameters_.channels() * sizeof(int16_t);
230 frames_per_buffer_ = direct_buffer_capacity_in_bytes_ / bytes_per_frame;
231 RTC_LOG(INFO) << "frames_per_buffer: " << frames_per_buffer_;
232 }
233
234 // This method is called on a high-priority thread from Java. The name of
235 // the thread is 'AudioRecordTrack'.
GetPlayoutData(JNIEnv * env,size_t length)236 void AudioTrackJni::GetPlayoutData(JNIEnv* env,
237 size_t length) {
238 RTC_DCHECK(thread_checker_java_.IsCurrent());
239 const size_t bytes_per_frame = audio_parameters_.channels() * sizeof(int16_t);
240 RTC_DCHECK_EQ(frames_per_buffer_, length / bytes_per_frame);
241 if (!audio_device_buffer_) {
242 RTC_LOG(LS_ERROR) << "AttachAudioBuffer has not been called";
243 return;
244 }
245 // Pull decoded data (in 16-bit PCM format) from jitter buffer.
246 int samples = audio_device_buffer_->RequestPlayoutData(frames_per_buffer_);
247 if (samples <= 0) {
248 RTC_LOG(LS_ERROR) << "AudioDeviceBuffer::RequestPlayoutData failed";
249 return;
250 }
251 RTC_DCHECK_EQ(samples, frames_per_buffer_);
252 // Copy decoded data into common byte buffer to ensure that it can be
253 // written to the Java based audio track.
254 samples = audio_device_buffer_->GetPlayoutData(direct_buffer_address_);
255 RTC_DCHECK_EQ(length, bytes_per_frame * samples);
256 }
257
258 } // namespace jni
259
260 } // namespace webrtc
261