/*
 *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "modules/audio_device/android/audio_track_jni.h"

#include <utility>

#include "modules/audio_device/android/audio_manager.h"
#include "rtc_base/arraysize.h"
#include "rtc_base/checks.h"
#include "rtc_base/format_macros.h"
#include "rtc_base/logging.h"
#include "rtc_base/platform_thread.h"
#include "system_wrappers/include/field_trial.h"
#include "system_wrappers/include/metrics.h"

namespace webrtc {

// AudioTrackJni::JavaAudioTrack implementation.
AudioTrackJni::JavaAudioTrack::JavaAudioTrack(
    NativeRegistration* native_reg,
    std::unique_ptr<GlobalRef> audio_track)
    : audio_track_(std::move(audio_track)),
      init_playout_(native_reg->GetMethodId("initPlayout", "(IID)I")),
      start_playout_(native_reg->GetMethodId("startPlayout", "()Z")),
      stop_playout_(native_reg->GetMethodId("stopPlayout", "()Z")),
      set_stream_volume_(native_reg->GetMethodId("setStreamVolume", "(I)Z")),
      get_stream_max_volume_(
          native_reg->GetMethodId("getStreamMaxVolume", "()I")),
      get_stream_volume_(native_reg->GetMethodId("getStreamVolume", "()I")),
      get_buffer_size_in_frames_(
          native_reg->GetMethodId("getBufferSizeInFrames", "()I")) {}

AudioTrackJni::JavaAudioTrack::~JavaAudioTrack() {}

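// Reads the optional "WebRTC-AudioDevicePlayoutBufferSizeFactor" field trial
// and forwards the requested configuration to the Java initPlayout() method.
// strtod() returns 0 when the trial is unset or does not parse as a number,
// in which case the factor falls back to 1.0. As an illustrative example (not
// part of this file), a field trial string such as
// "WebRTC-AudioDevicePlayoutBufferSizeFactor/1.25/" would scale the requested
// buffer size by 1.25. The requested and actual buffer sizes are reported as
// UMA histograms.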
bool AudioTrackJni::JavaAudioTrack::InitPlayout(int sample_rate, int channels) {
  double buffer_size_factor =
      strtod(webrtc::field_trial::FindFullName(
                 "WebRTC-AudioDevicePlayoutBufferSizeFactor")
                 .c_str(),
             nullptr);
  if (buffer_size_factor == 0)
    buffer_size_factor = 1.0;
  int requested_buffer_size_bytes = audio_track_->CallIntMethod(
      init_playout_, sample_rate, channels, buffer_size_factor);
  // Update UMA histograms for both the requested and actual buffer size.
  if (requested_buffer_size_bytes >= 0) {
    // To avoid division by zero, we assume the sample rate is 48k if an
    // invalid value is found.
    sample_rate = sample_rate <= 0 ? 48000 : sample_rate;
    // This calculation assumes that audio is mono.
    const int requested_buffer_size_ms =
        (requested_buffer_size_bytes * 1000) / (2 * sample_rate);
    RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeRequestedAudioBufferSizeMs",
                         requested_buffer_size_ms, 0, 1000, 100);
    int actual_buffer_size_frames =
        audio_track_->CallIntMethod(get_buffer_size_in_frames_);
    if (actual_buffer_size_frames >= 0) {
      const int actual_buffer_size_ms =
          actual_buffer_size_frames * 1000 / sample_rate;
      RTC_HISTOGRAM_COUNTS("WebRTC.Audio.AndroidNativeAudioBufferSizeMs",
                           actual_buffer_size_ms, 0, 1000, 100);
    }
    return true;
  }
  return false;
}

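// The remaining JavaAudioTrack methods are thin wrappers that forward to the
// corresponding methods on the Java WebRtcAudioTrack object through the
// method IDs cached in the constructor.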
bool AudioTrackJni::JavaAudioTrack::StartPlayout() {
  return audio_track_->CallBooleanMethod(start_playout_);
}

bool AudioTrackJni::JavaAudioTrack::StopPlayout() {
  return audio_track_->CallBooleanMethod(stop_playout_);
}

bool AudioTrackJni::JavaAudioTrack::SetStreamVolume(int volume) {
  return audio_track_->CallBooleanMethod(set_stream_volume_, volume);
}

int AudioTrackJni::JavaAudioTrack::GetStreamMaxVolume() {
  return audio_track_->CallIntMethod(get_stream_max_volume_);
}

int AudioTrackJni::JavaAudioTrack::GetStreamVolume() {
  return audio_track_->CallIntMethod(get_stream_volume_);
}

// TODO(henrika): possibly extend usage of AudioManager and add it as a member.
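// Registers the native callback methods on the Java class
// org/webrtc/voiceengine/WebRtcAudioTrack and creates the Java-side audio
// track object. The native AudioTrackJni pointer is passed to Java as a jlong
// so that the static JNI callbacks below can dispatch back to this instance.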
AudioTrackJni::AudioTrackJni(AudioManager* audio_manager)
    : j_environment_(JVM::GetInstance()->environment()),
      audio_parameters_(audio_manager->GetPlayoutAudioParameters()),
      direct_buffer_address_(nullptr),
      direct_buffer_capacity_in_bytes_(0),
      frames_per_buffer_(0),
      initialized_(false),
      playing_(false),
      audio_device_buffer_(nullptr) {
  RTC_LOG(INFO) << "ctor";
  RTC_DCHECK(audio_parameters_.is_valid());
  RTC_CHECK(j_environment_);
  JNINativeMethod native_methods[] = {
      {"nativeCacheDirectBufferAddress", "(Ljava/nio/ByteBuffer;J)V",
       reinterpret_cast<void*>(
           &webrtc::AudioTrackJni::CacheDirectBufferAddress)},
      {"nativeGetPlayoutData", "(IJ)V",
       reinterpret_cast<void*>(&webrtc::AudioTrackJni::GetPlayoutData)}};
  j_native_registration_ = j_environment_->RegisterNatives(
      "org/webrtc/voiceengine/WebRtcAudioTrack", native_methods,
      arraysize(native_methods));
  j_audio_track_.reset(
      new JavaAudioTrack(j_native_registration_.get(),
                         j_native_registration_->NewObject(
                             "<init>", "(J)V", PointerTojlong(this))));
  // Detach from this thread since we want to use the checker to verify calls
  // from the Java based audio thread.
  thread_checker_java_.Detach();
}

AudioTrackJni::~AudioTrackJni() {
  RTC_LOG(INFO) << "dtor";
  RTC_DCHECK(thread_checker_.IsCurrent());
  Terminate();
}

int32_t AudioTrackJni::Init() {
  RTC_LOG(INFO) << "Init";
  RTC_DCHECK(thread_checker_.IsCurrent());
  return 0;
}

int32_t AudioTrackJni::Terminate() {
  RTC_LOG(INFO) << "Terminate";
  RTC_DCHECK(thread_checker_.IsCurrent());
  StopPlayout();
  return 0;
}

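// Configures the Java audio track for playout using the sample rate and
// channel count from the cached playout audio parameters. Must succeed before
// StartPlayout() will start streaming.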
int32_t AudioTrackJni::InitPlayout() {
  RTC_LOG(INFO) << "InitPlayout";
  RTC_DCHECK(thread_checker_.IsCurrent());
  RTC_DCHECK(!initialized_);
  RTC_DCHECK(!playing_);
  if (!j_audio_track_->InitPlayout(audio_parameters_.sample_rate(),
                                   audio_parameters_.channels())) {
    RTC_LOG(LS_ERROR) << "InitPlayout failed";
    return -1;
  }
  initialized_ = true;
  return 0;
}

int32_t AudioTrackJni::StartPlayout() {
  RTC_LOG(INFO) << "StartPlayout";
  RTC_DCHECK(thread_checker_.IsCurrent());
  RTC_DCHECK(!playing_);
  if (!initialized_) {
    RTC_DLOG(LS_WARNING)
        << "Playout can not start since InitPlayout must succeed first";
    return 0;
  }
  if (!j_audio_track_->StartPlayout()) {
    RTC_LOG(LS_ERROR) << "StartPlayout failed";
    return -1;
  }
  playing_ = true;
  return 0;
}

int32_t AudioTrackJni::StopPlayout() {
  RTC_LOG(INFO) << "StopPlayout";
  RTC_DCHECK(thread_checker_.IsCurrent());
  if (!initialized_ || !playing_) {
    return 0;
  }
  if (!j_audio_track_->StopPlayout()) {
    RTC_LOG(LS_ERROR) << "StopPlayout failed";
    return -1;
  }
  // If we don't detach here, we will hit a RTC_DCHECK in OnGetPlayoutData()
  // next time StartPlayout() is called since playout will run on a new Java
  // thread.
  thread_checker_java_.Detach();
  initialized_ = false;
  playing_ = false;
  direct_buffer_address_ = nullptr;
  return 0;
}

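// Speaker volume handling is delegated to the Java WebRtcAudioTrack, which
// adjusts the volume of the underlying Android audio stream.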
int AudioTrackJni::SpeakerVolumeIsAvailable(bool& available) {
  available = true;
  return 0;
}

int AudioTrackJni::SetSpeakerVolume(uint32_t volume) {
  RTC_LOG(INFO) << "SetSpeakerVolume(" << volume << ")";
  RTC_DCHECK(thread_checker_.IsCurrent());
  return j_audio_track_->SetStreamVolume(volume) ? 0 : -1;
}

int AudioTrackJni::MaxSpeakerVolume(uint32_t& max_volume) const {
  RTC_DCHECK(thread_checker_.IsCurrent());
  max_volume = j_audio_track_->GetStreamMaxVolume();
  return 0;
}

int AudioTrackJni::MinSpeakerVolume(uint32_t& min_volume) const {
  RTC_DCHECK(thread_checker_.IsCurrent());
  min_volume = 0;
  return 0;
}

int AudioTrackJni::SpeakerVolume(uint32_t& volume) const {
  RTC_DCHECK(thread_checker_.IsCurrent());
  volume = j_audio_track_->GetStreamVolume();
  RTC_LOG(INFO) << "SpeakerVolume: " << volume;
  return 0;
}

// TODO(henrika): possibly add stereo support.
void AudioTrackJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {
  RTC_LOG(INFO) << "AttachAudioBuffer";
  RTC_DCHECK(thread_checker_.IsCurrent());
  audio_device_buffer_ = audioBuffer;
  const int sample_rate_hz = audio_parameters_.sample_rate();
  RTC_LOG(INFO) << "SetPlayoutSampleRate(" << sample_rate_hz << ")";
  audio_device_buffer_->SetPlayoutSampleRate(sample_rate_hz);
  const size_t channels = audio_parameters_.channels();
  RTC_LOG(INFO) << "SetPlayoutChannels(" << channels << ")";
  audio_device_buffer_->SetPlayoutChannels(channels);
}

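// The two static JNICALL functions below are registered as native methods on
// the Java WebRtcAudioTrack class. Java passes the native AudioTrackJni
// pointer back as a jlong, which is used here to dispatch to the owning
// instance.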
JNI_FUNCTION_ALIGN
void JNICALL AudioTrackJni::CacheDirectBufferAddress(JNIEnv* env,
                                                     jobject obj,
                                                     jobject byte_buffer,
                                                     jlong nativeAudioTrack) {
  webrtc::AudioTrackJni* this_object =
      reinterpret_cast<webrtc::AudioTrackJni*>(nativeAudioTrack);
  this_object->OnCacheDirectBufferAddress(env, byte_buffer);
}

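// Caches the address and capacity of the direct ByteBuffer shared with Java
// and derives how many audio frames fit in the buffer, assuming 16-bit PCM
// samples.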
void AudioTrackJni::OnCacheDirectBufferAddress(JNIEnv* env,
                                               jobject byte_buffer) {
  RTC_LOG(INFO) << "OnCacheDirectBufferAddress";
  RTC_DCHECK(thread_checker_.IsCurrent());
  RTC_DCHECK(!direct_buffer_address_);
  direct_buffer_address_ = env->GetDirectBufferAddress(byte_buffer);
  jlong capacity = env->GetDirectBufferCapacity(byte_buffer);
  RTC_LOG(INFO) << "direct buffer capacity: " << capacity;
  direct_buffer_capacity_in_bytes_ = static_cast<size_t>(capacity);
  const size_t bytes_per_frame = audio_parameters_.channels() * sizeof(int16_t);
  frames_per_buffer_ = direct_buffer_capacity_in_bytes_ / bytes_per_frame;
  RTC_LOG(INFO) << "frames_per_buffer: " << frames_per_buffer_;
}

JNI_FUNCTION_ALIGN
void JNICALL AudioTrackJni::GetPlayoutData(JNIEnv* env,
                                           jobject obj,
                                           jint length,
                                           jlong nativeAudioTrack) {
  webrtc::AudioTrackJni* this_object =
      reinterpret_cast<webrtc::AudioTrackJni*>(nativeAudioTrack);
  this_object->OnGetPlayoutData(static_cast<size_t>(length));
}

// This method is called on a high-priority thread from Java. The name of
// the thread is 'AudioTrackJavaThread'.
void AudioTrackJni::OnGetPlayoutData(size_t length) {
  RTC_DCHECK(thread_checker_java_.IsCurrent());
  const size_t bytes_per_frame = audio_parameters_.channels() * sizeof(int16_t);
  RTC_DCHECK_EQ(frames_per_buffer_, length / bytes_per_frame);
  if (!audio_device_buffer_) {
    RTC_LOG(LS_ERROR) << "AttachAudioBuffer has not been called";
    return;
  }
  // Pull decoded data (in 16-bit PCM format) from the jitter buffer.
  int samples = audio_device_buffer_->RequestPlayoutData(frames_per_buffer_);
  if (samples <= 0) {
    RTC_LOG(LS_ERROR) << "AudioDeviceBuffer::RequestPlayoutData failed";
    return;
  }
  RTC_DCHECK_EQ(samples, frames_per_buffer_);
  // Copy decoded data into the shared byte buffer so that it can be written
  // to the Java-based audio track.
  samples = audio_device_buffer_->GetPlayoutData(direct_buffer_address_);
  RTC_DCHECK_EQ(length, bytes_per_frame * samples);
}

}  // namespace webrtc