/*
 *  Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#ifndef WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_RECORD_JNI_H_
#define WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_RECORD_JNI_H_

#include <jni.h>

#include "webrtc/base/thread_checker.h"
#include "webrtc/modules/audio_device/android/audio_manager.h"
#include "webrtc/modules/audio_device/audio_device_generic.h"
#include "webrtc/modules/audio_device/include/audio_device_defines.h"
#include "webrtc/modules/utility/include/helpers_android.h"
#include "webrtc/modules/utility/include/jvm_android.h"

namespace webrtc {

// Implements 16-bit mono PCM audio input support for Android using the Java
// AudioRecord interface. Most of the work is done by its Java counterpart in
// WebRtcAudioRecord.java. This class is created and lives on a thread in
// C++-land, but recorded audio buffers are delivered on a high-priority
// thread managed by the Java class.
//
// The Java class makes use of AudioEffect features (mainly AEC) which are
// first available in Jelly Bean. If it is instantiated on earlier SDKs, the
// AEC provided by the APM in WebRTC must be used and enabled separately
// instead.
//
// An instance must be created and destroyed on one and the same thread.
// All public methods must also be called on the same thread. A thread checker
// will RTC_DCHECK if any method is called on an invalid thread.
//
// This class uses AttachCurrentThreadIfNeeded to attach to a Java VM if needed
// and detach when the object goes out of scope. Additional thread checking
// guarantees that no other (possibly non-attached) thread is used.
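//
// A minimal usage sketch (a hypothetical call sequence based only on the
// public interface declared below; |audio_manager| and |audio_buffer| are
// assumed to exist elsewhere and error handling is omitted):
//
//   AudioRecordJni audio_record(audio_manager);
//   audio_record.Init();
//   audio_record.AttachAudioBuffer(audio_buffer);
//   audio_record.InitRecording();
//   audio_record.StartRecording();
//   ...
//   audio_record.StopRecording();
//   audio_record.Terminate();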
class AudioRecordJni {
 public:
  // Wraps the Java-specific parts of AudioRecordJni into one helper class.
  class JavaAudioRecord {
   public:
    JavaAudioRecord(NativeRegistration* native_registration,
                    rtc::scoped_ptr<GlobalRef> audio_record);
    ~JavaAudioRecord();

    int InitRecording(int sample_rate, size_t channels);
    bool StartRecording();
    bool StopRecording();
    bool EnableBuiltInAEC(bool enable);
    bool EnableBuiltInAGC(bool enable);
    bool EnableBuiltInNS(bool enable);

   private:
    rtc::scoped_ptr<GlobalRef> audio_record_;
    jmethodID init_recording_;
    jmethodID start_recording_;
    jmethodID stop_recording_;
    jmethodID enable_built_in_aec_;
    jmethodID enable_built_in_agc_;
    jmethodID enable_built_in_ns_;
  };

  explicit AudioRecordJni(AudioManager* audio_manager);
  ~AudioRecordJni();

  int32_t Init();
  int32_t Terminate();

  int32_t InitRecording();
  bool RecordingIsInitialized() const { return initialized_; }

  int32_t StartRecording();
  int32_t StopRecording();
  bool Recording() const { return recording_; }

  void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);

  int32_t EnableBuiltInAEC(bool enable);
  int32_t EnableBuiltInAGC(bool enable);
  int32_t EnableBuiltInNS(bool enable);

 private:
  // Called from the Java side so we can cache the address of the Java-managed
  // |byte_buffer| in |direct_buffer_address_|. The size of the buffer
  // is also stored in |direct_buffer_capacity_in_bytes_|.
  // This method will be called by the WebRtcAudioRecord constructor, i.e.,
  // on the same thread that this object is created on.
  static void JNICALL CacheDirectBufferAddress(
    JNIEnv* env, jobject obj, jobject byte_buffer, jlong nativeAudioRecord);
  void OnCacheDirectBufferAddress(JNIEnv* env, jobject byte_buffer);

  // Called periodically by the Java-based WebRtcAudioRecord object when
  // recording has started. Each call indicates that there are |length| new
  // bytes recorded in the memory area |direct_buffer_address_| and that it is
  // now time to send these to the consumer.
  // This method is called on a high-priority thread from Java. The name of
  // the thread is 'AudioRecordThread'.
  static void JNICALL DataIsRecorded(
    JNIEnv* env, jobject obj, jint length, jlong nativeAudioRecord);
  void OnDataIsRecorded(int length);
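
  // A minimal sketch (an illustrative assumption; the real definitions live
  // in the corresponding .cc file) of how the static JNI entry points above
  // are expected to dispatch to the member functions via the
  // |nativeAudioRecord| handle passed back from the Java class:
  //
  //   void JNICALL AudioRecordJni::DataIsRecorded(
  //       JNIEnv* env, jobject obj, jint length, jlong nativeAudioRecord) {
  //     AudioRecordJni* this_object =
  //         reinterpret_cast<AudioRecordJni*>(nativeAudioRecord);
  //     this_object->OnDataIsRecorded(length);
  //   }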

  // Stores thread ID in constructor.
  rtc::ThreadChecker thread_checker_;

  // Stores thread ID in first call to OnDataIsRecorded() from high-priority
  // thread in Java. Detached during construction of this object.
  rtc::ThreadChecker thread_checker_java_;

  // Calls AttachCurrentThread() if this thread is not attached at construction.
  // Also ensures that DetachCurrentThread() is called at destruction.
  AttachCurrentThreadIfNeeded attach_thread_if_needed_;

  // Wraps the JNI interface pointer and methods associated with it.
  rtc::scoped_ptr<JNIEnvironment> j_environment_;

  // Contains factory method for creating the Java object.
  rtc::scoped_ptr<NativeRegistration> j_native_registration_;

  // Wraps the Java-specific parts of the AudioRecordJni class.
  rtc::scoped_ptr<AudioRecordJni::JavaAudioRecord> j_audio_record_;

  // Raw pointer to the audio manager.
  const AudioManager* audio_manager_;

  // Contains audio parameters provided to this class at construction by the
  // AudioManager.
  const AudioParameters audio_parameters_;

  // Estimate of the total round-trip delay (input + output). Fixed value set
  // once in AttachAudioBuffer(); it can take one of two possible values.
  // See audio_common.h for details.
  int total_delay_in_milliseconds_;

  // Cached copy of address to direct audio buffer owned by |j_audio_record_|.
  void* direct_buffer_address_;

  // Number of bytes in the direct audio buffer owned by |j_audio_record_|.
  size_t direct_buffer_capacity_in_bytes_;

  // Number of audio frames per audio buffer. Each audio frame corresponds to
  // one sample of PCM mono data at 16 bits per sample. Hence, each audio
  // frame contains 2 bytes (given that the Java layer only supports mono).
  // Example: 480 for 48000 Hz or 441 for 44100 Hz, i.e. 10 ms of audio.
  size_t frames_per_buffer_;

  bool initialized_;

  bool recording_;

  // Raw pointer handle provided to us in AttachAudioBuffer(). Owned by the
  // AudioDeviceModuleImpl class; see AudioDeviceModuleImpl::Create().
  AudioDeviceBuffer* audio_device_buffer_;
};

}  // namespace webrtc

#endif  // WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_RECORD_JNI_H_