/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.net.rtp;

import android.annotation.NonNull;
import android.content.AttributionSource;
import android.content.Context;
import android.media.AudioManager;
import android.os.Parcel;

import java.util.HashMap;
import java.util.Locale;
import java.util.Map;

/**
 * An AudioGroup is an audio hub for the speaker, the microphone, and
 * {@link AudioStream}s. Each of these components can be logically turned on
 * or off by calling {@link #setMode(int)} or {@link RtpStream#setMode(int)}.
 * The AudioGroup goes through these components and processes them one by one
 * within its execution loop. The loop consists of four steps. First, for each
 * AudioStream not in {@link RtpStream#MODE_SEND_ONLY}, it decodes the incoming
 * packets and stores them in the stream's buffer. Second, if the microphone is
 * enabled, it processes the recorded audio and stores it in its buffer. Third,
 * if the speaker is enabled, it mixes all the AudioStream buffers and plays the
 * result back. Finally, for each AudioStream not in
 * {@link RtpStream#MODE_RECEIVE_ONLY}, it mixes all the other buffers and sends
 * out the encoded packets. An AudioGroup does nothing if there is no
 * AudioStream in it.
 *
 * <p>A few things should be noted before using these classes. Performance is
 * highly dependent on the system load and the network bandwidth. Usually a
 * simpler {@link AudioCodec} costs fewer CPU cycles but requires more network
 * bandwidth, and vice versa. Using two AudioStreams at the same time doubles
 * not only the load but also the bandwidth. The conditions vary from one
 * device to another, and developers should choose the right combination in
 * order to get the best result.</p>
 *
 * <p>It is sometimes useful to keep multiple AudioGroups at the same time. For
 * example, a Voice over IP (VoIP) application might want to put a conference
 * call on hold in order to make a new call while still allowing the people in
 * the conference call to keep talking to each other. This can be done easily
 * with two AudioGroups, but there are some limitations. Since the speaker and
 * the microphone are globally shared resources, only one AudioGroup at a time
 * is allowed to run in a mode other than {@link #MODE_ON_HOLD}. The others
 * will be unable to acquire these resources and will fail silently.</p>
 *
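 * <p>As an illustrative sketch, holding an ongoing conference call while a new
 * call proceeds could look like the following (the group names are placeholders
 * and both groups are assumed to already contain joined streams):</p>
 * <pre>
 * conferenceGroup.setMode(AudioGroup.MODE_ON_HOLD);
 * newCallGroup.setMode(AudioGroup.MODE_NORMAL);
 * </pre>
 *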
 * <p class="note">Using this class requires
 * {@link android.Manifest.permission#RECORD_AUDIO} permission. Developers
 * should set the audio mode to {@link AudioManager#MODE_IN_COMMUNICATION}
 * using {@link AudioManager#setMode(int)} and change it back when none of
 * the AudioGroups is in use.</p>
 *
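 * <p>A minimal sketch of typical use follows. The addresses, port, and codec
 * are illustrative placeholders, and error handling is omitted:</p>
 * <pre>
 * AudioManager audioManager = context.getSystemService(AudioManager.class);
 * audioManager.setMode(AudioManager.MODE_IN_COMMUNICATION);
 *
 * AudioStream stream = new AudioStream(InetAddress.getByName("192.168.1.2"));
 * stream.associate(InetAddress.getByName("192.168.1.3"), 5004);
 * stream.setCodec(AudioCodec.PCMU);
 * stream.setMode(RtpStream.MODE_NORMAL);
 *
 * AudioGroup group = new AudioGroup(context);
 * group.setMode(AudioGroup.MODE_NORMAL);
 * stream.join(group);
 *
 * // When the call ends, detach the stream and restore the audio mode.
 * stream.join(null);
 * audioManager.setMode(AudioManager.MODE_NORMAL);
 * </pre>
 *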
 * @see AudioStream
 *
 * @deprecated {@link android.net.sip.SipManager} and associated classes are no longer supported and
 * should not be used as the basis of future VoIP apps.
 */
public class AudioGroup {
    /**
     * This mode is similar to {@link #MODE_NORMAL} except the speaker and
     * the microphone are both disabled.
     */
    public static final int MODE_ON_HOLD = 0;

    /**
     * This mode is similar to {@link #MODE_NORMAL} except the microphone is
     * disabled.
     */
    public static final int MODE_MUTED = 1;

    /**
     * This mode indicates that the speaker, the microphone, and all
     * {@link AudioStream}s in the group are enabled. First, the packets
     * received from the streams are decoded and mixed with the audio recorded
     * from the microphone. Then, the results are played back to the speaker,
     * encoded and sent back to each stream.
     */
    public static final int MODE_NORMAL = 2;

    /**
     * This mode is similar to {@link #MODE_NORMAL} except that echo suppression
     * is enabled. It should only be used when the speakerphone is on.
     */
    public static final int MODE_ECHO_SUPPRESSION = 3;

    private static final int MODE_LAST = 3;

    private final Map<AudioStream, Long> mStreams;
    private int mMode = MODE_ON_HOLD;

    private long mNative;
    private Context mContext;
    static {
        System.loadLibrary("rtp_jni");
    }

    /**
     * Creates an empty AudioGroup.
     * @deprecated Replaced by {@link #AudioGroup(Context)}
     */
    @Deprecated
    public AudioGroup() {
        this(null);
    }

    /**
     * Creates an empty AudioGroup.
     * @param context Context the group belongs to
     */
    public AudioGroup(@NonNull Context context) {
        mContext = context;
        mStreams = new HashMap<AudioStream, Long>();
    }

    /**
     * Returns the {@link AudioStream}s in this group.
     */
    public AudioStream[] getStreams() {
        synchronized (this) {
            return mStreams.keySet().toArray(new AudioStream[mStreams.size()]);
        }
    }

    /**
     * Returns the current mode.
     */
    public int getMode() {
        return mMode;
    }

    /**
     * Changes the current mode. It must be one of {@link #MODE_ON_HOLD},
     * {@link #MODE_MUTED}, {@link #MODE_NORMAL}, and
     * {@link #MODE_ECHO_SUPPRESSION}.
     *
     * @param mode The mode to change to.
     * @throws IllegalArgumentException if the mode is invalid.
     */
    public void setMode(int mode) {
        if (mode < 0 || mode > MODE_LAST) {
            throw new IllegalArgumentException("Invalid mode");
        }
        synchronized (this) {
            nativeSetMode(mode);
            mMode = mode;
        }
    }

    private native void nativeSetMode(int mode);

    // Package-private method used by AudioStream.join().
    synchronized void add(AudioStream stream) {
        if (!mStreams.containsKey(stream)) {
            try {
                AudioCodec codec = stream.getCodec();
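                // Describe the codec to the native layer as "<payload type> <rtpmap> <fmtp>".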
                String codecSpec = String.format(Locale.US, "%d %s %s", codec.type,
                        codec.rtpmap, codec.fmtp);

                final long id;
                // Pass the caller's AttributionSource to the native layer as a Parcel.
                try (AttributionSource.ScopedParcelState attributionSourceState = mContext
                        .getAttributionSource().asScopedParcelState()) {
                    id = nativeAdd(stream.getMode(), stream.getSocket(),
                            stream.getRemoteAddress().getHostAddress(),
                            stream.getRemotePort(), codecSpec, stream.getDtmfType(),
                            attributionSourceState.getParcel());
                }
                mStreams.put(stream, id);
            } catch (NullPointerException e) {
                // A missing codec, remote address, or context means the stream is not
                // ready to join; surface it as an IllegalStateException.
                throw new IllegalStateException(e);
            }
        }
    }

    private native long nativeAdd(int mode, int socket, String remoteAddress, int remotePort,
            String codecSpec, int dtmfType, Parcel attributionSource);

    // Package-private method used by AudioStream.join().
    synchronized void remove(AudioStream stream) {
        Long id = mStreams.remove(stream);
        if (id != null) {
            nativeRemove(id);
        }
    }

    private native void nativeRemove(long id);

    /**
     * Sends a DTMF digit to every {@link AudioStream} in this group. Currently
     * only events {@code 0} to {@code 15} are supported.
     *
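     * <p>For example, a sketch that maps a dialed character to its event number,
     * assuming the usual RFC 2833/4733 telephone-event numbering ({@code 0}-{@code 9}
     * for digits, {@code 10} for '*', {@code 11} for '#'):</p>
     * <pre>
     * char key = '#';
     * int event = (key == '*') ? 10 : (key == '#') ? 11 : key - '0';
     * group.sendDtmf(event);
     * </pre>
     *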
     * @throws IllegalArgumentException if the event is invalid.
     */
    public void sendDtmf(int event) {
        if (event < 0 || event > 15) {
            throw new IllegalArgumentException("Invalid event");
        }
        synchronized (this) {
            nativeSendDtmf(event);
        }
    }

    private native void nativeSendDtmf(int event);

    /**
     * Removes every {@link AudioStream} in this group.
     */
    public void clear() {
        for (AudioStream stream : getStreams()) {
            stream.join(null);
        }
    }

    @Override
    protected void finalize() throws Throwable {
        nativeRemove(0L);
        super.finalize();
    }
}