/*
 * Copyright (C) 2010, Google Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1.  Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2.  Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef AudioContext_h
#define AudioContext_h

#include "bindings/v8/ScriptWrappable.h"
#include "core/dom/ActiveDOMObject.h"
#include "core/events/EventListener.h"
#include "modules/EventTargetModules.h"
#include "modules/webaudio/AsyncAudioDecoder.h"
#include "modules/webaudio/AudioDestinationNode.h"
#include "platform/audio/AudioBus.h"
#include "platform/heap/Handle.h"
#include "wtf/HashSet.h"
#include "wtf/MainThread.h"
#include "wtf/OwnPtr.h"
#include "wtf/PassRefPtr.h"
#include "wtf/RefCounted.h"
#include "wtf/RefPtr.h"
#include "wtf/ThreadSafeRefCounted.h"
#include "wtf/Threading.h"
#include "wtf/Vector.h"
#include "wtf/text/AtomicStringHash.h"

namespace WebCore {

class AnalyserNode;
class AudioBuffer;
class AudioBufferCallback;
class AudioBufferSourceNode;
class AudioListener;
class AudioSummingJunction;
class BiquadFilterNode;
class ChannelMergerNode;
class ChannelSplitterNode;
class ConvolverNode;
class DelayNode;
class Document;
class DynamicsCompressorNode;
class ExceptionState;
class GainNode;
class HTMLMediaElement;
class MediaElementAudioSourceNode;
class MediaStream;
class MediaStreamAudioDestinationNode;
class MediaStreamAudioSourceNode;
class OscillatorNode;
class PannerNode;
class PeriodicWave;
class ScriptProcessorNode;
class WaveShaperNode;

// AudioContext is the cornerstone of the Web Audio API: all AudioNodes are created from it.
// For thread safety between the audio thread and the main thread, it has a rendering graph locking mechanism.
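//
// A minimal construction sketch (illustrative only; `document` and `exceptionState`
// are assumed to be supplied by the caller, and error handling is elided):
//
//     RefPtrWillBeRawPtr<AudioContext> context = AudioContext::create(document, exceptionState);
//     if (exceptionState.hadException())
//         return;
//     // Nodes are then created from `context` and ultimately connected to context->destination().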

class AudioContext : public ThreadSafeRefCountedWillBeThreadSafeRefCountedGarbageCollected<AudioContext>, public ActiveDOMObject, public ScriptWrappable, public EventTargetWithInlineData {
    DEFINE_EVENT_TARGET_REFCOUNTING(ThreadSafeRefCountedWillBeThreadSafeRefCountedGarbageCollected<AudioContext>);
    WILL_BE_USING_GARBAGE_COLLECTED_MIXIN(AudioContext);
public:
    // Create an AudioContext for rendering to the audio hardware.
    static PassRefPtrWillBeRawPtr<AudioContext> create(Document&, ExceptionState&);

    virtual ~AudioContext();

    virtual void trace(Visitor*) OVERRIDE;

    bool isInitialized() const { return m_isInitialized; }
    bool isOfflineContext() { return m_isOfflineContext; }

    // Document notification
    virtual void stop() OVERRIDE FINAL;
    virtual bool hasPendingActivity() const OVERRIDE;

    AudioDestinationNode* destination() { return m_destinationNode.get(); }
    size_t currentSampleFrame() const { return m_destinationNode->currentSampleFrame(); }
    double currentTime() const { return m_destinationNode->currentTime(); }
    float sampleRate() const { return m_destinationNode->sampleRate(); }

    PassRefPtrWillBeRawPtr<AudioBuffer> createBuffer(unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState&);

    // Asynchronous audio file data decoding.
    void decodeAudioData(ArrayBuffer*, PassOwnPtr<AudioBufferCallback>, PassOwnPtr<AudioBufferCallback>, ExceptionState&);
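    //
    // A sketch of the expected call pattern (illustrative; the AudioBufferCallback
    // implementations and the encoded ArrayBuffer are assumed to come from the caller):
    //
    //     OwnPtr<AudioBufferCallback> successCallback = ...;
    //     OwnPtr<AudioBufferCallback> errorCallback = ...;
    //     context->decodeAudioData(encodedData.get(), successCallback.release(), errorCallback.release(), exceptionState);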

    AudioListener* listener() { return m_listener.get(); }

    // The AudioNode create methods are called on the main thread (from JavaScript).
    PassRefPtrWillBeRawPtr<AudioBufferSourceNode> createBufferSource();
    PassRefPtrWillBeRawPtr<MediaElementAudioSourceNode> createMediaElementSource(HTMLMediaElement*, ExceptionState&);
    PassRefPtrWillBeRawPtr<MediaStreamAudioSourceNode> createMediaStreamSource(MediaStream*, ExceptionState&);
    PassRefPtrWillBeRawPtr<MediaStreamAudioDestinationNode> createMediaStreamDestination();
    PassRefPtrWillBeRawPtr<GainNode> createGain();
    PassRefPtrWillBeRawPtr<BiquadFilterNode> createBiquadFilter();
    PassRefPtrWillBeRawPtr<WaveShaperNode> createWaveShaper();
    PassRefPtrWillBeRawPtr<DelayNode> createDelay(ExceptionState&);
    PassRefPtrWillBeRawPtr<DelayNode> createDelay(double maxDelayTime, ExceptionState&);
    PassRefPtrWillBeRawPtr<PannerNode> createPanner();
    PassRefPtrWillBeRawPtr<ConvolverNode> createConvolver();
    PassRefPtrWillBeRawPtr<DynamicsCompressorNode> createDynamicsCompressor();
    PassRefPtrWillBeRawPtr<AnalyserNode> createAnalyser();
    PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(ExceptionState&);
    PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, ExceptionState&);
    PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, ExceptionState&);
    PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, size_t numberOfOutputChannels, ExceptionState&);
    PassRefPtrWillBeRawPtr<ChannelSplitterNode> createChannelSplitter(ExceptionState&);
    PassRefPtrWillBeRawPtr<ChannelSplitterNode> createChannelSplitter(size_t numberOfOutputs, ExceptionState&);
    PassRefPtrWillBeRawPtr<ChannelMergerNode> createChannelMerger(ExceptionState&);
    PassRefPtrWillBeRawPtr<ChannelMergerNode> createChannelMerger(size_t numberOfInputs, ExceptionState&);
    PassRefPtrWillBeRawPtr<OscillatorNode> createOscillator();
    PassRefPtrWillBeRawPtr<PeriodicWave> createPeriodicWave(Float32Array* real, Float32Array* imag, ExceptionState&);
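    //
    // A minimal main-thread construction sketch (illustrative; AudioNode::connect()
    // is declared on AudioNode, not in this header, and its signature is assumed here):
    //
    //     RefPtrWillBeRawPtr<AudioBufferSourceNode> source = context->createBufferSource();
    //     RefPtrWillBeRawPtr<GainNode> gain = context->createGain();
    //     source->connect(gain.get(), 0, 0, exceptionState);
    //     gain->connect(context->destination(), 0, 0, exceptionState);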

    // When a source node has no more processing to do (it has finished playing), it tells the context to dereference it.
    void notifyNodeFinishedProcessing(AudioNode*);

    // Called at the start of each render quantum.
    void handlePreRenderTasks();

    // Called at the end of each render quantum.
    void handlePostRenderTasks();

    // Called periodically at the end of each render quantum to dereference finished source nodes.
    void derefFinishedSourceNodes();

    // We schedule deletion of all marked nodes at the end of each realtime render quantum.
    void markForDeletion(AudioNode*);
    void deleteMarkedNodes();

    // AudioContext can pull node(s) at the end of each render quantum even when they are not connected to any downstream nodes.
    // These two methods are called by nodes that want to add/remove themselves to/from the automatic pull lists.
    void addAutomaticPullNode(AudioNode*);
    void removeAutomaticPullNode(AudioNode*);

    // Called right before handlePostRenderTasks() to handle nodes which need to be pulled even when they are not connected to anything.
    void processAutomaticPullNodes(size_t framesToProcess);
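    //
    // A sketch of how a node registers itself for automatic pulling (illustrative;
    // the actual call sites live in the individual AudioNode subclasses):
    //
    //     context()->addAutomaticPullNode(this);    // when the node must be pulled despite having no downstream connections
    //     ...
    //     context()->removeAutomaticPullNode(this); // when automatic pulling is no longer needed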

    // Keeps track of the number of connections made.
    void incrementConnectionCount()
    {
        ASSERT(isMainThread());
        m_connectionCount++;
    }

    unsigned connectionCount() const { return m_connectionCount; }

    //
    // Thread Safety and Graph Locking:
    //

    void setAudioThread(ThreadIdentifier thread) { m_audioThread = thread; } // FIXME: check either not initialized or the same
    ThreadIdentifier audioThread() const { return m_audioThread; }
    bool isAudioThread() const;

    // mustReleaseLock is set to true if we acquired the lock in this method call and the caller must unlock(); false if it was previously acquired.
    void lock(bool& mustReleaseLock);

    // Returns true if we own the lock.
    // mustReleaseLock is set to true if we acquired the lock in this method call and the caller must unlock(); false if it was previously acquired.
    bool tryLock(bool& mustReleaseLock);

    void unlock();

    // Returns true if this thread owns the context's lock.
    bool isGraphOwner() const;

    // Returns the maximum number of channels we can support.
    static unsigned maxNumberOfChannels() { return MaxNumberOfChannels; }

    class AutoLocker {
    public:
        AutoLocker(AudioContext* context)
            : m_context(context)
        {
            ASSERT(context);
            context->lock(m_mustReleaseLock);
        }

        ~AutoLocker()
        {
            if (m_mustReleaseLock)
                m_context->unlock();
        }
    private:
        AudioContext* m_context;
        bool m_mustReleaseLock;
    };
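    //
    // A typical AutoLocker use on the audio thread (a sketch; the surrounding
    // function and class are illustrative, not part of this header):
    //
    //     void SomeAudioNode::process(size_t framesToProcess)
    //     {
    //         AudioContext::AutoLocker locker(context());
    //         // The rendering graph may be safely inspected/modified until `locker` goes out of scope.
    //     }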

    // In AudioNode::deref() a tryLock() is used for calling finishDeref(), but if it fails we keep track of the node here.
    void addDeferredFinishDeref(AudioNode*);

    // In the audio thread at the start of each render cycle, we'll call handleDeferredFinishDerefs().
    void handleDeferredFinishDerefs();

    // Only accessed when the graph lock is held.
    void markSummingJunctionDirty(AudioSummingJunction*);
    void markAudioNodeOutputDirty(AudioNodeOutput*);

    // Must be called on main thread.
    void removeMarkedSummingJunction(AudioSummingJunction*);

    // EventTarget
    virtual const AtomicString& interfaceName() const OVERRIDE FINAL;
    virtual ExecutionContext* executionContext() const OVERRIDE FINAL;

    DEFINE_ATTRIBUTE_EVENT_LISTENER(complete);

    void startRendering();
    void fireCompletionEvent();

    static unsigned s_hardwareContextCount;

protected:
    explicit AudioContext(Document*);
    AudioContext(Document*, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate);

    static bool isSampleRateRangeGood(float sampleRate);

private:
    void initialize();
    void uninitialize();

    // ExecutionContext calls stop twice.
    // We'd like to schedule only one stop action for them.
    bool m_isStopScheduled;
    static void stopDispatch(void* userData);
    bool m_isCleared;
    void clear();

    void scheduleNodeDeletion();
    static void deleteMarkedNodesDispatch(void* userData);

    // Set to true when the destination node has been initialized and is ready to process data.
    bool m_isInitialized;

    // The context itself keeps a reference to all source nodes.  The source nodes then reference all nodes they're connected to.
    // In turn, these nodes reference all nodes they're connected to.  All nodes are ultimately connected to the AudioDestinationNode.
    // When the context dereferences a source node, it will be deactivated from the rendering graph along with all other nodes it is
    // uniquely connected to.  See the AudioNode::ref() and AudioNode::deref() methods for more details.
    void refNode(AudioNode*);
    void derefNode(AudioNode*);

    // When the context goes away, there might still be some sources which haven't finished playing.
    // Make sure to dereference them here.
    void derefUnfinishedSourceNodes();

    RefPtrWillBeMember<AudioDestinationNode> m_destinationNode;
    RefPtrWillBeMember<AudioListener> m_listener;

    // Only accessed in the audio thread.
    Vector<AudioNode*> m_finishedNodes;

    // We don't use RefPtr<AudioNode> here because AudioNode has a more complex ref() / deref() implementation
    // with an optional argument for refType.  We need to use the special refType: RefTypeConnection.
    // Either accessed when the graph lock is held, or on the main thread when the audio thread has finished.
    Vector<AudioNode*> m_referencedNodes;

    // Accumulate nodes which need to be deleted here.
    // This is copied to m_nodesToDelete at the end of a render cycle in handlePostRenderTasks(), where we're assured of a stable graph
    // state which will have no references to any of the nodes in m_nodesToDelete once the context lock is released
    // (when handlePostRenderTasks() has completed).
    Vector<AudioNode*> m_nodesMarkedForDeletion;

    // They will be scheduled for deletion (on the main thread) at the end of a render cycle (in the realtime thread).
    Vector<AudioNode*> m_nodesToDelete;
    bool m_isDeletionScheduled;

    // Only accessed when the graph lock is held.
    HashSet<AudioSummingJunction*> m_dirtySummingJunctions;
    HashSet<AudioNodeOutput*> m_dirtyAudioNodeOutputs;
    void handleDirtyAudioSummingJunctions();
    void handleDirtyAudioNodeOutputs();

    // For the sake of thread safety, we maintain a separate Vector of automatic pull nodes for rendering in m_renderingAutomaticPullNodes.
    // It will be copied from m_automaticPullNodes by updateAutomaticPullNodes() at the very start or end of the rendering quantum.
    HashSet<AudioNode*> m_automaticPullNodes;
    Vector<AudioNode*> m_renderingAutomaticPullNodes;
    // m_automaticPullNodesNeedUpdating keeps track of whether m_automaticPullNodes has been modified.
    bool m_automaticPullNodesNeedUpdating;
    void updateAutomaticPullNodes();

    unsigned m_connectionCount;

    // Graph locking.
    Mutex m_contextGraphMutex;
    volatile ThreadIdentifier m_audioThread;
    volatile ThreadIdentifier m_graphOwnerThread; // if the lock is held then this is the thread which owns it, otherwise == UndefinedThreadIdentifier

    // Only accessed in the audio thread.
    Vector<AudioNode*> m_deferredFinishDerefList;

    RefPtrWillBeMember<AudioBuffer> m_renderTarget;

    bool m_isOfflineContext;

    AsyncAudioDecoder m_audioDecoder;

    // 32 is assumed to be large enough for multichannel audio.
    // It is somewhat arbitrary and could be increased if necessary.
    enum { MaxNumberOfChannels = 32 };
};

} // namespace WebCore

#endif // AudioContext_h