2 * Copyright (C) 2010, Google Inc. All rights reserved.
4 * Redistribution and use in source and binary forms, with or without
5 * modification, are permitted provided that the following conditions
7 * 1. Redistributions of source code must retain the above copyright
8 * notice, this list of conditions and the following disclaimer.
9 * 2. Redistributions in binary form must reproduce the above copyright
10 * notice, this list of conditions and the following disclaimer in the
11 * documentation and/or other materials provided with the distribution.
13 * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND ANY
14 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
15 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
16 * DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR ANY
17 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
18 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
19 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
20 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
21 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
22 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
25 #ifndef AudioContext_h
26 #define AudioContext_h
28 #include "bindings/v8/ScriptWrappable.h"
29 #include "core/dom/ActiveDOMObject.h"
30 #include "core/events/EventListener.h"
31 #include "core/events/EventTarget.h"
32 #include "modules/webaudio/AsyncAudioDecoder.h"
33 #include "modules/webaudio/AudioDestinationNode.h"
34 #include "platform/audio/AudioBus.h"
35 #include "platform/heap/Handle.h"
36 #include "wtf/HashSet.h"
37 #include "wtf/MainThread.h"
38 #include "wtf/OwnPtr.h"
39 #include "wtf/PassRefPtr.h"
40 #include "wtf/RefCounted.h"
41 #include "wtf/RefPtr.h"
42 #include "wtf/ThreadSafeRefCounted.h"
43 #include "wtf/Threading.h"
44 #include "wtf/Vector.h"
45 #include "wtf/text/AtomicStringHash.h"
51 class AudioBufferCallback;
52 class AudioBufferSourceNode;
54 class AudioSummingJunction;
55 class BiquadFilterNode;
56 class ChannelMergerNode;
57 class ChannelSplitterNode;
61 class DynamicsCompressorNode;
64 class HTMLMediaElement;
65 class MediaElementAudioSourceNode;
66 class MediaStreamAudioDestinationNode;
67 class MediaStreamAudioSourceNode;
71 class ScriptProcessorNode;
74 // AudioContext is the cornerstone of the web audio API and all AudioNodes are created from it.
75 // For thread safety between the audio thread and the main thread, it has a rendering graph locking mechanism.
77 class AudioContext : public ThreadSafeRefCountedWillBeThreadSafeRefCountedGarbageCollected<AudioContext>, public ActiveDOMObject, public ScriptWrappable, public EventTargetWithInlineData {
78 DEFINE_EVENT_TARGET_REFCOUNTING(ThreadSafeRefCountedWillBeThreadSafeRefCountedGarbageCollected<AudioContext>);
80 // Create an AudioContext for rendering to the audio hardware.
81 static PassRefPtrWillBeRawPtr<AudioContext> create(Document&, ExceptionState&);
83 virtual ~AudioContext();
85 virtual void trace(Visitor*);
// Returns true once the destination node has been initialized and is ready to
// process data (see m_isInitialized below).
87 bool isInitialized() const;
88 // The constructor of an AudioNode must call this to initialize the context.
89 void lazyInitialize();
// True when this context renders offline into m_renderTarget rather than to hardware.
91 bool isOfflineContext() { return m_isOfflineContext; }
93 // Document notification
94 virtual void stop() OVERRIDE FINAL;
95 virtual bool hasPendingActivity() const OVERRIDE;
// Convenience accessors; the three time/rate getters delegate to the destination node.
97 AudioDestinationNode* destination() { return m_destinationNode.get(); }
98 size_t currentSampleFrame() const { return m_destinationNode->currentSampleFrame(); }
99 double currentTime() const { return m_destinationNode->currentTime(); }
100 float sampleRate() const { return m_destinationNode->sampleRate(); }
// Creates an in-memory AudioBuffer; failures are reported through ExceptionState.
102 PassRefPtrWillBeRawPtr<AudioBuffer> createBuffer(unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionState&);
104 // Asynchronous audio file data decoding.
// The two callbacks are the success and error callbacks, in that order
// (presumably matching the Web Audio decodeAudioData() signature — confirm in .cpp).
105 void decodeAudioData(ArrayBuffer*, PassOwnPtr<AudioBufferCallback>, PassOwnPtr<AudioBufferCallback>, ExceptionState&);
107 AudioListener* listener() { return m_listener.get(); }
109 // The AudioNode create methods are called on the main thread (from JavaScript).
110 PassRefPtrWillBeRawPtr<AudioBufferSourceNode> createBufferSource();
111 PassRefPtrWillBeRawPtr<MediaElementAudioSourceNode> createMediaElementSource(HTMLMediaElement*, ExceptionState&);
112 PassRefPtrWillBeRawPtr<MediaStreamAudioSourceNode> createMediaStreamSource(MediaStream*, ExceptionState&);
113 PassRefPtrWillBeRawPtr<MediaStreamAudioDestinationNode> createMediaStreamDestination();
114 PassRefPtrWillBeRawPtr<GainNode> createGain();
115 PassRefPtrWillBeRawPtr<BiquadFilterNode> createBiquadFilter();
116 PassRefPtrWillBeRawPtr<WaveShaperNode> createWaveShaper();
// The overloads without explicit size arguments presumably apply spec defaults — confirm in .cpp.
117 PassRefPtrWillBeRawPtr<DelayNode> createDelay(ExceptionState&);
118 PassRefPtrWillBeRawPtr<DelayNode> createDelay(double maxDelayTime, ExceptionState&);
119 PassRefPtrWillBeRawPtr<PannerNode> createPanner();
120 PassRefPtrWillBeRawPtr<ConvolverNode> createConvolver();
121 PassRefPtrWillBeRawPtr<DynamicsCompressorNode> createDynamicsCompressor();
122 PassRefPtrWillBeRawPtr<AnalyserNode> createAnalyser();
123 PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(ExceptionState&);
124 PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, ExceptionState&);
125 PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, ExceptionState&);
126 PassRefPtrWillBeRawPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, size_t numberOfOutputChannels, ExceptionState&);
127 PassRefPtrWillBeRawPtr<ChannelSplitterNode> createChannelSplitter(ExceptionState&);
128 PassRefPtrWillBeRawPtr<ChannelSplitterNode> createChannelSplitter(size_t numberOfOutputs, ExceptionState&);
129 PassRefPtrWillBeRawPtr<ChannelMergerNode> createChannelMerger(ExceptionState&);
130 PassRefPtrWillBeRawPtr<ChannelMergerNode> createChannelMerger(size_t numberOfInputs, ExceptionState&);
131 PassRefPtrWillBeRawPtr<OscillatorNode> createOscillator();
132 PassRefPtrWillBeRawPtr<PeriodicWave> createPeriodicWave(Float32Array* real, Float32Array* imag, ExceptionState&);
134 // When a source node has no more processing to do (has finished playing), then it tells the context to dereference it.
135 void notifyNodeFinishedProcessing(AudioNode*);
137 // Called at the start of each render quantum.
138 void handlePreRenderTasks();
140 // Called at the end of each render quantum.
141 void handlePostRenderTasks();
143 // Called periodically at the end of each render quantum to dereference finished source nodes.
144 void derefFinishedSourceNodes();
146 // We schedule deletion of all marked nodes at the end of each realtime render quantum.
147 void markForDeletion(AudioNode*);
148 void deleteMarkedNodes();
150 // AudioContext can pull node(s) at the end of each render quantum even when they are not connected to any downstream nodes.
151 // These two methods are called by the nodes who want to add/remove themselves into/from the automatic pull lists.
152 void addAutomaticPullNode(AudioNode*);
153 void removeAutomaticPullNode(AudioNode*);
155 // Called right before handlePostRenderTasks() to handle nodes which need to be pulled even when they are not connected to anything.
156 void processAutomaticPullNodes(size_t framesToProcess);
158 // Keeps track of the number of connections made.
// Must be called on the main thread (asserted below). NOTE(review): the full
// body is not visible in this chunk; presumably it increments m_connectionCount.
159 void incrementConnectionCount()
161 ASSERT(isMainThread());
165 unsigned connectionCount() const { return m_connectionCount; }
168 // Thread Safety and Graph Locking:
171 void setAudioThread(ThreadIdentifier thread) { m_audioThread = thread; } // FIXME: check either not initialized or the same
172 ThreadIdentifier audioThread() const { return m_audioThread; }
173 bool isAudioThread() const;
175 // Returns true only after the audio thread has been started and then shutdown.
176 bool isAudioThreadFinished() { return m_isAudioThreadFinished; }
178 // mustReleaseLock is set to true if we acquired the lock in this method call and caller must unlock(), false if it was previously acquired.
179 void lock(bool& mustReleaseLock);
181 // Returns true if we own the lock.
182 // mustReleaseLock is set to true if we acquired the lock in this method call and caller must unlock(), false if it was previously acquired.
183 bool tryLock(bool& mustReleaseLock);
187 // Returns true if this thread owns the context's lock.
188 bool isGraphOwner() const;
190 // Returns the maximum number of channels we can support.
191 static unsigned maxNumberOfChannels() { return MaxNumberOfChannels;}
// RAII helper: the constructor acquires the context's graph lock via lock(),
// recording in m_mustReleaseLock whether this call actually took the lock.
// NOTE(review): the class header and destructor are not fully visible in this
// chunk; presumably the destructor unlocks only when m_mustReleaseLock is true.
195 AutoLocker(AudioContext* context)
199 context->lock(m_mustReleaseLock);
204 if (m_mustReleaseLock)
208 AudioContext* m_context;
209 bool m_mustReleaseLock;
212 // In AudioNode::deref() a tryLock() is used for calling finishDeref(), but if it fails keep track here.
213 void addDeferredFinishDeref(AudioNode*);
215 // In the audio thread at the start of each render cycle, we'll call handleDeferredFinishDerefs().
216 void handleDeferredFinishDerefs();
218 // Only accessed when the graph lock is held.
219 void markSummingJunctionDirty(AudioSummingJunction*);
220 void markAudioNodeOutputDirty(AudioNodeOutput*);
222 // Must be called on main thread.
223 void removeMarkedSummingJunction(AudioSummingJunction*);
// EventTarget interface.
226 virtual const AtomicString& interfaceName() const OVERRIDE FINAL;
227 virtual ExecutionContext* executionContext() const OVERRIDE FINAL;
// The "complete" event attribute — presumably dispatched by fireCompletionEvent()
// when offline rendering finishes; confirm against the .cpp.
229 DEFINE_ATTRIBUTE_EVENT_LISTENER(complete);
231 void startRendering();
232 void fireCompletionEvent();
// Count of contexts rendering to the audio hardware (name suggests it excludes
// offline contexts — TODO confirm where it is incremented/decremented).
234 static unsigned s_hardwareContextCount;
// Realtime (hardware) constructor.
237 explicit AudioContext(Document*);
// Constructor for offline (non-realtime) rendering — presumably renders
// numberOfFrames into m_renderTarget at the given sampleRate; confirm in .cpp.
238 AudioContext(Document*, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate);
240 static bool isSampleRateRangeGood(float sampleRate);
// Initialization common to both constructors.
243 void constructCommon();
247 // ExecutionContext calls stop twice.
248 // We'd like to schedule only one stop action for them.
249 bool m_isStopScheduled;
250 static void stopDispatch(void* userData);
254 void scheduleNodeDeletion();
255 static void deleteMarkedNodesDispatch(void* userData);
257 // Set to true when the destination node has been initialized and is ready to process data.
258 bool m_isInitialized;
259 bool m_isAudioThreadFinished;
261 // The context itself keeps a reference to all source nodes. The source nodes, then reference all nodes they're connected to.
262 // In turn, these nodes reference all nodes they're connected to. All nodes are ultimately connected to the AudioDestinationNode.
263 // When the context dereferences a source node, it will be deactivated from the rendering graph along with all other nodes it is
264 // uniquely connected to. See the AudioNode::ref() and AudioNode::deref() methods for more details.
265 void refNode(AudioNode*);
266 void derefNode(AudioNode*);
268 // When the context goes away, there might still be some sources which haven't finished playing.
269 // Make sure to dereference them here.
270 void derefUnfinishedSourceNodes();
272 RefPtrWillBeMember<AudioDestinationNode> m_destinationNode;
273 RefPtrWillBeMember<AudioListener> m_listener;
275 // Only accessed in the audio thread.
276 Vector<AudioNode*> m_finishedNodes;
278 // We don't use RefPtr<AudioNode> here because AudioNode has a more complex ref() / deref() implementation
279 // with an optional argument for refType. We need to use the special refType: RefTypeConnection
280 // Either accessed when the graph lock is held, or on the main thread when the audio thread has finished.
281 Vector<AudioNode*> m_referencedNodes;
283 // Accumulate nodes which need to be deleted here.
284 // This is copied to m_nodesToDelete at the end of a render cycle in handlePostRenderTasks(), where we're assured of a stable graph
285 // state which will have no references to any of the nodes in m_nodesToDelete once the context lock is released
286 // (when handlePostRenderTasks() has completed).
287 Vector<AudioNode*> m_nodesMarkedForDeletion;
289 // They will be scheduled for deletion (on the main thread) at the end of a render cycle (in realtime thread).
290 Vector<AudioNode*> m_nodesToDelete;
291 bool m_isDeletionScheduled;
293 // Only accessed when the graph lock is held.
294 HashSet<AudioSummingJunction* > m_dirtySummingJunctions;
295 HashSet<AudioNodeOutput*> m_dirtyAudioNodeOutputs;
296 void handleDirtyAudioSummingJunctions();
297 void handleDirtyAudioNodeOutputs();
299 // For the sake of thread safety, we maintain a separate Vector of automatic pull nodes for rendering in m_renderingAutomaticPullNodes.
300 // It will be copied from m_automaticPullNodes by updateAutomaticPullNodes() at the very start or end of the rendering quantum.
301 HashSet<AudioNode*> m_automaticPullNodes;
302 Vector<AudioNode*> m_renderingAutomaticPullNodes;
303 // m_automaticPullNodesNeedUpdating keeps track if m_automaticPullNodes is modified.
304 bool m_automaticPullNodesNeedUpdating;
305 void updateAutomaticPullNodes();
// Number of connections made so far; see incrementConnectionCount()/connectionCount().
307 unsigned m_connectionCount;
// Graph locking state. NOTE(review): volatile does not provide synchronization
// by itself — presumably all writes are coordinated through m_contextGraphMutex;
// confirm in the .cpp before relying on this.
310 Mutex m_contextGraphMutex;
311 volatile ThreadIdentifier m_audioThread;
312 volatile ThreadIdentifier m_graphOwnerThread; // if the lock is held then this is the thread which owns it, otherwise == UndefinedThreadIdentifier
314 // Only accessed in the audio thread.
315 Vector<AudioNode*> m_deferredFinishDerefList;
// Destination buffer for offline rendering (see the offline constructor above).
317 RefPtrWillBeMember<AudioBuffer> m_renderTarget;
319 bool m_isOfflineContext;
// Decoder used by decodeAudioData().
321 AsyncAudioDecoder m_audioDecoder;
323 // This is considering 32 is large enough for multiple channels audio.
324 // It is somewhat arbitrary and could be increased if necessary.
325 enum { MaxNumberOfChannels = 32 };
330 #endif // AudioContext_h