/*
 * Copyright (C) 2010, Google Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
(...skipping 110 matching lines...)
    PassRefPtr<MediaElementAudioSourceNode> createMediaElementSource(HTMLMediaElement*, ExceptionState&);
    PassRefPtr<MediaStreamAudioSourceNode> createMediaStreamSource(MediaStream*, ExceptionState&);
    PassRefPtr<MediaStreamAudioDestinationNode> createMediaStreamDestination();
    PassRefPtr<GainNode> createGain();
    PassRefPtr<BiquadFilterNode> createBiquadFilter();
    PassRefPtr<WaveShaperNode> createWaveShaper();
    PassRefPtr<DelayNode> createDelay(ExceptionState&);
    PassRefPtr<DelayNode> createDelay(double maxDelayTime, ExceptionState&);
    PassRefPtr<PannerNode> createPanner();
    PassRefPtr<ConvolverNode> createConvolver();
    PassRefPtr<DynamicsCompressorNode> createDynamicsCompressor();
    PassRefPtr<AnalyserNode> createAnalyser();
    PassRefPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, ExceptionState&);
    PassRefPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, ExceptionState&);
    PassRefPtr<ScriptProcessorNode> createScriptProcessor(size_t bufferSize, size_t numberOfInputChannels, size_t numberOfOutputChannels, ExceptionState&);
    PassRefPtr<ChannelSplitterNode> createChannelSplitter(ExceptionState&);
    PassRefPtr<ChannelSplitterNode> createChannelSplitter(size_t numberOfOutputs, ExceptionState&);
    PassRefPtr<ChannelMergerNode> createChannelMerger(ExceptionState&);
    PassRefPtr<ChannelMergerNode> createChannelMerger(size_t numberOfInputs, ExceptionState&);
    PassRefPtr<OscillatorNode> createOscillator();
    PassRefPtr<PeriodicWave> createPeriodicWave(Float32Array* real, Float32Array* imag, ExceptionState&);
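
    // A minimal usage sketch for a hypothetical caller (assuming an
    // ExceptionState& es obtained from the bindings layer): these factory
    // methods back the script-facing AudioContext API, so building a small
    // graph looks roughly like
    //
    //     RefPtr<GainNode> gain = context->createGain();
    //     RefPtr<DelayNode> delay = context->createDelay(0.5, es);
    //     if (es.hadException())
    //         return;
    //     // Nodes are then wired together with AudioNode::connect().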
(...skipping 27 matching lines...)
    {
        ASSERT(isMainThread());
        m_connectionCount++;
    }

    unsigned connectionCount() const { return m_connectionCount; }

    //
    // Thread Safety and Graph Locking:
    //

    void setAudioThread(ThreadIdentifier thread) { m_audioThread = thread; } // FIXME: check that it is either not initialized or the same
    ThreadIdentifier audioThread() const { return m_audioThread; }
    bool isAudioThread() const;

    // Returns true only after the audio thread has been started and then shut down.
    bool isAudioThreadFinished() { return m_isAudioThreadFinished; }

    // mustReleaseLock is set to true if we acquired the lock in this method call and the caller must unlock(); false if it was previously acquired.
    void lock(bool& mustReleaseLock);

    // Returns true if we own the lock.
    // mustReleaseLock is set to true if we acquired the lock in this method call and the caller must unlock(); false if it was previously acquired.
    bool tryLock(bool& mustReleaseLock);

    void unlock();

    // Returns true if this thread owns the context's lock.
    bool isGraphOwner() const;

    // Returns the maximum number of channels we can support.
    static unsigned maxNumberOfChannels() { return MaxNumberOfChannels; }

    class AutoLocker {
    public:
        AutoLocker(AudioContext* context)
            : m_context(context)
        {
            ASSERT(context);
            context->lock(m_mustReleaseLock);
        }

        ~AutoLocker()
        {
            if (m_mustReleaseLock)
                m_context->unlock();
        }
    private:
        AudioContext* m_context;
        bool m_mustReleaseLock;
    };
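
    // A minimal usage sketch for a hypothetical caller: on the main thread,
    // graph mutations are typically guarded with the RAII helper above, so the
    // lock is released automatically even on early return.
    //
    //     {
    //         AudioContext::AutoLocker locker(context);
    //         // ... add or remove connections while the graph lock is held ...
    //     } // unlock() runs here if this scope acquired the lock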

    // In AudioNode::deref() a tryLock() is used for calling finishDeref(), but if it fails we keep track of the node here.
    void addDeferredFinishDeref(AudioNode*);

    // In the audio thread, at the start of each render cycle, we'll call handleDeferredFinishDerefs().
    void handleDeferredFinishDerefs();
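
    // Sketch of the pattern the two comments above describe (assumed shape; the
    // real logic lives in AudioNode::deref() and the audio thread's render loop):
    //
    //     // AudioNode::deref(): the graph lock may already be contended, so
    //     // defer the cleanup rather than blocking.
    //     bool mustReleaseLock;
    //     if (context->tryLock(mustReleaseLock)) {
    //         // Safe to finish the deref now.
    //         finishDeref(...);
    //         if (mustReleaseLock)
    //             context->unlock();
    //     } else {
    //         // Lock is contended; let the audio thread finish it later.
    //         context->addDeferredFinishDeref(this);
    //     }
    //
    //     // Audio thread, at the start of each render quantum:
    //     context->handleDeferredFinishDerefs();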

    // Only accessed when the graph lock is held.
    void markSummingJunctionDirty(AudioSummingJunction*);
    void markAudioNodeOutputDirty(AudioNodeOutput*);

    // Must be called on the main thread.
    void removeMarkedSummingJunction(AudioSummingJunction*);

    // EventTarget
    virtual const AtomicString& interfaceName() const;
    virtual ScriptExecutionContext* scriptExecutionContext() const;
    virtual EventTargetData* eventTargetData() { return &m_eventTargetData; }
    virtual EventTargetData* ensureEventTargetData() { return &m_eventTargetData; }

    DEFINE_ATTRIBUTE_EVENT_LISTENER(complete);

    // Reconcile ref/deref, which are defined in both ThreadSafeRefCounted and EventTarget.
    using ThreadSafeRefCounted<AudioContext>::ref;
    using ThreadSafeRefCounted<AudioContext>::deref;

    void startRendering();
    void fireCompletionEvent();

    static unsigned s_hardwareContextCount;

protected:
    explicit AudioContext(Document*);
    AudioContext(Document*, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate);

    static bool isSampleRateRangeGood(float sampleRate);

private:
    void constructCommon();

    void lazyInitialize();
    void uninitialize();

    // ScriptExecutionContext calls stop() twice.
    // We only want to schedule a single stop action for the context.
    bool m_isStopScheduled;
    static void stopDispatch(void* userData);
    void clear();

    void scheduleNodeDeletion();
    static void deleteMarkedNodesDispatch(void* userData);

    bool m_isInitialized;
    bool m_isAudioThreadFinished;

    // The context itself keeps a reference to all source nodes. The source nodes then reference all nodes they're connected to.
    // In turn, these nodes reference all nodes they're connected to. All nodes are ultimately connected to the AudioDestinationNode.
    // When the context dereferences a source node, it will be deactivated from the rendering graph along with all other nodes it is
    // uniquely connected to. See the AudioNode::ref() and AudioNode::deref() methods for more details.
    void refNode(AudioNode*);
    void derefNode(AudioNode*);
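
    // Sketch of how the reference chain described above is established (assumed
    // shape, based on the comment; the actual call sites live in AudioContext.cpp):
    //
    //     // When a source node is created, the context takes a reference so the
    //     // node, and everything it is connected to, stays alive while it can play:
    //     refNode(sourceNode.get());
    //
    //     // When the source has finished playing, the context lets go, and the
    //     // node's sub-graph can be deactivated:
    //     derefNode(sourceNode.get());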

(...skipping 35 matching lines...)
    // m_automaticPullNodesNeedUpdating keeps track of whether m_automaticPullNodes has been modified.
    bool m_automaticPullNodesNeedUpdating;
    void updateAutomaticPullNodes();

    unsigned m_connectionCount;

    // Graph locking.
    Mutex m_contextGraphMutex;
    volatile ThreadIdentifier m_audioThread;
    volatile ThreadIdentifier m_graphOwnerThread; // If the lock is held, this is the thread that owns it; otherwise it equals UndefinedThreadIdentifier.

    // Only accessed in the audio thread.
    Vector<AudioNode*> m_deferredFinishDerefList;

    // HRTF database loader.
    RefPtr<HRTFDatabaseLoader> m_hrtfDatabaseLoader;

    // EventTarget
    virtual void refEventTarget() { ref(); }
    virtual void derefEventTarget() { deref(); }
    EventTargetData m_eventTargetData;

    RefPtr<AudioBuffer> m_renderTarget;

    bool m_isOfflineContext;

    AsyncAudioDecoder m_audioDecoder;

    // 32 is assumed to be large enough for multi-channel audio.
    // The limit is somewhat arbitrary and could be increased if necessary.
    enum { MaxNumberOfChannels = 32 };

    // Number of AudioBufferSourceNodes that are active (playing).
    int m_activeSourceCount;
};

} // WebCore

#endif // AudioContext_h