path: root/Source/WebCore/Modules/webaudio/AudioContext.h
author     Lorry Tar Creator <lorry-tar-importer@lorry>  2016-04-10 09:28:39 +0000
committer  Lorry Tar Creator <lorry-tar-importer@lorry>  2016-04-10 09:28:39 +0000
commit     32761a6cee1d0dee366b885b7b9c777e67885688 (patch)
tree       d6bec92bebfb216f4126356e55518842c2f476a1 /Source/WebCore/Modules/webaudio/AudioContext.h
parent     a4e969f4965059196ca948db781e52f7cfebf19e (diff)
download   WebKitGtk-tarball-32761a6cee1d0dee366b885b7b9c777e67885688.tar.gz
webkitgtk-2.4.11
Diffstat (limited to 'Source/WebCore/Modules/webaudio/AudioContext.h')
-rw-r--r--  Source/WebCore/Modules/webaudio/AudioContext.h  |  141
1 file changed, 51 insertions(+), 90 deletions(-)
diff --git a/Source/WebCore/Modules/webaudio/AudioContext.h b/Source/WebCore/Modules/webaudio/AudioContext.h
index aba361573..1e965d9ad 100644
--- a/Source/WebCore/Modules/webaudio/AudioContext.h
+++ b/Source/WebCore/Modules/webaudio/AudioContext.h
@@ -31,10 +31,7 @@
#include "AudioDestinationNode.h"
#include "EventListener.h"
#include "EventTarget.h"
-#include "JSDOMPromise.h"
#include "MediaCanStartListener.h"
-#include "MediaProducer.h"
-#include "PlatformMediaSession.h"
#include <atomic>
#include <wtf/HashSet.h>
#include <wtf/MainThread.h>
@@ -59,7 +56,6 @@ class HTMLMediaElement;
class ChannelMergerNode;
class ChannelSplitterNode;
class GainNode;
-class GenericEventQueue;
class PannerNode;
class AudioListener;
class AudioSummingJunction;
@@ -77,10 +73,13 @@ class PeriodicWave;
// AudioContext is the cornerstone of the web audio API and all AudioNodes are created from it.
// For thread safety between the audio thread and the main thread, it has a rendering graph locking mechanism.
-class AudioContext : public ActiveDOMObject, public ThreadSafeRefCounted<AudioContext>, public EventTargetWithInlineData, public MediaCanStartListener, public MediaProducer, private PlatformMediaSessionClient {
+class AudioContext : public ActiveDOMObject, public ThreadSafeRefCounted<AudioContext>, public EventTargetWithInlineData, public MediaCanStartListener {
public:
// Create an AudioContext for rendering to the audio hardware.
- static RefPtr<AudioContext> create(Document&, ExceptionCode&);
+ static PassRefPtr<AudioContext> create(Document&, ExceptionCode&);
+
+ // Create an AudioContext for offline (non-realtime) rendering.
+ static PassRefPtr<AudioContext> createOfflineContext(Document*, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate, ExceptionCode&);
virtual ~AudioContext();
@@ -88,9 +87,15 @@ public:
bool isOfflineContext() { return m_isOfflineContext; }
- Document* document() const; // ASSERTs if document no longer exists.
+ // Returns true when initialize() was called AND all asynchronous initialization has completed.
+ bool isRunnable() const;
- const Document* hostingDocument() const override;
+ HRTFDatabaseLoader* hrtfDatabaseLoader() const { return m_hrtfDatabaseLoader.get(); }
+
+ // Document notification
+ virtual void stop() override;
+
+ Document* document() const; // ASSERTs if document no longer exists.
AudioDestinationNode* destination() { return m_destinationNode.get(); }
size_t currentSampleFrame() const { return m_destinationNode->currentSampleFrame(); }
@@ -109,17 +114,6 @@ public:
AudioListener* listener() { return m_listener.get(); }
- using ActiveDOMObject::suspend;
- using ActiveDOMObject::resume;
-
- typedef DOMPromise<std::nullptr_t, ExceptionCode> Promise;
-
- void suspend(Promise&&);
- void resume(Promise&&);
- void close(Promise&&);
-
- const AtomicString& state() const;
-
// The AudioNode create methods are called on the main thread (from JavaScript).
PassRefPtr<AudioBufferSourceNode> createBufferSource();
#if ENABLE(VIDEO)
@@ -243,6 +237,8 @@ public:
virtual EventTargetInterface eventTargetInterface() const override final { return AudioContextEventTargetInterfaceType; }
virtual ScriptExecutionContext* scriptExecutionContext() const override final;
+ DEFINE_ATTRIBUTE_EVENT_LISTENER(complete);
+
// Reconcile ref/deref which are defined both in ThreadSafeRefCounted and EventTarget.
using ThreadSafeRefCounted<AudioContext>::ref;
using ThreadSafeRefCounted<AudioContext>::deref;
@@ -260,14 +256,12 @@ public:
};
typedef unsigned BehaviorRestrictions;
- BehaviorRestrictions behaviorRestrictions() const { return m_restrictions; }
+ bool userGestureRequiredForAudioStart() const { return m_restrictions & RequireUserGestureForAudioStartRestriction; }
+ bool pageConsentRequiredForAudioStart() const { return m_restrictions & RequirePageConsentForAudioStartRestriction; }
+
void addBehaviorRestriction(BehaviorRestrictions restriction) { m_restrictions |= restriction; }
void removeBehaviorRestriction(BehaviorRestrictions restriction) { m_restrictions &= ~restriction; }
- void isPlayingAudioDidChange();
-
- void nodeWillBeginPlayback();
-
protected:
explicit AudioContext(Document&);
AudioContext(Document&, unsigned numberOfChannels, size_t numberOfFrames, float sampleRate);
@@ -280,24 +274,19 @@ private:
void lazyInitialize();
void uninitialize();
- bool willBeginPlayback();
- bool willPausePlayback();
-
- bool userGestureRequiredForAudioStart() const { return m_restrictions & RequireUserGestureForAudioStartRestriction; }
- bool pageConsentRequiredForAudioStart() const { return m_restrictions & RequirePageConsentForAudioStartRestriction; }
-
- enum class State { Suspended, Running, Interrupted, Closed };
- void setState(State);
-
+ // ScriptExecutionContext calls stop twice.
+ // We'd like to schedule only one stop action for them.
+ bool m_isStopScheduled;
+ static void stopDispatch(void* userData);
void clear();
void scheduleNodeDeletion();
+ static void deleteMarkedNodesDispatch(void* userData);
virtual void mediaCanStart() override;
- // MediaProducer
- virtual MediaProducer::MediaStateFlags mediaState() const override;
- virtual void pageMutedStateDidChange() override;
+ bool m_isInitialized;
+ bool m_isAudioThreadFinished;
// The context itself keeps a reference to all source nodes. The source nodes, then reference all nodes they're connected to.
// In turn, these nodes reference all nodes they're connected to. All nodes are ultimately connected to the AudioDestinationNode.
@@ -306,33 +295,12 @@ private:
void refNode(AudioNode*);
void derefNode(AudioNode*);
- // ActiveDOMObject API.
- void stop() override;
- bool canSuspendForDocumentSuspension() const override;
- const char* activeDOMObjectName() const override;
-
// When the context goes away, there might still be some sources which haven't finished playing.
// Make sure to dereference them here.
void derefUnfinishedSourceNodes();
- // PlatformMediaSessionClient
- PlatformMediaSession::MediaType mediaType() const override { return PlatformMediaSession::WebAudio; }
- PlatformMediaSession::MediaType presentationType() const override { return PlatformMediaSession::WebAudio; }
- void mayResumePlayback(bool shouldResume) override;
- void suspendPlayback() override;
- bool canReceiveRemoteControlCommands() const override { return false; }
- void didReceiveRemoteControlCommand(PlatformMediaSession::RemoteControlCommandType) override { }
- bool shouldOverrideBackgroundPlaybackRestriction(PlatformMediaSession::InterruptionType) const override { return false; }
-
- // EventTarget
- virtual void refEventTarget() override { ref(); }
- virtual void derefEventTarget() override { deref(); }
-
- void handleDirtyAudioSummingJunctions();
- void handleDirtyAudioNodeOutputs();
-
- void addReaction(State, Promise&&);
- void updateAutomaticPullNodes();
+ RefPtr<AudioDestinationNode> m_destinationNode;
+ RefPtr<AudioListener> m_listener;
// Only accessed in the audio thread.
Vector<AudioNode*> m_finishedNodes;
@@ -350,39 +318,42 @@ private:
// They will be scheduled for deletion (on the main thread) at the end of a render cycle (in realtime thread).
Vector<AudioNode*> m_nodesToDelete;
-
- bool m_isDeletionScheduled { false };
- bool m_isStopScheduled { false };
- bool m_isInitialized { false };
- bool m_isAudioThreadFinished { false };
- bool m_automaticPullNodesNeedUpdating { false };
- bool m_isOfflineContext { false };
+ bool m_isDeletionScheduled;
// Only accessed when the graph lock is held.
HashSet<AudioSummingJunction*> m_dirtySummingJunctions;
HashSet<AudioNodeOutput*> m_dirtyAudioNodeOutputs;
+ void handleDirtyAudioSummingJunctions();
+ void handleDirtyAudioNodeOutputs();
// For the sake of thread safety, we maintain a seperate Vector of automatic pull nodes for rendering in m_renderingAutomaticPullNodes.
// It will be copied from m_automaticPullNodes by updateAutomaticPullNodes() at the very start or end of the rendering quantum.
HashSet<AudioNode*> m_automaticPullNodes;
Vector<AudioNode*> m_renderingAutomaticPullNodes;
+ // m_automaticPullNodesNeedUpdating keeps track if m_automaticPullNodes is modified.
+ bool m_automaticPullNodesNeedUpdating;
+ void updateAutomaticPullNodes();
+
+ unsigned m_connectionCount;
+
+ // Graph locking.
+ Mutex m_contextGraphMutex;
+ volatile ThreadIdentifier m_audioThread;
+ volatile ThreadIdentifier m_graphOwnerThread; // if the lock is held then this is the thread which owns it, otherwise == UndefinedThreadIdentifier
+
// Only accessed in the audio thread.
Vector<AudioNode*> m_deferredFinishDerefList;
- Vector<Vector<Promise>> m_stateReactions;
+
+ // HRTF Database loader
+ RefPtr<HRTFDatabaseLoader> m_hrtfDatabaseLoader;
- std::unique_ptr<PlatformMediaSession> m_mediaSession;
- std::unique_ptr<GenericEventQueue> m_eventQueue;
+ // EventTarget
+ virtual void refEventTarget() override { ref(); }
+ virtual void derefEventTarget() override { deref(); }
RefPtr<AudioBuffer> m_renderTarget;
- RefPtr<AudioDestinationNode> m_destinationNode;
- RefPtr<AudioListener> m_listener;
-
- unsigned m_connectionCount { 0 };
-
- // Graph locking.
- Lock m_contextGraphMutex;
- volatile ThreadIdentifier m_audioThread { 0 };
- volatile ThreadIdentifier m_graphOwnerThread; // if the lock is held then this is the thread which owns it, otherwise == UndefinedThreadIdentifier
+
+ bool m_isOfflineContext;
AsyncAudioDecoder m_audioDecoder;
@@ -391,21 +362,11 @@ private:
enum { MaxNumberOfChannels = 32 };
// Number of AudioBufferSourceNodes that are active (playing).
- std::atomic<int> m_activeSourceCount { 0 };
+ std::atomic<int> m_activeSourceCount;
- BehaviorRestrictions m_restrictions { NoRestrictions };
-
- State m_state { State::Suspended };
+ BehaviorRestrictions m_restrictions;
};
-inline bool operator==(const AudioContext& lhs, const AudioContext& rhs) {
- return &lhs == &rhs;
-}
-
-inline bool operator!=(const AudioContext& lhs, const AudioContext& rhs) {
- return &lhs != &rhs;
-}
-
} // WebCore
#endif // AudioContext_h
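
For context, a minimal usage sketch of the two factory entry points declared in this header after the change, create() and createOfflineContext(). The helper function name and the channel/frame/sample-rate values are illustrative assumptions, and the snippet only compiles inside a WebCore translation unit with a live Document.

// Illustrative sketch only: assumes a WebKit-internal translation unit with a valid
// Document; the helper name and parameter values are hypothetical.
#include "AudioContext.h"
#include "Document.h"
#include "ExceptionCode.h"

namespace WebCore {

static void createExampleContexts(Document& document)
{
    ExceptionCode ec = 0;

    // Realtime context that renders to the audio hardware.
    RefPtr<AudioContext> realtimeContext = AudioContext::create(document, ec);
    if (ec || !realtimeContext)
        return;

    // Offline (non-realtime) context: 2 channels, 44100 frames at a 44.1 kHz sample rate.
    RefPtr<AudioContext> offlineContext = AudioContext::createOfflineContext(&document, 2, 44100, 44100, ec);
    if (ec || !offlineContext)
        return;
}

} // namespace WebCore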