diff --git a/Userland/Libraries/LibWeb/Forward.h b/Userland/Libraries/LibWeb/Forward.h index b473d18936..8547a47bb9 100644 --- a/Userland/Libraries/LibWeb/Forward.h +++ b/Userland/Libraries/LibWeb/Forward.h @@ -27,6 +27,7 @@ namespace Web::Bindings { class Intrinsics; class OptionConstructor; +enum class AudioContextLatencyCategory; enum class CanPlayTypeResult; enum class CanvasFillRule; enum class DOMParserSupportedType; @@ -603,6 +604,10 @@ class Table; namespace Web::WebAudio { class AudioContext; class BaseAudioContext; + +enum class AudioContextState; + +struct AudioContextOptions; } namespace Web::WebGL { diff --git a/Userland/Libraries/LibWeb/HTML/EventNames.h b/Userland/Libraries/LibWeb/HTML/EventNames.h index 3804919072..e646e3ff9b 100644 --- a/Userland/Libraries/LibWeb/HTML/EventNames.h +++ b/Userland/Libraries/LibWeb/HTML/EventNames.h @@ -84,6 +84,7 @@ namespace Web::HTML::EventNames { __ENUMERATE_HTML_EVENT(select) \ __ENUMERATE_HTML_EVENT(slotchange) \ __ENUMERATE_HTML_EVENT(stalled) \ + __ENUMERATE_HTML_EVENT(statechange) \ __ENUMERATE_HTML_EVENT(storage) \ __ENUMERATE_HTML_EVENT(submit) \ __ENUMERATE_HTML_EVENT(suspend) \ diff --git a/Userland/Libraries/LibWeb/WebAudio/AudioContext.cpp b/Userland/Libraries/LibWeb/WebAudio/AudioContext.cpp index 1be57a3983..4a261f3f24 100644 --- a/Userland/Libraries/LibWeb/WebAudio/AudioContext.cpp +++ b/Userland/Libraries/LibWeb/WebAudio/AudioContext.cpp @@ -5,20 +5,76 @@ */ #include <LibWeb/Bindings/Intrinsics.h> +#include <LibWeb/DOM/Event.h> +#include <LibWeb/HTML/EventNames.h> #include <LibWeb/WebAudio/AudioContext.h> +#include <LibWeb/WebIDL/Promise.h> namespace Web::WebAudio { // https://webaudio.github.io/web-audio-api/#dom-audiocontext-audiocontext -WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioContext>> AudioContext::construct_impl(JS::Realm& realm) +WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioContext>> AudioContext::construct_impl(JS::Realm& realm, AudioContextOptions const& context_options) { - dbgln("(STUBBED) new AudioContext()"); - return MUST_OR_THROW_OOM(realm.heap().allocate<AudioContext>(realm, realm)); + return MUST_OR_THROW_OOM(realm.heap().allocate<AudioContext>(realm, realm, context_options)); }
-AudioContext::AudioContext(JS::Realm& realm) +AudioContext::AudioContext(JS::Realm& realm, AudioContextOptions const& context_options) : BaseAudioContext(realm) { + // FIXME: If the current settings object’s responsible document is NOT fully active, throw an InvalidStateError and abort these steps. + + // 1: Set a [[control thread state]] to suspended on the AudioContext. + BaseAudioContext::set_control_state(Bindings::AudioContextState::Suspended); + + // 2: Set a [[rendering thread state]] to suspended on the AudioContext. + BaseAudioContext::set_rendering_state(Bindings::AudioContextState::Suspended); + + // 3: Let [[pending resume promises]] be a slot on this AudioContext, that is an initially empty ordered list of promises. + + // 4: If contextOptions is given, apply the options: + // 4.1: Set the internal latency of this AudioContext according to contextOptions.latencyHint, as described in latencyHint. + switch (context_options.latency_hint) { + case Bindings::AudioContextLatencyCategory::Balanced: + // FIXME: Determine optimal settings for balanced. + break; + case Bindings::AudioContextLatencyCategory::Interactive: + // FIXME: Determine optimal settings for interactive. + break; + case Bindings::AudioContextLatencyCategory::Playback: + // FIXME: Determine optimal settings for playback. + break; + default: + VERIFY_NOT_REACHED(); + } + + // 4.2: If contextOptions.sampleRate is specified, set the sampleRate of this AudioContext to this value. Otherwise, + // use the sample rate of the default output device. If the selected sample rate differs from the sample rate of the output device, + // this AudioContext MUST resample the audio output to match the sample rate of the output device. 
+ if (context_options.sample_rate.has_value()) { + BaseAudioContext::set_sample_rate(context_options.sample_rate.value()); + } else { + // FIXME: This would ideally be coming from the default output device, but we can only get this on Serenity + // For now we'll just have to resample + BaseAudioContext::set_sample_rate(44100); + } + + // FIXME: 5: If the context is allowed to start, send a control message to start processing. + // FIXME: Implement control message queue to run following steps on the rendering thread + if (m_allowed_to_start) { + // FIXME: 5.1: Attempt to acquire system resources. In case of failure, abort the following steps. + + // 5.2: Set the [[rendering thread state]] to "running" on the AudioContext. + BaseAudioContext::set_rendering_state(Bindings::AudioContextState::Running); + + // 5.3: queue a media element task to execute the following steps: + queue_a_media_element_task([&realm, this]() { + // 5.3.1: Set the state attribute of the AudioContext to "running". + BaseAudioContext::set_control_state(Bindings::AudioContextState::Running); + + // 5.3.2: queue a media element task to fire an event named statechange at the AudioContext. 
+ this->dispatch_event(DOM::Event::create(realm, HTML::EventNames::statechange).release_value_but_fixme_should_propagate_errors()); + }); + } } AudioContext::~AudioContext() = default; @@ -31,4 +87,219 @@ JS::ThrowCompletionOr<void> AudioContext::initialize(JS::Realm& realm) return {}; } +void AudioContext::visit_edges(Cell::Visitor& visitor) +{ + Base::visit_edges(visitor); + for (auto& promise : m_pending_promises) + visitor.visit(promise); + for (auto& promise : m_pending_resume_promises) + visitor.visit(promise); +} + +// https://www.w3.org/TR/webaudio/#dom-audiocontext-getoutputtimestamp +AudioTimestamp AudioContext::get_output_timestamp() +{ + dbgln("(STUBBED) getOutputTimestamp()"); + return {}; +} + +// https://www.w3.org/TR/webaudio/#dom-audiocontext-resume +WebIDL::ExceptionOr<JS::NonnullGCPtr<JS::Promise>> AudioContext::resume() +{ + auto& realm = this->realm(); + auto& vm = realm.vm(); + + // FIXME: 1. If this's relevant global object's associated Document is not fully active then return a promise rejected with "InvalidStateError" DOMException. + + // 2. Let promise be a new Promise. + auto promise = WebIDL::create_promise(realm); + + // 3. If the [[control thread state]] on the AudioContext is closed reject the promise with InvalidStateError, abort these steps, returning promise. + if (state() == Bindings::AudioContextState::Closed) { + WebIDL::reject_promise(realm, promise, WebIDL::InvalidStateError::create(realm, "Audio context is already closed.")); + return JS::NonnullGCPtr<JS::Promise> { verify_cast<JS::Promise>(*promise->promise()) }; + } + + // 4. Set [[suspended by user]] to true. + m_suspended_by_user = true; + + // 5. If the context is not allowed to start, append promise to [[pending promises]] and [[pending resume promises]] and abort these steps, returning promise. + if (m_allowed_to_start) { + TRY_OR_THROW_OOM(vm, m_pending_promises.try_append(promise)); + TRY_OR_THROW_OOM(vm, m_pending_resume_promises.try_append(promise)); + } + + // 6. 
Set the [[control thread state]] on the AudioContext to running. + set_control_state(Bindings::AudioContextState::Running); + + // 7. Queue a control message to resume the AudioContext. + // FIXME: Implement control message queue to run following steps on the rendering thread + + // FIXME: 7.1: Attempt to acquire system resources. + + // 7.2: Set the [[rendering thread state]] on the AudioContext to running. + set_rendering_state(Bindings::AudioContextState::Running); + + // 7.3: Start rendering the audio graph. + if (!start_rendering_audio_graph()) { + // 7.4: In case of failure, queue a media element task to execute the following steps: + queue_a_media_element_task([&realm, this]() { + // 7.4.1: Reject all promises from [[pending resume promises]] in order, then clear [[pending resume promises]]. + for (auto const& promise : m_pending_resume_promises) { + WebIDL::reject_promise(realm, promise, JS::js_null()); + } + m_pending_resume_promises.clear(); + + // FIXME: 7.4.2: Additionally, remove those promises from [[pending promises]]. + }); + } + + // 7.5: queue a media element task to execute the following steps: + queue_a_media_element_task([&realm, &promise, this]() { + // 7.5.1: Resolve all promises from [[pending resume promises]] in order. + for (auto const& promise : m_pending_resume_promises) { + *promise->resolve(); + } + + // 7.5.2: Clear [[pending resume promises]]. + m_pending_resume_promises.clear(); + + // FIXME: Additionally, remove those promises from [[pending promises]]. + + // 7.5.3: Resolve promise. + *promise->resolve(); + + // 7.5.4: If the state attribute of the AudioContext is not already "running": + if (state() != Bindings::AudioContextState::Running) { + // 7.5.4.1: Set the state attribute of the AudioContext to "running". + set_control_state(Bindings::AudioContextState::Running); + + // 7.5.4.2: queue a media element task to fire an event named statechange at the AudioContext. 
+ queue_a_media_element_task([&realm, this]() { + this->dispatch_event(DOM::Event::create(realm, HTML::EventNames::statechange).release_value_but_fixme_should_propagate_errors()); + }); + } + }); + + // 8. Return promise. + return JS::NonnullGCPtr<JS::Promise> { verify_cast<JS::Promise>(*promise->promise()) }; +} + +// https://www.w3.org/TR/webaudio/#dom-audiocontext-suspend +WebIDL::ExceptionOr<JS::NonnullGCPtr<JS::Promise>> AudioContext::suspend() +{ + auto& realm = this->realm(); + auto& vm = realm.vm(); + + // FIXME: 1. If this's relevant global object's associated Document is not fully active then return a promise rejected with "InvalidStateError" DOMException. + + // 2. Let promise be a new Promise. + auto promise = WebIDL::create_promise(realm); + + // 3. If the [[control thread state]] on the AudioContext is closed reject the promise with InvalidStateError, abort these steps, returning promise. + if (state() == Bindings::AudioContextState::Closed) { + WebIDL::reject_promise(realm, promise, WebIDL::InvalidStateError::create(realm, "Audio context is already closed.")); + return JS::NonnullGCPtr<JS::Promise> { verify_cast<JS::Promise>(*promise->promise()) }; + } + + // 4. Append promise to [[pending promises]]. + TRY_OR_THROW_OOM(vm, m_pending_promises.try_append(promise)); + + // 5. Set [[suspended by user]] to true. + m_suspended_by_user = true; + + // 6. Set the [[control thread state]] on the AudioContext to suspended. + set_control_state(Bindings::AudioContextState::Suspended); + + // 7. Queue a control message to suspend the AudioContext. + // FIXME: Implement control message queue to run following steps on the rendering thread + + // FIXME: 7.1: Attempt to release system resources. + + // 7.2: Set the [[rendering thread state]] on the AudioContext to suspended. + set_rendering_state(Bindings::AudioContextState::Suspended); + + // 7.3: queue a media element task to execute the following steps: + queue_a_media_element_task([&realm, &promise, this]() { + // 7.3.1: Resolve promise. 
+ *promise->resolve(); + + // 7.3.2: If the state attribute of the AudioContext is not already "suspended": + if (state() != Bindings::AudioContextState::Suspended) { + // 7.3.2.1: Set the state attribute of the AudioContext to "suspended". + set_control_state(Bindings::AudioContextState::Suspended); + + // 7.3.2.2: queue a media element task to fire an event named statechange at the AudioContext. + queue_a_media_element_task([&realm, this]() { + this->dispatch_event(DOM::Event::create(realm, HTML::EventNames::statechange).release_value_but_fixme_should_propagate_errors()); + }); + } + }); + + // 8. Return promise. + return JS::NonnullGCPtr<JS::Promise> { verify_cast<JS::Promise>(*promise->promise()) }; +} + +// https://www.w3.org/TR/webaudio/#dom-audiocontext-close +WebIDL::ExceptionOr<JS::NonnullGCPtr<JS::Promise>> AudioContext::close() +{ + auto& realm = this->realm(); + + // FIXME: 1. If this's relevant global object's associated Document is not fully active then return a promise rejected with "InvalidStateError" DOMException. + + // 2. Let promise be a new Promise. + auto promise = WebIDL::create_promise(realm); + + // 3. If the [[control thread state]] flag on the AudioContext is closed reject the promise with InvalidStateError, abort these steps, returning promise. + if (state() == Bindings::AudioContextState::Closed) { + WebIDL::reject_promise(realm, promise, WebIDL::InvalidStateError::create(realm, "Audio context is already closed.")); + return JS::NonnullGCPtr<JS::Promise> { verify_cast<JS::Promise>(*promise->promise()) }; + } + + // 4. Set the [[control thread state]] flag on the AudioContext to closed. + set_control_state(Bindings::AudioContextState::Closed); + + // 5. Queue a control message to close the AudioContext. + // FIXME: Implement control message queue to run following steps on the rendering thread + + // FIXME: 5.1: Attempt to release system resources. + + // 5.2: Set the [[rendering thread state]] to "suspended". 
+ set_rendering_state(Bindings::AudioContextState::Suspended); + + // FIXME: 5.3: If this control message is being run in a reaction to the document being unloaded, abort this algorithm. + + // 5.4: queue a media element task to execute the following steps: + queue_a_media_element_task([&realm, &promise, this]() { + // 5.4.1: Resolve promise. + *promise->resolve(); + + // 5.4.2: If the state attribute of the AudioContext is not already "closed": + if (state() != Bindings::AudioContextState::Closed) { + // 5.4.2.1: Set the state attribute of the AudioContext to "closed". + set_control_state(Bindings::AudioContextState::Closed); + } + + // 5.4.2.2: queue a media element task to fire an event named statechange at the AudioContext. + // FIXME: Attempting to queue another task in here causes an assertion fail at Vector.h:148 + this->dispatch_event(DOM::Event::create(realm, HTML::EventNames::statechange).release_value_but_fixme_should_propagate_errors()); + }); + + // 6. Return promise + return JS::NonnullGCPtr<JS::Promise> { verify_cast<JS::Promise>(*promise->promise()) }; +} + +void AudioContext::queue_a_media_element_task(JS::SafeFunction<void()> steps) +{ + auto task = HTML::Task::create(m_media_element_event_task_source.source, HTML::current_settings_object().responsible_document(), move(steps)); + HTML::main_thread_event_loop().task_queue().add(move(task)); +} + +// FIXME: Actually implement the rendering thread +bool AudioContext::start_rendering_audio_graph() +{ + bool render_result = true; + return render_result; +} + } diff --git a/Userland/Libraries/LibWeb/WebAudio/AudioContext.h b/Userland/Libraries/LibWeb/WebAudio/AudioContext.h index a0fc18acf3..3ccb5697fb 100644 --- a/Userland/Libraries/LibWeb/WebAudio/AudioContext.h +++ b/Userland/Libraries/LibWeb/WebAudio/AudioContext.h @@ -6,23 +6,55 @@ #pragma once +#include <LibWeb/Bindings/AudioContextPrototype.h> +#include <LibWeb/HTML/EventLoop/Task.h> #include <LibWeb/WebAudio/BaseAudioContext.h> namespace Web::WebAudio { +struct AudioContextOptions { + Bindings::AudioContextLatencyCategory latency_hint = 
Bindings::AudioContextLatencyCategory::Interactive; + Optional<float> sample_rate; +}; + +struct AudioTimestamp { + double context_time { 0 }; + double performance_time { 0 }; +}; + // https://webaudio.github.io/web-audio-api/#AudioContext class AudioContext final : public BaseAudioContext { WEB_PLATFORM_OBJECT(AudioContext, BaseAudioContext); public: - static WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioContext>> construct_impl(JS::Realm&); + static WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioContext>> construct_impl(JS::Realm&, AudioContextOptions const& context_options = {}); virtual ~AudioContext() override; + double base_latency() const { return m_base_latency; }; + double output_latency() const { return m_output_latency; }; + AudioTimestamp get_output_timestamp(); + WebIDL::ExceptionOr<JS::NonnullGCPtr<JS::Promise>> resume(); + WebIDL::ExceptionOr<JS::NonnullGCPtr<JS::Promise>> suspend(); + WebIDL::ExceptionOr<JS::NonnullGCPtr<JS::Promise>> close(); + private: - explicit AudioContext(JS::Realm&); + explicit AudioContext(JS::Realm&, AudioContextOptions const& context_options); virtual JS::ThrowCompletionOr<void> initialize(JS::Realm&) override; + virtual void visit_edges(Cell::Visitor&) override; + + double m_base_latency { 0 }; + double m_output_latency { 0 }; + + bool m_allowed_to_start = true; + Vector<JS::NonnullGCPtr<WebIDL::Promise>> m_pending_promises; + Vector<JS::NonnullGCPtr<WebIDL::Promise>> m_pending_resume_promises; + bool m_suspended_by_user = false; + HTML::UniqueTaskSource m_media_element_event_task_source {}; + + void queue_a_media_element_task(JS::SafeFunction<void()> steps); + bool start_rendering_audio_graph(); }; } diff --git a/Userland/Libraries/LibWeb/WebAudio/AudioContext.idl b/Userland/Libraries/LibWeb/WebAudio/AudioContext.idl index b3a41552de..9fad4595f8 100644 --- a/Userland/Libraries/LibWeb/WebAudio/AudioContext.idl +++ b/Userland/Libraries/LibWeb/WebAudio/AudioContext.idl @@ -1,8 +1,31 @@ #import <WebAudio/BaseAudioContext.idl> +// https://www.w3.org/TR/webaudio/#enumdef-audiocontextlatencycategory +enum AudioContextLatencyCategory { "balanced", "interactive", "playback" }; + // https://webaudio.github.io/web-audio-api/#AudioContext [Exposed=Window] interface AudioContext : BaseAudioContext { - // 
FIXME: Should be constructor (optional AudioContextOptions contextOptions = {}); - constructor(); + constructor(optional AudioContextOptions contextOptions = {}); + readonly attribute double baseLatency; + readonly attribute double outputLatency; + AudioTimestamp getOutputTimestamp (); + Promise<undefined> resume (); + Promise<undefined> suspend (); + Promise<undefined> close (); + // FIXME: MediaElementAudioSourceNode createMediaElementSource (HTMLMediaElement mediaElement); + // FIXME: MediaStreamAudioSourceNode createMediaStreamSource (MediaStream mediaStream); + // FIXME: MediaStreamTrackAudioSourceNode createMediaStreamTrackSource (MediaStreamTrack mediaStreamTrack); + // FIXME: MediaStreamAudioDestinationNode createMediaStreamDestination (); +}; + +dictionary AudioContextOptions { + AudioContextLatencyCategory latencyHint = "interactive"; + float sampleRate; +}; + +dictionary AudioTimestamp { + double contextTime; + // FIXME: Should be DOMHighResTimeStamp, but DOMHighResTimeStamp doesn't get parsed as a double during codegen + double performanceTime; }; diff --git a/Userland/Libraries/LibWeb/WebAudio/BaseAudioContext.cpp b/Userland/Libraries/LibWeb/WebAudio/BaseAudioContext.cpp index 02b8a119f0..7ce5f69436 100644 --- a/Userland/Libraries/LibWeb/WebAudio/BaseAudioContext.cpp +++ b/Userland/Libraries/LibWeb/WebAudio/BaseAudioContext.cpp @@ -5,6 +5,7 @@ */ #include <LibWeb/Bindings/Intrinsics.h> +#include <LibWeb/HTML/EventNames.h> #include <LibWeb/WebAudio/BaseAudioContext.h> namespace Web::WebAudio { @@ -24,4 +25,14 @@ JS::ThrowCompletionOr<void> BaseAudioContext::initialize(JS::Realm& realm) return {}; } +void BaseAudioContext::set_onstatechange(WebIDL::CallbackType* event_handler) +{ + set_event_handler_attribute(HTML::EventNames::statechange, event_handler); +} + +WebIDL::CallbackType* BaseAudioContext::onstatechange() +{ + return event_handler_attribute(HTML::EventNames::statechange); +} + } diff --git a/Userland/Libraries/LibWeb/WebAudio/BaseAudioContext.h b/Userland/Libraries/LibWeb/WebAudio/BaseAudioContext.h index 6b6365a4a0..c587f64be5 --- 
a/Userland/Libraries/LibWeb/WebAudio/BaseAudioContext.h +++ b/Userland/Libraries/LibWeb/WebAudio/BaseAudioContext.h @@ -6,6 +6,7 @@ #pragma once +#include <LibWeb/Bindings/BaseAudioContextPrototype.h> #include <LibWeb/DOM/EventTarget.h> namespace Web::WebAudio { @@ -17,10 +18,28 @@ class BaseAudioContext : public DOM::EventTarget { public: virtual ~BaseAudioContext() override; + float sample_rate() const { return m_sample_rate; }; + double current_time() const { return m_current_time; }; + Bindings::AudioContextState state() const { return m_control_thread_state; }; + + void set_onstatechange(WebIDL::CallbackType*); + WebIDL::CallbackType* onstatechange(); + + void set_sample_rate(float sample_rate) { m_sample_rate = sample_rate; }; + void set_control_state(Bindings::AudioContextState state) { m_control_thread_state = state; }; + void set_rendering_state(Bindings::AudioContextState state) { m_rendering_thread_state = state; }; + protected: explicit BaseAudioContext(JS::Realm&); virtual JS::ThrowCompletionOr<void> initialize(JS::Realm&) override; + +private: + float m_sample_rate { 0 }; + double m_current_time { 0 }; + + Bindings::AudioContextState m_control_thread_state = Bindings::AudioContextState::Suspended; + Bindings::AudioContextState m_rendering_thread_state = Bindings::AudioContextState::Suspended; }; } diff --git a/Userland/Libraries/LibWeb/WebAudio/BaseAudioContext.idl b/Userland/Libraries/LibWeb/WebAudio/BaseAudioContext.idl index a246460979..c28354fc3f 100644 --- a/Userland/Libraries/LibWeb/WebAudio/BaseAudioContext.idl +++ b/Userland/Libraries/LibWeb/WebAudio/BaseAudioContext.idl @@ -1,6 +1,43 @@ #import <DOM/EventTarget.idl> +#import <HTML/EventHandler.idl> + +// https://www.w3.org/TR/webaudio/#enumdef-audiocontextstate +enum AudioContextState { "suspended", "running", "closed" }; + +// FIXME: callback DecodeErrorCallback = undefined (DOMException error); + +// FIXME: callback DecodeSuccessCallback = undefined (AudioBuffer decodedData); // https://webaudio.github.io/web-audio-api/#BaseAudioContext [Exposed=Window] interface BaseAudioContext : EventTarget { + // FIXME: 
readonly attribute AudioDestinationNode destination; + readonly attribute float sampleRate; + readonly attribute double currentTime; + // FIXME: readonly attribute AudioListener listener; + readonly attribute AudioContextState state; + // FIXME: [SameObject, SecureContext] + // readonly attribute AudioWorklet audioWorklet; + attribute EventHandler onstatechange; + + // FIXME: AnalyserNode createAnalyser (); + // FIXME: BiquadFilterNode createBiquadFilter (); + // FIXME: AudioBuffer createBuffer (unsigned long numberOfChannels, unsigned long length, float sampleRate); + // FIXME: AudioBufferSourceNode createBufferSource (); + // FIXME: ChannelMergerNode createChannelMerger (optional unsigned long numberOfInputs = 6); + // FIXME: ChannelSplitterNode createChannelSplitter (optional unsigned long numberOfOutputs = 6); + // FIXME: ConstantSourceNode createConstantSource (); + // FIXME: ConvolverNode createConvolver (); + // FIXME: DelayNode createDelay (optional double maxDelayTime = 1.0); + // FIXME: DynamicsCompressorNode createDynamicsCompressor (); + // FIXME: GainNode createGain (); + // FIXME: IIRFilterNode createIIRFilter (sequence<double> feedforward, sequence<double> feedback); + // FIXME: OscillatorNode createOscillator (); + // FIXME: PannerNode createPanner (); + // FIXME: PeriodicWave createPeriodicWave (sequence<float> real, sequence<float> imag, optional PeriodicWaveConstraints constraints = {}); + // FIXME: ScriptProcessorNode createScriptProcessor(optional unsigned long bufferSize = 0, optional unsigned long numberOfInputChannels = 2, optional unsigned long numberOfOutputChannels = 2); + // FIXME: StereoPannerNode createStereoPanner (); + // FIXME: WaveShaperNode createWaveShaper (); + + // FIXME: Promise<AudioBuffer> decodeAudioData (ArrayBuffer audioData, optional DecodeSuccessCallback? successCallback, optional DecodeErrorCallback? errorCallback); };