LibWeb/WebAudio: Implement basic startRendering

Adds passing WPT. Does not handle actually rendering audio yet.
Ben Eidson 2025-10-22 10:51:31 -04:00 committed by Jelle Raaijmakers
parent 5abb5d555a
commit 01947ded23
6 changed files with 143 additions and 8 deletions
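
As a rough illustration of the behavior covered here (a sketch mirroring the tests added in this commit, not the implementation itself; run it inside a module or async function — the rendered buffer is still silent because no audio is actually rendered yet):

    const context = new OfflineAudioContext({ length: 128, sampleRate: 48000 });
    context.oncomplete = () => { /* a "complete" event is fired once rendering finishes */ };

    // startRendering() resolves with the internally created AudioBuffer.
    const buffer = await context.startRendering();

    // A second call must reject with an InvalidStateError.
    try {
        await context.startRendering();
    } catch (e) {
        console.log(e.name); // "InvalidStateError"
    }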

View File

@@ -1,12 +1,17 @@
/*
* Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
* Copyright (c) 2025, Ben Eidson <b.e.eidson@gmail.com>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <LibWeb/Bindings/Intrinsics.h>
#include <LibWeb/DOM/Event.h>
#include <LibWeb/HTML/EventNames.h>
#include <LibWeb/HTML/Navigable.h>
#include <LibWeb/HTML/Scripting/TemporaryExecutionContext.h>
#include <LibWeb/HTML/Window.h>
#include <LibWeb/WebAudio/AudioBuffer.h>
#include <LibWeb/WebAudio/AudioDestinationNode.h>
#include <LibWeb/WebAudio/OfflineAudioContext.h>
@@ -23,7 +28,7 @@ WebIDL::ExceptionOr<GC::Ref<OfflineAudioContext>> OfflineAudioContext::construct
TRY(verify_audio_options_inside_nominal_range(realm, context_options.number_of_channels, context_options.length, context_options.sample_rate));
// Let c be a new OfflineAudioContext object. Initialize c as follows:
-auto c = realm.create<OfflineAudioContext>(realm, context_options.length, context_options.sample_rate);
auto c = realm.create<OfflineAudioContext>(realm, context_options.number_of_channels, context_options.length, context_options.sample_rate);
// 1. Set the [[control thread state]] for c to "suspended".
c->set_control_state(Bindings::AudioContextState::Suspended);
@@ -58,7 +63,86 @@ OfflineAudioContext::~OfflineAudioContext() = default;
// https://webaudio.github.io/web-audio-api/#dom-offlineaudiocontext-startrendering
WebIDL::ExceptionOr<GC::Ref<WebIDL::Promise>> OfflineAudioContext::start_rendering()
{
-return WebIDL::NotSupportedError::create(realm(), "FIXME: Implement OfflineAudioContext::start_rendering"_utf16);
auto& realm = this->realm();
// 1. If this's relevant global object's associated Document is not fully active then return a promise rejected with "InvalidStateError" DOMException.
auto& window = as<HTML::Window>(HTML::relevant_global_object(*this));
auto const& associated_document = window.associated_document();
if (!associated_document.is_fully_active()) {
auto error = WebIDL::InvalidStateError::create(realm, "Document is not fully active"_utf16);
return WebIDL::create_rejected_promise_from_exception(realm, error);
}
// AD-HOC: Not in spec explicitly, but this should account for detached iframes too. See /the-offlineaudiocontext-interface/startrendering-after-discard.html WPT.
auto navigable = window.navigable();
if (navigable && navigable->has_been_destroyed()) {
auto error = WebIDL::InvalidStateError::create(realm, "The iframe has been detached"_utf16);
return WebIDL::create_rejected_promise_from_exception(realm, error);
}
// 2. If the [[rendering started]] slot on the OfflineAudioContext is true, return a rejected promise with InvalidStateError, and abort these steps.
if (m_rendering_started) {
auto error = WebIDL::InvalidStateError::create(realm, "Rendering is already started"_utf16);
return WebIDL::create_rejected_promise_from_exception(realm, error);
}
// 3. Set the [[rendering started]] slot of the OfflineAudioContext to true.
m_rendering_started = true;
// 4. Let promise be a new promise.
auto promise = WebIDL::create_promise(realm);
// 5. Create a new AudioBuffer, with a number of channels, length and sample rate equal respectively to the
// numberOfChannels, length and sampleRate values passed to this instance's constructor in the contextOptions
// parameter.
auto buffer_result = create_buffer(m_number_of_channels, length(), sample_rate());
// 6. If an exception was thrown during the preceding AudioBuffer constructor call, reject promise with this exception.
if (buffer_result.is_exception()) {
return WebIDL::create_rejected_promise_from_exception(realm, buffer_result.exception());
}
// Assign this buffer to an internal slot [[rendered buffer]] in the OfflineAudioContext.
m_rendered_buffer = buffer_result.release_value();
// 7. Otherwise, in the case that the buffer was successfully constructed, begin offline rendering.
begin_offline_rendering(promise);
// 8. Append promise to [[pending promises]].
m_pending_promises.append(promise);
// 9. Return promise.
return promise;
}
void OfflineAudioContext::begin_offline_rendering(GC::Ref<WebIDL::Promise> promise)
{
auto& realm = this->realm();
// To begin offline rendering, the following steps MUST happen on a rendering thread that is created for the occasion.
// FIXME: 1: Given the current connections and scheduled changes, start rendering length sample-frames of audio into [[rendered buffer]]
// FIXME: 2: For every render quantum, check and suspend rendering if necessary.
// FIXME: 3: If a suspended context is resumed, continue to render the buffer.
// 4: Once the rendering is complete, queue a media element task to execute the following steps:
queue_a_media_element_task(GC::create_function(heap(), [&realm, promise, this]() {
HTML::TemporaryExecutionContext context(realm, HTML::TemporaryExecutionContext::CallbacksEnabled::Yes);
// 4.1 Resolve the promise created by startRendering() with [[rendered buffer]].
WebIDL::resolve_promise(realm, promise, this->m_rendered_buffer);
// AD-HOC: Remove resolved promise from [[pending promises]]
// https://github.com/WebAudio/web-audio-api/issues/2648
m_pending_promises.remove_all_matching([promise](GC::Ref<WebIDL::Promise> const& p) {
return p.ptr() == promise.ptr();
});
// 4.2: Queue a media element task to fire an event named complete at the OfflineAudioContext using OfflineAudioCompletionEvent
// whose renderedBuffer property is set to [[rendered buffer]].
// FIXME: Need to implement OfflineAudioCompletionEvent.
queue_a_media_element_task(GC::create_function(heap(), [&realm, this]() {
this->dispatch_event(DOM::Event::create(realm, HTML::EventNames::complete));
}));
}));
}
WebIDL::ExceptionOr<GC::Ref<WebIDL::Promise>> OfflineAudioContext::resume()
@@ -91,9 +175,10 @@ void OfflineAudioContext::set_oncomplete(GC::Ptr<WebIDL::CallbackType> value)
set_event_handler_attribute(HTML::EventNames::complete, value);
}
-OfflineAudioContext::OfflineAudioContext(JS::Realm& realm, WebIDL::UnsignedLong length, float sample_rate)
OfflineAudioContext::OfflineAudioContext(JS::Realm& realm, WebIDL::UnsignedLong number_of_channels, WebIDL::UnsignedLong length, float sample_rate)
: BaseAudioContext(realm, sample_rate)
, m_length(length)
, m_number_of_channels(number_of_channels)
{
}
@@ -106,6 +191,7 @@ void OfflineAudioContext::initialize(JS::Realm& realm)
void OfflineAudioContext::visit_edges(Cell::Visitor& visitor)
{
Base::visit_edges(visitor);
visitor.visit(m_rendered_buffer);
}
}

View File

@@ -1,5 +1,6 @@
/*
* Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
* Copyright (c) 2025, Ben Eidson <b.e.eidson@gmail.com>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
@@ -42,12 +43,18 @@ public:
void set_oncomplete(GC::Ptr<WebIDL::CallbackType>);
private:
-OfflineAudioContext(JS::Realm&, WebIDL::UnsignedLong length, float sample_rate);
OfflineAudioContext(JS::Realm&, WebIDL::UnsignedLong number_of_channels, WebIDL::UnsignedLong length, float sample_rate);
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;
WebIDL::UnsignedLong m_length {};
WebIDL::UnsignedLong m_number_of_channels {};
bool m_rendering_started { false };
GC::Ptr<AudioBuffer> m_rendered_buffer;
void begin_offline_rendering(GC::Ref<WebIDL::Promise> promise);
};
}

View File

@@ -1 +1,2 @@
128
InvalidStateError: Rendering is already started

View File

@@ -0,0 +1,6 @@
Harness status: OK
Found 1 tests
1 Pass
Pass startRendering()

View File

@@ -1,11 +1,22 @@
<!DOCTYPE html>
<script src="../include.js"></script>
<script>
asyncTest(async done => {
// Once the ctorofflineaudiocontext WPT test is updated to check
// renderQuantumSize and renderSizeHint, this test is not needed.
-test(() => {
const audioContext = new OfflineAudioContext(1, 1, 44100)
println(`${audioContext.renderQuantumSize}`);
// Second call must reject with InvalidStateError
await audioContext.startRendering();
try {
await audioContext.startRendering();
println('FAIL: started rendering on repeated call');
} catch (e) {
println(`${e}`);
}
done();
});
</script>

View File

@@ -0,0 +1,24 @@
<!doctype html>
<title>Test for rejected promise from startRendering() on an
OfflineAudioContext in a discarded browsing context</title>
<script src=../../../resources/testharness.js></script>
<script src=../../../resources/testharnessreport.js></script>
<body></body>
<script>
let context;
let childDOMException;
setup(() => {
const frame = document.createElement('iframe');
document.body.appendChild(frame);
context = new frame.contentWindow.OfflineAudioContext(
{length: 1, sampleRate: 48000});
childDOMException = frame.contentWindow.DOMException;
frame.remove();
});
promise_test((t) => promise_rejects_dom(
t, 'InvalidStateError', childDOMException, context.startRendering()),
'startRendering()');
// decodeAudioData() is tested in
// offlineaudiocontext-detached-execution-context.html
</script>