
Everywhere: Hoist the Libraries folder to the top-level

Timothy Flynn 2024-11-09 12:25:08 -05:00 committed by Andreas Kling
parent 950e819ee7
commit 93712b24bf
Notes: github-actions[bot] 2024-11-10 11:51:52 +00:00
4547 changed files with 104 additions and 113 deletions

Libraries/LibWeb/WebAudio/AudioBuffer.cpp
@@ -0,0 +1,159 @@
/*
* Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <LibJS/Runtime/Completion.h>
#include <LibJS/Runtime/Realm.h>
#include <LibJS/Runtime/TypedArray.h>
#include <LibJS/Runtime/VM.h>
#include <LibWeb/Bindings/AudioBufferPrototype.h>
#include <LibWeb/Bindings/Intrinsics.h>
#include <LibWeb/WebAudio/AudioBuffer.h>
#include <LibWeb/WebAudio/BaseAudioContext.h>
#include <LibWeb/WebIDL/DOMException.h>
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(AudioBuffer);
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBuffer>> AudioBuffer::create(JS::Realm& realm, WebIDL::UnsignedLong number_of_channels, WebIDL::UnsignedLong length, float sample_rate)
{
return construct_impl(realm, { number_of_channels, length, sample_rate });
}
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBuffer>> AudioBuffer::construct_impl(JS::Realm& realm, AudioBufferOptions const& options)
{
auto& vm = realm.vm();
// 1. If any of the values in options lie outside its nominal range, throw a NotSupportedError exception and abort the following steps.
TRY(BaseAudioContext::verify_audio_options_inside_nominal_range(realm, options.number_of_channels, options.length, options.sample_rate));
// 2. Let b be a new AudioBuffer object.
// 3. Respectively assign the values of the attributes numberOfChannels, length, sampleRate of the AudioBufferOptions passed in the
// constructor to the internal slots [[number of channels]], [[length]], [[sample rate]].
auto buffer = vm.heap().allocate<AudioBuffer>(realm, realm, options);
// 4. Set the internal slot [[internal data]] of this AudioBuffer to the result of calling CreateByteDataBlock([[length]] * [[number of channels]]).
buffer->m_channels.ensure_capacity(options.number_of_channels);
for (WebIDL::UnsignedLong i = 0; i < options.number_of_channels; ++i)
buffer->m_channels.unchecked_append(TRY(JS::Float32Array::create(realm, options.length)));
return buffer;
}
AudioBuffer::~AudioBuffer() = default;
// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-samplerate
float AudioBuffer::sample_rate() const
{
// The sample-rate for the PCM audio data in samples per second. This MUST return the value of [[sample rate]].
return m_sample_rate;
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-length
WebIDL::UnsignedLong AudioBuffer::length() const
{
// Length of the PCM audio data in sample-frames. This MUST return the value of [[length]].
return m_length;
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-duration
double AudioBuffer::duration() const
{
// Duration of the PCM audio data in seconds.
// This is computed from the [[sample rate]] and the [[length]] of the AudioBuffer by performing a division between the [[length]] and the [[sample rate]].
return m_length / static_cast<double>(m_sample_rate);
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-numberofchannels
WebIDL::UnsignedLong AudioBuffer::number_of_channels() const
{
// The number of discrete audio channels. This MUST return the value of [[number of channels]].
return m_channels.size();
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-getchanneldata
WebIDL::ExceptionOr<JS::NonnullGCPtr<JS::Float32Array>> AudioBuffer::get_channel_data(WebIDL::UnsignedLong channel) const
{
if (channel >= m_channels.size())
return WebIDL::IndexSizeError::create(realm(), "Channel index is out of range"_string);
return m_channels[channel];
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-copyfromchannel
WebIDL::ExceptionOr<void> AudioBuffer::copy_from_channel(JS::Handle<WebIDL::BufferSource> const& destination, WebIDL::UnsignedLong channel_number, WebIDL::UnsignedLong buffer_offset) const
{
// The copyFromChannel() method copies the samples from the specified channel of the AudioBuffer to the destination array.
//
// Let buffer be the AudioBuffer with Nb frames, let Nf be the number of elements in the destination array, and k be the value
// of bufferOffset. Then the number of frames copied from buffer to destination is max(0, min(Nb - k, Nf)). If this is less than Nf,
// then the remaining elements of destination are not modified.
auto& vm = this->vm();
if (!is<JS::Float32Array>(*destination->raw_object()))
return vm.throw_completion<JS::TypeError>(JS::ErrorType::NotAnObjectOfType, "Float32Array");
auto& float32_array = static_cast<JS::Float32Array&>(*destination->raw_object());
auto const channel = TRY(get_channel_data(channel_number));
auto channel_length = channel->data().size();
if (buffer_offset >= channel_length)
return {};
u32 count = min(float32_array.data().size(), channel_length - buffer_offset);
channel->data().slice(buffer_offset, count).copy_to(float32_array.data());
return {};
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-copytochannel
WebIDL::ExceptionOr<void> AudioBuffer::copy_to_channel(JS::Handle<WebIDL::BufferSource> const& source, WebIDL::UnsignedLong channel_number, WebIDL::UnsignedLong buffer_offset)
{
// The copyToChannel() method copies the samples to the specified channel of the AudioBuffer from the source array.
//
// An UnknownError may be thrown if source cannot be copied to the buffer.
//
// Let buffer be the AudioBuffer with Nb frames, let Nf be the number of elements in the source array, and k be the value
// of bufferOffset. Then the number of frames copied from source to the buffer is max(0, min(Nb - k, Nf)). If this is less than Nf,
// then the remaining elements of buffer are not modified.
auto& vm = this->vm();
if (!is<JS::Float32Array>(*source->raw_object()))
return vm.throw_completion<JS::TypeError>(JS::ErrorType::NotAnObjectOfType, "Float32Array");
auto const& float32_array = static_cast<JS::Float32Array const&>(*source->raw_object());
auto channel = TRY(get_channel_data(channel_number));
auto channel_length = channel->data().size();
if (buffer_offset >= channel_length)
return {};
u32 count = min(float32_array.data().size(), channel_length - buffer_offset);
float32_array.data().slice(0, count).copy_to(channel->data().slice(buffer_offset, count));
return {};
}
AudioBuffer::AudioBuffer(JS::Realm& realm, AudioBufferOptions const& options)
: Bindings::PlatformObject(realm)
, m_length(options.length)
, m_sample_rate(options.sample_rate)
{
}
void AudioBuffer::initialize(JS::Realm& realm)
{
Base::initialize(realm);
WEB_SET_PROTOTYPE_FOR_INTERFACE(AudioBuffer);
}
void AudioBuffer::visit_edges(Cell::Visitor& visitor)
{
Base::visit_edges(visitor);
visitor.visit(m_channels);
}
}
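
The copyFromChannel()/copyToChannel() implementations above both reduce to the spec's frame-count rule: with Nb frames in the channel, Nf elements in the other array, and k = bufferOffset, exactly max(0, min(Nb - k, Nf)) frames are copied, and anything past that count is left untouched. A minimal standalone sketch of that rule, outside the commit and independent of the LibWeb types (frames_to_copy is a name invented here):

#include <algorithm>
#include <cstddef>
#include <cstdio>
#include <vector>

// Number of frames moved between a buffer channel of length channel_length and
// another array of length other_length, starting at buffer_offset.
static size_t frames_to_copy(size_t channel_length, size_t other_length, size_t buffer_offset)
{
    if (buffer_offset >= channel_length)
        return 0; // k >= Nb: nothing to copy
    return std::min(channel_length - buffer_offset, other_length);
}

int main()
{
    std::vector<float> channel(8, 1.0f); // Nb = 8
    std::vector<float> destination(5);   // Nf = 5
    size_t k = 6;                        // bufferOffset
    size_t count = frames_to_copy(channel.size(), destination.size(), k);
    std::copy_n(channel.begin() + k, count, destination.begin());
    // Remaining destination elements stay at their old values, per the spec.
    std::printf("copied %zu of %zu frames\n", count, destination.size()); // copied 2 of 5 frames
    return 0;
}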

Libraries/LibWeb/WebAudio/AudioBuffer.h
@@ -0,0 +1,65 @@
/*
* Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <AK/Vector.h>
#include <LibJS/Forward.h>
#include <LibWeb/Bindings/PlatformObject.h>
#include <LibWeb/WebIDL/Buffers.h>
#include <LibWeb/WebIDL/ExceptionOr.h>
#include <LibWeb/WebIDL/Types.h>
namespace Web::WebAudio {
struct AudioBufferOptions {
WebIDL::UnsignedLong number_of_channels { 1 };
WebIDL::UnsignedLong length {};
float sample_rate {};
};
// https://webaudio.github.io/web-audio-api/#AudioBuffer
class AudioBuffer final : public Bindings::PlatformObject {
WEB_PLATFORM_OBJECT(AudioBuffer, Bindings::PlatformObject);
JS_DECLARE_ALLOCATOR(AudioBuffer);
public:
static WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBuffer>> create(JS::Realm&, WebIDL::UnsignedLong number_of_channels, WebIDL::UnsignedLong length, float sample_rate);
static WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBuffer>> construct_impl(JS::Realm&, AudioBufferOptions const&);
virtual ~AudioBuffer() override;
float sample_rate() const;
WebIDL::UnsignedLong length() const;
double duration() const;
WebIDL::UnsignedLong number_of_channels() const;
WebIDL::ExceptionOr<JS::NonnullGCPtr<JS::Float32Array>> get_channel_data(WebIDL::UnsignedLong channel) const;
WebIDL::ExceptionOr<void> copy_from_channel(JS::Handle<WebIDL::BufferSource> const&, WebIDL::UnsignedLong channel_number, WebIDL::UnsignedLong buffer_offset = 0) const;
WebIDL::ExceptionOr<void> copy_to_channel(JS::Handle<WebIDL::BufferSource> const&, WebIDL::UnsignedLong channel_number, WebIDL::UnsignedLong buffer_offset = 0);
private:
explicit AudioBuffer(JS::Realm&, AudioBufferOptions const&);
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;
// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-number-of-channels-slot
// The number of audio channels for this AudioBuffer, which is an unsigned long.
//
// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-internal-data-slot
// A data block holding the audio sample data.
Vector<JS::NonnullGCPtr<JS::Float32Array>> m_channels; // [[internal data]] / [[number_of_channels]]
// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-length-slot
// The length of each channel of this AudioBuffer, which is an unsigned long.
WebIDL::UnsignedLong m_length {}; // [[length]]
// https://webaudio.github.io/web-audio-api/#dom-audiobuffer-sample-rate-slot
// The sample-rate, in Hz, of this AudioBuffer, a float.
float m_sample_rate {}; // [[sample rate]]
};
}
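
construct_impl() above delegates option validation to BaseAudioContext::verify_audio_options_inside_nominal_range(), which is not part of this diff. A hedged standalone sketch of the kind of check that name implies; the bounds used here are assumptions for illustration (the spec only requires supporting sample rates of 8000 to 96000 Hz and at least 32 channels):

#include <cstdio>

static bool options_inside_nominal_range(unsigned number_of_channels, unsigned length, float sample_rate)
{
    unsigned const max_channels = 32;      // assumed implementation limit
    float const min_sample_rate = 8000.f;  // assumed implementation limit
    float const max_sample_rate = 96000.f; // assumed implementation limit
    if (number_of_channels < 1 || number_of_channels > max_channels)
        return false; // would surface as NotSupportedError
    if (length == 0)
        return false; // zero-length buffers are outside the nominal range
    return sample_rate >= min_sample_rate && sample_rate <= max_sample_rate;
}

int main()
{
    std::printf("%d\n", options_inside_nominal_range(2, 44100, 44100.f)); // 1: ok
    std::printf("%d\n", options_inside_nominal_range(0, 44100, 44100.f)); // 0: no channels
    std::printf("%d\n", options_inside_nominal_range(2, 0, 44100.f));     // 0: zero length
    return 0;
}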

Libraries/LibWeb/WebAudio/AudioBuffer.idl
@@ -0,0 +1,23 @@
// https://webaudio.github.io/web-audio-api/#AudioBufferOptions
dictionary AudioBufferOptions {
unsigned long numberOfChannels = 1;
required unsigned long length;
required float sampleRate;
};
// https://webaudio.github.io/web-audio-api/#AudioBuffer
[Exposed=Window]
interface AudioBuffer {
constructor (AudioBufferOptions options);
readonly attribute float sampleRate;
readonly attribute unsigned long length;
readonly attribute double duration;
readonly attribute unsigned long numberOfChannels;
Float32Array getChannelData(unsigned long channel);
undefined copyFromChannel(Float32Array destination,
unsigned long channelNumber,
optional unsigned long bufferOffset = 0);
undefined copyToChannel(Float32Array source,
unsigned long channelNumber,
optional unsigned long bufferOffset = 0);
};

Libraries/LibWeb/WebAudio/AudioBufferSourceNode.cpp
@@ -0,0 +1,134 @@
/*
* Copyright (c) 2024, Bar Yemini <bar.ye651@gmail.com>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <LibWeb/Bindings/AudioScheduledSourceNodePrototype.h>
#include <LibWeb/Bindings/Intrinsics.h>
#include <LibWeb/WebAudio/AudioBuffer.h>
#include <LibWeb/WebAudio/AudioBufferSourceNode.h>
#include <LibWeb/WebAudio/AudioParam.h>
#include <LibWeb/WebAudio/AudioScheduledSourceNode.h>
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(AudioBufferSourceNode);
AudioBufferSourceNode::AudioBufferSourceNode(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, AudioBufferSourceOptions const& options)
: AudioScheduledSourceNode(realm, context)
, m_buffer(options.buffer)
, m_playback_rate(AudioParam::create(realm, options.playback_rate, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
, m_detune(AudioParam::create(realm, options.detune, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
, m_loop(options.loop)
, m_loop_start(options.loop_start)
, m_loop_end(options.loop_end)
{
}
AudioBufferSourceNode::~AudioBufferSourceNode() = default;
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-buffer
WebIDL::ExceptionOr<void> AudioBufferSourceNode::set_buffer(JS::GCPtr<AudioBuffer> buffer)
{
m_buffer = buffer;
return {};
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-buffer
JS::GCPtr<AudioBuffer> AudioBufferSourceNode::buffer() const
{
return m_buffer;
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-playbackrate
JS::NonnullGCPtr<AudioParam> AudioBufferSourceNode::playback_rate() const
{
return m_playback_rate;
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-detune
JS::NonnullGCPtr<AudioParam> AudioBufferSourceNode::detune() const
{
return m_detune;
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-loop
WebIDL::ExceptionOr<void> AudioBufferSourceNode::set_loop(bool loop)
{
m_loop = loop;
return {};
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-loop
bool AudioBufferSourceNode::loop() const
{
return m_loop;
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-loopstart
WebIDL::ExceptionOr<void> AudioBufferSourceNode::set_loop_start(double loop_start)
{
m_loop_start = loop_start;
return {};
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-loopstart
double AudioBufferSourceNode::loop_start() const
{
return m_loop_start;
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-loopend
WebIDL::ExceptionOr<void> AudioBufferSourceNode::set_loop_end(double loop_end)
{
m_loop_end = loop_end;
return {};
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-loopend
double AudioBufferSourceNode::loop_end() const
{
return m_loop_end;
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-start
WebIDL::ExceptionOr<void> AudioBufferSourceNode::start(Optional<double> when, Optional<double> offset, Optional<double> duration)
{
(void)when;
(void)offset;
(void)duration;
dbgln("FIXME: Implement AudioBufferSourceNode::start(when, offset, duration)");
return {};
}
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBufferSourceNode>> AudioBufferSourceNode::create(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, AudioBufferSourceOptions const& options)
{
return construct_impl(realm, context, options);
}
// https://webaudio.github.io/web-audio-api/#dom-audiobuffersourcenode-audiobuffersourcenode
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBufferSourceNode>> AudioBufferSourceNode::construct_impl(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, AudioBufferSourceOptions const& options)
{
// When the constructor is called with a BaseAudioContext c and an option object option, the user agent
// MUST initialize the AudioNode this, with context and options as arguments.
auto node = realm.vm().heap().allocate<AudioBufferSourceNode>(realm, realm, context, options);
return node;
}
void AudioBufferSourceNode::initialize(JS::Realm& realm)
{
Base::initialize(realm);
WEB_SET_PROTOTYPE_FOR_INTERFACE(AudioBufferSourceNode);
}
void AudioBufferSourceNode::visit_edges(Cell::Visitor& visitor)
{
Base::visit_edges(visitor);
visitor.visit(m_buffer);
visitor.visit(m_playback_rate);
visitor.visit(m_detune);
}
}
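
playbackRate and detune above are the two AudioParams that together set the output pitch: per the Web Audio spec, the effective rate is computedPlaybackRate = playbackRate * 2^(detune / 1200), with detune expressed in cents (1200 cents per octave). A small self-contained sketch of that formula, not part of the commit:

#include <cmath>
#include <cstdio>

// Combine the playbackRate and detune parameter values into one rate.
static double computed_playback_rate(double playback_rate, double detune_cents)
{
    return playback_rate * std::pow(2.0, detune_cents / 1200.0);
}

int main()
{
    std::printf("%f\n", computed_playback_rate(1.0, 0.0));     // 1.0: unchanged
    std::printf("%f\n", computed_playback_rate(1.0, 1200.0));  // 2.0: one octave up
    std::printf("%f\n", computed_playback_rate(2.0, -1200.0)); // 1.0: double speed, octave down
    return 0;
}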

Libraries/LibWeb/WebAudio/AudioBufferSourceNode.h
@@ -0,0 +1,67 @@
/*
* Copyright (c) 2024, Bar Yemini <bar.ye651@gmail.com>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <LibWeb/Bindings/AudioBufferSourceNodePrototype.h>
#include <LibWeb/WebAudio/AudioBuffer.h>
#include <LibWeb/WebAudio/AudioParam.h>
#include <LibWeb/WebAudio/AudioScheduledSourceNode.h>
namespace Web::WebAudio {
// https://webaudio.github.io/web-audio-api/#AudioBufferSourceOptions
struct AudioBufferSourceOptions {
JS::GCPtr<AudioBuffer> buffer;
float detune { 0 };
bool loop { false };
double loop_end { 0 };
double loop_start { 0 };
float playback_rate { 1 };
};
// https://webaudio.github.io/web-audio-api/#AudioBufferSourceNode
class AudioBufferSourceNode : public AudioScheduledSourceNode {
WEB_PLATFORM_OBJECT(AudioBufferSourceNode, AudioScheduledSourceNode);
JS_DECLARE_ALLOCATOR(AudioBufferSourceNode);
public:
virtual ~AudioBufferSourceNode() override;
WebIDL::ExceptionOr<void> set_buffer(JS::GCPtr<AudioBuffer>);
JS::GCPtr<AudioBuffer> buffer() const;
JS::NonnullGCPtr<AudioParam> playback_rate() const;
JS::NonnullGCPtr<AudioParam> detune() const;
WebIDL::ExceptionOr<void> set_loop(bool);
bool loop() const;
WebIDL::ExceptionOr<void> set_loop_start(double);
double loop_start() const;
WebIDL::ExceptionOr<void> set_loop_end(double);
double loop_end() const;
WebIDL::UnsignedLong number_of_inputs() override { return 0; }
WebIDL::UnsignedLong number_of_outputs() override { return 1; }
WebIDL::ExceptionOr<void> start(Optional<double>, Optional<double>, Optional<double>);
static WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBufferSourceNode>> create(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, AudioBufferSourceOptions const& = {});
static WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBufferSourceNode>> construct_impl(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, AudioBufferSourceOptions const& = {});
protected:
AudioBufferSourceNode(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, AudioBufferSourceOptions const& = {});
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;
private:
JS::GCPtr<AudioBuffer> m_buffer;
JS::NonnullGCPtr<AudioParam> m_playback_rate;
JS::NonnullGCPtr<AudioParam> m_detune;
bool m_loop { false };
double m_loop_start { 0.0 };
double m_loop_end { 0.0 };
};
}

Libraries/LibWeb/WebAudio/AudioBufferSourceNode.idl
@@ -0,0 +1,26 @@
#import <WebAudio/AudioBuffer.idl>
#import <WebAudio/AudioParam.idl>
#import <WebAudio/AudioScheduledSourceNode.idl>
#import <WebAudio/BaseAudioContext.idl>
dictionary AudioBufferSourceOptions {
AudioBuffer? buffer;
float detune = 0;
boolean loop = false;
double loopEnd = 0;
double loopStart = 0;
float playbackRate = 1;
};
// https://webaudio.github.io/web-audio-api/#AudioBufferSourceNode
[Exposed=Window]
interface AudioBufferSourceNode : AudioScheduledSourceNode {
constructor(BaseAudioContext context, optional AudioBufferSourceOptions options = {});
attribute AudioBuffer? buffer;
readonly attribute AudioParam playbackRate;
readonly attribute AudioParam detune;
attribute boolean loop;
attribute double loopStart;
attribute double loopEnd;
undefined start(optional double when = 0, optional double offset, optional double duration);
};

Libraries/LibWeb/WebAudio/AudioContext.cpp
@@ -0,0 +1,318 @@
/*
* Copyright (c) 2023, Luke Wilde <lukew@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <LibWeb/Bindings/AudioContextPrototype.h>
#include <LibWeb/Bindings/Intrinsics.h>
#include <LibWeb/DOM/Event.h>
#include <LibWeb/HTML/HTMLMediaElement.h>
#include <LibWeb/HTML/Scripting/TemporaryExecutionContext.h>
#include <LibWeb/HTML/Window.h>
#include <LibWeb/WebAudio/AudioContext.h>
#include <LibWeb/WebIDL/Promise.h>
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(AudioContext);
// https://webaudio.github.io/web-audio-api/#dom-audiocontext-audiocontext
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioContext>> AudioContext::construct_impl(JS::Realm& realm, AudioContextOptions const& context_options)
{
return realm.heap().allocate<AudioContext>(realm, realm, context_options);
}
AudioContext::AudioContext(JS::Realm& realm, AudioContextOptions const& context_options)
: BaseAudioContext(realm)
{
// FIXME: If the current settings object's responsible document is NOT fully active, throw an InvalidStateError and abort these steps.
// 1: Set a [[control thread state]] to suspended on the AudioContext.
BaseAudioContext::set_control_state(Bindings::AudioContextState::Suspended);
// 2: Set a [[rendering thread state]] to suspended on the AudioContext.
BaseAudioContext::set_rendering_state(Bindings::AudioContextState::Suspended);
// 3: Let [[pending resume promises]] be a slot on this AudioContext, that is an initially empty ordered list of promises.
// 4: If contextOptions is given, apply the options:
// 4.1: Set the internal latency of this AudioContext according to contextOptions.latencyHint, as described in latencyHint.
switch (context_options.latency_hint) {
case Bindings::AudioContextLatencyCategory::Balanced:
// FIXME: Determine optimal settings for balanced.
break;
case Bindings::AudioContextLatencyCategory::Interactive:
// FIXME: Determine optimal settings for interactive.
break;
case Bindings::AudioContextLatencyCategory::Playback:
// FIXME: Determine optimal settings for playback.
break;
default:
VERIFY_NOT_REACHED();
}
// 4.2: If contextOptions.sampleRate is specified, set the sampleRate of this AudioContext to this value. Otherwise,
// use the sample rate of the default output device. If the selected sample rate differs from the sample rate of the output device,
// this AudioContext MUST resample the audio output to match the sample rate of the output device.
if (context_options.sample_rate.has_value()) {
BaseAudioContext::set_sample_rate(context_options.sample_rate.value());
} else {
// FIXME: This would ideally be coming from the default output device, but we can only get this on Serenity
// For now we'll just have to resample
BaseAudioContext::set_sample_rate(44100);
}
// FIXME: 5: If the context is allowed to start, send a control message to start processing.
// FIXME: Implement control message queue to run following steps on the rendering thread
if (m_allowed_to_start) {
// FIXME: 5.1: Attempt to acquire system resources. In case of failure, abort the following steps.
// 5.2: Set the [[rendering thread state]] to "running" on the AudioContext.
BaseAudioContext::set_rendering_state(Bindings::AudioContextState::Running);
// 5.3: queue a media element task to execute the following steps:
queue_a_media_element_task(JS::create_heap_function(heap(), [&realm, this]() {
// 5.3.1: Set the state attribute of the AudioContext to "running".
BaseAudioContext::set_control_state(Bindings::AudioContextState::Running);
// 5.3.2: queue a media element task to fire an event named statechange at the AudioContext.
this->dispatch_event(DOM::Event::create(realm, HTML::EventNames::statechange));
}));
}
}
AudioContext::~AudioContext() = default;
void AudioContext::initialize(JS::Realm& realm)
{
Base::initialize(realm);
WEB_SET_PROTOTYPE_FOR_INTERFACE(AudioContext);
}
void AudioContext::visit_edges(Cell::Visitor& visitor)
{
Base::visit_edges(visitor);
visitor.visit(m_pending_resume_promises);
}
// https://www.w3.org/TR/webaudio/#dom-audiocontext-getoutputtimestamp
AudioTimestamp AudioContext::get_output_timestamp()
{
dbgln("(STUBBED) getOutputTimestamp()");
return {};
}
// https://www.w3.org/TR/webaudio/#dom-audiocontext-resume
WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> AudioContext::resume()
{
auto& realm = this->realm();
// 1. If this's relevant global object's associated Document is not fully active then return a promise rejected with "InvalidStateError" DOMException.
auto const& associated_document = verify_cast<HTML::Window>(HTML::relevant_global_object(*this)).associated_document();
if (!associated_document.is_fully_active())
return WebIDL::InvalidStateError::create(realm, "Document is not fully active"_string);
// 2. Let promise be a new Promise.
auto promise = WebIDL::create_promise(realm);
// 3. If the [[control thread state]] on the AudioContext is closed reject the promise with InvalidStateError, abort these steps, returning promise.
if (state() == Bindings::AudioContextState::Closed) {
WebIDL::reject_promise(realm, promise, WebIDL::InvalidStateError::create(realm, "Audio context is already closed."_string));
return promise;
}
// 4. Set [[suspended by user]] to false.
m_suspended_by_user = false;
// 5. If the context is not allowed to start, append promise to [[pending promises]] and [[pending resume promises]] and abort these steps, returning promise.
if (!m_allowed_to_start) {
m_pending_promises.append(promise);
m_pending_resume_promises.append(promise);
return promise;
}
// 6. Set the [[control thread state]] on the AudioContext to running.
set_control_state(Bindings::AudioContextState::Running);
// 7. Queue a control message to resume the AudioContext.
// FIXME: Implement control message queue to run following steps on the rendering thread
// FIXME: 7.1: Attempt to acquire system resources.
// 7.2: Set the [[rendering thread state]] on the AudioContext to running.
set_rendering_state(Bindings::AudioContextState::Running);
// 7.3: Start rendering the audio graph.
if (!start_rendering_audio_graph()) {
// 7.4: In case of failure, queue a media element task to execute the following steps:
queue_a_media_element_task(JS::create_heap_function(heap(), [&realm, this]() {
HTML::TemporaryExecutionContext context(realm, HTML::TemporaryExecutionContext::CallbacksEnabled::Yes);
// 7.4.1: Reject all promises from [[pending resume promises]] in order, then clear [[pending resume promises]].
for (auto const& promise : m_pending_resume_promises) {
WebIDL::reject_promise(realm, promise, JS::js_null());
// 7.4.2: Additionally, remove those promises from [[pending promises]].
m_pending_promises.remove_first_matching([&promise](auto& pending_promise) {
return pending_promise == promise;
});
}
m_pending_resume_promises.clear();
}));
}
// 7.5: queue a media element task to execute the following steps:
queue_a_media_element_task(JS::create_heap_function(heap(), [&realm, promise, this]() {
HTML::TemporaryExecutionContext context(realm, HTML::TemporaryExecutionContext::CallbacksEnabled::Yes);
// 7.5.1: Resolve all promises from [[pending resume promises]] in order.
// 7.5.2: Clear [[pending resume promises]]. Additionally, remove those promises from
// [[pending promises]].
for (auto const& pending_resume_promise : m_pending_resume_promises) {
WebIDL::resolve_promise(realm, pending_resume_promise, JS::js_undefined());
m_pending_promises.remove_first_matching([&pending_resume_promise](auto& pending_promise) {
return pending_promise == pending_resume_promise;
});
}
m_pending_resume_promises.clear();
// 7.5.3: Resolve promise.
WebIDL::resolve_promise(realm, promise, JS::js_undefined());
// 7.5.4: If the state attribute of the AudioContext is not already "running":
if (state() != Bindings::AudioContextState::Running) {
// 7.5.4.1: Set the state attribute of the AudioContext to "running".
set_control_state(Bindings::AudioContextState::Running);
// 7.5.4.2: queue a media element task to fire an event named statechange at the AudioContext.
queue_a_media_element_task(JS::create_heap_function(heap(), [&realm, this]() {
this->dispatch_event(DOM::Event::create(realm, HTML::EventNames::statechange));
}));
}
}));
// 8. Return promise.
return promise;
}
// https://www.w3.org/TR/webaudio/#dom-audiocontext-suspend
WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> AudioContext::suspend()
{
auto& realm = this->realm();
// 1. If this's relevant global object's associated Document is not fully active then return a promise rejected with "InvalidStateError" DOMException.
auto const& associated_document = verify_cast<HTML::Window>(HTML::relevant_global_object(*this)).associated_document();
if (!associated_document.is_fully_active())
return WebIDL::InvalidStateError::create(realm, "Document is not fully active"_string);
// 2. Let promise be a new Promise.
auto promise = WebIDL::create_promise(realm);
// 3. If the [[control thread state]] on the AudioContext is closed reject the promise with InvalidStateError, abort these steps, returning promise.
if (state() == Bindings::AudioContextState::Closed) {
WebIDL::reject_promise(realm, promise, WebIDL::InvalidStateError::create(realm, "Audio context is already closed."_string));
return promise;
}
// 4. Append promise to [[pending promises]].
m_pending_promises.append(promise);
// 5. Set [[suspended by user]] to true.
m_suspended_by_user = true;
// 6. Set the [[control thread state]] on the AudioContext to suspended.
set_control_state(Bindings::AudioContextState::Suspended);
// 7. Queue a control message to suspend the AudioContext.
// FIXME: Implement control message queue to run following steps on the rendering thread
// FIXME: 7.1: Attempt to release system resources.
// 7.2: Set the [[rendering thread state]] on the AudioContext to suspended.
set_rendering_state(Bindings::AudioContextState::Suspended);
// 7.3: queue a media element task to execute the following steps:
queue_a_media_element_task(JS::create_heap_function(heap(), [&realm, promise, this]() {
HTML::TemporaryExecutionContext context(realm, HTML::TemporaryExecutionContext::CallbacksEnabled::Yes);
// 7.3.1: Resolve promise.
WebIDL::resolve_promise(realm, promise, JS::js_undefined());
// 7.3.2: If the state attribute of the AudioContext is not already "suspended":
if (state() != Bindings::AudioContextState::Suspended) {
// 7.3.2.1: Set the state attribute of the AudioContext to "suspended".
set_control_state(Bindings::AudioContextState::Suspended);
// 7.3.2.2: queue a media element task to fire an event named statechange at the AudioContext.
queue_a_media_element_task(JS::create_heap_function(heap(), [&realm, this]() {
this->dispatch_event(DOM::Event::create(realm, HTML::EventNames::statechange));
}));
}
}));
// 8. Return promise.
return promise;
}
// https://www.w3.org/TR/webaudio/#dom-audiocontext-close
WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> AudioContext::close()
{
auto& realm = this->realm();
// 1. If this's relevant global object's associated Document is not fully active then return a promise rejected with "InvalidStateError" DOMException.
auto const& associated_document = verify_cast<HTML::Window>(HTML::relevant_global_object(*this)).associated_document();
if (!associated_document.is_fully_active())
return WebIDL::InvalidStateError::create(realm, "Document is not fully active"_string);
// 2. Let promise be a new Promise.
auto promise = WebIDL::create_promise(realm);
// 3. If the [[control thread state]] flag on the AudioContext is closed reject the promise with InvalidStateError, abort these steps, returning promise.
if (state() == Bindings::AudioContextState::Closed) {
WebIDL::reject_promise(realm, promise, WebIDL::InvalidStateError::create(realm, "Audio context is already closed."_string));
return promise;
}
// 4. Set the [[control thread state]] flag on the AudioContext to closed.
set_control_state(Bindings::AudioContextState::Closed);
// 5. Queue a control message to close the AudioContext.
// FIXME: Implement control message queue to run following steps on the rendering thread
// FIXME: 5.1: Attempt to release system resources.
// 5.2: Set the [[rendering thread state]] to "suspended".
set_rendering_state(Bindings::AudioContextState::Suspended);
// FIXME: 5.3: If this control message is being run in a reaction to the document being unloaded, abort this algorithm.
// 5.4: queue a media element task to execute the following steps:
queue_a_media_element_task(JS::create_heap_function(heap(), [&realm, promise, this]() {
HTML::TemporaryExecutionContext context(realm, HTML::TemporaryExecutionContext::CallbacksEnabled::Yes);
// 5.4.1: Resolve promise.
WebIDL::resolve_promise(realm, promise, JS::js_undefined());
// 5.4.2: If the state attribute of the AudioContext is not already "closed":
if (state() != Bindings::AudioContextState::Closed) {
// 5.4.2.1: Set the state attribute of the AudioContext to "closed".
set_control_state(Bindings::AudioContextState::Closed);
}
// 5.4.2.2: queue a media element task to fire an event named statechange at the AudioContext.
// FIXME: Attempting to queue another task in here causes an assertion fail at Vector.h:148
this->dispatch_event(DOM::Event::create(realm, HTML::EventNames::statechange));
}));
// 6. Return promise
return promise;
}
// FIXME: Actually implement the rendering thread
bool AudioContext::start_rendering_audio_graph()
{
bool render_result = true;
return render_result;
}
}
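
resume(), suspend(), and close() above all follow the same shape: bail out with InvalidStateError once the context is closed, flip the control state, and fire statechange only when the observable state actually changed. A minimal sketch of that state-machine core, detached from the promise and task-queue machinery (ControlState and its members are names invented here):

#include <cstdio>
#include <functional>

enum class AudioContextState { Suspended, Running, Closed };

struct ControlState {
    AudioContextState state { AudioContextState::Suspended };
    std::function<void()> on_statechange;

    // Returns false (the spec rejects with InvalidStateError) once closed.
    bool transition(AudioContextState new_state)
    {
        if (state == AudioContextState::Closed)
            return false;
        if (state != new_state) {
            state = new_state;
            if (on_statechange)
                on_statechange(); // fire only on an observable change
        }
        return true;
    }
};

int main()
{
    ControlState ctx;
    ctx.on_statechange = [] { std::puts("statechange"); };
    ctx.transition(AudioContextState::Running); // prints statechange
    ctx.transition(AudioContextState::Running); // no-op: already running
    ctx.transition(AudioContextState::Closed);  // prints statechange
    std::printf("resume after close ok? %d\n", (int)ctx.transition(AudioContextState::Running)); // 0
    return 0;
}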

Libraries/LibWeb/WebAudio/AudioContext.h
@@ -0,0 +1,58 @@
/*
* Copyright (c) 2023, Luke Wilde <lukew@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <LibWeb/Bindings/AudioContextPrototype.h>
#include <LibWeb/HighResolutionTime/DOMHighResTimeStamp.h>
#include <LibWeb/WebAudio/BaseAudioContext.h>
namespace Web::WebAudio {
struct AudioContextOptions {
Bindings::AudioContextLatencyCategory latency_hint = Bindings::AudioContextLatencyCategory::Interactive;
Optional<float> sample_rate;
};
struct AudioTimestamp {
double context_time { 0 };
double performance_time { 0 };
};
// https://webaudio.github.io/web-audio-api/#AudioContext
class AudioContext final : public BaseAudioContext {
WEB_PLATFORM_OBJECT(AudioContext, BaseAudioContext);
JS_DECLARE_ALLOCATOR(AudioContext);
public:
static WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioContext>> construct_impl(JS::Realm&, AudioContextOptions const& context_options = {});
virtual ~AudioContext() override;
double base_latency() const { return m_base_latency; }
double output_latency() const { return m_output_latency; }
AudioTimestamp get_output_timestamp();
WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> resume();
WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> suspend();
WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> close();
private:
explicit AudioContext(JS::Realm&, AudioContextOptions const& context_options);
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;
double m_base_latency { 0 };
double m_output_latency { 0 };
bool m_allowed_to_start = true;
Vector<JS::NonnullGCPtr<WebIDL::Promise>> m_pending_resume_promises;
bool m_suspended_by_user = false;
bool start_rendering_audio_graph();
};
}

Libraries/LibWeb/WebAudio/AudioContext.idl
@@ -0,0 +1,31 @@
#import <WebAudio/BaseAudioContext.idl>
// https://www.w3.org/TR/webaudio/#enumdef-audiocontextlatencycategory
enum AudioContextLatencyCategory { "balanced", "interactive", "playback" };
// https://webaudio.github.io/web-audio-api/#AudioContext
[Exposed=Window]
interface AudioContext : BaseAudioContext {
constructor(optional AudioContextOptions contextOptions = {});
readonly attribute double baseLatency;
readonly attribute double outputLatency;
AudioTimestamp getOutputTimestamp ();
Promise<undefined> resume ();
Promise<undefined> suspend ();
Promise<undefined> close ();
[FIXME] MediaElementAudioSourceNode createMediaElementSource (HTMLMediaElement mediaElement);
[FIXME] MediaStreamAudioSourceNode createMediaStreamSource (MediaStream mediaStream);
[FIXME] MediaStreamTrackAudioSourceNode createMediaStreamTrackSource (MediaStreamTrack mediaStreamTrack);
[FIXME] MediaStreamAudioDestinationNode createMediaStreamDestination ();
};
dictionary AudioContextOptions {
AudioContextLatencyCategory latencyHint = "interactive";
float sampleRate;
};
dictionary AudioTimestamp {
double contextTime;
// FIXME: Should be DOMHighResTimeStamp, but DOMHighResTimeStamp doesn't get parsed as a double during codegen
double performanceTime;
};

Libraries/LibWeb/WebAudio/AudioDestinationNode.cpp
@@ -0,0 +1,71 @@
/*
* Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
* Copyright (c) 2024, Bar Yemini <bar.ye651@gmail.com>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <LibWeb/Bindings/AudioDestinationNodePrototype.h>
#include <LibWeb/Bindings/Intrinsics.h>
#include <LibWeb/WebAudio/AudioContext.h>
#include <LibWeb/WebAudio/AudioDestinationNode.h>
#include <LibWeb/WebAudio/AudioNode.h>
#include <LibWeb/WebAudio/BaseAudioContext.h>
#include <LibWeb/WebAudio/OfflineAudioContext.h>
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(AudioDestinationNode);
AudioDestinationNode::AudioDestinationNode(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context)
: AudioNode(realm, context)
{
}
AudioDestinationNode::~AudioDestinationNode() = default;
// https://webaudio.github.io/web-audio-api/#dom-audiodestinationnode-maxchannelcount
WebIDL::UnsignedLong AudioDestinationNode::max_channel_count()
{
dbgln("FIXME: Implement Audio::DestinationNode::max_channel_count()");
return 2;
}
JS::NonnullGCPtr<AudioDestinationNode> AudioDestinationNode::construct_impl(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context)
{
return realm.heap().allocate<AudioDestinationNode>(realm, realm, context);
}
void AudioDestinationNode::initialize(JS::Realm& realm)
{
Base::initialize(realm);
WEB_SET_PROTOTYPE_FOR_INTERFACE(AudioDestinationNode);
}
void AudioDestinationNode::visit_edges(Cell::Visitor& visitor)
{
Base::visit_edges(visitor);
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-channelcount
WebIDL::ExceptionOr<void> AudioDestinationNode::set_channel_count(WebIDL::UnsignedLong channel_count)
{
// The behavior depends on whether the destination node is the destination of an AudioContext
// or OfflineAudioContext:
// AudioContext: The channel count MUST be between 1 and maxChannelCount. An IndexSizeError
// exception MUST be thrown for any attempt to set the count outside this range.
if (is<AudioContext>(*context())) {
if (channel_count < 1 || channel_count > max_channel_count())
return WebIDL::IndexSizeError::create(realm(), "Channel index is out of range"_string);
}
// OfflineAudioContext: The channel count cannot be changed. An InvalidStateError exception MUST
// be thrown for any attempt to change the value.
if (is<OfflineAudioContext>(*context()))
return WebIDL::InvalidStateError::create(realm(), "Cannot change channel count in an OfflineAudioContext"_string);
return AudioNode::set_channel_count(channel_count);
}
}
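
set_channel_count() above encodes two different rules depending on the owning context. A standalone sketch of just that decision, with names invented here (the real method additionally defers to AudioNode::set_channel_count for the base checks):

#include <cstdio>

enum class ContextKind { Realtime, Offline };
enum class SetChannelCountResult { Ok, IndexSizeError, InvalidStateError };

static SetChannelCountResult validate_channel_count(ContextKind kind, unsigned requested, unsigned max_channel_count)
{
    if (kind == ContextKind::Offline)
        return SetChannelCountResult::InvalidStateError; // count is fixed for offline rendering
    if (requested < 1 || requested > max_channel_count)
        return SetChannelCountResult::IndexSizeError; // outside 1..maxChannelCount
    return SetChannelCountResult::Ok;
}

int main()
{
    std::printf("%d\n", (int)validate_channel_count(ContextKind::Realtime, 2, 2)); // 0: Ok
    std::printf("%d\n", (int)validate_channel_count(ContextKind::Realtime, 0, 2)); // 1: IndexSizeError
    std::printf("%d\n", (int)validate_channel_count(ContextKind::Offline, 2, 2));  // 2: InvalidStateError
    return 0;
}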

Libraries/LibWeb/WebAudio/AudioDestinationNode.h
@@ -0,0 +1,39 @@
/*
* Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
* Copyright (c) 2024, Bar Yemini <bar.ye651@gmail.com>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <LibWeb/Bindings/AudioDestinationNodePrototype.h>
#include <LibWeb/WebAudio/AudioNode.h>
#include <LibWeb/WebAudio/BaseAudioContext.h>
#include <LibWeb/WebIDL/Types.h>
namespace Web::WebAudio {
// https://webaudio.github.io/web-audio-api/#AudioDestinationNode
class AudioDestinationNode : public AudioNode {
WEB_PLATFORM_OBJECT(AudioDestinationNode, AudioNode);
JS_DECLARE_ALLOCATOR(AudioDestinationNode);
public:
virtual ~AudioDestinationNode() override;
WebIDL::UnsignedLong max_channel_count();
WebIDL::UnsignedLong number_of_inputs() override { return 1; }
WebIDL::UnsignedLong number_of_outputs() override { return 0; }
WebIDL::ExceptionOr<void> set_channel_count(WebIDL::UnsignedLong) override;
static JS::NonnullGCPtr<AudioDestinationNode> construct_impl(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>);
protected:
AudioDestinationNode(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>);
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;
};
}

Libraries/LibWeb/WebAudio/AudioDestinationNode.idl
@@ -0,0 +1,7 @@
#import <WebAudio/AudioNode.idl>
// https://webaudio.github.io/web-audio-api/#AudioDestinationNode
[Exposed=Window]
interface AudioDestinationNode : AudioNode {
readonly attribute unsigned long maxChannelCount;
};

Libraries/LibWeb/WebAudio/AudioListener.cpp
@@ -0,0 +1,94 @@
/*
* Copyright (c) 2024, Jelle Raaijmakers <jelle@ladybird.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <LibJS/Heap/CellAllocator.h>
#include <LibWeb/Bindings/Intrinsics.h>
#include <LibWeb/WebAudio/AudioListener.h>
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(AudioListener);
AudioListener::AudioListener(JS::Realm& realm)
: Bindings::PlatformObject(realm)
, m_forward_x(AudioParam::create(realm, 0.f, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
, m_forward_y(AudioParam::create(realm, 0.f, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
, m_forward_z(AudioParam::create(realm, -1.f, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
, m_position_x(AudioParam::create(realm, 0.f, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
, m_position_y(AudioParam::create(realm, 0.f, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
, m_position_z(AudioParam::create(realm, 0.f, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
, m_up_x(AudioParam::create(realm, 0.f, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
, m_up_y(AudioParam::create(realm, 1.f, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
, m_up_z(AudioParam::create(realm, 0.f, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
{
}
JS::NonnullGCPtr<AudioListener> AudioListener::create(JS::Realm& realm)
{
return realm.vm().heap().allocate<AudioListener>(realm, realm);
}
AudioListener::~AudioListener() = default;
// https://webaudio.github.io/web-audio-api/#dom-audiolistener-setposition
WebIDL::ExceptionOr<void> AudioListener::set_position(float x, float y, float z)
{
// This method is DEPRECATED. It is equivalent to setting positionX.value, positionY.value, and
// positionZ.value directly with the given x, y, and z values, respectively.
// FIXME: Consequently, if any of the positionX, positionY, and positionZ AudioParams for this
// AudioListener have an automation curve set using setValueCurveAtTime() at the time this
// method is called, a NotSupportedError MUST be thrown.
m_position_x->set_value(x);
m_position_y->set_value(y);
m_position_z->set_value(z);
return {};
}
// https://webaudio.github.io/web-audio-api/#dom-audiolistener-setorientation
WebIDL::ExceptionOr<void> AudioListener::set_orientation(float x, float y, float z, float x_up, float y_up, float z_up)
{
// This method is DEPRECATED. It is equivalent to setting forwardX.value, forwardY.value,
// forwardZ.value, upX.value, upY.value, and upZ.value directly with the given x, y, z, xUp,
// yUp, and zUp values, respectively.
// FIXME: Consequently, if any of the forwardX, forwardY, forwardZ, upX, upY and upZ
// AudioParams have an automation curve set using setValueCurveAtTime() at the time this
// method is called, a NotSupportedError MUST be thrown.
m_forward_x->set_value(x);
m_forward_y->set_value(y);
m_forward_z->set_value(z);
m_up_x->set_value(x_up);
m_up_y->set_value(y_up);
m_up_z->set_value(z_up);
return {};
}
void AudioListener::initialize(JS::Realm& realm)
{
Base::initialize(realm);
WEB_SET_PROTOTYPE_FOR_INTERFACE(AudioListener);
}
void AudioListener::visit_edges(Cell::Visitor& visitor)
{
Base::visit_edges(visitor);
visitor.visit(m_forward_x);
visitor.visit(m_forward_y);
visitor.visit(m_forward_z);
visitor.visit(m_position_x);
visitor.visit(m_position_y);
visitor.visit(m_position_z);
visitor.visit(m_up_x);
visitor.visit(m_up_y);
visitor.visit(m_up_z);
}
}
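
The nine AudioParams above describe the listener pose as a position plus two orientation vectors; spatialization derives the missing third axis as the cross product right = forward x up. A small sketch of that derivation using the default orientation, forward (0, 0, -1) and up (0, 1, 0) (Vec3 and cross are helpers invented here):

#include <cstdio>

struct Vec3 { float x, y, z; };

// Standard 3-D cross product.
static Vec3 cross(Vec3 a, Vec3 b)
{
    return { a.y * b.z - a.z * b.y,
             a.z * b.x - a.x * b.z,
             a.x * b.y - a.y * b.x };
}

int main()
{
    Vec3 forward { 0.f, 0.f, -1.f }; // default forwardX/Y/Z: looking down -Z
    Vec3 up { 0.f, 1.f, 0.f };       // default upX/Y/Z: +Y is up
    Vec3 right = cross(forward, up);
    std::printf("right = (%g, %g, %g)\n", right.x, right.y, right.z); // (1, 0, 0)
    return 0;
}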

Libraries/LibWeb/WebAudio/AudioListener.h
@@ -0,0 +1,56 @@
/*
* Copyright (c) 2024, Jelle Raaijmakers <jelle@ladybird.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <LibJS/Forward.h>
#include <LibWeb/Bindings/AudioListenerPrototype.h>
#include <LibWeb/Bindings/PlatformObject.h>
#include <LibWeb/WebAudio/AudioParam.h>
#include <LibWeb/WebIDL/ExceptionOr.h>
namespace Web::WebAudio {
// https://webaudio.github.io/web-audio-api/#AudioListener
class AudioListener final : public Bindings::PlatformObject {
WEB_PLATFORM_OBJECT(AudioListener, Bindings::PlatformObject);
JS_DECLARE_ALLOCATOR(AudioListener);
public:
static JS::NonnullGCPtr<AudioListener> create(JS::Realm&);
virtual ~AudioListener() override;
JS::NonnullGCPtr<AudioParam> forward_x() const { return m_forward_x; }
JS::NonnullGCPtr<AudioParam> forward_y() const { return m_forward_y; }
JS::NonnullGCPtr<AudioParam> forward_z() const { return m_forward_z; }
JS::NonnullGCPtr<AudioParam> position_x() const { return m_position_x; }
JS::NonnullGCPtr<AudioParam> position_y() const { return m_position_y; }
JS::NonnullGCPtr<AudioParam> position_z() const { return m_position_z; }
JS::NonnullGCPtr<AudioParam> up_x() const { return m_up_x; }
JS::NonnullGCPtr<AudioParam> up_y() const { return m_up_y; }
JS::NonnullGCPtr<AudioParam> up_z() const { return m_up_z; }
WebIDL::ExceptionOr<void> set_position(float x, float y, float z);
WebIDL::ExceptionOr<void> set_orientation(float x, float y, float z, float x_up, float y_up, float z_up);
private:
explicit AudioListener(JS::Realm&);
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;
JS::NonnullGCPtr<AudioParam> m_forward_x;
JS::NonnullGCPtr<AudioParam> m_forward_y;
JS::NonnullGCPtr<AudioParam> m_forward_z;
JS::NonnullGCPtr<AudioParam> m_position_x;
JS::NonnullGCPtr<AudioParam> m_position_y;
JS::NonnullGCPtr<AudioParam> m_position_z;
JS::NonnullGCPtr<AudioParam> m_up_x;
JS::NonnullGCPtr<AudioParam> m_up_y;
JS::NonnullGCPtr<AudioParam> m_up_z;
};
}

Libraries/LibWeb/WebAudio/AudioListener.idl
@@ -0,0 +1,15 @@
// https://webaudio.github.io/web-audio-api/#AudioListener
[Exposed=Window]
interface AudioListener {
readonly attribute AudioParam positionX;
readonly attribute AudioParam positionY;
readonly attribute AudioParam positionZ;
readonly attribute AudioParam forwardX;
readonly attribute AudioParam forwardY;
readonly attribute AudioParam forwardZ;
readonly attribute AudioParam upX;
readonly attribute AudioParam upY;
readonly attribute AudioParam upZ;
undefined setPosition (float x, float y, float z);
undefined setOrientation (float x, float y, float z, float xUp, float yUp, float zUp);
};

Libraries/LibWeb/WebAudio/AudioNode.cpp
@@ -0,0 +1,177 @@
/*
* Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <LibWeb/Bindings/Intrinsics.h>
#include <LibWeb/WebAudio/AudioNode.h>
#include <LibWeb/WebAudio/BaseAudioContext.h>
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(AudioNode);
AudioNode::AudioNode(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context)
: DOM::EventTarget(realm)
, m_context(context)
{
}
AudioNode::~AudioNode() = default;
WebIDL::ExceptionOr<void> AudioNode::initialize_audio_node_options(AudioNodeOptions const& given_options, AudioNodeDefaultOptions const& default_options)
{
// Set channel count, fallback to default if not provided
if (given_options.channel_count.has_value()) {
TRY(set_channel_count(given_options.channel_count.value()));
} else {
TRY(set_channel_count(default_options.channel_count));
}
// Set channel count mode, fallback to default if not provided
if (given_options.channel_count_mode.has_value()) {
TRY(set_channel_count_mode(given_options.channel_count_mode.value()));
} else {
TRY(set_channel_count_mode(default_options.channel_count_mode));
}
// Set channel interpretation, fallback to default if not provided
if (given_options.channel_interpretation.has_value()) {
TRY(set_channel_interpretation(given_options.channel_interpretation.value()));
} else {
TRY(set_channel_interpretation(default_options.channel_interpretation));
}
return {};
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-connect
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioNode>> AudioNode::connect(JS::NonnullGCPtr<AudioNode> destination_node, WebIDL::UnsignedLong output, WebIDL::UnsignedLong input)
{
// There can only be one connection between a given output of one specific node and a given input of another specific node.
// Multiple connections with the same termini are ignored.
// If the destination parameter is an AudioNode that has been created using another AudioContext, an InvalidAccessError MUST be thrown.
if (m_context != destination_node->m_context) {
return WebIDL::InvalidAccessError::create(realm(), "Cannot connect to an AudioNode in a different AudioContext"_string);
}
(void)output;
(void)input;
dbgln("FIXME: Implement Audio::connect(AudioNode)");
return destination_node;
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-connect-destinationparam-output
void AudioNode::connect(JS::NonnullGCPtr<AudioParam> destination_param, WebIDL::UnsignedLong output)
{
(void)destination_param;
(void)output;
dbgln("FIXME: Implement AudioNode::connect(AudioParam)");
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect
void AudioNode::disconnect()
{
dbgln("FIXME: Implement AudioNode::disconnect()");
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-output
void AudioNode::disconnect(WebIDL::UnsignedLong output)
{
(void)output;
dbgln("FIXME: Implement AudioNode::disconnect(output)");
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationnode
void AudioNode::disconnect(JS::NonnullGCPtr<AudioNode> destination_node)
{
(void)destination_node;
dbgln("FIXME: Implement AudioNode::disconnect(destination_node)");
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationnode-output
void AudioNode::disconnect(JS::NonnullGCPtr<AudioNode> destination_node, WebIDL::UnsignedLong output)
{
(void)destination_node;
(void)output;
dbgln("FIXME: Implement AudioNode::disconnect(destination_node, output)");
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationnode-output-input
void AudioNode::disconnect(JS::NonnullGCPtr<AudioNode> destination_node, WebIDL::UnsignedLong output, WebIDL::UnsignedLong input)
{
(void)destination_node;
(void)output;
(void)input;
dbgln("FIXME: Implement AudioNode::disconnect(destination_node, output, input)");
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationparam
void AudioNode::disconnect(JS::NonnullGCPtr<AudioParam> destination_param)
{
(void)destination_param;
dbgln("FIXME: Implement AudioNode::disconnect(destination_param)");
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-disconnect-destinationparam-output
void AudioNode::disconnect(JS::NonnullGCPtr<AudioParam> destination_param, WebIDL::UnsignedLong output)
{
(void)destination_param;
(void)output;
dbgln("FIXME: Implement AudioNode::disconnect(destination_param, output)");
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-channelcount
WebIDL::ExceptionOr<void> AudioNode::set_channel_count(WebIDL::UnsignedLong channel_count)
{
// If this value is set to zero or to a value greater than the implementation's maximum number
// of channels, the implementation MUST throw a NotSupportedError exception.
if (channel_count == 0 || channel_count > BaseAudioContext::MAX_NUMBER_OF_CHANNELS)
return WebIDL::NotSupportedError::create(realm(), "Invalid channel count"_string);
m_channel_count = channel_count;
return {};
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-channelcountmode
WebIDL::ExceptionOr<void> AudioNode::set_channel_count_mode(Bindings::ChannelCountMode channel_count_mode)
{
m_channel_count_mode = channel_count_mode;
return {};
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-channelcountmode
Bindings::ChannelCountMode AudioNode::channel_count_mode()
{
return m_channel_count_mode;
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-channelinterpretation
WebIDL::ExceptionOr<void> AudioNode::set_channel_interpretation(Bindings::ChannelInterpretation channel_interpretation)
{
m_channel_interpretation = channel_interpretation;
return {};
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-channelinterpretation
Bindings::ChannelInterpretation AudioNode::channel_interpretation()
{
return m_channel_interpretation;
}
void AudioNode::initialize(JS::Realm& realm)
{
Base::initialize(realm);
WEB_SET_PROTOTYPE_FOR_INTERFACE(AudioNode);
}
void AudioNode::visit_edges(Cell::Visitor& visitor)
{
Base::visit_edges(visitor);
visitor.visit(m_context);
}
}
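
channelCount, channelCountMode, and channelInterpretation above only store state; the mixing behaviour they control is specified elsewhere. Per the Web Audio spec, a node's computed number of channels is the maximum across its input connections for "max", that same value capped at channelCount for "clamped-max", and exactly channelCount for "explicit". A self-contained sketch of that rule, not part of the commit:

#include <algorithm>
#include <cstdio>
#include <vector>

enum class ChannelCountMode { Max, ClampedMax, Explicit };

static unsigned computed_number_of_channels(ChannelCountMode mode, unsigned channel_count,
                                            std::vector<unsigned> const& connection_channels)
{
    // Widest connection feeding the input; at least mono.
    unsigned max_connections = 1;
    for (auto c : connection_channels)
        max_connections = std::max(max_connections, c);
    switch (mode) {
    case ChannelCountMode::Max:
        return max_connections;              // channelCount is ignored
    case ChannelCountMode::ClampedMax:
        return std::min(max_connections, channel_count);
    case ChannelCountMode::Explicit:
        return channel_count;                // connections are ignored
    }
    return channel_count;
}

int main()
{
    std::vector<unsigned> inputs { 1, 6 }; // a mono and a 5.1 connection
    std::printf("%u\n", computed_number_of_channels(ChannelCountMode::Max, 2, inputs));        // 6
    std::printf("%u\n", computed_number_of_channels(ChannelCountMode::ClampedMax, 2, inputs)); // 2
    std::printf("%u\n", computed_number_of_channels(ChannelCountMode::Explicit, 2, inputs));   // 2
    return 0;
}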

Libraries/LibWeb/WebAudio/AudioNode.h
@@ -0,0 +1,86 @@
/*
* Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <AK/Optional.h>
#include <LibJS/Forward.h>
#include <LibWeb/Bindings/AudioNodePrototype.h>
#include <LibWeb/Bindings/PlatformObject.h>
#include <LibWeb/DOM/EventTarget.h>
#include <LibWeb/WebIDL/Types.h>
namespace Web::WebAudio {
// https://webaudio.github.io/web-audio-api/#AudioNodeOptions
struct AudioNodeOptions {
Optional<WebIDL::UnsignedLong> channel_count;
Optional<Bindings::ChannelCountMode> channel_count_mode;
Optional<Bindings::ChannelInterpretation> channel_interpretation;
};
struct AudioNodeDefaultOptions {
WebIDL::UnsignedLong channel_count;
Bindings::ChannelCountMode channel_count_mode;
Bindings::ChannelInterpretation channel_interpretation;
};
// https://webaudio.github.io/web-audio-api/#AudioNode
class AudioNode : public DOM::EventTarget {
WEB_PLATFORM_OBJECT(AudioNode, DOM::EventTarget);
JS_DECLARE_ALLOCATOR(AudioNode);
public:
virtual ~AudioNode() override;
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioNode>> connect(JS::NonnullGCPtr<AudioNode> destination_node, WebIDL::UnsignedLong output = 0, WebIDL::UnsignedLong input = 0);
void connect(JS::NonnullGCPtr<AudioParam> destination_param, WebIDL::UnsignedLong output = 0);
void disconnect();
void disconnect(WebIDL::UnsignedLong output);
void disconnect(JS::NonnullGCPtr<AudioNode> destination_node);
void disconnect(JS::NonnullGCPtr<AudioNode> destination_node, WebIDL::UnsignedLong output);
void disconnect(JS::NonnullGCPtr<AudioNode> destination_node, WebIDL::UnsignedLong output, WebIDL::UnsignedLong input);
void disconnect(JS::NonnullGCPtr<AudioParam> destination_param);
void disconnect(JS::NonnullGCPtr<AudioParam> destination_param, WebIDL::UnsignedLong output);
// https://webaudio.github.io/web-audio-api/#dom-audionode-context
JS::NonnullGCPtr<BaseAudioContext const> context() const
{
// The BaseAudioContext which owns this AudioNode.
return m_context;
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-numberofinputs
virtual WebIDL::UnsignedLong number_of_inputs() = 0;
// https://webaudio.github.io/web-audio-api/#dom-audionode-numberofoutputs
virtual WebIDL::UnsignedLong number_of_outputs() = 0;
// https://webaudio.github.io/web-audio-api/#dom-audionode-channelcount
virtual WebIDL::ExceptionOr<void> set_channel_count(WebIDL::UnsignedLong);
virtual WebIDL::UnsignedLong channel_count() const { return m_channel_count; }
virtual WebIDL::ExceptionOr<void> set_channel_count_mode(Bindings::ChannelCountMode);
Bindings::ChannelCountMode channel_count_mode();
WebIDL::ExceptionOr<void> set_channel_interpretation(Bindings::ChannelInterpretation);
Bindings::ChannelInterpretation channel_interpretation();
WebIDL::ExceptionOr<void> initialize_audio_node_options(AudioNodeOptions const& given_options, AudioNodeDefaultOptions const& default_options);
protected:
AudioNode(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>);
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;
private:
JS::NonnullGCPtr<BaseAudioContext> m_context;
WebIDL::UnsignedLong m_channel_count { 2 };
Bindings::ChannelCountMode m_channel_count_mode { Bindings::ChannelCountMode::Max };
Bindings::ChannelInterpretation m_channel_interpretation { Bindings::ChannelInterpretation::Speakers };
};
}

Libraries/LibWeb/WebAudio/AudioNode.idl
@@ -0,0 +1,46 @@
#import <WebAudio/AudioParam.idl>
#import <WebAudio/BaseAudioContext.idl>
// https://webaudio.github.io/web-audio-api/#enumdef-channelcountmode
enum ChannelCountMode {
"max",
"clamped-max",
"explicit"
};
// https://webaudio.github.io/web-audio-api/#enumdef-channelinterpretation
enum ChannelInterpretation {
"speakers",
"discrete"
};
// https://webaudio.github.io/web-audio-api/#AudioNodeOptions
dictionary AudioNodeOptions {
unsigned long channelCount;
ChannelCountMode channelCountMode;
ChannelInterpretation channelInterpretation;
};
// https://webaudio.github.io/web-audio-api/#AudioNode
[Exposed=Window]
interface AudioNode : EventTarget {
AudioNode connect(AudioNode destinationNode,
optional unsigned long output = 0,
optional unsigned long input = 0);
undefined connect(AudioParam destinationParam, optional unsigned long output = 0);
undefined disconnect();
undefined disconnect(unsigned long output);
undefined disconnect(AudioNode destinationNode);
undefined disconnect(AudioNode destinationNode, unsigned long output);
undefined disconnect(AudioNode destinationNode,
unsigned long output,
unsigned long input);
undefined disconnect(AudioParam destinationParam);
undefined disconnect(AudioParam destinationParam, unsigned long output);
readonly attribute BaseAudioContext context;
readonly attribute unsigned long numberOfInputs;
readonly attribute unsigned long numberOfOutputs;
attribute unsigned long channelCount;
attribute ChannelCountMode channelCountMode;
attribute ChannelInterpretation channelInterpretation;
};

Libraries/LibWeb/WebAudio/AudioParam.cpp
@@ -0,0 +1,148 @@
/*
* Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <LibWeb/Bindings/AudioParamPrototype.h>
#include <LibWeb/Bindings/Intrinsics.h>
#include <LibWeb/WebAudio/AudioParam.h>
#include <LibWeb/WebIDL/ExceptionOr.h>
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(AudioParam);
AudioParam::AudioParam(JS::Realm& realm, float default_value, float min_value, float max_value, Bindings::AutomationRate automation_rate)
: Bindings::PlatformObject(realm)
, m_current_value(default_value)
, m_default_value(default_value)
, m_min_value(min_value)
, m_max_value(max_value)
, m_automation_rate(automation_rate)
{
}
JS::NonnullGCPtr<AudioParam> AudioParam::create(JS::Realm& realm, float default_value, float min_value, float max_value, Bindings::AutomationRate automation_rate)
{
return realm.vm().heap().allocate<AudioParam>(realm, realm, default_value, min_value, max_value, automation_rate);
}
AudioParam::~AudioParam() = default;
// https://webaudio.github.io/web-audio-api/#dom-audioparam-value
// https://webaudio.github.io/web-audio-api/#simple-nominal-range
float AudioParam::value() const
{
// Each AudioParam includes minValue and maxValue attributes that together form the simple nominal range
// for the parameter. In effect, the value of the parameter is clamped to the range [minValue, maxValue].
return clamp(m_current_value, min_value(), max_value());
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-value
void AudioParam::set_value(float value)
{
m_current_value = value;
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-automationrate
Bindings::AutomationRate AudioParam::automation_rate() const
{
return m_automation_rate;
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-automationrate
WebIDL::ExceptionOr<void> AudioParam::set_automation_rate(Bindings::AutomationRate automation_rate)
{
dbgln("FIXME: Fully implement AudioParam::set_automation_rate");
m_automation_rate = automation_rate;
return {};
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-defaultvalue
float AudioParam::default_value() const
{
return m_default_value;
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-minvalue
float AudioParam::min_value() const
{
return m_min_value;
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-maxvalue
float AudioParam::max_value() const
{
return m_max_value;
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-setvalueattime
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> AudioParam::set_value_at_time(float value, double start_time)
{
(void)value;
(void)start_time;
dbgln("FIXME: Implement AudioParam::set_value_at_time");
return JS::NonnullGCPtr<AudioParam> { *this };
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-linearramptovalueattime
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> AudioParam::linear_ramp_to_value_at_time(float value, double end_time)
{
(void)value;
(void)end_time;
return WebIDL::NotSupportedError::create(realm(), "FIXME: Implement AudioParam::linear_ramp_to_value_at_time"_string);
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-exponentialramptovalueattime
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> AudioParam::exponential_ramp_to_value_at_time(float value, double end_time)
{
(void)value;
(void)end_time;
return WebIDL::NotSupportedError::create(realm(), "FIXME: Implement AudioParam::exponential_ramp_to_value_at_time"_string);
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-settargetattime
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> AudioParam::set_target_at_time(float target, double start_time, float time_constant)
{
(void)target;
(void)start_time;
(void)time_constant;
return WebIDL::NotSupportedError::create(realm(), "FIXME: Implement AudioParam::set_target_at_time"_string);
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-setvaluecurveattime
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> AudioParam::set_value_curve_at_time(Span<float> values, double start_time, double duration)
{
(void)values;
(void)start_time;
(void)duration;
return WebIDL::NotSupportedError::create(realm(), "FIXME: Implement AudioParam::set_value_curve_at_time"_string);
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-cancelscheduledvalues
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> AudioParam::cancel_scheduled_values(double cancel_time)
{
(void)cancel_time;
return WebIDL::NotSupportedError::create(realm(), "FIXME: Implement AudioParam::cancel_scheduled_values"_string);
}
// https://webaudio.github.io/web-audio-api/#dom-audioparam-cancelandholdattime
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> AudioParam::cancel_and_hold_at_time(double cancel_time)
{
(void)cancel_time;
return WebIDL::NotSupportedError::create(realm(), "FIXME: Implement AudioParam::cancel_and_hold_at_time"_string);
}
void AudioParam::initialize(JS::Realm& realm)
{
Base::initialize(realm);
WEB_SET_PROTOTYPE_FOR_INTERFACE(AudioParam);
}
void AudioParam::visit_edges(Cell::Visitor& visitor)
{
Base::visit_edges(visitor);
}
}
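Note how value() clamps to the simple nominal range while set_value() stores the raw value. Isolated into a standalone sketch, the behavior is:

#include <algorithm>
#include <cassert>

// Sketch: reads of .value are clamped to the simple nominal range
// [min_value, max_value]; the stored value itself is left untouched.
struct Param {
    float current { 0 };
    float min_value { -1 };
    float max_value { 1 };

    float value() const { return std::clamp(current, min_value, max_value); }
};

int main()
{
    Param param;
    param.current = 5.0f;          // the setter stores the raw value...
    assert(param.value() == 1.0f); // ...but reads clamp to the nominal range
}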

View file

@ -0,0 +1,60 @@
/*
* Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <LibJS/Forward.h>
#include <LibWeb/Bindings/AudioParamPrototype.h>
#include <LibWeb/Bindings/PlatformObject.h>
namespace Web::WebAudio {
// https://webaudio.github.io/web-audio-api/#AudioParam
class AudioParam final : public Bindings::PlatformObject {
WEB_PLATFORM_OBJECT(AudioParam, Bindings::PlatformObject);
JS_DECLARE_ALLOCATOR(AudioParam);
public:
static JS::NonnullGCPtr<AudioParam> create(JS::Realm&, float default_value, float min_value, float max_value, Bindings::AutomationRate);
virtual ~AudioParam() override;
float value() const;
void set_value(float);
Bindings::AutomationRate automation_rate() const;
WebIDL::ExceptionOr<void> set_automation_rate(Bindings::AutomationRate);
float default_value() const;
float min_value() const;
float max_value() const;
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> set_value_at_time(float value, double start_time);
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> linear_ramp_to_value_at_time(float value, double end_time);
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> exponential_ramp_to_value_at_time(float value, double end_time);
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> set_target_at_time(float target, double start_time, float time_constant);
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> set_value_curve_at_time(Span<float> values, double start_time, double duration);
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> cancel_scheduled_values(double cancel_time);
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioParam>> cancel_and_hold_at_time(double cancel_time);
private:
AudioParam(JS::Realm&, float default_value, float min_value, float max_value, Bindings::AutomationRate);
// https://webaudio.github.io/web-audio-api/#dom-audioparam-current-value-slot
float m_current_value {}; // [[current value]]
float m_default_value {};
float m_min_value {};
float m_max_value {};
Bindings::AutomationRate m_automation_rate {};
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;
};
}

View file

@ -0,0 +1,24 @@
// https://webaudio.github.io/web-audio-api/#enumdef-automationrate
enum AutomationRate {
"a-rate",
"k-rate"
};
// https://webaudio.github.io/web-audio-api/#AudioParam
[Exposed=Window]
interface AudioParam {
attribute float value;
attribute AutomationRate automationRate;
readonly attribute float defaultValue;
readonly attribute float minValue;
readonly attribute float maxValue;
AudioParam setValueAtTime(float value, double startTime);
AudioParam linearRampToValueAtTime(float value, double endTime);
AudioParam exponentialRampToValueAtTime(float value, double endTime);
AudioParam setTargetAtTime(float target, double startTime, float timeConstant);
AudioParam setValueCurveAtTime(sequence<float> values,
double startTime,
double duration);
AudioParam cancelScheduledValues(double cancelTime);
AudioParam cancelAndHoldAtTime(double cancelTime);
};
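Most of the automation methods above are still stubs; when implemented, each schedules an event on a timeline, and values between events follow the spec's ramp formulas. A sketch of the two ramps, assuming an event pair (t0, v0) → (t1, v1):

#include <cmath>
#include <cstdio>

// Spec ramp formulas for a parameter moving from value v0 at time t0
// to value v1 at time t1, sampled at some t in [t0, t1].
static double linear_ramp(double v0, double v1, double t0, double t1, double t)
{
    return v0 + (v1 - v0) * ((t - t0) / (t1 - t0));
}

static double exponential_ramp(double v0, double v1, double t0, double t1, double t)
{
    // Only well-defined when v0 and v1 are non-zero and share a sign.
    return v0 * std::pow(v1 / v0, (t - t0) / (t1 - t0));
}

int main()
{
    std::printf("%f\n", linear_ramp(0.0, 1.0, 0.0, 2.0, 1.0));      // 0.500000
    std::printf("%f\n", exponential_ramp(1.0, 4.0, 0.0, 2.0, 1.0)); // 2.000000
}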

View file

@ -0,0 +1,60 @@
/*
* Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <LibWeb/Bindings/AudioScheduledSourceNodePrototype.h>
#include <LibWeb/Bindings/Intrinsics.h>
#include <LibWeb/HTML/EventNames.h>
#include <LibWeb/WebAudio/AudioScheduledSourceNode.h>
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(AudioScheduledSourceNode);
AudioScheduledSourceNode::AudioScheduledSourceNode(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context)
: AudioNode(realm, context)
{
}
AudioScheduledSourceNode::~AudioScheduledSourceNode() = default;
// https://webaudio.github.io/web-audio-api/#dom-audioscheduledsourcenode-onended
JS::GCPtr<WebIDL::CallbackType> AudioScheduledSourceNode::onended()
{
return event_handler_attribute(HTML::EventNames::ended);
}
// https://webaudio.github.io/web-audio-api/#dom-audioscheduledsourcenode-onended
void AudioScheduledSourceNode::set_onended(JS::GCPtr<WebIDL::CallbackType> value)
{
set_event_handler_attribute(HTML::EventNames::ended, value);
}
// https://webaudio.github.io/web-audio-api/#dom-audioscheduledsourcenode-start
WebIDL::ExceptionOr<void> AudioScheduledSourceNode::start(double when)
{
(void)when;
return WebIDL::NotSupportedError::create(realm(), "FIXME: Implement AudioScheduledSourceNode::start"_string);
}
// https://webaudio.github.io/web-audio-api/#dom-audioscheduledsourcenode-stop
WebIDL::ExceptionOr<void> AudioScheduledSourceNode::stop(double when)
{
(void)when;
return WebIDL::NotSupportedError::create(realm(), "FIXME: Implement AudioScheduledSourceNode::stop"_string);
}
void AudioScheduledSourceNode::initialize(JS::Realm& realm)
{
Base::initialize(realm);
WEB_SET_PROTOTYPE_FOR_INTERFACE(AudioScheduledSourceNode);
}
void AudioScheduledSourceNode::visit_edges(Cell::Visitor& visitor)
{
Base::visit_edges(visitor);
}
}
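start() and stop() above are stubs. Per the spec's lifecycle rules, a scheduled source may be started at most once and stopped only after being started; a minimal sketch of that state check (hypothetical standalone types, using exceptions in place of WebIDL errors):

#include <optional>
#include <stdexcept>

// Sketch: a scheduled source can be started at most once, and stopped
// only after it has been started; the spec throws InvalidStateError
// for either violation.
struct ScheduledSource {
    std::optional<double> start_time;
    std::optional<double> stop_time;

    void start(double when = 0)
    {
        if (start_time.has_value())
            throw std::runtime_error("InvalidStateError: source already started");
        start_time = when;
    }

    void stop(double when = 0)
    {
        if (!start_time.has_value())
            throw std::runtime_error("InvalidStateError: source never started");
        stop_time = when;
    }
};

int main()
{
    ScheduledSource source;
    source.start();
    source.stop(2.0); // calling stop() before start() would throw instead
}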

View file

@ -0,0 +1,34 @@
/*
* Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <LibWeb/WebAudio/AudioNode.h>
namespace Web::WebAudio {
// https://webaudio.github.io/web-audio-api/#AudioScheduledSourceNode
class AudioScheduledSourceNode : public AudioNode {
WEB_PLATFORM_OBJECT(AudioScheduledSourceNode, AudioNode);
JS_DECLARE_ALLOCATOR(AudioScheduledSourceNode);
public:
virtual ~AudioScheduledSourceNode() override;
JS::GCPtr<WebIDL::CallbackType> onended();
void set_onended(JS::GCPtr<WebIDL::CallbackType>);
WebIDL::ExceptionOr<void> start(double when = 0);
WebIDL::ExceptionOr<void> stop(double when = 0);
protected:
AudioScheduledSourceNode(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>);
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;
};
}

View file

@ -0,0 +1,10 @@
#import <DOM/EventHandler.idl>
#import <WebAudio/AudioNode.idl>
// https://webaudio.github.io/web-audio-api/#AudioScheduledSourceNode
[Exposed=Window]
interface AudioScheduledSourceNode : AudioNode {
attribute EventHandler onended;
undefined start(optional double when = 0);
undefined stop(optional double when = 0);
};

View file

@ -0,0 +1,254 @@
/*
* Copyright (c) 2023, Luke Wilde <lukew@serenityos.org>
* Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
* Copyright (c) 2024, Jelle Raaijmakers <jelle@ladybird.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <LibWeb/Bindings/BaseAudioContextPrototype.h>
#include <LibWeb/Bindings/Intrinsics.h>
#include <LibWeb/HTML/EventNames.h>
#include <LibWeb/HTML/Scripting/ExceptionReporter.h>
#include <LibWeb/HTML/Window.h>
#include <LibWeb/WebAudio/AudioBuffer.h>
#include <LibWeb/WebAudio/AudioBufferSourceNode.h>
#include <LibWeb/WebAudio/AudioDestinationNode.h>
#include <LibWeb/WebAudio/BaseAudioContext.h>
#include <LibWeb/WebAudio/BiquadFilterNode.h>
#include <LibWeb/WebAudio/DynamicsCompressorNode.h>
#include <LibWeb/WebAudio/GainNode.h>
#include <LibWeb/WebAudio/OscillatorNode.h>
#include <LibWeb/WebIDL/AbstractOperations.h>
#include <LibWeb/WebIDL/Promise.h>
namespace Web::WebAudio {
BaseAudioContext::BaseAudioContext(JS::Realm& realm, float sample_rate)
: DOM::EventTarget(realm)
, m_destination(AudioDestinationNode::construct_impl(realm, *this))
, m_sample_rate(sample_rate)
, m_listener(AudioListener::create(realm))
{
}
BaseAudioContext::~BaseAudioContext() = default;
void BaseAudioContext::initialize(JS::Realm& realm)
{
Base::initialize(realm);
WEB_SET_PROTOTYPE_FOR_INTERFACE(BaseAudioContext);
}
void BaseAudioContext::visit_edges(Cell::Visitor& visitor)
{
Base::visit_edges(visitor);
visitor.visit(m_destination);
visitor.visit(m_pending_promises);
visitor.visit(m_listener);
}
void BaseAudioContext::set_onstatechange(WebIDL::CallbackType* event_handler)
{
set_event_handler_attribute(HTML::EventNames::statechange, event_handler);
}
WebIDL::CallbackType* BaseAudioContext::onstatechange()
{
return event_handler_attribute(HTML::EventNames::statechange);
}
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createbiquadfilter
WebIDL::ExceptionOr<JS::NonnullGCPtr<BiquadFilterNode>> BaseAudioContext::create_biquad_filter()
{
// Factory method for a BiquadFilterNode representing a second order filter which can be configured as one of several common filter types.
return BiquadFilterNode::create(realm(), *this);
}
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createbuffer
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBuffer>> BaseAudioContext::create_buffer(WebIDL::UnsignedLong number_of_channels, WebIDL::UnsignedLong length, float sample_rate)
{
// Creates an AudioBuffer of the given size. The audio data in the buffer will be zero-initialized (silent).
// A NotSupportedError exception MUST be thrown if any of the arguments is negative, zero, or outside its nominal range.
return AudioBuffer::create(realm(), number_of_channels, length, sample_rate);
}
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createbuffersource
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBufferSourceNode>> BaseAudioContext::create_buffer_source()
{
// Factory method for a AudioBufferSourceNode.
return AudioBufferSourceNode::create(realm(), *this);
}
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createoscillator
WebIDL::ExceptionOr<JS::NonnullGCPtr<OscillatorNode>> BaseAudioContext::create_oscillator()
{
// Factory method for an OscillatorNode.
return OscillatorNode::create(realm(), *this);
}
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createdynamicscompressor
WebIDL::ExceptionOr<JS::NonnullGCPtr<DynamicsCompressorNode>> BaseAudioContext::create_dynamics_compressor()
{
// Factory method for a DynamicsCompressorNode.
return DynamicsCompressorNode::create(realm(), *this);
}
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-creategain
WebIDL::ExceptionOr<JS::NonnullGCPtr<GainNode>> BaseAudioContext::create_gain()
{
// Factory method for GainNode.
return GainNode::create(realm(), *this);
}
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createbuffer
WebIDL::ExceptionOr<void> BaseAudioContext::verify_audio_options_inside_nominal_range(JS::Realm& realm, WebIDL::UnsignedLong number_of_channels, WebIDL::UnsignedLong length, float sample_rate)
{
// A NotSupportedError exception MUST be thrown if any of the arguments is negative, zero, or outside its nominal range.
if (number_of_channels == 0)
return WebIDL::NotSupportedError::create(realm, "Number of channels must not be '0'"_string);
if (number_of_channels > MAX_NUMBER_OF_CHANNELS)
return WebIDL::NotSupportedError::create(realm, "Number of channels is greater than allowed range"_string);
if (length == 0)
return WebIDL::NotSupportedError::create(realm, "Length of buffer must be at least 1"_string);
if (sample_rate < MIN_SAMPLE_RATE || sample_rate > MAX_SAMPLE_RATE)
return WebIDL::NotSupportedError::create(realm, "Sample rate is outside of allowed range"_string);
return {};
}
void BaseAudioContext::queue_a_media_element_task(JS::NonnullGCPtr<JS::HeapFunction<void()>> steps)
{
auto task = HTML::Task::create(vm(), m_media_element_event_task_source.source, HTML::current_principal_settings_object().responsible_document(), steps);
HTML::main_thread_event_loop().task_queue().add(task);
}
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-decodeaudiodata
JS::NonnullGCPtr<WebIDL::Promise> BaseAudioContext::decode_audio_data(JS::Handle<WebIDL::BufferSource> audio_data, JS::GCPtr<WebIDL::CallbackType> success_callback, JS::GCPtr<WebIDL::CallbackType> error_callback)
{
auto& realm = this->realm();
// FIXME: When decodeAudioData is called, the following steps MUST be performed on the control thread:
// 1. If this's relevant global object's associated Document is not fully active then return a
// promise rejected with "InvalidStateError" DOMException.
auto const& associated_document = verify_cast<HTML::Window>(HTML::relevant_global_object(*this)).associated_document();
if (!associated_document.is_fully_active()) {
auto error = WebIDL::InvalidStateError::create(realm, "The document is not fully active."_string);
return WebIDL::create_rejected_promise_from_exception(realm, error);
}
// 2. Let promise be a new Promise.
auto promise = WebIDL::create_promise(realm);
// FIXME: 3. If audioData is detached, execute the following steps:
if (true) {
// 3.1. Append promise to [[pending promises]].
m_pending_promises.append(promise);
// FIXME: 3.2. Detach the audioData ArrayBuffer. If this operations throws, jump to the step 3.
// 3.3. Queue a decoding operation to be performed on another thread.
queue_a_decoding_operation(promise, move(audio_data), success_callback, error_callback);
}
// 4. Else, execute the following error steps:
else {
// 4.1. Let error be a DataCloneError.
auto error = WebIDL::DataCloneError::create(realm, "Audio data is not detached."_string);
// 4.2. Reject promise with error, and remove it from [[pending promises]].
WebIDL::reject_promise(realm, promise, error);
m_pending_promises.remove_first_matching([&promise](auto& pending_promise) {
return pending_promise == promise;
});
// 4.3. Queue a media element task to invoke errorCallback with error.
if (error_callback) {
queue_a_media_element_task(JS::create_heap_function(heap(), [&realm, error_callback, error] {
auto completion = WebIDL::invoke_callback(*error_callback, {}, error);
if (completion.is_abrupt())
HTML::report_exception(completion, realm);
}));
}
}
// 5. Return promise.
return promise;
}
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-decodeaudiodata
void BaseAudioContext::queue_a_decoding_operation(JS::NonnullGCPtr<JS::PromiseCapability> promise, [[maybe_unused]] JS::Handle<WebIDL::BufferSource> audio_data, JS::GCPtr<WebIDL::CallbackType> success_callback, JS::GCPtr<WebIDL::CallbackType> error_callback)
{
auto& realm = this->realm();
// FIXME: When queuing a decoding operation to be performed on another thread, the following steps
// MUST happen on a thread that is not the control thread nor the rendering thread, called
// the decoding thread.
// 1. Let can decode be a boolean flag, initially set to true.
auto can_decode { true };
// FIXME: 2. Attempt to determine the MIME type of audioData, using MIME Sniffing §6.2 Matching an
// audio or video type pattern. If the audio or video type pattern matching algorithm returns
// undefined, set can decode to false.
// 3. If can decode is true,
if (can_decode) {
// FIXME: attempt to decode the encoded audioData into linear PCM. In case of
// failure, set can decode to false.
// FIXME: If the media byte-stream contains multiple audio tracks, only decode the first track to linear pcm.
}
// 4. If can decode is false,
if (!can_decode) {
// queue a media element task to execute the following steps:
queue_a_media_element_task(JS::create_heap_function(heap(), [this, &realm, promise, error_callback] {
// 4.1. Let error be a DOMException whose name is EncodingError.
auto error = WebIDL::EncodingError::create(realm, "Unable to decode."_string);
// 4.2. Reject promise with error, and remove it from [[pending promises]].
WebIDL::reject_promise(realm, promise, error);
m_pending_promises.remove_first_matching([&promise](auto& pending_promise) {
return pending_promise == promise;
});
// 4.3. If errorCallback is not missing, invoke errorCallback with error.
if (error_callback) {
auto completion = WebIDL::invoke_callback(*error_callback, {}, error);
if (completion.is_abrupt())
HTML::report_exception(completion, realm);
}
}));
}
// 5. Otherwise:
else {
// FIXME: 5.1. Take the result, representing the decoded linear PCM audio data, and resample it to the
// sample-rate of the BaseAudioContext if it is different from the sample-rate of
// audioData.
// FIXME: 5.2. queue a media element task to execute the following steps:
// FIXME: 5.2.1. Let buffer be an AudioBuffer containing the final result (after possibly performing
// sample-rate conversion).
auto buffer = MUST(create_buffer(2, 1, 44100));
// 5.2.2. Resolve promise with buffer.
WebIDL::resolve_promise(realm, promise, buffer);
// 5.2.3. If successCallback is not missing, invoke successCallback with buffer.
if (success_callback) {
auto completion = WebIDL::invoke_callback(*success_callback, {}, buffer);
if (completion.is_abrupt())
HTML::report_exception(completion, realm);
}
}
}
}

View file

@ -0,0 +1,91 @@
/*
* Copyright (c) 2023, Luke Wilde <lukew@serenityos.org>
* Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
* Copyright (c) 2024, Jelle Raaijmakers <jelle@ladybird.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <LibWeb/Bindings/BaseAudioContextPrototype.h>
#include <LibWeb/DOM/EventTarget.h>
#include <LibWeb/WebAudio/AudioListener.h>
#include <LibWeb/WebAudio/BiquadFilterNode.h>
#include <LibWeb/WebIDL/Types.h>
namespace Web::WebAudio {
class AudioDestinationNode;
// https://webaudio.github.io/web-audio-api/#BaseAudioContext
class BaseAudioContext : public DOM::EventTarget {
WEB_PLATFORM_OBJECT(BaseAudioContext, DOM::EventTarget);
public:
virtual ~BaseAudioContext() override;
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createbuffer-numberofchannels
// > An implementation MUST support at least 32 channels.
// Other browsers appear to only allow 32 channels - so let's limit ourselves to that too.
static constexpr WebIDL::UnsignedLong MAX_NUMBER_OF_CHANNELS { 32 };
// https://webaudio.github.io/web-audio-api/#dom-baseaudiocontext-createbuffer-samplerate
// > An implementation MUST support sample rates in at least the range 8000 to 96000.
// This doesn't seem to be consistent between browsers. We use the range Firefox accepts, based on testing BaseAudioContext.createBuffer.
static constexpr float MIN_SAMPLE_RATE { 8000 };
static constexpr float MAX_SAMPLE_RATE { 192000 };
JS::NonnullGCPtr<AudioDestinationNode> destination() const { return m_destination; }
float sample_rate() const { return m_sample_rate; }
double current_time() const { return m_current_time; }
JS::NonnullGCPtr<AudioListener> listener() const { return m_listener; }
Bindings::AudioContextState state() const { return m_control_thread_state; }
// https://webaudio.github.io/web-audio-api/#--nyquist-frequency
float nyquist_frequency() const { return m_sample_rate / 2; }
void set_onstatechange(WebIDL::CallbackType*);
WebIDL::CallbackType* onstatechange();
void set_sample_rate(float sample_rate) { m_sample_rate = sample_rate; }
void set_control_state(Bindings::AudioContextState state) { m_control_thread_state = state; }
void set_rendering_state(Bindings::AudioContextState state) { m_rendering_thread_state = state; }
static WebIDL::ExceptionOr<void> verify_audio_options_inside_nominal_range(JS::Realm&, WebIDL::UnsignedLong number_of_channels, WebIDL::UnsignedLong length, float sample_rate);
WebIDL::ExceptionOr<JS::NonnullGCPtr<BiquadFilterNode>> create_biquad_filter();
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBuffer>> create_buffer(WebIDL::UnsignedLong number_of_channels, WebIDL::UnsignedLong length, float sample_rate);
WebIDL::ExceptionOr<JS::NonnullGCPtr<AudioBufferSourceNode>> create_buffer_source();
WebIDL::ExceptionOr<JS::NonnullGCPtr<OscillatorNode>> create_oscillator();
WebIDL::ExceptionOr<JS::NonnullGCPtr<DynamicsCompressorNode>> create_dynamics_compressor();
WebIDL::ExceptionOr<JS::NonnullGCPtr<GainNode>> create_gain();
JS::NonnullGCPtr<WebIDL::Promise> decode_audio_data(JS::Handle<WebIDL::BufferSource>, JS::GCPtr<WebIDL::CallbackType>, JS::GCPtr<WebIDL::CallbackType>);
protected:
explicit BaseAudioContext(JS::Realm&, float sample_rate = 0);
void queue_a_media_element_task(JS::NonnullGCPtr<JS::HeapFunction<void()>>);
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;
JS::NonnullGCPtr<AudioDestinationNode> m_destination;
Vector<JS::NonnullGCPtr<WebIDL::Promise>> m_pending_promises;
private:
void queue_a_decoding_operation(JS::NonnullGCPtr<JS::PromiseCapability>, JS::Handle<WebIDL::BufferSource>, JS::GCPtr<WebIDL::CallbackType>, JS::GCPtr<WebIDL::CallbackType>);
float m_sample_rate { 0 };
double m_current_time { 0 };
JS::NonnullGCPtr<AudioListener> m_listener;
Bindings::AudioContextState m_control_thread_state = Bindings::AudioContextState::Suspended;
Bindings::AudioContextState m_rendering_thread_state = Bindings::AudioContextState::Suspended;
HTML::UniqueTaskSource m_media_element_event_task_source {};
};
}

View file

@ -0,0 +1,51 @@
#import <DOM/EventTarget.idl>
#import <DOM/EventHandler.idl>
#import <WebAudio/AudioBuffer.idl>
#import <WebAudio/AudioBufferSourceNode.idl>
#import <WebAudio/AudioDestinationNode.idl>
#import <WebAudio/AudioListener.idl>
#import <WebAudio/DynamicsCompressorNode.idl>
#import <WebAudio/GainNode.idl>
#import <WebAudio/OscillatorNode.idl>
#import <WebIDL/DOMException.idl>
// https://www.w3.org/TR/webaudio/#enumdef-audiocontextstate
enum AudioContextState { "suspended", "running", "closed" };
callback DecodeErrorCallback = undefined (DOMException error);
callback DecodeSuccessCallback = undefined (AudioBuffer decodedData);
// https://webaudio.github.io/web-audio-api/#BaseAudioContext
[Exposed=Window]
interface BaseAudioContext : EventTarget {
readonly attribute AudioDestinationNode destination;
readonly attribute float sampleRate;
readonly attribute double currentTime;
readonly attribute AudioListener listener;
readonly attribute AudioContextState state;
// FIXME: [SameObject, SecureContext]
[FIXME] readonly attribute AudioWorklet audioWorklet;
attribute EventHandler onstatechange;
[FIXME] AnalyserNode createAnalyser ();
BiquadFilterNode createBiquadFilter ();
AudioBuffer createBuffer(unsigned long numberOfChannels, unsigned long length, float sampleRate);
AudioBufferSourceNode createBufferSource ();
[FIXME] ChannelMergerNode createChannelMerger (optional unsigned long numberOfInputs = 6);
[FIXME] ChannelSplitterNode createChannelSplitter (optional unsigned long numberOfOutputs = 6);
[FIXME] ConstantSourceNode createConstantSource ();
[FIXME] ConvolverNode createConvolver ();
[FIXME] DelayNode createDelay (optional double maxDelayTime = 1.0);
DynamicsCompressorNode createDynamicsCompressor();
GainNode createGain();
[FIXME] IIRFilterNode createIIRFilter (sequence<double> feedforward, sequence<double> feedback);
OscillatorNode createOscillator();
[FIXME] PannerNode createPanner ();
[FIXME] PeriodicWave createPeriodicWave (sequence<float> real, sequence<float> imag, optional PeriodicWaveConstraints constraints = {});
[FIXME] ScriptProcessorNode createScriptProcessor(optional unsigned long bufferSize = 0, optional unsigned long numberOfInputChannels = 2, optional unsigned long numberOfOutputChannels = 2);
[FIXME] StereoPannerNode createStereoPanner ();
[FIXME] WaveShaperNode createWaveShaper ();
Promise<AudioBuffer> decodeAudioData (ArrayBuffer audioData, optional DecodeSuccessCallback? successCallback, optional DecodeErrorCallback? errorCallback);
};

View file

@ -0,0 +1,117 @@
/*
* Copyright (c) 2024, Bar Yemini <bar.ye651@gmail.com>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <LibWeb/Bindings/AudioParamPrototype.h>
#include <LibWeb/Bindings/BiquadFilterNodePrototype.h>
#include <LibWeb/Bindings/Intrinsics.h>
#include <LibWeb/WebAudio/AudioNode.h>
#include <LibWeb/WebAudio/AudioParam.h>
#include <LibWeb/WebAudio/BiquadFilterNode.h>
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(BiquadFilterNode);
BiquadFilterNode::BiquadFilterNode(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, BiquadFilterOptions const& options)
: AudioNode(realm, context)
, m_type(options.type)
, m_frequency(AudioParam::create(realm, options.frequency, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
, m_detune(AudioParam::create(realm, options.detune, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
, m_q(AudioParam::create(realm, options.q, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
, m_gain(AudioParam::create(realm, options.gain, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
{
}
BiquadFilterNode::~BiquadFilterNode() = default;
// https://webaudio.github.io/web-audio-api/#dom-biquadfilternode-type
void BiquadFilterNode::set_type(Bindings::BiquadFilterType type)
{
m_type = type;
}
// https://webaudio.github.io/web-audio-api/#dom-biquadfilternode-type
Bindings::BiquadFilterType BiquadFilterNode::type() const
{
return m_type;
}
// https://webaudio.github.io/web-audio-api/#dom-biquadfilternode-frequency
JS::NonnullGCPtr<AudioParam> BiquadFilterNode::frequency() const
{
return m_frequency;
}
// https://webaudio.github.io/web-audio-api/#dom-biquadfilternode-detune
JS::NonnullGCPtr<AudioParam> BiquadFilterNode::detune() const
{
return m_detune;
}
// https://webaudio.github.io/web-audio-api/#dom-biquadfilternode-q
JS::NonnullGCPtr<AudioParam> BiquadFilterNode::q() const
{
return m_q;
}
// https://webaudio.github.io/web-audio-api/#dom-biquadfilternode-gain
JS::NonnullGCPtr<AudioParam> BiquadFilterNode::gain() const
{
return m_gain;
}
// https://webaudio.github.io/web-audio-api/#dom-biquadfilternode-getfrequencyresponse
WebIDL::ExceptionOr<void> BiquadFilterNode::get_frequency_response(JS::Handle<WebIDL::BufferSource> const& frequency_hz, JS::Handle<WebIDL::BufferSource> const& mag_response, JS::Handle<WebIDL::BufferSource> const& phase_response)
{
(void)frequency_hz;
(void)mag_response;
(void)phase_response;
dbgln("FIXME: Implement BiquadFilterNode::get_frequency_response(Float32Array, Float32Array, Float32Array)");
return {};
}
WebIDL::ExceptionOr<JS::NonnullGCPtr<BiquadFilterNode>> BiquadFilterNode::create(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, BiquadFilterOptions const& options)
{
return construct_impl(realm, context, options);
}
// https://webaudio.github.io/web-audio-api/#dom-biquadfilternode-biquadfilternode
WebIDL::ExceptionOr<JS::NonnullGCPtr<BiquadFilterNode>> BiquadFilterNode::construct_impl(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, BiquadFilterOptions const& options)
{
// When the constructor is called with a BaseAudioContext c and an option object option, the user agent
// MUST initialize the AudioNode this, with context and options as arguments.
auto node = realm.vm().heap().allocate<BiquadFilterNode>(realm, realm, context, options);
// Default options for channel count and interpretation
// https://webaudio.github.io/web-audio-api/#BiquadFilterNode
AudioNodeDefaultOptions default_options;
default_options.channel_count_mode = Bindings::ChannelCountMode::Max;
default_options.channel_interpretation = Bindings::ChannelInterpretation::Speakers;
default_options.channel_count = 2;
// FIXME: Set tail-time to yes
TRY(node->initialize_audio_node_options(options, default_options));
return node;
}
void BiquadFilterNode::initialize(JS::Realm& realm)
{
Base::initialize(realm);
WEB_SET_PROTOTYPE_FOR_INTERFACE(BiquadFilterNode);
}
void BiquadFilterNode::visit_edges(Cell::Visitor& visitor)
{
Base::visit_edges(visitor);
visitor.visit(m_frequency);
visitor.visit(m_detune);
visitor.visit(m_q);
visitor.visit(m_gain);
}
}
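getFrequencyResponse() is still a stub. Once the filter coefficients are known, the response is just H(z) evaluated on the unit circle. A standalone sketch for the lowpass type, using the Audio EQ Cookbook coefficient formulas the spec adopts:

#include <cmath>
#include <complex>
#include <cstdio>
#include <numbers>

// Magnitude response at frequency f (Hz) of a lowpass biquad with cutoff f0
// and quality q, at sample rate fs. Coefficients follow the Audio EQ Cookbook.
static double lowpass_magnitude(double f, double f0, double q, double fs)
{
    double const w0 = 2 * std::numbers::pi * f0 / fs;
    double const alpha = std::sin(w0) / (2 * q);
    double const b0 = (1 - std::cos(w0)) / 2;
    double const b1 = 1 - std::cos(w0);
    double const b2 = b0;
    double const a0 = 1 + alpha;
    double const a1 = -2 * std::cos(w0);
    double const a2 = 1 - alpha;

    // Evaluate H(z) = (b0 + b1 z^-1 + b2 z^-2) / (a0 + a1 z^-1 + a2 z^-2)
    // on the unit circle, z = e^{jw}.
    double const w = 2 * std::numbers::pi * f / fs;
    auto const z1 = std::polar(1.0, -w);
    auto const z2 = std::polar(1.0, -2 * w);
    return std::abs((b0 + b1 * z1 + b2 * z2) / (a0 + a1 * z1 + a2 * z2));
}

int main()
{
    // Near unity well below the 1 kHz cutoff; strongly attenuated above it.
    std::printf("%f %f\n", lowpass_magnitude(100, 1000, 1, 44100), lowpass_magnitude(10000, 1000, 1, 44100));
}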

View file

@ -0,0 +1,60 @@
/*
* Copyright (c) 2024, Bar Yemini <bar.ye651@gmail.com>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <LibWeb/Bindings/BiquadFilterNodePrototype.h>
#include <LibWeb/WebAudio/AudioNode.h>
#include <LibWeb/WebAudio/AudioParam.h>
namespace Web::WebAudio {
// https://webaudio.github.io/web-audio-api/#BiquadFilterOptions
struct BiquadFilterOptions : AudioNodeOptions {
Bindings::BiquadFilterType type { Bindings::BiquadFilterType::Lowpass };
float q { 1 };
float detune { 0 };
float frequency { 350 };
float gain { 0 };
};
// https://webaudio.github.io/web-audio-api/#BiquadFilterNode
class BiquadFilterNode : public AudioNode {
WEB_PLATFORM_OBJECT(BiquadFilterNode, AudioNode);
JS_DECLARE_ALLOCATOR(BiquadFilterNode);
public:
virtual ~BiquadFilterNode() override;
WebIDL::UnsignedLong number_of_inputs() override { return 1; }
WebIDL::UnsignedLong number_of_outputs() override { return 1; }
void set_type(Bindings::BiquadFilterType);
Bindings::BiquadFilterType type() const;
JS::NonnullGCPtr<AudioParam> frequency() const;
JS::NonnullGCPtr<AudioParam> detune() const;
JS::NonnullGCPtr<AudioParam> q() const;
JS::NonnullGCPtr<AudioParam> gain() const;
WebIDL::ExceptionOr<void> get_frequency_response(JS::Handle<WebIDL::BufferSource> const&, JS::Handle<WebIDL::BufferSource> const&, JS::Handle<WebIDL::BufferSource> const&);
static WebIDL::ExceptionOr<JS::NonnullGCPtr<BiquadFilterNode>> create(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, BiquadFilterOptions const& = {});
static WebIDL::ExceptionOr<JS::NonnullGCPtr<BiquadFilterNode>> construct_impl(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, BiquadFilterOptions const& = {});
protected:
BiquadFilterNode(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, BiquadFilterOptions const& = {});
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;
private:
Bindings::BiquadFilterType m_type { Bindings::BiquadFilterType::Lowpass };
JS::NonnullGCPtr<AudioParam> m_frequency;
JS::NonnullGCPtr<AudioParam> m_detune;
JS::NonnullGCPtr<AudioParam> m_q;
JS::NonnullGCPtr<AudioParam> m_gain;
};
}

View file

@ -0,0 +1,33 @@
#import <WebAudio/AudioParam.idl>
#import <WebAudio/AudioNode.idl>
#import <WebAudio/BaseAudioContext.idl>
enum BiquadFilterType {
"lowpass",
"highpass",
"bandpass",
"lowshelf",
"highshelf",
"peaking",
"notch",
"allpass"
};
dictionary BiquadFilterOptions : AudioNodeOptions {
BiquadFilterType type = "lowpass";
float Q = 1;
float detune = 0;
float frequency = 350;
float gain = 0;
};
[Exposed=Window]
interface BiquadFilterNode : AudioNode {
constructor (BaseAudioContext context, optional BiquadFilterOptions options = {});
attribute BiquadFilterType type;
readonly attribute AudioParam frequency;
readonly attribute AudioParam detune;
readonly attribute AudioParam Q;
readonly attribute AudioParam gain;
undefined getFrequencyResponse (Float32Array frequencyHz, Float32Array magResponse, Float32Array phaseResponse);
};

View file

@ -0,0 +1,92 @@
/*
* Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <LibWeb/Bindings/DynamicsCompressorNodePrototype.h>
#include <LibWeb/Bindings/Intrinsics.h>
#include <LibWeb/WebAudio/AudioParam.h>
#include <LibWeb/WebAudio/DynamicsCompressorNode.h>
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(DynamicsCompressorNode);
DynamicsCompressorNode::~DynamicsCompressorNode() = default;
WebIDL::ExceptionOr<JS::NonnullGCPtr<DynamicsCompressorNode>> DynamicsCompressorNode::create(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, DynamicsCompressorOptions const& options)
{
return construct_impl(realm, context, options);
}
// https://webaudio.github.io/web-audio-api/#dom-dynamicscompressornode-dynamicscompressornode
WebIDL::ExceptionOr<JS::NonnullGCPtr<DynamicsCompressorNode>> DynamicsCompressorNode::construct_impl(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, DynamicsCompressorOptions const& options)
{
// Create the node and allocate memory
auto node = realm.vm().heap().allocate<DynamicsCompressorNode>(realm, realm, context, options);
// Default options for channel count and interpretation
// https://webaudio.github.io/web-audio-api/#DynamicsCompressorNode
AudioNodeDefaultOptions default_options;
default_options.channel_count_mode = Bindings::ChannelCountMode::ClampedMax;
default_options.channel_interpretation = Bindings::ChannelInterpretation::Speakers;
default_options.channel_count = 2;
// FIXME: Set tail-time to yes
TRY(node->initialize_audio_node_options(options, default_options));
return node;
}
DynamicsCompressorNode::DynamicsCompressorNode(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, DynamicsCompressorOptions const& options)
: AudioNode(realm, context)
, m_threshold(AudioParam::create(realm, options.threshold, -100, 0, Bindings::AutomationRate::KRate))
, m_knee(AudioParam::create(realm, options.knee, 0, 40, Bindings::AutomationRate::KRate))
, m_ratio(AudioParam::create(realm, options.ratio, 1, 20, Bindings::AutomationRate::KRate))
, m_attack(AudioParam::create(realm, options.attack, 0, 1, Bindings::AutomationRate::KRate))
, m_release(AudioParam::create(realm, options.release, 0, 1, Bindings::AutomationRate::KRate))
{
}
void DynamicsCompressorNode::initialize(JS::Realm& realm)
{
Base::initialize(realm);
WEB_SET_PROTOTYPE_FOR_INTERFACE(DynamicsCompressorNode);
}
void DynamicsCompressorNode::visit_edges(Cell::Visitor& visitor)
{
Base::visit_edges(visitor);
visitor.visit(m_threshold);
visitor.visit(m_knee);
visitor.visit(m_ratio);
visitor.visit(m_attack);
visitor.visit(m_release);
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-channelcountmode
WebIDL::ExceptionOr<void> DynamicsCompressorNode::set_channel_count_mode(Bindings::ChannelCountMode mode)
{
if (mode == Bindings::ChannelCountMode::Max) {
// Return a NotSupportedError if 'max' is used
return WebIDL::NotSupportedError::create(realm(), "DynamicsCompressorNode does not support 'max' as channelCountMode."_string);
}
// If the mode is valid, call the base class implementation
return AudioNode::set_channel_count_mode(mode);
}
// https://webaudio.github.io/web-audio-api/#dom-audionode-channelcount
WebIDL::ExceptionOr<void> DynamicsCompressorNode::set_channel_count(WebIDL::UnsignedLong channel_count)
{
if (channel_count > 2) {
// Return a NotSupportedError if the channel count is greater than 2
return WebIDL::NotSupportedError::create(realm(), "DynamicsCompressorNode does not support channel count greater than 2"_string);
}
// If the channel count is valid, call the base class implementation
return AudioNode::set_channel_count(channel_count);
}
}
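The threshold, knee, and ratio parameters above define the compressor's static gain curve. A hard-knee sketch of that curve (the spec's actual curve interpolates smoothly across the knee width):

#include <cstdio>

// Hard-knee static compression curve, in decibels: below the threshold the
// level passes through; above it, excess level is divided by the ratio.
// (The real node blends smoothly over the `knee` dB around the threshold.)
static double compress_db(double input_db, double threshold_db, double ratio)
{
    if (input_db <= threshold_db)
        return input_db;
    return threshold_db + (input_db - threshold_db) / ratio;
}

int main()
{
    // With the default threshold (-24 dB) and ratio (12), -12 dB in -> -23 dB out.
    std::printf("%f\n", compress_db(-12, -24, 12));
}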

View file

@ -0,0 +1,72 @@
/*
* Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <LibWeb/WebAudio/AudioNode.h>
namespace Web::WebAudio {
// https://webaudio.github.io/web-audio-api/#DynamicsCompressorOptions
struct DynamicsCompressorOptions : AudioNodeOptions {
float attack { 0.003 };
float knee { 30 };
float ratio { 12 };
float release { 0.25 };
float threshold { -24 };
};
// https://webaudio.github.io/web-audio-api/#DynamicsCompressorNode
class DynamicsCompressorNode : public AudioNode {
WEB_PLATFORM_OBJECT(DynamicsCompressorNode, AudioNode);
JS_DECLARE_ALLOCATOR(DynamicsCompressorNode);
public:
virtual ~DynamicsCompressorNode() override;
static WebIDL::ExceptionOr<JS::NonnullGCPtr<DynamicsCompressorNode>> create(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, DynamicsCompressorOptions const& = {});
static WebIDL::ExceptionOr<JS::NonnullGCPtr<DynamicsCompressorNode>> construct_impl(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, DynamicsCompressorOptions const& = {});
WebIDL::UnsignedLong number_of_inputs() override { return 1; }
WebIDL::UnsignedLong number_of_outputs() override { return 1; }
JS::NonnullGCPtr<AudioParam const> threshold() const { return m_threshold; }
JS::NonnullGCPtr<AudioParam const> knee() const { return m_knee; }
JS::NonnullGCPtr<AudioParam const> ratio() const { return m_ratio; }
JS::NonnullGCPtr<AudioParam const> attack() const { return m_attack; }
JS::NonnullGCPtr<AudioParam const> release() const { return m_release; }
float reduction() const { return m_reduction; }
WebIDL::ExceptionOr<void> set_channel_count_mode(Bindings::ChannelCountMode) override;
WebIDL::ExceptionOr<void> set_channel_count(WebIDL::UnsignedLong) override;
protected:
DynamicsCompressorNode(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, DynamicsCompressorOptions const& = {});
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;
private:
// https://webaudio.github.io/web-audio-api/#dom-dynamicscompressornode-threshold
JS::NonnullGCPtr<AudioParam> m_threshold;
// https://webaudio.github.io/web-audio-api/#dom-dynamicscompressornode-knee
JS::NonnullGCPtr<AudioParam> m_knee;
// https://webaudio.github.io/web-audio-api/#dom-dynamicscompressornode-ratio
JS::NonnullGCPtr<AudioParam> m_ratio;
// https://webaudio.github.io/web-audio-api/#dom-dynamicscompressornode-attack
JS::NonnullGCPtr<AudioParam> m_attack;
// https://webaudio.github.io/web-audio-api/#dom-dynamicscompressornode-release
JS::NonnullGCPtr<AudioParam> m_release;
// https://webaudio.github.io/web-audio-api/#dom-dynamicscompressornode-internal-reduction-slot
float m_reduction { 0 }; // [[internal reduction]]
};
}

View file

@ -0,0 +1,25 @@
#import <WebAudio/AudioNode.idl>
#import <WebAudio/AudioParam.idl>
#import <WebAudio/BaseAudioContext.idl>
// https://webaudio.github.io/web-audio-api/#DynamicsCompressorOptions
dictionary DynamicsCompressorOptions : AudioNodeOptions {
float attack = 0.003;
float knee = 30;
float ratio = 12;
float release = 0.25;
float threshold = -24;
};
// https://webaudio.github.io/web-audio-api/#DynamicsCompressorNode
[Exposed=Window]
interface DynamicsCompressorNode : AudioNode {
constructor(BaseAudioContext context,
optional DynamicsCompressorOptions options = {});
readonly attribute AudioParam threshold;
readonly attribute AudioParam knee;
readonly attribute AudioParam ratio;
readonly attribute float reduction;
readonly attribute AudioParam attack;
readonly attribute AudioParam release;
};

View file

@ -0,0 +1,60 @@
/*
* Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <LibWeb/Bindings/Intrinsics.h>
#include <LibWeb/WebAudio/AudioNode.h>
#include <LibWeb/WebAudio/AudioParam.h>
#include <LibWeb/WebAudio/BaseAudioContext.h>
#include <LibWeb/WebAudio/GainNode.h>
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(GainNode);
GainNode::~GainNode() = default;
WebIDL::ExceptionOr<JS::NonnullGCPtr<GainNode>> GainNode::create(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, GainOptions const& options)
{
return construct_impl(realm, context, options);
}
// https://webaudio.github.io/web-audio-api/#dom-gainnode-gainnode
WebIDL::ExceptionOr<JS::NonnullGCPtr<GainNode>> GainNode::construct_impl(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, GainOptions const& options)
{
// Create the node and allocate memory
auto node = realm.vm().heap().allocate<GainNode>(realm, realm, context, options);
// Default options for channel count and interpretation
// https://webaudio.github.io/web-audio-api/#GainNode
AudioNodeDefaultOptions default_options;
default_options.channel_count_mode = Bindings::ChannelCountMode::Max;
default_options.channel_interpretation = Bindings::ChannelInterpretation::Speakers;
default_options.channel_count = 2;
// FIXME: Set tail-time to no
TRY(node->initialize_audio_node_options(options, default_options));
return node;
}
GainNode::GainNode(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, GainOptions const& options)
: AudioNode(realm, context)
, m_gain(AudioParam::create(realm, options.gain, NumericLimits<float>::lowest(), NumericLimits<float>::max(), Bindings::AutomationRate::ARate))
{
}
void GainNode::initialize(JS::Realm& realm)
{
Base::initialize(realm);
WEB_SET_PROTOTYPE_FOR_INTERFACE(GainNode);
}
void GainNode::visit_edges(Cell::Visitor& visitor)
{
Base::visit_edges(visitor);
visitor.visit(m_gain);
}
}
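Rendering-wise, GainNode is the simplest node: every output sample is the input sample scaled by the gain. A sketch, assuming a constant (non-automated) gain:

#include <vector>

// Sketch: scale one channel of samples by a constant gain. With a-rate
// automation the gain would instead be evaluated per sample frame.
static void apply_gain(std::vector<float>& samples, float gain)
{
    for (auto& sample : samples)
        sample *= gain;
}

int main()
{
    std::vector<float> channel { 0.5f, -0.25f };
    apply_gain(channel, 2.0f); // channel is now { 1.0f, -0.5f }
}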

View file

@ -0,0 +1,46 @@
/*
* Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <LibWeb/Bindings/GainNodePrototype.h>
#include <LibWeb/WebAudio/AudioNode.h>
namespace Web::WebAudio {
// https://webaudio.github.io/web-audio-api/#GainOptions
struct GainOptions : AudioNodeOptions {
float gain { 1.0 };
};
// https://webaudio.github.io/web-audio-api/#GainNode
class GainNode : public AudioNode {
WEB_PLATFORM_OBJECT(GainNode, AudioNode);
JS_DECLARE_ALLOCATOR(GainNode);
public:
virtual ~GainNode() override;
static WebIDL::ExceptionOr<JS::NonnullGCPtr<GainNode>> create(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, GainOptions const& = {});
static WebIDL::ExceptionOr<JS::NonnullGCPtr<GainNode>> construct_impl(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, GainOptions const& = {});
WebIDL::UnsignedLong number_of_inputs() override { return 1; }
WebIDL::UnsignedLong number_of_outputs() override { return 1; }
JS::NonnullGCPtr<AudioParam const> gain() const { return m_gain; }
protected:
GainNode(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, GainOptions const& = {});
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;
private:
// https://webaudio.github.io/web-audio-api/#dom-gainnode-gain
JS::NonnullGCPtr<AudioParam> m_gain;
};
}

View file

@ -0,0 +1,15 @@
#import <WebAudio/AudioNode.idl>
#import <WebAudio/AudioParam.idl>
#import <WebAudio/BaseAudioContext.idl>
// https://webaudio.github.io/web-audio-api/#GainOptions
dictionary GainOptions : AudioNodeOptions {
float gain = 1.0;
};
// https://webaudio.github.io/web-audio-api/#GainNode
[Exposed=Window]
interface GainNode : AudioNode {
constructor(BaseAudioContext context, optional GainOptions options = {});
readonly attribute AudioParam gain;
};

View file

@ -0,0 +1,93 @@
/*
* Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <LibWeb/Bindings/Intrinsics.h>
#include <LibWeb/HTML/EventNames.h>
#include <LibWeb/HTML/Window.h>
#include <LibWeb/WebAudio/OfflineAudioContext.h>
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(OfflineAudioContext);
WebIDL::ExceptionOr<JS::NonnullGCPtr<OfflineAudioContext>> OfflineAudioContext::construct_impl(JS::Realm& realm, OfflineAudioContextOptions const& context_options)
{
return construct_impl(realm, context_options.number_of_channels, context_options.length, context_options.sample_rate);
}
// https://webaudio.github.io/web-audio-api/#dom-offlineaudiocontext-offlineaudiocontext-numberofchannels-length-samplerate
WebIDL::ExceptionOr<JS::NonnullGCPtr<OfflineAudioContext>> OfflineAudioContext::construct_impl(JS::Realm& realm,
WebIDL::UnsignedLong number_of_channels, WebIDL::UnsignedLong length, float sample_rate)
{
// The OfflineAudioContext can be constructed with the same arguments as AudioContext.createBuffer.
// A NotSupportedError exception MUST be thrown if any of the arguments is negative, zero, or outside its nominal range.
TRY(verify_audio_options_inside_nominal_range(realm, number_of_channels, length, sample_rate));
return realm.heap().allocate<OfflineAudioContext>(realm, realm, number_of_channels, length, sample_rate);
}
OfflineAudioContext::~OfflineAudioContext() = default;
// https://webaudio.github.io/web-audio-api/#dom-offlineaudiocontext-startrendering
WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> OfflineAudioContext::start_rendering()
{
return WebIDL::NotSupportedError::create(realm(), "FIXME: Implement OfflineAudioContext::start_rendering"_string);
}
WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> OfflineAudioContext::resume()
{
return WebIDL::NotSupportedError::create(realm(), "FIXME: Implement OfflineAudioContext::resume"_string);
}
WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> OfflineAudioContext::suspend(double suspend_time)
{
(void)suspend_time;
return WebIDL::NotSupportedError::create(realm(), "FIXME: Implement OfflineAudioContext::suspend"_string);
}
// https://webaudio.github.io/web-audio-api/#dom-offlineaudiocontext-length
WebIDL::UnsignedLong OfflineAudioContext::length() const
{
// The size of the buffer in sample-frames. This is the same as the value of the length parameter for the constructor.
return m_length;
}
// https://webaudio.github.io/web-audio-api/#dom-offlineaudiocontext-oncomplete
JS::GCPtr<WebIDL::CallbackType> OfflineAudioContext::oncomplete()
{
return event_handler_attribute(HTML::EventNames::complete);
}
// https://webaudio.github.io/web-audio-api/#dom-offlineaudiocontext-oncomplete
void OfflineAudioContext::set_oncomplete(JS::GCPtr<WebIDL::CallbackType> value)
{
set_event_handler_attribute(HTML::EventNames::complete, value);
}
OfflineAudioContext::OfflineAudioContext(JS::Realm& realm, OfflineAudioContextOptions const&)
: BaseAudioContext(realm)
{
}
OfflineAudioContext::OfflineAudioContext(JS::Realm& realm, WebIDL::UnsignedLong number_of_channels, WebIDL::UnsignedLong length, float sample_rate)
: BaseAudioContext(realm, sample_rate)
, m_length(length)
{
(void)number_of_channels;
}
void OfflineAudioContext::initialize(JS::Realm& realm)
{
Base::initialize(realm);
WEB_SET_PROTOTYPE_FOR_INTERFACE(OfflineAudioContext);
}
void OfflineAudioContext::visit_edges(Cell::Visitor& visitor)
{
Base::visit_edges(visitor);
}
}
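start_rendering() is still a stub. When implemented, an offline context renders exactly [[length]] sample-frames, conventionally stepping the graph one render quantum (128 frames) at a time. A sketch of that outer loop, with process_quantum left as a hypothetical stand-in for running the node graph:

#include <algorithm>
#include <cstdio>

// Sketch: an offline render produces exactly `length` sample-frames,
// stepping the graph one render quantum (128 frames) at a time.
static constexpr unsigned render_quantum_size = 128;

static void render_offline(unsigned length)
{
    unsigned rendered = 0;
    while (rendered < length) {
        unsigned const frames = std::min(render_quantum_size, length - rendered);
        // process_quantum(frames); // hypothetical: run every node for `frames` frames
        rendered += frames;
    }
    std::printf("rendered %u frames\n", rendered);
}

int main()
{
    render_offline(44100); // one second of audio at 44.1 kHz
}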

View file

@ -0,0 +1,54 @@
/*
* Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <LibWeb/Bindings/OfflineAudioContextPrototype.h>
#include <LibWeb/HighResolutionTime/DOMHighResTimeStamp.h>
#include <LibWeb/WebAudio/BaseAudioContext.h>
#include <LibWeb/WebIDL/Types.h>
namespace Web::WebAudio {
// https://webaudio.github.io/web-audio-api/#OfflineAudioContextOptions
struct OfflineAudioContextOptions {
WebIDL::UnsignedLong number_of_channels { 1 };
WebIDL::UnsignedLong length {};
float sample_rate {};
};
// https://webaudio.github.io/web-audio-api/#OfflineAudioContext
class OfflineAudioContext final : public BaseAudioContext {
WEB_PLATFORM_OBJECT(OfflineAudioContext, BaseAudioContext);
JS_DECLARE_ALLOCATOR(OfflineAudioContext);
public:
static WebIDL::ExceptionOr<JS::NonnullGCPtr<OfflineAudioContext>> construct_impl(JS::Realm&, OfflineAudioContextOptions const&);
static WebIDL::ExceptionOr<JS::NonnullGCPtr<OfflineAudioContext>> construct_impl(JS::Realm&,
WebIDL::UnsignedLong number_of_channels, WebIDL::UnsignedLong length, float sample_rate);
virtual ~OfflineAudioContext() override;
WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> start_rendering();
WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> resume();
WebIDL::ExceptionOr<JS::NonnullGCPtr<WebIDL::Promise>> suspend(double suspend_time);
WebIDL::UnsignedLong length() const;
JS::GCPtr<WebIDL::CallbackType> oncomplete();
void set_oncomplete(JS::GCPtr<WebIDL::CallbackType>);
private:
OfflineAudioContext(JS::Realm&, OfflineAudioContextOptions const&);
OfflineAudioContext(JS::Realm&, WebIDL::UnsignedLong number_of_channels, WebIDL::UnsignedLong length, float sample_rate);
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;
WebIDL::UnsignedLong m_length {};
};
}

View file

@ -0,0 +1,21 @@
#import <WebAudio/BaseAudioContext.idl>
// https://webaudio.github.io/web-audio-api/#OfflineAudioContextOptions
dictionary OfflineAudioContextOptions {
unsigned long numberOfChannels = 1;
required unsigned long length;
required float sampleRate;
// FIXME: (AudioContextRenderSizeCategory or unsigned long) renderSizeHint = "default";
};
// https://webaudio.github.io/web-audio-api/#OfflineAudioContext
[Exposed=Window]
interface OfflineAudioContext : BaseAudioContext {
constructor(OfflineAudioContextOptions contextOptions);
constructor(unsigned long numberOfChannels, unsigned long length, float sampleRate);
Promise<AudioBuffer> startRendering();
Promise<undefined> resume();
Promise<undefined> suspend(double suspendTime);
readonly attribute unsigned long length;
attribute EventHandler oncomplete;
};

View file

@ -0,0 +1,78 @@
/*
* Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <LibWeb/Bindings/Intrinsics.h>
#include <LibWeb/Bindings/OscillatorNodePrototype.h>
#include <LibWeb/WebAudio/AudioParam.h>
#include <LibWeb/WebAudio/BaseAudioContext.h>
#include <LibWeb/WebAudio/OscillatorNode.h>
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(OscillatorNode);
OscillatorNode::~OscillatorNode() = default;
WebIDL::ExceptionOr<JS::NonnullGCPtr<OscillatorNode>> OscillatorNode::create(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, OscillatorOptions const& options)
{
return construct_impl(realm, context, options);
}
// https://webaudio.github.io/web-audio-api/#dom-oscillatornode-oscillatornode
WebIDL::ExceptionOr<JS::NonnullGCPtr<OscillatorNode>> OscillatorNode::construct_impl(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, OscillatorOptions const& options)
{
// FIXME: Invoke "Initialize the AudioNode" steps.
TRY(verify_valid_type(realm, options.type));
auto node = realm.vm().heap().allocate<OscillatorNode>(realm, realm, context, options);
return node;
}
OscillatorNode::OscillatorNode(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext> context, OscillatorOptions const& options)
: AudioScheduledSourceNode(realm, context)
, m_frequency(AudioParam::create(realm, options.frequency, -context->nyquist_frequency(), context->nyquist_frequency(), Bindings::AutomationRate::ARate))
{
}
// https://webaudio.github.io/web-audio-api/#dom-oscillatornode-type
Bindings::OscillatorType OscillatorNode::type() const
{
return m_type;
}
// https://webaudio.github.io/web-audio-api/#dom-oscillatornode-type
WebIDL::ExceptionOr<void> OscillatorNode::verify_valid_type(JS::Realm& realm, Bindings::OscillatorType type)
{
// The shape of the periodic waveform. It may directly be set to any of the type constant values except
// for "custom". ⌛ Doing so MUST throw an InvalidStateError exception. The setPeriodicWave() method can
// be used to set a custom waveform, which results in this attribute being set to "custom". The default
// value is "sine". When this attribute is set, the phase of the oscillator MUST be conserved.
if (type == Bindings::OscillatorType::Custom)
return WebIDL::InvalidStateError::create(realm, "Oscillator node type cannot be set to 'custom'"_string);
return {};
}
// https://webaudio.github.io/web-audio-api/#dom-oscillatornode-type
WebIDL::ExceptionOr<void> OscillatorNode::set_type(Bindings::OscillatorType type)
{
TRY(verify_valid_type(realm(), type));
m_type = type;
return {};
}
void OscillatorNode::initialize(JS::Realm& realm)
{
Base::initialize(realm);
WEB_SET_PROTOTYPE_FOR_INTERFACE(OscillatorNode);
}
void OscillatorNode::visit_edges(Cell::Visitor& visitor)
{
Base::visit_edges(visitor);
visitor.visit(m_frequency);
}
}
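For eventual rendering, each non-custom oscillator type reduces to a simple function of accumulated phase. A naive, unbandlimited sketch (a real implementation would band-limit these shapes to avoid aliasing):

#include <cmath>
#include <numbers>

enum class OscillatorType { Sine, Square, Sawtooth, Triangle };

// One sample of a naive oscillator at a phase in [0, 1).
static float naive_sample(OscillatorType type, double phase)
{
    switch (type) {
    case OscillatorType::Sine:
        return static_cast<float>(std::sin(2 * std::numbers::pi * phase));
    case OscillatorType::Square:
        return phase < 0.5 ? 1.0f : -1.0f;
    case OscillatorType::Sawtooth:
        return static_cast<float>(2 * phase - 1);
    case OscillatorType::Triangle:
        return static_cast<float>(1 - 4 * std::abs(phase - 0.5));
    }
    return 0;
}

int main()
{
    // Advance the phase by frequency / sample_rate per frame, wrapping at 1.
    double phase = 0;
    double const increment = 440.0 / 44100.0;
    for (int frame = 0; frame < 4; ++frame) {
        (void)naive_sample(OscillatorType::Sine, phase);
        phase += increment;
        if (phase >= 1)
            phase -= 1;
    }
}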

View file

@ -0,0 +1,56 @@
/*
* Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <LibWeb/Bindings/OscillatorNodePrototype.h>
#include <LibWeb/WebAudio/AudioScheduledSourceNode.h>
namespace Web::WebAudio {
// https://webaudio.github.io/web-audio-api/#OscillatorOptions
struct OscillatorOptions : AudioNodeOptions {
Bindings::OscillatorType type { Bindings::OscillatorType::Sine };
float frequency { 440 };
float detune { 0 };
JS::GCPtr<PeriodicWave> periodic_wave;
};
// https://webaudio.github.io/web-audio-api/#OscillatorNode
class OscillatorNode : public AudioScheduledSourceNode {
WEB_PLATFORM_OBJECT(OscillatorNode, AudioScheduledSourceNode);
JS_DECLARE_ALLOCATOR(OscillatorNode);
public:
virtual ~OscillatorNode() override;
static WebIDL::ExceptionOr<JS::NonnullGCPtr<OscillatorNode>> create(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, OscillatorOptions const& = {});
static WebIDL::ExceptionOr<JS::NonnullGCPtr<OscillatorNode>> construct_impl(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, OscillatorOptions const& = {});
Bindings::OscillatorType type() const;
WebIDL::ExceptionOr<void> set_type(Bindings::OscillatorType);
JS::NonnullGCPtr<AudioParam const> frequency() const { return m_frequency; }
WebIDL::UnsignedLong number_of_inputs() override { return 0; }
WebIDL::UnsignedLong number_of_outputs() override { return 1; }
protected:
OscillatorNode(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, OscillatorOptions const& = {});
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;
private:
static WebIDL::ExceptionOr<void> verify_valid_type(JS::Realm&, Bindings::OscillatorType);
// https://webaudio.github.io/web-audio-api/#dom-oscillatornode-type
Bindings::OscillatorType m_type { Bindings::OscillatorType::Sine };
// https://webaudio.github.io/web-audio-api/#dom-oscillatornode-frequency
JS::NonnullGCPtr<AudioParam> m_frequency;
};
}

View file

@ -0,0 +1,29 @@
#import <WebAudio/AudioScheduledSourceNode.idl>
#import <WebAudio/PeriodicWave.idl>
// https://webaudio.github.io/web-audio-api/#enumdef-oscillatortype
enum OscillatorType {
"sine",
"square",
"sawtooth",
"triangle",
"custom"
};
// https://webaudio.github.io/web-audio-api/#OscillatorOptions
dictionary OscillatorOptions : AudioNodeOptions {
OscillatorType type = "sine";
float frequency = 440;
float detune = 0;
PeriodicWave periodicWave;
};
// https://webaudio.github.io/web-audio-api/#OscillatorNode
[Exposed=Window]
interface OscillatorNode : AudioScheduledSourceNode {
constructor(BaseAudioContext context, optional OscillatorOptions options = {});
attribute OscillatorType type;
readonly attribute AudioParam frequency;
[FIXME] readonly attribute AudioParam detune;
[FIXME] undefined setPeriodicWave(PeriodicWave periodicWave);
};

View file

@ -0,0 +1,35 @@
/*
* Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#include <LibWeb/Bindings/Intrinsics.h>
#include <LibWeb/Bindings/PeriodicWavePrototype.h>
#include <LibWeb/WebAudio/PeriodicWave.h>
#include <LibWeb/WebIDL/ExceptionOr.h>
namespace Web::WebAudio {
JS_DEFINE_ALLOCATOR(PeriodicWave);
// https://webaudio.github.io/web-audio-api/#dom-periodicwave-periodicwave
WebIDL::ExceptionOr<JS::NonnullGCPtr<PeriodicWave>> PeriodicWave::construct_impl(JS::Realm& realm, JS::NonnullGCPtr<BaseAudioContext>, PeriodicWaveOptions const&)
{
return WebIDL::NotSupportedError::create(realm, "FIXME: Implement PeriodicWave::construct_impl"_string);
}
PeriodicWave::~PeriodicWave() = default;
void PeriodicWave::initialize(JS::Realm& realm)
{
Base::initialize(realm);
WEB_SET_PROTOTYPE_FOR_INTERFACE(PeriodicWave);
}
void PeriodicWave::visit_edges(Cell::Visitor& visitor)
{
Base::visit_edges(visitor);
}
}
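construct_impl above is a stub. A PeriodicWave is defined by Fourier coefficients: over one normalized period, x(t) = Σ_k (real[k]·cos(2πkt) + imag[k]·sin(2πkt)), normalized to unit peak unless disableNormalization is set. A sketch of the synthesis sum (normalization omitted):

#include <cmath>
#include <cstddef>
#include <cstdio>
#include <numbers>
#include <vector>

// Sketch: evaluate the waveform at normalized time t in [0, 1) from the
// Fourier coefficients; index 0 (the DC term) is ignored, as in the spec.
static double periodic_wave_sample(std::vector<double> const& real, std::vector<double> const& imag, double t)
{
    double x = 0;
    for (std::size_t k = 1; k < real.size(); ++k)
        x += real[k] * std::cos(2 * std::numbers::pi * k * t) + imag[k] * std::sin(2 * std::numbers::pi * k * t);
    return x;
}

int main()
{
    // real = {0, 0}, imag = {0, 1} is a pure sine at the fundamental.
    std::printf("%f\n", periodic_wave_sample({ 0, 0 }, { 0, 1 }, 0.25)); // 1.000000
}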

View file

@ -0,0 +1,41 @@
/*
* Copyright (c) 2024, Shannon Booth <shannon@serenityos.org>
*
* SPDX-License-Identifier: BSD-2-Clause
*/
#pragma once
#include <AK/Vector.h>
#include <LibJS/Forward.h>
#include <LibWeb/Bindings/PlatformObject.h>
namespace Web::WebAudio {
// https://webaudio.github.io/web-audio-api/#PeriodicWaveConstraints
struct PeriodicWaveConstraints {
bool disable_normalization { false };
};
// https://webaudio.github.io/web-audio-api/#PeriodicWaveOptions
struct PeriodicWaveOptions : PeriodicWaveConstraints {
Optional<Vector<float>> real;
Optional<Vector<float>> imag;
};
// https://webaudio.github.io/web-audio-api/#PeriodicWave
class PeriodicWave : public Bindings::PlatformObject {
WEB_PLATFORM_OBJECT(PeriodicWave, Bindings::PlatformObject);
JS_DECLARE_ALLOCATOR(PeriodicWave);
public:
static WebIDL::ExceptionOr<JS::NonnullGCPtr<PeriodicWave>> construct_impl(JS::Realm&, JS::NonnullGCPtr<BaseAudioContext>, PeriodicWaveOptions const&);
virtual ~PeriodicWave() override;
protected:
virtual void initialize(JS::Realm&) override;
virtual void visit_edges(Cell::Visitor&) override;
};
}

View file

@ -0,0 +1,18 @@
#import <WebAudio/BaseAudioContext.idl>
// https://webaudio.github.io/web-audio-api/#PeriodicWaveConstraints
dictionary PeriodicWaveConstraints {
boolean disableNormalization = false;
};
// https://webaudio.github.io/web-audio-api/#PeriodicWaveOptions
dictionary PeriodicWaveOptions : PeriodicWaveConstraints {
sequence<float> real;
sequence<float> imag;
};
// https://webaudio.github.io/web-audio-api/#PeriodicWave
[Exposed=Window]
interface PeriodicWave {
constructor(BaseAudioContext context, optional PeriodicWaveOptions options = {});
};