diff --git a/packages/dev/core/src/Audio/v2/abstractAudioBus.ts b/packages/dev/core/src/Audio/v2/abstractAudioBus.ts new file mode 100644 index 00000000000..790af127e16 --- /dev/null +++ b/packages/dev/core/src/Audio/v2/abstractAudioBus.ts @@ -0,0 +1,30 @@ +import type { Nullable } from "../../types"; +import type { AudioEngineV2 } from "./audioEngine"; +import { AbstractNamedAudioNode, AudioNodeType } from "./abstractAudioNode"; + +/** + * Options for creating a new audio bus node. + */ +export interface IAbstractAudioBusOptions { + /** + * The volume of the audio bus. + */ + volume?: number; +} + +/** + * Abstract class representing an audio bus node with a volume control. + */ +export abstract class AbstractAudioBus extends AbstractNamedAudioNode { + /** + * The volume of the audio bus. + */ + public volume: number; + + /** @internal */ + constructor(name: string, engine: AudioEngineV2, options: Nullable = null) { + super(name, engine, AudioNodeType.InputOutput); + + this.volume = options?.volume ?? 1; + } +} diff --git a/packages/dev/core/src/Audio/v2/abstractAudioNode.ts b/packages/dev/core/src/Audio/v2/abstractAudioNode.ts new file mode 100644 index 00000000000..8c528c0ef78 --- /dev/null +++ b/packages/dev/core/src/Audio/v2/abstractAudioNode.ts @@ -0,0 +1,210 @@ +import { Observable } from "../../Misc/observable"; +import type { Nullable } from "../../types"; +import type { AudioEngineV2 } from "./audioEngine"; +import { AbstractAudioNodeParent } from "./abstractAudioNodeParent"; + +export enum AudioNodeType { + /** + * Input nodes receive audio data from an upstream node. + */ + Input = 1, + + /** + * Output nodes send audio data to a downstream node. + */ + Output = 2, + + /** + * Input/Output nodes receive audio data from an upstream node and send audio data to a downstream node. + */ + InputOutput = 3, +} + +/** + * Abstract class for an audio node. 
+ */ +export abstract class AbstractAudioNode extends AbstractAudioNodeParent { + // If parent is null, node is owned by audio engine. + private _parent: Nullable = null; + + /** + * The connected downstream audio nodes. + * + * Undefined for input nodes. + */ + protected readonly _connectedDownstreamNodes?: Set | undefined; + + /** + * The connected upstream audio nodes. + * + * Undefined for output nodes. + */ + protected readonly _connectedUpstreamNodes?: Set | undefined; + + /** + * The audio engine this node belongs to. + */ + public readonly engine: AudioEngineV2; + + /** + * Observable for when the audio node is disposed. + */ + public readonly onDisposeObservable = new Observable(); + + /** @internal */ + constructor(engine: AudioEngineV2, nodeType: AudioNodeType, parent: Nullable = null) { + super(); + + this.engine = engine; + this.parent = parent; + + if (nodeType | AudioNodeType.Input) { + this._connectedDownstreamNodes = new Set(); + } + + if (nodeType | AudioNodeType.Output) { + this._connectedUpstreamNodes = new Set(); + } + } + + /** + * Releases associated resources. + */ + public override dispose(): void { + super.dispose(); + + this.parent.children.delete(this); + + if (this._connectedDownstreamNodes) { + for (const node of Array.from(this._connectedDownstreamNodes)) { + this._disconnect(node); + } + this._connectedDownstreamNodes.clear(); + } + + if (this._connectedUpstreamNodes) { + for (const node of Array.from(this._connectedUpstreamNodes)) { + node._disconnect(this); + } + this._connectedUpstreamNodes.clear(); + } + + this.onDisposeObservable.notifyObservers(this); + this.onDisposeObservable.clear(); + } + + /** + * The parent audio node. + */ + public get parent(): AbstractAudioNodeParent { + return this._parent ?? this.engine; + } + + /** + * Sets the parent audio node. 
+ */ + public set parent(parent: Nullable) { + if (this._parent === parent) { + return; + } + + this.parent.children.delete(this); + this._parent = parent; + this.parent.children.add(this); + } + + /** + * The audio node's type. + */ + public get type(): AudioNodeType { + let type = 0; + + if (this._connectedDownstreamNodes) { + type |= AudioNodeType.Output; + } + + if (this._connectedUpstreamNodes) { + type |= AudioNodeType.Input; + } + + return type; + } + + /** + * Gets a string identifying the name of the class + * @returns the class's name as a string + */ + public abstract getClassName(): string; + + /** + * Connect to a downstream audio input node. + * @param node - The downstream audio input node to connect + */ + protected _connect(node: AbstractAudioNode): void { + if (!this._connectedDownstreamNodes) { + return; + } + + if (this._connectedDownstreamNodes.has(node)) { + return; + } + + if (!node._onConnect(this)) { + return; + } + + this._connectedDownstreamNodes.add(node); + } + + /** + * Disconnect from a downstream audio input node. + * @param node - The downstream audio input node to disconnect + */ + protected _disconnect(node: AbstractAudioNode): void { + if (!this._connectedDownstreamNodes) { + return; + } + + this._connectedDownstreamNodes.delete(node); + + node._onDisconnect(this); + } + + /** + * Called when an upstream audio output node is connecting. + * @param node - The connecting upstream audio node + * @returns `true` if the connection succeeds; otherwise `false` + */ + protected _onConnect(node: AbstractAudioNode): boolean { + if (!this._connectedUpstreamNodes) { + return false; + } + + this._connectedUpstreamNodes.add(node); + + return true; + } + + /** + * Called when an upstream audio output node disconnects. + * @param node - The disconnecting upstream audio node + */ + protected _onDisconnect(node: AbstractAudioNode): void { + this._connectedUpstreamNodes?.delete(node); + } +} + +/** + * Abstract class for an audio node with a name. 
+ */ +export abstract class AbstractNamedAudioNode extends AbstractAudioNode { + /** + * The name of the audio node. + */ + public name: string; + + constructor(name: string, engine: AudioEngineV2, nodeType: AudioNodeType) { + super(engine, nodeType); + this.name = name; + } +} diff --git a/packages/dev/core/src/Audio/v2/abstractAudioNodeParent.ts b/packages/dev/core/src/Audio/v2/abstractAudioNodeParent.ts new file mode 100644 index 00000000000..1fad948663b --- /dev/null +++ b/packages/dev/core/src/Audio/v2/abstractAudioNodeParent.ts @@ -0,0 +1,24 @@ +import type { IDisposable } from "../../scene"; +import type { AbstractAudioNode } from "./abstractAudioNode"; + +/** + * Abstract base class for audio node parents. + */ +export class AbstractAudioNodeParent implements IDisposable { + /** + * The children audio nodes. + */ + public readonly children = new Set(); + + /** + * Releases associated resources. + */ + public dispose(): void { + if (this.children) { + for (const node of Array.from(this.children)) { + node.dispose(); + } + this.children.clear(); + } + } +} diff --git a/packages/dev/core/src/Audio/v2/abstractSound.ts b/packages/dev/core/src/Audio/v2/abstractSound.ts new file mode 100644 index 00000000000..bd43464ead1 --- /dev/null +++ b/packages/dev/core/src/Audio/v2/abstractSound.ts @@ -0,0 +1,239 @@ +import { Observable } from "../../Misc/observable"; +import type { Nullable } from "../../types"; +import type { AudioEngineV2 } from "./audioEngine"; +import { AbstractNamedAudioNode, AudioNodeType } from "./abstractAudioNode"; +import type { AbstractSoundInstance } from "./abstractSoundInstance"; +import type { AbstractPrimaryAudioBus } from "./audioBus"; +import { SoundState } from "./soundState"; + +/** + * Options for creating a new sound. + */ +export interface ISoundOptions { + /** + * Whether the sound should start playing immediately. + */ + autoplay?: boolean; + /** + * Whether the sound should loop. 
+ */ + loop?: boolean; + /** + * The maximum number of instances that can play at the same time. + */ + maxInstances?: number; + /** + * The volume of the sound. + */ + volume?: number; + /** + * The output bus for the sound. + */ + outputBus?: AbstractPrimaryAudioBus; + /** + * The sound's start offset in seconds. + */ + startOffset?: number; +} + +/** + * Abstract class representing a sound in the audio engine. + */ +export abstract class AbstractSound extends AbstractNamedAudioNode { + private _state: SoundState = SoundState.Stopped; + + // Owned by AbstractAudioEngine. + + // Non-owning. + protected _soundInstances = new Set(); + + protected _outputBus: Nullable = null; + + /** + * Whether the sound should start playing immediately. + */ + public readonly autoplay: boolean; + + /** + * Whether the sound should loop. + */ + public loop: boolean; + + /** + * The maximum number of instances that can play at the same time. + */ + public maxInstances: number; + + /** + * The volume of the sound. + */ + public abstract get volume(): number; + public abstract set volume(value: number); + + /** + * The sound's start offset in seconds. + */ + public startOffset: number; + + /** + * Observable for when the sound ends. + */ + public onEndedObservable = new Observable(); + + /** + * The state of the sound. + */ + public get state(): SoundState { + return this._state; + } + + /** + * The output bus for the sound. + */ + public get outputBus(): Nullable { + return this._outputBus; + } + + public set outputBus(outputBus: Nullable) { + if (this._outputBus === outputBus) { + return; + } + + if (this._outputBus) { + this._disconnect(this._outputBus); + } + + this._outputBus = outputBus; + + if (this._outputBus) { + this._connect(this._outputBus); + } + } + + /** @internal */ + constructor(name: string, engine: AudioEngineV2, options: Nullable = null) { + super(name, engine, AudioNodeType.Output); + + this.autoplay = options?.autoplay ?? false; + this.loop = options?.loop ?? 
false; + this.maxInstances = options?.maxInstances ?? Infinity; + this.startOffset = options?.startOffset ?? 0; + } + + /** + * Releases associated resources. + */ + public override dispose(): void { + super.dispose(); + + this.stop(); + + this._outputBus = null; + this._soundInstances.clear(); + this.onEndedObservable.clear(); + + this.onDisposeObservable.notifyObservers(this); + } + + protected abstract _createSoundInstance(): AbstractSoundInstance; + + /** + * Pauses the sound. + */ + public pause(): void { + if (!this._soundInstances) { + return; + } + + for (const instance of Array.from(this._soundInstances)) { + instance.pause(); + } + + this._state = SoundState.Paused; + } + + /** + * Resumes the sound. + */ + public resume(): void { + if (this._state !== SoundState.Paused) { + return; + } + + if (!this._soundInstances) { + return; + } + + for (const instance of Array.from(this._soundInstances)) { + instance.resume(); + } + + this._state = SoundState.Started; + } + + /** + * Stops the sound. + * @param waitTime - The time to wait before stopping the sound in seconds. 
+ */ + public stop(waitTime: Nullable = null): void { + if (waitTime && 0 < waitTime) { + this._state = SoundState.Stopping; + } else { + this._state = SoundState.Stopped; + } + + if (!this._soundInstances) { + return; + } + + for (const instance of Array.from(this._soundInstances)) { + instance.stop(waitTime); + } + } + + protected get _isPaused(): boolean { + return this._state === SoundState.Paused && this._soundInstances.size > 0; + } + + protected _onSoundInstanceEnded: (instance: AbstractSoundInstance) => void = (instance) => { + this._soundInstances.delete(instance); + + if (this._soundInstances.size === 0) { + this._state = SoundState.Stopped; + this.onEndedObservable.notifyObservers(this); + } + }; + + protected _play( + instance: AbstractSoundInstance, + waitTime: Nullable = null, + startOffset: Nullable = null, + duration: Nullable = null + ): Nullable { + if (this.state === SoundState.Paused && this._soundInstances.size > 0) { + this.resume(); + return null; + } + + instance.onEndedObservable.addOnce(this._onSoundInstanceEnded); + instance.play(waitTime, startOffset, duration); + + this._soundInstances.add(instance); + + this._state = SoundState.Started; + + return instance; + } + + protected _stopExcessInstances(): void { + if (this.maxInstances < Infinity) { + const numberOfInstancesToStop = Array.from(this._soundInstances).filter((instance) => instance.state === SoundState.Started).length - this.maxInstances; + const it = this._soundInstances.values(); + + for (let i = 0; i < numberOfInstancesToStop; i++) { + const instance = it.next().value; + instance.stop(); + } + } + } +} diff --git a/packages/dev/core/src/Audio/v2/abstractSoundInstance.ts b/packages/dev/core/src/Audio/v2/abstractSoundInstance.ts new file mode 100644 index 00000000000..8e1e902483f --- /dev/null +++ b/packages/dev/core/src/Audio/v2/abstractSoundInstance.ts @@ -0,0 +1,55 @@ +import { Observable } from "../../Misc/observable"; +import type { Nullable } from "../../types"; +import { 
AbstractAudioNode, AudioNodeType } from "./abstractAudioNode"; +import type { AbstractSound } from "./abstractSound"; +import { SoundState } from "./soundState"; + +/** @internal */ +export abstract class AbstractSoundInstance extends AbstractAudioNode { + protected _state: SoundState = SoundState.Stopped; + protected _source: AbstractSound; + protected _startOffset: number = 0; + + /** Observable triggered when the sound instance's playback ends */ + public readonly onEndedObservable = new Observable(); + + /** Observable triggered when the sound instance's state changes */ + public readonly onStateChangedObservable = new Observable(); + + /** @internal */ + constructor(source: AbstractSound) { + super(source.engine, AudioNodeType.Output); + + this._source = source; + this._startOffset = source.startOffset; + } + + /** @internal */ + public override dispose(): void { + super.dispose(); + this.stop(); + this.onStateChangedObservable.clear(); + } + + public abstract get startTime(): number; + public abstract get currentTime(): number; + + /** The playback state of sound instance */ + public get state(): SoundState { + return this._state; + } + + public abstract play(waitTime?: Nullable, startOffset?: Nullable, duration?: Nullable): void; + public abstract pause(): void; + public abstract resume(): void; + public abstract stop(waitTime?: Nullable): void; + + protected _setState(value: SoundState) { + if (this._state === value) { + return; + } + + this._state = value; + this.onStateChangedObservable.notifyObservers(this); + } +} diff --git a/packages/dev/core/src/Audio/v2/audioBus.ts b/packages/dev/core/src/Audio/v2/audioBus.ts new file mode 100644 index 00000000000..f12a3075200 --- /dev/null +++ b/packages/dev/core/src/Audio/v2/audioBus.ts @@ -0,0 +1,98 @@ +import type { Nullable } from "../../types"; +import type { IAbstractAudioBusOptions } from "./abstractAudioBus"; +import { AbstractAudioBus } from "./abstractAudioBus"; +import type { AudioEngineV2 } from 
"./audioEngine"; +import type { AudioPositioner } from "./audioPositioner"; +import type { AudioSender } from "./audioSender"; +import type { MainAudioBus } from "./mainAudioBus"; + +export type AbstractPrimaryAudioBus = MainAudioBus | AudioBus; + +/** + * Options for creating a new audio bus. + */ +export interface IAudioBusOptions extends IAbstractAudioBusOptions { + /** + * Whether to enable the positioner. + */ + enablePositioner?: boolean; + /** + * The output bus of the audio bus. + */ + outputBus?: AbstractPrimaryAudioBus; +} + +/** + * Abstract class for an audio bus. + */ +export abstract class AudioBus extends AbstractAudioBus { + private _outputBus: Nullable = null; + private _positioner: Nullable = null; + + /** + * The sender of the audio bus. + */ + public readonly sender: AudioSender; + + /** @internal */ + constructor(name: string, engine: AudioEngineV2, options: Nullable = null) { + super(name, engine); + + if (options?.enablePositioner) { + this.enablePositioner(); + } + + this.sender = {} as any; //engine.createSender(this); + + if (options?.outputBus) { + this.outputBus = options.outputBus; + } + } + + /** + * The positioner of the audio bus. + */ + public get positioner(): Nullable { + return this._positioner; + } + + /** + * Enables the positioner of the audio bus. + * @returns A promise that resolves when the positioner is enabled. + */ + public async enablePositioner() { + if (this._positioner) { + return; + } + + this._positioner = await this._createPositioner(); + } + + /** + * Gets the output bus of the audio bus. + */ + public get outputBus(): Nullable { + return this._outputBus; + } + + /** + * Sets the output bus of the audio bus. 
+ */ + public set outputBus(outputBus: Nullable) { + if (this._outputBus === outputBus) { + return; + } + + if (this._outputBus) { + this._disconnect(this._outputBus); + } + + this._outputBus = outputBus; + + if (this._outputBus) { + this._connect(this._outputBus); + } + } + + protected abstract _createPositioner(): Promise; +} diff --git a/packages/dev/core/src/Audio/v2/audioEngine.ts b/packages/dev/core/src/Audio/v2/audioEngine.ts new file mode 100644 index 00000000000..05067d251ef --- /dev/null +++ b/packages/dev/core/src/Audio/v2/audioEngine.ts @@ -0,0 +1,128 @@ +import type { Nullable } from "../../types"; +import type { AbstractAudioNode } from "./abstractAudioNode"; +import { AbstractAudioNodeParent } from "./abstractAudioNodeParent"; +import type { MainAudioBus } from "./mainAudioBus"; +import type { AbstractSound } from "./abstractSound"; +import type { AbstractSoundInstance } from "./abstractSoundInstance"; +import type { SpatialAudioListener } from "./spatialAudioListener"; + +/** + * Abstract base class for audio engines. + */ +export abstract class AudioEngineV2 extends AbstractAudioNodeParent { + // Owns top-level AbstractAudioNode objects. + // Owns all AbstractSound objects. + + // Not owned, but all items should be in parent's `children` container, too, which is owned. + private readonly _mainBuses = new Set(); + + private _defaultMainBus: Nullable = null; + + // Owned + private readonly _sounds = new Set(); + + // Not owned, but all items should be in parent's `children` container, too, which is owned. + private readonly _soundInstances = new Set(); + + /** + * The spatial audio listeners. + */ + public readonly listeners = new Set(); // Owned + + /** + * `true` if the engine is a WebAudio engine; otherwise `false`. + */ + public abstract get isWebAudio(): boolean; + + /** + * The current state of the audio engine. + */ + public abstract get state(): string; + + /** + * The current time in seconds. 
+ */ + public abstract get currentTime(): number; + + /** + * The main output node. + */ + public abstract get mainOutput(): Nullable; + + /** + * The default main bus. + */ + public get defaultMainBus(): Nullable { + if (this._mainBuses.size === 0) { + return null; + } + + if (!this._defaultMainBus) { + this._defaultMainBus = Array.from(this._mainBuses)[0]; + } + + return this._defaultMainBus; + } + + /** + * Releases associated resources. + */ + public override dispose(): void { + super.dispose(); + + this._soundInstances.clear(); + + if (this.listeners) { + for (const listener of Array.from(this.listeners)) { + listener.dispose(); + } + this.listeners.clear(); + } + + for (const source of Array.from(this._sounds)) { + source.dispose(); + } + this._sounds.clear(); + } + + /** + * Checks if the specified format is valid. + * @param format The format to check. The format is the audio file extension, such as "mp3" or "wav". + * @returns `true` if the format is valid; otherwise `false`. + */ + public abstract formatIsValid(format: string): boolean; + + /** + * Pauses the audio engine if it is running. + * @param waitTime The time in seconds to wait before pausing the audio engine. + * @returns A promise that resolves when the audio engine is paused. + */ + public abstract pause(waitTime?: Nullable): Promise; + + /** + * Resumes the audio engine if it is not running. + * @returns A promise that resolves when the audio engine is running. 
+ */ + public abstract resume(): Promise; + + protected _addMainBus(mainBus: MainAudioBus): void { + this._mainBuses.add(mainBus); + mainBus.onDisposeObservable.addOnce(() => { + this._mainBuses.delete(mainBus); + }); + } + + protected _addSound(sound: AbstractSound): void { + this._sounds.add(sound); + sound.onDisposeObservable.addOnce(() => { + this._sounds.delete(sound); + }); + } + + protected _addSoundInstance(soundInstance: AbstractSoundInstance): void { + this._soundInstances.add(soundInstance); + soundInstance.onDisposeObservable.addOnce(() => { + this._soundInstances.delete(soundInstance); + }); + } +} diff --git a/packages/dev/core/src/Audio/v2/audioPositioner.ts b/packages/dev/core/src/Audio/v2/audioPositioner.ts new file mode 100644 index 00000000000..efd52add02f --- /dev/null +++ b/packages/dev/core/src/Audio/v2/audioPositioner.ts @@ -0,0 +1,114 @@ +import type { Quaternion, Vector3 } from "../../Maths/math.vector"; +import type { TransformNode } from "../../Meshes/transformNode"; +import type { Nullable } from "../../types"; +import { AbstractAudioNode, AudioNodeType } from "./abstractAudioNode"; +import type { SpatialAudioListener } from "./spatialAudioListener"; +import type { ISpatialAudioTransformOptions } from "./spatialAudioTransform"; +import { SpatialAudioTransform } from "./spatialAudioTransform"; + +/** + * Options for creating a new audio positioner. + */ +export interface IAudioPositionerOptions extends ISpatialAudioTransformOptions {} + +/** + * Abstract base class for audio positioners. + */ +export abstract class AudioPositioner extends AbstractAudioNode { + // Not owned + private _pannerGain: number = 1; + private _pannerPosition: Nullable = null; + private _spatialTransform: SpatialAudioTransform; + private _spatializerGain: number = 1; + + // TODO: Add spatializer cone angles/volumes, etc ... + + /** + * The spatial audio listeners. 
+ */ + public readonly listeners = new Set(); + + /** @internal */ + constructor(parent: AbstractAudioNode, options: Nullable = null) { + super(parent.engine, AudioNodeType.InputOutput, parent); + + this._spatialTransform = new SpatialAudioTransform(options); + } + + /** + * Releases associated resources. + */ + public override dispose(): void { + super.dispose(); + + this._spatialTransform.dispose(); + + this.listeners?.clear(); + } + + /** + * The position of the audio positioner. + */ + public get position(): Vector3 { + return this._spatialTransform.position; + } + + public set position(position: Vector3) { + this._spatialTransform.position = position; + } + + /** + * The rotation of the audio positioner. + */ + public get rotation(): Quaternion { + return this._spatialTransform.rotation; + } + + public set rotation(rotation: Quaternion) { + this._spatialTransform.rotation = rotation; + } + + /** + * The scale of the audio positioner. + */ + public get attachedTransformNode(): Nullable { + return this._spatialTransform.attachedTransformNode; + } + + public set attachedTransformNode(node: Nullable) { + this._spatialTransform.attachedTransformNode = node; + } + + /** + * The spatializer gain of the audio positioner. + */ + public get spatializerGain(): number { + return this._spatializerGain; + } + + public set spatializerGain(value: number) { + this._spatializerGain = value; + } + + /** + * The panner gain of the audio positioner. + */ + public get pannerGain(): number { + return this._pannerGain; + } + + public set pannerGain(value: number) { + this._pannerGain = value; + } + + /** + * The panner position of the audio positioner. 
+ */ + public get pannerPosition(): Nullable { + return this._pannerPosition; + } + + public set pannerPosition(value: Nullable) { + this._pannerPosition = value; + } +} diff --git a/packages/dev/core/src/Audio/v2/audioSend.ts b/packages/dev/core/src/Audio/v2/audioSend.ts new file mode 100644 index 00000000000..69f4fd8ae62 --- /dev/null +++ b/packages/dev/core/src/Audio/v2/audioSend.ts @@ -0,0 +1,81 @@ +import type { Nullable } from "../../types"; +import type { AudioEngineV2 } from "./audioEngine"; +import { AbstractAudioNode, AudioNodeType } from "./abstractAudioNode"; +import type { AuxiliaryAudioBus } from "./auxilliaryAudioBus"; + +/** + * The type of send. + */ +export enum AudioSendType { + /** + * The send is post-fader. + */ + PostFader, + /** + * The send is pre-fader. + */ + PreFader, +} + +/** + * Options for creating a new audio send. + */ +export interface IAudioSendOptions { + /** + * The type of send. + */ + sendType?: AudioSendType; +} + +/** + * Owned by AbstractAudioEngine. + */ +export abstract class AudioSend extends AbstractAudioNode { + private _outputBus: Nullable = null; + private _sendType: AudioSendType; + + /** @internal */ + constructor(engine: AudioEngineV2, options: Nullable = null) { + super(engine, AudioNodeType.InputOutput); + + this._sendType = options?.sendType ?? AudioSendType.PostFader; + } + + /** + * The output bus. + */ + public get outputBus(): Nullable { + return this._outputBus; + } + + public set outputBus(outputBus: Nullable) { + if (this._outputBus === outputBus) { + return; + } + + if (this._outputBus) { + this._disconnect(this._outputBus); + } + + this._outputBus = outputBus; + + if (this._outputBus) { + this._connect(this._outputBus); + } + } + + /** + * The type of send. 
+ */ + public get sendType(): AudioSendType { + return this._sendType; + } + + public set sendType(sendType: AudioSendType) { + if (this._sendType === sendType) { + return; + } + + this._sendType = sendType; + } +} diff --git a/packages/dev/core/src/Audio/v2/audioSender.ts b/packages/dev/core/src/Audio/v2/audioSender.ts new file mode 100644 index 00000000000..5a1c5eba807 --- /dev/null +++ b/packages/dev/core/src/Audio/v2/audioSender.ts @@ -0,0 +1,37 @@ +import { AbstractAudioNode, AudioNodeType } from "./abstractAudioNode"; +import type { AudioSend } from "./audioSend"; + +/** + * Abstract base class for audio senders. + */ +export abstract class AudioSender extends AbstractAudioNode { + private _sends = new Set(); + + /** @internal */ + constructor(parent: AbstractAudioNode) { + super(parent.engine, AudioNodeType.InputOutput, parent); + } + + /** + * The audio sends. + */ + public get sends(): IterableIterator { + return this._sends.values(); + } + + /** + * Adds a send to the audio sender. + * @param send - The send to add. + */ + public addSend(send: AudioSend): void { + this._sends.add(send); + } + + /** + * Removes a send from the audio sender. + * @param send - The send to remove. + */ + public removeSend(send: AudioSend): void { + this._sends.delete(send); + } +} diff --git a/packages/dev/core/src/Audio/v2/audioUtils.ts b/packages/dev/core/src/Audio/v2/audioUtils.ts new file mode 100644 index 00000000000..3adc4a99bb4 --- /dev/null +++ b/packages/dev/core/src/Audio/v2/audioUtils.ts @@ -0,0 +1,8 @@ +/** + * Converts a pitch interval in cents to a playback rate. + * @param cents - The pitch interval in cents. + * @returns The playback rate. 
+ */ +export function centsToPlaybackRate(cents: number): number { + return Math.pow(2, cents / 1200); +} diff --git a/packages/dev/core/src/Audio/v2/auxilliaryAudioBus.ts b/packages/dev/core/src/Audio/v2/auxilliaryAudioBus.ts new file mode 100644 index 00000000000..a8b3e2c7101 --- /dev/null +++ b/packages/dev/core/src/Audio/v2/auxilliaryAudioBus.ts @@ -0,0 +1,92 @@ +import type { Nullable } from "../../types"; +import type { IAbstractAudioBusOptions } from "./abstractAudioBus"; +import { AbstractAudioBus } from "./abstractAudioBus"; +import type { AudioEngineV2 } from "./audioEngine"; +import type { AudioPositioner } from "./audioPositioner"; +import type { AudioSender } from "./audioSender"; + +/** + * Options for creating a new auxilliary audio bus. + */ +export interface IAuxilliaryAudioBusOptions extends IAbstractAudioBusOptions { + /** + * Whether to enable a positioner for the auxilliary audio bus. + */ + enablePositioner?: boolean; + /** + * The output bus for the auxilliary audio bus. + */ + outputBus?: AbstractAudioBus; +} + +/** + * Abstract class representing an auxilliary audio bus in the audio engine. + */ +export abstract class AuxiliaryAudioBus extends AbstractAudioBus { + private _positioner: Nullable = null; + private _outputBus: Nullable = null; + + /** + * The sender for the auxilliary audio bus. + */ + public readonly sender: AudioSender; + + /** @internal */ + constructor(name: string, engine: AudioEngineV2, options: Nullable = null) { + super(name, engine); + + if (options?.enablePositioner) { + this.enablePositioner(); + } + + this.sender = {} as any; //engine.createSender(this); + + if (options?.outputBus) { + this.outputBus = options.outputBus; + } + } + + /** + * The positioner for the auxilliary audio bus. + */ + public get positioner(): Nullable { + return this._positioner; + } + + /** + * Enables the positioner for the auxilliary audio bus. + * @returns A promise that resolves when the positioner is enabled. 
+ */ + public async enablePositioner() { + if (this._positioner) { + return; + } + + this._positioner = await this._createPositioner(); + } + + /** + * The output bus for the auxilliary audio bus. + */ + public get outputBus(): Nullable { + return this._outputBus; + } + + public set outputBus(outputBus: Nullable) { + if (this._outputBus === outputBus) { + return; + } + + if (this._outputBus) { + this._disconnect(this._outputBus); + } + + this._outputBus = outputBus; + + if (this._outputBus) { + this._connect(this._outputBus); + } + } + + protected abstract _createPositioner(): Promise; +} diff --git a/packages/dev/core/src/Audio/v2/index.ts b/packages/dev/core/src/Audio/v2/index.ts new file mode 100644 index 00000000000..4f8c16a97b3 --- /dev/null +++ b/packages/dev/core/src/Audio/v2/index.ts @@ -0,0 +1,9 @@ +// TODO: What's the best way to expose the WebAudio factory functions without exposing the WebAudio classes? +export { SoundState } from "./soundState"; +export { CreateAudioEngineAsync } from "./webAudio/webAudioEngine"; +export { CreateMainAudioBusAsync } from "./webAudio/webAudioMainBus"; +export { CreateMainAudioOutputAsync } from "./webAudio/webAudioMainOutput"; +export { CreateAudioPositionerAsync } from "./webAudio/webAudioPositioner"; +export { CreateAudioSenderAsync } from "./webAudio/webAudioSender"; +export { CreateSoundAsync, CreateSoundBufferAsync } from "./webAudio/webAudioStaticSound"; +export { CreateStreamingSoundAsync } from "./webAudio/webAudioStreamingSound"; diff --git a/packages/dev/core/src/Audio/v2/mainAudioBus.ts b/packages/dev/core/src/Audio/v2/mainAudioBus.ts new file mode 100644 index 00000000000..2bc8dd0c406 --- /dev/null +++ b/packages/dev/core/src/Audio/v2/mainAudioBus.ts @@ -0,0 +1,12 @@ +import { AbstractAudioBus } from "./abstractAudioBus"; +import type { AudioEngineV2 } from "./audioEngine"; + +/** + * Abstract class representing the main audio bus in the audio engine. 
+ */ +export abstract class MainAudioBus extends AbstractAudioBus { + /** @internal */ + constructor(name: string, engine: AudioEngineV2) { + super(name, engine); + } +} diff --git a/packages/dev/core/src/Audio/v2/mainAudioOutput.ts b/packages/dev/core/src/Audio/v2/mainAudioOutput.ts new file mode 100644 index 00000000000..97749f8babf --- /dev/null +++ b/packages/dev/core/src/Audio/v2/mainAudioOutput.ts @@ -0,0 +1,12 @@ +import type { AudioEngineV2 } from "./audioEngine"; +import { AbstractAudioNode, AudioNodeType } from "./abstractAudioNode"; + +/** + * Abstract class for the main audio output node. + */ +export abstract class MainAudioOutput extends AbstractAudioNode { + /** @internal */ + constructor(engine: AudioEngineV2) { + super(engine, AudioNodeType.Input); + } +} diff --git a/packages/dev/core/src/Audio/v2/soundState.ts b/packages/dev/core/src/Audio/v2/soundState.ts new file mode 100644 index 00000000000..524ba04a767 --- /dev/null +++ b/packages/dev/core/src/Audio/v2/soundState.ts @@ -0,0 +1,25 @@ +/** + * The state of a sound. + */ +export enum SoundState { + /** + * The sound is waiting for its instances to stop. + */ + Stopping, + /** + * The sound is stopped. + */ + Stopped, + /** + * The sound is waiting for its instances to start. + */ + Starting, + /** + * The sound has started playing. + */ + Started, + /** + * The sound is paused. 
+ */ + Paused, +} diff --git a/packages/dev/core/src/Audio/v2/spatialAudioListener.ts b/packages/dev/core/src/Audio/v2/spatialAudioListener.ts new file mode 100644 index 00000000000..ea043c3b245 --- /dev/null +++ b/packages/dev/core/src/Audio/v2/spatialAudioListener.ts @@ -0,0 +1,55 @@ +import type { Quaternion, Vector3 } from "../../Maths/math.vector"; +import type { TransformNode } from "../../Meshes/transformNode"; +import type { IDisposable } from "../../scene"; +import type { Nullable } from "../../types"; +import type { AudioEngineV2 } from "./audioEngine"; +import type { ISpatialAudioTransformOptions } from "./spatialAudioTransform"; +import { SpatialAudioTransform } from "./spatialAudioTransform"; + +/** @internal */ +export class SpatialAudioListener implements IDisposable { + private _engine: AudioEngineV2; + private _spatialTransform: SpatialAudioTransform; + + /** @internal */ + constructor(engine: AudioEngineV2, options: Nullable = null) { + this._engine = engine; + this._spatialTransform = new SpatialAudioTransform(options); + + this._engine.listeners.add(this); + } + + /** @internal */ + public dispose(): void { + this._engine.listeners.delete(this); + + this._spatialTransform.dispose(); + } + + /** @internal */ + public get position(): Vector3 { + return this._spatialTransform.position; + } + + public set position(position: Vector3) { + this._spatialTransform.position = position; + } + + /** @internal */ + public get rotation(): Quaternion { + return this._spatialTransform.rotation; + } + + public set rotation(rotation: Quaternion) { + this._spatialTransform.rotation = rotation; + } + + /** @internal */ + public get attachedTransformNode(): Nullable { + return this._spatialTransform.attachedTransformNode; + } + + public set attachedTransformNode(node: Nullable) { + this._spatialTransform.attachedTransformNode = node; + } +} diff --git a/packages/dev/core/src/Audio/v2/spatialAudioTransform.ts b/packages/dev/core/src/Audio/v2/spatialAudioTransform.ts 
new file mode 100644 index 00000000000..cad69e5e57d --- /dev/null +++ b/packages/dev/core/src/Audio/v2/spatialAudioTransform.ts @@ -0,0 +1,113 @@ +import { Quaternion, Vector3 } from "../../Maths/math.vector"; +import type { TransformNode } from "../../Meshes/transformNode"; +import type { IDisposable } from "../../scene"; +import type { Nullable } from "../../types"; + +/** + * Options for creating a new spatial audio transform. + */ +export interface ISpatialAudioTransformOptions { + /** + * The position of the transform. + */ + position?: Vector3; + /** + * The rotation of the transform. + */ + rotation?: Quaternion; + /** + * The transform node to attach to. + */ + attachedTransformNode?: Nullable; +} + +/** @internal */ +export class SpatialAudioTransform implements IDisposable { + private _attachedTransformNode: Nullable = null; + private _position: Vector3; + private _positionDirty: boolean = false; + private _rotation: Quaternion; + private _rotationDirty: boolean = false; + + /** @internal */ + constructor(options: Nullable = null) { + this._position = options?.position ?? Vector3.Zero(); + this._rotation = options?.rotation ?? Quaternion.Identity(); + this._attachedTransformNode = options?.attachedTransformNode ?? 
null; + } + + /** @internal */ + public dispose(): void { + this._detachFromTransformNode(); + this._attachedTransformNode = null; + } + + /** @internal */ + public get position(): Vector3 { + this._updatePosition(); + return this._position; + } + + public set position(position: Vector3) { + this._position.copyFrom(position); + } + + /** @internal */ + public get rotation(): Quaternion { + this._updateRotation(); + return this._rotation; + } + + public set rotation(rotation: Quaternion) { + this._rotation.copyFrom(rotation); + } + + /** @internal */ + public get attachedTransformNode(): Nullable { + return this._attachedTransformNode; + } + + public set attachedTransformNode(node: Nullable) { + this._detachFromTransformNode(); + + this._attachedTransformNode = node; + + if (node) { + node.onAfterWorldMatrixUpdateObservable.add(this._onAttachedTransformNodeWorldMatrixUpdated.bind(this)); + this._positionDirty = true; + this._rotationDirty = true; + } else { + this._positionDirty = false; + this._rotationDirty = false; + } + } + + private _detachFromTransformNode() { + if (this._attachedTransformNode) { + this._attachedTransformNode.onAfterWorldMatrixUpdateObservable.removeCallback(this._onAttachedTransformNodeWorldMatrixUpdated); + } + } + + private _onAttachedTransformNodeWorldMatrixUpdated(): void { + this._positionDirty = true; + this._rotationDirty = true; + } + + private _updatePosition() { + if (!this._positionDirty) { + return; + } + this._positionDirty = false; + + this._attachedTransformNode!.getWorldMatrix().decompose(undefined, undefined, this._position); + } + + private _updateRotation() { + if (!this._rotationDirty) { + return; + } + this._rotationDirty = false; + + this._attachedTransformNode!.getWorldMatrix().decompose(undefined, this._rotation); + } +} diff --git a/packages/dev/core/src/Audio/v2/staticSound.ts b/packages/dev/core/src/Audio/v2/staticSound.ts new file mode 100644 index 00000000000..fbbafb0c0cd --- /dev/null +++ 
b/packages/dev/core/src/Audio/v2/staticSound.ts @@ -0,0 +1,101 @@ +import type { Nullable } from "../../types"; +import type { AudioEngineV2 } from "./audioEngine"; +import type { ISoundOptions } from "./abstractSound"; +import { AbstractSound } from "./abstractSound"; +import type { StaticSoundBuffer } from "./staticSoundBuffer"; +import type { StaticSoundInstance } from "./staticSoundInstance"; + +/** + * Options for creating a new static sound. + */ +export interface IStaticSoundOptions extends ISoundOptions { + /** + * How long to play the sound in seconds. + */ + duration?: number; + /** + * The start of the loop range in seconds. + */ + loopStart?: number; + /** + * The end of the loop range in seconds. + */ + loopEnd?: number; + /** + * The pitch of the sound. + */ + pitch?: number; + /** + * The playback rate of the sound. + */ + playbackRate?: number; + /** + * Whether to skip codec checking before attempting to load each source URL when `source` is a string array. + */ + skipCodecCheck?: boolean; +} + +/** + * Abstract class representing a static sound in the audio engine. + */ +export abstract class StaticSound extends AbstractSound { + /** + * How long to play the sound in seconds. + */ + public duration: number; + + /** + * The start of the loop range in seconds. + */ + public loopStart: number; + + /** + * The end of the loop range in seconds. + */ + public loopEnd: number; + + /** + * The pitch of the sound. + */ + public pitch: number; + + /** + * The playback rate of the sound. + */ + public playbackRate: number; + + public abstract readonly buffer: StaticSoundBuffer; + + /** @internal */ + constructor(name: string, engine: AudioEngineV2, options: Nullable = null) { + super(name, engine, options); + + this.duration = options?.duration ?? 0; + this.loopStart = options?.loopStart ?? 0; + this.loopEnd = options?.loopEnd ?? 0; + this.pitch = options?.pitch ?? 0; + this.playbackRate = options?.playbackRate ?? 
1; + } + + protected abstract override _createSoundInstance(): StaticSoundInstance; + + /** + * Plays the sound. + * @param waitTime - The time to wait before playing the sound in seconds. + * @param startOffset - The time within the sound source to start playing the sound in seconds. + * @param duration - How long to play the sound in seconds. + * @returns The new playback instance, or `null` if the sound was resumed from pause. + */ + public play(waitTime: Nullable = null, startOffset: Nullable = null, duration: Nullable = null): StaticSoundInstance { + if (this._isPaused && this._soundInstances.size > 0) { + this.resume(); + return Array.from(this._soundInstances)[this._soundInstances.size - 1] as StaticSoundInstance; + } + + const instance = this._createSoundInstance(); + this._play(instance, waitTime, startOffset, duration); + this._stopExcessInstances(); + + return instance; + } +} diff --git a/packages/dev/core/src/Audio/v2/staticSoundBuffer.ts b/packages/dev/core/src/Audio/v2/staticSoundBuffer.ts new file mode 100644 index 00000000000..b9b55cb40e9 --- /dev/null +++ b/packages/dev/core/src/Audio/v2/staticSoundBuffer.ts @@ -0,0 +1,21 @@ +import type { AudioEngineV2 } from "./audioEngine"; + +/** + * Abstract class for static sound buffer. + */ +export abstract class StaticSoundBuffer { + /** + * The engine that the sound buffer belongs to. 
+ */ + public readonly engine: AudioEngineV2; + + public abstract get sampleRate(): number; + public abstract get length(): number; + public abstract get duration(): number; + public abstract get numberOfChannels(): number; + + /** @internal */ + constructor(engine: AudioEngineV2) { + this.engine = engine; + } +} diff --git a/packages/dev/core/src/Audio/v2/staticSoundInstance.ts b/packages/dev/core/src/Audio/v2/staticSoundInstance.ts new file mode 100644 index 00000000000..b59d5f2be7c --- /dev/null +++ b/packages/dev/core/src/Audio/v2/staticSoundInstance.ts @@ -0,0 +1,10 @@ +import type { AbstractSound } from "./abstractSound"; +import { AbstractSoundInstance } from "./abstractSoundInstance"; + +/** @internal */ +export abstract class StaticSoundInstance extends AbstractSoundInstance { + /** @internal */ + constructor(source: AbstractSound) { + super(source); + } +} diff --git a/packages/dev/core/src/Audio/v2/streamingSound.ts b/packages/dev/core/src/Audio/v2/streamingSound.ts new file mode 100644 index 00000000000..dd393743c15 --- /dev/null +++ b/packages/dev/core/src/Audio/v2/streamingSound.ts @@ -0,0 +1,69 @@ +import type { Nullable } from "../../types"; +import type { AudioEngineV2 } from "./audioEngine"; +import type { ISoundOptions } from "./abstractSound"; +import { AbstractSound } from "./abstractSound"; +import { SoundState } from "./soundState"; +import type { StreamingSoundInstance } from "./streamingSoundInstance"; + +export type StreamingSoundPreloadType = "none" | "metadata" | "auto"; + +/** + * Options for creating a new streaming sound. + */ +export interface IStreamingSoundOptions extends ISoundOptions { + /** + * The preload type of the sound. + */ + preload?: StreamingSoundPreloadType; +} + +/** + * Abstract class representing a streaming sound in the audio engine. + */ +export abstract class StreamingSound extends AbstractSound { + /** + * The preload type for the sound stream. 
+ */ + public preload: StreamingSoundPreloadType; + + /** + * Set to `true` to preserve the pitch of the sound when changing the playback rate; otherwise `false`. + */ + public preservesPitch: boolean; + + /** @internal */ + constructor(name: string, engine: AudioEngineV2, options: Nullable = null) { + super(name, engine, options); + + this.preload = options?.preload ?? "auto"; + } + + protected abstract override _createSoundInstance(): StreamingSoundInstance; + + /** + * Plays the sound. + * @param waitTime - The time to wait before playing the sound in seconds. + * @param startOffset - The time within the sound source to start playing the sound in seconds. + * @param duration - How long to play the sound in seconds. + * @returns The new playback instance, or `null` if the sound was resumed from pause. + */ + public play(waitTime: Nullable = null, startOffset: Nullable = null, duration: Nullable = null): StreamingSoundInstance { + if (this._isPaused && this._soundInstances.size > 0) { + this.resume(); + return Array.from(this._soundInstances)[this._soundInstances.size - 1] as StreamingSoundInstance; + } + + const instance = this._createSoundInstance(); + this._play(instance, waitTime, startOffset, duration); + + const onInstanceStateChanged = () => { + if (instance.state === SoundState.Started) { + this._stopExcessInstances(); + instance.onStateChangedObservable.removeCallback(onInstanceStateChanged); + } + }; + instance.onStateChangedObservable.add(onInstanceStateChanged); + + return instance; + } +} diff --git a/packages/dev/core/src/Audio/v2/streamingSoundInstance.ts b/packages/dev/core/src/Audio/v2/streamingSoundInstance.ts new file mode 100644 index 00000000000..f617540f235 --- /dev/null +++ b/packages/dev/core/src/Audio/v2/streamingSoundInstance.ts @@ -0,0 +1,15 @@ +import { Observable } from "../../Misc/observable"; +import type { AbstractSound } from "./abstractSound"; +import { AbstractSoundInstance } from "./abstractSoundInstance"; + +/** + * A streaming 
sound instance. + */ +export abstract class StreamingSoundInstance extends AbstractSoundInstance { + /** Observable triggered when the instance is ready to play */ + public onReadyObservable = new Observable(); + + protected constructor(source: AbstractSound) { + super(source); + } +} diff --git a/packages/dev/core/src/Audio/v2/webAudio/webAudioBus.ts b/packages/dev/core/src/Audio/v2/webAudio/webAudioBus.ts new file mode 100644 index 00000000000..4ad0710244d --- /dev/null +++ b/packages/dev/core/src/Audio/v2/webAudio/webAudioBus.ts @@ -0,0 +1,67 @@ +import type { Nullable } from "core/types"; +import type { AbstractAudioNode } from "../abstractAudioNode"; +import type { IAudioBusOptions } from "../audioBus"; +import { AudioBus } from "../audioBus"; +import type { AudioPositioner } from "../audioPositioner"; +import type { WebAudioEngine } from "./webAudioEngine"; +import type { WebAudioMainOutput } from "./webAudioMainOutput"; +import { CreateAudioPositionerAsync } from "./webAudioPositioner"; + +/** + * Options for creating a new WebAudioBus. 
+ */ +export interface IWebAudioBusOptions extends IAudioBusOptions {} + +/** @internal */ +export class WebAudioBus extends AudioBus { + private _gainNode: GainNode; + + /** @internal */ + public get webAudioInputNode(): AudioNode { + return this._gainNode; + } + + /** @internal */ + public get webAudioOutputNode(): AudioNode { + return this._gainNode; + } + + /** @internal */ + constructor(name: string, engine: WebAudioEngine, options: Nullable = null) { + super(name, engine, options); + } + + /** @internal */ + public async init(): Promise { + this._gainNode = new GainNode(await (this.engine as WebAudioEngine).audioContext); + } + + /** @internal */ + public getClassName(): string { + return "WebAudioBus"; + } + + protected override _createPositioner(): Promise { + return CreateAudioPositionerAsync(this); + } + + protected override _connect(node: AbstractAudioNode): void { + super._connect(node); + + if (node.getClassName() === "WebAudioMainOutput" && (node as WebAudioMainOutput).webAudioInputNode) { + this.webAudioOutputNode.connect((node as WebAudioMainOutput).webAudioInputNode); + } else { + throw new Error("Unsupported node type."); + } + } + + protected override _disconnect(node: AbstractAudioNode): void { + super._disconnect(node); + + if (node.getClassName() === "WebAudioMainOutput" && (node as WebAudioMainOutput).webAudioInputNode) { + this.webAudioOutputNode.disconnect((node as WebAudioMainOutput).webAudioInputNode); + } else { + throw new Error("Unsupported node type."); + } + } +} diff --git a/packages/dev/core/src/Audio/v2/webAudio/webAudioEngine.ts b/packages/dev/core/src/Audio/v2/webAudio/webAudioEngine.ts new file mode 100644 index 00000000000..0dac7913fd7 --- /dev/null +++ b/packages/dev/core/src/Audio/v2/webAudio/webAudioEngine.ts @@ -0,0 +1,230 @@ +import type { Nullable } from "../../../types"; +import { AudioEngineV2 } from "../audioEngine"; +import type { AbstractAudioNode } from "../abstractAudioNode"; +import type { AbstractSound } from 
"../abstractSound"; +import type { AbstractSoundInstance } from "../abstractSoundInstance"; +import type { MainAudioBus } from "../mainAudioBus"; +import { CreateMainAudioBusAsync } from "./webAudioMainBus"; +import { CreateMainAudioOutputAsync } from "./webAudioMainOutput"; + +/** + * Options for creating a new WebAudioEngine. + */ +export interface IWebAudioEngineOptions { + /** + * The audio context to be used by the engine. + */ + audioContext?: AudioContext | OfflineAudioContext; + + /** + * Set to `true` to automatically resume the audio context when the user interacts with the page; otherwise `false`. Default is `true`. + */ + resumeOnInteraction?: boolean; +} + +/** + * Creates a new WebAudioEngine. + * @param options - The options for creating the audio engine. + * @returns A promise that resolves with the created audio engine. + */ +export async function CreateAudioEngineAsync(options: Nullable = null): Promise { + const engine = new WebAudioEngine(); + await engine.init(options); + return engine; +} + +const formatMimeTypeMap = new Map([ + ["aac", "audio/aac"], + ["ac3", "audio/ac3"], + ["flac", "audio/flac"], + ["m4a", "audio/mp4"], + ["mp3", 'audio/mpeg; codecs="mp3"'], + ["mp4", "audio/mp4"], + ["ogg", 'audio/ogg; codecs="vorbis"'], + ["wav", "audio/wav"], + ["webm", 'audio/webm; codecs="vorbis"'], +]); + +/** @internal */ +export class WebAudioEngine extends AudioEngineV2 { + private _audioContext: AudioContext | OfflineAudioContext; + private _audioContextStarted = false; + + private _onAudioContextStateChange = (() => { + if (this.state === "running") { + this._audioContextStarted = true; + } + if (this.state === "suspended" || this.state === "interrupted") { + if (this._resumeOnInteraction) { + document.addEventListener("click", this._onInteraction, { once: true }); + } + } + }).bind(this); + + private _mainOutput: Nullable = null; + + private _invalidFormats = new Set(); + private _validFormats = new Set(); + + /** @internal */ + public get 
isWebAudio(): boolean { + return true; + } + + /** @internal */ + public get currentTime(): number { + return this._audioContext.currentTime; + } + + /** @internal */ + public get mainOutput(): Nullable { + return this._mainOutput; + } + + private _initAudioContext: () => void = (async () => { + if (this._audioContext === undefined) { + this._audioContext = new AudioContext(); + } + + this._audioContext.addEventListener("statechange", this._onAudioContextStateChange); + + await this.resume(); + + document.removeEventListener("click", this._initAudioContext); + }).bind(this); + + private _resolveAudioContext: (audioContext: BaseAudioContext) => void; + + private _resumeOnInteraction = true; + + private _onInteraction = (() => { + if (this._resumeOnInteraction) { + this.resume(); + } + + document.removeEventListener("click", this._onInteraction); + }).bind(this); + + // TODO: Make this return the audio context directly, not a Promise. + // TODO: Consider waiting for a click in init to avoid the console warning, but stop waiting and create the audio context immediately if this member gets accessed, which will trigger the console warning. + /** @internal */ + public audioContext: Promise; + + /** @internal */ + public get state(): string { + return this._audioContext.state; + } + + /** @internal */ + public get webAudioInputNode(): AudioNode { + return this._audioContext.destination; + } + + /** @internal */ + public async init(options: Nullable = null): Promise { + this.audioContext = new Promise((resolve) => { + this._resolveAudioContext = resolve; + + if (this._resumeOnInteraction) { + document.addEventListener("click", this._initAudioContext, { once: true }); + } + }); + + if (options?.audioContext) { + this._audioContext = options.audioContext; + this._initAudioContext(); + } + + this._resumeOnInteraction = options?.resumeOnInteraction ?? 
true; + + await this.audioContext; + this._mainOutput = await CreateMainAudioOutputAsync(this); + await CreateMainAudioBusAsync("default", this); + } + + /** @internal */ + public override dispose(): void { + super.dispose(); + + if (this._audioContext instanceof AudioContext && this._audioContext.state !== "closed") { + this._audioContext.close(); + } + + document.removeEventListener("click", this._initAudioContext); + document.removeEventListener("click", this._onInteraction); + this._audioContext.removeEventListener("statechange", this._onAudioContextStateChange); + } + + /** @internal */ + public flagInvalidFormat(format: string): void { + this._invalidFormats.add(format); + } + + /** @internal */ + public formatIsValid(format: string): boolean { + if (this._validFormats.has(format)) { + return true; + } + + if (this._invalidFormats.has(format)) { + return false; + } + + const mimeType = formatMimeTypeMap.get(format); + if (mimeType === undefined) { + return false; + } + + const audio = new Audio(); + if (audio.canPlayType(mimeType) === "") { + this._invalidFormats.add(format); + return false; + } + + this._validFormats.add(format); + + return true; + } + + /** @internal */ + public override async pause(waitTime: Nullable = null): Promise { + if (this._audioContext instanceof AudioContext) { + await this._audioContext.suspend(); + } else if (this._audioContext instanceof OfflineAudioContext) { + return this._audioContext.suspend(waitTime ?? 
0); + } + } + + /** @internal */ + public override async resume(): Promise { + if (this._audioContext === undefined) { + this._initAudioContext(); + } + + if (this._audioContext instanceof AudioContext) { + await this._audioContext.resume(); + this._resolveAudioContext(this._audioContext); + } else if (this._audioContext instanceof OfflineAudioContext) { + if (this._audioContextStarted) { + return this._audioContext.resume(); + } + } + + this._resolveAudioContext(this._audioContext); + } + + /** @internal */ + public addMainBus(mainBus: MainAudioBus): void { + this._addMainBus(mainBus); + } + + /** @internal */ + public addSound(sound: AbstractSound): void { + this._addSound(sound); + } + + /** @internal */ + public addSoundInstance(soundInstance: AbstractSoundInstance): void { + this._addSoundInstance(soundInstance); + } +} diff --git a/packages/dev/core/src/Audio/v2/webAudio/webAudioMainBus.ts b/packages/dev/core/src/Audio/v2/webAudio/webAudioMainBus.ts new file mode 100644 index 00000000000..a9daf510204 --- /dev/null +++ b/packages/dev/core/src/Audio/v2/webAudio/webAudioMainBus.ts @@ -0,0 +1,76 @@ +import type { AudioEngineV2 } from "../audioEngine"; +import type { AbstractAudioNode } from "../abstractAudioNode"; +import { MainAudioBus } from "../mainAudioBus"; +import type { WebAudioEngine } from "./webAudioEngine"; +import type { WebAudioMainOutput } from "./webAudioMainOutput"; + +/** + * Creates a new main audio bus. + * @param name - The name of the main bus. + * @param engine - The audio engine. + * @returns A promise that resolves with the created main audio bus. 
+ */ +export async function CreateMainAudioBusAsync(name: string, engine: AudioEngineV2): Promise { + if (!engine.isWebAudio) { + throw new Error("Wrong engine type."); + } + + const bus = new WebAudioMainBus(name, engine as WebAudioEngine); + await bus.init(); + (engine as WebAudioEngine).addMainBus(bus); + return bus; +} + +/** @internal */ +export class WebAudioMainBus extends MainAudioBus { + private _gainNode: GainNode; + + /** @internal */ + public get webAudioInputNode(): AudioNode { + return this._gainNode; + } + + /** @internal */ + public get webAudioOutputNode(): AudioNode { + return this._gainNode; + } + + /** @internal */ + constructor(name: string, engine: WebAudioEngine) { + super(name, engine); + } + + /** @internal */ + public async init(): Promise { + this._gainNode = new GainNode(await (this.engine as WebAudioEngine).audioContext); + + if (this.engine.mainOutput) { + this._connect(this.engine.mainOutput); + } + } + + /** @internal */ + public getClassName(): string { + return "WebAudioMainBus"; + } + + protected override _connect(node: AbstractAudioNode): void { + super._connect(node); + + if (node.getClassName() === "WebAudioMainOutput" && (node as WebAudioMainOutput).webAudioInputNode) { + this.webAudioOutputNode.connect((node as WebAudioMainOutput).webAudioInputNode); + } else { + throw new Error("Unsupported node type."); + } + } + + protected override _disconnect(node: AbstractAudioNode): void { + super._disconnect(node); + + if (node.getClassName() === "WebAudioMainOutput" && (node as WebAudioMainOutput).webAudioInputNode) { + this.webAudioOutputNode.disconnect((node as WebAudioMainOutput).webAudioInputNode); + } else { + throw new Error("Unsupported node type."); + } + } +} diff --git a/packages/dev/core/src/Audio/v2/webAudio/webAudioMainOutput.ts b/packages/dev/core/src/Audio/v2/webAudio/webAudioMainOutput.ts new file mode 100644 index 00000000000..c711c45a32a --- /dev/null +++ 
b/packages/dev/core/src/Audio/v2/webAudio/webAudioMainOutput.ts @@ -0,0 +1,49 @@ +import type { AudioEngineV2 } from "../audioEngine"; +import { MainAudioOutput } from "../mainAudioOutput"; +import type { WebAudioEngine } from "./webAudioEngine"; + +/** + * Creates a new main audio output. + * @param engine - The audio engine. + * @returns A promise that resolves with the created audio output. + */ +export async function CreateMainAudioOutputAsync(engine: AudioEngineV2): Promise { + if (!engine.isWebAudio) { + throw new Error("Wrong engine type."); + } + + const mainAudioOutput = new WebAudioMainOutput(engine); + await mainAudioOutput.init(); + return mainAudioOutput; +} + +/** @internal */ +export class WebAudioMainOutput extends MainAudioOutput { + private _destinationNode: AudioDestinationNode; + private _gainNode: GainNode; + + /** @internal */ + public get webAudioInputNode(): AudioNode { + return this._gainNode; + } + + /** @internal */ + constructor(engine: AudioEngineV2) { + super(engine); + } + + /** @internal */ + public async init(): Promise { + const audioContext = await (this.engine as WebAudioEngine).audioContext; + + this._gainNode = new GainNode(audioContext); + this._destinationNode = audioContext.destination; + + this._gainNode.connect(this._destinationNode); + } + + /** @internal */ + public getClassName(): string { + return "WebAudioMainOutput"; + } +} diff --git a/packages/dev/core/src/Audio/v2/webAudio/webAudioPositioner.ts b/packages/dev/core/src/Audio/v2/webAudio/webAudioPositioner.ts new file mode 100644 index 00000000000..2148b6554da --- /dev/null +++ b/packages/dev/core/src/Audio/v2/webAudio/webAudioPositioner.ts @@ -0,0 +1,25 @@ +import type { Nullable } from "../../../types"; +import type { AbstractAudioNode } from "../abstractAudioNode"; +import type { IAudioPositionerOptions } from "../audioPositioner"; +import { AudioPositioner } from "../audioPositioner"; +import type { ISpatialAudioTransformOptions } from 
"../spatialAudioTransform"; + +/** @internal */ +export interface IWebAudioPositionerOptions extends IAudioPositionerOptions {} + +/** @internal */ +export async function CreateAudioPositionerAsync(parent: AbstractAudioNode, options: Nullable = null): Promise { + return new WebAudioPositioner(parent, options); +} + +class WebAudioPositioner extends AudioPositioner { + /** @internal */ + constructor(parent: AbstractAudioNode, options: Nullable = null) { + super(parent, options); + } + + /** @internal */ + public getClassName(): string { + return "WebAudioPositioner"; + } +} diff --git a/packages/dev/core/src/Audio/v2/webAudio/webAudioSender.ts b/packages/dev/core/src/Audio/v2/webAudio/webAudioSender.ts new file mode 100644 index 00000000000..0fc7de2304b --- /dev/null +++ b/packages/dev/core/src/Audio/v2/webAudio/webAudioSender.ts @@ -0,0 +1,19 @@ +import type { AbstractAudioNode } from "../abstractAudioNode"; +import { AudioSender } from "../audioSender"; + +/** @internal */ +export async function CreateAudioSenderAsync(parent: AbstractAudioNode): Promise { + return new WebAudioSender(parent); +} + +class WebAudioSender extends AudioSender { + /** @internal */ + constructor(parent: AbstractAudioNode) { + super(parent); + } + + /** @internal */ + public getClassName(): string { + return "WebAudioSender"; + } +} diff --git a/packages/dev/core/src/Audio/v2/webAudio/webAudioStaticSound.ts b/packages/dev/core/src/Audio/v2/webAudio/webAudioStaticSound.ts new file mode 100644 index 00000000000..0e104a43932 --- /dev/null +++ b/packages/dev/core/src/Audio/v2/webAudio/webAudioStaticSound.ts @@ -0,0 +1,378 @@ +import type { Nullable } from "../../../types"; +import type { AudioEngineV2 } from "../audioEngine"; +import type { AbstractAudioNode } from "../abstractAudioNode"; +import { SoundState } from "../soundState"; +import type { IStaticSoundOptions } from "../staticSound"; +import { StaticSound } from "../staticSound"; +import { StaticSoundBuffer } from 
"../staticSoundBuffer"; +import { StaticSoundInstance } from "../staticSoundInstance"; +import type { WebAudioBus } from "./webAudioBus"; +import type { WebAudioEngine } from "./webAudioEngine"; +import type { WebAudioMainBus } from "./webAudioMainBus"; + +const fileExtensionRegex = new RegExp("\\.(\\w{3,4}$|\\?)"); + +export type StaticSoundSourceType = ArrayBuffer | AudioBuffer | StaticSoundBuffer | string | string[]; + +/** + * Creates a new static sound. + * @param name - The name of the sound. + * @param source - The source of the sound. + * @param engine - The audio engine. + * @param options - The options for the static sound. + * @returns A promise that resolves to the created static sound. + */ +export async function CreateSoundAsync(name: string, source: StaticSoundSourceType, engine: AudioEngineV2, options: Nullable = null): Promise { + if (!engine.isWebAudio) { + throw new Error("Unsupported engine type."); + } + + const sound = new WebAudioStaticSound(name, engine as WebAudioEngine, options); + await sound.init(source, options); + (engine as WebAudioEngine).addSound(sound); + return sound; +} + +/** + * Creates a new static sound buffer. + * @param source - The source of the sound buffer. + * @param engine - The audio engine. + * @param options - The options for the static sound buffer. + * @returns A promise that resolves to the created static sound buffer. 
+ */
+// NOTE(review): throughout this patch text the generic type arguments appear to have
+// been stripped by extraction (e.g. `Nullable = null`, `Promise {`, `Nullable;`).
+// Presumably these were `Nullable<IStaticSoundOptions>`, `Promise<StaticSoundBuffer>`,
+// `Nullable<AudioBufferSourceNode>`, etc. — confirm against the original commit.
+export async function CreateSoundBufferAsync(source: StaticSoundSourceType, engine: AudioEngineV2, options: Nullable = null): Promise {
+    // Only the WebAudio implementation of the v2 engine can decode into a WebAudio buffer.
+    if (!engine.isWebAudio) {
+        throw new Error("Unsupported engine type.");
+    }
+
+    // Two-phase construction: the async decode/fetch work happens in init(), not the constructor.
+    const buffer = new WebAudioStaticSoundBuffer(engine as WebAudioEngine);
+    await buffer.init(source, options);
+    return buffer;
+}
+
+/** @internal */
+class WebAudioStaticSound extends StaticSound {
+    // Single gain node acting as both this sound's input and output node; its `gain`
+    // parameter backs the public `volume` property.
+    private _gainNode: GainNode;
+
+    /** @internal */
+    public override readonly engine: WebAudioEngine;
+
+    // Resolved from `engine.audioContext` in init(); used to create per-play source nodes.
+    /** @internal */
+    public audioContext: BaseAudioContext;
+
+    // Decoded sample data shared by all instances of this sound.
+    private _buffer: WebAudioStaticSoundBuffer;
+
+    /** @internal */
+    public get buffer(): WebAudioStaticSoundBuffer {
+        return this._buffer;
+    }
+
+    // Volume maps directly onto the gain node's current value.
+    /** @internal */
+    public get volume(): number {
+        return this._gainNode.gain.value;
+    }
+
+    public set volume(value: number) {
+        this._gainNode.gain.value = value;
+    }
+
+    // Input and output are intentionally the same node: instances connect into the gain
+    // node, and the gain node connects onward to a bus.
+    /** @internal */
+    public get webAudioInputNode() {
+        return this._gainNode;
+    }
+
+    /** @internal */
+    public get webAudioOutputNode() {
+        return this._gainNode;
+    }
+
+    /** @internal */
+    constructor(name: string, engine: WebAudioEngine, options: Nullable = null) {
+        super(name, engine, options);
+    }
+
+    /**
+     * Resolves the audio context, creates the gain node, and obtains the decoded buffer
+     * from the given source. Must be awaited before the sound is used.
+     * @internal
+     */
+    public async init(source: StaticSoundSourceType, options: Nullable = null): Promise {
+        this.audioContext = await this.engine.audioContext;
+
+        this._gainNode = new GainNode(this.audioContext);
+
+        if (source instanceof WebAudioStaticSoundBuffer) {
+            // Reuse an already-decoded buffer (possibly shared between sounds).
+            this._buffer = source as WebAudioStaticSoundBuffer;
+        } else if (typeof source === "string" || Array.isArray(source) || source instanceof ArrayBuffer || source instanceof AudioBuffer) {
+            // URL, URL list, raw bytes, or raw AudioBuffer: decode into a new buffer.
+            this._buffer = (await CreateSoundBufferAsync(source, this.engine, options)) as WebAudioStaticSoundBuffer;
+        }
+        // NOTE(review): if `source` matches none of the branches above, `_buffer` is left
+        // unset — confirm upstream validation guarantees one of these types.
+
+        this.outputBus = options?.outputBus ?? this.engine.defaultMainBus;
+        this.volume = options?.volume ?? 1;
+
+        if (options?.autoplay) {
+            // A non-positive stored duration means "play to the end" (null duration).
+            this.play(null, this.startOffset, this.duration > 0 ? this.duration : null);
+        }
+    }
+
+    /** @internal */
+    public getClassName(): string {
+        return "WebAudioStaticSound";
+    }
+
+    // Creates a per-play instance and registers it with the engine for lifetime tracking.
+    protected _createSoundInstance(): WebAudioStaticSoundInstance {
+        const soundInstance = new WebAudioStaticSoundInstance(this);
+        this.engine.addSoundInstance(soundInstance);
+        return soundInstance;
+    }
+
+    // Only buses are valid downstream nodes; route this sound's gain node into the
+    // target bus's input node.
+    protected override _connect(node: AbstractAudioNode): void {
+        super._connect(node);
+
+        if (node.getClassName() === "WebAudioMainBus" || node.getClassName() === "WebAudioBus") {
+            this.webAudioOutputNode.connect((node as WebAudioMainBus | WebAudioBus).webAudioInputNode);
+        } else {
+            throw new Error("Unsupported node type.");
+        }
+    }
+
+    protected override _disconnect(node: AbstractAudioNode): void {
+        super._disconnect(node);
+
+        if (node.getClassName() === "WebAudioMainBus" || node.getClassName() === "WebAudioBus") {
+            this.webAudioOutputNode.disconnect((node as WebAudioMainBus | WebAudioBus).webAudioInputNode);
+        } else {
+            throw new Error("Unsupported node type.");
+        }
+    }
+}
+
+/** @internal */
+class WebAudioStaticSoundBuffer extends StaticSoundBuffer {
+    /** @internal */
+    public override readonly engine: WebAudioEngine;
+
+    // The decoded PCM data; all metadata getters below delegate to it.
+    /** @internal */
+    public audioBuffer: AudioBuffer;
+
+    /** @internal */
+    public get sampleRate(): number {
+        return this.audioBuffer.sampleRate;
+    }
+
+    /** @internal */
+    public get length(): number {
+        return this.audioBuffer.length;
+    }
+
+    /** @internal */
+    public get duration(): number {
+        return this.audioBuffer.duration;
+    }
+
+    /** @internal */
+    public get numberOfChannels(): number {
+        return this.audioBuffer.numberOfChannels;
+    }
+
+    /** @internal */
+    constructor(engine: WebAudioEngine) {
+        super(engine);
+    }
+
+    // Dispatches on the source kind; leaves `audioBuffer` unset if decode fails or the
+    // source matches no branch.
+    public async init(source: StaticSoundSourceType, options: Nullable = null): Promise {
+        if (source instanceof AudioBuffer) {
+            this.audioBuffer = source;
+        } else if (typeof source === "string") {
+            await this._initFromUrl(source);
+        } else if (Array.isArray(source)) {
+            await this._initFromUrls(source, options?.skipCodecCheck ?? false);
+        } else if (source instanceof ArrayBuffer) {
+            await this._initFromArrayBuffer(source);
+        }
+    }
+
+    // Fetch the URL and decode its bytes. NOTE(review): no HTTP status check — a 404
+    // body would be handed to decodeAudioData and fail there instead.
+    private async _initFromUrl(url: string): Promise {
+        await this._initFromArrayBuffer(await (await fetch(url)).arrayBuffer());
+    }
+
+    // Tries each URL in order until one decodes successfully (see the `break` below).
+    private async _initFromUrls(urls: string[], skipCodecCheck: boolean): Promise {
+        for (const url of urls) {
+            if (skipCodecCheck) {
+                await this._initFromUrl(url);
+            } else {
+                // Derive the format from the file extension and skip formats the engine
+                // has already flagged as unplayable.
+                const format = url.match(fileExtensionRegex)?.at(1);
+                if (format && this.engine.formatIsValid(format)) {
+                    try {
+                        await this._initFromUrl(url);
+                    } catch (e) {
+                        // NOTE(review): the error is swallowed; the format is flagged
+                        // invalid and the next URL is tried. Also, this inner
+                        // `format &&` re-check is redundant — `format` was already
+                        // verified truthy above.
+                        if (format && 0 < format.length) {
+                            this.engine.flagInvalidFormat(format);
+                        }
+                    }
+                }
+            }
+
+            // First successful decode wins.
+            if (this.audioBuffer) {
+                break;
+            }
+        }
+    }
+
+    private async _initFromArrayBuffer(arrayBuffer: ArrayBuffer): Promise {
+        this.audioBuffer = await (await this.engine.audioContext).decodeAudioData(arrayBuffer);
+    }
+}
+
+/** @internal */
+class WebAudioStaticSoundInstance extends StaticSoundInstance {
+    // Context time at which the current run of playback started (or is scheduled to start).
+    private _startTime: number = 0;
+    // Accumulated playback time across previous pause/resume cycles.
+    private _currentTime: number = 0;
+
+    protected override _source: WebAudioStaticSound;
+
+    // The per-play source node. AudioBufferSourceNodes are single-use, so this is
+    // recreated on every (re)start and dropped on pause/stop.
+    /** @internal */
+    public sourceNode: Nullable;
+
+    /** @internal */
+    get startTime(): number {
+        if (this._state === SoundState.Stopped) {
+            return 0;
+        }
+
+        return this._startTime;
+    }
+
+    // Accumulated time plus (when playing) the time elapsed since the last start;
+    // while paused the elapsed term is 0, so only the accumulated time is reported.
+    /** @internal */
+    get currentTime(): number {
+        if (this._state === SoundState.Stopped) {
+            return 0;
+        }
+
+        const timeSinceLastStart = this._state === SoundState.Paused ? 0 : this.engine.currentTime - this._startTime;
+        return this._currentTime + timeSinceLastStart;
+    }
+
+    constructor(source: WebAudioStaticSound) {
+        super(source);
+        this._initSourceNode();
+    }
+
+    /** @internal */
+    public override dispose(): void {
+        super.dispose();
+        // NOTE(review): stop() and node teardown run after super.dispose() — confirm the
+        // base class teardown does not depend on the source node still being connected.
+        this.stop();
+        this._deinitSourceNode();
+    }
+
+    /**
+     * Starts (or restarts from pause) playback.
+     * waitTime: seconds to delay the start; startOffset: position within the buffer;
+     * duration: how long to play, null meaning "to the end".
+     * @internal
+     */
+    public play(waitTime: Nullable = null, startOffset: Nullable = null, duration: Nullable = null): void {
+        if (this._state === SoundState.Started) {
+            return;
+        }
+
+        if (this._state === SoundState.Paused) {
+            // Resume where we left off, wrapped into the buffer's length.
+            // TODO: Make this fall within loop points when loop start/end is set.
+            startOffset = (this.currentTime + this._startOffset) % this._source.buffer.duration;
+            waitTime = 0;
+        } else if (startOffset) {
+            this._startOffset = startOffset;
+        } else {
+            startOffset = this._startOffset;
+        }
+
+        this._setState(SoundState.Started);
+        this._startTime = this.engine.currentTime + (waitTime ?? 0);
+
+        // Recreate the single-use source node, then schedule it. `duration === null`
+        // maps to `undefined` so the node plays to the end of the buffer.
+        this._initSourceNode();
+        this.sourceNode?.start(this._startTime, startOffset ?? 0, duration === null ? undefined : duration);
+    }
+
+    /** @internal */
+    public pause(): void {
+        if (this._state === SoundState.Paused) {
+            return;
+        }
+
+        this._setState(SoundState.Paused);
+        // Bank the elapsed time so currentTime keeps reporting correctly while paused.
+        this._currentTime += this.engine.currentTime - this._startTime;
+
+        // Source nodes cannot be restarted; drop this one and rebuild on resume.
+        this.sourceNode?.stop();
+        this._deinitSourceNode();
+    }
+
+    /** @internal */
+    public resume(): void {
+        if (this._state === SoundState.Paused) {
+            this.play();
+        }
+    }
+
+    /** @internal */
+    public stop(waitTime: Nullable = null): void {
+        if (this._state === SoundState.Stopped) {
+            return;
+        }
+
+        this._setState(SoundState.Stopped);
+
+        // stop(0) means "stop now"; a positive waitTime schedules the stop.
+        this.sourceNode?.stop(waitTime ? this.engine.currentTime + waitTime : 0);
+    }
+
+    /** @internal */
+    public getClassName(): string {
+        return "WebAudioStaticSoundInstance";
+    }
+
+    // Fired when the source node finishes (or is stopped).
+    // NOTE(review): `.bind(this)` on an arrow function is a no-op — arrows are already
+    // lexically bound.
+    protected _onEnded = (() => {
+        this._startTime = 0;
+
+        this.onEndedObservable.notifyObservers(this);
+        this._deinitSourceNode();
+    }).bind(this);
+
+    // The only valid downstream node for an instance is its owning sound (whose gain
+    // node is the input).
+    protected override _connect(node: AbstractAudioNode): void {
+        super._connect(node);
+
+        if (node instanceof WebAudioStaticSound && node.webAudioInputNode) {
+            this.sourceNode?.connect(node.webAudioInputNode);
+        } else {
+            throw new Error("Unsupported node type.");
+        }
+    }
+
+    protected override _disconnect(node: AbstractAudioNode): void {
+        super._disconnect(node);
+
+        if (node instanceof WebAudioStaticSound && node.webAudioInputNode) {
+            this.sourceNode?.disconnect(node.webAudioInputNode);
+        } else {
+            throw new Error("Unsupported node type.");
+        }
+    }
+
+    // Lazily (re)creates the one-shot source node, seeded from the owning sound's
+    // current playback settings, and wires it into the sound's gain node.
+    private _initSourceNode(): void {
+        if (this.sourceNode) {
+            return;
+        }
+
+        this.sourceNode = new AudioBufferSourceNode(this._source.audioContext, {
+            buffer: this._source.buffer.audioBuffer,
+            detune: this._source.pitch,
+            loop: this._source.loop,
+            loopEnd: this._source.loopEnd,
+            loopStart: this._source.loopStart,
+            playbackRate: this._source.playbackRate,
+        });
+
+        this.sourceNode.addEventListener("ended", this._onEnded, { once: true });
+        this._connect(this._source);
+    }
+
+    // Disconnects and releases the current source node, if any.
+    private _deinitSourceNode(): void {
+        if (!this.sourceNode) {
+            return;
+        }
+
+        this._disconnect(this._source);
+        this.sourceNode.removeEventListener("ended", this._onEnded);
+
+        this.sourceNode = null;
+    }
+}
diff --git a/packages/dev/core/src/Audio/v2/webAudio/webAudioStreamingSound.ts b/packages/dev/core/src/Audio/v2/webAudio/webAudioStreamingSound.ts
new file mode 100644
index 00000000000..a6943412691
--- /dev/null
+++ b/packages/dev/core/src/Audio/v2/webAudio/webAudioStreamingSound.ts
@@ -0,0 +1,361 @@
+import { Tools } from "../../../Misc/tools";
+import type { Nullable } from "../../../types";
+import type { AudioEngineV2 } from
"../audioEngine";
+import type { AbstractAudioNode } from "../abstractAudioNode";
+import { SoundState } from "../soundState";
+import type { IStreamingSoundOptions } from "../streamingSound";
+import { StreamingSound } from "../streamingSound";
+import { StreamingSoundInstance } from "../streamingSoundInstance";
+import type { WebAudioBus } from "./webAudioBus";
+import type { WebAudioEngine } from "./webAudioEngine";
+import type { WebAudioMainBus } from "./webAudioMainBus";
+
+export type StreamingSoundSourceType = HTMLMediaElement | string | string[];
+
+/**
+ * Creates a new streaming sound.
+ * @param name - The name of the sound.
+ * @param source - The source of the sound.
+ * @param engine - The audio engine.
+ * @param options - The options for the streaming sound.
+ * @returns A promise that resolves to the created streaming sound.
+ */
+// NOTE(review): as elsewhere in this patch text, generic type arguments appear to have
+// been stripped (presumably `Nullable<IStreamingSoundOptions>` / `Promise<StreamingSound>`).
+export async function CreateStreamingSoundAsync(
+    name: string,
+    source: StreamingSoundSourceType,
+    engine: AudioEngineV2,
+    options: Nullable = null
+): Promise {
+    // Only the WebAudio implementation of the v2 engine is supported.
+    if (!engine.isWebAudio) {
+        throw new Error("Unsupported engine type.");
+    }
+
+    // Two-phase construction: async setup happens in init(), then the sound is
+    // registered with the engine.
+    const sound = new WebAudioStreamingSound(name, engine as WebAudioEngine, options);
+    await sound.init(source, options);
+    (engine as WebAudioEngine).addSound(sound);
+    return sound;
+}
+
+/** @internal */
+class WebAudioStreamingSound extends StreamingSound {
+    // Single gain node acting as both input and output; backs the `volume` property.
+    private _gainNode: GainNode;
+
+    // Retained so each instance can build its own media element from the same source.
+    /** @internal */
+    public source: StreamingSoundSourceType;
+
+    /** @internal */
+    public override readonly engine: WebAudioEngine;
+
+    /** @internal */
+    public audioContext: AudioContext;
+
+    /** @internal */
+    public get volume(): number {
+        return this._gainNode.gain.value;
+    }
+
+    public set volume(value: number) {
+        this._gainNode.gain.value = value;
+    }
+
+    /** @internal */
+    public get webAudioInputNode() {
+        return this._gainNode;
+    }
+
+    /** @internal */
+    public get webAudioOutputNode() {
+        return this._gainNode;
+    }
+
+    // NOTE(review): this looks like a placeholder — it always reports 0 even while
+    // playing; position is tracked per-instance instead. TODO confirm intended.
+    /** @internal */
+    public get currentTime(): number {
+        return 0;
+    }
+
+    /** @internal */
+    constructor(name: string, engine: WebAudioEngine, options: Nullable = null) {
+        super(name, engine, options);
+    }
+
+    /**
+     * Resolves the audio context, creates the gain node, and applies the options.
+     * Must be awaited before the sound is used.
+     * @internal
+     */
+    public async init(source: StreamingSoundSourceType, options: Nullable = null): Promise {
+        const audioContext = await this.engine.audioContext;
+
+        // Media-element sources need a realtime AudioContext (an offline context has no
+        // MediaElementAudioSourceNode support), hence the runtime type check here.
+        if (!(audioContext instanceof AudioContext)) {
+            throw new Error("Unsupported audio context type.");
+        }
+
+        this.audioContext = audioContext;
+
+        this._gainNode = new GainNode(this.audioContext);
+
+        this.source = source;
+        this.outputBus = options?.outputBus ?? this.engine.defaultMainBus;
+        this.volume = options?.volume ?? 1;
+
+        if (options?.autoplay) {
+            await this.play(null, this.startOffset);
+        }
+    }
+
+    /** @internal */
+    public getClassName(): string {
+        return "WebAudioStreamingSound";
+    }
+
+    // Creates a per-play instance and registers it with the engine for lifetime tracking.
+    protected _createSoundInstance(): WebAudioStreamingSoundInstance {
+        const soundInstance = new WebAudioStreamingSoundInstance(this);
+        this.engine.addSoundInstance(soundInstance);
+        return soundInstance;
+    }
+
+    // Only buses are valid downstream nodes; route the gain node into the bus's input.
+    protected override _connect(node: AbstractAudioNode): void {
+        super._connect(node);
+
+        if (node.getClassName() === "WebAudioMainBus" || node.getClassName() === "WebAudioBus") {
+            this.webAudioOutputNode.connect((node as WebAudioMainBus | WebAudioBus).webAudioInputNode);
+        } else {
+            throw new Error("Unsupported node type.");
+        }
+    }
+
+    protected override _disconnect(node: AbstractAudioNode): void {
+        super._disconnect(node);
+
+        if (node.getClassName() === "WebAudioMainBus" || node.getClassName() === "WebAudioBus") {
+            this.webAudioOutputNode.disconnect((node as WebAudioMainBus | WebAudioBus).webAudioInputNode);
+        } else {
+            throw new Error("Unsupported node type.");
+        }
+    }
+}
+
+/** @internal */
+class WebAudioStreamingSoundInstance extends StreamingSoundInstance {
+    // Pending delayed play/stop timer handle (seconds are converted to ms below).
+    private _waitTimer: Nullable = null;
+
+    // Resolves once the media element reports "canplaythrough"; _play() awaits this
+    // so playback never starts on an unbuffered element.
+    private _isReadyPromise: Promise = new Promise((resolve) => {
+        this._resolveIsReadyPromise = resolve;
+    });
+    private _resolveIsReadyPromise: (mediaElement: HTMLMediaElement) => void;
+
+    // NOTE(review): `.bind(this)` on arrow functions (here and in _onEnded) is a no-op —
+    // arrows are already lexically bound.
+    private _onCanPlayThrough: () => void = (() => {
+        this._resolveIsReadyPromise(this.mediaElement);
+        this.onReadyObservable.notifyObservers(this);
+    }).bind(this);
+
+    // NOTE(review): on a natural "ended" event this runs, dispose() calls stop(), and
+    // _stop() invokes _onEnded() again — observers appear to be notified twice on a
+    // natural end. Confirm and guard if unintended.
+    private _onEnded: () => void = (() => {
+        this.onEndedObservable.notifyObservers(this);
+        this.dispose();
+    }).bind(this);
+
+    protected override _source: WebAudioStreamingSound;
+
+    /** @internal */
+    public mediaElement: HTMLMediaElement;
+
+    /** @internal */
+    public sourceNode: Nullable;
+
+    // Context time at which the current run of playback started.
+    private _startTime: number = 0;
+
+    /** @internal */
+    get startTime(): number {
+        if (this._state === SoundState.Stopped) {
+            return 0;
+        }
+
+        return this._startTime;
+    }
+
+    // Elapsed context time since the last start. NOTE(review): unlike the static-sound
+    // instance, paused time is not excluded here — verify pause/resume reporting.
+    /** @internal */
+    get currentTime(): number {
+        if (this._state === SoundState.Stopped) {
+            return 0;
+        }
+
+        return this._source.audioContext.currentTime - this._startTime;
+    }
+
+    constructor(source: WebAudioStreamingSound) {
+        super(source);
+
+        // Build a media element appropriate to the source kind (URL, URL list, or an
+        // existing element supplied by the caller).
+        if (typeof source.source === "string") {
+            this._initFromUrl(source.source);
+        } else if (Array.isArray(source.source)) {
+            this._initFromUrls(source.source);
+        } else if (source.source instanceof HTMLMediaElement) {
+            this._initFromMediaElement(source.source);
+        }
+    }
+
+    private _initFromUrl(url: string): void {
+        const audio = new Audio(url);
+        this._initFromMediaElement(audio);
+    }
+
+    // Multiple URLs become <source> children so the browser picks the first playable format.
+    private _initFromUrls(urls: string[]): void {
+        const audio = new Audio();
+
+        for (const url of urls) {
+            const source = document.createElement("source");
+            source.src = url;
+            audio.appendChild(source);
+        }
+
+        this._initFromMediaElement(audio);
+    }
+
+    // Configures the element from the owning sound's settings, starts loading, and wires
+    // it into the WebAudio graph via a MediaElementAudioSourceNode.
+    private _initFromMediaElement(mediaElement: HTMLMediaElement): void {
+        // NOTE(review): `currentSrc` is typically empty until source selection/load has
+        // happened — presumably this should use `src` or the original URL. Confirm.
+        Tools.SetCorsBehavior(mediaElement.currentSrc, mediaElement);
+
+        mediaElement.controls = false;
+        mediaElement.loop = this._source.loop;
+        mediaElement.preload = this._source.preload;
+        mediaElement.preservesPitch = this._source.preservesPitch;
+
+        mediaElement.addEventListener("canplaythrough", this._onCanPlayThrough, { once: true });
+        mediaElement.addEventListener("ended", this._onEnded, { once: true });
+
+        mediaElement.load();
+
+        // NOTE(review): the element is appended (hidden) to document.body and removed in
+        // dispose(); confirm DOM attachment is actually required for playback here.
+        document.body.appendChild(mediaElement);
+
+        this.sourceNode = new MediaElementAudioSourceNode(this._source.audioContext, { mediaElement: mediaElement });
+        this._connect(this._source);
+
+        this.mediaElement = mediaElement;
+    }
+
+    /** @internal */
+    public override dispose(): void {
+        super.dispose();
+
+        this.stop();
+        this._clearWaitTimer();
+
+        this.sourceNode = null;
+
+        if (document.body.contains(this.mediaElement)) {
+            document.body.removeChild(this.mediaElement);
+        }
+
+        // Remove listeners and any <source> children added by _initFromUrls so the
+        // element can be garbage-collected.
+        this.mediaElement.removeEventListener("ended", this._onEnded);
+        this.mediaElement.removeEventListener("canplaythrough", this._onCanPlayThrough);
+        for (const child of Array.from(this.mediaElement.children)) {
+            this.mediaElement.removeChild(child);
+        }
+    }
+
+    /**
+     * Starts playback, optionally delayed by waitTime (seconds) and seeked to
+     * startOffset (seconds).
+     * @internal
+     */
+    public play(waitTime: Nullable = null, startOffset: Nullable = null): void {
+        if (this._state === SoundState.Started) {
+            return;
+        }
+
+        if (startOffset && startOffset > 0) {
+            if (this.mediaElement) {
+                this.mediaElement.currentTime = startOffset;
+            }
+        }
+
+        this._clearWaitTimer();
+
+        if (waitTime && waitTime > 0) {
+            // waitTime is in seconds; setTimeout takes milliseconds.
+            this._waitTimer = setTimeout(() => {
+                this._waitTimer = null;
+                this._setState(SoundState.Starting);
+                this._play();
+            }, waitTime * 1000);
+        } else {
+            this._setState(SoundState.Starting);
+            this._play();
+        }
+    }
+
+    /** @internal */
+    public pause(): void {
+        // Pausing is valid both while waiting on readiness (Starting) and while playing.
+        if (this._state !== SoundState.Starting && this._state !== SoundState.Started) {
+            return;
+        }
+
+        this.mediaElement.pause();
+        this._setState(SoundState.Paused);
+    }
+
+    /** @internal */
+    public resume(): void {
+        // The element keeps its position, so play() resumes from where pause() left off.
+        if (this._state === SoundState.Paused) {
+            this.play();
+        }
+    }
+
+    /** @internal */
+    public override stop(waitTime: Nullable = null): void {
+        if (this._state === SoundState.Stopped) {
+            return;
+        }
+
+        this._clearWaitTimer();
+
+        if (waitTime && waitTime > 0) {
+            // Delayed stop, in seconds (converted to ms for setTimeout).
+            this._waitTimer = setTimeout(() => {
+                this._waitTimer = null;
+                this._stop();
+            }, waitTime * 1000);
+        } else {
+            this._stop();
+        }
+    }
+
+    /** @internal */
+    public getClassName(): string {
+        return "WebAudioStreamingSoundInstance";
+    }
+
+    // The only valid downstream node for an instance is its owning sound (whose gain
+    // node is the input).
+    protected override _connect(node: AbstractAudioNode): void {
+        super._connect(node);
+
+        if (node instanceof WebAudioStreamingSound && node.webAudioInputNode) {
+            this.sourceNode?.connect(node.webAudioInputNode);
+        } else {
+            throw new Error("Unsupported node type.");
+        }
+    }
+
+    protected override _disconnect(node: AbstractAudioNode): void {
+        super._disconnect(node);
+
+        if (node instanceof WebAudioStreamingSound && node.webAudioInputNode) {
+            this.sourceNode?.disconnect(node.webAudioInputNode);
+        } else {
+            throw new Error("Unsupported node type.");
+        }
+    }
+
+    // Waits for "canplaythrough", then starts the element — unless state changed
+    // (paused/stopped) while waiting.
+    private async _play(): Promise {
+        await this._isReadyPromise;
+
+        if (this._state !== SoundState.Starting) {
+            return;
+        }
+
+        // NOTE(review): HTMLMediaElement.play() returns a promise that rejects when
+        // autoplay is blocked; the rejection is unhandled here — TODO confirm.
+        this.mediaElement.play();
+        this._startTime = this._source.audioContext.currentTime;
+        this._setState(SoundState.Started);
+    }
+
+    private _stop(): void {
+        this.mediaElement.pause();
+        this._setState(SoundState.Stopped);
+        // Direct _onEnded() call triggers dispose(); its re-entrant stop() returns early
+        // because the state is already Stopped.
+        this._onEnded();
+    }
+
+    private _clearWaitTimer(): void {
+        if (this._waitTimer) {
+            clearTimeout(this._waitTimer);
+            this._waitTimer = null;
+        }
+    }
+}
diff --git a/packages/dev/core/src/index.ts b/packages/dev/core/src/index.ts
index 1f530660372..df04e703cc1 100644
--- a/packages/dev/core/src/index.ts
+++ b/packages/dev/core/src/index.ts
@@ -3,6 +3,7 @@ export * from "./Actions/index";
 export * from "./Animations/index";
 export * from "./assetContainer";
 export * from "./Audio/index";
+export * from "./Audio/v2/index";
 export * from "./BakedVertexAnimation/index";
 export * from "./Behaviors/index";
 export * from "./Bones/index";