Feature/gapless playback #708

Merged · 7 commits · Oct 19, 2024
1 change: 1 addition & 0 deletions src/app/common/settings/settings.base.ts
@@ -84,4 +84,5 @@ export abstract class SettingsBase {
public abstract playerType: string;
public abstract fullPlayerPositionSizeMaximized: string;
public abstract coverPlayerPosition: string;
public abstract enableGaplessPlayback: boolean;
}
12 changes: 12 additions & 0 deletions src/app/common/settings/settings.ts
@@ -790,6 +790,14 @@ export class Settings implements SettingsBase {
this.settings.set('coverPlayerPosition', v);
}

// enableGaplessPlayback
public get enableGaplessPlayback(): boolean {
return <boolean>this.settings.get('enableGaplessPlayback');
}
public set enableGaplessPlayback(v: boolean) {
this.settings.set('enableGaplessPlayback', v);
}

// Initialize
private initialize(): void {
if (!this.settings.has('language')) {
@@ -1119,5 +1127,9 @@ export class Settings implements SettingsBase {
if (!this.settings.has('coverPlayerPosition')) {
this.settings.set('coverPlayerPosition', '50;50');
}

if (!this.settings.has('enableGaplessPlayback')) {
this.settings.set('enableGaplessPlayback', true);
}
}
}
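
Reviewer note (not part of the diff): for context, a minimal sketch of how a settings view could surface the new flag, assuming the SettingsBase token shown above is provided via Angular DI. The component, its selector, its template, and the import path are hypothetical; AudioPlayer reads the flag only in its constructor, so a change would presumably take effect after a restart.

import { Component } from '@angular/core';
import { SettingsBase } from '../../common/settings/settings.base';

// Hypothetical toggle component; only the SettingsBase getter/setter added in this PR are real.
@Component({
    selector: 'app-gapless-playback-toggle',
    template: `<input type="checkbox" [checked]="enabled" (change)="toggle()" /> Gapless playback`,
})
export class GaplessPlaybackToggleComponent {
    public constructor(private settings: SettingsBase) {}

    public get enabled(): boolean {
        return this.settings.enableGaplessPlayback;
    }

    public toggle(): void {
        // Persisted immediately via the setter added in settings.ts above.
        this.settings.enableGaplessPlayback = !this.settings.enableGaplessPlayback;
    }
}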
3 changes: 3 additions & 0 deletions src/app/services/playback/audio-player.base.ts
@@ -1,10 +1,12 @@
import { Observable } from 'rxjs';

export abstract class AudioPlayerBase {
public analyser: AnalyserNode;
public audio: HTMLAudioElement;
public playbackFinished$: Observable<void>;
public abstract progressSeconds: number;
public abstract totalSeconds: number;
public abstract isPaused: boolean;
public abstract play(audioFilePath: string): void;
public abstract stop(): void;
public abstract pause(): void;
@@ -13,4 +15,5 @@ export abstract class AudioPlayerBase {
public abstract mute(): void;
public abstract unMute(): void;
public abstract skipToSeconds(seconds: number): void;
public abstract preloadNextTrack(audioFilePath: string): void;
}
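
Reviewer note (not part of the diff): the base class now exposes preloadNextTrack(), but its call site is not in this PR. Below is a sketch of how a playback service might drive it from an existing progress timer; the scheduler class and the 10-second threshold are illustrative assumptions.

import { AudioPlayerBase } from './audio-player.base';

// Illustrative scheduler: hand the upcoming file to the player shortly before the
// current one ends, so its src is already loaded when playbackFinished$ fires.
export class GaplessPreloadScheduler {
    private hasPreloaded: boolean = false;

    public constructor(private audioPlayer: AudioPlayerBase) {}

    // Call from an existing progress timer with the path of the queued next track.
    public onProgress(nextTrackPath: string | undefined): void {
        const remainingSeconds: number = this.audioPlayer.totalSeconds - this.audioPlayer.progressSeconds;

        if (!this.hasPreloaded && nextTrackPath !== undefined && remainingSeconds > 0 && remainingSeconds < 10) {
            this.audioPlayer.preloadNextTrack(nextTrackPath);
            this.hasPreloaded = true;
        }
    }

    // Reset when playback moves on to the next track.
    public onTrackChanged(): void {
        this.hasPreloaded = false;
    }
}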
212 changes: 191 additions & 21 deletions src/app/services/playback/audio-player.ts
@@ -5,82 +5,164 @@ import { MathExtensions } from '../../common/math-extensions';
import { PromiseUtils } from '../../common/utils/promise-utils';
import { StringUtils } from '../../common/utils/string-utils';
import { AudioPlayerBase } from './audio-player.base';
import { SettingsBase } from '../../common/settings/settings.base';

@Injectable()
export class AudioPlayer implements AudioPlayerBase {
private _audio: HTMLAudioElement;
private _audioContext: AudioContext;
private _buffer: AudioBuffer | undefined = undefined;
private _sourceNode: AudioBufferSourceNode | undefined = undefined;
private _gainNode: GainNode;
private _isPlayingOnWebAudio: boolean = false;
private _webAudioStartTime: number = 0;
private _webAudioPausedAt: number = 0;
private _analyser: AnalyserNode;
private _enableGaplessPlayback: boolean = true;
private _isPaused: boolean;
private _pausedWhilePlayingOnHtml5Audio: boolean = false;
private _nextAudioPath: string = '';

public constructor(
private mathExtensions: MathExtensions,
private settings: SettingsBase,
private logger: Logger,
) {
this._enableGaplessPlayback = this.settings.enableGaplessPlayback;
this._audio = new Audio();
this._audio.preload = 'auto';
this._audioContext = new AudioContext();
this._gainNode = this._audioContext.createGain();
this._gainNode.connect(this._audioContext.destination);

this._analyser = this._audioContext.createAnalyser();
this._analyser.fftSize = 128;

if (!this._enableGaplessPlayback) {
const mediaElementSource: MediaElementAudioSourceNode = this._audioContext.createMediaElementSource(this._audio);
this._analyser.connect(this._audioContext.destination);
mediaElementSource!.connect(this._analyser);
}

try {
// This fails during unit tests because setSinkId() does not exist on HTMLAudioElement
// eslint-disable-next-line @typescript-eslint/ban-ts-comment
// @ts-ignore
// eslint-disable-next-line @typescript-eslint/no-unsafe-call
this._audio.setSinkId('default');
} catch (e: unknown) {
// Suppress this error, but log it, in case it happens in production.
this.logger.error(e, 'Could not perform setSinkId()', 'AudioPlayer', 'constructor');
}

this._audio.defaultPlaybackRate = 1;
this._audio.playbackRate = 1;
this._audio.volume = 1;
this._audio.muted = false;

this._gainNode.gain.setValueAtTime(1, 0);

this._audio.onended = () => this.playbackFinished.next();
}

private playbackFinished: Subject<void> = new Subject();
public playbackFinished$: Observable<void> = this.playbackFinished.asObservable();

public get analyser(): AnalyserNode {
return this._analyser;
}

public get audio(): HTMLAudioElement {
return this._audio;
}

public get progressSeconds(): number {
if (this._isPlayingOnWebAudio) {
return this._audioContext.currentTime - this._webAudioStartTime;
} else {
if (isNaN(this.audio.currentTime)) {
return 0;
}

return this.audio.currentTime;
}
}

public get totalSeconds(): number {
if (this._isPlayingOnWebAudio) {
return this._buffer?.duration || 0;
} else {
if (isNaN(this.audio.duration)) {
return 0;
}

return this.audio.duration;
}
}

public get isPaused(): boolean {
return this._isPaused;
}

public play(audioFilePath: string): void {
this._isPlayingOnWebAudio = false;

if (audioFilePath !== this._nextAudioPath) {
const playableAudioFilePath: string = this.replaceUnplayableCharacters(audioFilePath);
this.audio.src = 'file:///' + playableAudioFilePath;
}

this.audio.play();

if (this._enableGaplessPlayback) {
const playableAudioFilePath: string = this.replaceUnplayableCharacters(audioFilePath);
this.loadAudioWithWebAudio(playableAudioFilePath);
}
}

public stop(): void {
if (this._isPlayingOnWebAudio) {
if (this._sourceNode) {
this._sourceNode.onended = () => {};
this._sourceNode.stop();
this._sourceNode.disconnect();
}
} else {
this.audio.currentTime = 0;
this.audio.pause();
}
}

public pause(): void {
if (this._isPlayingOnWebAudio) {
this._webAudioPausedAt = this._audioContext.currentTime - this._webAudioStartTime;

if (this._sourceNode) {
this._sourceNode.onended = () => {};
this._sourceNode.stop();
this._sourceNode.disconnect();
}
} else {
this.audio.pause();
this._pausedWhilePlayingOnHtml5Audio = true;
}
}

public resume(): void {
this._pausedWhilePlayingOnHtml5Audio = false;

if (this._isPlayingOnWebAudio) {
this.playWebAudio(this._webAudioPausedAt);
} else {
PromiseUtils.noAwait(this.audio.play());
}
}

public setVolume(linearVolume: number): void {
// log(0) is undefined. So we provide a minimum of 0.01.
const logarithmicVolume: number = linearVolume > 0 ? this.mathExtensions.linearToLogarithmic(linearVolume, 0.01, 1) : 0;
this.audio.volume = logarithmicVolume;
this._gainNode.gain.setValueAtTime(logarithmicVolume, 0);
}

public mute(): void {
@@ -92,7 +174,11 @@ export class AudioPlayer implements AudioPlayerBase {
}

public skipToSeconds(seconds: number): void {
if (this._isPlayingOnWebAudio) {
this.playWebAudio(seconds);
} else {
this.audio.currentTime = seconds;
}
}

private replaceUnplayableCharacters(audioFilePath: string): string {
@@ -101,4 +187,88 @@ export class AudioPlayer implements AudioPlayerBase {
playableAudioFilePath = StringUtils.replaceAll(playableAudioFilePath, '?', '%3F');
return playableAudioFilePath;
}

private async fetchAudioFile(url: string): Promise<Blob> {
const response = await fetch(url);
if (!response.ok) {
throw new Error(`Failed to fetch audio file: ${response.statusText}`);
}
return await response.blob(); // Convert the response to a Blob
}

private async loadAudioWithWebAudio(audioFilePath: string): Promise<void> {
this.fetchAudioFile(audioFilePath)
.then((blob) => {
const reader = new FileReader();
reader.readAsArrayBuffer(blob);
reader.onloadend = async () => {
const arrayBuffer = reader.result as ArrayBuffer;
this._buffer = await this._audioContext.decodeAudioData(arrayBuffer);
this.switchToWebAudio();
};
})
.catch((error) => console.error(error));
}

private playWebAudio(offset: number): void {
if (!this._buffer) {
return;
}

if (this._pausedWhilePlayingOnHtml5Audio) {
this._pausedWhilePlayingOnHtml5Audio = false;
return;
}

try {
// Make sure to stop any previous sourceNode if it's still playing
if (this._sourceNode) {
this._sourceNode.onended = () => {};

this._sourceNode.stop();
this._sourceNode.disconnect(); // Disconnect the previous node to avoid issues
}

// Create a new buffer source node
this._sourceNode = this._audioContext.createBufferSource();
this._sourceNode.buffer = this._buffer;

// Connect the source to the analyser
this._sourceNode.connect(this._analyser);

// Connect the source node to the gain node
this._sourceNode.connect(this._gainNode);

this._sourceNode.onended = () => {
this.playbackFinished.next();
};

this._isPlayingOnWebAudio = true;

// Store the current time when audio starts playing
this._webAudioStartTime = this._audioContext.currentTime - offset;

// Sync playback position with HTML5 Audio
this._sourceNode.start(0, offset);
} catch (error) {}
}

private switchToWebAudio(): void {
// Get the current position of HTML5 audio
const currentTime = this._audio.currentTime;

// Pause the HTML5 Audio
this._audio.pause();

this.playWebAudio(currentTime);
}

public preloadNextTrack(audioFilePath: string): void {
if (this._isPlayingOnWebAudio) {
this._nextAudioPath = audioFilePath;
const playableAudioFilePath: string = this.replaceUnplayableCharacters(audioFilePath);
this._audio.src = 'file:///' + playableAudioFilePath;
this._audio.load();
}
}
}
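
Reviewer note (not part of the diff): the handoff implemented above, reduced to a standalone sketch to make the flow easier to review. Differences from the PR are deliberate simplifications: the PR decodes via fetch → Blob → FileReader, keeps the AnalyserNode in the chain, and tracks pause state; this sketch calls response.arrayBuffer() directly and assumes an Electron renderer where file:/// URLs are fetchable.

// Start instantly on an <audio> element, decode in the background, then continue
// at the same offset on an AudioBufferSourceNode, whose start() can be scheduled
// sample-accurately (which is what makes gapless transitions possible).
async function playThenSwitchToWebAudio(audioFilePath: string): Promise<void> {
    const context: AudioContext = new AudioContext();
    const gain: GainNode = context.createGain();
    gain.connect(context.destination);

    // 1. Instant start on HTML5 audio.
    const audio: HTMLAudioElement = new Audio('file:///' + audioFilePath);
    await audio.play();

    // 2. Decode the same file into an AudioBuffer while the element keeps playing.
    const response: Response = await fetch('file:///' + audioFilePath);
    const buffer: AudioBuffer = await context.decodeAudioData(await response.arrayBuffer());

    // 3. Hand over: pause the element and resume from the same position on Web Audio.
    const offset: number = audio.currentTime;
    audio.pause();

    const source: AudioBufferSourceNode = context.createBufferSource();
    source.buffer = buffer;
    source.connect(gain);
    source.start(0, offset);
}

Because output in the Web Audio path goes through the GainNode rather than the media element, setVolume() above now applies the logarithmic volume to both the element and the gain node.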
26 changes: 6 additions & 20 deletions src/app/services/playback/audio-visualizer.ts
@@ -6,9 +6,7 @@ import { RgbColor } from '../../common/rgb-color';

@Injectable()
export class AudioVisualizer {
private dataArray: Uint8Array;
private canvas: HTMLCanvasElement;
private canvasContext: CanvasRenderingContext2D;
private isStopped: boolean;
@@ -18,32 +16,20 @@ export class AudioVisualizer {
private audioPlayer: AudioPlayerBase,
private appearanceService: AppearanceServiceBase,
private settings: SettingsBase,
) {}

public connectAudioElement(): void {
this.dataArray = new Uint8Array(this.audioPlayer.analyser.frequencyBinCount);
this.analyze();
}

public connectCanvas(canvas: HTMLCanvasElement): void {
this.canvas = canvas;
this.canvasContext = this.canvas.getContext('2d') as CanvasRenderingContext2D;
}

private shouldStopDelayed(): boolean {
return this.audioPlayer.isPaused;
}

private shouldStopNow(): boolean {
@@ -53,7 +39,7 @@
private analyze(): void {
setTimeout(
() => {
this.audioPlayer.analyser.getByteFrequencyData(this.dataArray);
this.draw();

requestAnimationFrame(() => this.analyze());
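
Reviewer note (not part of the diff): with the visualizer now reading the analyser owned by AudioPlayer, one frame of the loop reduces to roughly the sketch below. The bar layout is illustrative, and the real draw() implementation is outside this diff.

// Illustrative frame: pull byte frequency data from the shared analyser and draw bars.
function drawFrame(analyser: AnalyserNode, canvas: HTMLCanvasElement): void {
    const data: Uint8Array = new Uint8Array(analyser.frequencyBinCount); // 64 bins for fftSize = 128
    analyser.getByteFrequencyData(data);

    const context: CanvasRenderingContext2D = canvas.getContext('2d') as CanvasRenderingContext2D;
    context.clearRect(0, 0, canvas.width, canvas.height);

    const barWidth: number = canvas.width / data.length;

    for (let i: number = 0; i < data.length; i++) {
        const barHeight: number = (data[i] / 255) * canvas.height;
        context.fillRect(i * barWidth, canvas.height - barHeight, barWidth - 1, barHeight);
    }
}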