Skip to content

Commit

Permalink
Merge pull request #45 from afkcodes/feature/v1.0.9
Browse files Browse the repository at this point in the history
Feature/v1.0.9
  • Loading branch information
afkcodes authored Jun 21, 2024
2 parents 5f5b4d4 + 4d9cb76 commit 144e610
Show file tree
Hide file tree
Showing 9 changed files with 148 additions and 32 deletions.
25 changes: 20 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -35,10 +35,9 @@ For a comprehensive list of formats support visit [MDN audio codec guide](https:
- Casting support
- Dash media playback
- DRM
- ~~Equalizer~~ [] Done
- Updates to APIs for better DX
- React hooks to easily get started with React.
- Ads Support
- ~~Equalizer~~ [] Done
- ~~Updates to APIs for better DX~~ [] Done
- ~~Queue Support~~ [] Done

### Installation
Expand Down Expand Up @@ -104,8 +103,8 @@ audio.play();
audio.pause();

// Get the Audio State
audio.subscribe("AUDIO_X_STATE", (data: AudioState) => {
console.log(data);
audio.subscribe("AUDIO_X_STATE", (audioState: AudioState) => {
console.log(audioState);
});

// Sample Audio State
Expand Down Expand Up @@ -270,6 +269,10 @@ audio.addMediaAndPlay(null, async (currentTrack: MediaTrack) => {
// To add a single track to the queue
audio.addToQueue(track);
// To add multiple tracks to the queue
audio.addToQueue([track1, track2, track3]);
```
```JS
Expand All @@ -290,6 +293,18 @@ audio.playNext();
audio.playPrevious();
```
```JS
// To seek to a particular position
audio.seek(position); // position is basically time in seconds
```
```JS
// To seek forward or backward by a given amount of time
audio.seekBy(time); // time in seconds to seek by, relative to the current position
```
### Author
---
Expand Down
3 changes: 3 additions & 0 deletions dist/index.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3353,6 +3353,7 @@ declare class AudioX {
private eqStatus;
private isEqEnabled;
private eqInstance;
private showNotificationsActions;
constructor();
init(initProps: AudioInit): Promise<void>;
addMedia(mediaTrack: MediaTrack): Promise<void>;
Expand All @@ -3366,6 +3367,7 @@ declare class AudioX {
setPlaybackRate(playbackRate: PlaybackRate): void;
mute(): void;
seek(time: number): void;
seekBy(time: number): void;
destroy(): Promise<void>;
subscribe(eventName: string, callback: (data: any) => void, state?: any): () => void;
addEventListener(event: keyof HTMLMediaElementEventMap, callback: (data: any) => void): void;
Expand All @@ -3381,6 +3383,7 @@ declare class AudioX {
playNext(): void;
playPrevious(): void;
clearQueue(): void;
addToQueue(mediaTracks: MediaTrack | MediaTrack[]): void;
removeFromQueue(mediaTrack: MediaTrack): void;
getQueue(): MediaTrack[];
get id(): string | null;
Expand Down
4 changes: 2 additions & 2 deletions dist/index.js

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion dist/index.js.map

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "audio_x",
"version": "1.0.8",
"version": "1.0.9",
"description": "The audio player for the gen-x",
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
Expand Down
24 changes: 24 additions & 0 deletions src/audio.ts
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ class AudioX {
private eqStatus: EqualizerStatus = 'IDEAL';
private isEqEnabled: boolean = false;
private eqInstance: Equalizer;
private showNotificationsActions: boolean = false;

constructor() {
if (AudioX._instance) {
Expand Down Expand Up @@ -112,6 +113,7 @@ class AudioX {
}

if (showNotificationActions) {
this.showNotificationsActions = true;
attachMediaSessionHandlers();
}

Expand Down Expand Up @@ -211,6 +213,7 @@ class AudioX {
this._fetchFn = fetchFn;
await fetchFn(currentTrack as MediaTrack);
}

if (this._queue && isValidArray(this._queue)) {
this._currentQueueIndex = this._queue.findIndex(
(track) => track.id === currentTrack?.id
Expand Down Expand Up @@ -294,6 +297,13 @@ class AudioX {
}
}

/**
 * Seeks relative to the current playback position.
 * @param time - offset in seconds; positive seeks forward, negative seeks backward.
 */
seekBy(time: number) {
  // Check only the instance: a truthy check on currentTime would wrongly
  // skip seeking whenever playback is exactly at position 0.
  if (audioInstance) {
    // Clamp at 0 so a large negative offset cannot request a negative position.
    audioInstance.currentTime = Math.max(audioInstance.currentTime + time, 0);
  }
}

async destroy() {
if (audioInstance) {
await this.reset();
Expand Down Expand Up @@ -343,6 +353,10 @@ class AudioX {
break;
}
handleQueuePlayback();
// Re-attach the MediaSession handlers so the next/previous buttons show up in the notification
if (this.showNotificationsActions) {
attachMediaSessionHandlers();
}
}

playNext() {
Expand Down Expand Up @@ -371,6 +385,16 @@ class AudioX {
}
}

/**
 * Appends one track or an array of tracks to the playback queue.
 * @param mediaTracks - a single MediaTrack or an array of MediaTracks.
 */
addToQueue(mediaTracks: MediaTrack | MediaTrack[]) {
  const tracks = Array.isArray(mediaTracks) ? mediaTracks : [mediaTracks];
  if (this._queue && isValidArray(this._queue)) {
    this._queue = [...this._queue, ...tracks];
  } else {
    // Previously tracks were silently dropped when the queue was empty or
    // uninitialized; start a fresh queue instead so the add always takes effect.
    // NOTE(review): confirm no other queue bookkeeping (e.g. current index)
    // must be initialized alongside _queue.
    this._queue = [...tracks];
  }
}

removeFromQueue(mediaTrack: MediaTrack) {
if (this._queue && isValidArray(this._queue)) {
const queue = this._queue.filter(
Expand Down
79 changes: 62 additions & 17 deletions src/events/baseEvents.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import { PLAYBACK_STATE } from 'constants/common';
import {
calculateActualPlayedLength,
getBufferedDuration,
getReadableErrorMessage
} from 'helpers/common';
import ChangeNotifier from 'helpers/notifier';
Expand All @@ -12,20 +13,25 @@ const notifier = ChangeNotifier;
const BASE_EVENT_CALLBACK_MAP: EventListenerCallbackMap = {
LOAD_START: (e, audioInstance: HTMLAudioElement) => {
console.log('STATUS', e.type);
const bufferedDuration = getBufferedDuration(audioInstance);

notifier.notify(
'AUDIO_STATE',
{
playbackState: PLAYBACK_STATE.BUFFERING,
duration: audioInstance?.duration,
error: { code: null, message: '', readable: '' }
error: { code: null, message: '', readable: '' },
bufferedDuration
},
`audiox_baseEvents_state_${e.type}`
);
},

DURATION_CHANGE: (e, audioInstance: HTMLAudioElement) => {
const audioState = notifier.getLatestState('AUDIO_X_STATE') as AudioState;
console.log('STATUS', e.type);
const audioState = notifier.getLatestState('AUDIO_X_STATE') as AudioState;
const bufferedDuration = getBufferedDuration(audioInstance);

notifier.notify(
'AUDIO_STATE',
{
Expand All @@ -34,63 +40,81 @@ const BASE_EVENT_CALLBACK_MAP: EventListenerCallbackMap = {
? PLAYBACK_STATE.PLAYING // fix for live streams where duration change is fired even when audio is playing
: PLAYBACK_STATE.DURATION_CHANGE,
duration: audioInstance?.duration,
error: { code: null, message: '', readable: '' }
error: { code: null, message: '', readable: '' },
bufferedDuration
},
`audiox_baseEvents_state_${e.type}`
);
},

LOADED_META_DATA: (e: Event, audioInstance: HTMLAudioElement) => {
console.log('STATUS', e.type);
const bufferedDuration = getBufferedDuration(audioInstance);

notifier.notify(
'AUDIO_STATE',
{
playbackState: PLAYBACK_STATE.BUFFERING,
duration: audioInstance?.duration,
error: { code: null, message: '', readable: '' }
error: { code: null, message: '', readable: '' },
bufferedDuration
},
`audiox_baseEvents_state_${e.type}`
);
},

LOADED_DATA: (e, audioInstance: HTMLAudioElement) => {
console.log('STATUS', e.type);
const bufferedDuration = getBufferedDuration(audioInstance);
notifier.notify(
'AUDIO_STATE',
{
playbackState: PLAYBACK_STATE.BUFFERING,
duration: audioInstance?.duration,
error: { code: null, message: '', readable: '' }
error: { code: null, message: '', readable: '' },
bufferedDuration
},
`audiox_baseEvents_state_${e.type}`
);
},

CAN_PLAY: (e: Event) => {
CAN_PLAY: (e: Event, audioInstance: HTMLAudioElement) => {
console.log('STATUS', e.type);
const audioState = notifier.getLatestState('AUDIO_X_STATE') as AudioState;
const bufferedDuration = getBufferedDuration(audioInstance);

notifier.notify(
'AUDIO_STATE',
{
playbackState: PLAYBACK_STATE.READY,
error: { code: null, message: '', readable: '' }
},
playbackState:
audioState.playbackState === 'paused'
? PLAYBACK_STATE.PAUSED
: PLAYBACK_STATE.READY,
error: { code: null, message: '', readable: '' },
bufferedDuration
} as AudioState,
`audiox_baseEvents_state_${e.type}`
);
},

CAN_PLAY_THROUGH: (e: Event) => {
const audioState = notifier.getLatestState('AUDIO_X_STATE') as AudioState;
CAN_PLAY_THROUGH: (e: Event, audioInstance: HTMLAudioElement) => {
console.log('STATUS', e.type);

const audioState = notifier.getLatestState('AUDIO_X_STATE') as AudioState;
const isPaused = audioInstance.paused;
const bufferedDuration = getBufferedDuration(audioInstance);

// If the audio was already paused, keep it paused instead of moving to READY; this makes sure READY is fired only on the first load.
notifier.notify(
'AUDIO_STATE',
{
playbackState:
audioState.playbackState === 'playing'
? PLAYBACK_STATE.PLAYING // fix for live streams as canplaythrough event is can be behave weirdly as there is no known end to the media
: PLAYBACK_STATE.READY,
error: { code: null, message: '', readable: '' }
playbackState: isPaused
? PLAYBACK_STATE.PAUSED
: audioState.playbackState === 'playing'
? PLAYBACK_STATE.PLAYING // fix for live streams as canplaythrough event is can be behave weirdly as there is no known end to the media
: PLAYBACK_STATE.READY,
error: { code: null, message: '', readable: '' },
bufferedDuration
},
`audiox_baseEvents_state_${e.type}`
);
Expand Down Expand Up @@ -175,6 +199,7 @@ const BASE_EVENT_CALLBACK_MAP: EventListenerCallbackMap = {
TIME_UPDATE: (e: Event, audioInstance: HTMLAudioElement) => {
console.log('STATUS', e.type);
const audioState = notifier.getLatestState('AUDIO_X_STATE') as AudioState;
const bufferedDuration = getBufferedDuration(audioInstance);

notifier.notify(
'AUDIO_STATE',
Expand All @@ -183,7 +208,8 @@ const BASE_EVENT_CALLBACK_MAP: EventListenerCallbackMap = {
? audioState?.playbackState
: PLAYBACK_STATE.PLAYING,
progress: audioInstance?.currentTime,
error: { code: null, message: '', readable: '' }
error: { code: null, message: '', readable: '' },
bufferedDuration
},
`audiox_baseEvents_state_${e.type}`
);
Expand All @@ -205,6 +231,25 @@ const BASE_EVENT_CALLBACK_MAP: EventListenerCallbackMap = {
// Fired when the element's volume or muted state changes; notifies
// AUDIO_STATE subscribers with an empty payload.
VOLUME_CHANGE: (e: Event) => {
  console.log('STATUS', e.type);
  // NOTE(review): other handlers suffix the caller id with `_${e.type}`;
  // this one sends the bare `audiox_baseEvents_state` — confirm intentional.
  notifier.notify('AUDIO_STATE', {}, `audiox_baseEvents_state`);
},

// Fired after a seek completes; publishes the post-seek progress and the
// buffered duration while leaving the current playback state unchanged.
SEEKED: (e: Event, audioInstance: HTMLAudioElement) => {
  const audioState = notifier.getLatestState('AUDIO_X_STATE') as AudioState;
  const bufferedDuration = getBufferedDuration(audioInstance);

  notifier.notify(
    'AUDIO_STATE',
    {
      // The previous ternary ("paused" ? 'paused' : playbackState) was a
      // no-op — both branches yield the existing playbackState.
      playbackState: audioState.playbackState,
      progress: audioInstance?.currentTime,
      error: { code: null, message: '', readable: '' },
      bufferedDuration
    } as AudioState,
    `audiox_baseEvents_state_${e.type}`
  );
}
};

Expand Down
14 changes: 13 additions & 1 deletion src/helpers/common.ts
Original file line number Diff line number Diff line change
Expand Up @@ -120,11 +120,11 @@ const loadScript = (

const handleQueuePlayback = () => {
const audio = new AudioX();
const queue = audio.getQueue();
let hasEnded = false;

const audioStateListener = (state: AudioState) => {
if (state.playbackState === 'ended' && !hasEnded) {
const queue = audio.getQueue();
hasEnded = true;
if (queue && isValidArray(queue)) {
audio.playNext();
Expand All @@ -138,6 +138,17 @@ const handleQueuePlayback = () => {
ChangeNotifier.listen('AUDIO_STATE', audioStateListener);
};

/**
 * Total number of seconds currently buffered by the media element,
 * computed as the sum of (end - start) over every buffered time range.
 */
const getBufferedDuration = (audioInstance: HTMLAudioElement) => {
  const ranges = audioInstance.buffered;
  let total = 0;

  for (let rangeIndex = 0; rangeIndex < ranges.length; rangeIndex++) {
    total += ranges.end(rangeIndex) - ranges.start(rangeIndex);
  }

  return total;
};

const shuffle = <T>(array: T[]): T[] => {
const shuffledArray = [...array];

Expand All @@ -151,6 +162,7 @@ const shuffle = <T>(array: T[]): T[] => {
};

export {
getBufferedDuration,
getReadableErrorMessage,
handleQueuePlayback,
isValidArray,
Expand Down
Loading

0 comments on commit 144e610

Please sign in to comment.