export enum SubscribeEvents {
    CurrentTimeTick,
    FormattedDurationTick,
    FormattedCurrentTimeTick,
}

class PubSub {
    // el = event listeners
    el_current_time_tick: Array<(data: any) => void> = []
    el_formatted_duration_tick: Array<(data: any) => void> = []
    el_formatted_current_time_tick: Array<(data: any) => void> = []

    subscribe(event_name: SubscribeEvents, func: (data: any) => void) {
        switch (event_name) {
            case SubscribeEvents.CurrentTimeTick: {
                this.el_current_time_tick.push(func)
                break
            }
            case SubscribeEvents.FormattedDurationTick: {
                this.el_formatted_duration_tick.push(func)
                break
            }
            case SubscribeEvents.FormattedCurrentTimeTick: {
                this.el_formatted_current_time_tick.push(func)
                break
            }
        }
    }

    unsubscribe(event_name: SubscribeEvents, func: (data: any) => void) {
        switch (event_name) {
            case SubscribeEvents.CurrentTimeTick: {
                if (this.el_current_time_tick.includes(func)) {
                    this.el_current_time_tick.splice(this.el_current_time_tick.indexOf(func), 1)
                }
                break
            }
            case SubscribeEvents.FormattedDurationTick: {
                if (this.el_formatted_duration_tick.includes(func)) {
                    this.el_formatted_duration_tick.splice(this.el_formatted_duration_tick.indexOf(func), 1)
                }
                break
            }
            case SubscribeEvents.FormattedCurrentTimeTick: {
                if (this.el_formatted_current_time_tick.includes(func)) {
                    this.el_formatted_current_time_tick.splice(this.el_formatted_current_time_tick.indexOf(func), 1)
                }
                break
            }
        }
    }

    emit(event_name: SubscribeEvents, data: any) {
        switch (event_name) {
            case SubscribeEvents.CurrentTimeTick: {
                this.el_current_time_tick.forEach((func) => {
                    func(data)
                })
                break
            }
            case SubscribeEvents.FormattedDurationTick: {
                this.el_formatted_duration_tick.forEach((func) => {
                    func(data)
                })
                break
            }
            case SubscribeEvents.FormattedCurrentTimeTick: {
                this.el_formatted_current_time_tick.forEach((func) => {
                    func(data)
                })
                break
            }
        }
    }
}
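
/*
 * PubSub above is the internal event bus used by MusicPlayer below: plain callbacks
 * keyed by SubscribeEvents. A minimal sketch of how it is driven (it is not exported,
 * so this is internal-only usage):
 *
 *   const bus = new PubSub()
 *   const log = (t: number) => console.log("current time:", t)
 *   bus.subscribe(SubscribeEvents.CurrentTimeTick, log)
 *   bus.emit(SubscribeEvents.CurrentTimeTick, 12.5)   // calls log(12.5)
 *   bus.unsubscribe(SubscribeEvents.CurrentTimeTick, log)
 */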

/* For old browsers */
declare global {
    interface Window {
        webkitAudioContext: typeof AudioContext
    }
}

export class MusicPlayer {
    current_song_duration = 0
    #volume_cache: number
    is_playing = false
    time = 0
    #pub_sub = new PubSub()

    constructor(
        public audio_context: AudioContext,
        private audio_element: HTMLAudioElement,
        public track: MediaElementAudioSourceNode,
        public gain: GainNode,
        public volume: number,
        private current_song_path?: string) {
        this.#volume_cache = volume
    }

    mute_toggle() {
        if (this.gain.gain.value == 0) {
            this.unmute()
        } else {
            this.mute()
        }
    }

    mute() {
        this.#volume_cache = this.gain.gain.value
        /* Gentler mute that doesn't pop:
        gain.gain.linearRampToValueAtTime(
            0,
            audio_context.currentTime + 0.1
        ); */
        this.volume = this.gain.gain.value = 0
    }

    unmute() {
        this.volume = this.gain.gain.value = this.#volume_cache
    }

    change_volume(volume_i: number) {
        this.volume = this.gain.gain.value = volume_i
    }
    /**
     * Safer seek_async. A plain seek will try to move the playhead even if the track hasn't started yet, or was previously suspended/closed.
     */
    try_seek_async(new_time: number) {
        return new Promise((resolve, reject) => {
            if (this.track.context.state == "closed" || this.track.context.state == "suspended") {
                this.is_playing = false
                reject(new Error("Can't seek - track not playing"))
                return
            }
            this.audio_element.currentTime = new_time
            resolve(null)
        })
    }
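
    /*
     * Usage sketch for try_seek_async (the player instance is assumed to exist in caller code):
     *
     *   player.try_seek_async(42)
     *       .then(() => console.log("seeked to 42s"))
     *       .catch((e) => console.error(e))   // rejects if the context is closed/suspended
     */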

    /**
     * Unsafe, throws an error on failure. Use try_seek_async or seek_async unless you don't care about the result.
     */
    seek(new_time: number) {
        this.audio_element.currentTime = new_time
    }

    /**
     * Safer play_toggle_async. A plain play_toggle will try to start the player even if the track hasn't started yet, or was previously suspended/closed.
     */
    try_play_toggle_async() {
        return new Promise((resolve, reject) => {
            if (this.audio_context.state === "suspended" || this.audio_context.state === "closed") {
                this.audio_context.resume().then(undefined, (e) =>
                    reject(new Error("Context closed or suspended: " + JSON.stringify(e))))
            }
            if (this.audio_element.paused) {
                this.audio_element.play().then((s) => {
                    this.is_playing = true
                    resolve(s)
                }, (r) => {
                    this.is_playing = false
                    reject(new Error("Failed to play audio element: " + JSON.stringify(r)))
                })
            } else {
                this.audio_element.pause()
                this.is_playing = false
                resolve(null)
            }
        })
    }
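
    /*
     * Usage sketch, e.g. wired to a play/pause button (the button element is hypothetical):
     *
     *   play_button.addEventListener("click", () => {
     *       player.try_play_toggle_async().catch((e) => console.error(e))
     *   })
     */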

    /**
     * Will try to play even if the audio context was suspended or closed. Best to use try_play_toggle_async().
     */
    play_toggle_async() {
        return new Promise((resolve, reject) => {
            if (this.audio_context.state === "suspended" || this.audio_context.state === "closed") {
                this.audio_context.resume()
            }
            if (this.audio_element.paused) {
                this.audio_element.play().then((s) => {
                    this.is_playing = true
                    resolve(s)
                }, (r) => {
                    this.is_playing = false
                    reject(new Error(JSON.stringify(r)))
                })
            } else {
                this.audio_element.pause()
                this.is_playing = false
                resolve(null)
            }
        })
    }

    /**
     * Unsafe, throws an error on failure. Use play_toggle_async or try_play_toggle_async unless you don't care about the result.
     */
    play_toggle() {
        if (this.audio_element.paused) {
            this.is_playing = true
            this.audio_element.play().catch((r) => {
                this.is_playing = false
                throw r
            })
        } else {
            this.is_playing = false
            this.audio_element.pause()
        }
    }

    /**
     * Safer play_async. A plain play will try to start the player even if the track hasn't started yet, or was previously suspended/closed.
     */
    try_play_async() {
        return new Promise((resolve, reject) => {
            if (this.is_playing) return resolve(Error("Already playing"))
            if (this.audio_context.state === "suspended" || this.audio_context.state === "closed") {
                this.audio_context.resume().then(() => {
                    this.audio_element.play().then((s) => {
                        this.is_playing = true
                        resolve(s)
                    }, (r) => {
                        this.is_playing = false
                        reject(new Error(JSON.stringify(r)))
                    })
                }, (e) =>
                    reject(new Error("Context closed or suspended: " + JSON.stringify(e))))
            } else {
                this.audio_element.play().then((s) => {
                    this.is_playing = true
                    resolve(s)
                }, (r) => {
                    this.is_playing = false
                    reject(new Error(JSON.stringify(r)))
                })
            }
        })
    }

    /**
     * Will try to play even if the audio context was suspended or closed. Best to use try_play_async().
     */
    play_async() {
        return new Promise((resolve, reject) => {
            if (this.is_playing) return resolve(Error("Already playing"))
            this.audio_element.play().then((s) => {
                this.is_playing = true
                resolve(s)
            }, (r) => {
                this.is_playing = false
                reject(new Error(JSON.stringify(r)))
            })
        })
    }

    /**
     * Unsafe, throws an error on failure. Use play_async or try_play_async unless you don't care about the result.
     */
    play() {
        if (this.is_playing) return
        this.audio_element.play().catch((r) => {
            this.is_playing = false
            throw new Error(r)
        })
    }

    /**
     * Technically safe. Even if the audioContext is suspended or closed it will pretend that it paused.
     */
    pause() {
        this.audio_element.pause()
        this.is_playing = false
    }

    /**
     * Will only load the metadata of the upcoming song. Call try_play_async() afterwards to start the playback.
     */
    try_new_song_async(path: string) {
        return new Promise((resolve, reject) => {
            this.audio_element.src = this.current_song_path = path

            // Found out today about this. Such a nice new way to mass remove event listeners!
            const controller = new AbortController();

            this.audio_element.addEventListener("canplay", function canplay_listener(s) {
                controller.abort()
                resolve(s)
            }, { signal: controller.signal })

            this.audio_element.addEventListener("error", function error_listener(e) {
                controller.abort()
                reject(new Error("Failed to load new song, error: " + JSON.stringify(e)))
            }, { signal: controller.signal })

            this.audio_element.addEventListener("stalled", function stalled_listener(e) {
                controller.abort()
                reject(new Error("Failed to load new song, stalled: " + JSON.stringify(e)))
            }, { signal: controller.signal })

            // Once aborted, try to set current_song_duration
            controller.signal.addEventListener("abort", () => {
                this.current_song_duration = this.audio_element.duration
            })

            this.is_playing = false
        })
    }
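
    /*
     * Usage sketch: load a new song, then start playback explicitly (the path is hypothetical):
     *
     *   await player.try_new_song_async("/songs/track.ogg")   // resolves on "canplay"
     *   await player.try_play_async()                          // playback starts here
     */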

    /**
     * Won't tell you whether the song actually loaded or failed. For a safer version use try_new_song_async() unless you don't care about the result.
     */
    new_song(path: string) {
        this.audio_element.src = this.current_song_path = path
        this.current_song_duration = this.audio_element.duration
    }

    /**
     * Parses the duration of the song into a format that is easy to display in a UI.
     * If something is undefined it returns "0:00".
     */
    get_formatted_duration() {
        const dur = this.audio_element.duration
        this.current_song_duration = this.audio_element.duration

        if (dur == 0 || !dur) return "0:00"

        // ~~ is a double bitwise NOT, equivalent to Math.floor() for positive numbers
        const hrs = ~~(dur / 3600);
        const mins = ~~((dur % 3600) / 60);
        const secs = ~~dur % 60;

        let ret = ""
        if (hrs > 0) {
            ret += "" + hrs + ":" + (mins < 10 ? "0" : "");
        }

        ret += "" + mins + ":" + (secs < 10 ? "0" : "");
        ret += "" + secs;
        return ret;
    }
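
    /*
     * Example outputs of the formatting above:
     *   duration 225.4 seconds -> "3:45"
     *   duration 3725 seconds  -> "1:02:05"
     */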

    /**
     * Parses the current time of the song into a format that is easy to display in a UI.
     * If something is undefined it returns "0:00".
     */
    get_formatted_current_time() {
        const curr = this.audio_element.currentTime

        if (curr == 0 || !curr) return "0:00"

        // ~~ is a double bitwise NOT, equivalent to Math.floor() for positive numbers
        const hrs = ~~(curr / 3600);
        const mins = ~~((curr % 3600) / 60);
        const secs = ~~curr % 60;

        let ret = ""
        if (hrs > 0) {
            ret += "" + hrs + ":" + (mins < 10 ? "0" : "");
        }

        ret += "" + mins + ":" + (secs < 10 ? "0" : "");
        ret += "" + secs;
        return ret;
    }

    #emit_time() {
        const request_id = requestAnimationFrame(this.#emit_time.bind(this))
        if (this.audio_element.ended) this.is_playing = false
        if (this.audio_element.paused) this.is_playing = false
        // In case the user reactively changed the volume directly
        this.gain.gain.value = this.volume

        this.time = this.audio_element.currentTime
        if (this.#pub_sub.el_current_time_tick.length == 0) cancelAnimationFrame(request_id)
        this.#pub_sub.emit(SubscribeEvents.CurrentTimeTick, this.time)
    }

    #emit_duration_fmt() {
        const request_id = requestAnimationFrame(this.#emit_duration_fmt.bind(this))
        const time = this.get_formatted_duration()
        if (this.#pub_sub.el_formatted_duration_tick.length == 0) cancelAnimationFrame(request_id)
        this.#pub_sub.emit(SubscribeEvents.FormattedDurationTick, time)
    }

    #emit_time_fmt() {
        const request_id = requestAnimationFrame(this.#emit_time_fmt.bind(this))
        const time = this.get_formatted_current_time()
        if (this.#pub_sub.el_formatted_current_time_tick.length == 0) cancelAnimationFrame(request_id)
        this.#pub_sub.emit(SubscribeEvents.FormattedCurrentTimeTick, time)
    }

    /**
     * Will give the current time every animation frame.
     */
    on_time_tick(callback: (data: any) => void) {
        this.#pub_sub.subscribe(SubscribeEvents.CurrentTimeTick, callback)
        this.#emit_time()
    }

    /**
     * Will give the formatted current time via get_formatted_current_time() every animation frame.
     */
    on_time_tick_formatted(callback: (data: any) => void) {
        this.#pub_sub.subscribe(SubscribeEvents.FormattedCurrentTimeTick, callback)
        this.#emit_time_fmt()
    }

    /**
     * Will give the formatted duration via get_formatted_duration() every animation frame.
     */
    on_duration_formatted(callback: (data: any) => void) {
        this.#pub_sub.subscribe(SubscribeEvents.FormattedDurationTick, callback)
        this.#emit_duration_fmt()
    }
}
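
/*
 * Usage sketch for the tick subscriptions above (the UI elements are hypothetical):
 *
 *   player.on_time_tick((t) => progress_bar.value = t)
 *   player.on_time_tick_formatted((t) => current_time_label.textContent = t)
 *   player.on_duration_formatted((d) => duration_label.textContent = d)
 */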

export class MusicPlayerBuilder {
    #audio_context: AudioContext
    #gain: GainNode
    #track: MediaElementAudioSourceNode
    #volume = 1
    #prev_node: any;
    #is_gain_connected = false

    /**
     * Creates the audio context and the #gain node (which gets connected at the end of the build).
     * Will throw if audio_element is undefined (stupid vue setup amirite?)
     * Will throw if the user has not interacted with the page yet (can't initiate an AudioContext).
     */
    constructor(private audio_element: HTMLAudioElement) {
        if (audio_element === undefined) throw Error("audio_element was undefined")
        // ↓ For old browsers
        const AudioContext = window.AudioContext || window.webkitAudioContext;
        this.#audio_context = new AudioContext()
        this.#track = this.#audio_context.createMediaElementSource(audio_element)
        this.#gain = this.#audio_context.createGain()
    }

    /**
     * For external use, not kept inside the player after connection.
     * @returns {AnalyserNode}
     */
    add_analyser() {
        const analyser = this.#audio_context.createAnalyser()
        !this.#prev_node ? this.#track.connect(analyser) : this.#prev_node.connect(analyser)
        this.#prev_node = analyser
        return analyser
    }

    /**
     * For external use, not kept inside the player after connection.
     * @returns {StereoPannerNode}
     */
    add_stereo_panner_node() {
        const panner = this.#audio_context.createStereoPanner()
        !this.#prev_node ? this.#track.connect(panner) : this.#prev_node.connect(panner)
        this.#prev_node = panner
        return panner
    }

    /**
     * For external use, not kept inside the player after connection.
     * @returns {WaveShaperNode}
     */
    add_wave_shaper_node() {
        const shaper = this.#audio_context.createWaveShaper()
        !this.#prev_node ? this.#track.connect(shaper) : this.#prev_node.connect(shaper)
        this.#prev_node = shaper
        return shaper
    }

    /**
     * For additional trickery, you can connect your own node.
     */
    connect_custom_node(node: AudioNode) {
        !this.#prev_node ? this.#track.connect(node) : this.#prev_node.connect(node)
        this.#prev_node = node
    }

    /**
     * Only use this if you need to connect the #gain before another node,
     * e.g. if you want the analyser node's output to be affected by the user #gain.
     */
    connect_gain() {
        !this.#prev_node ? this.#track.connect(this.#gain) : this.#prev_node.connect(this.#gain)
        this.#prev_node = this.#gain
        this.#is_gain_connected = true
    }
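
    /*
     * Ordering sketch: call connect_gain() before add_analyser() if the analyser should see
     * gain-adjusted samples; otherwise build() appends the #gain after every other node:
     *
     *   builder.connect_gain()
     *   const analyser = builder.add_analyser()   // now reads post-gain audio
     */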

    /**
     * Finishes the build.
     * @returns {MusicPlayer}
     */
    build() {
        if (!this.#is_gain_connected) {
            !this.#prev_node ? this.#track.connect(this.#gain) : this.#prev_node.connect(this.#gain)
            this.#prev_node = this.#gain
        }
        this.#prev_node.connect(this.#audio_context.destination)
        this.audio_element.preload = "metadata"
        return new MusicPlayer(this.#audio_context, this.audio_element, this.#track, this.#gain, this.#volume)
    }
}
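
/*
 * End-to-end usage sketch (the element id and file path are hypothetical; must run after a
 * user gesture so the AudioContext is allowed to start):
 *
 *   const audio_el = document.querySelector("#audio") as HTMLAudioElement
 *   const builder = new MusicPlayerBuilder(audio_el)
 *   const player = builder.build()
 *   player.change_volume(0.8)
 *   await player.try_new_song_async("/songs/track.ogg")
 *   await player.try_play_async()
 */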