formatting, play_next when song ended

Djkáťo 2024-02-12 16:36:02 +01:00
parent ed2d865f2d
commit cd97233bce
19 changed files with 375 additions and 930 deletions

View file

@@ -8,5 +8,6 @@
 	"endOfLine": "lf",
 	"bracketSpacing": true,
 	"bracketSameLine": false,
-	"arrowParens": "always"
+	"arrowParens": "always",
+	"printWidth": 150
 }

View file

@@ -1,11 +1,4 @@
-import {
-	Artist,
-	Collection,
-	DB,
-	Ref,
-	RefTo,
-	Song
-} from "@euterpe.js/music-library"
+import { Artist, Collection, DB, Ref, RefTo, Song } from "@euterpe.js/music-library"
 
 export { DJSong, DJDB }
 type ID = number
@ -34,9 +27,7 @@ class DJSong extends Song {
try { try {
fetch(data.url).then((file) => { fetch(data.url).then((file) => {
file.arrayBuffer().then((buffer) => { file.arrayBuffer().then((buffer) => {
audio_context audio_context.decodeAudioData(buffer).then((audio_buffer) => {
.decodeAudioData(buffer)
.then((audio_buffer) => {
this.audio_buffer = audio_buffer this.audio_buffer = audio_buffer
}) })
}) })
@ -46,17 +37,13 @@ class DJSong extends Song {
} }
} }
public async analyze(url: URL, audio_context: AudioContext) { public async analyze(url: URL, audio_context: AudioContext) {
this.audio_buffer = await audio_context.decodeAudioData( this.audio_buffer = await audio_context.decodeAudioData(await (await fetch(url)).arrayBuffer())
await (await fetch(url)).arrayBuffer()
)
} }
} }
class DJDB extends DB { class DJDB extends DB {
dj_add(dj_songs: DJSong[]): void { dj_add(dj_songs: DJSong[]): void {
let inputs let inputs
typeof dj_songs[Symbol.iterator] == "function" typeof dj_songs[Symbol.iterator] == "function" ? (inputs = dj_songs) : (inputs = [dj_songs])
? (inputs = dj_songs)
: (inputs = [dj_songs])
for (const input of inputs) { for (const input of inputs) {
if (input instanceof DJSong) { if (input instanceof DJSong) {
const song = input as DJSong const song = input as DJSong
@ -65,16 +52,8 @@ class DJDB extends DB {
if (song.in_collection) { if (song.in_collection) {
const curr_col = song.in_collection.get(this) as Collection const curr_col = song.in_collection.get(this) as Collection
curr_col.songs.push(new Ref(RefTo.Songs, song.id)) curr_col.songs.push(new Ref(RefTo.Songs, song.id))
song.artists.forEach((artist) => song.artists.forEach((artist) => curr_col.artists.push(new Ref(RefTo.Artists, artist.get(this)!.id!)))
curr_col.artists.push( song.remix_artists.forEach((artist) => curr_col.artists.push(new Ref(RefTo.Artists, artist.get(this)!.id!)))
new Ref(RefTo.Artists, artist.get(this)!.id!)
)
)
song.remix_artists.forEach((artist) =>
curr_col.artists.push(
new Ref(RefTo.Artists, artist.get(this)!.id!)
)
)
} }
for (const artist_ref of song.artists) { for (const artist_ref of song.artists) {

View file

@ -11,22 +11,13 @@ class DJ {
/**in ms */ /**in ms */
beat_duration?: number beat_duration?: number
beat = { current: 0, max: 4, next_bar_in: 4 } beat = { current: 0, max: 4, next_bar_in: 4 }
on_beat?: (beat: { on_beat?: (beat: { current: number; max: number; next_bar_in: number }) => void
current: number constructor(public player: Euterpe | MusicPlayer, public master_bpm: number | 120) {
max: number
next_bar_in: number
}) => void
constructor(
public player: Euterpe | MusicPlayer,
public master_bpm: number | 120
) {
this.beat_duration = 60 / master_bpm this.beat_duration = 60 / master_bpm
this.#emit_beats() this.#emit_beats()
} }
#emit_beats() { #emit_beats() {
this.beat.current >= 4 this.beat.current >= 4 ? (this.beat.current++, this.beat.next_bar_in--) : ((this.beat.current = 0), (this.beat.next_bar_in = this.beat.max))
? (this.beat.current++, this.beat.next_bar_in--)
: ((this.beat.current = 0), (this.beat.next_bar_in = this.beat.max))
if (this.on_beat) this.on_beat(this.beat) if (this.on_beat) this.on_beat(this.beat)
//This makes it break if BPM >= 300!!!! //This makes it break if BPM >= 300!!!!
@ -92,17 +83,10 @@ class Track {
gain: GainNode gain: GainNode
audio_context: AudioContext | BaseAudioContext audio_context: AudioContext | BaseAudioContext
constructor( constructor(public player: MusicPlayer | Euterpe, public current_song?: Song, public should_loop?: boolean) {
public player: MusicPlayer | Euterpe,
public current_song?: Song,
public should_loop?: boolean
) {
this.audio_context = player.audio_context this.audio_context = player.audio_context
this.gain = this.audio_context.createGain() this.gain = this.audio_context.createGain()
if (current_song) if (current_song) this.change_song(current_song).catch((e) => console.error("error during track construction - " + e))
this.change_song(current_song).catch((e) =>
console.error("error during track construction - " + e)
)
} }
async #prepare() { async #prepare() {
@ -110,10 +94,7 @@ class Track {
if (!this.current_song) reject(new Error("No current song")) if (!this.current_song) reject(new Error("No current song"))
fetch(this.current_song!.url).then( fetch(this.current_song!.url).then(
async (file) => { async (file) => {
this.audio_buffer = this.audio_buffer = await this.audio_context.decodeAudioData(await file.arrayBuffer())
await this.audio_context.decodeAudioData(
await file.arrayBuffer()
)
resolve(this) resolve(this)
}, },
(reason) => reject(reason) (reason) => reject(reason)
@ -121,10 +102,7 @@ class Track {
}) })
} }
#connect() { #connect() {
if (!this.audio_buffer) if (!this.audio_buffer) throw new Error("Somehow buffer not in track even though it analyzed properly. Report this as a bug")
throw new Error(
"Somehow buffer not in track even though it analyzed properly. Report this as a bug"
)
this.buffer_source = this.audio_context.createBufferSource() this.buffer_source = this.audio_context.createBufferSource()
this.buffer_source.buffer = this.audio_buffer! this.buffer_source.buffer = this.audio_buffer!
this.buffer_source.connect(this.gain) this.buffer_source.connect(this.gain)
@ -149,11 +127,8 @@ class Track {
*/ */
async try_start(delay?: number) { async try_start(delay?: number) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
if (!this.buffer_source) if (!this.buffer_source) reject(new Error("No buffer source yet, set a song first"))
reject(new Error("No buffer source yet, set a song first")) this.buffer_source!.start(this.audio_context.currentTime + (delay || 0))
this.buffer_source!.start(
this.audio_context.currentTime + (delay || 0)
)
}) })
} }
} }

View file

@ -1,11 +1,4 @@
import { import { DB, Song, Artist, Ref, RefTo, Platforms } from "@euterpe.js/music-library"
DB,
Song,
Artist,
Ref,
RefTo,
Platforms
} from "@euterpe.js/music-library"
export const db = new DB() export const db = new DB()
db.add([ db.add([
@ -28,14 +21,7 @@ db.add([
}), }),
new Artist({ new Artist({
name: "IMANU", name: "IMANU",
links: [ links: [[Platforms.Spotify, new URL("https://open.spotify.com/artist/5Y7rFm0tiJTVDzGLMzz0W1?si=DRaZyugTTIqlBHDkMGKVqA&nd=1")]]
[
Platforms.Spotify,
new URL(
"https://open.spotify.com/artist/5Y7rFm0tiJTVDzGLMzz0W1?si=DRaZyugTTIqlBHDkMGKVqA&nd=1"
)
]
]
}), }),
new Artist({ new Artist({
name: "toe", name: "toe",
@ -45,33 +31,18 @@ db.add([
db.add([ db.add([
new Song({ new Song({
//Refrences are constructed as such. This allows to get to the artist from either collection or song //Refrences are constructed as such. This allows to get to the artist from either collection or song
artists: [ artists: [new Ref(RefTo.Artists, 2), new Ref(RefTo.Artists, 3), new Ref(RefTo.Artists, 4)],
new Ref(RefTo.Artists, 2),
new Ref(RefTo.Artists, 3),
new Ref(RefTo.Artists, 4)
],
duration: 252, duration: 252,
name: "Star", name: "Star",
remix_artists: [new Ref(RefTo.Artists, 5)], remix_artists: [new Ref(RefTo.Artists, 5)],
url: new URL( url: new URL("http://" + window.location.host + "/Machinedrum, Tanerelle & Mono Poly - Star (IMANU Remix) final.mp3")
"http://" +
window.location.host +
"/Machinedrum, Tanerelle & Mono Poly - Star (IMANU Remix) final.mp3"
)
}), }),
new Song({ new Song({
//If you don't like guessing the IDs, then this is also a way to do it //If you don't like guessing the IDs, then this is also a way to do it
artists: [ artists: [new Ref(RefTo.Artists, db.artists.find((a) => a.name == "Jamie xx")!.id!)],
new Ref(
RefTo.Artists,
db.artists.find((a) => a.name == "Jamie xx")!.id!
)
],
duration: 331, duration: 331,
name: "Sleep Sound", name: "Sleep Sound",
url: new URL( url: new URL("http://" + window.location.host + "/Jamie xx - Sleep Sound.mp3")
"http://" + window.location.host + "/Jamie xx - Sleep Sound.mp3"
)
}), }),
new Song({ new Song({
artists: [new Ref(RefTo.Artists, 1)], artists: [new Ref(RefTo.Artists, 1)],
@ -83,10 +54,6 @@ db.add([
artists: [new Ref(RefTo.Artists, 10)], artists: [new Ref(RefTo.Artists, 10)],
duration: 4 * 60 + 5, duration: 4 * 60 + 5,
name: "サニーボーイ・ラプソディ", name: "サニーボーイ・ラプソディ",
url: new URL( url: new URL("http://" + window.location.host + "/16.サニーボーイ・ラプソディ.ogg")
"http://" +
window.location.host +
"/16.サニーボーイ・ラプソディ.ogg"
)
}) })
]) ])

View file

@ -3,16 +3,12 @@ import { EuterpeBuilder } from "@euterpe.js/euterpe"
let is_seeking = false let is_seeking = false
// document.addEventListener("click", start, { once: true }) // document.addEventListener("click", start, { once: true })
const euterpe = new EuterpeBuilder( const euterpe = new EuterpeBuilder(document.querySelector("#audio")!, db).build()
document.querySelector("#audio")!,
db
).build()
add_library_to_dom() add_library_to_dom()
euterpe.try_preload_song(0).then( euterpe.try_preload_song(0).then(
() => { () => {
document.querySelector("#text-playing")!.innerHTML = document.querySelector("#text-playing")!.innerHTML = euterpe.format_current_song()
euterpe.format_current_song()
}, },
(e) => console.log(e + " Failed to preload") (e) => console.log(e + " Failed to preload")
) )
@ -46,8 +42,7 @@ euterpe.on_time_tick((time) => {
document.querySelector("#previous")?.addEventListener("click", () => { document.querySelector("#previous")?.addEventListener("click", () => {
euterpe.try_previous_song_looping().then( euterpe.try_previous_song_looping().then(
() => { () => {
document.querySelector("#text-playing")!.innerHTML = document.querySelector("#text-playing")!.innerHTML = euterpe.format_current_song()
euterpe.format_current_song()
}, },
(e) => alert(e + "Failed to change song") (e) => alert(e + "Failed to change song")
) )
@ -55,8 +50,7 @@ document.querySelector("#previous")?.addEventListener("click", () => {
document.querySelector("#next")?.addEventListener("click", () => { document.querySelector("#next")?.addEventListener("click", () => {
euterpe.try_next_song_looping().then( euterpe.try_next_song_looping().then(
() => { () => {
document.querySelector("#text-playing")!.innerHTML = document.querySelector("#text-playing")!.innerHTML = euterpe.format_current_song()
euterpe.format_current_song()
}, },
(e) => alert(e + "Failed to change song") (e) => alert(e + "Failed to change song")
) )
@ -78,9 +72,7 @@ document.querySelector("#toggle-mute")?.addEventListener("click", () => {
euterpe.mute_toggle() euterpe.mute_toggle()
}) })
document.querySelector("#toggle-play")?.addEventListener("click", () => { document.querySelector("#toggle-play")?.addEventListener("click", () => {
euterpe euterpe.try_play_toggle().catch((e) => alert("failed to toggle pause/play!" + e))
.try_play_toggle()
.catch((e) => alert("failed to toggle pause/play!" + e))
}) })
document.querySelector("#volume")?.addEventListener("input", (e) => { document.querySelector("#volume")?.addEventListener("input", (e) => {
euterpe.change_volume(e.target?.valueAsNumber) euterpe.change_volume(e.target?.valueAsNumber)
@ -119,9 +111,7 @@ function add_library_to_dom() {
function library_play(e: MouseEvent) { function library_play(e: MouseEvent) {
const b = e.currentTarget as HTMLButtonElement const b = e.currentTarget as HTMLButtonElement
euterpe.try_specific_song(parseInt(b.dataset["id"]!)).then( euterpe.try_specific_song(parseInt(b.dataset["id"]!)).then(
() => () => (document.querySelector("#text-playing")!.innerHTML = euterpe.format_current_song()),
(document.querySelector("#text-playing")!.innerHTML =
euterpe.format_current_song()),
(e) => alert(e) (e) => alert(e)
) )
} }

View file

@@ -20,14 +20,15 @@ class Euterpe extends MusicPlayer {
 		current_song_path?: string,
 		private options?: BuilderOptions
 	) {
-		super(
-			audio_context,
-			audio_element,
-			track,
-			gain,
-			volume,
-			current_song_path
-		)
+		super(audio_context, audio_element, track, gain, volume, current_song_path)
+
+		audio_element.addEventListener("ended", () => {
+			audio_element.currentTime = 0
+			audio_element.pause()
+			try {
+				this.try_next_song()
+			} catch (e) { }
+		})
 	}
 
 	/**
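
The "ended" listener above is the behavior named in the commit title: when the <audio> element finishes a song, Euterpe rewinds and pauses it, then calls try_next_song(), so playback advances to the next track on its own. A rough consumer-side sketch of what that looks like (TypeScript; the "#audio"/"#text-playing" selectors and the "./db" import mirror the demo code elsewhere in this commit, everything else is an assumption, not part of the change):

import { EuterpeBuilder } from "@euterpe.js/euterpe"
import { db } from "./db" // assumed path to a music-library DB, as in the demo

const euterpe = new EuterpeBuilder(document.querySelector("#audio")!, db).build()

// Preload and display the first song; when it ends, Euterpe now calls try_next_song()
// internally, so the consumer no longer has to wire its own "ended" handler just to
// keep the queue moving.
euterpe.try_preload_song(0).then(
	() => {
		document.querySelector("#text-playing")!.innerHTML = euterpe.format_current_song()
	},
	(e) => console.log(e + " Failed to preload")
)
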
@ -57,13 +58,10 @@ class Euterpe extends MusicPlayer {
while (this.db.songs[--id_i].id! > this.current_song_id); while (this.db.songs[--id_i].id! > this.current_song_id);
const next_id = ++id_i const next_id = ++id_i
if (next_id == this.db.songs.length) if (next_id == this.db.songs.length) throw new Error("Won't go past the last song")
throw new Error("Won't go past the last song")
new_song = this.db.songs.find((song) => song.id == next_id)! new_song = this.db.songs.find((song) => song.id == next_id)!
} }
const url = this.options?.use_only_pathname_url const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
? new_song.url.pathname
: new_song.url.toString()
await this.try_new_song(url) await this.try_new_song(url)
await this.try_play() await this.try_play()
if (this.current_song) this.played_history.push(this.current_song) if (this.current_song) this.played_history.push(this.current_song)
@ -87,9 +85,7 @@ class Euterpe extends MusicPlayer {
if (next_id == this.db.songs.length) next_id = this.db.songs[0].id! if (next_id == this.db.songs.length) next_id = this.db.songs[0].id!
new_song = this.db.songs.find((song) => song.id == next_id)! new_song = this.db.songs.find((song) => song.id == next_id)!
} }
const url = this.options?.use_only_pathname_url const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
? new_song.url.pathname
: new_song.url.toString()
await this.try_new_song(url) await this.try_new_song(url)
await this.try_play() await this.try_play()
if (this.current_song) this.played_history.push(this.current_song) if (this.current_song) this.played_history.push(this.current_song)
@ -113,9 +109,7 @@ class Euterpe extends MusicPlayer {
if (next_id == this.db.songs.length) next_id = this.db.songs[0].id! if (next_id == this.db.songs.length) next_id = this.db.songs[0].id!
new_song = this.db.songs.find((song) => song.id == next_id)! new_song = this.db.songs.find((song) => song.id == next_id)!
} }
const url = this.options?.use_only_pathname_url const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
? new_song.url.pathname
: new_song.url.toString()
this.new_song(url) this.new_song(url)
this.play() this.play()
if (this.current_song) this.played_history.push(this.current_song) if (this.current_song) this.played_history.push(this.current_song)
@ -137,9 +131,7 @@ class Euterpe extends MusicPlayer {
const next_id = ++id_i const next_id = ++id_i
new_song = this.db.songs.find((song) => song.id == next_id)! new_song = this.db.songs.find((song) => song.id == next_id)!
} }
const url = this.options?.use_only_pathname_url const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
? new_song.url.pathname
: new_song.url.toString()
this.new_song(url) this.new_song(url)
this.play() this.play()
if (this.current_song) this.played_history.push(this.current_song) if (this.current_song) this.played_history.push(this.current_song)
@ -168,9 +160,7 @@ class Euterpe extends MusicPlayer {
specific_song(new_song_id: number) { specific_song(new_song_id: number) {
const new_song = this.db.songs.find((song) => song.id! == new_song_id) const new_song = this.db.songs.find((song) => song.id! == new_song_id)
if (!new_song) return if (!new_song) return
const url = this.options?.use_only_pathname_url const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
? new_song.url.pathname
: new_song.url.toString()
this.new_song(url) this.new_song(url)
this.play() this.play()
if (this.current_song) this.played_history.push(this.current_song) if (this.current_song) this.played_history.push(this.current_song)
@ -192,13 +182,10 @@ class Euterpe extends MusicPlayer {
while (this.db.songs[++id_i].id! < this.current_song_id); while (this.db.songs[++id_i].id! < this.current_song_id);
const next_id = --id_i const next_id = --id_i
if (next_id == this.db.songs.length) if (next_id == this.db.songs.length) throw new Error("Won't roll backwards to last song")
throw new Error("Won't roll backwards to last song")
new_song = this.db.songs.find((song) => song.id == next_id)! new_song = this.db.songs.find((song) => song.id == next_id)!
} }
const url = this.options?.use_only_pathname_url const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
? new_song.url.pathname
: new_song.url.toString()
await this.try_new_song(url) await this.try_new_song(url)
await this.try_play() await this.try_play()
//if (this.current_song) this.played_history.push(this.current_song) //if (this.current_song) this.played_history.push(this.current_song)
@ -219,13 +206,10 @@ class Euterpe extends MusicPlayer {
while (this.db.songs[++id_i].id! < this.current_song_id); while (this.db.songs[++id_i].id! < this.current_song_id);
let next_id = --id_i let next_id = --id_i
if (next_id == -1) if (next_id == -1) next_id = this.db.songs[this.db.songs.length - 1].id!
next_id = this.db.songs[this.db.songs.length - 1].id!
new_song = this.db.songs.find((song) => song.id == next_id)! new_song = this.db.songs.find((song) => song.id == next_id)!
} }
const url = this.options?.use_only_pathname_url const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
? new_song.url.pathname
: new_song.url.toString()
await this.try_new_song(url) await this.try_new_song(url)
await this.try_play() await this.try_play()
//if (this.current_song) this.played_history.push(this.current_song) //if (this.current_song) this.played_history.push(this.current_song)
@ -246,13 +230,10 @@ class Euterpe extends MusicPlayer {
while (this.db.songs[++id_i].id! < this.current_song_id); while (this.db.songs[++id_i].id! < this.current_song_id);
const next_id = -id_i const next_id = -id_i
if (next_id == this.db.songs.length) if (next_id == this.db.songs.length) throw new Error("Won't go past the last song")
throw new Error("Won't go past the last song")
new_song = this.db.songs.find((song) => song.id == next_id)! new_song = this.db.songs.find((song) => song.id == next_id)!
} }
const url = this.options?.use_only_pathname_url const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
? new_song.url.pathname
: new_song.url.toString()
this.new_song(url) this.new_song(url)
this.play() this.play()
//if (this.current_song) this.played_history.push(this.current_song) //if (this.current_song) this.played_history.push(this.current_song)
@ -273,13 +254,10 @@ class Euterpe extends MusicPlayer {
while (this.db.songs[++id_i].id! < this.current_song_id); while (this.db.songs[++id_i].id! < this.current_song_id);
let next_id = -id_i let next_id = -id_i
if (next_id == this.db.songs.length) if (next_id == this.db.songs.length) next_id = this.db.songs[this.db.songs.length].id!
next_id = this.db.songs[this.db.songs.length].id!
new_song = this.db.songs.find((song) => song.id == next_id)! new_song = this.db.songs.find((song) => song.id == next_id)!
} }
const url = this.options?.use_only_pathname_url const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
? new_song.url.pathname
: new_song.url.toString()
this.new_song(url) this.new_song(url)
this.play() this.play()
//if (this.current_song) this.played_history.push(this.current_song) //if (this.current_song) this.played_history.push(this.current_song)
@ -341,8 +319,7 @@ class Euterpe extends MusicPlayer {
try_queue_add(id: number) { try_queue_add(id: number) {
const curr_song = this.db.songs.find((song) => song.id == id) const curr_song = this.db.songs.find((song) => song.id == id)
if (!curr_song) throw new Error(`Song of id "${id}" doesn't exist`) if (!curr_song) throw new Error(`Song of id "${id}" doesn't exist`)
if (this.queue.find((song) => song.id == id)) if (this.queue.find((song) => song.id == id)) throw new Error(`Song of id "${id}" already queued`)
throw new Error(`Song of id "${id}" already queued`)
this.queue.push(curr_song) this.queue.push(curr_song)
} }
/** /**
@ -391,18 +368,12 @@ class EuterpeBuilder {
* will throw if audio_element is undefined (stupid vue setup amirite?) * will throw if audio_element is undefined (stupid vue setup amirite?)
* will throw if user has not interacted with the page yet (Can't initiate AudioContext) * will throw if user has not interacted with the page yet (Can't initiate AudioContext)
*/ */
constructor( constructor(private audio_element: HTMLAudioElement, private db: DB, private options?: BuilderOptions) {
private audio_element: HTMLAudioElement, if (audio_element === undefined) throw Error("audio_element was undefined")
private db: DB,
private options?: BuilderOptions
) {
if (audio_element === undefined)
throw Error("audio_element was undefined")
// ↓ For old browsers // ↓ For old browsers
const AudioContext = window.AudioContext || window.webkitAudioContext const AudioContext = window.AudioContext || window.webkitAudioContext
this.#audio_context = new AudioContext() this.#audio_context = new AudioContext()
this.#track = this.#track = this.#audio_context.createMediaElementSource(audio_element)
this.#audio_context.createMediaElementSource(audio_element)
this.#gain = this.#audio_context.createGain() this.#gain = this.#audio_context.createGain()
} }
/** /**
@ -411,9 +382,7 @@ class EuterpeBuilder {
*/ */
add_analyser() { add_analyser() {
const analyser = this.#audio_context.createAnalyser() const analyser = this.#audio_context.createAnalyser()
!this.#prev_node !this.#prev_node ? this.#track.connect(analyser) : this.#prev_node.connect(analyser)
? this.#track.connect(analyser)
: this.#prev_node.connect(analyser)
this.#prev_node = analyser this.#prev_node = analyser
return analyser return analyser
} }
@ -423,9 +392,7 @@ class EuterpeBuilder {
*/ */
add_stereo_panner_node() { add_stereo_panner_node() {
const panner = this.#audio_context.createStereoPanner() const panner = this.#audio_context.createStereoPanner()
!this.#prev_node !this.#prev_node ? this.#track.connect(panner) : this.#prev_node.connect(panner)
? this.#track.connect(panner)
: this.#prev_node.connect(panner)
this.#prev_node = panner this.#prev_node = panner
return panner return panner
} }
@ -435,9 +402,7 @@ class EuterpeBuilder {
*/ */
add_wave_shaper_node() { add_wave_shaper_node() {
const shaper = this.#audio_context.createWaveShaper() const shaper = this.#audio_context.createWaveShaper()
!this.#prev_node !this.#prev_node ? this.#track.connect(shaper) : this.#prev_node.connect(shaper)
? this.#track.connect(shaper)
: this.#prev_node.connect(shaper)
this.#prev_node = shaper this.#prev_node = shaper
return shaper return shaper
} }
@ -445,9 +410,7 @@ class EuterpeBuilder {
* For additional trickery, you can connect your own node. * For additional trickery, you can connect your own node.
*/ */
connect_custom_node(node: AudioNode) { connect_custom_node(node: AudioNode) {
!this.#prev_node !this.#prev_node ? this.#track.connect(node) : this.#prev_node.connect(node)
? this.#track.connect(node)
: this.#prev_node.connect(node)
this.#prev_node = node this.#prev_node = node
} }
/** /**
@ -455,9 +418,7 @@ class EuterpeBuilder {
* eg. if you want the analyser nodes output to be affected by user #gain * eg. if you want the analyser nodes output to be affected by user #gain
*/ */
connect_gain() { connect_gain() {
!this.#prev_node !this.#prev_node ? this.#track.connect(this.#gain) : this.#prev_node.connect(this.#gain)
? this.#track.connect(this.#gain)
: this.#prev_node.connect(this.#gain)
this.#prev_node = this.#gain this.#prev_node = this.#gain
this.#is_gain_connected = true this.#is_gain_connected = true
} }
@ -467,21 +428,10 @@ class EuterpeBuilder {
*/ */
build() { build() {
if (!this.#is_gain_connected) { if (!this.#is_gain_connected) {
!this.#prev_node !this.#prev_node ? this.#track.connect(this.#gain) : this.#prev_node.connect(this.#gain)
? this.#track.connect(this.#gain)
: this.#prev_node.connect(this.#gain)
this.#prev_node = this.#gain this.#prev_node = this.#gain
} }
this.#prev_node.connect(this.#audio_context.destination) this.#prev_node.connect(this.#audio_context.destination)
return new Euterpe( return new Euterpe(this.db, this.#audio_context, this.audio_element, this.#track, this.#gain, this.#volume, undefined, this.options)
this.db,
this.#audio_context,
this.audio_element,
this.#track,
this.#gain,
this.#volume,
undefined,
this.options
)
} }
} }

View file

@ -1,11 +1,4 @@
import { import { DB, Artist, Song, RefTo, Ref, Platforms } from "@euterpe.js/music-library"
DB,
Artist,
Song,
RefTo,
Ref,
Platforms
} from "@euterpe.js/music-library"
export const db = new DB() export const db = new DB()
db.add([ db.add([
@ -28,39 +21,21 @@ db.add([
}), }),
new Artist({ new Artist({
name: "IMANU", name: "IMANU",
links: [ links: [[Platforms.Spotify, new URL("https://open.spotify.com/artist/5Y7rFm0tiJTVDzGLMzz0W1?si=DRaZyugTTIqlBHDkMGKVqA&nd=1")]]
[
Platforms.Spotify,
new URL(
"https://open.spotify.com/artist/5Y7rFm0tiJTVDzGLMzz0W1?si=DRaZyugTTIqlBHDkMGKVqA&nd=1"
)
]
]
}) })
]) ])
db.add([ db.add([
new Song({ new Song({
//Refrences are constructed as such. This allows to get to the artist from either collection or song //Refrences are constructed as such. This allows to get to the artist from either collection or song
artists: [ artists: [new Ref(RefTo.Artists, 2), new Ref(RefTo.Artists, 3), new Ref(RefTo.Artists, 4)],
new Ref(RefTo.Artists, 2),
new Ref(RefTo.Artists, 3),
new Ref(RefTo.Artists, 4)
],
duration: 252, duration: 252,
name: "Star", name: "Star",
remix_artists: [new Ref(RefTo.Artists, 5)], remix_artists: [new Ref(RefTo.Artists, 5)],
url: new URL( url: new URL("http://127.0.0.1:4200/Machinedrum, Tanerelle & Mono Poly - Star (IMANU Remix) final.mp3")
"http://127.0.0.1:4200/Machinedrum, Tanerelle & Mono Poly - Star (IMANU Remix) final.mp3"
)
}), }),
new Song({ new Song({
//If you don't like guessing the IDs, then this is also a way to do it //If you don't like guessing the IDs, then this is also a way to do it
artists: [ artists: [new Ref(RefTo.Artists, db.artists.find((a) => a.name == "Jamie xx")!.id!)],
new Ref(
RefTo.Artists,
db.artists.find((a) => a.name == "Jamie xx")!.id!
)
],
duration: 331, duration: 331,
name: "Sleep Sound", name: "Sleep Sound",
url: new URL("http://127.0.0.1:4200/Jamie xx - Sleep Sound.mp3") url: new URL("http://127.0.0.1:4200/Jamie xx - Sleep Sound.mp3")

View file

@ -18,9 +18,7 @@ music_player.try_new_song_async(db.songs[curr_song_id].url.pathname).then(
document.querySelector("#previous")?.addEventListener("click", () => { document.querySelector("#previous")?.addEventListener("click", () => {
curr_song_id-- curr_song_id--
if (curr_song_id < 0) curr_song_id = 2 if (curr_song_id < 0) curr_song_id = 2
music_player music_player.try_new_song_async(db.songs[curr_song_id].url.pathname).then(
.try_new_song_async(db.songs[curr_song_id].url.pathname)
.then(
(s) => { (s) => {
change_current_song_text(db) change_current_song_text(db)
music_player.play_async().catch((err) => { music_player.play_async().catch((err) => {
@ -35,9 +33,7 @@ music_player.try_new_song_async(db.songs[curr_song_id].url.pathname).then(
document.querySelector("#next")?.addEventListener("click", () => { document.querySelector("#next")?.addEventListener("click", () => {
curr_song_id++ curr_song_id++
if (curr_song_id > 2) curr_song_id = 0 if (curr_song_id > 2) curr_song_id = 0
music_player music_player.try_new_song_async(db.songs[curr_song_id].url.pathname).then(
.try_new_song_async(db.songs[curr_song_id].url.pathname)
.then(
(s) => { (s) => {
change_current_song_text(db) change_current_song_text(db)
music_player.play_async().catch((err) => { music_player.play_async().catch((err) => {
@ -67,14 +63,10 @@ music_player.try_new_song_async(db.songs[curr_song_id].url.pathname).then(
document.querySelector("#unmute")?.addEventListener("click", () => { document.querySelector("#unmute")?.addEventListener("click", () => {
music_player.unmute() music_player.unmute()
}) })
document document.querySelector("#toggle-mute")?.addEventListener("click", () => {
.querySelector("#toggle-mute")
?.addEventListener("click", () => {
music_player.mute_toggle() music_player.mute_toggle()
}) })
document document.querySelector("#toggle-play")?.addEventListener("click", () => {
.querySelector("#toggle-play")
?.addEventListener("click", () => {
music_player.play_toggle_async().then( music_player.play_toggle_async().then(
(s) => console.log("toggled play/pause"), (s) => console.log("toggled play/pause"),
(e) => alert("failed to toggle pause/play!" + e) (e) => alert("failed to toggle pause/play!" + e)
@ -100,8 +92,7 @@ music_player.try_new_song_async(db.songs[curr_song_id].url.pathname).then(
// Subscriptions to AudioContext changes, eg. time.. // Subscriptions to AudioContext changes, eg. time..
music_player.subscribe_to_formatted_duration_time((time) => { music_player.subscribe_to_formatted_duration_time((time) => {
document.querySelector("#duration").innerHTML = time document.querySelector("#duration").innerHTML = time
document.querySelector("#seek").max = document.querySelector("#seek").max = "" + music_player.get_current_duration()
"" + music_player.get_current_duration()
}) })
music_player.subscribe_to_formatted_current_time_tick((time) => { music_player.subscribe_to_formatted_current_time_tick((time) => {
document.querySelector("#current").innerHTML = time document.querySelector("#current").innerHTML = time
@ -133,9 +124,7 @@ function change_current_song_text(db: DB) {
const curr_artist = artist.get(db) as Artist const curr_artist = artist.get(db) as Artist
if (curr_artist.links && curr_artist.links.length > 0) { if (curr_artist.links && curr_artist.links.length > 0) {
//returns "found a link! Spotify" //returns "found a link! Spotify"
console.log( console.log("found a link! " + Platforms[curr_artist.links[0][0]])
"found a link! " + Platforms[curr_artist.links[0][0]]
)
const url = curr_artist.links[0][1] const url = curr_artist.links[0][1]
final_text += `<a href=${url}>${curr_artist.name}</a>, ` final_text += `<a href=${url}>${curr_artist.name}</a>, `

View file

@ -1,14 +1,4 @@
export { export { RefTo, Ref, Song, Collection, DB, Artist, Platforms, CollectionType, from_json }
RefTo,
Ref,
Song,
Collection,
DB,
Artist,
Platforms,
CollectionType,
from_json
}
type ID = number type ID = number
enum RefTo { enum RefTo {
Artists, Artists,
@ -179,9 +169,7 @@ class DB {
add(artist: Artist[]): void add(artist: Artist[]): void
add(collection: Collection[]): void add(collection: Collection[]): void
add(mix: (Song | Artist | Collection)[]): void add(mix: (Song | Artist | Collection)[]): void
add( add(stuff: Artist[] | Collection[] | Song[] | (Song | Artist | Collection)[]) {
stuff: Artist[] | Collection[] | Song[] | (Song | Artist | Collection)[]
) {
/** All of this adds refrences to the other side of whatever is being added. /** All of this adds refrences to the other side of whatever is being added.
* eg. adding song with refrence to artist, adds refrence of song to artist * eg. adding song with refrence to artist, adds refrence of song to artist
* and adds incremental ids * and adds incremental ids
@ -217,9 +205,7 @@ class DB {
} }
for (const artist_ref of col.artists) { for (const artist_ref of col.artists) {
const curr_artist = artist_ref.get(this) as Artist const curr_artist = artist_ref.get(this) as Artist
curr_artist.collections.push( curr_artist.collections.push(new Ref(RefTo.Collections, col.id))
new Ref(RefTo.Collections, col.id)
)
} }
this.collections.push(col) this.collections.push(col)
} else if (input instanceof Song) { } else if (input instanceof Song) {
@ -229,16 +215,8 @@ class DB {
if (song.in_collection) { if (song.in_collection) {
const curr_col = song.in_collection.get(this) as Collection const curr_col = song.in_collection.get(this) as Collection
curr_col.songs.push(new Ref(RefTo.Songs, song.id)) curr_col.songs.push(new Ref(RefTo.Songs, song.id))
song.artists.forEach((artist) => song.artists.forEach((artist) => curr_col.artists.push(new Ref(RefTo.Artists, artist.get(this)!.id!)))
curr_col.artists.push( song.remix_artists.forEach((artist) => curr_col.artists.push(new Ref(RefTo.Artists, artist.get(this)!.id!)))
new Ref(RefTo.Artists, artist.get(this)!.id!)
)
)
song.remix_artists.forEach((artist) =>
curr_col.artists.push(
new Ref(RefTo.Artists, artist.get(this)!.id!)
)
)
} }
for (const artist_ref of song.artists) { for (const artist_ref of song.artists) {
@ -258,20 +236,12 @@ class DB {
this.artists.sort((a, b) => a.id! - b.id!) this.artists.sort((a, b) => a.id! - b.id!)
} }
} }
function from_json(db_stringified: { function from_json(db_stringified: { artists?: any; songs?: any; collections?: any }): DB {
artists?: any
songs?: any
collections?: any
}): DB {
const db = new DB() const db = new DB()
if (db_stringified.artists) { if (db_stringified.artists) {
for (const artist of db_stringified.artists) { for (const artist of db_stringified.artists) {
if (artist.songs) if (artist.songs) artist.songs = artist.songs.map((e: any) => ref_from_json(e))
artist.songs = artist.songs.map((e: any) => ref_from_json(e)) if (artist.collections) artist.collections = artist.collections.map((e: any) => ref_from_json(e))
if (artist.collections)
artist.collections = artist.collections.map((e: any) =>
ref_from_json(e)
)
if (artist.links) if (artist.links)
artist.links = artist.links.map((e: any) => { artist.links = artist.links.map((e: any) => {
try { try {
@ -280,8 +250,7 @@ function from_json(db_stringified: {
console.log(e) console.log(e)
} }
}) })
if (artist.publish_date) if (artist.publish_date) artist.publish_date = new Date(JSON.parse(artist.publish_date))
artist.publish_date = new Date(JSON.parse(artist.publish_date))
if (artist.id) artist.id = artist.id as ID if (artist.id) artist.id = artist.id as ID
try { try {
if (artist.pfp) artist.pfp = new URL(artist.pfp) if (artist.pfp) artist.pfp = new URL(artist.pfp)
@ -298,22 +267,16 @@ function from_json(db_stringified: {
} catch (e) { } catch (e) {
console.error("failed to parse song.url" + e) console.error("failed to parse song.url" + e)
} }
if (song.artists) if (song.artists) song.artists = song.artists.map((e: any) => ref_from_json(e))
song.artists = song.artists.map((e: any) => ref_from_json(e)) if (song.remix_artists) song.remix_artists = song.remix_artists.map((e: any) => ref_from_json(e))
if (song.remix_artists) if (song.in_collection) song.in_collection = ref_from_json(song.in_collection)
song.remix_artists = song.remix_artists.map((e: any) =>
ref_from_json(e)
)
if (song.in_collection)
song.in_collection = ref_from_json(song.in_collection)
try { try {
if (song.cover) song.cover = new URL(song.cover) if (song.cover) song.cover = new URL(song.cover)
} catch (e) { } catch (e) {
console.error(e), console.error("failed to parse artist URL") console.error(e), console.error("failed to parse artist URL")
} }
try { try {
if (song.publish_date) if (song.publish_date) song.publish_date = new Date(JSON.parse(song.publish_date))
song.publish_date = new Date(JSON.parse(song.publish_date))
} catch (e) { } catch (e) {
console.error(e), console.error("Failed to song cover url") console.error(e), console.error("Failed to song cover url")
} }
@ -323,32 +286,18 @@ function from_json(db_stringified: {
} }
if (db_stringified.collections) { if (db_stringified.collections) {
for (const collection of db_stringified.collections) { for (const collection of db_stringified.collections) {
if (collection.artists) if (collection.artists) collection.artists = collection.artists.map((e: any) => ref_from_json(e))
collection.artists = collection.artists.map((e: any) => if (collection.songs) collection.songs = collection.songs.map((e: any) => ref_from_json(e))
ref_from_json(e) if (collection.type) collection.type = collection.type.map((e: any) => e as CollectionType)
)
if (collection.songs)
collection.songs = collection.songs.map((e: any) =>
ref_from_json(e)
)
if (collection.type)
collection.type = collection.type.map(
(e: any) => e as CollectionType
)
try { try {
if (collection.publish_date) if (collection.publish_date) collection.publish_date = new Date(JSON.parse(collection.publish_date))
collection.publish_date = new Date(
JSON.parse(collection.publish_date)
)
} catch (e) { } catch (e) {
console.error(e), console.error("Failed to parse date") console.error(e), console.error("Failed to parse date")
} }
try { try {
if (collection.cover) if (collection.cover) collection.cover = new URL(collection.cover)
collection.cover = new URL(collection.cover)
} catch (e) { } catch (e) {
console.error(e), console.error(e), console.error("failed to parse collection cover url")
console.error("failed to parse collection cover url")
} }
if (collection.id) collection.id = collection.id as ID if (collection.id) collection.id = collection.id as ID
db.collections.push(collection) db.collections.push(collection)

View file

@ -4,15 +4,7 @@ const music_player_builder = new MusicPlayerBuilder(audio_el)
const music_player = music_player_builder.build() const music_player = music_player_builder.build()
music_player.change_volume(1) music_player.change_volume(1)
music_player music_player.try_new_song(encodeURI("http://" + window.location.host + "/nuphory - NVISION (EXTENDED MIX).ogg")).then(
.try_new_song(
encodeURI(
"http://" +
window.location.host +
"/nuphory - NVISION (EXTENDED MIX).ogg"
)
)
.then(
() => { () => {
let is_seeking = false let is_seeking = false
document.querySelector("#play")?.addEventListener("click", () => { document.querySelector("#play")?.addEventListener("click", () => {
@ -33,32 +25,22 @@ music_player
document.querySelector("#unmute")?.addEventListener("click", () => { document.querySelector("#unmute")?.addEventListener("click", () => {
music_player.unmute() music_player.unmute()
}) })
document document.querySelector("#toggle-mute")?.addEventListener("click", () => {
.querySelector("#toggle-mute")
?.addEventListener("click", () => {
music_player.mute_toggle() music_player.mute_toggle()
}) })
document document.querySelector("#toggle-play")?.addEventListener("click", () => {
.querySelector("#toggle-play")
?.addEventListener("click", () => {
music_player.try_play_toggle().then( music_player.try_play_toggle().then(
(s) => console.log("toggled play/pause"), (s) => console.log("toggled play/pause"),
(e) => alert("failed to toggle pause/play!" + e) (e) => alert("failed to toggle pause/play!" + e)
) )
}) })
document document.querySelector("#volume")?.addEventListener("input", (e) => {
.querySelector("#volume")
?.addEventListener("input", (e) => {
music_player.change_volume(e.target?.valueAsNumber) music_player.change_volume(e.target?.valueAsNumber)
}) })
document document.querySelector("#seek")?.addEventListener("mousedown", (e) => {
.querySelector("#seek")
?.addEventListener("mousedown", (e) => {
is_seeking = true is_seeking = true
}) })
document document.querySelector("#seek")?.addEventListener("mouseup", (e) => {
.querySelector("#seek")
?.addEventListener("mouseup", (e) => {
try { try {
music_player.try_seek(e.target?.valueAsNumber) music_player.try_seek(e.target?.valueAsNumber)
console.log("seeked to " + e.target?.valueAsNumber) console.log("seeked to " + e.target?.valueAsNumber)
@ -70,8 +52,7 @@ music_player
// Subscriptions to AudioContext changes, eg. time.. // Subscriptions to AudioContext changes, eg. time..
music_player.on_duration_formatted((time) => { music_player.on_duration_formatted((time) => {
document.querySelector("#duration")!.innerHTML = time document.querySelector("#duration")!.innerHTML = time
document.querySelector("#seek")!.max = document.querySelector("#seek")!.max = "" + music_player.current_song_duration
"" + music_player.current_song_duration
}) })
music_player.on_time_tick_formatted((time) => { music_player.on_time_tick_formatted((time) => {
document.querySelector("#current")!.innerHTML = time document.querySelector("#current")!.innerHTML = time

View file

@ -29,28 +29,19 @@ class PubSub {
switch (event_name) { switch (event_name) {
case SubscribeEvents.CurrentTimeTick: { case SubscribeEvents.CurrentTimeTick: {
if (this.el_current_time_tick.includes(func)) { if (this.el_current_time_tick.includes(func)) {
this.el_current_time_tick.splice( this.el_current_time_tick.splice(this.el_current_time_tick.indexOf(func), 1)
this.el_current_time_tick.indexOf(func),
1
)
} }
break break
} }
case SubscribeEvents.FormattedDurationTick: { case SubscribeEvents.FormattedDurationTick: {
if (this.el_formatted_duration_tick.includes(func)) { if (this.el_formatted_duration_tick.includes(func)) {
this.el_formatted_duration_tick.splice( this.el_formatted_duration_tick.splice(this.el_formatted_duration_tick.indexOf(func), 1)
this.el_formatted_duration_tick.indexOf(func),
1
)
} }
break break
} }
case SubscribeEvents.FormattedCurrentTimeTick: { case SubscribeEvents.FormattedCurrentTimeTick: {
if (this.el_formatted_duration_tick.includes(func)) { if (this.el_formatted_duration_tick.includes(func)) {
this.el_formatted_duration_tick.splice( this.el_formatted_duration_tick.splice(this.el_formatted_duration_tick.indexOf(func), 1)
this.el_formatted_duration_tick.indexOf(func),
1
)
} }
break break
} }
@@ -128,18 +119,14 @@ export class MusicPlayer {
 	}
 	/**
 	 * Safer seek_async. Normal seek will try to start the player even if the track hasn't started yet, or was previously suspended/closed.
-	 * Will also resume playback if player is paused (by finishing the song etc)
+	 * will not resume playback
 	 * @throws if "Can't seek - Audiocontext is not running"
 	 */
	async try_seek(new_time: number) {
 		if (this.audio_context.state !== "running") {
 			this.is_playing = false
-			throw new Error(
-				"Can't seek - audioContext not running, audio_context.state : " +
-					this.audio_context.state
-			)
+			throw new Error("Can't seek - audioContext not running, audio_context.state : " + this.audio_context.state)
 		}
-		if (this.audio_element.paused) await this.try_play()
 		this.audio_element.currentTime = new_time
 	}
 
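
One behavioral consequence of this hunk: try_seek() now only moves the playhead, it no longer resumes a paused player (the `if (this.audio_element.paused) await this.try_play()` line is gone, and the doc comment now says so). Call sites that relied on seek-to-resume have to resume explicitly. A hedged sketch of the adjusted pattern, reusing the "#seek" slider and the music_player instance from the demo code in this commit; try_seek and try_play are the method names appearing in this diff, the rest is assumed:

document.querySelector("#seek")?.addEventListener("mouseup", async (e) => {
	const new_time = (e.target as HTMLInputElement).valueAsNumber
	try {
		await music_player.try_seek(new_time) // only moves the playhead now
		await music_player.try_play() // resume explicitly if playback should continue
	} catch (err) {
		console.log("Failed to seek: " + err)
	}
})
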
@ -235,10 +222,7 @@ export class MusicPlayer {
try { try {
await this.audio_context.resume() await this.audio_context.resume()
} catch (e) { } catch (e) {
console.log( console.log("loading new song - couldn't resume context before hand", e)
"loading new song - couldn't resume context before hand",
e
)
} }
} }
return new Promise<void>((resolve, reject) => { return new Promise<void>((resolve, reject) => {
@ -273,8 +257,7 @@ export class MusicPlayer {
//once aborted, try to set current_song_duration //once aborted, try to set current_song_duration
controller.signal.addEventListener("abort", (r) => { controller.signal.addEventListener("abort", (r) => {
this.current_song_duration = this.audio_element.duration this.current_song_duration = this.audio_element.duration
if (typeof controller.signal.reason == "string") if (typeof controller.signal.reason == "string") reject(new Error(controller.signal.reason))
reject(new Error(controller.signal.reason))
resolve() resolve()
}) })
this.is_playing = false this.is_playing = false
@ -341,24 +324,19 @@ export class MusicPlayer {
this.gain.gain.value = this.volume this.gain.gain.value = this.volume
this.time = this.audio_element.currentTime this.time = this.audio_element.currentTime
if (this.#pub_sub.el_current_time_tick.length == 0) if (this.#pub_sub.el_current_time_tick.length == 0) cancelAnimationFrame(request_id)
cancelAnimationFrame(request_id)
this.#pub_sub.emit(SubscribeEvents.CurrentTimeTick, this.time) this.#pub_sub.emit(SubscribeEvents.CurrentTimeTick, this.time)
} }
#emit_duration_fmt() { #emit_duration_fmt() {
const request_id = requestAnimationFrame( const request_id = requestAnimationFrame(this.#emit_duration_fmt.bind(this))
this.#emit_duration_fmt.bind(this)
)
const time = this.get_formatted_duration() const time = this.get_formatted_duration()
if (this.#pub_sub.el_formatted_duration_tick.length == 0) if (this.#pub_sub.el_formatted_duration_tick.length == 0) cancelAnimationFrame(request_id)
cancelAnimationFrame(request_id)
this.#pub_sub.emit(SubscribeEvents.FormattedDurationTick, time) this.#pub_sub.emit(SubscribeEvents.FormattedDurationTick, time)
} }
#emit_time_fmt() { #emit_time_fmt() {
const request_id = requestAnimationFrame(this.#emit_time_fmt.bind(this)) const request_id = requestAnimationFrame(this.#emit_time_fmt.bind(this))
const time = this.get_formatted_current_time() const time = this.get_formatted_current_time()
if (this.#pub_sub.el_formatted_current_time_tick.length == 0) if (this.#pub_sub.el_formatted_current_time_tick.length == 0) cancelAnimationFrame(request_id)
cancelAnimationFrame(request_id)
this.#pub_sub.emit(SubscribeEvents.FormattedCurrentTimeTick, time) this.#pub_sub.emit(SubscribeEvents.FormattedCurrentTimeTick, time)
} }
/** /**
@ -373,10 +351,7 @@ export class MusicPlayer {
* Will give formatted current time via get_formatted_current_time() every animation frame * Will give formatted current time via get_formatted_current_time() every animation frame
*/ */
on_time_tick_formatted(callback: (data: any) => void) { on_time_tick_formatted(callback: (data: any) => void) {
this.#pub_sub.subscribe( this.#pub_sub.subscribe(SubscribeEvents.FormattedCurrentTimeTick, callback)
SubscribeEvents.FormattedCurrentTimeTick,
callback
)
this.#emit_time_fmt() this.#emit_time_fmt()
} }
/** /**
@ -401,13 +376,11 @@ export class MusicPlayerBuilder {
* will throw if user has not interacted with the page yet (Can't initiate AudioContext) * will throw if user has not interacted with the page yet (Can't initiate AudioContext)
*/ */
constructor(private audio_element: HTMLAudioElement) { constructor(private audio_element: HTMLAudioElement) {
if (audio_element === undefined) if (audio_element === undefined) throw Error("audio_element was undefined")
throw Error("audio_element was undefined")
// ↓ For old browsers // ↓ For old browsers
const AudioContext = window.AudioContext || window.webkitAudioContext const AudioContext = window.AudioContext || window.webkitAudioContext
this.#audio_context = new AudioContext() this.#audio_context = new AudioContext()
this.#track = this.#track = this.#audio_context.createMediaElementSource(audio_element)
this.#audio_context.createMediaElementSource(audio_element)
this.#gain = this.#audio_context.createGain() this.#gain = this.#audio_context.createGain()
} }
/** /**
@ -416,9 +389,7 @@ export class MusicPlayerBuilder {
*/ */
add_analyser() { add_analyser() {
const analyser = this.#audio_context.createAnalyser() const analyser = this.#audio_context.createAnalyser()
!this.#prev_node !this.#prev_node ? this.#track.connect(analyser) : this.#prev_node.connect(analyser)
? this.#track.connect(analyser)
: this.#prev_node.connect(analyser)
this.#prev_node = analyser this.#prev_node = analyser
return analyser return analyser
} }
@ -428,9 +399,7 @@ export class MusicPlayerBuilder {
*/ */
add_stereo_panner_node() { add_stereo_panner_node() {
const panner = this.#audio_context.createStereoPanner() const panner = this.#audio_context.createStereoPanner()
!this.#prev_node !this.#prev_node ? this.#track.connect(panner) : this.#prev_node.connect(panner)
? this.#track.connect(panner)
: this.#prev_node.connect(panner)
this.#prev_node = panner this.#prev_node = panner
return panner return panner
} }
@ -440,9 +409,7 @@ export class MusicPlayerBuilder {
*/ */
add_wave_shaper_node() { add_wave_shaper_node() {
const shaper = this.#audio_context.createWaveShaper() const shaper = this.#audio_context.createWaveShaper()
!this.#prev_node !this.#prev_node ? this.#track.connect(shaper) : this.#prev_node.connect(shaper)
? this.#track.connect(shaper)
: this.#prev_node.connect(shaper)
this.#prev_node = shaper this.#prev_node = shaper
return shaper return shaper
} }
@ -450,9 +417,7 @@ export class MusicPlayerBuilder {
* For additional trickery, you can connect your own node. * For additional trickery, you can connect your own node.
*/ */
connect_custom_node(node: AudioNode) { connect_custom_node(node: AudioNode) {
!this.#prev_node !this.#prev_node ? this.#track.connect(node) : this.#prev_node.connect(node)
? this.#track.connect(node)
: this.#prev_node.connect(node)
this.#prev_node = node this.#prev_node = node
} }
/** /**
@ -460,9 +425,7 @@ export class MusicPlayerBuilder {
* eg. if you want the analyser nodes output to be affected by user #gain * eg. if you want the analyser nodes output to be affected by user #gain
*/ */
connect_gain() { connect_gain() {
!this.#prev_node !this.#prev_node ? this.#track.connect(this.#gain) : this.#prev_node.connect(this.#gain)
? this.#track.connect(this.#gain)
: this.#prev_node.connect(this.#gain)
this.#prev_node = this.#gain this.#prev_node = this.#gain
this.#is_gain_connected = true this.#is_gain_connected = true
} }
@ -472,18 +435,10 @@ export class MusicPlayerBuilder {
*/ */
build() { build() {
if (!this.#is_gain_connected) { if (!this.#is_gain_connected) {
!this.#prev_node !this.#prev_node ? this.#track.connect(this.#gain) : this.#prev_node.connect(this.#gain)
? this.#track.connect(this.#gain)
: this.#prev_node.connect(this.#gain)
this.#prev_node = this.#gain this.#prev_node = this.#gain
} }
this.#prev_node.connect(this.#audio_context.destination) this.#prev_node.connect(this.#audio_context.destination)
return new MusicPlayer( return new MusicPlayer(this.#audio_context, this.audio_element, this.#track, this.#gain, this.#volume)
this.#audio_context,
this.audio_element,
this.#track,
this.#gain,
this.#volume
)
} }
} }

View file

@ -1,15 +1,6 @@
import filehound from "filehound" import filehound from "filehound"
import fs from "fs" import fs from "fs"
const songs = filehound const songs = filehound.create().path("../public/samples").ext(["ogg"]).findSync()
.create() fs.writeFile("songs_list.ts", `export const songs = ` + JSON.stringify(songs), "utf8", () => {
.path("../public/samples")
.ext(["ogg"])
.findSync()
fs.writeFile(
"songs_list.ts",
`export const songs = ` + JSON.stringify(songs),
"utf8",
() => {
1 + 1 1 + 1
} })
)

View file

@ -13,10 +13,7 @@ export function generate_db() {
for (let i = 0; i < songs.length; i++) { for (let i = 0; i < songs.length; i++) {
const song = songs[i] const song = songs[i]
const last_i = song.lastIndexOf(path_char) const last_i = song.lastIndexOf(path_char)
const collection_name = song.slice( const collection_name = song.slice(song.slice(0, last_i).lastIndexOf(path_char) + 1, last_i)
song.slice(0, last_i).lastIndexOf(path_char) + 1,
last_i
)
/* /*
const foreforelast_i = song.slice(0, forelast_i - 1) const foreforelast_i = song.slice(0, forelast_i - 1)
const foreforeforelast_i = song.slice(0, foreforelast_i - 1).lastIndexOf("\\") const foreforeforelast_i = song.slice(0, foreforelast_i - 1).lastIndexOf("\\")
@ -44,15 +41,11 @@ export function generate_db() {
const last_i = song.song.lastIndexOf(path_char) const last_i = song.song.lastIndexOf(path_char)
const name = song.song.slice(last_i + 1) const name = song.song.slice(last_i + 1)
const song_url = song.song.slice( const song_url = song.song.slice(song.song.indexOf(`public${path_char}`) + 7)
song.song.indexOf(`public${path_char}`) + 7
)
const db_song = new Song({ const db_song = new Song({
name: name.slice(0, name.lastIndexOf(".")), name: name.slice(0, name.lastIndexOf(".")),
artists: [], artists: [],
url: new URL( url: new URL(`${window.location.href}${song_url}`.replaceAll("\\", "/")),
`${window.location.href}${song_url}`.replaceAll("\\", "/")
),
duration: 0, duration: 0,
remix_artists: [], remix_artists: [],
in_collection: new Ref(RefTo.Collections, song.collection_id) in_collection: new Ref(RefTo.Collections, song.collection_id)

View file

@ -1,12 +1,6 @@
import { DB, from_json } from "@euterpe.js/music-library" import { DB, from_json } from "@euterpe.js/music-library"
import { generate_db } from "./generate_db" import { generate_db } from "./generate_db"
import { import { AudioVisualBuilder, SmoothingAlgorythm, ShapeType, WaveformOrientation, WaveformShape } from "@euterpe.js/visualizer"
AudioVisualBuilder,
SmoothingAlgorythm,
ShapeType,
WaveformOrientation,
WaveformShape
} from "@euterpe.js/visualizer"
let result: AnalyzeReturn | undefined let result: AnalyzeReturn | undefined
@ -23,10 +17,7 @@ audioContextAnalyser.smoothingTimeConstant = 0
const analyserBufferLength = audioContextAnalyser.frequencyBinCount const analyserBufferLength = audioContextAnalyser.frequencyBinCount
const FFTDataArray = new Float32Array(analyserBufferLength) const FFTDataArray = new Float32Array(analyserBufferLength)
//Connect all audio Nodes //Connect all audio Nodes
track track.connect(audioContextAnalyser).connect(gain).connect(audioContext.destination)
.connect(audioContextAnalyser)
.connect(gain)
.connect(audioContext.destination)
document.getElementById("analyze")!.addEventListener("click", async (ev) => { document.getElementById("analyze")!.addEventListener("click", async (ev) => {
audioContext.resume() audioContext.resume()
@ -50,11 +41,7 @@ document.getElementById("upload")!.addEventListener("change", (ev) => {
for (const song of new_db.songs) { for (const song of new_db.songs) {
if (song.fft_data) { if (song.fft_data) {
for (let i = 0; i < song.fft_data.length; i++) { for (let i = 0; i < song.fft_data.length; i++) {
if ( if (song.fft_data[i] === null || song.fft_data[i] === undefined) song.fft_data[i] = -Infinity
song.fft_data[i] === null ||
song.fft_data[i] === undefined
)
song.fft_data[i] = -Infinity
} }
} }
} }
@ -68,26 +55,18 @@ async function svg() {
return return
} }
console.log("Creating svgs...") console.log("Creating svgs...")
const canvas_wrapper = document.querySelector( const canvas_wrapper = document.querySelector(".canvas-wrapper") as HTMLElement
".canvas-wrapper"
) as HTMLElement
const waveform_canvas = document const waveform_canvas = document.querySelector("#waveform-canvas")?.cloneNode() as SVGSVGElement
.querySelector("#waveform-canvas")
?.cloneNode() as SVGSVGElement
canvas_wrapper.childNodes.forEach((c) => c.remove()) canvas_wrapper.childNodes.forEach((c) => c.remove())
canvas_wrapper.appendChild(waveform_canvas) canvas_wrapper.appendChild(waveform_canvas)
for (const song of result.db.songs) { for (const song of result.db.songs) {
console.log("creating waveform for -> " + song.name) console.log("creating waveform for -> " + song.name)
const curr_waveform_canvas = const curr_waveform_canvas = waveform_canvas.cloneNode() as SVGSVGElement
waveform_canvas.cloneNode() as SVGSVGElement
waveform_canvas.parentElement?.append(curr_waveform_canvas) waveform_canvas.parentElement?.append(curr_waveform_canvas)
const waveform_visual_builder = new AudioVisualBuilder( const waveform_visual_builder = new AudioVisualBuilder(result.analyzer_node, curr_waveform_canvas)
result.analyzer_node,
curr_waveform_canvas
)
.set_fft_data_tresholds({ .set_fft_data_tresholds({
point_count_i: 100, point_count_i: 100,
fft_multiplier_i: 0.9, fft_multiplier_i: 0.9,
@ -95,15 +74,11 @@ async function svg() {
}) })
.set_fft_time_smoothing(0.8) .set_fft_time_smoothing(0.8)
.set_smoothing_algorythm(SmoothingAlgorythm.CatmullRom) .set_smoothing_algorythm(SmoothingAlgorythm.CatmullRom)
const waveform_visual = waveform_visual_builder.build( const waveform_visual = waveform_visual_builder.build(ShapeType.Waveform, true, {
ShapeType.Waveform,
true,
{
fft_data: new Float32Array(new Float64Array(song.fft_data!)), fft_data: new Float32Array(new Float64Array(song.fft_data!)),
orientation: WaveformOrientation.Horizontal, orientation: WaveformOrientation.Horizontal,
shape_type: WaveformShape.LineLike shape_type: WaveformShape.LineLike
} })
)
waveform_visual.draw_once() waveform_visual.draw_once()
// await new Promise<void>((done) => setTimeout(() => done(), 500)) // await new Promise<void>((done) => setTimeout(() => done(), 500))
// @ts-ignore // @ts-ignore
@ -125,21 +100,13 @@ async function analyze(): Promise<AnalyzeReturn> {
console.log(db)
for (const song of db.songs) {
// const song = db.songs[db.songs.length - 1]
console.log(`Analyzing ${song.name}, ${db.songs.indexOf(song) + 1}/${db.songs.length}`)
//if not loaded yet keep trying
audioEl.src = song.url.href
await awaitLoad(audioEl)
song.duration = audioEl.duration
let currentFFTData = []
for (let curSecond = 0; curSecond < song.duration; curSecond += song.duration / samplingRate) {
console.log("working...")
audioEl.currentTime = curSecond
await audioEl.play()
@ -149,9 +116,7 @@ async function analyze(): Promise<AnalyzeReturn> {
FFTDataArray.forEach((element) => {
volume += element
})
currentFFTData.push(Math.round((volume / FFTDataArray.length) * 100) / 100)
}
song.fft_data = currentFFTData
console.log(song.fft_data)
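The loop above reduces each FFT snapshot to one rounded average value per sample point before storing it in song.fft_data. A minimal sketch of that reduction, using a hypothetical helper name (average_fft_volume does not exist in the codebase):

// Sums the analyser's float frequency data and rounds the mean to two decimals,
// mirroring the averaging step in analyze() above.
function average_fft_volume(fft: Float32Array): number {
	let volume = 0
	fft.forEach((value) => {
		volume += value
	})
	return Math.round((volume / fft.length) * 100) / 100
}
// e.g. average_fft_volume(new Float32Array([-80, -90, -100])) === -90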

View file

@ -39,8 +39,7 @@ function generate_new_sounds_ogg(file, currentExtention) {
//Adds 25ms of delay to all samples
command += `-af 'adelay=25:all=true' `
//So the demo is HQ
if (file.includes("demo")) command += `-c:a libopus -b:a 256k '${file}.ogg'"`
else command += `-c:a libopus -b:a 96k '${file}.ogg'"`
exec(command)
console.log(command)
@ -59,11 +58,7 @@ function generate_new_sounds_mp3(file, currentExtention) {
exec(command)
// console.log(command)
}
function generate_new_video_sizes_mp4(file, currentExtention, width_resolutions) {
const path = file.substring(0, file.lastIndexOf("\\"))
file = file.substring(file.lastIndexOf("\\") + 1)
@ -86,11 +81,7 @@ function generate_new_video_sizes_mp4(
}
})
}
function generate_new_video_sizes_webm(file, currentExtention, width_resolutions) {
const path = file.substring(0, file.lastIndexOf("\\"))
file = file.substring(file.lastIndexOf("\\") + 1)
@ -129,21 +120,12 @@ for (let i = 0; i < dirs.length; i++) {
current_folder_files = current_folder_files.slice(1)
}
for (let current_media of current_folder_files) {
current_media = [current_media.substring(0, current_media.lastIndexOf(".")), current_media.substring(current_media.lastIndexOf(".") + 1)]
if (current_media[1] == "wav") {
console.log(`${current_media[0]}.${current_media[1]}\n`)
generate_new_sounds_ogg(`${current_media[0]}`, `${current_media[1]}`)
generate_new_sounds_mp3(`${current_media[0]}`, `${current_media[1]}`)
}
/*
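The loop above splits each file name into a [basename, extension] pair and only converts .wav sources to ogg and mp3. A small sketch of that split, under an assumed helper name (split_media_name is not part of the script):

// Splits "kick drum.wav" into ["kick drum", "wav"], like the destructuring above.
function split_media_name(file_name: string): [string, string] {
	const dot = file_name.lastIndexOf(".")
	return [file_name.substring(0, dot), file_name.substring(dot + 1)]
}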

View file

@ -1,12 +1,6 @@
import { MusicPlayerBuilder } from "@euterpe.js/player"
import { fft_data } from "./waveform_data"
import { AudioVisualBuilder, SmoothingAlgorythm, ShapeType, WaveformOrientation, WaveformShape } from "@euterpe.js/visualizer"
const audio_el = document.querySelector("#audio") as HTMLAudioElement
const music_player_builder = new MusicPlayerBuilder(audio_el)
const trapnation_analyser_node = music_player_builder.add_analyser()
@ -14,19 +8,14 @@ const bar_analyser_node = music_player_builder.add_analyser()
const music_player = music_player_builder.build()
music_player.change_volume(0.5)
const waveform_canvas = document.querySelector("#waveform-canvas") as SVGSVGElement
const seek_element = document.querySelector("#seek") as HTMLInputElement
const duration_element = document.querySelector("#duration") as HTMLElement
const current_time_element = document.querySelector("#current") as HTMLElement
/**
* Create the Audio Visualizer
*/
const trapnation_visual_builder = new AudioVisualBuilder(trapnation_analyser_node, document.querySelector("#trapnation-canvas") as SVGSVGElement)
//Because the to_fft_range is so low, it needs more FFT data.
.set_fft_size(8192)
//Tells the Visualiser how to parse data which mutates our initial shape
@ -39,15 +28,9 @@ const trapnation_visual_builder = new AudioVisualBuilder(
.set_fft_time_smoothing(0.6)
//If not using typescript enums, CatmullRom = number 2
.set_smoothing_algorythm(SmoothingAlgorythm.CatmullRom)
const trapnation_visual = trapnation_visual_builder.build(ShapeType.Circle, false)
const bar_visual_builder = new AudioVisualBuilder(bar_analyser_node, document.querySelector("#bar-canvas") as SVGSVGElement)
.set_fft_data_tresholds({
point_count_i: 50,
fft_multiplier_i: 2,
@ -57,10 +40,7 @@ const bar_visual_builder = new AudioVisualBuilder(
.set_smoothing_algorythm(SmoothingAlgorythm.BezierPerpendicular)
const bar_visual = bar_visual_builder.build(ShapeType.Line, false)
const waveform_visual_builder = new AudioVisualBuilder(bar_analyser_node, waveform_canvas)
.set_fft_data_tresholds({
point_count_i: 100,
fft_multiplier_i: 1,
@ -68,15 +48,11 @@ const waveform_visual_builder = new AudioVisualBuilder(
})
.set_fft_time_smoothing(0.8)
.set_smoothing_algorythm(SmoothingAlgorythm.CatmullRom)
const waveform_visual = waveform_visual_builder.build(ShapeType.Waveform, true, {
fft_data: new Float32Array(fft_data.fft_data),
orientation: WaveformOrientation.Horizontal,
shape_type: WaveformShape.LineLike
})
trapnation_visual.draw()
bar_visual.draw()
@ -128,11 +104,7 @@ function convert_range(value: number, r1: number[], r2: number[]) {
waveform_canvas.addEventListener("mousemove", (e) => { waveform_canvas.addEventListener("mousemove", (e) => {
const rect = e.target.getBoundingClientRect() const rect = e.target.getBoundingClientRect()
const x = e.clientX - rect.left const x = e.clientX - rect.left
const resX = convert_range( const resX = convert_range(x, [0, rect.width], [0, waveform_canvas.viewBox.baseVal.width + 40])
x,
[0, rect.width],
[0, waveform_canvas.viewBox.baseVal.width + 40]
)
const polygon = `polygon(0 0, ${resX}px 0, ${resX}px 100%, 0 100%)` const polygon = `polygon(0 0, ${resX}px 0, ${resX}px 100%, 0 100%)`
document.documentElement.style.setProperty("--clip-seek-path", polygon) document.documentElement.style.setProperty("--clip-seek-path", polygon)
}) })
@ -143,11 +115,7 @@ waveform_canvas.addEventListener("mouseleave", (e) => {
/*
* The player part
*/
music_player.try_new_song_async(encodeURI("http://localhost:4200/nuphory - NVISION (EXTENDED MIX).ogg")).then(
() => {
let is_seeking = false
document.querySelector("#play")?.addEventListener("click", () => {
@ -167,32 +135,22 @@ music_player
document.querySelector("#unmute")?.addEventListener("click", () => { document.querySelector("#unmute")?.addEventListener("click", () => {
music_player.unmute() music_player.unmute()
}) })
document document.querySelector("#toggle-mute")?.addEventListener("click", () => {
.querySelector("#toggle-mute")
?.addEventListener("click", () => {
music_player.mute_toggle() music_player.mute_toggle()
}) })
document document.querySelector("#toggle-play")?.addEventListener("click", () => {
.querySelector("#toggle-play")
?.addEventListener("click", () => {
music_player.play_toggle_async().then( music_player.play_toggle_async().then(
(s) => console.log("toggled play/pause"), (s) => console.log("toggled play/pause"),
(e) => alert("failed to toggle pause/play!" + e) (e) => alert("failed to toggle pause/play!" + e)
) )
}) })
document document.querySelector("#volume")?.addEventListener("input", (e) => {
.querySelector("#volume")
?.addEventListener("input", (e) => {
music_player.change_volume(e.target?.valueAsNumber) music_player.change_volume(e.target?.valueAsNumber)
}) })
document document.querySelector("#seek")?.addEventListener("mousedown", (e) => {
.querySelector("#seek")
?.addEventListener("mousedown", (e) => {
is_seeking = true is_seeking = true
}) })
document document.querySelector("#seek")?.addEventListener("mouseup", (e) => {
.querySelector("#seek")
?.addEventListener("mouseup", (e) => {
music_player.try_seek_async(e.target?.valueAsNumber).then( music_player.try_seek_async(e.target?.valueAsNumber).then(
() => { () => {
console.log("seeked to " + e.target?.valueAsNumber) console.log("seeked to " + e.target?.valueAsNumber)
@ -214,14 +172,9 @@ music_player
music_player.on_time_tick((time) => {
if (is_seeking) return
seek_element.value = "" + time
const x = `${(time / music_player.current_song_duration) * 100}%`
const polygon = `polygon(0 0, ${x} 0, ${x} 100%, 0 100%)`
document.documentElement.style.setProperty("--clip-time-path", polygon)
})
},
(e) => console.log(e)
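convert_range maps the cursor position and the playback progress onto the SVG viewBox and the CSS clip-path percentages used above. Its body is not shown in this hunk; the sketch below is an assumed linear mapping of that shape, not the demo's actual implementation:

// Linearly rescales value from range r1 to range r2.
function convert_range_sketch(value: number, r1: number[], r2: number[]): number {
	return ((value - r1[0]) * (r2[1] - r2[0])) / (r1[1] - r1[0]) + r2[0]
}
// e.g. convert_range_sketch(5, [0, 10], [0, 100]) === 50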

View file

@ -1,17 +1,10 @@
export const fft_data = {
fft_data: [
-106.24, -99.73, -100.98, -101.34, -107.01, -92.38, -84.85, -90.28, -93.68, -95.02, -97.16, -96.32, -99.23, -103.13, -85.57, -98.17, -103.27,
-107.5, -83.62, -95.23, -97.12, -94.78, -95.93, -101.42, -97.83, -102.42, -111.74, -101.38, -106.8, -111.05, -88.04, -90.88, -97.67, -96.31,
-96.69, -102.15, -102.03, -100.51, -107.14, -101.48, -101.6, -106.62, -73.94, -79.53, -92.74, -96.08, -96.26, -100.35, -99.13, -102.03,
-107.4, -93.57, -102.31, -102.3, -109.04, -81.85, -92.79, -100.06, -95.79, -96.49, -99.89, -100.27, -102.69, -107.35, -103.94, -104.64,
-104.3, -78.82, -84.2, -95.29, -92.57, -93.47, -98.08, -98.9, -101.56, -109.38, -102.01, -102.51, -104.83, -72.18, -76.52, -91.69, -99.97,
-96.63, -98.61, -76.97, -90.41, -100.38, -106.77, -102.83, -104.46, -108.59, -80.97, -88.05, -100.77, -79.64, -72.3, -87.96, -92.89, -93.03
]
}

View file

@ -88,16 +88,9 @@ export class AudioVisual {
} else {
this.#analyzer_node.getFloatFrequencyData(this.#fft_data)
}
const from = Math.round((this.#point_count / 100) * this.#from_fft_range)
const to = Math.round(this.#buffer_length - (this.#buffer_length / 100) * this.#to_fft_range)
const squeeze_factor = Math.round((this.#buffer_length - to) / this.#point_count)
const return_array = new Array(this.#point_count)
for (let i = 0; i < this.#point_count + 1; i++) {
@ -117,27 +110,16 @@ export class AudioVisual {
case ShapeType.Circle: {
const pointDistance = 7
for (let curPoint = 0; curPoint < arr.length; curPoint++) {
const [dx, dy] = this.#normalise_perpendicular_anchors(arr[curPoint].x, arr[curPoint].y)
const perpendicular = [-dy, dx]
anchors.push({
leftAnchor: {
x: arr[curPoint].x + pointDistance * perpendicular[0],
y: arr[curPoint].y + pointDistance * perpendicular[1]
},
rightAnchor: {
x: arr[curPoint].x - pointDistance * perpendicular[0],
y: arr[curPoint].y - pointDistance * perpendicular[1]
}
})
}
@ -194,16 +176,7 @@ export class AudioVisual {
const cp2x = x2 - ((x3 - x1) / 6) * k
const cp2y = y2 - ((y3 - y1) / 6) * k
path += "C" + [cp1x.toFixed(2), cp1y.toFixed(2), cp2x.toFixed(2), cp2y.toFixed(2), x2.toFixed(2), y2.toFixed(2)]
}
return path
}
@ -217,16 +190,10 @@ export class AudioVisual {
case ShapeType.Line: {
for (let i = 0; i < frequency_data.length - 1; i++) {
const mutator = isFinite(frequency_data[i])
? this.#convert_range(frequency_data[i] * this.#fft_multiplier + this.#fft_offset, in_range, out_range)
: -1 * this.#canvas_height
mutated_points.push({
x: this.#shape.points[i].x /** ((Math.max(FFTDataArray[i] + 100)) * 4)*/,
y: this.#shape.points[i].y - mutator
})
}
@ -234,30 +201,13 @@ export class AudioVisual {
}
case ShapeType.Circle: {
for (let i = 0; i < frequency_data.length - 1; i++) {
const new_i = i > (frequency_data.length - 1) / 2 ? frequency_data.length - 1 - i : i
mutated_points.push({
x:
this.#shape.points[i].x * Math.max((frequency_data[new_i] * this.#fft_multiplier + this.#fft_offset) / 50, 1) +
this.#canvas_width / 2,
y:
this.#shape.points[i].y * Math.max((frequency_data[new_i] * this.#fft_multiplier + this.#fft_offset) / 50, 1) +
this.#canvas_height / 2
})
/* TODO: IMPLEMENT SCALING TO BEAT
@ -268,23 +218,12 @@ export class AudioVisual {
break
}
case ShapeType.Waveform: {
if (this.#shape.waveform_options!.shape_type == WaveformShape.LineLike) {
if (this.#shape.symmetry) {
for (let i = 0; i < this.#shape.points.length; i += 2) {
let mutator = this.#convert_range(frequency_data[i / 2] * this.#fft_multiplier + this.#fft_offset, in_range, out_range)
if (mutator <= 0) mutator = 2
if (this.#shape.waveform_options!.orientation == WaveformOrientation.Horizontal) {
mutated_points.push({
x: this.#shape.points[i].x,
y: this.#shape.points[i].y - mutator
@ -306,16 +245,8 @@ export class AudioVisual {
}
} else {
for (let i = 0; i < frequency_data.length - 1; i++) {
const mutator = this.#convert_range(frequency_data[i] * this.#fft_multiplier + this.#fft_offset, in_range, out_range)
if (this.#shape.waveform_options!.orientation == WaveformOrientation.Horizontal) {
mutated_points.push({
x: this.#shape.points[i].x,
y: this.#shape.points[i].y - mutator
@ -363,36 +294,26 @@ export class AudioVisual {
switch (this.#shape.shape_type) {
case ShapeType.Line: {
for (let i = 0; i < arr.length; i++) {
path += `L ${arr[i].x.toFixed(2)},${arr[i].y.toFixed(2)} `
}
if (this.#shape.shape_type == ShapeType.Line) {
path += `L ${this.#canvas_width} ${this.#canvas_height} `
//path += `L ${canvas_width} ${canvas_height} `
}
break
}
case ShapeType.Circle: {
for (let i = 0; i < arr.length; i++) {
path += `L ${arr[i].x.toFixed(2)},${arr[i].y.toFixed(2)} `
}
break
}
case ShapeType.Waveform: {
for (let i = 0; i < arr.length; i += 2) {
path += `L ${arr[i].x.toFixed(2)},${arr[i].y.toFixed(2)} `
}
for (let i = arr.length - 1; i >= 0; i -= 2) {
path += `L ${arr[i].x.toFixed(2)},${arr[i].y.toFixed(2)} `
}
}
}
@ -404,15 +325,9 @@ export class AudioVisual {
const anchors = this.#create_perpendicular_anchors(arr)
for (let i = 1; i < arr.length; i++) {
path += `C ${anchors[i - 1].rightAnchor.x.toFixed(2)} ${anchors[i - 1].rightAnchor.y.toFixed(2)} ${anchors[
i
].leftAnchor.x.toFixed(2)} ${anchors[i].leftAnchor.y.toFixed(2)} ${arr[i].x.toFixed(2)} ${arr[i].y.toFixed(2)} `
}
if (this.#shape.shape_type == ShapeType.Line) {
//path += `L ${this.canvasWidth} ${this.canvasHeight / 2} `
@ -432,10 +347,7 @@ export class AudioVisual {
break
}
case SmoothingAlgorythm.CatmullRom: {
if (this.#shape.shape_type == ShapeType.Waveform && this.#shape.symmetry == true) {
//adding points so both halves end and start at the same center point
console.log(arr)
const first_half = [{ x: 0, y: this.#canvas_height / 2 }]
@ -500,10 +412,7 @@ export class AudioVisualBuilder {
#from_fft_range
#to_fft_range
#point_count: number
constructor(analyzer_node: AnalyserNode, svg_injecting_element: SVGSVGElement) {
this.#analyzer_node = analyzer_node
this.#svg_injecting_element = svg_injecting_element
this.#canvas_width = svg_injecting_element.viewBox.baseVal.width
@ -515,10 +424,7 @@ export class AudioVisualBuilder {
this.#fft_offset = 150
this.#from_fft_range = 0
this.#to_fft_range = 100
this.#point_count = Math.round((this.#buffer_length / 100) * (this.#from_fft_range - this.#to_fft_range))
}
/**
* The smoothingTimeConstant property of the AnalyserNode interface is a double value representing the averaging constant with the last analysis frame. It's basically an average between the current buffer and the last buffer the AnalyserNode processed, and results in a much smoother set of value changes over time.
@ -537,8 +443,7 @@ export class AudioVisualBuilder {
* @returns this
*/
set_fft_size(fft_size: number) {
if (!(this.#fft_size && !(this.#fft_size & (this.#fft_size - 1)))) throw Error("fft_size not power of two")
this.#analyzer_node.fftSize = this.#fft_size = fft_size
this.#buffer_length = this.#analyzer_node.frequencyBinCount
return this
@ -571,9 +476,7 @@ export class AudioVisualBuilder {
set_fft_data_tresholds({
from_fft_range_i = 0,
to_fft_range_i = 100,
point_count_i = Math.round((this.#buffer_length / 100) * (from_fft_range_i - to_fft_range_i)),
fft_multiplier_i = 2,
fft_offset_i = -50
}) {
@ -614,11 +517,7 @@ export class AudioVisualBuilder {
* @param shape_type Circle = 0; Line = 1;
* @returns `new AudioVisual`
*/
build(shape_type: ShapeType, symmetry: boolean, waveform_options?: WaveformOptions) {
const shape = this.#create_shape(shape_type, symmetry, waveform_options)
return new AudioVisual(
this.#analyzer_node,
@ -632,11 +531,7 @@ export class AudioVisualBuilder {
this.#point_count
)
}
#create_shape(shape_type: ShapeType, symmetry: boolean, waveform_options?: WaveformOptions): Shape {
const point_amount = this.#get_cured_frequency_data().length
let new_shape: Shape
switch (shape_type) {
@ -657,20 +552,11 @@ export class AudioVisualBuilder {
}
case ShapeType.Circle: {
const points = []
const radius = this.#canvas_height > this.#canvas_width ? this.#canvas_height / 5 : this.#canvas_width / 5
for (let i = 0; i < point_amount; i++) {
points.push({
x: Math.cos(((2 * Math.PI) / point_amount) * i - Math.PI / 2) * radius,
y: Math.sin(((2 * Math.PI) / point_amount) * i - Math.PI / 2) * radius
})
}
@ -683,16 +569,10 @@ export class AudioVisualBuilder {
}
case ShapeType.Waveform: {
if (waveform_options === undefined) {
console.error("Waveform options undefined at shapetype.waveform, please define!")
throw Error("Waveform options undefined at shapetype.waveform, please define!")
}
const fft_length = this.#get_cured_frequency_data(waveform_options.fft_data).length
const points = []
for (let i = 0; i < fft_length; i++) {
let x, y
@ -702,16 +582,10 @@ export class AudioVisualBuilder {
} else {
throw Error("WaveformShape.Striped not implemented yet")
}
waveform_options.orientation == WaveformOrientation.Horizontal ? points.push({ x: x, y: y }) : points.push({ x: y, y: x })
//Double the points needed for symmetry
if (symmetry) {
waveform_options.orientation == WaveformOrientation.Horizontal ? points.push({ x: x, y: y }) : points.push({ x: y, y: x })
}
}
new_shape = {
@ -731,16 +605,9 @@ export class AudioVisualBuilder {
fft_data = new Float32Array(this.#buffer_length)
this.#analyzer_node.getFloatFrequencyData(fft_data)
}
const from = Math.round((this.#point_count / 100) * this.#from_fft_range)
const to = Math.round(this.#buffer_length - (this.#buffer_length / 100) * this.#to_fft_range)
const squeezeFactor = Math.round((this.#buffer_length - to) / this.#point_count)
const return_array = new Array(this.#point_count)
for (let i = 0; i < this.#point_count; i++) {
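set_fft_size above rejects sizes that are not powers of two with the n & (n - 1) bit trick required by AnalyserNode.fftSize. A standalone sketch of that check with a worked example (not part of the library):

// A power of two has exactly one bit set, so n & (n - 1) clears it to zero.
function is_power_of_two(n: number): boolean {
	return n > 0 && (n & (n - 1)) === 0
}
is_power_of_two(8192) // true: 0b10_0000_0000_0000 & 0b01_1111_1111_1111 === 0
is_power_of_two(6000) // false: set_fft_size would throw "fft_size not power of two"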

View file

@ -35,16 +35,10 @@ invariant(
const graph = readCachedProjectGraph()
const project = graph.nodes[name]
invariant(project, `Could not find project "${name}" in the workspace. Is the project.json configured correctly?`)
const outputPath = project.data?.targets?.build?.options?.outputPath
invariant(outputPath, `Could not find "build.options.outputPath" of project "${name}". Is project.json configured correctly?`)
process.chdir(outputPath)
@ -54,11 +48,7 @@ try {
json.version = version
writeFileSync(`package.json`, JSON.stringify(json, null, 2))
} catch (e) {
console.error(chalk.bold.red(`Error reading package.json file from library build output.`))
}
// Execute "npm publish" to publish