fix formatting

Djkáťo 2024-02-12 16:28:54 +01:00
parent b57321350d
commit ed2d865f2d
24 changed files with 3217 additions and 2495 deletions

@ -3,6 +3,7 @@
"version": "0.0.0",
"license": "MIT",
"scripts": {
"format": "prettier --write '**/*.{js,ts,css,html,json,mjs}'",
"publish-player": "nx build player && cd dist/packages/player && npm publish --access=public",
"publish-visualizer": "nx build visualizer && cd dist/packages/visualizer && npm publish --access=public",
"publish-library": "nx build music-library && cd dist/packages/music-library && npm publish --access=public",

@ -1,4 +1,11 @@
import { Artist, Collection, DB, Ref, RefTo, Song } from "@euterpe.js/music-library";
import {
Artist,
Collection,
DB,
Ref,
RefTo,
Song
} from "@euterpe.js/music-library"
export { DJSong, DJDB }
type ID = number
@ -27,23 +34,29 @@ class DJSong extends Song {
try {
fetch(data.url).then((file) => {
file.arrayBuffer().then((buffer) => {
audio_context.decodeAudioData(buffer).then((audio_buffer) => {
audio_context
.decodeAudioData(buffer)
.then((audio_buffer) => {
this.audio_buffer = audio_buffer
})
})
});
})
} catch (e) {
console.error(new Error("Failed to preprocess DJSong. " + e))
}
}
public async analyze(url: URL, audio_context: AudioContext) {
this.audio_buffer = await audio_context.decodeAudioData(await (await fetch(url)).arrayBuffer())
this.audio_buffer = await audio_context.decodeAudioData(
await (await fetch(url)).arrayBuffer()
)
}
}
class DJDB extends DB {
dj_add(dj_songs: DJSong[]): void {
let inputs
typeof dj_songs[Symbol.iterator] == "function" ? inputs = dj_songs : inputs = [dj_songs]
typeof dj_songs[Symbol.iterator] == "function"
? (inputs = dj_songs)
: (inputs = [dj_songs])
for (const input of inputs) {
if (input instanceof DJSong) {
const song = input as DJSong
@ -52,8 +65,16 @@ class DJDB extends DB {
if (song.in_collection) {
const curr_col = song.in_collection.get(this) as Collection
curr_col.songs.push(new Ref(RefTo.Songs, song.id))
song.artists.forEach((artist) => curr_col.artists.push(new Ref(RefTo.Artists, artist.get(this)!.id!)))
song.remix_artists.forEach((artist) => curr_col.artists.push(new Ref(RefTo.Artists, artist.get(this)!.id!)))
song.artists.forEach((artist) =>
curr_col.artists.push(
new Ref(RefTo.Artists, artist.get(this)!.id!)
)
)
song.remix_artists.forEach((artist) =>
curr_col.artists.push(
new Ref(RefTo.Artists, artist.get(this)!.id!)
)
)
}
for (const artist_ref of song.artists) {
@ -70,5 +91,4 @@ class DJDB extends DB {
}
}
}
}

@ -1,6 +1,6 @@
import { Euterpe } from "@euterpe.js/euterpe";
import { Song } from "@euterpe.js/music-library";
import { MusicPlayer } from "@euterpe.js/player";
import { Euterpe } from "@euterpe.js/euterpe"
import { Song } from "@euterpe.js/music-library"
import { MusicPlayer } from "@euterpe.js/player"
export { DJ }
/**
* To change volume of a track, use track[i].gain.gain, to change master volume, use euterpe/music players volume.
@ -11,15 +11,22 @@ class DJ {
/**in ms */
beat_duration?: number
beat = { current: 0, max: 4, next_bar_in: 4 }
on_beat?: (beat: { current: number, max: number, next_bar_in: number }) => void
constructor(public player: Euterpe | MusicPlayer, public master_bpm: number | 120) {
on_beat?: (beat: {
current: number
max: number
next_bar_in: number
}) => void
constructor(
public player: Euterpe | MusicPlayer,
public master_bpm: number | 120
) {
this.beat_duration = 60 / master_bpm
this.#emit_beats()
}
#emit_beats() {
this.beat.current >= 4 ?
(this.beat.current++, this.beat.next_bar_in--) :
(this.beat.current = 0, this.beat.next_bar_in = this.beat.max)
this.beat.current >= 4
? (this.beat.current++, this.beat.next_bar_in--)
: ((this.beat.current = 0), (this.beat.next_bar_in = this.beat.max))
if (this.on_beat) this.on_beat(this.beat)
//This makes it break if BPM >= 300!!!!
@ -38,7 +45,10 @@ class DJ {
*/
async try_queue_track(track_i: number, delay: number) {
return new Promise((resolve, reject) => {
this.tracks[track_i].try_start(delay).then(() => resolve(this), (e) => reject(e))
this.tracks[track_i].try_start(delay).then(
() => resolve(this),
(e) => reject(e)
)
})
}
/**
@ -47,7 +57,10 @@ class DJ {
*/
async try_load_song_into_track(track_i: number, song: Song) {
return new Promise((resolve, reject) => {
this.tracks[track_i].change_song(song).then(() => resolve(this), (e) => reject(e))
this.tracks[track_i].change_song(song).then(
() => resolve(this),
(e) => reject(e)
)
})
}
/**
@ -57,7 +70,10 @@ class DJ {
*/
async try_start_track(track_i: number) {
return new Promise((resolve, reject) => {
this.tracks[track_i].try_start().then(() => resolve(this), (e) => reject(e))
this.tracks[track_i].try_start().then(
() => resolve(this),
(e) => reject(e)
)
})
}
/**
@ -68,7 +84,6 @@ class DJ {
this.master_bpm = new_master_bpm
this.beat_duration = 60 / this.master_bpm
}
}
class Track {
@ -77,26 +92,39 @@ class Track {
gain: GainNode
audio_context: AudioContext | BaseAudioContext
constructor(public player: MusicPlayer | Euterpe, public current_song?: Song, public should_loop?: boolean) {
constructor(
public player: MusicPlayer | Euterpe,
public current_song?: Song,
public should_loop?: boolean
) {
this.audio_context = player.audio_context
this.gain = this.audio_context.createGain()
if (current_song) this.change_song(current_song).catch((e) => console.error("error during track construction - " + e))
if (current_song)
this.change_song(current_song).catch((e) =>
console.error("error during track construction - " + e)
)
}
async #prepare() {
return new Promise((resolve, reject) => {
if (!this.current_song) reject(new Error("No current song"))
fetch(this.current_song!.url)
.then(
fetch(this.current_song!.url).then(
async (file) => {
this.audio_buffer = await this.audio_context.decodeAudioData(await file.arrayBuffer())
this.audio_buffer =
await this.audio_context.decodeAudioData(
await file.arrayBuffer()
)
resolve(this)
},
(reason) => reject(reason))
(reason) => reject(reason)
)
})
}
#connect() {
if (!this.audio_buffer) throw new Error("Somehow buffer not in track even though it analyzed properly. Report this as a bug")
if (!this.audio_buffer)
throw new Error(
"Somehow buffer not in track even though it analyzed properly. Report this as a bug"
)
this.buffer_source = this.audio_context.createBufferSource()
this.buffer_source.buffer = this.audio_buffer!
this.buffer_source.connect(this.gain)
@ -106,10 +134,13 @@ class Track {
async change_song(new_song: Song) {
return new Promise((resolve, reject) => {
this.current_song = new_song
this.#prepare().then(() => {
this.#prepare().then(
() => {
this.#connect()
resolve(this)
}, (reason) => reject(reason))
},
(reason) => reject(reason)
)
})
}
/**
@ -118,8 +149,11 @@ class Track {
*/
async try_start(delay?: number) {
return new Promise((resolve, reject) => {
if (!this.buffer_source) reject(new Error("No buffer source yet, set a song first"))
this.buffer_source!.start(this.audio_context.currentTime + (delay || 0))
if (!this.buffer_source)
reject(new Error("No buffer source yet, set a song first"))
this.buffer_source!.start(
this.audio_context.currentTime + (delay || 0)
)
})
}
}
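For orientation, a minimal usage sketch of the DJ/Track API reformatted above. The "@euterpe.js/dj" import path and the assumption that dj.tracks is public and already holds a Track are mine; the constructor shape, on_beat, try_load_song_into_track, try_start_track and the per-track gain tip come from this file and the demo's db module.

import { MusicPlayerBuilder } from "@euterpe.js/player"
import { db } from "./db" // the demo library shown elsewhere in this commit
import { DJ } from "@euterpe.js/dj" // hypothetical package entry point

const player = new MusicPlayerBuilder(
    document.querySelector("#audio") as HTMLAudioElement
).build()
const dj = new DJ(player, 140) // 140 BPM master clock

// beat callback driven by master_bpm
dj.on_beat = (beat) => {
    if (beat.next_bar_in === 0) console.log(`bar done (${beat.current}/${beat.max})`)
}

// assumes dj.tracks[0] already exists as a Track
dj.try_load_song_into_track(0, db.songs[0])
    .then(() => dj.try_start_track(0))
    .catch((e) => console.error("track 0 failed: " + e))

// per-track volume lives on the track's GainNode; master volume stays on the player
dj.tracks[0].gain.gain.value = 0.5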

@ -1 +1 @@
import { MusicPlayerBuilder } from "@euterpe.js/player";
import { MusicPlayerBuilder } from "@euterpe.js/player"

@ -1,50 +1,77 @@
import { DB, Song, Artist, Ref, RefTo, Platforms } from "@euterpe.js/music-library"
export const db = new DB
import {
DB,
Song,
Artist,
Ref,
RefTo,
Platforms
} from "@euterpe.js/music-library"
export const db = new DB()
db.add([
//The IDs are added incrementally & are 0 based., so first artists ID added is 0, next 1 etc...
//You can specify the ID manually if you want
new Artist({
name: "Jamie xx",
name: "Jamie xx"
}),
new Artist({
name: "janz",
name: "janz"
}),
new Artist({
name: "Machinedrum",
name: "Machinedrum"
}),
new Artist({
name: "Tanerélle",
name: "Tanerélle"
}),
new Artist({
name: "Mono/Poly",
name: "Mono/Poly"
}),
new Artist({
name: "IMANU",
links: [
[Platforms.Spotify, new URL("https://open.spotify.com/artist/5Y7rFm0tiJTVDzGLMzz0W1?si=DRaZyugTTIqlBHDkMGKVqA&nd=1")]
[
Platforms.Spotify,
new URL(
"https://open.spotify.com/artist/5Y7rFm0tiJTVDzGLMzz0W1?si=DRaZyugTTIqlBHDkMGKVqA&nd=1"
)
]
]
}),
new Artist({
name: "toe",
id: 10
}),
})
])
db.add([
new Song({
//Refrences are constructed as such. This allows to get to the artist from either collection or song
artists: [new Ref(RefTo.Artists, 2), new Ref(RefTo.Artists, 3), new Ref(RefTo.Artists, 4)],
artists: [
new Ref(RefTo.Artists, 2),
new Ref(RefTo.Artists, 3),
new Ref(RefTo.Artists, 4)
],
duration: 252,
name: "Star",
remix_artists: [new Ref(RefTo.Artists, 5)],
url: new URL("http://" + window.location.host + "/Machinedrum, Tanerelle & Mono Poly - Star (IMANU Remix) final.mp3")
url: new URL(
"http://" +
window.location.host +
"/Machinedrum, Tanerelle & Mono Poly - Star (IMANU Remix) final.mp3"
)
}),
new Song({
//If you don't like guessing the IDs, then this is also a way to do it
artists: [new Ref(RefTo.Artists, db.artists.find((a) => a.name == "Jamie xx")!.id!)],
artists: [
new Ref(
RefTo.Artists,
db.artists.find((a) => a.name == "Jamie xx")!.id!
)
],
duration: 331,
name: "Sleep Sound",
url: new URL("http://" + window.location.host + "/Jamie xx - Sleep Sound.mp3")
url: new URL(
"http://" + window.location.host + "/Jamie xx - Sleep Sound.mp3"
)
}),
new Song({
artists: [new Ref(RefTo.Artists, 1)],
@ -56,7 +83,10 @@ db.add([
artists: [new Ref(RefTo.Artists, 10)],
duration: 4 * 60 + 5,
name: "サニーボーイ・ラプソディ",
url: new URL("http://" + window.location.host + "/16.サニーボーイ・ラプソディ.ogg")
url: new URL(
"http://" +
window.location.host +
"/16.サニーボーイ・ラプソディ.ogg"
)
})
])

@ -1,15 +1,21 @@
import { db } from "./db";
import { EuterpeBuilder } from "@euterpe.js/euterpe";
import { db } from "./db"
import { EuterpeBuilder } from "@euterpe.js/euterpe"
let is_seeking = false
// document.addEventListener("click", start, { once: true })
const euterpe = new EuterpeBuilder(document.querySelector("#audio")!, db)
.build()
const euterpe = new EuterpeBuilder(
document.querySelector("#audio")!,
db
).build()
add_library_to_dom()
euterpe.try_preload_song(0).then(() => {
document.querySelector("#text-playing")!.innerHTML = euterpe.format_current_song()
}, (e) => console.log(e + " Failed to preload"))
euterpe.try_preload_song(0).then(
() => {
document.querySelector("#text-playing")!.innerHTML =
euterpe.format_current_song()
},
(e) => console.log(e + " Failed to preload")
)
document.querySelector("#seek")?.addEventListener("mouseup", (e) => {
try {
@ -38,14 +44,22 @@ euterpe.on_time_tick((time) => {
})
document.querySelector("#previous")?.addEventListener("click", () => {
euterpe.try_previous_song_looping().then(() => {
document.querySelector("#text-playing")!.innerHTML = euterpe.format_current_song()
}, (e) => alert(e + "Failed to change song"))
euterpe.try_previous_song_looping().then(
() => {
document.querySelector("#text-playing")!.innerHTML =
euterpe.format_current_song()
},
(e) => alert(e + "Failed to change song")
)
})
document.querySelector("#next")?.addEventListener("click", () => {
euterpe.try_next_song_looping().then(() => {
document.querySelector("#text-playing")!.innerHTML = euterpe.format_current_song()
}, (e) => alert(e + "Failed to change song"))
euterpe.try_next_song_looping().then(
() => {
document.querySelector("#text-playing")!.innerHTML =
euterpe.format_current_song()
},
(e) => alert(e + "Failed to change song")
)
})
document.querySelector("#play")?.addEventListener("click", () => {
@ -64,14 +78,16 @@ document.querySelector("#toggle-mute")?.addEventListener("click", () => {
euterpe.mute_toggle()
})
document.querySelector("#toggle-play")?.addEventListener("click", () => {
euterpe.try_play_toggle().catch((e) => alert("failed to toggle pause/play!" + e))
euterpe
.try_play_toggle()
.catch((e) => alert("failed to toggle pause/play!" + e))
})
document.querySelector("#volume")?.addEventListener("input", (e) => {
euterpe.change_volume(e.target?.valueAsNumber)
})
//disables time updates so the time slider doesn't slip away from user
document.querySelector("#seek")?.addEventListener("mousedown", () => {
is_seeking = true;
is_seeking = true
})
function add_library_to_dom() {
@ -103,7 +119,9 @@ function add_library_to_dom() {
function library_play(e: MouseEvent) {
const b = e.currentTarget as HTMLButtonElement
euterpe.try_specific_song(parseInt(b.dataset["id"]!)).then(
() => document.querySelector("#text-playing")!.innerHTML = euterpe.format_current_song(),
() =>
(document.querySelector("#text-playing")!.innerHTML =
euterpe.format_current_song()),
(e) => alert(e)
)
}

@ -20,8 +20,14 @@ class Euterpe extends MusicPlayer {
current_song_path?: string,
private options?: BuilderOptions
) {
super(audio_context, audio_element, track, gain, volume, current_song_path)
super(
audio_context,
audio_element,
track,
gain,
volume,
current_song_path
)
}
/**
@ -47,14 +53,17 @@ class Euterpe extends MusicPlayer {
if (this.queue.length > 0) {
new_song = this.queue.shift()!
} else {
let id_i = this.db.songs.length;
let id_i = this.db.songs.length
while (this.db.songs[--id_i].id! > this.current_song_id);
const next_id = ++id_i;
const next_id = ++id_i
if (next_id == this.db.songs.length) throw new Error("Won't go past the last song")
if (next_id == this.db.songs.length)
throw new Error("Won't go past the last song")
new_song = this.db.songs.find((song) => song.id == next_id)!
}
const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
const url = this.options?.use_only_pathname_url
? new_song.url.pathname
: new_song.url.toString()
await this.try_new_song(url)
await this.try_play()
if (this.current_song) this.played_history.push(this.current_song)
@ -71,14 +80,16 @@ class Euterpe extends MusicPlayer {
if (this.queue.length > 0) {
new_song = this.queue.shift()!
} else {
let id_i = this.db.songs.length;
let id_i = this.db.songs.length
while (this.db.songs[--id_i].id! > this.current_song_id);
let next_id = ++id_i
if (next_id == this.db.songs.length) next_id = this.db.songs[0].id!
new_song = this.db.songs.find((song) => song.id == next_id)!
}
const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
const url = this.options?.use_only_pathname_url
? new_song.url.pathname
: new_song.url.toString()
await this.try_new_song(url)
await this.try_play()
if (this.current_song) this.played_history.push(this.current_song)
@ -95,14 +106,16 @@ class Euterpe extends MusicPlayer {
if (this.queue.length > 0) {
new_song = this.queue.shift()!
} else {
let id_i = this.db.songs.length;
let id_i = this.db.songs.length
while (this.db.songs[--id_i].id! > this.current_song_id);
let next_id = ++id_i;
let next_id = ++id_i
if (next_id == this.db.songs.length) next_id = this.db.songs[0].id!
new_song = this.db.songs.find((song) => song.id == next_id)!
}
const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
const url = this.options?.use_only_pathname_url
? new_song.url.pathname
: new_song.url.toString()
this.new_song(url)
this.play()
if (this.current_song) this.played_history.push(this.current_song)
@ -119,12 +132,14 @@ class Euterpe extends MusicPlayer {
if (this.queue.length > 0) {
new_song = this.queue.shift()!
} else {
let id_i = this.db.songs.length;
let id_i = this.db.songs.length
while (this.db.songs[--id_i].id! > this.current_song_id);
const next_id = ++id_i;
const next_id = ++id_i
new_song = this.db.songs.find((song) => song.id == next_id)!
}
const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
const url = this.options?.use_only_pathname_url
? new_song.url.pathname
: new_song.url.toString()
this.new_song(url)
this.play()
if (this.current_song) this.played_history.push(this.current_song)
@ -153,7 +168,9 @@ class Euterpe extends MusicPlayer {
specific_song(new_song_id: number) {
const new_song = this.db.songs.find((song) => song.id! == new_song_id)
if (!new_song) return
const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
const url = this.options?.use_only_pathname_url
? new_song.url.pathname
: new_song.url.toString()
this.new_song(url)
this.play()
if (this.current_song) this.played_history.push(this.current_song)
@ -171,14 +188,17 @@ class Euterpe extends MusicPlayer {
if (this.played_history.length > 0) {
new_song = this.played_history.pop()!
} else {
let id_i = 0;
let id_i = 0
while (this.db.songs[++id_i].id! < this.current_song_id);
const next_id = --id_i;
const next_id = --id_i
if (next_id == this.db.songs.length) throw new Error("Won't roll backwards to last song")
if (next_id == this.db.songs.length)
throw new Error("Won't roll backwards to last song")
new_song = this.db.songs.find((song) => song.id == next_id)!
}
const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
const url = this.options?.use_only_pathname_url
? new_song.url.pathname
: new_song.url.toString()
await this.try_new_song(url)
await this.try_play()
//if (this.current_song) this.played_history.push(this.current_song)
@ -195,14 +215,17 @@ class Euterpe extends MusicPlayer {
if (this.played_history.length > 0) {
new_song = this.played_history.pop()!
} else {
let id_i = -1;
let id_i = -1
while (this.db.songs[++id_i].id! < this.current_song_id);
let next_id = --id_i;
let next_id = --id_i
if (next_id == -1) next_id = this.db.songs[this.db.songs.length - 1].id!
if (next_id == -1)
next_id = this.db.songs[this.db.songs.length - 1].id!
new_song = this.db.songs.find((song) => song.id == next_id)!
}
const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
const url = this.options?.use_only_pathname_url
? new_song.url.pathname
: new_song.url.toString()
await this.try_new_song(url)
await this.try_play()
//if (this.current_song) this.played_history.push(this.current_song)
@ -219,14 +242,17 @@ class Euterpe extends MusicPlayer {
if (this.played_history.length > 0) {
new_song = this.played_history.pop()!
} else {
let id_i = 0;
let id_i = 0
while (this.db.songs[++id_i].id! < this.current_song_id);
const next_id = -id_i;
const next_id = -id_i
if (next_id == this.db.songs.length) throw new Error("Won't go past the last song")
if (next_id == this.db.songs.length)
throw new Error("Won't go past the last song")
new_song = this.db.songs.find((song) => song.id == next_id)!
}
const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
const url = this.options?.use_only_pathname_url
? new_song.url.pathname
: new_song.url.toString()
this.new_song(url)
this.play()
//if (this.current_song) this.played_history.push(this.current_song)
@ -243,14 +269,17 @@ class Euterpe extends MusicPlayer {
if (this.played_history.length > 0) {
new_song = this.played_history.pop()!
} else {
let id_i = 0;
let id_i = 0
while (this.db.songs[++id_i].id! < this.current_song_id);
let next_id = -id_i;
let next_id = -id_i
if (next_id == this.db.songs.length) next_id = this.db.songs[this.db.songs.length].id!
if (next_id == this.db.songs.length)
next_id = this.db.songs[this.db.songs.length].id!
new_song = this.db.songs.find((song) => song.id == next_id)!
}
const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
const url = this.options?.use_only_pathname_url
? new_song.url.pathname
: new_song.url.toString()
this.new_song(url)
this.play()
//if (this.current_song) this.played_history.push(this.current_song)
@ -263,7 +292,6 @@ class Euterpe extends MusicPlayer {
* @returns {ARTIST}, {ARTIST2}... - {SONG NAME} ({REMIX ARTIST}, {REMIX ARTIST2}... remix)
*/
format_current_song(id = this.current_song?.id) {
const curr_song = this.db.songs.find((song) => song.id == id)
if (!curr_song) {
return "ID - ID"
@ -313,7 +341,8 @@ class Euterpe extends MusicPlayer {
try_queue_add(id: number) {
const curr_song = this.db.songs.find((song) => song.id == id)
if (!curr_song) throw new Error(`Song of id "${id}" doesn't exist`)
if (this.queue.find((song) => song.id == id)) throw new Error(`Song of id "${id}" already queued`)
if (this.queue.find((song) => song.id == id))
throw new Error(`Song of id "${id}" already queued`)
this.queue.push(curr_song)
}
/**
@ -344,7 +373,6 @@ class Euterpe extends MusicPlayer {
if (i == -1) return
return this.queue.splice(i, 1)
}
}
type BuilderOptions = {
use_only_pathname_url?: boolean
@ -355,7 +383,7 @@ class EuterpeBuilder {
#gain: GainNode
#track: MediaElementAudioSourceNode
#volume = 1
#prev_node: any;
#prev_node: any
#is_gain_connected = false
/**
@ -363,12 +391,18 @@ class EuterpeBuilder {
* will throw if audio_element is undefined (stupid vue setup amirite?)
* will throw if user has not interacted with the page yet (Can't initiate AudioContext)
*/
constructor(private audio_element: HTMLAudioElement, private db: DB, private options?: BuilderOptions) {
if (audio_element === undefined) throw Error("audio_element was undefined")
constructor(
private audio_element: HTMLAudioElement,
private db: DB,
private options?: BuilderOptions
) {
if (audio_element === undefined)
throw Error("audio_element was undefined")
// ↓ For old browsers
const AudioContext = window.AudioContext || window.webkitAudioContext;
const AudioContext = window.AudioContext || window.webkitAudioContext
this.#audio_context = new AudioContext()
this.#track = this.#audio_context.createMediaElementSource(audio_element)
this.#track =
this.#audio_context.createMediaElementSource(audio_element)
this.#gain = this.#audio_context.createGain()
}
/**
@ -377,7 +411,9 @@ class EuterpeBuilder {
*/
add_analyser() {
const analyser = this.#audio_context.createAnalyser()
!this.#prev_node ? this.#track.connect(analyser) : this.#prev_node.connect(analyser)
!this.#prev_node
? this.#track.connect(analyser)
: this.#prev_node.connect(analyser)
this.#prev_node = analyser
return analyser
}
@ -387,7 +423,9 @@ class EuterpeBuilder {
*/
add_stereo_panner_node() {
const panner = this.#audio_context.createStereoPanner()
!this.#prev_node ? this.#track.connect(panner) : this.#prev_node.connect(panner)
!this.#prev_node
? this.#track.connect(panner)
: this.#prev_node.connect(panner)
this.#prev_node = panner
return panner
}
@ -397,7 +435,9 @@ class EuterpeBuilder {
*/
add_wave_shaper_node() {
const shaper = this.#audio_context.createWaveShaper()
!this.#prev_node ? this.#track.connect(shaper) : this.#prev_node.connect(shaper)
!this.#prev_node
? this.#track.connect(shaper)
: this.#prev_node.connect(shaper)
this.#prev_node = shaper
return shaper
}
@ -405,7 +445,9 @@ class EuterpeBuilder {
* For additional trickery, you can connect your own node.
*/
connect_custom_node(node: AudioNode) {
!this.#prev_node ? this.#track.connect(node) : this.#prev_node.connect(node)
!this.#prev_node
? this.#track.connect(node)
: this.#prev_node.connect(node)
this.#prev_node = node
}
/**
@ -413,7 +455,9 @@ class EuterpeBuilder {
* eg. if you want the analyser nodes output to be affected by user #gain
*/
connect_gain() {
!this.#prev_node ? this.#track.connect(this.#gain) : this.#prev_node.connect(this.#gain)
!this.#prev_node
? this.#track.connect(this.#gain)
: this.#prev_node.connect(this.#gain)
this.#prev_node = this.#gain
this.#is_gain_connected = true
}
@ -423,10 +467,21 @@ class EuterpeBuilder {
*/
build() {
if (!this.#is_gain_connected) {
!this.#prev_node ? this.#track.connect(this.#gain) : this.#prev_node.connect(this.#gain)
!this.#prev_node
? this.#track.connect(this.#gain)
: this.#prev_node.connect(this.#gain)
this.#prev_node = this.#gain
}
this.#prev_node.connect(this.#audio_context.destination)
return new Euterpe(this.db, this.#audio_context, this.audio_element, this.#track, this.#gain, this.#volume, undefined, this.options)
return new Euterpe(
this.db,
this.#audio_context,
this.audio_element,
this.#track,
this.#gain,
this.#volume,
undefined,
this.options
)
}
}
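A short sketch of how the builder above is wired in practice, based on this file and the demo app earlier in the commit; the #audio element and the db import are taken from that demo, and the analyser step is optional.

const audio_el = document.querySelector("#audio") as HTMLAudioElement
const builder = new EuterpeBuilder(audio_el, db, {
    use_only_pathname_url: true // BuilderOptions flag defined above
})
const analyser = builder.add_analyser() // optional node inserted before the gain
analyser.fftSize = 2048 // standard AnalyserNode property
const euterpe = builder.build() // connects gain + destination, returns Euterpe

euterpe.try_preload_song(0).then(
    () => console.log("loaded: " + euterpe.format_current_song()),
    (e) => console.error("preload failed: " + e)
)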

@ -1,42 +1,66 @@
import { DB, Artist, Song, RefTo, Ref, Platforms } from "@euterpe.js/music-library";
export const db = new DB
import {
DB,
Artist,
Song,
RefTo,
Ref,
Platforms
} from "@euterpe.js/music-library"
export const db = new DB()
db.add([
//The IDs are added incrementally & are 0 based., so first artists ID added is 0, next 1 etc...
//You can specify the ID manually if you want
new Artist({
name: "Jamie xx",
name: "Jamie xx"
}),
new Artist({
name: "janz",
name: "janz"
}),
new Artist({
name: "Machinedrum",
name: "Machinedrum"
}),
new Artist({
name: "Tanerélle",
name: "Tanerélle"
}),
new Artist({
name: "Mono/Poly",
name: "Mono/Poly"
}),
new Artist({
name: "IMANU",
links: [
[Platforms.Spotify, new URL("https://open.spotify.com/artist/5Y7rFm0tiJTVDzGLMzz0W1?si=DRaZyugTTIqlBHDkMGKVqA&nd=1")]
[
Platforms.Spotify,
new URL(
"https://open.spotify.com/artist/5Y7rFm0tiJTVDzGLMzz0W1?si=DRaZyugTTIqlBHDkMGKVqA&nd=1"
)
]
})])
]
})
])
db.add([
new Song({
//Refrences are constructed as such. This allows to get to the artist from either collection or song
artists: [new Ref(RefTo.Artists, 2), new Ref(RefTo.Artists, 3), new Ref(RefTo.Artists, 4)],
artists: [
new Ref(RefTo.Artists, 2),
new Ref(RefTo.Artists, 3),
new Ref(RefTo.Artists, 4)
],
duration: 252,
name: "Star",
remix_artists: [new Ref(RefTo.Artists, 5)],
url: new URL("http://127.0.0.1:4200/Machinedrum, Tanerelle & Mono Poly - Star (IMANU Remix) final.mp3")
url: new URL(
"http://127.0.0.1:4200/Machinedrum, Tanerelle & Mono Poly - Star (IMANU Remix) final.mp3"
)
}),
new Song({
//If you don't like guessing the IDs, then this is also a way to do it
artists: [new Ref(RefTo.Artists, db.artists.find((a) => a.name == "Jamie xx")!.id!)],
artists: [
new Ref(
RefTo.Artists,
db.artists.find((a) => a.name == "Jamie xx")!.id!
)
],
duration: 331,
name: "Sleep Sound",
url: new URL("http://127.0.0.1:4200/Jamie xx - Sleep Sound.mp3")

@ -1,40 +1,62 @@
import { MusicPlayerBuilder } from "@euterpe.js/player";
import { db } from "./db";
import { Artist } from "@euterpe.js/music-library";
import { DB, Platforms } from "@euterpe.js/music-library";
import { MusicPlayerBuilder } from "@euterpe.js/player"
import { db } from "./db"
import { Artist } from "@euterpe.js/music-library"
import { DB, Platforms } from "@euterpe.js/music-library"
const audio_el = document.querySelector("#audio") as HTMLAudioElement
const music_player_builder = new MusicPlayerBuilder(audio_el)
const music_player = music_player_builder.build()
music_player.change_volume(1)
let curr_song_id = 1;
let curr_song_id = 1
const elem_curr_song = document.querySelector("#text-playing")
music_player.try_new_song_async(db.songs[curr_song_id].url.pathname)
.then(() => {
music_player.try_new_song_async(db.songs[curr_song_id].url.pathname).then(
() => {
let is_seeking = false
change_current_song_text(db)
document.querySelector("#previous")?.addEventListener("click", () => {
curr_song_id--
if (curr_song_id < 0) curr_song_id = 2
music_player.try_new_song_async(db.songs[curr_song_id].url.pathname).then((s) => {
music_player
.try_new_song_async(db.songs[curr_song_id].url.pathname)
.then(
(s) => {
change_current_song_text(db)
music_player.play_async().catch((err) => { console.log(err) })
}, (e) => { console.log(e) })
music_player.play_async().catch((err) => {
console.log(err)
})
},
(e) => {
console.log(e)
}
)
})
document.querySelector("#next")?.addEventListener("click", () => {
curr_song_id++
if (curr_song_id > 2) curr_song_id = 0
music_player.try_new_song_async(db.songs[curr_song_id].url.pathname).then((s) => {
music_player
.try_new_song_async(db.songs[curr_song_id].url.pathname)
.then(
(s) => {
change_current_song_text(db)
music_player.play_async().catch((err) => { console.log(err) })
}, (e) => { console.log(e) })
music_player.play_async().catch((err) => {
console.log(err)
})
},
(e) => {
console.log(e)
}
)
})
document.querySelector("#play")?.addEventListener("click", () => {
music_player.play_async()
.then(() => { console.log("Playing!") }, (e) => alert("Failed to play, " + e))
music_player.play_async().then(
() => {
console.log("Playing!")
},
(e) => alert("Failed to play, " + e)
)
})
document.querySelector("#pause")?.addEventListener("click", () => {
music_player.pause()
@ -45,28 +67,41 @@ music_player.try_new_song_async(db.songs[curr_song_id].url.pathname)
document.querySelector("#unmute")?.addEventListener("click", () => {
music_player.unmute()
})
document.querySelector("#toggle-mute")?.addEventListener("click", () => {
document
.querySelector("#toggle-mute")
?.addEventListener("click", () => {
music_player.mute_toggle()
})
document.querySelector("#toggle-play")?.addEventListener("click", () => {
music_player.play_toggle_async().then((s) => console.log("toggled play/pause"), (e) => alert("failed to toggle pause/play!" + e))
document
.querySelector("#toggle-play")
?.addEventListener("click", () => {
music_player.play_toggle_async().then(
(s) => console.log("toggled play/pause"),
(e) => alert("failed to toggle pause/play!" + e)
)
})
document.querySelector("#volume")?.addEventListener("input", (e) => {
music_player.change_volume(e.target?.valueAsNumber)
})
document.querySelector("#seek")?.addEventListener("mousedown", (e) => {
is_seeking = true;
is_seeking = true
})
document.querySelector("#seek")?.addEventListener("mouseup", (e) => {
music_player.try_seek_async(e.target?.valueAsNumber).then(() => { console.log("seeked to " + e.target?.valueAsNumber) }, () => {
music_player.try_seek_async(e.target?.valueAsNumber).then(
() => {
console.log("seeked to " + e.target?.valueAsNumber)
},
() => {
alert("Failed seeking! " + e)
})
}
)
is_seeking = false
})
// Subscriptions to AudioContext changes, eg. time..
music_player.subscribe_to_formatted_duration_time((time) => {
document.querySelector("#duration").innerHTML = time
document.querySelector("#seek").max = "" + music_player.get_current_duration()
document.querySelector("#seek").max =
"" + music_player.get_current_duration()
})
music_player.subscribe_to_formatted_current_time_tick((time) => {
document.querySelector("#current").innerHTML = time
@ -75,9 +110,9 @@ music_player.try_new_song_async(db.songs[curr_song_id].url.pathname)
if (is_seeking) return
document.querySelector("#seek").value = "" + time
})
}, (e) => console.log(e))
},
(e) => console.log(e)
)
function change_current_song_text(db: DB) {
const curr_song = db.songs[curr_song_id]
@ -98,7 +133,9 @@ function change_current_song_text(db: DB) {
const curr_artist = artist.get(db) as Artist
if (curr_artist.links && curr_artist.links.length > 0) {
//returns "found a link! Spotify"
console.log("found a link! " + Platforms[curr_artist.links[0][0]])
console.log(
"found a link! " + Platforms[curr_artist.links[0][0]]
)
const url = curr_artist.links[0][1]
final_text += `<a href=${url}>${curr_artist.name}</a>, `

@ -20,7 +20,7 @@ enum CollectionType {
EP = "EP",
Single = "Single",
Playlist = "Playlist",
Release = "Release",
Release = "Release"
}
enum Platforms {
Youtube = "Youtube",
@ -33,7 +33,7 @@ enum Platforms {
Instagram = "Instagram",
Patreon = "Patreon",
Twitter = "Twitter",
Facebook = "Facebook",
Facebook = "Facebook"
}
class Ref {
@ -68,7 +68,7 @@ interface SongConstructor {
bpm?: number
key?: string
fft_data?: number[]
id?: ID,
id?: ID
metadata?: any[]
}
class Song {
@ -106,7 +106,7 @@ class Song {
}
interface ArtistConstructor {
name: string,
name: string
pfp?: URL
songs?: Ref[]
collections?: Ref[]
@ -145,7 +145,6 @@ interface CollectionConstructor {
metadata?: any[]
name?: string
type?: CollectionType
}
class Collection {
name?: string
@ -180,7 +179,9 @@ class DB {
add(artist: Artist[]): void
add(collection: Collection[]): void
add(mix: (Song | Artist | Collection)[]): void
add(stuff: Artist[] | Collection[] | Song[] | (Song | Artist | Collection)[]) {
add(
stuff: Artist[] | Collection[] | Song[] | (Song | Artist | Collection)[]
) {
/** All of this adds refrences to the other side of whatever is being added.
* eg. adding song with refrence to artist, adds refrence of song to artist
* and adds incremental ids
@ -206,9 +207,7 @@ class DB {
curr_col?.artists.push(new Ref(RefTo.Artists, artist.id))
}
this.artists.push(artist)
}
else if (input instanceof Collection) {
} else if (input instanceof Collection) {
const col = input as Collection
if (!col.id) col.id = this.collections.length
@ -218,20 +217,28 @@ class DB {
}
for (const artist_ref of col.artists) {
const curr_artist = artist_ref.get(this) as Artist
curr_artist.collections.push(new Ref(RefTo.Collections, col.id))
curr_artist.collections.push(
new Ref(RefTo.Collections, col.id)
)
}
this.collections.push(col)
}
else if (input instanceof Song) {
} else if (input instanceof Song) {
const song = input as Song
if (!song.id) song.id = this.songs.length
if (song.in_collection) {
const curr_col = song.in_collection.get(this) as Collection
curr_col.songs.push(new Ref(RefTo.Songs, song.id))
song.artists.forEach((artist) => curr_col.artists.push(new Ref(RefTo.Artists, artist.get(this)!.id!)))
song.remix_artists.forEach((artist) => curr_col.artists.push(new Ref(RefTo.Artists, artist.get(this)!.id!)))
song.artists.forEach((artist) =>
curr_col.artists.push(
new Ref(RefTo.Artists, artist.get(this)!.id!)
)
)
song.remix_artists.forEach((artist) =>
curr_col.artists.push(
new Ref(RefTo.Artists, artist.get(this)!.id!)
)
)
}
for (const artist_ref of song.artists) {
@ -250,45 +257,99 @@ class DB {
this.collections.sort((a, b) => a.id! - b.id!)
this.artists.sort((a, b) => a.id! - b.id!)
}
}
function from_json(db_stringified: { artists?: any, songs?: any, collections?: any }): DB {
const db = new DB
function from_json(db_stringified: {
artists?: any
songs?: any
collections?: any
}): DB {
const db = new DB()
if (db_stringified.artists) {
for (const artist of db_stringified.artists) {
if (artist.songs) artist.songs = artist.songs.map((e: any) => ref_from_json(e))
if (artist.collections) artist.collections = artist.collections.map((e: any) => ref_from_json(e))
if (artist.links) artist.links = artist.links.map((e: any) => { try { [e[0] as Platforms, new URL(e[1])] } catch (e) { console.log(e) } })
if (artist.publish_date) artist.publish_date = new Date(JSON.parse(artist.publish_date))
if (artist.songs)
artist.songs = artist.songs.map((e: any) => ref_from_json(e))
if (artist.collections)
artist.collections = artist.collections.map((e: any) =>
ref_from_json(e)
)
if (artist.links)
artist.links = artist.links.map((e: any) => {
try {
;[e[0] as Platforms, new URL(e[1])]
} catch (e) {
console.log(e)
}
})
if (artist.publish_date)
artist.publish_date = new Date(JSON.parse(artist.publish_date))
if (artist.id) artist.id = artist.id as ID
try { if (artist.pfp) artist.pfp = new URL(artist.pfp) }
catch (e) { console.error(e), console.error("failed to parse artist URL") }
try {
if (artist.pfp) artist.pfp = new URL(artist.pfp)
} catch (e) {
console.error(e), console.error("failed to parse artist URL")
}
db.artists.push(artist)
}
}
if (db_stringified.songs) {
for (const song of db_stringified.songs) {
try { if (song.url) song.url = new URL(song.url) } catch (e) { console.error("failed to parse song.url" + e) }
if (song.artists) song.artists = song.artists.map((e: any) => ref_from_json(e))
if (song.remix_artists) song.remix_artists = song.remix_artists.map((e: any) => ref_from_json(e))
if (song.in_collection) song.in_collection = ref_from_json(song.in_collection)
try { if (song.cover) song.cover = new URL(song.cover) }
catch (e) { console.error(e), console.error("failed to parse artist URL") }
try { if (song.publish_date) song.publish_date = new Date(JSON.parse(song.publish_date)) }
catch (e) { console.error(e), console.error("Failed to song cover url") }
try {
if (song.url) song.url = new URL(song.url)
} catch (e) {
console.error("failed to parse song.url" + e)
}
if (song.artists)
song.artists = song.artists.map((e: any) => ref_from_json(e))
if (song.remix_artists)
song.remix_artists = song.remix_artists.map((e: any) =>
ref_from_json(e)
)
if (song.in_collection)
song.in_collection = ref_from_json(song.in_collection)
try {
if (song.cover) song.cover = new URL(song.cover)
} catch (e) {
console.error(e), console.error("failed to parse artist URL")
}
try {
if (song.publish_date)
song.publish_date = new Date(JSON.parse(song.publish_date))
} catch (e) {
console.error(e), console.error("Failed to song cover url")
}
if (song.id) song.id = song.id as ID
db.songs.push(song)
}
}
if (db_stringified.collections) {
for (const collection of db_stringified.collections) {
if (collection.artists) collection.artists = collection.artists.map((e: any) => ref_from_json(e))
if (collection.songs) collection.songs = collection.songs.map((e: any) => ref_from_json(e))
if (collection.type) collection.type = collection.type.map((e: any) => e as CollectionType)
try { if (collection.publish_date) collection.publish_date = new Date(JSON.parse(collection.publish_date)) }
catch (e) { console.error(e), console.error("Failed to parse date") }
try { if (collection.cover) collection.cover = new URL(collection.cover) }
catch (e) { console.error(e), console.error("failed to parse collection cover url") }
if (collection.artists)
collection.artists = collection.artists.map((e: any) =>
ref_from_json(e)
)
if (collection.songs)
collection.songs = collection.songs.map((e: any) =>
ref_from_json(e)
)
if (collection.type)
collection.type = collection.type.map(
(e: any) => e as CollectionType
)
try {
if (collection.publish_date)
collection.publish_date = new Date(
JSON.parse(collection.publish_date)
)
} catch (e) {
console.error(e), console.error("Failed to parse date")
}
try {
if (collection.cover)
collection.cover = new URL(collection.cover)
} catch (e) {
console.error(e),
console.error("failed to parse collection cover url")
}
if (collection.id) collection.id = collection.id as ID
db.collections.push(collection)
}
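To tie the add()/from_json() code above together, a small round-trip sketch. It assumes JSON.stringify serializes the Ref, URL and Date fields the way from_json expects, which is what the visualizer demo later in this commit relies on; the song data is copied from the demo db modules.

import {
    DB,
    Artist,
    Song,
    Ref,
    RefTo,
    from_json
} from "@euterpe.js/music-library"

const db = new DB()
db.add([new Artist({ name: "Jamie xx" })]) // first artist gets id 0
db.add([
    new Song({
        name: "Sleep Sound",
        duration: 331,
        artists: [new Ref(RefTo.Artists, 0)],
        url: new URL("http://127.0.0.1:4200/Jamie xx - Sleep Sound.mp3")
    })
])
// add() also writes the back-reference, per the doc comment in this file
const first_song = db.artists[0].songs[0].get(db) as Song
console.log(first_song.name) // "Sleep Sound"

// serialize and restore
const restored = from_json(JSON.parse(JSON.stringify(db)))
console.log(restored.songs[0].url instanceof URL) // true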

@ -1,16 +1,28 @@
import { MusicPlayerBuilder } from "@euterpe.js/player";
import { MusicPlayerBuilder } from "@euterpe.js/player"
const audio_el = document.querySelector("#audio") as HTMLAudioElement
const music_player_builder = new MusicPlayerBuilder(audio_el)
const music_player = music_player_builder.build()
music_player.change_volume(1)
music_player.try_new_song(encodeURI("http://" + window.location.host + "/nuphory - NVISION (EXTENDED MIX).ogg"))
.then(() => {
music_player
.try_new_song(
encodeURI(
"http://" +
window.location.host +
"/nuphory - NVISION (EXTENDED MIX).ogg"
)
)
.then(
() => {
let is_seeking = false
document.querySelector("#play")?.addEventListener("click", () => {
//const analyser_node = music_player_builder.add_analyser()
music_player.try_play()
.then(() => { console.log("Playing!") }, (e) => alert("Failed to play, " + e))
music_player.try_play().then(
() => {
console.log("Playing!")
},
(e) => alert("Failed to play, " + e)
)
})
document.querySelector("#pause")?.addEventListener("click", () => {
music_player.pause()
@ -21,19 +33,32 @@ music_player.try_new_song(encodeURI("http://" + window.location.host + "/nuphory
document.querySelector("#unmute")?.addEventListener("click", () => {
music_player.unmute()
})
document.querySelector("#toggle-mute")?.addEventListener("click", () => {
document
.querySelector("#toggle-mute")
?.addEventListener("click", () => {
music_player.mute_toggle()
})
document.querySelector("#toggle-play")?.addEventListener("click", () => {
music_player.try_play_toggle().then((s) => console.log("toggled play/pause"), (e) => alert("failed to toggle pause/play!" + e))
document
.querySelector("#toggle-play")
?.addEventListener("click", () => {
music_player.try_play_toggle().then(
(s) => console.log("toggled play/pause"),
(e) => alert("failed to toggle pause/play!" + e)
)
})
document.querySelector("#volume")?.addEventListener("input", (e) => {
document
.querySelector("#volume")
?.addEventListener("input", (e) => {
music_player.change_volume(e.target?.valueAsNumber)
})
document.querySelector("#seek")?.addEventListener("mousedown", (e) => {
is_seeking = true;
document
.querySelector("#seek")
?.addEventListener("mousedown", (e) => {
is_seeking = true
})
document.querySelector("#seek")?.addEventListener("mouseup", (e) => {
document
.querySelector("#seek")
?.addEventListener("mouseup", (e) => {
try {
music_player.try_seek(e.target?.valueAsNumber)
console.log("seeked to " + e.target?.valueAsNumber)
@ -45,7 +70,8 @@ music_player.try_new_song(encodeURI("http://" + window.location.host + "/nuphory
// Subscriptions to AudioContext changes, eg. time..
music_player.on_duration_formatted((time) => {
document.querySelector("#duration")!.innerHTML = time
document.querySelector("#seek")!.max = "" + music_player.current_song_duration
document.querySelector("#seek")!.max =
"" + music_player.current_song_duration
})
music_player.on_time_tick_formatted((time) => {
document.querySelector("#current")!.innerHTML = time
@ -54,5 +80,6 @@ music_player.try_new_song(encodeURI("http://" + window.location.host + "/nuphory
if (is_seeking) return
document.querySelector("#seek")!.value = "" + time
})
}, (e) => console.log(e))
},
(e) => console.log(e)
)

@ -1,7 +1,7 @@
export enum SubscribeEvents {
CurrentTimeTick,
FormattedDurationTick,
FormattedCurrentTimeTick,
FormattedCurrentTimeTick
}
class PubSub {
//el = event listener
@ -29,19 +29,28 @@ class PubSub {
switch (event_name) {
case SubscribeEvents.CurrentTimeTick: {
if (this.el_current_time_tick.includes(func)) {
this.el_current_time_tick.splice(this.el_current_time_tick.indexOf(func), 1)
this.el_current_time_tick.splice(
this.el_current_time_tick.indexOf(func),
1
)
}
break
}
case SubscribeEvents.FormattedDurationTick: {
if (this.el_formatted_duration_tick.includes(func)) {
this.el_formatted_duration_tick.splice(this.el_formatted_duration_tick.indexOf(func), 1)
this.el_formatted_duration_tick.splice(
this.el_formatted_duration_tick.indexOf(func),
1
)
}
break
}
case SubscribeEvents.FormattedCurrentTimeTick: {
if (this.el_formatted_duration_tick.includes(func)) {
this.el_formatted_duration_tick.splice(this.el_formatted_duration_tick.indexOf(func), 1)
this.el_formatted_duration_tick.splice(
this.el_formatted_duration_tick.indexOf(func),
1
)
}
break
}
@ -78,20 +87,20 @@ declare global {
}
}
export class MusicPlayer {
current_song_duration = 0
#volume_cache: number
is_playing = false
time = 0
#pub_sub = new PubSub
#pub_sub = new PubSub()
constructor(
public audio_context: AudioContext,
private audio_element: HTMLAudioElement,
public track: MediaElementAudioSourceNode,
public gain: GainNode,
public volume: number,
private current_song_path?: string) {
private current_song_path?: string
) {
this.#volume_cache = volume
}
@ -125,7 +134,10 @@ export class MusicPlayer {
async try_seek(new_time: number) {
if (this.audio_context.state !== "running") {
this.is_playing = false
throw new Error("Can't seek - audioContext not running, audio_context.state : " + this.audio_context.state)
throw new Error(
"Can't seek - audioContext not running, audio_context.state : " +
this.audio_context.state
)
}
if (this.audio_element.paused) await this.try_play()
this.audio_element.currentTime = new_time
@ -223,30 +235,46 @@ export class MusicPlayer {
try {
await this.audio_context.resume()
} catch (e) {
console.log("loading new song - couldn't resume context before hand", e)
console.log(
"loading new song - couldn't resume context before hand",
e
)
}
}
return new Promise<void>((resolve, reject) => {
this.audio_element.src = this.current_song_path = path
//Found out today about this. Such a nice new way to mass remove event listeners!
const controller = new AbortController();
const controller = new AbortController()
this.audio_element.addEventListener("canplaythrough", function canplay_listener() {
this.audio_element.addEventListener(
"canplaythrough",
function canplay_listener() {
controller.abort()
}, { signal: controller.signal })
},
{ signal: controller.signal }
)
this.audio_element.addEventListener("error", function error_listener() {
this.audio_element.addEventListener(
"error",
function error_listener() {
controller.abort("new src error")
}, { signal: controller.signal })
},
{ signal: controller.signal }
)
this.audio_element.addEventListener("stalled", function stalled_listener() {
this.audio_element.addEventListener(
"stalled",
function stalled_listener() {
controller.abort("new src stalled")
}, { signal: controller.signal })
},
{ signal: controller.signal }
)
//once aborted, try to set current_song_duration
controller.signal.addEventListener("abort", (r) => {
this.current_song_duration = this.audio_element.duration
if (typeof controller.signal.reason == "string") reject(new Error(controller.signal.reason))
if (typeof controller.signal.reason == "string")
reject(new Error(controller.signal.reason))
resolve()
})
this.is_playing = false
@ -270,18 +298,18 @@ export class MusicPlayer {
if (dur == 0 || !dur) return "0:00"
// ~ is Bitwise NOT, equivalent to Math.floor()
const hrs = ~~(dur / 3600);
const mins = ~~((dur % 3600) / 60);
const secs = ~~dur % 60;
const hrs = ~~(dur / 3600)
const mins = ~~((dur % 3600) / 60)
const secs = ~~dur % 60
let ret = ""
if (hrs > 0) {
ret += "" + hrs + ":" + (mins < 10 ? "0" : "");
ret += "" + hrs + ":" + (mins < 10 ? "0" : "")
}
ret += "" + mins + ":" + (secs < 10 ? "0" : "");
ret += "" + secs;
return ret;
ret += "" + mins + ":" + (secs < 10 ? "0" : "")
ret += "" + secs
return ret
}
/**
* Will parse the current time of the song to make it easy to display in UI
@ -292,18 +320,18 @@ export class MusicPlayer {
if (curr == 0 || !curr) return "0:00"
// ~~ is Bitwise OR, equivalent to Math.floor()
const hrs = ~~(curr / 3600);
const mins = ~~((curr % 3600) / 60);
const secs = ~~curr % 60;
const hrs = ~~(curr / 3600)
const mins = ~~((curr % 3600) / 60)
const secs = ~~curr % 60
let ret = ""
if (hrs > 0) {
ret += "" + hrs + ":" + (mins < 10 ? "0" : "");
ret += "" + hrs + ":" + (mins < 10 ? "0" : "")
}
ret += "" + mins + ":" + (secs < 10 ? "0" : "");
ret += "" + secs;
return ret;
ret += "" + mins + ":" + (secs < 10 ? "0" : "")
ret += "" + secs
return ret
}
#emit_time() {
const request_id = requestAnimationFrame(this.#emit_time.bind(this))
@ -313,19 +341,24 @@ export class MusicPlayer {
this.gain.gain.value = this.volume
this.time = this.audio_element.currentTime
if (this.#pub_sub.el_current_time_tick.length == 0) cancelAnimationFrame(request_id)
if (this.#pub_sub.el_current_time_tick.length == 0)
cancelAnimationFrame(request_id)
this.#pub_sub.emit(SubscribeEvents.CurrentTimeTick, this.time)
}
#emit_duration_fmt() {
const request_id = requestAnimationFrame(this.#emit_duration_fmt.bind(this))
const request_id = requestAnimationFrame(
this.#emit_duration_fmt.bind(this)
)
const time = this.get_formatted_duration()
if (this.#pub_sub.el_formatted_duration_tick.length == 0) cancelAnimationFrame(request_id)
if (this.#pub_sub.el_formatted_duration_tick.length == 0)
cancelAnimationFrame(request_id)
this.#pub_sub.emit(SubscribeEvents.FormattedDurationTick, time)
}
#emit_time_fmt() {
const request_id = requestAnimationFrame(this.#emit_time_fmt.bind(this))
const time = this.get_formatted_current_time()
if (this.#pub_sub.el_formatted_current_time_tick.length == 0) cancelAnimationFrame(request_id)
if (this.#pub_sub.el_formatted_current_time_tick.length == 0)
cancelAnimationFrame(request_id)
this.#pub_sub.emit(SubscribeEvents.FormattedCurrentTimeTick, time)
}
/**
@ -340,7 +373,10 @@ export class MusicPlayer {
* Will give formatted current time via get_formatted_current_time() every animation frame
*/
on_time_tick_formatted(callback: (data: any) => void) {
this.#pub_sub.subscribe(SubscribeEvents.FormattedCurrentTimeTick, callback)
this.#pub_sub.subscribe(
SubscribeEvents.FormattedCurrentTimeTick,
callback
)
this.#emit_time_fmt()
}
/**
@ -352,15 +388,12 @@ export class MusicPlayer {
}
}
export class MusicPlayerBuilder {
#audio_context: AudioContext
#gain: GainNode
#track: MediaElementAudioSourceNode
#volume = 1
#prev_node: any;
#prev_node: any
#is_gain_connected = false
/**
* Creates a context and #gain( Gets connected at the end )
@ -368,11 +401,13 @@ export class MusicPlayerBuilder {
* will throw if user has not interacted with the page yet (Can't initiate AudioContext)
*/
constructor(private audio_element: HTMLAudioElement) {
if (audio_element === undefined) throw Error("audio_element was undefined")
if (audio_element === undefined)
throw Error("audio_element was undefined")
// ↓ For old browsers
const AudioContext = window.AudioContext || window.webkitAudioContext;
const AudioContext = window.AudioContext || window.webkitAudioContext
this.#audio_context = new AudioContext()
this.#track = this.#audio_context.createMediaElementSource(audio_element)
this.#track =
this.#audio_context.createMediaElementSource(audio_element)
this.#gain = this.#audio_context.createGain()
}
/**
@ -381,7 +416,9 @@ export class MusicPlayerBuilder {
*/
add_analyser() {
const analyser = this.#audio_context.createAnalyser()
!this.#prev_node ? this.#track.connect(analyser) : this.#prev_node.connect(analyser)
!this.#prev_node
? this.#track.connect(analyser)
: this.#prev_node.connect(analyser)
this.#prev_node = analyser
return analyser
}
@ -391,7 +428,9 @@ export class MusicPlayerBuilder {
*/
add_stereo_panner_node() {
const panner = this.#audio_context.createStereoPanner()
!this.#prev_node ? this.#track.connect(panner) : this.#prev_node.connect(panner)
!this.#prev_node
? this.#track.connect(panner)
: this.#prev_node.connect(panner)
this.#prev_node = panner
return panner
}
@ -401,7 +440,9 @@ export class MusicPlayerBuilder {
*/
add_wave_shaper_node() {
const shaper = this.#audio_context.createWaveShaper()
!this.#prev_node ? this.#track.connect(shaper) : this.#prev_node.connect(shaper)
!this.#prev_node
? this.#track.connect(shaper)
: this.#prev_node.connect(shaper)
this.#prev_node = shaper
return shaper
}
@ -409,7 +450,9 @@ export class MusicPlayerBuilder {
* For additional trickery, you can connect your own node.
*/
connect_custom_node(node: AudioNode) {
!this.#prev_node ? this.#track.connect(node) : this.#prev_node.connect(node)
!this.#prev_node
? this.#track.connect(node)
: this.#prev_node.connect(node)
this.#prev_node = node
}
/**
@ -417,7 +460,9 @@ export class MusicPlayerBuilder {
* eg. if you want the analyser nodes output to be affected by user #gain
*/
connect_gain() {
!this.#prev_node ? this.#track.connect(this.#gain) : this.#prev_node.connect(this.#gain)
!this.#prev_node
? this.#track.connect(this.#gain)
: this.#prev_node.connect(this.#gain)
this.#prev_node = this.#gain
this.#is_gain_connected = true
}
@ -427,10 +472,18 @@ export class MusicPlayerBuilder {
*/
build() {
if (!this.#is_gain_connected) {
!this.#prev_node ? this.#track.connect(this.#gain) : this.#prev_node.connect(this.#gain)
!this.#prev_node
? this.#track.connect(this.#gain)
: this.#prev_node.connect(this.#gain)
this.#prev_node = this.#gain
}
this.#prev_node.connect(this.#audio_context.destination)
return new MusicPlayer(this.#audio_context, this.audio_element, this.#track, this.#gain, this.#volume)
return new MusicPlayer(
this.#audio_context,
this.audio_element,
this.#track,
this.#gain,
this.#volume
)
}
}
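Finally, a compact sketch of the MusicPlayer API in this file, condensing the demo apps earlier in the commit; the element selectors and the song path are placeholders, the method names are the ones defined above.

const audio_el = document.querySelector("#audio") as HTMLAudioElement
const player = new MusicPlayerBuilder(audio_el).build()
player.change_volume(0.8)

player.on_time_tick_formatted((time) => {
    // "m:ss"-style string, emitted once per animation frame
    document.querySelector("#current")!.innerHTML = time
})

player
    .try_new_song(encodeURI("http://" + window.location.host + "/song.ogg"))
    .then(() => player.try_play()) // try_play needs a prior user gesture, as in the demos
    .catch((e) => console.error("could not start playback: " + e))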

@ -1,4 +1,15 @@
import filehound from "filehound"
import fs from "fs"
const songs = filehound.create().path("../public/samples").ext(["ogg"]).findSync()
fs.writeFile('songs_list.ts', `export const songs = ` + JSON.stringify(songs), 'utf8', () => { 1 + 1 })
const songs = filehound
.create()
.path("../public/samples")
.ext(["ogg"])
.findSync()
fs.writeFile(
"songs_list.ts",
`export const songs = ` + JSON.stringify(songs),
"utf8",
() => {
1 + 1
}
)

@ -1,10 +1,10 @@
import { Collection, Ref, RefTo, Song, DB, } from "@euterpe.js/music-library"
import { Collection, Ref, RefTo, Song, DB } from "@euterpe.js/music-library"
import { songs } from "./songs_list"
export function generate_db() {
console.log(songs)
// construct db
let db = new DB
let db = new DB()
let collections: string[] = new Array()
let new_songs = []
const path_char = songs[0].includes("\\") ? "\\" : "/"
@ -13,22 +13,27 @@ export function generate_db() {
for (let i = 0; i < songs.length; i++) {
const song = songs[i]
const last_i = song.lastIndexOf(path_char)
const collection_name = song.slice(song.slice(0, last_i).lastIndexOf(path_char) + 1, last_i)
const collection_name = song.slice(
song.slice(0, last_i).lastIndexOf(path_char) + 1,
last_i
)
/*
const foreforelast_i = song.slice(0, forelast_i - 1)
const foreforeforelast_i = song.slice(0, foreforelast_i - 1).lastIndexOf("\\")
*/
if (!collections.includes(collection_name)) {
console.log(`creating collection ${collection_name}`)
db.add([new Collection({
db.add([
new Collection({
name: collection_name,
songs: [],
artists: [],
})])
artists: []
})
])
collections.push(collection_name)
}
let col = db.collections.find(col => col.name == collection_name)!
let col = db.collections.find((col) => col.name == collection_name)!
let col_id = col.id
new_songs.push({ song: song, collection_id: col_id! })
}
@ -39,11 +44,15 @@ export function generate_db() {
const last_i = song.song.lastIndexOf(path_char)
const name = song.song.slice(last_i + 1)
const song_url = song.song.slice(song.song.indexOf(`public${path_char}`) + 7)
const song_url = song.song.slice(
song.song.indexOf(`public${path_char}`) + 7
)
const db_song = new Song({
name: name.slice(0, name.lastIndexOf(".")),
artists: [],
url: new URL(`${window.location.href}${song_url}`.replaceAll("\\", "/")),
url: new URL(
`${window.location.href}${song_url}`.replaceAll("\\", "/")
),
duration: 0,
remix_artists: [],
in_collection: new Ref(RefTo.Collections, song.collection_id)

@ -1,8 +1,14 @@
import { DB, from_json } from "@euterpe.js/music-library"
import { generate_db } from "./generate_db"
import { AudioVisualBuilder, SmoothingAlgorythm, ShapeType, WaveformOrientation, WaveformShape } from "@euterpe.js/visualizer"
import {
AudioVisualBuilder,
SmoothingAlgorythm,
ShapeType,
WaveformOrientation,
WaveformShape
} from "@euterpe.js/visualizer"
let result: AnalyzeReturn | undefined;
let result: AnalyzeReturn | undefined
let db = generate_db()
//Create all audio nodes
@ -17,8 +23,10 @@ audioContextAnalyser.smoothingTimeConstant = 0
const analyserBufferLength = audioContextAnalyser.frequencyBinCount
const FFTDataArray = new Float32Array(analyserBufferLength)
//Connect all audio Nodes
track.connect(audioContextAnalyser).connect(gain).connect(audioContext.destination)
track
.connect(audioContextAnalyser)
.connect(gain)
.connect(audioContext.destination)
document.getElementById("analyze")!.addEventListener("click", async (ev) => {
audioContext.resume()
@ -35,20 +43,23 @@ document.getElementById("upload")!.addEventListener("change", (ev) => {
audioContext.resume()
const fileReader = new FileReader()
fileReader.readAsText(ev.target.files[0])
fileReader.onload = event => {
fileReader.onload = (event) => {
let str = JSON.parse(event.target.result)
let new_db = from_json(str)
//-infinity get stringified to null, undo that
for (const song of new_db.songs) {
if (song.fft_data) {
for (let i = 0; i < song.fft_data.length; i++) {
if (song.fft_data[i] === null || song.fft_data[i] === undefined) song.fft_data[i] = -Infinity
if (
song.fft_data[i] === null ||
song.fft_data[i] === undefined
)
song.fft_data[i] = -Infinity
}
}
}
result = { db: new_db, analyzer_node: audioContextAnalyser }
}
})
async function svg() {
@ -57,22 +68,42 @@ async function svg() {
return
}
console.log("Creating svgs...")
const canvas_wrapper = document.querySelector(".canvas-wrapper") as HTMLElement
const canvas_wrapper = document.querySelector(
".canvas-wrapper"
) as HTMLElement
const waveform_canvas = document.querySelector("#waveform-canvas")?.cloneNode() as SVGSVGElement
const waveform_canvas = document
.querySelector("#waveform-canvas")
?.cloneNode() as SVGSVGElement
canvas_wrapper.childNodes.forEach((c) => c.remove())
canvas_wrapper.appendChild(waveform_canvas)
for (const song of result.db.songs) {
console.log("creating waveform for -> " + song.name)
const curr_waveform_canvas = waveform_canvas.cloneNode() as SVGSVGElement
const curr_waveform_canvas =
waveform_canvas.cloneNode() as SVGSVGElement
waveform_canvas.parentElement?.append(curr_waveform_canvas)
const waveform_visual_builder = new AudioVisualBuilder(result.analyzer_node, curr_waveform_canvas)
.set_fft_data_tresholds({ point_count_i: 100, fft_multiplier_i: .9, fft_offset_i: -65 })
const waveform_visual_builder = new AudioVisualBuilder(
result.analyzer_node,
curr_waveform_canvas
)
.set_fft_data_tresholds({
point_count_i: 100,
fft_multiplier_i: 0.9,
fft_offset_i: -65
})
.set_fft_time_smoothing(0.8)
.set_smoothing_algorythm(SmoothingAlgorythm.CatmullRom)
const waveform_visual = waveform_visual_builder.build(ShapeType.Waveform, true, { fft_data: new Float32Array(new Float64Array(song.fft_data!)), orientation: WaveformOrientation.Horizontal, shape_type: WaveformShape.LineLike })
const waveform_visual = waveform_visual_builder.build(
ShapeType.Waveform,
true,
{
fft_data: new Float32Array(new Float64Array(song.fft_data!)),
orientation: WaveformOrientation.Horizontal,
shape_type: WaveformShape.LineLike
}
)
waveform_visual.draw_once()
// await new Promise<void>((done) => setTimeout(() => done(), 500))
// @ts-ignore
@ -82,7 +113,6 @@ async function svg() {
waveform_canvas.remove()
console.dir(result.db, { depth: null })
download(JSON.stringify(result.db), "db.json", "text/plain")
}
async function analyze(): Promise<AnalyzeReturn> {
console.clear()
@ -95,13 +125,21 @@ async function analyze(): Promise<AnalyzeReturn> {
console.log(db)
for (const song of db.songs) {
// const song = db.songs[db.songs.length - 1]
console.log(`Analyzing ${song.name}, ${db.songs.indexOf(song) + 1}/${db.songs.length}`)
console.log(
`Analyzing ${song.name}, ${db.songs.indexOf(song) + 1}/${
db.songs.length
}`
)
//if not loaded yet keep trying
audioEl.src = song.url.href
await awaitLoad(audioEl)
song.duration = audioEl.duration
let currentFFTData = []
for (let curSecond = 0; curSecond < song.duration; curSecond += song.duration / samplingRate) {
for (
let curSecond = 0;
curSecond < song.duration;
curSecond += song.duration / samplingRate
) {
console.log("working...")
audioEl.currentTime = curSecond
await audioEl.play()
@ -111,24 +149,29 @@ async function analyze(): Promise<AnalyzeReturn> {
FFTDataArray.forEach((element) => {
volume += element
})
currentFFTData.push(Math.round((volume / FFTDataArray.length) * 100) / 100)
currentFFTData.push(
Math.round((volume / FFTDataArray.length) * 100) / 100
)
}
song.fft_data = currentFFTData
console.log(song.fft_data)
}
console.log("Analyzation finished!")
const result: AnalyzeReturn = { analyzer_node: audioContextAnalyser, db: db }
const result: AnalyzeReturn = {
analyzer_node: audioContextAnalyser,
db: db
}
return result
}
function download(content: BlobPart, fileName: string, contentType: string) {
var a = document.querySelector("#download") as HTMLAnchorElement;
var file = new Blob([content], { type: contentType });
a.href = URL.createObjectURL(file);
a.download = fileName;
var a = document.querySelector("#download") as HTMLAnchorElement
var file = new Blob([content], { type: contentType })
a.href = URL.createObjectURL(file)
a.download = fileName
// a.click();
}
type AnalyzeReturn = {
analyzer_node: AnalyserNode,
analyzer_node: AnalyserNode
db: DB
}
function awaitLoad(audioEl: HTMLAudioElement) {
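The analyze() routine above steps the audio element through each song in samplingRate slices and, at every slice, averages the analyser's FFT bins into a single level that gets pushed onto song.fft_data. A minimal sketch of that per-slice averaging, assuming only a standard Web Audio AnalyserNode (the function name is illustrative):

// Average one FFT frame into a single dB level, rounded to two decimals,
// matching the value pushed into currentFFTData above. Bins are in dBFS,
// so a fully silent frame averages to -Infinity, which is why the upload
// handler earlier maps null back to -Infinity after JSON round-tripping.
function average_fft_level(analyser: AnalyserNode): number {
    const bins = new Float32Array(analyser.frequencyBinCount)
    analyser.getFloatFrequencyData(bins)
    let volume = 0
    for (const bin of bins) volume += bin
    return Math.round((volume / bins.length) * 100) / 100
}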

View file

@ -5,7 +5,7 @@
*/
import filehound from "filehound"
import { execSync, exec } from 'child_process'
import { execSync, exec } from "child_process"
import { fstat, unlinkSync } from "fs"
function generate_new_photo_sizes(file, currentExtention) {
@ -22,8 +22,12 @@ function generate_new_photo_sizes(file, currentExtention) {
exec(command)
}
function generate_new_anim_photo_sizes(file, currentExtention) {
exec(`start ffmpeg -y -i "${file}.${currentExtention}" -lossless 0 -frames:v 1 -r 1 -quality 85 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_ogw_static.webp" -vf scale=1000:-1 -lossless 0 -frames:v 1 -r 1 -quality 85 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_1000w_static.webp" -vf scale=800:-1 -lossless 0 -frames:v 1 -r 1 -quality 85 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_800w_static.webp" -vf scale=500:-1 -lossless 0 -frames:v 1 -r 1 -quality 85 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_500w_static.webp" -vf scale=320:-1 -lossless 0 -frames:v 1 -r 1 -quality 85 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_320w_static.webp" -vf scale=-1:64,gblur=sigma=10:steps=2 -lossless 0 -frames:v 1 -r 1 -compression_level 6 -quality 85 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_placeholder_static.webp"`)
exec(`start ffmpeg -y -i "${file}.${currentExtention}" -lossless 0 -quality 85 -loop 0 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_ogw.webp" -vf scale=1000:-1 -lossless 0 -quality 85 -loop 0 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_1000w.webp" -vf scale=800:-1 -lossless 0 -quality 85 -loop 0 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_800w.webp" -vf scale=500:-1 -lossless 0 -quality 85 -loop 0 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_500w.webp" -vf scale=320:-1 -lossless 0 -quality 85 -loop 0 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_320w.webp" -vf scale=-1:64,gblur=sigma=10:steps=2 -frames:v 1 -lossless 0 -c:v libwebp -compression_level 6 -quality 85 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_placeholder.webp"`)
exec(
`start ffmpeg -y -i "${file}.${currentExtention}" -lossless 0 -frames:v 1 -r 1 -quality 85 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_ogw_static.webp" -vf scale=1000:-1 -lossless 0 -frames:v 1 -r 1 -quality 85 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_1000w_static.webp" -vf scale=800:-1 -lossless 0 -frames:v 1 -r 1 -quality 85 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_800w_static.webp" -vf scale=500:-1 -lossless 0 -frames:v 1 -r 1 -quality 85 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_500w_static.webp" -vf scale=320:-1 -lossless 0 -frames:v 1 -r 1 -quality 85 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_320w_static.webp" -vf scale=-1:64,gblur=sigma=10:steps=2 -lossless 0 -frames:v 1 -r 1 -compression_level 6 -quality 85 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_placeholder_static.webp"`
)
exec(
`start ffmpeg -y -i "${file}.${currentExtention}" -lossless 0 -quality 85 -loop 0 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_ogw.webp" -vf scale=1000:-1 -lossless 0 -quality 85 -loop 0 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_1000w.webp" -vf scale=800:-1 -lossless 0 -quality 85 -loop 0 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_800w.webp" -vf scale=500:-1 -lossless 0 -quality 85 -loop 0 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_500w.webp" -vf scale=320:-1 -lossless 0 -quality 85 -loop 0 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_320w.webp" -vf scale=-1:64,gblur=sigma=10:steps=2 -frames:v 1 -lossless 0 -c:v libwebp -compression_level 6 -quality 85 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_placeholder.webp"`
)
}
function generate_new_sounds_ogg(file, currentExtention) {
const path = file.substring(0, file.lastIndexOf("\\"))
@ -35,7 +39,8 @@ function generate_new_sounds_ogg(file, currentExtention) {
//Adds 25ms of delay to all samples
command += `-af 'adelay=25:all=true' `
//So the demo is HQ
if (file.includes("demo")) command += `-c:a libopus -b:a 256k '${file}.ogg'"`
if (file.includes("demo"))
command += `-c:a libopus -b:a 256k '${file}.ogg'"`
else command += `-c:a libopus -b:a 96k '${file}.ogg'"`
exec(command)
console.log(command)
@ -54,7 +59,11 @@ function generate_new_sounds_mp3(file, currentExtention) {
exec(command)
// console.log(command)
}
function generate_new_video_sizes_mp4(file, currentExtention, width_resolutions) {
function generate_new_video_sizes_mp4(
file,
currentExtention,
width_resolutions
) {
const path = file.substring(0, file.lastIndexOf("\\"))
file = file.substring(file.lastIndexOf("\\") + 1)
@ -72,12 +81,16 @@ function generate_new_video_sizes_mp4(file, currentExtention, width_resolutions)
res_command += `cd "${path}" && `
res_command += `ffmpeg -y -i "${file}.${currentExtention}" `
res_command += `-vcodec libx264 -g 240 -b:v 3M -vf scale=${resolution}:-2 -pass 2 "${file}_${resolution}p.mp4"`
res_command += "&& exit\""
res_command += '&& exit"'
exec(res_command)
}
})
}
function generate_new_video_sizes_webm(file, currentExtention, width_resolutions) {
function generate_new_video_sizes_webm(
file,
currentExtention,
width_resolutions
) {
const path = file.substring(0, file.lastIndexOf("\\"))
file = file.substring(file.lastIndexOf("\\") + 1)
@ -95,36 +108,42 @@ function generate_new_video_sizes_webm(file, currentExtention, width_resolutions
res_command += `cd "${path}" && `
res_command += `ffmpeg -y -i "${file}.${currentExtention}" `
res_command += `-vcodec libvpx-vp9 -cpu-used 0 -deadline good -quality good -g 240 -vf scale=${resolution}:-1 -crf 42 -b:v 0 -c:a libopus -row-mt 1 -tile-rows 2 -tile-columns 4 -threads 16 -auto-alt-ref 6 -pass 2 "${file}_${resolution}p.webm"`
res_command += "&& exit\""
res_command += '&& exit"'
exec(res_command)
}
})
}
let dirs = filehound.create()
.path("../public")
.directory()
.findSync()
let dirs = filehound.create().path("../public").directory().findSync()
console.log(dirs)
for (let i = 0; i < dirs.length; i++) {
//gets current file name + extension
let current_folder_files = filehound.create()
.path(`${dirs[i]}`)
.findSync()
let current_folder_files = filehound.create().path(`${dirs[i]}`).findSync()
if (current_folder_files[0] != undefined) {
//if previous encode was cancelled and 2pass log not removed, remove it :)
if (current_folder_files[0].includes("ffmpeg2pass-0.log")) {
try { unlinkSync(`${dirs[i]}/ffmpeg2pass-0.log`) } catch (err) { }
try {
unlinkSync(`${dirs[i]}/ffmpeg2pass-0.log`)
} catch (err) {}
current_folder_files = current_folder_files.slice(1)
}
for (let current_media of current_folder_files) {
current_media = [current_media.substring(0, current_media.lastIndexOf(".")), current_media.substring(current_media.lastIndexOf(".") + 1)]
current_media = [
current_media.substring(0, current_media.lastIndexOf(".")),
current_media.substring(current_media.lastIndexOf(".") + 1)
]
if (current_media[1] == "wav") {
console.log(`${current_media[0]}.${current_media[1]}\n`)
generate_new_sounds_ogg(`${current_media[0]}`, `${current_media[1]}`)
generate_new_sounds_mp3(`${current_media[0]}`, `${current_media[1]}`)
generate_new_sounds_ogg(
`${current_media[0]}`,
`${current_media[1]}`
)
generate_new_sounds_mp3(
`${current_media[0]}`,
`${current_media[1]}`
)
}
/*
@ -147,6 +166,3 @@ for (let i = 0; i < dirs.length; i++) {
}
}
}
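The directory walk above splits every file found under ../public into its base name and extension, then routes .wav files to the ogg and mp3 encoders. Pulled out as a small self-contained sketch (the commented generate_new_* calls stand for the functions defined earlier in this script):

// Split "folder/01 HTS Trancekick.wav" into ["folder/01 HTS Trancekick", "wav"],
// the same substring trick used in the loop above, then branch on the extension.
function split_media_path(media: string): [string, string] {
    const dot = media.lastIndexOf(".")
    return [media.substring(0, dot), media.substring(dot + 1)]
}

const [base, ext] = split_media_path(
    "../public/samples/kicks/01 HTS Trancekick.wav"
)
console.log(base, ext)
// if (ext === "wav") { generate_new_sounds_ogg(base, ext); generate_new_sounds_mp3(base, ext) }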

View file

@ -1 +1,33 @@
export const songs = ["../public/samples/FX/01 HTS Boom Kicks.ogg","../public/samples/FX/02 HTS Verbclaps.ogg","../public/samples/FX/03 HTS Noisesweep.ogg","../public/samples/FX/04 HTS Combined FX.ogg","../public/samples/bass/01 HTS Arp Bass.ogg","../public/samples/bass/02 HTS Hard Bass.ogg","../public/samples/bass/03 HTS Break Bass.ogg","../public/samples/bass/04 HTS Sub Bass.ogg","../public/samples/demos/01 demo 1 - Violet Delta & Eyhz.ogg","../public/samples/demos/02 demo 2 - Walras.ogg","../public/samples/demos/03 demo 3 - G4TE-16.ogg","../public/samples/demos/04 demo 4 - shadeux.ogg","../public/samples/demos/05 demo 5 - crowit.ogg","../public/samples/demos/06 demo 6 - sh0wtime.ogg","../public/samples/demos/07 demo 7 - nuphory & Luna Lenta.ogg","../public/samples/drums/01 HTS Rides and Hats.ogg","../public/samples/drums/02 HTS Claps and Hats.ogg","../public/samples/drums/03 HTS Club Snares.ogg","../public/samples/drums/04 HTS Buildup Snares.ogg","../public/samples/kicks/01 HTS Trancekick.ogg","../public/samples/kicks/02 HTS Sizzle Layer.ogg","../public/samples/kicks/03 HTS Transients.ogg","../public/samples/kicks/04 HTS Kick Combined.ogg","../public/samples/loops/01 HTS Loop CH.ogg","../public/samples/loops/02 HTS Loop OH.ogg","../public/samples/loops/03 HTS Perc Loop.ogg","../public/samples/loops/04 HTS Full Loop.ogg","../public/samples/synths/01 HTS Leads and Bass 01.ogg","../public/samples/synths/02 HTS Leads and Pads 01.ogg","../public/samples/synths/03 HTS Leads and Bass 02.ogg","../public/samples/synths/04 HTS Leads and Pads 02.ogg"]
export const songs = [
"../public/samples/FX/01 HTS Boom Kicks.ogg",
"../public/samples/FX/02 HTS Verbclaps.ogg",
"../public/samples/FX/03 HTS Noisesweep.ogg",
"../public/samples/FX/04 HTS Combined FX.ogg",
"../public/samples/bass/01 HTS Arp Bass.ogg",
"../public/samples/bass/02 HTS Hard Bass.ogg",
"../public/samples/bass/03 HTS Break Bass.ogg",
"../public/samples/bass/04 HTS Sub Bass.ogg",
"../public/samples/demos/01 demo 1 - Violet Delta & Eyhz.ogg",
"../public/samples/demos/02 demo 2 - Walras.ogg",
"../public/samples/demos/03 demo 3 - G4TE-16.ogg",
"../public/samples/demos/04 demo 4 - shadeux.ogg",
"../public/samples/demos/05 demo 5 - crowit.ogg",
"../public/samples/demos/06 demo 6 - sh0wtime.ogg",
"../public/samples/demos/07 demo 7 - nuphory & Luna Lenta.ogg",
"../public/samples/drums/01 HTS Rides and Hats.ogg",
"../public/samples/drums/02 HTS Claps and Hats.ogg",
"../public/samples/drums/03 HTS Club Snares.ogg",
"../public/samples/drums/04 HTS Buildup Snares.ogg",
"../public/samples/kicks/01 HTS Trancekick.ogg",
"../public/samples/kicks/02 HTS Sizzle Layer.ogg",
"../public/samples/kicks/03 HTS Transients.ogg",
"../public/samples/kicks/04 HTS Kick Combined.ogg",
"../public/samples/loops/01 HTS Loop CH.ogg",
"../public/samples/loops/02 HTS Loop OH.ogg",
"../public/samples/loops/03 HTS Perc Loop.ogg",
"../public/samples/loops/04 HTS Full Loop.ogg",
"../public/samples/synths/01 HTS Leads and Bass 01.ogg",
"../public/samples/synths/02 HTS Leads and Pads 01.ogg",
"../public/samples/synths/03 HTS Leads and Bass 02.ogg",
"../public/samples/synths/04 HTS Leads and Pads 02.ogg"
]
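The flat songs export above is a list of every encoded sample path. Purely as an assumption about how such a list could be kept in sync (the repo may well maintain it by hand), the same filehound pattern used in the encoding script could regenerate it:

import filehound from "filehound"
import { writeFileSync } from "fs"

// Collect every .ogg under ../public/samples and emit the same
// `export const songs = [...]` module shape as the file above.
const songs = filehound
    .create()
    .path("../public/samples")
    .ext("ogg")
    .findSync()
    .map((p: string) => p.replaceAll("\\", "/"))

writeFileSync(
    "songs.ts",
    `export const songs = ${JSON.stringify(songs, null, "\t")}\n`
)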

View file

@ -1,43 +1,82 @@
import { MusicPlayerBuilder } from "@euterpe.js/player";
import { fft_data } from "./waveform_data";
import { AudioVisualBuilder, SmoothingAlgorythm, ShapeType, WaveformOrientation, WaveformShape } from "@euterpe.js/visualizer"
import { MusicPlayerBuilder } from "@euterpe.js/player"
import { fft_data } from "./waveform_data"
import {
AudioVisualBuilder,
SmoothingAlgorythm,
ShapeType,
WaveformOrientation,
WaveformShape
} from "@euterpe.js/visualizer"
const audio_el = document.querySelector("#audio") as HTMLAudioElement
const music_player_builder = new MusicPlayerBuilder(audio_el)
const trapnation_analyser_node = music_player_builder.add_analyser()
const bar_analyser_node = music_player_builder.add_analyser()
const music_player = music_player_builder.build()
music_player.change_volume(.5)
music_player.change_volume(0.5)
const waveform_canvas = document.querySelector("#waveform-canvas") as SVGSVGElement
const waveform_canvas = document.querySelector(
"#waveform-canvas"
) as SVGSVGElement
const seek_element = document.querySelector("#seek") as HTMLInputElement
const duration_element = document.querySelector("#duration") as HTMLElement
const current_time_element = document.querySelector("#current") as HTMLElement
/**
* Create the Audio Visualizer
*/
const trapnation_visual_builder = new AudioVisualBuilder(trapnation_analyser_node, document.querySelector("#trapnation-canvas") as SVGSVGElement)
const trapnation_visual_builder = new AudioVisualBuilder(
trapnation_analyser_node,
document.querySelector("#trapnation-canvas") as SVGSVGElement
)
//Because the to_fft_range is so low, it needs more FFT data.
.set_fft_size(8192)
//Tells the Visualiser how to parse data which mutates our initial shape
.set_fft_data_tresholds({ to_fft_range_i: 3, point_count_i: 40, fft_multiplier_i: 1.5, fft_offset_i: 150 })
.set_fft_data_tresholds({
to_fft_range_i: 3,
point_count_i: 40,
fft_multiplier_i: 1.5,
fft_offset_i: 150
})
.set_fft_time_smoothing(0.6)
//If not using typescript enums, CatmullRom = number 2
.set_smoothing_algorythm(SmoothingAlgorythm.CatmullRom)
const trapnation_visual = trapnation_visual_builder.build(ShapeType.Circle, false)
const trapnation_visual = trapnation_visual_builder.build(
ShapeType.Circle,
false
)
const bar_visual_builder = new AudioVisualBuilder(bar_analyser_node, document.querySelector("#bar-canvas") as SVGSVGElement)
.set_fft_data_tresholds({ point_count_i: 50, fft_multiplier_i: 2, fft_offset_i: -100 })
const bar_visual_builder = new AudioVisualBuilder(
bar_analyser_node,
document.querySelector("#bar-canvas") as SVGSVGElement
)
.set_fft_data_tresholds({
point_count_i: 50,
fft_multiplier_i: 2,
fft_offset_i: -100
})
.set_fft_time_smoothing(0.8)
.set_smoothing_algorythm(SmoothingAlgorythm.BezierPerpendicular)
const bar_visual = bar_visual_builder.build(ShapeType.Line, false)
const waveform_visual_builder = new AudioVisualBuilder(bar_analyser_node, waveform_canvas)
.set_fft_data_tresholds({ point_count_i: 100, fft_multiplier_i: 1, fft_offset_i: -80 })
const waveform_visual_builder = new AudioVisualBuilder(
bar_analyser_node,
waveform_canvas
)
.set_fft_data_tresholds({
point_count_i: 100,
fft_multiplier_i: 1,
fft_offset_i: -80
})
.set_fft_time_smoothing(0.8)
.set_smoothing_algorythm(SmoothingAlgorythm.CatmullRom)
const waveform_visual = waveform_visual_builder.build(ShapeType.Waveform, true, { fft_data: new Float32Array(fft_data.fft_data), orientation: WaveformOrientation.Horizontal, shape_type: WaveformShape.LineLike })
const waveform_visual = waveform_visual_builder.build(
ShapeType.Waveform,
true,
{
fft_data: new Float32Array(fft_data.fft_data),
orientation: WaveformOrientation.Horizontal,
shape_type: WaveformShape.LineLike
}
)
trapnation_visual.draw()
bar_visual.draw()
@ -89,7 +128,11 @@ function convert_range(value: number, r1: number[], r2: number[]) {
waveform_canvas.addEventListener("mousemove", (e) => {
const rect = e.target.getBoundingClientRect()
const x = e.clientX - rect.left
const resX = convert_range(x, [0, rect.width], [0, waveform_canvas.viewBox.baseVal.width + 40])
const resX = convert_range(
x,
[0, rect.width],
[0, waveform_canvas.viewBox.baseVal.width + 40]
)
const polygon = `polygon(0 0, ${resX}px 0, ${resX}px 100%, 0 100%)`
document.documentElement.style.setProperty("--clip-seek-path", polygon)
})
@ -100,12 +143,20 @@ waveform_canvas.addEventListener("mouseleave", (e) => {
/*
* The player part
*/
music_player.try_new_song_async(encodeURI("http://localhost:4200/nuphory - NVISION (EXTENDED MIX).ogg"))
.then(() => {
music_player
.try_new_song_async(
encodeURI("http://localhost:4200/nuphory - NVISION (EXTENDED MIX).ogg")
)
.then(
() => {
let is_seeking = false
document.querySelector("#play")?.addEventListener("click", () => {
music_player.play_async()
.then(() => { console.log("Playing!") }, (e) => alert("Failed to play, " + e))
music_player.play_async().then(
() => {
console.log("Playing!")
},
(e) => alert("Failed to play, " + e)
)
})
document.querySelector("#pause")?.addEventListener("click", () => {
music_player.pause()
@ -116,22 +167,40 @@ music_player.try_new_song_async(encodeURI("http://localhost:4200/nuphory - NVISI
document.querySelector("#unmute")?.addEventListener("click", () => {
music_player.unmute()
})
document.querySelector("#toggle-mute")?.addEventListener("click", () => {
document
.querySelector("#toggle-mute")
?.addEventListener("click", () => {
music_player.mute_toggle()
})
document.querySelector("#toggle-play")?.addEventListener("click", () => {
music_player.play_toggle_async().then((s) => console.log("toggled play/pause"), (e) => alert("failed to toggle pause/play!" + e))
document
.querySelector("#toggle-play")
?.addEventListener("click", () => {
music_player.play_toggle_async().then(
(s) => console.log("toggled play/pause"),
(e) => alert("failed to toggle pause/play!" + e)
)
})
document.querySelector("#volume")?.addEventListener("input", (e) => {
document
.querySelector("#volume")
?.addEventListener("input", (e) => {
music_player.change_volume(e.target?.valueAsNumber)
})
document.querySelector("#seek")?.addEventListener("mousedown", (e) => {
is_seeking = true;
document
.querySelector("#seek")
?.addEventListener("mousedown", (e) => {
is_seeking = true
})
document.querySelector("#seek")?.addEventListener("mouseup", (e) => {
music_player.try_seek_async(e.target?.valueAsNumber).then(() => { console.log("seeked to " + e.target?.valueAsNumber) }, () => {
document
.querySelector("#seek")
?.addEventListener("mouseup", (e) => {
music_player.try_seek_async(e.target?.valueAsNumber).then(
() => {
console.log("seeked to " + e.target?.valueAsNumber)
},
() => {
alert("Failed seeking! " + e)
})
}
)
is_seeking = false
})
// Subscriptions to AudioContext changes, eg. time..
@ -145,9 +214,15 @@ music_player.try_new_song_async(encodeURI("http://localhost:4200/nuphory - NVISI
music_player.on_time_tick((time) => {
if (is_seeking) return
seek_element.value = "" + time
const x = `${time / music_player.current_song_duration * 100}%`
const x = `${
(time / music_player.current_song_duration) * 100
}%`
const polygon = `polygon(0 0, ${x} 0, ${x} 100%, 0 100%)`
document.documentElement.style.setProperty("--clip-time-path", polygon)
document.documentElement.style.setProperty(
"--clip-time-path",
polygon
)
})
}, (e) => console.log(e))
},
(e) => console.log(e)
)
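The subscription block above drives two CSS custom properties: --clip-seek-path follows the cursor over the waveform, and --clip-time-path follows playback progress reported by on_time_tick. The progress half, condensed into a sketch that assumes the same MusicPlayer API used above:

// Map playback time to a percentage of the song and clip the "played"
// waveform layer to that width, as the on_time_tick handler above does.
function update_progress_clip(time: number, duration: number) {
    const x = `${(time / duration) * 100}%`
    const polygon = `polygon(0 0, ${x} 0, ${x} 100%, 0 100%)`
    document.documentElement.style.setProperty("--clip-time-path", polygon)
}
// e.g. music_player.on_time_tick((t) =>
//     update_progress_clip(t, music_player.current_song_duration)
// )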

View file

@ -1,14 +1,17 @@
export const fft_data = {
"fft_data": [
-106.24, -99.73, -100.98, -101.34, -107.01, -92.38, -84.85, -90.28, -93.68, -95.02, -97.16,
-96.32, -99.23, -103.13, -85.57, -98.17, -103.27, -107.5, -83.62, -95.23, -97.12, -94.78,
-95.93, -101.42, -97.83, -102.42, -111.74, -101.38, -106.8, -111.05, -88.04, -90.88, -97.67,
-96.31, -96.69, -102.15, -102.03, -100.51, -107.14, -101.48, -101.6, -106.62, -73.94,
-79.53, -92.74, -96.08, -96.26, -100.35, -99.13, -102.03, -107.4, -93.57, -102.31, -102.3,
-109.04, -81.85, -92.79, -100.06, -95.79, -96.49, -99.89, -100.27, -102.69, -107.35,
-103.94, -104.64, -104.3, -78.82, -84.2, -95.29, -92.57, -93.47, -98.08, -98.9, -101.56,
-109.38, -102.01, -102.51, -104.83, -72.18, -76.52, -91.69, -99.97, -96.63, -98.61, -76.97,
-90.41, -100.38, -106.77, -102.83, -104.46, -108.59, -80.97, -88.05, -100.77, -79.64, -72.3,
fft_data: [
-106.24, -99.73, -100.98, -101.34, -107.01, -92.38, -84.85, -90.28,
-93.68, -95.02, -97.16, -96.32, -99.23, -103.13, -85.57, -98.17,
-103.27, -107.5, -83.62, -95.23, -97.12, -94.78, -95.93, -101.42,
-97.83, -102.42, -111.74, -101.38, -106.8, -111.05, -88.04, -90.88,
-97.67, -96.31, -96.69, -102.15, -102.03, -100.51, -107.14, -101.48,
-101.6, -106.62, -73.94, -79.53, -92.74, -96.08, -96.26, -100.35,
-99.13, -102.03, -107.4, -93.57, -102.31, -102.3, -109.04, -81.85,
-92.79, -100.06, -95.79, -96.49, -99.89, -100.27, -102.69, -107.35,
-103.94, -104.64, -104.3, -78.82, -84.2, -95.29, -92.57, -93.47, -98.08,
-98.9, -101.56, -109.38, -102.01, -102.51, -104.83, -72.18, -76.52,
-91.69, -99.97, -96.63, -98.61, -76.97, -90.41, -100.38, -106.77,
-102.83, -104.46, -108.59, -80.97, -88.05, -100.77, -79.64, -72.3,
-87.96, -92.89, -93.03
],
]
}

View file

@ -2,12 +2,12 @@ export enum SmoothingAlgorythm {
Linear,
BezierPerpendicular,
CatmullRom,
BezierWeighted,
BezierWeighted
}
export enum ShapeType {
Circle,
Line,
Waveform,
Waveform
/*To be Implemented
Custom,
FullSongWaveForm
@ -15,23 +15,23 @@ export enum ShapeType {
}
export enum WaveformOrientation {
Vertical,
Horizontal,
Horizontal
}
export enum WaveformShape {
LineLike,
Striped,
Striped
}
export type WaveformOptions = {
fft_data: Float32Array,
shape_type: WaveformShape,
fft_data: Float32Array
shape_type: WaveformShape
orientation: WaveformOrientation
}
type Point = {
x: number,
y: number,
x: number
y: number
}
type Shape = {
shape_type: ShapeType,
shape_type: ShapeType
//Algo-rythm, because this is about music. Get it? xd
smoothing_algorythm: SmoothingAlgorythm
points: Point[]
@ -62,7 +62,7 @@ export class AudioVisual {
fft_offset: number,
from_fft_range: number,
to_fft_range: number,
point_count: number,
point_count: number
) {
this.#analyzer_node = analyzer_node
this.#svg_injecting_element = svg_injecting_element
@ -88,9 +88,16 @@ export class AudioVisual {
} else {
this.#analyzer_node.getFloatFrequencyData(this.#fft_data)
}
const from = Math.round((this.#point_count / 100) * this.#from_fft_range)
const to = Math.round(this.#buffer_length - (this.#buffer_length / 100) * this.#to_fft_range)
const squeeze_factor = Math.round((this.#buffer_length - to) / this.#point_count)
const from = Math.round(
(this.#point_count / 100) * this.#from_fft_range
)
const to = Math.round(
this.#buffer_length -
(this.#buffer_length / 100) * this.#to_fft_range
)
const squeeze_factor = Math.round(
(this.#buffer_length - to) / this.#point_count
)
const return_array = new Array(this.#point_count)
for (let i = 0; i < this.#point_count + 1; i++) {
@ -104,23 +111,34 @@ export class AudioVisual {
return [x / magnitude, y / magnitude]
}
#create_perpendicular_anchors(arr: { x: number, y: number }[]) {
#create_perpendicular_anchors(arr: { x: number; y: number }[]) {
const anchors = []
switch (this.#shape.shape_type) {
case ShapeType.Circle: {
const pointDistance = 7
for (let curPoint = 0; curPoint < arr.length; curPoint++) {
const [dx, dy] = this.#normalise_perpendicular_anchors(arr[curPoint].x, arr[curPoint].y)
const [dx, dy] = this.#normalise_perpendicular_anchors(
arr[curPoint].x,
arr[curPoint].y
)
const perpendicular = [-dy, dx]
anchors.push({
leftAnchor: {
x: arr[curPoint].x + pointDistance * perpendicular[0],
y: arr[curPoint].y + pointDistance * perpendicular[1],
x:
arr[curPoint].x +
pointDistance * perpendicular[0],
y:
arr[curPoint].y +
pointDistance * perpendicular[1]
},
rightAnchor: {
x: arr[curPoint].x - pointDistance * perpendicular[0],
y: arr[curPoint].y - pointDistance * perpendicular[1],
},
x:
arr[curPoint].x -
pointDistance * perpendicular[0],
y:
arr[curPoint].y -
pointDistance * perpendicular[1]
}
})
}
break
@ -131,12 +149,12 @@ export class AudioVisual {
anchors.push({
leftAnchor: {
x: pointDistance * curPoint - pointDistance / 3,
y: arr[curPoint].y,
y: arr[curPoint].y
},
rightAnchor: {
x: pointDistance * curPoint + pointDistance / 3,
y: arr[curPoint].y,
},
y: arr[curPoint].y
}
})
}
}
@ -145,13 +163,12 @@ export class AudioVisual {
return anchors
}
#catmull_rom_smooth(arr: { x: number, y: number }[], k: number) {
#catmull_rom_smooth(arr: { x: number; y: number }[], k: number) {
if (k == null) k = 1
const last = arr.length - 2
let path = ""
for (let i = 0; i < arr.length - 1; i++) {
const x0 = i ? arr[i - 1].x : arr[0].x
const y0 = i ? arr[i - 1].y : arr[0].y
@ -171,13 +188,22 @@ export class AudioVisual {
const x3 = i !== last ? arr[i + 2].x : subx
const y3 = i !== last ? arr[i + 2].y : suby
const cp1x = x1 + (x2 - x0) / 6 * k
const cp1y = y1 + (y2 - y0) / 6 * k
const cp1x = x1 + ((x2 - x0) / 6) * k
const cp1y = y1 + ((y2 - y0) / 6) * k
const cp2x = x2 - (x3 - x1) / 6 * k
const cp2y = y2 - (y3 - y1) / 6 * k
const cp2x = x2 - ((x3 - x1) / 6) * k
const cp2y = y2 - ((y3 - y1) / 6) * k
path += "C" + [cp1x.toFixed(2), cp1y.toFixed(2), cp2x.toFixed(2), cp2y.toFixed(2), x2.toFixed(2), y2.toFixed(2)]
path +=
"C" +
[
cp1x.toFixed(2),
cp1y.toFixed(2),
cp2x.toFixed(2),
cp2y.toFixed(2),
x2.toFixed(2),
y2.toFixed(2)
]
}
return path
}
@ -190,20 +216,49 @@ export class AudioVisual {
switch (this.#shape.shape_type) {
case ShapeType.Line: {
for (let i = 0; i < frequency_data.length - 1; i++) {
const mutator = isFinite(frequency_data[i]) ? this.#convert_range(frequency_data[i] * this.#fft_multiplier + this.#fft_offset, in_range, out_range) : -1 * this.#canvas_height
const mutator = isFinite(frequency_data[i])
? this.#convert_range(
frequency_data[i] * this.#fft_multiplier +
this.#fft_offset,
in_range,
out_range
)
: -1 * this.#canvas_height
mutated_points.push({
x: this.#shape.points[i].x /** ((Math.max(FFTDataArray[i] + 100)) * 4)*/,
y: this.#shape.points[i].y - mutator,
x: this.#shape.points[i]
.x /** ((Math.max(FFTDataArray[i] + 100)) * 4)*/,
y: this.#shape.points[i].y - mutator
})
}
break
}
case ShapeType.Circle: {
for (let i = 0; i < frequency_data.length - 1; i++) {
const new_i = i > (frequency_data.length - 1) / 2 ? frequency_data.length - 1 - i : i
const new_i =
i > (frequency_data.length - 1) / 2
? frequency_data.length - 1 - i
: i
mutated_points.push({
x: this.#shape.points[i].x * Math.max((frequency_data[new_i] * this.#fft_multiplier + this.#fft_offset) / 50, 1) + this.#canvas_width / 2,
y: this.#shape.points[i].y * Math.max((frequency_data[new_i] * this.#fft_multiplier + this.#fft_offset) / 50, 1) + this.#canvas_height / 2,
x:
this.#shape.points[i].x *
Math.max(
(frequency_data[new_i] *
this.#fft_multiplier +
this.#fft_offset) /
50,
1
) +
this.#canvas_width / 2,
y:
this.#shape.points[i].y *
Math.max(
(frequency_data[new_i] *
this.#fft_multiplier +
this.#fft_offset) /
50,
1
) +
this.#canvas_height / 2
})
/* TODO: IMPLEMENT SCALING TO BEAT
this.injectingHTMLElement.parentElement.style.transform = `scale(${(100 + Math.max((frequencyData[2] * 2 + 130) / 5, 1)) / 100})`
@ -213,12 +268,23 @@ export class AudioVisual {
break
}
case ShapeType.Waveform: {
if (this.#shape.waveform_options!.shape_type == WaveformShape.LineLike) {
if (
this.#shape.waveform_options!.shape_type ==
WaveformShape.LineLike
) {
if (this.#shape.symmetry) {
for (let i = 0; i < this.#shape.points.length; i += 2) {
let mutator = this.#convert_range(frequency_data[i / 2] * this.#fft_multiplier + this.#fft_offset, in_range, out_range)
let mutator = this.#convert_range(
frequency_data[i / 2] * this.#fft_multiplier +
this.#fft_offset,
in_range,
out_range
)
if (mutator <= 0) mutator = 2
if (this.#shape.waveform_options!.orientation == WaveformOrientation.Horizontal) {
if (
this.#shape.waveform_options!.orientation ==
WaveformOrientation.Horizontal
) {
mutated_points.push({
x: this.#shape.points[i].x,
y: this.#shape.points[i].y - mutator
@ -240,8 +306,16 @@ export class AudioVisual {
}
} else {
for (let i = 0; i < frequency_data.length - 1; i++) {
const mutator = this.#convert_range(frequency_data[i] * this.#fft_multiplier + this.#fft_offset, in_range, out_range)
if (this.#shape.waveform_options!.orientation == WaveformOrientation.Horizontal) {
const mutator = this.#convert_range(
frequency_data[i] * this.#fft_multiplier +
this.#fft_offset,
in_range,
out_range
)
if (
this.#shape.waveform_options!.orientation ==
WaveformOrientation.Horizontal
) {
mutated_points.push({
x: this.#shape.points[i].x,
y: this.#shape.points[i].y - mutator
@ -289,26 +363,36 @@ export class AudioVisual {
switch (this.#shape.shape_type) {
case ShapeType.Line: {
for (let i = 0; i < arr.length; i++) {
path += `L ${arr[i].x.toFixed(2)},${arr[i].y.toFixed(2)} `
path += `L ${arr[i].x.toFixed(2)},${arr[
i
].y.toFixed(2)} `
}
if (this.#shape.shape_type == ShapeType.Line) {
path += `L ${this.#canvas_width} ${this.#canvas_height} `
path += `L ${this.#canvas_width} ${
this.#canvas_height
} `
//path += `L ${canvas_width} ${canvas_height} `
}
break
}
case ShapeType.Circle: {
for (let i = 0; i < arr.length; i++) {
path += `L ${arr[i].x.toFixed(2)},${arr[i].y.toFixed(2)} `
path += `L ${arr[i].x.toFixed(2)},${arr[
i
].y.toFixed(2)} `
}
break
}
case ShapeType.Waveform: {
for (let i = 0; i < arr.length; i += 2) {
path += `L ${arr[i].x.toFixed(2)},${arr[i].y.toFixed(2)} `
path += `L ${arr[i].x.toFixed(2)},${arr[
i
].y.toFixed(2)} `
}
for (let i = arr.length - 1; i >= 0; i -= 2) {
path += `L ${arr[i].x.toFixed(2)},${arr[i].y.toFixed(2)} `
path += `L ${arr[i].x.toFixed(2)},${arr[
i
].y.toFixed(2)} `
}
}
}
@ -320,7 +404,15 @@ export class AudioVisual {
const anchors = this.#create_perpendicular_anchors(arr)
for (let i = 1; i < arr.length; i++) {
path += `C ${anchors[i - 1].rightAnchor.x.toFixed(2)} ${anchors[i - 1].rightAnchor.y.toFixed(2)} ${anchors[i].leftAnchor.x.toFixed(2)} ${anchors[i].leftAnchor.y.toFixed(2)} ${arr[i].x.toFixed(2)} ${arr[i].y.toFixed(2)} `
path += `C ${anchors[i - 1].rightAnchor.x.toFixed(
2
)} ${anchors[i - 1].rightAnchor.y.toFixed(2)} ${anchors[
i
].leftAnchor.x.toFixed(2)} ${anchors[
i
].leftAnchor.y.toFixed(2)} ${arr[i].x.toFixed(2)} ${arr[
i
].y.toFixed(2)} `
}
if (this.#shape.shape_type == ShapeType.Line) {
//path += `L ${this.canvasWidth} ${this.canvasHeight / 2} `
@ -340,7 +432,10 @@ export class AudioVisual {
break
}
case SmoothingAlgorythm.CatmullRom: {
if (this.#shape.shape_type == ShapeType.Waveform && this.#shape.symmetry == true) {
if (
this.#shape.shape_type == ShapeType.Waveform &&
this.#shape.symmetry == true
) {
//adding points so both halves end and start at the same center point
console.log(arr)
const first_half = [{ x: 0, y: this.#canvas_height / 2 }]
@ -349,8 +444,14 @@ export class AudioVisual {
first_half.push(arr[i])
second_half.push(arr[i + 1])
}
first_half.push({ x: this.#canvas_width, y: this.#canvas_height / 2 })
second_half.push({ x: this.#canvas_width, y: this.#canvas_height / 2 })
first_half.push({
x: this.#canvas_width,
y: this.#canvas_height / 2
})
second_half.push({
x: this.#canvas_width,
y: this.#canvas_height / 2
})
// path += `M ${this.#canvas_width},${this.#canvas_height / 2}`
second_half.reverse()
//path += ` L 0 ${this.#canvas_height / 2}`
@ -358,8 +459,7 @@ export class AudioVisual {
//path += ` L ${this.#canvas_width} ${this.#canvas_height / 2}`
path += this.#catmull_rom_smooth(second_half, 1)
//path += `L 0 ${this.#canvas_height / 2}`
}
else {
} else {
path += this.#catmull_rom_smooth(arr, 1)
}
path += `Z`
@ -369,7 +469,7 @@ export class AudioVisual {
return `<path width="100%" height="100%" d="${path}"/>`
}
on_data(fn: ((data: Float32Array) => void)) {
on_data(fn: (data: Float32Array) => void) {
this.#subscriber_fns.push(fn)
}
/**
@ -400,7 +500,10 @@ export class AudioVisualBuilder {
#from_fft_range
#to_fft_range
#point_count: number
constructor(analyzer_node: AnalyserNode, svg_injecting_element: SVGSVGElement) {
constructor(
analyzer_node: AnalyserNode,
svg_injecting_element: SVGSVGElement
) {
this.#analyzer_node = analyzer_node
this.#svg_injecting_element = svg_injecting_element
this.#canvas_width = svg_injecting_element.viewBox.baseVal.width
@ -412,7 +515,10 @@ export class AudioVisualBuilder {
this.#fft_offset = 150
this.#from_fft_range = 0
this.#to_fft_range = 100
this.#point_count = Math.round((this.#buffer_length / 100) * (this.#from_fft_range - this.#to_fft_range))
this.#point_count = Math.round(
(this.#buffer_length / 100) *
(this.#from_fft_range - this.#to_fft_range)
)
}
/**
* The smoothingTimeConstant property of the AnalyserNode interface is a double value representing the averaging constant with the last analysis frame. It's basically an average between the current buffer and the last buffer the AnalyserNode processed, and results in a much smoother set of value changes over time.
@ -431,7 +537,8 @@ export class AudioVisualBuilder {
* @returns this
*/
set_fft_size(fft_size: number) {
if (!(this.#fft_size && !(this.#fft_size & (this.#fft_size - 1)))) throw Error("fft_size not power of two")
if (!(this.#fft_size && !(this.#fft_size & (this.#fft_size - 1))))
throw Error("fft_size not power of two")
this.#analyzer_node.fftSize = this.#fft_size = fft_size
this.#buffer_length = this.#analyzer_node.frequencyBinCount
return this
@ -461,7 +568,15 @@ export class AudioVisualBuilder {
* @param param0
* @returns
*/
set_fft_data_tresholds({ from_fft_range_i = 0, to_fft_range_i = 100, point_count_i = Math.round((this.#buffer_length / 100) * (from_fft_range_i - to_fft_range_i)), fft_multiplier_i = 2, fft_offset_i = -50 }) {
set_fft_data_tresholds({
from_fft_range_i = 0,
to_fft_range_i = 100,
point_count_i = Math.round(
(this.#buffer_length / 100) * (from_fft_range_i - to_fft_range_i)
),
fft_multiplier_i = 2,
fft_offset_i = -50
}) {
this.#from_fft_range = from_fft_range_i
this.#to_fft_range = to_fft_range_i
this.#point_count = point_count_i
@ -499,11 +614,29 @@ export class AudioVisualBuilder {
* @param shape_type Circle = 0; Line = 1;
* @returns `new AudioVisual`
*/
build(shape_type: ShapeType, symmetry: boolean, waveform_options?: WaveformOptions) {
build(
shape_type: ShapeType,
symmetry: boolean,
waveform_options?: WaveformOptions
) {
const shape = this.#create_shape(shape_type, symmetry, waveform_options)
return new AudioVisual(this.#analyzer_node, this.#svg_injecting_element, shape, this.#buffer_length, this.#fft_multipier, this.#fft_offset, this.#from_fft_range, this.#to_fft_range, this.#point_count)
return new AudioVisual(
this.#analyzer_node,
this.#svg_injecting_element,
shape,
this.#buffer_length,
this.#fft_multipier,
this.#fft_offset,
this.#from_fft_range,
this.#to_fft_range,
this.#point_count
)
}
#create_shape(shape_type: ShapeType, symmetry: boolean, waveform_options?: WaveformOptions): Shape {
#create_shape(
shape_type: ShapeType,
symmetry: boolean,
waveform_options?: WaveformOptions
): Shape {
const point_amount = this.#get_cured_frequency_data().length
let new_shape: Shape
switch (shape_type) {
@ -512,31 +645,54 @@ export class AudioVisualBuilder {
for (let i = 0; i < point_amount; i++) {
points.push({
x: (this.#canvas_width / point_amount) * i,
y: 0,
y: 0
})
}
new_shape = { shape_type, points, smoothing_algorythm: this.#smoothing_algorythm }
new_shape = {
shape_type,
points,
smoothing_algorythm: this.#smoothing_algorythm
}
break
}
case ShapeType.Circle: {
const points = []
const radius = this.#canvas_height > this.#canvas_width ? this.#canvas_height / 5 : this.#canvas_width / 5
const radius =
this.#canvas_height > this.#canvas_width
? this.#canvas_height / 5
: this.#canvas_width / 5
for (let i = 0; i < point_amount; i++) {
points.push({
x: Math.cos(((2 * Math.PI) / point_amount) * i - Math.PI / 2) * radius,
y: Math.sin(((2 * Math.PI) / point_amount) * i - Math.PI / 2) * radius,
x:
Math.cos(
((2 * Math.PI) / point_amount) * i - Math.PI / 2
) * radius,
y:
Math.sin(
((2 * Math.PI) / point_amount) * i - Math.PI / 2
) * radius
})
}
new_shape = { shape_type, points, smoothing_algorythm: this.#smoothing_algorythm }
new_shape = {
shape_type,
points,
smoothing_algorythm: this.#smoothing_algorythm
}
break
}
case ShapeType.Waveform: {
if (waveform_options === undefined) {
console.error("Waveform options undefined at shapetype.waveform, please define!")
throw Error("Waveform options undefined at shapetype.waveform, please define!")
console.error(
"Waveform options undefined at shapetype.waveform, please define!"
)
throw Error(
"Waveform options undefined at shapetype.waveform, please define!"
)
}
const fft_length = this.#get_cured_frequency_data(waveform_options.fft_data).length
const fft_length = this.#get_cured_frequency_data(
waveform_options.fft_data
).length
const points = []
for (let i = 0; i < fft_length; i++) {
let x, y
@ -546,17 +702,25 @@ export class AudioVisualBuilder {
} else {
throw Error("WaveformShape.Striped not implemented yet")
}
waveform_options.orientation == WaveformOrientation.Horizontal ?
points.push({ x: x, y: y }) :
points.push({ x: y, y: x })
waveform_options.orientation ==
WaveformOrientation.Horizontal
? points.push({ x: x, y: y })
: points.push({ x: y, y: x })
//Double the points needed for symmetry
if (symmetry) {
waveform_options.orientation == WaveformOrientation.Horizontal ?
points.push({ x: x, y: y }) :
points.push({ x: y, y: x })
waveform_options.orientation ==
WaveformOrientation.Horizontal
? points.push({ x: x, y: y })
: points.push({ x: y, y: x })
}
}
new_shape = { shape_type, points, smoothing_algorythm: this.#smoothing_algorythm, symmetry: symmetry, waveform_options: waveform_options }
new_shape = {
shape_type,
points,
smoothing_algorythm: this.#smoothing_algorythm,
symmetry: symmetry,
waveform_options: waveform_options
}
}
}
return new_shape
@ -567,9 +731,16 @@ export class AudioVisualBuilder {
fft_data = new Float32Array(this.#buffer_length)
this.#analyzer_node.getFloatFrequencyData(fft_data)
}
const from = Math.round((this.#point_count / 100) * this.#from_fft_range)
const to = Math.round(this.#buffer_length - (this.#buffer_length / 100) * this.#to_fft_range)
const squeezeFactor = Math.round((this.#buffer_length - to) / this.#point_count)
const from = Math.round(
(this.#point_count / 100) * this.#from_fft_range
)
const to = Math.round(
this.#buffer_length -
(this.#buffer_length / 100) * this.#to_fft_range
)
const squeezeFactor = Math.round(
(this.#buffer_length - to) / this.#point_count
)
const return_array = new Array(this.#point_count)
for (let i = 0; i < this.#point_count; i++) {
@ -577,5 +748,4 @@ export class AudioVisualBuilder {
}
return return_array
}
}
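#catmull_rom_smooth above converts each Catmull-Rom segment into a cubic Bézier by deriving two control points from the neighbouring samples. The control-point formula on its own, as a small sketch (k is the tension factor, 1 by default, matching the cp1/cp2 expressions above):

type Pt = { x: number; y: number }

// Control points for the Bézier curve from p1 to p2 given neighbours p0 and p3.
// Each segment then becomes "C cp1x cp1y cp2x cp2y p2x p2y" in the SVG path.
function catmull_rom_controls(p0: Pt, p1: Pt, p2: Pt, p3: Pt, k = 1) {
    return {
        cp1: {
            x: p1.x + ((p2.x - p0.x) / 6) * k,
            y: p1.y + ((p2.y - p0.y) / 6) * k
        },
        cp2: {
            x: p2.x - ((p3.x - p1.x) / 6) * k,
            y: p2.y - ((p3.y - p1.y) / 6) * k
        }
    }
}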

test.js
View file

@ -20,8 +20,8 @@ class AudioElementHehe {
console.log("Pausing!")
}
}
const audio_context = new AudioContexthehe
const audio_element = new AudioElementHehe
const audio_context = new AudioContexthehe()
const audio_element = new AudioElementHehe()
let is_playing = false
try_play_toggle_async()
@ -31,13 +31,16 @@ function try_play_toggle_async() {
audio_context.resume().catch((e) => reject(e))
}
if (audio_element.paused) {
audio_element.play().then((s) => {
audio_element.play().then(
(s) => {
is_playing = true
resolve(s)
}, (r) => {
},
(r) => {
is_playing = false
reject(r)
})
}
)
} else {
audio_element.pause()
is_playing = false
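try_play_toggle_async above wraps the play/pause pair in a single promise: resume the context if it is suspended, resolve once play() succeeds, reject if it fails, and flip is_playing accordingly. A short call-site sketch (only the handlers are new here):

// Consume the promise returned by the toggle above and surface failures.
try_play_toggle_async().then(
    () => console.log("Toggled, is_playing =", is_playing),
    (e) => alert("Could not toggle playback: " + e)
)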