diff --git a/.prettierrc b/.prettierrc
index 841d5a2..5ee78f4 100644
--- a/.prettierrc
+++ b/.prettierrc
@@ -1,12 +1,13 @@
{
- "parser": "typescript",
- "trailingComma": "none",
- "useTabs": true,
- "tabWidth": 4,
- "semi": false,
- "singleQuote": false,
- "endOfLine": "lf",
- "bracketSpacing": true,
- "bracketSameLine": false,
- "arrowParens": "always"
-}
\ No newline at end of file
+ "parser": "typescript",
+ "trailingComma": "none",
+ "useTabs": true,
+ "tabWidth": 4,
+ "semi": false,
+ "singleQuote": false,
+ "endOfLine": "lf",
+ "bracketSpacing": true,
+ "bracketSameLine": false,
+ "arrowParens": "always",
+ "printWidth": 150
+}
diff --git a/packages/dj/src/db_extend.ts b/packages/dj/src/db_extend.ts
index 3525403..77cb57e 100644
--- a/packages/dj/src/db_extend.ts
+++ b/packages/dj/src/db_extend.ts
@@ -1,11 +1,4 @@
-import {
- Artist,
- Collection,
- DB,
- Ref,
- RefTo,
- Song
-} from "@euterpe.js/music-library"
+import { Artist, Collection, DB, Ref, RefTo, Song } from "@euterpe.js/music-library"
export { DJSong, DJDB }
type ID = number
@@ -34,11 +27,9 @@ class DJSong extends Song {
try {
fetch(data.url).then((file) => {
file.arrayBuffer().then((buffer) => {
- audio_context
- .decodeAudioData(buffer)
- .then((audio_buffer) => {
- this.audio_buffer = audio_buffer
- })
+ audio_context.decodeAudioData(buffer).then((audio_buffer) => {
+ this.audio_buffer = audio_buffer
+ })
})
})
} catch (e) {
@@ -46,17 +37,13 @@ class DJSong extends Song {
}
}
public async analyze(url: URL, audio_context: AudioContext) {
- this.audio_buffer = await audio_context.decodeAudioData(
- await (await fetch(url)).arrayBuffer()
- )
+ this.audio_buffer = await audio_context.decodeAudioData(await (await fetch(url)).arrayBuffer())
}
}
class DJDB extends DB {
dj_add(dj_songs: DJSong[]): void {
let inputs
- typeof dj_songs[Symbol.iterator] == "function"
- ? (inputs = dj_songs)
- : (inputs = [dj_songs])
+ typeof dj_songs[Symbol.iterator] == "function" ? (inputs = dj_songs) : (inputs = [dj_songs])
for (const input of inputs) {
if (input instanceof DJSong) {
const song = input as DJSong
@@ -65,16 +52,8 @@ class DJDB extends DB {
if (song.in_collection) {
const curr_col = song.in_collection.get(this) as Collection
curr_col.songs.push(new Ref(RefTo.Songs, song.id))
- song.artists.forEach((artist) =>
- curr_col.artists.push(
- new Ref(RefTo.Artists, artist.get(this)!.id!)
- )
- )
- song.remix_artists.forEach((artist) =>
- curr_col.artists.push(
- new Ref(RefTo.Artists, artist.get(this)!.id!)
- )
- )
+ song.artists.forEach((artist) => curr_col.artists.push(new Ref(RefTo.Artists, artist.get(this)!.id!)))
+ song.remix_artists.forEach((artist) => curr_col.artists.push(new Ref(RefTo.Artists, artist.get(this)!.id!)))
}
for (const artist_ref of song.artists) {
diff --git a/packages/dj/src/euterpe_extend.ts b/packages/dj/src/euterpe_extend.ts
index 16f868b..461602f 100644
--- a/packages/dj/src/euterpe_extend.ts
+++ b/packages/dj/src/euterpe_extend.ts
@@ -11,22 +11,13 @@ class DJ {
/**in ms */
beat_duration?: number
beat = { current: 0, max: 4, next_bar_in: 4 }
- on_beat?: (beat: {
- current: number
- max: number
- next_bar_in: number
- }) => void
- constructor(
- public player: Euterpe | MusicPlayer,
- public master_bpm: number | 120
- ) {
+ on_beat?: (beat: { current: number; max: number; next_bar_in: number }) => void
+ constructor(public player: Euterpe | MusicPlayer, public master_bpm: number | 120) {
this.beat_duration = 60 / master_bpm
this.#emit_beats()
}
#emit_beats() {
- this.beat.current >= 4
- ? (this.beat.current++, this.beat.next_bar_in--)
- : ((this.beat.current = 0), (this.beat.next_bar_in = this.beat.max))
+ this.beat.current >= 4 ? (this.beat.current++, this.beat.next_bar_in--) : ((this.beat.current = 0), (this.beat.next_bar_in = this.beat.max))
if (this.on_beat) this.on_beat(this.beat)
//This makes it break if BPM >= 300!!!!
@@ -92,17 +83,10 @@ class Track {
gain: GainNode
audio_context: AudioContext | BaseAudioContext
- constructor(
- public player: MusicPlayer | Euterpe,
- public current_song?: Song,
- public should_loop?: boolean
- ) {
+ constructor(public player: MusicPlayer | Euterpe, public current_song?: Song, public should_loop?: boolean) {
this.audio_context = player.audio_context
this.gain = this.audio_context.createGain()
- if (current_song)
- this.change_song(current_song).catch((e) =>
- console.error("error during track construction - " + e)
- )
+ if (current_song) this.change_song(current_song).catch((e) => console.error("error during track construction - " + e))
}
async #prepare() {
@@ -110,10 +94,7 @@ class Track {
if (!this.current_song) reject(new Error("No current song"))
fetch(this.current_song!.url).then(
async (file) => {
- this.audio_buffer =
- await this.audio_context.decodeAudioData(
- await file.arrayBuffer()
- )
+ this.audio_buffer = await this.audio_context.decodeAudioData(await file.arrayBuffer())
resolve(this)
},
(reason) => reject(reason)
@@ -121,10 +102,7 @@ class Track {
})
}
#connect() {
- if (!this.audio_buffer)
- throw new Error(
- "Somehow buffer not in track even though it analyzed properly. Report this as a bug"
- )
+ if (!this.audio_buffer) throw new Error("Somehow buffer not in track even though it analyzed properly. Report this as a bug")
this.buffer_source = this.audio_context.createBufferSource()
this.buffer_source.buffer = this.audio_buffer!
this.buffer_source.connect(this.gain)
@@ -149,11 +127,8 @@ class Track {
*/
async try_start(delay?: number) {
return new Promise((resolve, reject) => {
- if (!this.buffer_source)
- reject(new Error("No buffer source yet, set a song first"))
- this.buffer_source!.start(
- this.audio_context.currentTime + (delay || 0)
- )
+ if (!this.buffer_source) reject(new Error("No buffer source yet, set a song first"))
+ this.buffer_source!.start(this.audio_context.currentTime + (delay || 0))
})
}
}
diff --git a/packages/euterpe-web-test/src/db.ts b/packages/euterpe-web-test/src/db.ts
index 9eca910..049c436 100644
--- a/packages/euterpe-web-test/src/db.ts
+++ b/packages/euterpe-web-test/src/db.ts
@@ -1,11 +1,4 @@
-import {
- DB,
- Song,
- Artist,
- Ref,
- RefTo,
- Platforms
-} from "@euterpe.js/music-library"
+import { DB, Song, Artist, Ref, RefTo, Platforms } from "@euterpe.js/music-library"
export const db = new DB()
db.add([
@@ -28,14 +21,7 @@ db.add([
}),
new Artist({
name: "IMANU",
- links: [
- [
- Platforms.Spotify,
- new URL(
- "https://open.spotify.com/artist/5Y7rFm0tiJTVDzGLMzz0W1?si=DRaZyugTTIqlBHDkMGKVqA&nd=1"
- )
- ]
- ]
+ links: [[Platforms.Spotify, new URL("https://open.spotify.com/artist/5Y7rFm0tiJTVDzGLMzz0W1?si=DRaZyugTTIqlBHDkMGKVqA&nd=1")]]
}),
new Artist({
name: "toe",
@@ -45,33 +31,18 @@ db.add([
db.add([
new Song({
//Refrences are constructed as such. This allows to get to the artist from either collection or song
- artists: [
- new Ref(RefTo.Artists, 2),
- new Ref(RefTo.Artists, 3),
- new Ref(RefTo.Artists, 4)
- ],
+ artists: [new Ref(RefTo.Artists, 2), new Ref(RefTo.Artists, 3), new Ref(RefTo.Artists, 4)],
duration: 252,
name: "Star",
remix_artists: [new Ref(RefTo.Artists, 5)],
- url: new URL(
- "http://" +
- window.location.host +
- "/Machinedrum, Tanerelle & Mono Poly - Star (IMANU Remix) final.mp3"
- )
+ url: new URL("http://" + window.location.host + "/Machinedrum, Tanerelle & Mono Poly - Star (IMANU Remix) final.mp3")
}),
new Song({
//If you don't like guessing the IDs, then this is also a way to do it
- artists: [
- new Ref(
- RefTo.Artists,
- db.artists.find((a) => a.name == "Jamie xx")!.id!
- )
- ],
+ artists: [new Ref(RefTo.Artists, db.artists.find((a) => a.name == "Jamie xx")!.id!)],
duration: 331,
name: "Sleep Sound",
- url: new URL(
- "http://" + window.location.host + "/Jamie xx - Sleep Sound.mp3"
- )
+ url: new URL("http://" + window.location.host + "/Jamie xx - Sleep Sound.mp3")
}),
new Song({
artists: [new Ref(RefTo.Artists, 1)],
@@ -83,10 +54,6 @@ db.add([
artists: [new Ref(RefTo.Artists, 10)],
duration: 4 * 60 + 5,
name: "サニーボーイ・ラプソディ",
- url: new URL(
- "http://" +
- window.location.host +
- "/16.サニーボーイ・ラプソディ.ogg"
- )
+ url: new URL("http://" + window.location.host + "/16.サニーボーイ・ラプソディ.ogg")
})
])
diff --git a/packages/euterpe-web-test/src/main.ts b/packages/euterpe-web-test/src/main.ts
index cecfec4..ef4fffe 100644
--- a/packages/euterpe-web-test/src/main.ts
+++ b/packages/euterpe-web-test/src/main.ts
@@ -3,16 +3,12 @@ import { EuterpeBuilder } from "@euterpe.js/euterpe"
let is_seeking = false
// document.addEventListener("click", start, { once: true })
-const euterpe = new EuterpeBuilder(
- document.querySelector("#audio")!,
- db
-).build()
+const euterpe = new EuterpeBuilder(document.querySelector("#audio")!, db).build()
add_library_to_dom()
euterpe.try_preload_song(0).then(
() => {
- document.querySelector("#text-playing")!.innerHTML =
- euterpe.format_current_song()
+ document.querySelector("#text-playing")!.innerHTML = euterpe.format_current_song()
},
(e) => console.log(e + " Failed to preload")
)
@@ -46,8 +42,7 @@ euterpe.on_time_tick((time) => {
document.querySelector("#previous")?.addEventListener("click", () => {
euterpe.try_previous_song_looping().then(
() => {
- document.querySelector("#text-playing")!.innerHTML =
- euterpe.format_current_song()
+ document.querySelector("#text-playing")!.innerHTML = euterpe.format_current_song()
},
(e) => alert(e + "Failed to change song")
)
@@ -55,8 +50,7 @@ document.querySelector("#previous")?.addEventListener("click", () => {
document.querySelector("#next")?.addEventListener("click", () => {
euterpe.try_next_song_looping().then(
() => {
- document.querySelector("#text-playing")!.innerHTML =
- euterpe.format_current_song()
+ document.querySelector("#text-playing")!.innerHTML = euterpe.format_current_song()
},
(e) => alert(e + "Failed to change song")
)
@@ -78,9 +72,7 @@ document.querySelector("#toggle-mute")?.addEventListener("click", () => {
euterpe.mute_toggle()
})
document.querySelector("#toggle-play")?.addEventListener("click", () => {
- euterpe
- .try_play_toggle()
- .catch((e) => alert("failed to toggle pause/play!" + e))
+ euterpe.try_play_toggle().catch((e) => alert("failed to toggle pause/play!" + e))
})
document.querySelector("#volume")?.addEventListener("input", (e) => {
euterpe.change_volume(e.target?.valueAsNumber)
@@ -119,9 +111,7 @@ function add_library_to_dom() {
function library_play(e: MouseEvent) {
const b = e.currentTarget as HTMLButtonElement
euterpe.try_specific_song(parseInt(b.dataset["id"]!)).then(
- () =>
- (document.querySelector("#text-playing")!.innerHTML =
- euterpe.format_current_song()),
+ () => (document.querySelector("#text-playing")!.innerHTML = euterpe.format_current_song()),
(e) => alert(e)
)
}
diff --git a/packages/euterpe/src/index.ts b/packages/euterpe/src/index.ts
index ddab7e6..4b6fb36 100644
--- a/packages/euterpe/src/index.ts
+++ b/packages/euterpe/src/index.ts
@@ -20,14 +20,15 @@ class Euterpe extends MusicPlayer {
current_song_path?: string,
private options?: BuilderOptions
) {
- super(
- audio_context,
- audio_element,
- track,
- gain,
- volume,
- current_song_path
- )
+ super(audio_context, audio_element, track, gain, volume, current_song_path)
+
+ audio_element.addEventListener("ended", async () => {
+ audio_element.currentTime = 0
+ audio_element.pause()
+ try {
+ await this.try_next_song()
+ } catch (e) {} // ignore - e.g. already at the last song
+ })
}
/**
@@ -57,13 +58,10 @@ class Euterpe extends MusicPlayer {
while (this.db.songs[--id_i].id! > this.current_song_id);
const next_id = ++id_i
- if (next_id == this.db.songs.length)
- throw new Error("Won't go past the last song")
+ if (next_id == this.db.songs.length) throw new Error("Won't go past the last song")
new_song = this.db.songs.find((song) => song.id == next_id)!
}
- const url = this.options?.use_only_pathname_url
- ? new_song.url.pathname
- : new_song.url.toString()
+ const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
await this.try_new_song(url)
await this.try_play()
if (this.current_song) this.played_history.push(this.current_song)
@@ -87,9 +85,7 @@ class Euterpe extends MusicPlayer {
if (next_id == this.db.songs.length) next_id = this.db.songs[0].id!
new_song = this.db.songs.find((song) => song.id == next_id)!
}
- const url = this.options?.use_only_pathname_url
- ? new_song.url.pathname
- : new_song.url.toString()
+ const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
await this.try_new_song(url)
await this.try_play()
if (this.current_song) this.played_history.push(this.current_song)
@@ -113,9 +109,7 @@ class Euterpe extends MusicPlayer {
if (next_id == this.db.songs.length) next_id = this.db.songs[0].id!
new_song = this.db.songs.find((song) => song.id == next_id)!
}
- const url = this.options?.use_only_pathname_url
- ? new_song.url.pathname
- : new_song.url.toString()
+ const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
this.new_song(url)
this.play()
if (this.current_song) this.played_history.push(this.current_song)
@@ -137,9 +131,7 @@ class Euterpe extends MusicPlayer {
const next_id = ++id_i
new_song = this.db.songs.find((song) => song.id == next_id)!
}
- const url = this.options?.use_only_pathname_url
- ? new_song.url.pathname
- : new_song.url.toString()
+ const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
this.new_song(url)
this.play()
if (this.current_song) this.played_history.push(this.current_song)
@@ -168,9 +160,7 @@ class Euterpe extends MusicPlayer {
specific_song(new_song_id: number) {
const new_song = this.db.songs.find((song) => song.id! == new_song_id)
if (!new_song) return
- const url = this.options?.use_only_pathname_url
- ? new_song.url.pathname
- : new_song.url.toString()
+ const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
this.new_song(url)
this.play()
if (this.current_song) this.played_history.push(this.current_song)
@@ -192,13 +182,10 @@ class Euterpe extends MusicPlayer {
while (this.db.songs[++id_i].id! < this.current_song_id);
const next_id = --id_i
- if (next_id == this.db.songs.length)
- throw new Error("Won't roll backwards to last song")
+ if (next_id == this.db.songs.length) throw new Error("Won't roll backwards to last song")
new_song = this.db.songs.find((song) => song.id == next_id)!
}
- const url = this.options?.use_only_pathname_url
- ? new_song.url.pathname
- : new_song.url.toString()
+ const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
await this.try_new_song(url)
await this.try_play()
//if (this.current_song) this.played_history.push(this.current_song)
@@ -219,13 +206,10 @@ class Euterpe extends MusicPlayer {
while (this.db.songs[++id_i].id! < this.current_song_id);
let next_id = --id_i
- if (next_id == -1)
- next_id = this.db.songs[this.db.songs.length - 1].id!
+ if (next_id == -1) next_id = this.db.songs[this.db.songs.length - 1].id!
new_song = this.db.songs.find((song) => song.id == next_id)!
}
- const url = this.options?.use_only_pathname_url
- ? new_song.url.pathname
- : new_song.url.toString()
+ const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
await this.try_new_song(url)
await this.try_play()
//if (this.current_song) this.played_history.push(this.current_song)
@@ -246,13 +230,10 @@ class Euterpe extends MusicPlayer {
while (this.db.songs[++id_i].id! < this.current_song_id);
const next_id = -id_i
- if (next_id == this.db.songs.length)
- throw new Error("Won't go past the last song")
+ if (next_id == this.db.songs.length) throw new Error("Won't go past the last song")
new_song = this.db.songs.find((song) => song.id == next_id)!
}
- const url = this.options?.use_only_pathname_url
- ? new_song.url.pathname
- : new_song.url.toString()
+ const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
this.new_song(url)
this.play()
//if (this.current_song) this.played_history.push(this.current_song)
@@ -273,13 +254,10 @@ class Euterpe extends MusicPlayer {
while (this.db.songs[++id_i].id! < this.current_song_id);
let next_id = -id_i
- if (next_id == this.db.songs.length)
- next_id = this.db.songs[this.db.songs.length].id!
+ if (next_id == this.db.songs.length) next_id = this.db.songs[this.db.songs.length].id!
new_song = this.db.songs.find((song) => song.id == next_id)!
}
- const url = this.options?.use_only_pathname_url
- ? new_song.url.pathname
- : new_song.url.toString()
+ const url = this.options?.use_only_pathname_url ? new_song.url.pathname : new_song.url.toString()
this.new_song(url)
this.play()
//if (this.current_song) this.played_history.push(this.current_song)
@@ -341,8 +319,7 @@ class Euterpe extends MusicPlayer {
try_queue_add(id: number) {
const curr_song = this.db.songs.find((song) => song.id == id)
if (!curr_song) throw new Error(`Song of id "${id}" doesn't exist`)
- if (this.queue.find((song) => song.id == id))
- throw new Error(`Song of id "${id}" already queued`)
+ if (this.queue.find((song) => song.id == id)) throw new Error(`Song of id "${id}" already queued`)
this.queue.push(curr_song)
}
/**
@@ -391,18 +368,12 @@ class EuterpeBuilder {
* will throw if audio_element is undefined (stupid vue setup amirite?)
* will throw if user has not interacted with the page yet (Can't initiate AudioContext)
*/
- constructor(
- private audio_element: HTMLAudioElement,
- private db: DB,
- private options?: BuilderOptions
- ) {
- if (audio_element === undefined)
- throw Error("audio_element was undefined")
+ constructor(private audio_element: HTMLAudioElement, private db: DB, private options?: BuilderOptions) {
+ if (audio_element === undefined) throw Error("audio_element was undefined")
// ↓ For old browsers
const AudioContext = window.AudioContext || window.webkitAudioContext
this.#audio_context = new AudioContext()
- this.#track =
- this.#audio_context.createMediaElementSource(audio_element)
+ this.#track = this.#audio_context.createMediaElementSource(audio_element)
this.#gain = this.#audio_context.createGain()
}
/**
@@ -411,9 +382,7 @@ class EuterpeBuilder {
*/
add_analyser() {
const analyser = this.#audio_context.createAnalyser()
- !this.#prev_node
- ? this.#track.connect(analyser)
- : this.#prev_node.connect(analyser)
+ !this.#prev_node ? this.#track.connect(analyser) : this.#prev_node.connect(analyser)
this.#prev_node = analyser
return analyser
}
@@ -423,9 +392,7 @@ class EuterpeBuilder {
*/
add_stereo_panner_node() {
const panner = this.#audio_context.createStereoPanner()
- !this.#prev_node
- ? this.#track.connect(panner)
- : this.#prev_node.connect(panner)
+ !this.#prev_node ? this.#track.connect(panner) : this.#prev_node.connect(panner)
this.#prev_node = panner
return panner
}
@@ -435,9 +402,7 @@ class EuterpeBuilder {
*/
add_wave_shaper_node() {
const shaper = this.#audio_context.createWaveShaper()
- !this.#prev_node
- ? this.#track.connect(shaper)
- : this.#prev_node.connect(shaper)
+ !this.#prev_node ? this.#track.connect(shaper) : this.#prev_node.connect(shaper)
this.#prev_node = shaper
return shaper
}
@@ -445,9 +410,7 @@ class EuterpeBuilder {
* For additional trickery, you can connect your own node.
*/
connect_custom_node(node: AudioNode) {
- !this.#prev_node
- ? this.#track.connect(node)
- : this.#prev_node.connect(node)
+ !this.#prev_node ? this.#track.connect(node) : this.#prev_node.connect(node)
this.#prev_node = node
}
/**
@@ -455,9 +418,7 @@ class EuterpeBuilder {
* eg. if you want the analyser nodes output to be affected by user #gain
*/
connect_gain() {
- !this.#prev_node
- ? this.#track.connect(this.#gain)
- : this.#prev_node.connect(this.#gain)
+ !this.#prev_node ? this.#track.connect(this.#gain) : this.#prev_node.connect(this.#gain)
this.#prev_node = this.#gain
this.#is_gain_connected = true
}
@@ -467,21 +428,10 @@ class EuterpeBuilder {
*/
build() {
if (!this.#is_gain_connected) {
- !this.#prev_node
- ? this.#track.connect(this.#gain)
- : this.#prev_node.connect(this.#gain)
+ !this.#prev_node ? this.#track.connect(this.#gain) : this.#prev_node.connect(this.#gain)
this.#prev_node = this.#gain
}
this.#prev_node.connect(this.#audio_context.destination)
- return new Euterpe(
- this.db,
- this.#audio_context,
- this.audio_element,
- this.#track,
- this.#gain,
- this.#volume,
- undefined,
- this.options
- )
+ return new Euterpe(this.db, this.#audio_context, this.audio_element, this.#track, this.#gain, this.#volume, undefined, this.options)
}
}
diff --git a/packages/music-library-web-test/src/db.ts b/packages/music-library-web-test/src/db.ts
index 9dcf1b7..b864e79 100644
--- a/packages/music-library-web-test/src/db.ts
+++ b/packages/music-library-web-test/src/db.ts
@@ -1,11 +1,4 @@
-import {
- DB,
- Artist,
- Song,
- RefTo,
- Ref,
- Platforms
-} from "@euterpe.js/music-library"
+import { DB, Artist, Song, RefTo, Ref, Platforms } from "@euterpe.js/music-library"
export const db = new DB()
db.add([
@@ -28,39 +21,21 @@ db.add([
}),
new Artist({
name: "IMANU",
- links: [
- [
- Platforms.Spotify,
- new URL(
- "https://open.spotify.com/artist/5Y7rFm0tiJTVDzGLMzz0W1?si=DRaZyugTTIqlBHDkMGKVqA&nd=1"
- )
- ]
- ]
+ links: [[Platforms.Spotify, new URL("https://open.spotify.com/artist/5Y7rFm0tiJTVDzGLMzz0W1?si=DRaZyugTTIqlBHDkMGKVqA&nd=1")]]
})
])
db.add([
new Song({
//Refrences are constructed as such. This allows to get to the artist from either collection or song
- artists: [
- new Ref(RefTo.Artists, 2),
- new Ref(RefTo.Artists, 3),
- new Ref(RefTo.Artists, 4)
- ],
+ artists: [new Ref(RefTo.Artists, 2), new Ref(RefTo.Artists, 3), new Ref(RefTo.Artists, 4)],
duration: 252,
name: "Star",
remix_artists: [new Ref(RefTo.Artists, 5)],
- url: new URL(
- "http://127.0.0.1:4200/Machinedrum, Tanerelle & Mono Poly - Star (IMANU Remix) final.mp3"
- )
+ url: new URL("http://127.0.0.1:4200/Machinedrum, Tanerelle & Mono Poly - Star (IMANU Remix) final.mp3")
}),
new Song({
//If you don't like guessing the IDs, then this is also a way to do it
- artists: [
- new Ref(
- RefTo.Artists,
- db.artists.find((a) => a.name == "Jamie xx")!.id!
- )
- ],
+ artists: [new Ref(RefTo.Artists, db.artists.find((a) => a.name == "Jamie xx")!.id!)],
duration: 331,
name: "Sleep Sound",
url: new URL("http://127.0.0.1:4200/Jamie xx - Sleep Sound.mp3")
diff --git a/packages/music-library-web-test/src/main.ts b/packages/music-library-web-test/src/main.ts
index 576e493..9ae4bc3 100644
--- a/packages/music-library-web-test/src/main.ts
+++ b/packages/music-library-web-test/src/main.ts
@@ -18,36 +18,32 @@ music_player.try_new_song_async(db.songs[curr_song_id].url.pathname).then(
document.querySelector("#previous")?.addEventListener("click", () => {
curr_song_id--
if (curr_song_id < 0) curr_song_id = 2
- music_player
- .try_new_song_async(db.songs[curr_song_id].url.pathname)
- .then(
- (s) => {
- change_current_song_text(db)
- music_player.play_async().catch((err) => {
- console.log(err)
- })
- },
- (e) => {
- console.log(e)
- }
- )
+ music_player.try_new_song_async(db.songs[curr_song_id].url.pathname).then(
+ (s) => {
+ change_current_song_text(db)
+ music_player.play_async().catch((err) => {
+ console.log(err)
+ })
+ },
+ (e) => {
+ console.log(e)
+ }
+ )
})
document.querySelector("#next")?.addEventListener("click", () => {
curr_song_id++
if (curr_song_id > 2) curr_song_id = 0
- music_player
- .try_new_song_async(db.songs[curr_song_id].url.pathname)
- .then(
- (s) => {
- change_current_song_text(db)
- music_player.play_async().catch((err) => {
- console.log(err)
- })
- },
- (e) => {
- console.log(e)
- }
- )
+ music_player.try_new_song_async(db.songs[curr_song_id].url.pathname).then(
+ (s) => {
+ change_current_song_text(db)
+ music_player.play_async().catch((err) => {
+ console.log(err)
+ })
+ },
+ (e) => {
+ console.log(e)
+ }
+ )
})
document.querySelector("#play")?.addEventListener("click", () => {
@@ -67,19 +63,15 @@ music_player.try_new_song_async(db.songs[curr_song_id].url.pathname).then(
document.querySelector("#unmute")?.addEventListener("click", () => {
music_player.unmute()
})
- document
- .querySelector("#toggle-mute")
- ?.addEventListener("click", () => {
- music_player.mute_toggle()
- })
- document
- .querySelector("#toggle-play")
- ?.addEventListener("click", () => {
- music_player.play_toggle_async().then(
- (s) => console.log("toggled play/pause"),
- (e) => alert("failed to toggle pause/play!" + e)
- )
- })
+ document.querySelector("#toggle-mute")?.addEventListener("click", () => {
+ music_player.mute_toggle()
+ })
+ document.querySelector("#toggle-play")?.addEventListener("click", () => {
+ music_player.play_toggle_async().then(
+ (s) => console.log("toggled play/pause"),
+ (e) => alert("failed to toggle pause/play!" + e)
+ )
+ })
document.querySelector("#volume")?.addEventListener("input", (e) => {
music_player.change_volume(e.target?.valueAsNumber)
})
@@ -100,8 +92,7 @@ music_player.try_new_song_async(db.songs[curr_song_id].url.pathname).then(
// Subscriptions to AudioContext changes, eg. time..
music_player.subscribe_to_formatted_duration_time((time) => {
document.querySelector("#duration").innerHTML = time
- document.querySelector("#seek").max =
- "" + music_player.get_current_duration()
+ document.querySelector("#seek").max = "" + music_player.get_current_duration()
})
music_player.subscribe_to_formatted_current_time_tick((time) => {
document.querySelector("#current").innerHTML = time
@@ -133,9 +124,7 @@ function change_current_song_text(db: DB) {
const curr_artist = artist.get(db) as Artist
if (curr_artist.links && curr_artist.links.length > 0) {
//returns "found a link! Spotify"
- console.log(
- "found a link! " + Platforms[curr_artist.links[0][0]]
- )
+ console.log("found a link! " + Platforms[curr_artist.links[0][0]])
const url = curr_artist.links[0][1]
final_text += `${curr_artist.name}, `
diff --git a/packages/music-library/src/index.ts b/packages/music-library/src/index.ts
index c0441bf..83dd037 100644
--- a/packages/music-library/src/index.ts
+++ b/packages/music-library/src/index.ts
@@ -1,14 +1,4 @@
-export {
- RefTo,
- Ref,
- Song,
- Collection,
- DB,
- Artist,
- Platforms,
- CollectionType,
- from_json
-}
+export { RefTo, Ref, Song, Collection, DB, Artist, Platforms, CollectionType, from_json }
type ID = number
enum RefTo {
Artists,
@@ -179,9 +169,7 @@ class DB {
add(artist: Artist[]): void
add(collection: Collection[]): void
add(mix: (Song | Artist | Collection)[]): void
- add(
- stuff: Artist[] | Collection[] | Song[] | (Song | Artist | Collection)[]
- ) {
+ add(stuff: Artist[] | Collection[] | Song[] | (Song | Artist | Collection)[]) {
/** All of this adds refrences to the other side of whatever is being added.
* eg. adding song with refrence to artist, adds refrence of song to artist
* and adds incremental ids
@@ -217,9 +205,7 @@ class DB {
}
for (const artist_ref of col.artists) {
const curr_artist = artist_ref.get(this) as Artist
- curr_artist.collections.push(
- new Ref(RefTo.Collections, col.id)
- )
+ curr_artist.collections.push(new Ref(RefTo.Collections, col.id))
}
this.collections.push(col)
} else if (input instanceof Song) {
@@ -229,16 +215,8 @@ class DB {
if (song.in_collection) {
const curr_col = song.in_collection.get(this) as Collection
curr_col.songs.push(new Ref(RefTo.Songs, song.id))
- song.artists.forEach((artist) =>
- curr_col.artists.push(
- new Ref(RefTo.Artists, artist.get(this)!.id!)
- )
- )
- song.remix_artists.forEach((artist) =>
- curr_col.artists.push(
- new Ref(RefTo.Artists, artist.get(this)!.id!)
- )
- )
+ song.artists.forEach((artist) => curr_col.artists.push(new Ref(RefTo.Artists, artist.get(this)!.id!)))
+ song.remix_artists.forEach((artist) => curr_col.artists.push(new Ref(RefTo.Artists, artist.get(this)!.id!)))
}
for (const artist_ref of song.artists) {
@@ -258,20 +236,12 @@ class DB {
this.artists.sort((a, b) => a.id! - b.id!)
}
}
-function from_json(db_stringified: {
- artists?: any
- songs?: any
- collections?: any
-}): DB {
+function from_json(db_stringified: { artists?: any; songs?: any; collections?: any }): DB {
const db = new DB()
if (db_stringified.artists) {
for (const artist of db_stringified.artists) {
- if (artist.songs)
- artist.songs = artist.songs.map((e: any) => ref_from_json(e))
- if (artist.collections)
- artist.collections = artist.collections.map((e: any) =>
- ref_from_json(e)
- )
+ if (artist.songs) artist.songs = artist.songs.map((e: any) => ref_from_json(e))
+ if (artist.collections) artist.collections = artist.collections.map((e: any) => ref_from_json(e))
if (artist.links)
artist.links = artist.links.map((e: any) => {
try {
@@ -280,8 +250,7 @@ function from_json(db_stringified: {
console.log(e)
}
})
- if (artist.publish_date)
- artist.publish_date = new Date(JSON.parse(artist.publish_date))
+ if (artist.publish_date) artist.publish_date = new Date(JSON.parse(artist.publish_date))
if (artist.id) artist.id = artist.id as ID
try {
if (artist.pfp) artist.pfp = new URL(artist.pfp)
@@ -298,22 +267,16 @@ function from_json(db_stringified: {
} catch (e) {
console.error("failed to parse song.url" + e)
}
- if (song.artists)
- song.artists = song.artists.map((e: any) => ref_from_json(e))
- if (song.remix_artists)
- song.remix_artists = song.remix_artists.map((e: any) =>
- ref_from_json(e)
- )
- if (song.in_collection)
- song.in_collection = ref_from_json(song.in_collection)
+ if (song.artists) song.artists = song.artists.map((e: any) => ref_from_json(e))
+ if (song.remix_artists) song.remix_artists = song.remix_artists.map((e: any) => ref_from_json(e))
+ if (song.in_collection) song.in_collection = ref_from_json(song.in_collection)
try {
if (song.cover) song.cover = new URL(song.cover)
} catch (e) {
console.error(e), console.error("failed to parse artist URL")
}
try {
- if (song.publish_date)
- song.publish_date = new Date(JSON.parse(song.publish_date))
+ if (song.publish_date) song.publish_date = new Date(JSON.parse(song.publish_date))
} catch (e) {
console.error(e), console.error("Failed to song cover url")
}
@@ -323,32 +286,18 @@ function from_json(db_stringified: {
}
if (db_stringified.collections) {
for (const collection of db_stringified.collections) {
- if (collection.artists)
- collection.artists = collection.artists.map((e: any) =>
- ref_from_json(e)
- )
- if (collection.songs)
- collection.songs = collection.songs.map((e: any) =>
- ref_from_json(e)
- )
- if (collection.type)
- collection.type = collection.type.map(
- (e: any) => e as CollectionType
- )
+ if (collection.artists) collection.artists = collection.artists.map((e: any) => ref_from_json(e))
+ if (collection.songs) collection.songs = collection.songs.map((e: any) => ref_from_json(e))
+ if (collection.type) collection.type = collection.type.map((e: any) => e as CollectionType)
try {
- if (collection.publish_date)
- collection.publish_date = new Date(
- JSON.parse(collection.publish_date)
- )
+ if (collection.publish_date) collection.publish_date = new Date(JSON.parse(collection.publish_date))
} catch (e) {
console.error(e), console.error("Failed to parse date")
}
try {
- if (collection.cover)
- collection.cover = new URL(collection.cover)
+ if (collection.cover) collection.cover = new URL(collection.cover)
} catch (e) {
- console.error(e),
- console.error("failed to parse collection cover url")
+ console.error(e), console.error("failed to parse collection cover url")
}
if (collection.id) collection.id = collection.id as ID
db.collections.push(collection)
diff --git a/packages/player-web-test/src/main.ts b/packages/player-web-test/src/main.ts
index 5a6ad56..5f705b7 100644
--- a/packages/player-web-test/src/main.ts
+++ b/packages/player-web-test/src/main.ts
@@ -4,82 +4,63 @@ const music_player_builder = new MusicPlayerBuilder(audio_el)
const music_player = music_player_builder.build()
music_player.change_volume(1)
-music_player
- .try_new_song(
- encodeURI(
- "http://" +
- window.location.host +
- "/nuphory - NVISION (EXTENDED MIX).ogg"
- )
- )
- .then(
- () => {
- let is_seeking = false
- document.querySelector("#play")?.addEventListener("click", () => {
- //const analyser_node = music_player_builder.add_analyser()
- music_player.try_play().then(
- () => {
- console.log("Playing!")
- },
- (e) => alert("Failed to play, " + e)
- )
- })
- document.querySelector("#pause")?.addEventListener("click", () => {
- music_player.pause()
- })
- document.querySelector("#mute")?.addEventListener("click", () => {
- music_player.mute()
- })
- document.querySelector("#unmute")?.addEventListener("click", () => {
- music_player.unmute()
- })
- document
- .querySelector("#toggle-mute")
- ?.addEventListener("click", () => {
- music_player.mute_toggle()
- })
- document
- .querySelector("#toggle-play")
- ?.addEventListener("click", () => {
- music_player.try_play_toggle().then(
- (s) => console.log("toggled play/pause"),
- (e) => alert("failed to toggle pause/play!" + e)
- )
- })
- document
- .querySelector("#volume")
- ?.addEventListener("input", (e) => {
- music_player.change_volume(e.target?.valueAsNumber)
- })
- document
- .querySelector("#seek")
- ?.addEventListener("mousedown", (e) => {
- is_seeking = true
- })
- document
- .querySelector("#seek")
- ?.addEventListener("mouseup", (e) => {
- try {
- music_player.try_seek(e.target?.valueAsNumber)
- console.log("seeked to " + e.target?.valueAsNumber)
- } catch (e) {
- alert("Failed seeking! " + e)
- }
- is_seeking = false
- })
- // Subscriptions to AudioContext changes, eg. time..
- music_player.on_duration_formatted((time) => {
- document.querySelector("#duration")!.innerHTML = time
- document.querySelector("#seek")!.max =
- "" + music_player.current_song_duration
- })
- music_player.on_time_tick_formatted((time) => {
- document.querySelector("#current")!.innerHTML = time
- })
- music_player.on_time_tick((time) => {
- if (is_seeking) return
- document.querySelector("#seek")!.value = "" + time
- })
- },
- (e) => console.log(e)
- )
+music_player.try_new_song(encodeURI("http://" + window.location.host + "/nuphory - NVISION (EXTENDED MIX).ogg")).then(
+ () => {
+ let is_seeking = false
+ document.querySelector("#play")?.addEventListener("click", () => {
+ //const analyser_node = music_player_builder.add_analyser()
+ music_player.try_play().then(
+ () => {
+ console.log("Playing!")
+ },
+ (e) => alert("Failed to play, " + e)
+ )
+ })
+ document.querySelector("#pause")?.addEventListener("click", () => {
+ music_player.pause()
+ })
+ document.querySelector("#mute")?.addEventListener("click", () => {
+ music_player.mute()
+ })
+ document.querySelector("#unmute")?.addEventListener("click", () => {
+ music_player.unmute()
+ })
+ document.querySelector("#toggle-mute")?.addEventListener("click", () => {
+ music_player.mute_toggle()
+ })
+ document.querySelector("#toggle-play")?.addEventListener("click", () => {
+ music_player.try_play_toggle().then(
+ (s) => console.log("toggled play/pause"),
+ (e) => alert("failed to toggle pause/play!" + e)
+ )
+ })
+ document.querySelector("#volume")?.addEventListener("input", (e) => {
+ music_player.change_volume(e.target?.valueAsNumber)
+ })
+ document.querySelector("#seek")?.addEventListener("mousedown", (e) => {
+ is_seeking = true
+ })
+ document.querySelector("#seek")?.addEventListener("mouseup", (e) => {
+ try {
+ music_player.try_seek(e.target?.valueAsNumber)
+ console.log("seeked to " + e.target?.valueAsNumber)
+ } catch (e) {
+ alert("Failed seeking! " + e)
+ }
+ is_seeking = false
+ })
+ // Subscriptions to AudioContext changes, eg. time..
+ music_player.on_duration_formatted((time) => {
+ document.querySelector("#duration")!.innerHTML = time
+ document.querySelector("#seek")!.max = "" + music_player.current_song_duration
+ })
+ music_player.on_time_tick_formatted((time) => {
+ document.querySelector("#current")!.innerHTML = time
+ })
+ music_player.on_time_tick((time) => {
+ if (is_seeking) return
+ document.querySelector("#seek")!.value = "" + time
+ })
+ },
+ (e) => console.log(e)
+)
diff --git a/packages/player/src/index.ts b/packages/player/src/index.ts
index cba671f..c19ff8c 100644
--- a/packages/player/src/index.ts
+++ b/packages/player/src/index.ts
@@ -29,28 +29,19 @@ class PubSub {
switch (event_name) {
case SubscribeEvents.CurrentTimeTick: {
if (this.el_current_time_tick.includes(func)) {
- this.el_current_time_tick.splice(
- this.el_current_time_tick.indexOf(func),
- 1
- )
+ this.el_current_time_tick.splice(this.el_current_time_tick.indexOf(func), 1)
}
break
}
case SubscribeEvents.FormattedDurationTick: {
if (this.el_formatted_duration_tick.includes(func)) {
- this.el_formatted_duration_tick.splice(
- this.el_formatted_duration_tick.indexOf(func),
- 1
- )
+ this.el_formatted_duration_tick.splice(this.el_formatted_duration_tick.indexOf(func), 1)
}
break
}
case SubscribeEvents.FormattedCurrentTimeTick: {
if (this.el_formatted_duration_tick.includes(func)) {
- this.el_formatted_duration_tick.splice(
- this.el_formatted_duration_tick.indexOf(func),
- 1
- )
+ this.el_formatted_duration_tick.splice(this.el_formatted_duration_tick.indexOf(func), 1)
}
break
}
@@ -114,10 +105,10 @@ export class MusicPlayer {
mute() {
this.#volume_cache = this.gain.gain.value
/* Gentler mute, doesn't pop
- gain.gain.linearRampToValueAtTime(
- 0,
- audio_context.currentTime + 0.1
- );*/
+ gain.gain.linearRampToValueAtTime(
+ 0,
+ audio_context.currentTime + 0.1
+ );*/
this.volume = this.gain.gain.value = 0
}
unmute() {
@@ -128,18 +119,14 @@ export class MusicPlayer {
}
/**
* Safer seek_async. Normal seek will try to start the player even if the track hasn't started yet, or was previously suspended/closed.
- * Will also resume playback if player is paused (by finishing the song etc)
+ * Will not resume playback if the player is paused
* @throws if "Can't seek - Audiocontext is not running"
*/
async try_seek(new_time: number) {
if (this.audio_context.state !== "running") {
this.is_playing = false
- throw new Error(
- "Can't seek - audioContext not running, audio_context.state : " +
- this.audio_context.state
- )
+ throw new Error("Can't seek - audioContext not running, audio_context.state : " + this.audio_context.state)
}
- if (this.audio_element.paused) await this.try_play()
this.audio_element.currentTime = new_time
}
@@ -235,10 +222,7 @@ export class MusicPlayer {
try {
await this.audio_context.resume()
} catch (e) {
- console.log(
- "loading new song - couldn't resume context before hand",
- e
- )
+ console.log("loading new song - couldn't resume context before hand", e)
}
}
return new Promise((resolve, reject) => {
@@ -273,8 +257,7 @@ export class MusicPlayer {
//once aborted, try to set current_song_duration
controller.signal.addEventListener("abort", (r) => {
this.current_song_duration = this.audio_element.duration
- if (typeof controller.signal.reason == "string")
- reject(new Error(controller.signal.reason))
+ if (typeof controller.signal.reason == "string") reject(new Error(controller.signal.reason))
resolve()
})
this.is_playing = false
@@ -341,24 +324,19 @@ export class MusicPlayer {
this.gain.gain.value = this.volume
this.time = this.audio_element.currentTime
- if (this.#pub_sub.el_current_time_tick.length == 0)
- cancelAnimationFrame(request_id)
+ if (this.#pub_sub.el_current_time_tick.length == 0) cancelAnimationFrame(request_id)
this.#pub_sub.emit(SubscribeEvents.CurrentTimeTick, this.time)
}
#emit_duration_fmt() {
- const request_id = requestAnimationFrame(
- this.#emit_duration_fmt.bind(this)
- )
+ const request_id = requestAnimationFrame(this.#emit_duration_fmt.bind(this))
const time = this.get_formatted_duration()
- if (this.#pub_sub.el_formatted_duration_tick.length == 0)
- cancelAnimationFrame(request_id)
+ if (this.#pub_sub.el_formatted_duration_tick.length == 0) cancelAnimationFrame(request_id)
this.#pub_sub.emit(SubscribeEvents.FormattedDurationTick, time)
}
#emit_time_fmt() {
const request_id = requestAnimationFrame(this.#emit_time_fmt.bind(this))
const time = this.get_formatted_current_time()
- if (this.#pub_sub.el_formatted_current_time_tick.length == 0)
- cancelAnimationFrame(request_id)
+ if (this.#pub_sub.el_formatted_current_time_tick.length == 0) cancelAnimationFrame(request_id)
this.#pub_sub.emit(SubscribeEvents.FormattedCurrentTimeTick, time)
}
/**
@@ -373,10 +351,7 @@ export class MusicPlayer {
* Will give formatted current time via get_formatted_current_time() every animation frame
*/
on_time_tick_formatted(callback: (data: any) => void) {
- this.#pub_sub.subscribe(
- SubscribeEvents.FormattedCurrentTimeTick,
- callback
- )
+ this.#pub_sub.subscribe(SubscribeEvents.FormattedCurrentTimeTick, callback)
this.#emit_time_fmt()
}
/**
@@ -401,13 +376,11 @@ export class MusicPlayerBuilder {
* will throw if user has not interacted with the page yet (Can't initiate AudioContext)
*/
constructor(private audio_element: HTMLAudioElement) {
- if (audio_element === undefined)
- throw Error("audio_element was undefined")
+ if (audio_element === undefined) throw Error("audio_element was undefined")
// ↓ For old browsers
const AudioContext = window.AudioContext || window.webkitAudioContext
this.#audio_context = new AudioContext()
- this.#track =
- this.#audio_context.createMediaElementSource(audio_element)
+ this.#track = this.#audio_context.createMediaElementSource(audio_element)
this.#gain = this.#audio_context.createGain()
}
/**
@@ -416,9 +389,7 @@ export class MusicPlayerBuilder {
*/
add_analyser() {
const analyser = this.#audio_context.createAnalyser()
- !this.#prev_node
- ? this.#track.connect(analyser)
- : this.#prev_node.connect(analyser)
+ !this.#prev_node ? this.#track.connect(analyser) : this.#prev_node.connect(analyser)
this.#prev_node = analyser
return analyser
}
@@ -428,9 +399,7 @@ export class MusicPlayerBuilder {
*/
add_stereo_panner_node() {
const panner = this.#audio_context.createStereoPanner()
- !this.#prev_node
- ? this.#track.connect(panner)
- : this.#prev_node.connect(panner)
+ !this.#prev_node ? this.#track.connect(panner) : this.#prev_node.connect(panner)
this.#prev_node = panner
return panner
}
@@ -440,9 +409,7 @@ export class MusicPlayerBuilder {
*/
add_wave_shaper_node() {
const shaper = this.#audio_context.createWaveShaper()
- !this.#prev_node
- ? this.#track.connect(shaper)
- : this.#prev_node.connect(shaper)
+ !this.#prev_node ? this.#track.connect(shaper) : this.#prev_node.connect(shaper)
this.#prev_node = shaper
return shaper
}
@@ -450,9 +417,7 @@ export class MusicPlayerBuilder {
* For additional trickery, you can connect your own node.
*/
connect_custom_node(node: AudioNode) {
- !this.#prev_node
- ? this.#track.connect(node)
- : this.#prev_node.connect(node)
+ !this.#prev_node ? this.#track.connect(node) : this.#prev_node.connect(node)
this.#prev_node = node
}
/**
@@ -460,9 +425,7 @@ export class MusicPlayerBuilder {
* eg. if you want the analyser nodes output to be affected by user #gain
*/
connect_gain() {
- !this.#prev_node
- ? this.#track.connect(this.#gain)
- : this.#prev_node.connect(this.#gain)
+ !this.#prev_node ? this.#track.connect(this.#gain) : this.#prev_node.connect(this.#gain)
this.#prev_node = this.#gain
this.#is_gain_connected = true
}
@@ -472,18 +435,10 @@ export class MusicPlayerBuilder {
*/
build() {
if (!this.#is_gain_connected) {
- !this.#prev_node
- ? this.#track.connect(this.#gain)
- : this.#prev_node.connect(this.#gain)
+ !this.#prev_node ? this.#track.connect(this.#gain) : this.#prev_node.connect(this.#gain)
this.#prev_node = this.#gain
}
this.#prev_node.connect(this.#audio_context.destination)
- return new MusicPlayer(
- this.#audio_context,
- this.audio_element,
- this.#track,
- this.#gain,
- this.#volume
- )
+ return new MusicPlayer(this.#audio_context, this.audio_element, this.#track, this.#gain, this.#volume)
}
}
diff --git a/packages/preprocessor/src/crawler.mjs b/packages/preprocessor/src/crawler.mjs
index 6fc09f6..c6f3cd9 100644
--- a/packages/preprocessor/src/crawler.mjs
+++ b/packages/preprocessor/src/crawler.mjs
@@ -1,15 +1,6 @@
import filehound from "filehound"
import fs from "fs"
-const songs = filehound
- .create()
- .path("../public/samples")
- .ext(["ogg"])
- .findSync()
-fs.writeFile(
- "songs_list.ts",
- `export const songs = ` + JSON.stringify(songs),
- "utf8",
- () => {
- 1 + 1
- }
-)
+const songs = filehound.create().path("../public/samples").ext(["ogg"]).findSync()
+fs.writeFile("songs_list.ts", `export const songs = ` + JSON.stringify(songs), "utf8", () => {
+ 1 + 1
+})
diff --git a/packages/preprocessor/src/generate_db.ts b/packages/preprocessor/src/generate_db.ts
index 6552e7a..8e6fda5 100644
--- a/packages/preprocessor/src/generate_db.ts
+++ b/packages/preprocessor/src/generate_db.ts
@@ -13,10 +13,7 @@ export function generate_db() {
for (let i = 0; i < songs.length; i++) {
const song = songs[i]
const last_i = song.lastIndexOf(path_char)
- const collection_name = song.slice(
- song.slice(0, last_i).lastIndexOf(path_char) + 1,
- last_i
- )
+ const collection_name = song.slice(song.slice(0, last_i).lastIndexOf(path_char) + 1, last_i)
/*
const foreforelast_i = song.slice(0, forelast_i - 1)
const foreforeforelast_i = song.slice(0, foreforelast_i - 1).lastIndexOf("\\")
@@ -44,15 +41,11 @@ export function generate_db() {
const last_i = song.song.lastIndexOf(path_char)
const name = song.song.slice(last_i + 1)
- const song_url = song.song.slice(
- song.song.indexOf(`public${path_char}`) + 7
- )
+ const song_url = song.song.slice(song.song.indexOf(`public${path_char}`) + 7)
const db_song = new Song({
name: name.slice(0, name.lastIndexOf(".")),
artists: [],
- url: new URL(
- `${window.location.href}${song_url}`.replaceAll("\\", "/")
- ),
+ url: new URL(`${window.location.href}${song_url}`.replaceAll("\\", "/")),
duration: 0,
remix_artists: [],
in_collection: new Ref(RefTo.Collections, song.collection_id)
diff --git a/packages/preprocessor/src/main.ts b/packages/preprocessor/src/main.ts
index d26ecfe..6fd22fd 100644
--- a/packages/preprocessor/src/main.ts
+++ b/packages/preprocessor/src/main.ts
@@ -1,12 +1,6 @@
import { DB, from_json } from "@euterpe.js/music-library"
import { generate_db } from "./generate_db"
-import {
- AudioVisualBuilder,
- SmoothingAlgorythm,
- ShapeType,
- WaveformOrientation,
- WaveformShape
-} from "@euterpe.js/visualizer"
+import { AudioVisualBuilder, SmoothingAlgorythm, ShapeType, WaveformOrientation, WaveformShape } from "@euterpe.js/visualizer"
let result: AnalyzeReturn | undefined
@@ -23,10 +17,7 @@ audioContextAnalyser.smoothingTimeConstant = 0
const analyserBufferLength = audioContextAnalyser.frequencyBinCount
const FFTDataArray = new Float32Array(analyserBufferLength)
//Connect all audio Nodes
-track
- .connect(audioContextAnalyser)
- .connect(gain)
- .connect(audioContext.destination)
+track.connect(audioContextAnalyser).connect(gain).connect(audioContext.destination)
document.getElementById("analyze")!.addEventListener("click", async (ev) => {
audioContext.resume()
@@ -50,11 +41,7 @@ document.getElementById("upload")!.addEventListener("change", (ev) => {
for (const song of new_db.songs) {
if (song.fft_data) {
for (let i = 0; i < song.fft_data.length; i++) {
- if (
- song.fft_data[i] === null ||
- song.fft_data[i] === undefined
- )
- song.fft_data[i] = -Infinity
+ if (song.fft_data[i] === null || song.fft_data[i] === undefined) song.fft_data[i] = -Infinity
}
}
}
@@ -68,26 +55,18 @@ async function svg() {
return
}
console.log("Creating svgs...")
- const canvas_wrapper = document.querySelector(
- ".canvas-wrapper"
- ) as HTMLElement
+ const canvas_wrapper = document.querySelector(".canvas-wrapper") as HTMLElement
- const waveform_canvas = document
- .querySelector("#waveform-canvas")
- ?.cloneNode() as SVGSVGElement
+ const waveform_canvas = document.querySelector("#waveform-canvas")?.cloneNode() as SVGSVGElement
canvas_wrapper.childNodes.forEach((c) => c.remove())
canvas_wrapper.appendChild(waveform_canvas)
for (const song of result.db.songs) {
console.log("creating waveform for -> " + song.name)
- const curr_waveform_canvas =
- waveform_canvas.cloneNode() as SVGSVGElement
+ const curr_waveform_canvas = waveform_canvas.cloneNode() as SVGSVGElement
waveform_canvas.parentElement?.append(curr_waveform_canvas)
- const waveform_visual_builder = new AudioVisualBuilder(
- result.analyzer_node,
- curr_waveform_canvas
- )
+ const waveform_visual_builder = new AudioVisualBuilder(result.analyzer_node, curr_waveform_canvas)
.set_fft_data_tresholds({
point_count_i: 100,
fft_multiplier_i: 0.9,
@@ -95,15 +74,11 @@ async function svg() {
})
.set_fft_time_smoothing(0.8)
.set_smoothing_algorythm(SmoothingAlgorythm.CatmullRom)
- const waveform_visual = waveform_visual_builder.build(
- ShapeType.Waveform,
- true,
- {
- fft_data: new Float32Array(new Float64Array(song.fft_data!)),
- orientation: WaveformOrientation.Horizontal,
- shape_type: WaveformShape.LineLike
- }
- )
+ const waveform_visual = waveform_visual_builder.build(ShapeType.Waveform, true, {
+ fft_data: new Float32Array(new Float64Array(song.fft_data!)),
+ orientation: WaveformOrientation.Horizontal,
+ shape_type: WaveformShape.LineLike
+ })
waveform_visual.draw_once()
// await new Promise((done) => setTimeout(() => done(), 500))
// @ts-ignore
@@ -125,21 +100,13 @@ async function analyze(): Promise<AnalyzeReturn> {
console.log(db)
for (const song of db.songs) {
// const song = db.songs[db.songs.length - 1]
- console.log(
- `Analyzing ${song.name}, ${db.songs.indexOf(song) + 1}/${
- db.songs.length
- }`
- )
+ console.log(`Analyzing ${song.name}, ${db.songs.indexOf(song) + 1}/${db.songs.length}`)
//if not loaded yet keep trying
audioEl.src = song.url.href
await awaitLoad(audioEl)
song.duration = audioEl.duration
let currentFFTData = []
- for (
- let curSecond = 0;
- curSecond < song.duration;
- curSecond += song.duration / samplingRate
- ) {
+ for (let curSecond = 0; curSecond < song.duration; curSecond += song.duration / samplingRate) {
console.log("working...")
audioEl.currentTime = curSecond
await audioEl.play()
@@ -149,9 +116,7 @@ async function analyze(): Promise<AnalyzeReturn> {
FFTDataArray.forEach((element) => {
volume += element
})
- currentFFTData.push(
- Math.round((volume / FFTDataArray.length) * 100) / 100
- )
+ currentFFTData.push(Math.round((volume / FFTDataArray.length) * 100) / 100)
}
song.fft_data = currentFFTData
console.log(song.fft_data)
diff --git a/packages/preprocessor/src/media_process.js b/packages/preprocessor/src/media_process.js
index b890b54..1af51c9 100644
--- a/packages/preprocessor/src/media_process.js
+++ b/packages/preprocessor/src/media_process.js
@@ -39,8 +39,7 @@ function generate_new_sounds_ogg(file, currentExtention) {
//Adds 25ms of delay to all samples
command += `-af 'adelay=25:all=true' `
//So the demo is HQ
- if (file.includes("demo"))
- command += `-c:a libopus -b:a 256k '${file}.ogg'"`
+ if (file.includes("demo")) command += `-c:a libopus -b:a 256k '${file}.ogg'"`
else command += `-c:a libopus -b:a 96k '${file}.ogg'"`
exec(command)
console.log(command)
@@ -59,11 +58,7 @@ function generate_new_sounds_mp3(file, currentExtention) {
exec(command)
// console.log(command)
}
-function generate_new_video_sizes_mp4(
- file,
- currentExtention,
- width_resolutions
-) {
+function generate_new_video_sizes_mp4(file, currentExtention, width_resolutions) {
const path = file.substring(0, file.lastIndexOf("\\"))
file = file.substring(file.lastIndexOf("\\") + 1)
@@ -86,11 +81,7 @@ function generate_new_video_sizes_mp4(
}
})
}
-function generate_new_video_sizes_webm(
- file,
- currentExtention,
- width_resolutions
-) {
+function generate_new_video_sizes_webm(file, currentExtention, width_resolutions) {
const path = file.substring(0, file.lastIndexOf("\\"))
file = file.substring(file.lastIndexOf("\\") + 1)
@@ -129,21 +120,12 @@ for (let i = 0; i < dirs.length; i++) {
current_folder_files = current_folder_files.slice(1)
}
for (let current_media of current_folder_files) {
- current_media = [
- current_media.substring(0, current_media.lastIndexOf(".")),
- current_media.substring(current_media.lastIndexOf(".") + 1)
- ]
+ current_media = [current_media.substring(0, current_media.lastIndexOf(".")), current_media.substring(current_media.lastIndexOf(".") + 1)]
if (current_media[1] == "wav") {
console.log(`${current_media[0]}.${current_media[1]}\n`)
- generate_new_sounds_ogg(
- `${current_media[0]}`,
- `${current_media[1]}`
- )
- generate_new_sounds_mp3(
- `${current_media[0]}`,
- `${current_media[1]}`
- )
+ generate_new_sounds_ogg(`${current_media[0]}`, `${current_media[1]}`)
+ generate_new_sounds_mp3(`${current_media[0]}`, `${current_media[1]}`)
}
/*
diff --git a/packages/visualizer-web-test/src/main.ts b/packages/visualizer-web-test/src/main.ts
index 0368e21..b119c99 100644
--- a/packages/visualizer-web-test/src/main.ts
+++ b/packages/visualizer-web-test/src/main.ts
@@ -1,12 +1,6 @@
import { MusicPlayerBuilder } from "@euterpe.js/player"
import { fft_data } from "./waveform_data"
-import {
- AudioVisualBuilder,
- SmoothingAlgorythm,
- ShapeType,
- WaveformOrientation,
- WaveformShape
-} from "@euterpe.js/visualizer"
+import { AudioVisualBuilder, SmoothingAlgorythm, ShapeType, WaveformOrientation, WaveformShape } from "@euterpe.js/visualizer"
const audio_el = document.querySelector("#audio") as HTMLAudioElement
const music_player_builder = new MusicPlayerBuilder(audio_el)
const trapnation_analyser_node = music_player_builder.add_analyser()
@@ -14,19 +8,14 @@ const bar_analyser_node = music_player_builder.add_analyser()
const music_player = music_player_builder.build()
music_player.change_volume(0.5)
-const waveform_canvas = document.querySelector(
- "#waveform-canvas"
-) as SVGSVGElement
+const waveform_canvas = document.querySelector("#waveform-canvas") as SVGSVGElement
const seek_element = document.querySelector("#seek") as HTMLInputElement
const duration_element = document.querySelector("#duration") as HTMLElement
const current_time_element = document.querySelector("#current") as HTMLElement
/**
* Create the Audio Visualizer
*/
-const trapnation_visual_builder = new AudioVisualBuilder(
- trapnation_analyser_node,
- document.querySelector("#trapnation-canvas") as SVGSVGElement
-)
+const trapnation_visual_builder = new AudioVisualBuilder(trapnation_analyser_node, document.querySelector("#trapnation-canvas") as SVGSVGElement)
//Because the to_fft_range is so low, it needs more FFT data.
.set_fft_size(8192)
//Tells the Visualiser how to parse data which mutates our initial shape
@@ -39,15 +28,9 @@ const trapnation_visual_builder = new AudioVisualBuilder(
.set_fft_time_smoothing(0.6)
//If not using typescript enums, CatmullRom = number 2
.set_smoothing_algorythm(SmoothingAlgorythm.CatmullRom)
-const trapnation_visual = trapnation_visual_builder.build(
- ShapeType.Circle,
- false
-)
+const trapnation_visual = trapnation_visual_builder.build(ShapeType.Circle, false)
-const bar_visual_builder = new AudioVisualBuilder(
- bar_analyser_node,
- document.querySelector("#bar-canvas") as SVGSVGElement
-)
+const bar_visual_builder = new AudioVisualBuilder(bar_analyser_node, document.querySelector("#bar-canvas") as SVGSVGElement)
.set_fft_data_tresholds({
point_count_i: 50,
fft_multiplier_i: 2,
@@ -57,10 +40,7 @@ const bar_visual_builder = new AudioVisualBuilder(
.set_smoothing_algorythm(SmoothingAlgorythm.BezierPerpendicular)
const bar_visual = bar_visual_builder.build(ShapeType.Line, false)
-const waveform_visual_builder = new AudioVisualBuilder(
- bar_analyser_node,
- waveform_canvas
-)
+const waveform_visual_builder = new AudioVisualBuilder(bar_analyser_node, waveform_canvas)
.set_fft_data_tresholds({
point_count_i: 100,
fft_multiplier_i: 1,
@@ -68,15 +48,11 @@ const waveform_visual_builder = new AudioVisualBuilder(
})
.set_fft_time_smoothing(0.8)
.set_smoothing_algorythm(SmoothingAlgorythm.CatmullRom)
-const waveform_visual = waveform_visual_builder.build(
- ShapeType.Waveform,
- true,
- {
- fft_data: new Float32Array(fft_data.fft_data),
- orientation: WaveformOrientation.Horizontal,
- shape_type: WaveformShape.LineLike
- }
-)
+const waveform_visual = waveform_visual_builder.build(ShapeType.Waveform, true, {
+ fft_data: new Float32Array(fft_data.fft_data),
+ orientation: WaveformOrientation.Horizontal,
+ shape_type: WaveformShape.LineLike
+})
trapnation_visual.draw()
bar_visual.draw()
@@ -128,11 +104,7 @@ function convert_range(value: number, r1: number[], r2: number[]) {
waveform_canvas.addEventListener("mousemove", (e) => {
const rect = e.target.getBoundingClientRect()
const x = e.clientX - rect.left
- const resX = convert_range(
- x,
- [0, rect.width],
- [0, waveform_canvas.viewBox.baseVal.width + 40]
- )
+ const resX = convert_range(x, [0, rect.width], [0, waveform_canvas.viewBox.baseVal.width + 40])
const polygon = `polygon(0 0, ${resX}px 0, ${resX}px 100%, 0 100%)`
document.documentElement.style.setProperty("--clip-seek-path", polygon)
})
@@ -143,86 +115,67 @@ waveform_canvas.addEventListener("mouseleave", (e) => {
/*
* The player part
*/
-music_player
- .try_new_song_async(
- encodeURI("http://localhost:4200/nuphory - NVISION (EXTENDED MIX).ogg")
- )
- .then(
- () => {
- let is_seeking = false
- document.querySelector("#play")?.addEventListener("click", () => {
- music_player.play_async().then(
- () => {
- console.log("Playing!")
- },
- (e) => alert("Failed to play, " + e)
- )
- })
- document.querySelector("#pause")?.addEventListener("click", () => {
- music_player.pause()
- })
- document.querySelector("#mute")?.addEventListener("click", () => {
- music_player.mute()
- })
- document.querySelector("#unmute")?.addEventListener("click", () => {
- music_player.unmute()
- })
- document
- .querySelector("#toggle-mute")
- ?.addEventListener("click", () => {
- music_player.mute_toggle()
- })
- document
- .querySelector("#toggle-play")
- ?.addEventListener("click", () => {
- music_player.play_toggle_async().then(
- (s) => console.log("toggled play/pause"),
- (e) => alert("failed to toggle pause/play!" + e)
- )
- })
- document
- .querySelector("#volume")
- ?.addEventListener("input", (e) => {
- music_player.change_volume(e.target?.valueAsNumber)
- })
- document
- .querySelector("#seek")
- ?.addEventListener("mousedown", (e) => {
- is_seeking = true
- })
- document
- .querySelector("#seek")
- ?.addEventListener("mouseup", (e) => {
- music_player.try_seek_async(e.target?.valueAsNumber).then(
- () => {
- console.log("seeked to " + e.target?.valueAsNumber)
- },
- () => {
- alert("Failed seeking! " + e)
- }
- )
- is_seeking = false
- })
- // Subscriptions to AudioContext changes, eg. time..
- music_player.on_duration_formatted((time) => {
- duration_element.innerHTML = time
- seek_element.max = "" + music_player.current_song_duration
- })
- music_player.on_time_tick_formatted((time) => {
- current_time_element.innerHTML = time
- })
- music_player.on_time_tick((time) => {
- if (is_seeking) return
- seek_element.value = "" + time
- const x = `${
- (time / music_player.current_song_duration) * 100
- }%`
- const polygon = `polygon(0 0, ${x} 0, ${x} 100%, 0 100%)`
- document.documentElement.style.setProperty(
- "--clip-time-path",
- polygon
- )
- })
- },
- (e) => console.log(e)
- )
+music_player.try_new_song_async(encodeURI("http://localhost:4200/nuphory - NVISION (EXTENDED MIX).ogg")).then(
+ () => {
+ let is_seeking = false
+ document.querySelector("#play")?.addEventListener("click", () => {
+ music_player.play_async().then(
+ () => {
+ console.log("Playing!")
+ },
+ (e) => alert("Failed to play, " + e)
+ )
+ })
+ document.querySelector("#pause")?.addEventListener("click", () => {
+ music_player.pause()
+ })
+ document.querySelector("#mute")?.addEventListener("click", () => {
+ music_player.mute()
+ })
+ document.querySelector("#unmute")?.addEventListener("click", () => {
+ music_player.unmute()
+ })
+ document.querySelector("#toggle-mute")?.addEventListener("click", () => {
+ music_player.mute_toggle()
+ })
+ document.querySelector("#toggle-play")?.addEventListener("click", () => {
+ music_player.play_toggle_async().then(
+ (s) => console.log("toggled play/pause"),
+ (e) => alert("failed to toggle pause/play!" + e)
+ )
+ })
+ document.querySelector("#volume")?.addEventListener("input", (e) => {
+ music_player.change_volume(e.target?.valueAsNumber)
+ })
+ document.querySelector("#seek")?.addEventListener("mousedown", (e) => {
+ is_seeking = true
+ })
+ document.querySelector("#seek")?.addEventListener("mouseup", (e) => {
+ music_player.try_seek_async(e.target?.valueAsNumber).then(
+ () => {
+ console.log("seeked to " + e.target?.valueAsNumber)
+ },
+				(err) => {
+					alert("Failed seeking! " + err)
+				}
+ )
+ is_seeking = false
+ })
+		// Subscriptions to AudioContext changes, e.g. time
+ music_player.on_duration_formatted((time) => {
+ duration_element.innerHTML = time
+ seek_element.max = "" + music_player.current_song_duration
+ })
+ music_player.on_time_tick_formatted((time) => {
+ current_time_element.innerHTML = time
+ })
+ music_player.on_time_tick((time) => {
+ if (is_seeking) return
+ seek_element.value = "" + time
+ const x = `${(time / music_player.current_song_duration) * 100}%`
+ const polygon = `polygon(0 0, ${x} 0, ${x} 100%, 0 100%)`
+ document.documentElement.style.setProperty("--clip-time-path", polygon)
+ })
+ },
+ (e) => console.log(e)
+)
diff --git a/packages/visualizer-web-test/src/waveform_data.ts b/packages/visualizer-web-test/src/waveform_data.ts
index 6c62797..6c8938c 100644
--- a/packages/visualizer-web-test/src/waveform_data.ts
+++ b/packages/visualizer-web-test/src/waveform_data.ts
@@ -1,17 +1,10 @@
export const fft_data = {
fft_data: [
- -106.24, -99.73, -100.98, -101.34, -107.01, -92.38, -84.85, -90.28,
- -93.68, -95.02, -97.16, -96.32, -99.23, -103.13, -85.57, -98.17,
- -103.27, -107.5, -83.62, -95.23, -97.12, -94.78, -95.93, -101.42,
- -97.83, -102.42, -111.74, -101.38, -106.8, -111.05, -88.04, -90.88,
- -97.67, -96.31, -96.69, -102.15, -102.03, -100.51, -107.14, -101.48,
- -101.6, -106.62, -73.94, -79.53, -92.74, -96.08, -96.26, -100.35,
- -99.13, -102.03, -107.4, -93.57, -102.31, -102.3, -109.04, -81.85,
- -92.79, -100.06, -95.79, -96.49, -99.89, -100.27, -102.69, -107.35,
- -103.94, -104.64, -104.3, -78.82, -84.2, -95.29, -92.57, -93.47, -98.08,
- -98.9, -101.56, -109.38, -102.01, -102.51, -104.83, -72.18, -76.52,
- -91.69, -99.97, -96.63, -98.61, -76.97, -90.41, -100.38, -106.77,
- -102.83, -104.46, -108.59, -80.97, -88.05, -100.77, -79.64, -72.3,
- -87.96, -92.89, -93.03
+ -106.24, -99.73, -100.98, -101.34, -107.01, -92.38, -84.85, -90.28, -93.68, -95.02, -97.16, -96.32, -99.23, -103.13, -85.57, -98.17, -103.27,
+ -107.5, -83.62, -95.23, -97.12, -94.78, -95.93, -101.42, -97.83, -102.42, -111.74, -101.38, -106.8, -111.05, -88.04, -90.88, -97.67, -96.31,
+ -96.69, -102.15, -102.03, -100.51, -107.14, -101.48, -101.6, -106.62, -73.94, -79.53, -92.74, -96.08, -96.26, -100.35, -99.13, -102.03,
+ -107.4, -93.57, -102.31, -102.3, -109.04, -81.85, -92.79, -100.06, -95.79, -96.49, -99.89, -100.27, -102.69, -107.35, -103.94, -104.64,
+ -104.3, -78.82, -84.2, -95.29, -92.57, -93.47, -98.08, -98.9, -101.56, -109.38, -102.01, -102.51, -104.83, -72.18, -76.52, -91.69, -99.97,
+ -96.63, -98.61, -76.97, -90.41, -100.38, -106.77, -102.83, -104.46, -108.59, -80.97, -88.05, -100.77, -79.64, -72.3, -87.96, -92.89, -93.03
]
}
diff --git a/packages/visualizer/src/index.ts b/packages/visualizer/src/index.ts
index 6e16ab3..0230e59 100644
--- a/packages/visualizer/src/index.ts
+++ b/packages/visualizer/src/index.ts
@@ -88,16 +88,9 @@ export class AudioVisual {
} else {
this.#analyzer_node.getFloatFrequencyData(this.#fft_data)
}
- const from = Math.round(
- (this.#point_count / 100) * this.#from_fft_range
- )
- const to = Math.round(
- this.#buffer_length -
- (this.#buffer_length / 100) * this.#to_fft_range
- )
- const squeeze_factor = Math.round(
- (this.#buffer_length - to) / this.#point_count
- )
+ const from = Math.round((this.#point_count / 100) * this.#from_fft_range)
+ const to = Math.round(this.#buffer_length - (this.#buffer_length / 100) * this.#to_fft_range)
+ const squeeze_factor = Math.round((this.#buffer_length - to) / this.#point_count)
const return_array = new Array(this.#point_count)
for (let i = 0; i < this.#point_count + 1; i++) {
@@ -117,27 +110,16 @@ export class AudioVisual {
case ShapeType.Circle: {
const pointDistance = 7
for (let curPoint = 0; curPoint < arr.length; curPoint++) {
- const [dx, dy] = this.#normalise_perpendicular_anchors(
- arr[curPoint].x,
- arr[curPoint].y
- )
+ const [dx, dy] = this.#normalise_perpendicular_anchors(arr[curPoint].x, arr[curPoint].y)
const perpendicular = [-dy, dx]
anchors.push({
leftAnchor: {
- x:
- arr[curPoint].x +
- pointDistance * perpendicular[0],
- y:
- arr[curPoint].y +
- pointDistance * perpendicular[1]
+ x: arr[curPoint].x + pointDistance * perpendicular[0],
+ y: arr[curPoint].y + pointDistance * perpendicular[1]
},
rightAnchor: {
- x:
- arr[curPoint].x -
- pointDistance * perpendicular[0],
- y:
- arr[curPoint].y -
- pointDistance * perpendicular[1]
+ x: arr[curPoint].x - pointDistance * perpendicular[0],
+ y: arr[curPoint].y - pointDistance * perpendicular[1]
}
})
}
@@ -194,16 +176,7 @@ export class AudioVisual {
const cp2x = x2 - ((x3 - x1) / 6) * k
const cp2y = y2 - ((y3 - y1) / 6) * k
- path +=
- "C" +
- [
- cp1x.toFixed(2),
- cp1y.toFixed(2),
- cp2x.toFixed(2),
- cp2y.toFixed(2),
- x2.toFixed(2),
- y2.toFixed(2)
- ]
+ path += "C" + [cp1x.toFixed(2), cp1y.toFixed(2), cp2x.toFixed(2), cp2y.toFixed(2), x2.toFixed(2), y2.toFixed(2)]
}
return path
}
@@ -217,16 +190,10 @@ export class AudioVisual {
case ShapeType.Line: {
for (let i = 0; i < frequency_data.length - 1; i++) {
const mutator = isFinite(frequency_data[i])
- ? this.#convert_range(
- frequency_data[i] * this.#fft_multiplier +
- this.#fft_offset,
- in_range,
- out_range
- )
+ ? this.#convert_range(frequency_data[i] * this.#fft_multiplier + this.#fft_offset, in_range, out_range)
: -1 * this.#canvas_height
mutated_points.push({
- x: this.#shape.points[i]
- .x /** ((Math.max(FFTDataArray[i] + 100)) * 4)*/,
+ x: this.#shape.points[i].x /** ((Math.max(FFTDataArray[i] + 100)) * 4)*/,
y: this.#shape.points[i].y - mutator
})
}
@@ -234,30 +201,13 @@ export class AudioVisual {
}
case ShapeType.Circle: {
for (let i = 0; i < frequency_data.length - 1; i++) {
- const new_i =
- i > (frequency_data.length - 1) / 2
- ? frequency_data.length - 1 - i
- : i
+ const new_i = i > (frequency_data.length - 1) / 2 ? frequency_data.length - 1 - i : i
mutated_points.push({
x:
- this.#shape.points[i].x *
- Math.max(
- (frequency_data[new_i] *
- this.#fft_multiplier +
- this.#fft_offset) /
- 50,
- 1
- ) +
+ this.#shape.points[i].x * Math.max((frequency_data[new_i] * this.#fft_multiplier + this.#fft_offset) / 50, 1) +
this.#canvas_width / 2,
y:
- this.#shape.points[i].y *
- Math.max(
- (frequency_data[new_i] *
- this.#fft_multiplier +
- this.#fft_offset) /
- 50,
- 1
- ) +
+ this.#shape.points[i].y * Math.max((frequency_data[new_i] * this.#fft_multiplier + this.#fft_offset) / 50, 1) +
this.#canvas_height / 2
})
/* TODO: IMPLEMENT SCALING TO BEAT
@@ -268,23 +218,12 @@ export class AudioVisual {
break
}
case ShapeType.Waveform: {
- if (
- this.#shape.waveform_options!.shape_type ==
- WaveformShape.LineLike
- ) {
+ if (this.#shape.waveform_options!.shape_type == WaveformShape.LineLike) {
if (this.#shape.symmetry) {
for (let i = 0; i < this.#shape.points.length; i += 2) {
- let mutator = this.#convert_range(
- frequency_data[i / 2] * this.#fft_multiplier +
- this.#fft_offset,
- in_range,
- out_range
- )
+ let mutator = this.#convert_range(frequency_data[i / 2] * this.#fft_multiplier + this.#fft_offset, in_range, out_range)
if (mutator <= 0) mutator = 2
- if (
- this.#shape.waveform_options!.orientation ==
- WaveformOrientation.Horizontal
- ) {
+ if (this.#shape.waveform_options!.orientation == WaveformOrientation.Horizontal) {
mutated_points.push({
x: this.#shape.points[i].x,
y: this.#shape.points[i].y - mutator
@@ -306,16 +245,8 @@ export class AudioVisual {
}
} else {
for (let i = 0; i < frequency_data.length - 1; i++) {
- const mutator = this.#convert_range(
- frequency_data[i] * this.#fft_multiplier +
- this.#fft_offset,
- in_range,
- out_range
- )
- if (
- this.#shape.waveform_options!.orientation ==
- WaveformOrientation.Horizontal
- ) {
+ const mutator = this.#convert_range(frequency_data[i] * this.#fft_multiplier + this.#fft_offset, in_range, out_range)
+ if (this.#shape.waveform_options!.orientation == WaveformOrientation.Horizontal) {
mutated_points.push({
x: this.#shape.points[i].x,
y: this.#shape.points[i].y - mutator
@@ -363,36 +294,26 @@ export class AudioVisual {
switch (this.#shape.shape_type) {
case ShapeType.Line: {
for (let i = 0; i < arr.length; i++) {
- path += `L ${arr[i].x.toFixed(2)},${arr[
- i
- ].y.toFixed(2)} `
+ path += `L ${arr[i].x.toFixed(2)},${arr[i].y.toFixed(2)} `
}
if (this.#shape.shape_type == ShapeType.Line) {
- path += `L ${this.#canvas_width} ${
- this.#canvas_height
- } `
+ path += `L ${this.#canvas_width} ${this.#canvas_height} `
//path += `L ${canvas_width} ${canvas_height} `
}
break
}
case ShapeType.Circle: {
for (let i = 0; i < arr.length; i++) {
- path += `L ${arr[i].x.toFixed(2)},${arr[
- i
- ].y.toFixed(2)} `
+ path += `L ${arr[i].x.toFixed(2)},${arr[i].y.toFixed(2)} `
}
break
}
case ShapeType.Waveform: {
for (let i = 0; i < arr.length; i += 2) {
- path += `L ${arr[i].x.toFixed(2)},${arr[
- i
- ].y.toFixed(2)} `
+ path += `L ${arr[i].x.toFixed(2)},${arr[i].y.toFixed(2)} `
}
for (let i = arr.length - 1; i >= 0; i -= 2) {
- path += `L ${arr[i].x.toFixed(2)},${arr[
- i
- ].y.toFixed(2)} `
+ path += `L ${arr[i].x.toFixed(2)},${arr[i].y.toFixed(2)} `
}
}
}
@@ -404,15 +325,9 @@ export class AudioVisual {
const anchors = this.#create_perpendicular_anchors(arr)
for (let i = 1; i < arr.length; i++) {
- path += `C ${anchors[i - 1].rightAnchor.x.toFixed(
- 2
- )} ${anchors[i - 1].rightAnchor.y.toFixed(2)} ${anchors[
+ path += `C ${anchors[i - 1].rightAnchor.x.toFixed(2)} ${anchors[i - 1].rightAnchor.y.toFixed(2)} ${anchors[
i
- ].leftAnchor.x.toFixed(2)} ${anchors[
- i
- ].leftAnchor.y.toFixed(2)} ${arr[i].x.toFixed(2)} ${arr[
- i
- ].y.toFixed(2)} `
+ ].leftAnchor.x.toFixed(2)} ${anchors[i].leftAnchor.y.toFixed(2)} ${arr[i].x.toFixed(2)} ${arr[i].y.toFixed(2)} `
}
if (this.#shape.shape_type == ShapeType.Line) {
//path += `L ${this.canvasWidth} ${this.canvasHeight / 2} `
@@ -432,10 +347,7 @@ export class AudioVisual {
break
}
case SmoothingAlgorythm.CatmullRom: {
- if (
- this.#shape.shape_type == ShapeType.Waveform &&
- this.#shape.symmetry == true
- ) {
+ if (this.#shape.shape_type == ShapeType.Waveform && this.#shape.symmetry == true) {
					//adding points so both halves end and start at the same center point
console.log(arr)
const first_half = [{ x: 0, y: this.#canvas_height / 2 }]
@@ -500,10 +412,7 @@ export class AudioVisualBuilder {
#from_fft_range
#to_fft_range
#point_count: number
- constructor(
- analyzer_node: AnalyserNode,
- svg_injecting_element: SVGSVGElement
- ) {
+ constructor(analyzer_node: AnalyserNode, svg_injecting_element: SVGSVGElement) {
this.#analyzer_node = analyzer_node
this.#svg_injecting_element = svg_injecting_element
this.#canvas_width = svg_injecting_element.viewBox.baseVal.width
@@ -515,10 +424,7 @@ export class AudioVisualBuilder {
this.#fft_offset = 150
this.#from_fft_range = 0
this.#to_fft_range = 100
- this.#point_count = Math.round(
- (this.#buffer_length / 100) *
- (this.#from_fft_range - this.#to_fft_range)
- )
+ this.#point_count = Math.round((this.#buffer_length / 100) * (this.#from_fft_range - this.#to_fft_range))
}
/**
* The smoothingTimeConstant property of the AnalyserNode interface is a double value representing the averaging constant with the last analysis frame. It's basically an average between the current buffer and the last buffer the AnalyserNode processed, and results in a much smoother set of value changes over time.
@@ -537,8 +443,7 @@ export class AudioVisualBuilder {
* @returns this
*/
set_fft_size(fft_size: number) {
- if (!(this.#fft_size && !(this.#fft_size & (this.#fft_size - 1))))
- throw Error("fft_size not power of two")
+		if (!(fft_size && !(fft_size & (fft_size - 1)))) throw Error("fft_size not power of two")
this.#analyzer_node.fftSize = this.#fft_size = fft_size
this.#buffer_length = this.#analyzer_node.frequencyBinCount
return this
@@ -571,9 +476,7 @@ export class AudioVisualBuilder {
set_fft_data_tresholds({
from_fft_range_i = 0,
to_fft_range_i = 100,
- point_count_i = Math.round(
- (this.#buffer_length / 100) * (from_fft_range_i - to_fft_range_i)
- ),
+ point_count_i = Math.round((this.#buffer_length / 100) * (from_fft_range_i - to_fft_range_i)),
fft_multiplier_i = 2,
fft_offset_i = -50
}) {
@@ -614,11 +517,7 @@ export class AudioVisualBuilder {
* @param shape_type Circle = 0; Line = 1;
* @returns `new AudioVisual`
*/
- build(
- shape_type: ShapeType,
- symmetry: boolean,
- waveform_options?: WaveformOptions
- ) {
+ build(shape_type: ShapeType, symmetry: boolean, waveform_options?: WaveformOptions) {
const shape = this.#create_shape(shape_type, symmetry, waveform_options)
return new AudioVisual(
this.#analyzer_node,
@@ -632,11 +531,7 @@ export class AudioVisualBuilder {
this.#point_count
)
}
- #create_shape(
- shape_type: ShapeType,
- symmetry: boolean,
- waveform_options?: WaveformOptions
- ): Shape {
+ #create_shape(shape_type: ShapeType, symmetry: boolean, waveform_options?: WaveformOptions): Shape {
const point_amount = this.#get_cured_frequency_data().length
let new_shape: Shape
switch (shape_type) {
@@ -657,20 +552,11 @@ export class AudioVisualBuilder {
}
case ShapeType.Circle: {
const points = []
- const radius =
- this.#canvas_height > this.#canvas_width
- ? this.#canvas_height / 5
- : this.#canvas_width / 5
+ const radius = this.#canvas_height > this.#canvas_width ? this.#canvas_height / 5 : this.#canvas_width / 5
for (let i = 0; i < point_amount; i++) {
points.push({
- x:
- Math.cos(
- ((2 * Math.PI) / point_amount) * i - Math.PI / 2
- ) * radius,
- y:
- Math.sin(
- ((2 * Math.PI) / point_amount) * i - Math.PI / 2
- ) * radius
+ x: Math.cos(((2 * Math.PI) / point_amount) * i - Math.PI / 2) * radius,
+ y: Math.sin(((2 * Math.PI) / point_amount) * i - Math.PI / 2) * radius
})
}
@@ -683,16 +569,10 @@ export class AudioVisualBuilder {
}
case ShapeType.Waveform: {
if (waveform_options === undefined) {
- console.error(
- "Waveform options undefined at shapetype.waveform, please define!"
- )
- throw Error(
- "Waveform options undefined at shapetype.waveform, please define!"
- )
+ console.error("Waveform options undefined at shapetype.waveform, please define!")
+ throw Error("Waveform options undefined at shapetype.waveform, please define!")
}
- const fft_length = this.#get_cured_frequency_data(
- waveform_options.fft_data
- ).length
+ const fft_length = this.#get_cured_frequency_data(waveform_options.fft_data).length
const points = []
for (let i = 0; i < fft_length; i++) {
let x, y
@@ -702,16 +582,10 @@ export class AudioVisualBuilder {
} else {
throw Error("WaveformShape.Striped not implemented yet")
}
- waveform_options.orientation ==
- WaveformOrientation.Horizontal
- ? points.push({ x: x, y: y })
- : points.push({ x: y, y: x })
+ waveform_options.orientation == WaveformOrientation.Horizontal ? points.push({ x: x, y: y }) : points.push({ x: y, y: x })
					//Double the points needed for symmetry
if (symmetry) {
- waveform_options.orientation ==
- WaveformOrientation.Horizontal
- ? points.push({ x: x, y: y })
- : points.push({ x: y, y: x })
+ waveform_options.orientation == WaveformOrientation.Horizontal ? points.push({ x: x, y: y }) : points.push({ x: y, y: x })
}
}
new_shape = {
@@ -731,16 +605,9 @@ export class AudioVisualBuilder {
fft_data = new Float32Array(this.#buffer_length)
this.#analyzer_node.getFloatFrequencyData(fft_data)
}
- const from = Math.round(
- (this.#point_count / 100) * this.#from_fft_range
- )
- const to = Math.round(
- this.#buffer_length -
- (this.#buffer_length / 100) * this.#to_fft_range
- )
- const squeezeFactor = Math.round(
- (this.#buffer_length - to) / this.#point_count
- )
+ const from = Math.round((this.#point_count / 100) * this.#from_fft_range)
+ const to = Math.round(this.#buffer_length - (this.#buffer_length / 100) * this.#to_fft_range)
+ const squeezeFactor = Math.round((this.#buffer_length - to) / this.#point_count)
const return_array = new Array(this.#point_count)
for (let i = 0; i < this.#point_count; i++) {
diff --git a/tools/scripts/publish.mjs b/tools/scripts/publish.mjs
index 352ddb1..95ae7ae 100644
--- a/tools/scripts/publish.mjs
+++ b/tools/scripts/publish.mjs
@@ -35,16 +35,10 @@ invariant(
const graph = readCachedProjectGraph()
const project = graph.nodes[name]
-invariant(
- project,
- `Could not find project "${name}" in the workspace. Is the project.json configured correctly?`
-)
+invariant(project, `Could not find project "${name}" in the workspace. Is the project.json configured correctly?`)
const outputPath = project.data?.targets?.build?.options?.outputPath
-invariant(
- outputPath,
- `Could not find "build.options.outputPath" of project "${name}". Is project.json configured correctly?`
-)
+invariant(outputPath, `Could not find "build.options.outputPath" of project "${name}". Is project.json configured correctly?`)
process.chdir(outputPath)
@@ -54,11 +48,7 @@ try {
json.version = version
writeFileSync(`package.json`, JSON.stringify(json, null, 2))
} catch (e) {
- console.error(
- chalk.bold.red(
- `Error reading package.json file from library build output.`
- )
- )
+ console.error(chalk.bold.red(`Error reading package.json file from library build output.`))
}
// Execute "npm publish" to publish