fix underflowing symmetry lines, better errors
parent 4f48ef3e22
commit b9ea1928b9

11 changed files with 200 additions and 65 deletions
.gitignore (vendored): 14 changes

@@ -37,3 +37,17 @@ testem.log
 # System Files
 .DS_Store
 Thumbs.db
+
+# video and audio files
+*.mp3
+*.mp4
+*.mov
+*.webm
+*.webp
+*.ogg
+*.wav
+*.avi
+*.png
+*.jpeg
+*.jpg
+*.exr
@@ -124,37 +124,17 @@ export class MusicPlayer {
         return new Promise((resolve, reject) => {
             if (this.track.context.state == "closed" || this.track.context.state == "suspended") {
                 this.is_playing = false
-                reject("Can't seek - track not playing")
+                reject(new Error("Can't seek - track not playing"))
             }
             this.audio_element.currentTime = new_time
             resolve(null)
-            /*audio_element.play().then((s) => resolve(s), (r) => {
-                is_playing = false
-                reject(r)
-            })*/
         })
     }
-    // THIS MIGHT BE UNNECESSARY? CUZ SEEKING DOESN'T REQUIRE PLAY
-    // /**
-    // * Can try to seek even if the audio context was suspended or closed. Best to use try_seek_async()
-    // */
-    // seek_async(new_time: number) {
-    // return new Promise((resolve, reject) => {
-    // this.audio_element.currentTime = new_time
-    // resolve(null)
-    // /* audio_element.play().then((s) => resolve(s), (r) => {
-    // is_playing = false
-    // reject(r)
-    // })*/
-    // })
-    // // }
-    // /**
-    // * Unsafe, throws error if failed. Use try_seek_async or seek_async unless you don't care about the result.
-    // */
-
+    /**
+     * Unsafe, throws error if failed. Use try_seek_async or seek_async unless you don't care about the result.
+     */
     seek(new_time: number) {
         this.audio_element.currentTime = new_time
-        //this.audio_element.play().catch((e) => { throw e })
     }
     /**
      * Safer play_toggle_async. Normal play_toggle will try to start the player even if the track hasn't started yet, or was previously suspended/closed
@@ -163,7 +143,7 @@ export class MusicPlayer {
         return new Promise((resolve, reject) => {
             if (this.audio_context.state === "suspended" || this.audio_context.state === "closed") {
                 this.audio_context.resume().then(undefined, (e) =>
-                    reject("Context closed or suspended" + e))
+                    reject(new Error("Context closed or suspended" + JSON.stringify(e))))
             }
             if (this.audio_element.paused) {
                 this.audio_element.play().then((s) => {
@@ -171,7 +151,7 @@ export class MusicPlayer {
                     resolve(s)
                 }, (r) => {
                     this.is_playing = false
-                    reject(r)
+                    reject(new Error("failed to play audio elements" + JSON.stringify(r)))
                 })
             } else {
                 this.audio_element.pause()
@@ -194,7 +174,7 @@ export class MusicPlayer {
                     resolve(s)
                 }, (r) => {
                     this.is_playing = false
-                    reject(r)
+                    reject(new Error(JSON.stringify(r)))
                 })
             } else {
                 this.audio_element.pause()
@@ -223,18 +203,27 @@ export class MusicPlayer {
      */
     try_play_async() {
         return new Promise((resolve, reject) => {
-            if (this.is_playing) reject(Error("Already playing"))
+            if (this.is_playing) resolve(Error("Already playing"))
             if (this.audio_context.state === "suspended" || this.audio_context.state === "closed") {
-                this.audio_context.resume().then(undefined, (e) =>
-                    reject("Context closed or suspended" + e))
+                this.audio_context.resume().then(() => {
+                    this.audio_element.play().then((s) => {
+                        this.is_playing = true
+                        resolve(s)
+                    }, (r) => {
+                        this.is_playing = false
+                        reject(new Error(JSON.stringify(r)))
+                    })
+                }, (e) =>
+                    reject(new Error("Context closed or suspended" + JSON.stringify(e))))
+            } else {
+                this.audio_element.play().then((s) => {
+                    this.is_playing = true
+                    resolve(s)
+                }, (r) => {
+                    this.is_playing = false
+                    reject(new Error(JSON.stringify(r)))
+                })
             }
-            this.audio_element.play().then((s) => {
-                this.is_playing = true
-                resolve(s)
-            }, (r) => {
-                this.is_playing = false
-                reject(r)
-            })
         })
     }
     /**
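Aside on the hunk above: playback is now attempted only after resume() has resolved, instead of being fired unconditionally while the context might still be suspended. A condensed sketch of that ordering, outside the class, with hypothetical audio/ctx names standing in for the player's element and context:

// Sketch only: resume-then-play ordering, not the actual class method.
function try_play(audio: HTMLAudioElement, ctx: AudioContext): Promise<void> {
    if (ctx.state === "suspended" || ctx.state === "closed") {
        // attempt playback only once the context has actually resumed
        return ctx.resume().then(
            () => audio.play(),
            (e) => { throw new Error("Context closed or suspended" + JSON.stringify(e)) }
        )
    }
    return audio.play()
}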
@@ -242,16 +231,13 @@ export class MusicPlayer {
      */
     play_async() {
         return new Promise((resolve, reject) => {
-            if (this.is_playing) resolve(null)
-            if (this.audio_context.state === "suspended" || this.audio_context.state === "closed") {
-                this.audio_context.resume()
-            }
+            if (this.is_playing) resolve(Error("Already playing"))
             this.audio_element.play().then((s) => {
                 this.is_playing = true
                 resolve(s)
             }, (r) => {
                 this.is_playing = false
-                reject(r)
+                reject(new Error(JSON.stringify(r)))
             })
         })
     }
@@ -262,7 +248,7 @@ export class MusicPlayer {
         if (this.is_playing) return
         this.audio_element.play().catch((r) => {
             this.is_playing = false
-            throw r
+            throw new Error(r)
         })
     }
     /**
@@ -288,12 +274,12 @@ export class MusicPlayer {

             this.audio_element.addEventListener("error", function error_listener(e) {
                 controller.abort()
-                reject(e)
+                reject(new Error("Failed to load new song, error:" + JSON.stringify(e)))
             }, { signal: controller.signal })

             this.audio_element.addEventListener("stalled", function stalled_listener(e) {
                 controller.abort()
-                reject(e)
+                reject(new Error("Failed to load new song, stalled: " + JSON.stringify(e)))
             }, { signal: controller.signal })

             //once aborted, try to set current_song_duration
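The common thread in the player hunks above: rejections and throws now always carry an Error, and non-Error reasons (media element rejection values, DOM events) are serialized with JSON.stringify so the message survives logging. A minimal sketch of that pattern as a standalone helper; the helper is hypothetical, the commit inlines the wrapping at each call site:

// Hypothetical helper, not part of the commit.
function as_error(prefix: string, reason: unknown): Error {
    if (reason instanceof Error) return reason
    return new Error(prefix + JSON.stringify(reason))
}

// usage sketch:
// audio_element.play().catch((r) => { throw as_error("failed to play audio element: ", r) })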
packages/preprocessor/.gitignore (vendored): 1 change

@@ -25,5 +25,6 @@ dist-ssr

 # public samples
 public/samples/*
+public/media/*
 src/song_list.ts
 src/db.js
@@ -12,29 +12,55 @@
     <div id="app"></div>
     <script type="module" src="/src/main.ts">
     </script>
-    <button id="button">Analyze!</button>
-    <svg id="waveform-canvas" viewBox="0 0 500 500" preserveAspectRatio="none"></svg>
+    <div class="button-wrapper">
+        <button id="button">Analyze!</button>
+        <a id="download" href="">Download DB</a>
+    </div>
+    <div class="canvas-wrapper">
+        <svg id="waveform-canvas" viewBox="0 0 500 500" preserveAspectRatio="none"></svg>
+    </div>
     <style>
         body {
             width: 100%;
-            height: 100vh;
+            min-height: 100vh;
             padding: 0;
             margin: 0;
+            /*
             display: flex;
             justify-content: center;
             align-items: center;
             flex-direction: column;
+            */
         }

-        button {
+        .button-wrapper {
+            display: flex;
+            width: 100%;
+            height: 4rem;
+            justify-content: center;
+            align-items: center;
+
+        }
+
+        .button-wrapper>* {
             padding: 1.2rem 2rem;
+            width: 100%;
+            margin: 0 auto;
         }

         svg {
-            margin-top: 5rem;
-            width: 500px;
-            height: 500px;
+            width: 100%;
+            height: 200px;
             border: 1px solid black;
+            grid-column-start: content;
+        }
+
+        .canvas-wrapper {
+            display: grid;
+            grid-template-columns: 0.5fr [content] 1fr 0.5fr;
+            column-gap: 20px;
+            justify-items: stretch;
+            align-items: stretch;
         }
     </style>
 </body>
@@ -6,6 +6,7 @@
     "scripts": {
         "crawl": "node crawler.mjs",
         "process": "node crawler.mjs && vite",
+        "media-process": "cd src && node media_process.js",
         "serve": "vite",
         "build": "tsc && vite build",
         "preview": "vite preview"
@@ -15,7 +16,7 @@
         "vite": "^4.4.0"
     },
     "dependencies": {
-        "filehound": "^1.17.6",
-        "@euterpe.js/music-library": "*"
+        "@euterpe.js/music-library": "*",
+        "filehound": "^1.17.6"
     }
 }
@@ -42,7 +42,7 @@ export function generate_db() {
         const db_song = new Song({
             name: name.slice(0, name.lastIndexOf(".")),
             artists: [],
-            url: new URL("http://localhost:4200/" + song_url),
+            url: new URL("http://localhost:4201/" + song_url),
             duration: 0,
             remix_artists: [],
             in_collection: new Ref(RefTo.Collections, song.collection_id)
@@ -11,17 +11,20 @@ export async function start() {
         const waveform_canvas = document.querySelector("#waveform-canvas") as SVGSVGElement
         for (const song of result.db.songs) {
             console.log("creating waveform for -> " + song.name)
-            const waveform_visual_builder = new AudioVisualBuilder(result.analyzer_node, waveform_canvas)
-                .set_fft_data_tresholds({ point_count_i: 100, fft_multiplier_i: 1, fft_offset_i: -80 })
+            const curr_waveform_canvas = waveform_canvas.cloneNode() as SVGSVGElement
+            waveform_canvas.parentElement?.append(curr_waveform_canvas)
+            const waveform_visual_builder = new AudioVisualBuilder(result.analyzer_node, curr_waveform_canvas)
+                .set_fft_data_tresholds({ point_count_i: 100, fft_multiplier_i: .6, fft_offset_i: -75 })
                 .set_fft_time_smoothing(0.8)
                 .set_smoothing_algorythm(SmoothingAlgorythm.CatmullRom)
             const waveform_visual = waveform_visual_builder.build(ShapeType.Waveform, true, { fft_data: new Float32Array(new Float64Array(song.fft_data!)), orientation: WaveformOrientation.Horizontal, shape_type: WaveformShape.LineLike })
             waveform_visual.draw_once()
             await new Promise<void>((done) => setTimeout(() => done(), 300))
             // @ts-ignore
-            song.metadata[0] = waveform_canvas.innerHTML
+            song.metadata[0] = curr_waveform_canvas.children[0].getAttribute("d")
             song.fft_data = []
         }
+        waveform_canvas.remove()
         console.dir(result.db, { depth: null })
         download(JSON.stringify(result.db), "db.json", "text/plain")
     })
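In the hunk above, each song now gets its own cloned <svg> to draw into, and only the generated path data is stored, where previously every iteration serialized the single shared canvas's innerHTML. A sketch of the clone-and-extract step, assuming the builder appends one <path> child to whatever SVG element it is handed:

// Sketch: per-song clone of the template SVG, keeping only the path's "d" string.
const template = document.querySelector("#waveform-canvas") as SVGSVGElement
const clone = template.cloneNode() as SVGSVGElement // shallow clone, no children copied
template.parentElement?.append(clone)
// ...AudioVisualBuilder draws the waveform into `clone` here...
const path_data = clone.children[0].getAttribute("d") // just the curve, not the full markup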
@@ -45,6 +48,8 @@ async function analyze(): Promise<AnalyzeReturn> {
     track.connect(audioContextAnalyser).connect(gain).connect(audioContext.destination)

     let db = generate_db()
+    // db.songs.splice(0, 10)
+    // db.songs.splice(2)
     console.log(db)
     for (const song of db.songs) {
         // const song = db.songs[db.songs.length - 1]
@@ -73,11 +78,11 @@ async function analyze(): Promise<AnalyzeReturn> {
     return result
 }
 function download(content: BlobPart, fileName: string, contentType: string) {
-    var a = document.createElement("a");
+    var a = document.querySelector("#download") as HTMLAnchorElement;
     var file = new Blob([content], { type: contentType });
     a.href = URL.createObjectURL(file);
     a.download = fileName;
-    a.click();
+    // a.click();
 }

 type AnalyzeReturn = {
packages/preprocessor/src/media_process.js (new file): 99 lines

@@ -0,0 +1,99 @@
+/**
+ * TODO:
+ * -add back -metadata
+ *
+ */
+
+import filehound from "filehound"
+import { execSync, exec } from 'child_process'
+import { fstat, unlinkSync } from "fs"
+
+function generateNewPhotoSizes(file, currentExtention) {
+    exec(`start ffmpeg -y -i "${file}.${currentExtention}" -lossless 0 -quality 85 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_ogw.webp" -vf scale=1000:-1 -lossless 0 -quality 85 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_1000w.webp" -vf scale=800:-1 -lossless 0 -quality 85 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_800w.webp" -vf scale=500:-1 -lossless 0 -quality 85 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_500w.webp" -vf scale=320:-1 -lossless 0 -quality 85 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_320w.webp" -vf scale=-1:64,gblur=sigma=10:steps=2 -lossless 0 -compression_level 6 -quality 85 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_placeholder.webp"`)
+}
+function generateNewAnimPhotoSizes(file, currentExtention) {
+    exec(`start ffmpeg -y -i "${file}.${currentExtention}" -lossless 0 -frames:v 1 -r 1 -quality 85 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_ogw_static.webp" -vf scale=1000:-1 -lossless 0 -frames:v 1 -r 1 -quality 85 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_1000w_static.webp" -vf scale=800:-1 -lossless 0 -frames:v 1 -r 1 -quality 85 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_800w_static.webp" -vf scale=500:-1 -lossless 0 -frames:v 1 -r 1 -quality 85 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_500w_static.webp" -vf scale=320:-1 -lossless 0 -frames:v 1 -r 1 -quality 85 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_320w_static.webp" -vf scale=-1:64,gblur=sigma=10:steps=2 -lossless 0 -frames:v 1 -r 1 -compression_level 6 -quality 85 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_placeholder_static.webp"`)
+    exec(`start ffmpeg -y -i "${file}.${currentExtention}" -lossless 0 -quality 85 -loop 0 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_ogw.webp" -vf scale=1000:-1 -lossless 0 -quality 85 -loop 0 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_1000w.webp" -vf scale=800:-1 -lossless 0 -quality 85 -loop 0 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_800w.webp" -vf scale=500:-1 -lossless 0 -quality 85 -loop 0 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_500w.webp" -vf scale=320:-1 -lossless 0 -quality 85 -loop 0 -compression_level 6 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_320w.webp" -vf scale=-1:64,gblur=sigma=10:steps=2 -frames:v 1 -lossless 0 -c:v libwebp -compression_level 6 -quality 85 -metadata author="Djkáťo" -metadata copyright="https://djkato.net" "${file}_placeholder.webp"`)
+}
+function generateNewSounds(file, currentExtention) {
+    const path = file.substring(0, file.lastIndexOf("\\"))
+    file = file.substring(file.lastIndexOf("\\") + 1)
+
+    let command = ""
+    command += `cd "${path}" && start cmd /k "`
+    command += `ffmpeg -y -i "${file}.${currentExtention}" `
+    command += `-c:a libopus -b:a 128k "${file}.ogg"`
+    command += ` && exit"`
+    exec(command)
+    // console.log(command)
+}
+function generateNewVideoSizes(file, currentExtention, width_resolutions) {
+    const path = file.substring(0, file.lastIndexOf("\\"))
+    file = file.substring(file.lastIndexOf("\\") + 1)
+
+    let command = ""
+    command += `cd "${path}" && `
+    command += `del ffmpeg2pass-0.log && `
+    command += `ffmpeg -y -i "${file}.${currentExtention}" `
+    command += `-vcodec libvpx-vp9 -cpu-used 0 -deadline good -quality good -g 240 -crf 42 -b:v 0 -c:a libopus -row-mt 1 -tile-rows 2 -tile-columns 4 -threads 16 -auto-alt-ref 6 `
+    command += `-pass 1 -f webm NUL && exit`
+
+    exec(command).once("exit", () => {
+        for (const resolution of width_resolutions) {
+            let res_command = ""
+            res_command += `start cmd /k "`
+            res_command += `cd "${path}" && `
+            res_command += `ffmpeg -y -i "${file}.${currentExtention}" `
+            res_command += `-vcodec libvpx-vp9 -cpu-used 0 -deadline good -quality good -g 240 -vf scale=${resolution}:-1 -crf 42 -b:v 0 -c:a libopus -row-mt 1 -tile-rows 2 -tile-columns 4 -threads 16 -auto-alt-ref 6 -pass 2 "${file}_${resolution}p.webm"`
+            res_command += "&& exit\""
+            exec(res_command)
+        }
+    })
+}
+let dirs = filehound.create()
+    .path("../public/")
+    .directory()
+    .findSync()
+console.log(dirs)
+
+for (let i = 0; i < dirs.length; i++) {
+    //gets current name file+ext
+    let current_folder_files = filehound.create()
+        .path(`${dirs[i]}`)
+        .findSync()
+
+    if (current_folder_files[0] != undefined) {
+        //if previous encode was cancelled and 2pass log not removed, remove it :)
+        if (current_folder_files[0].includes("ffmpeg2pass-0.log")) {
+            try { unlinkSync(`${dirs[i]}/ffmpeg2pass-0.log`) } catch (err) { }
+            current_folder_files = current_folder_files.slice(1)
+        }
+        for (let current_media of current_folder_files) {
+            current_media = [current_media.substring(0, current_media.lastIndexOf(".")), current_media.substring(current_media.lastIndexOf(".") + 1)]
+            if (current_media[1] == "wav" || current_media[1] == "mp3") {
+                console.log(`${current_media[0]}.${current_media[1]}\n`)
+
+                generateNewSounds(`${current_media[0]}`, `${current_media[1]}`)
+            }
+            if (current_media[1] == "png" || current_media[1] == "jpg") {
+                console.log(`.\\${current_media[0]}.${current_media[1]}\n`)
+
+                generateNewPhotoSizes(`.\\${current_media[0]}`, `${current_media[1]}`)
+            }
+            else if (current_media[1] == "gif") {
+                console.log(`.\\${current_media[0]}.${current_media[1]}\n`)
+
+                generateNewAnimPhotoSizes(`.\\${current_media[0]}`, `${current_media[1]}`)
+            }
+            else if (current_media[1] == "webm" || current_media[1] == "mov" || current_media[1] == "avi" || current_media[1] == "mp4") {
+                //console.log(`Video: ${current_media[0]}.${current_media[1]}\n`)
+
+                // generateNewVideoSizes(`${current_media[0]}`, `${current_media[1]}`, [2560, 1080, 720, 480])
+
+            }
+        }
+    }
+}
+
+
+
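The audio branch of the new script boils down to one Opus transcode per sample. A sketch of the command string generateNewSounds assembles for a hypothetical file, with the Windows-style paths and start cmd /k wrapper the script assumes:

// Hypothetical input: ..\public\samples\drums\kick.wav
const path = "..\\public\\samples\\drums"
const file = "kick"
const ext = "wav"
const command =
    `cd "${path}" && start cmd /k "` +
    `ffmpeg -y -i "${file}.${ext}" ` +
    `-c:a libopus -b:a 128k "${file}.ogg"` +
    ` && exit"`
// -> cd "..\public\samples\drums" && start cmd /k "ffmpeg -y -i "kick.wav" -c:a libopus -b:a 128k "kick.ogg" && exit"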
@@ -1 +1 @@
-export const songs = ["public\\samples\\bass\\H2 arp basses 01.ogg","public\\samples\\bass\\H2 arp basses 02.ogg","public\\samples\\bass\\H2 arp basses 03.ogg","public\\samples\\drums\\H2 909 Crash 03.ogg","public\\samples\\drums\\H2 buildsnares cut 01.ogg","public\\samples\\drums\\H2 buildsnares cut 04.ogg","public\\samples\\drums\\H2 buildsnares cut 06.ogg","public\\samples\\drums\\H2 Claps 07.ogg","public\\samples\\drums\\H2 Claps 21.ogg","public\\samples\\drums\\H2 Claps Tails 11.ogg","public\\samples\\drums\\H2 Claps Tails 15.ogg","public\\samples\\drums\\H2 clubsnares 08.ogg","public\\samples\\drums\\H2 clubsnares 15.ogg","public\\samples\\drums\\H2 clubsnares 20.ogg","public\\samples\\drums\\H2 Trancekick 13.ogg","public\\samples\\drums\\H2 Trancekick 24.ogg","public\\samples\\drums\\H2 Trancekick 45.ogg","public\\samples\\loops\\H2 loops 01.ogg","public\\samples\\loops\\H2 loops 02.ogg"]
+export const songs = ["public\\samples\\bass\\arp bass noise 5.ogg","public\\samples\\bass\\arp bass noise 6.ogg","public\\samples\\bass\\lead bass 7.ogg","public\\samples\\bass\\sub bass 5.ogg","public\\samples\\demos\\nuphory - NVISION (EXTENDED MIX).ogg","public\\samples\\drums\\H2 buildsnares long 11.ogg","public\\samples\\drums\\H2 Claps 21.ogg","public\\samples\\drums\\H2 clubsnares 14.ogg","public\\samples\\drums\\H2 Trancekick 08.ogg","public\\samples\\FX\\H2 Boom Kicks 05.ogg","public\\samples\\FX\\H2 Depth Charge 07.ogg","public\\samples\\FX\\H2 Downlifters 11.ogg","public\\samples\\FX\\H2 Noisesweeps 08.ogg","public\\samples\\kicks\\H2 Trancekick 08.ogg","public\\samples\\kicks\\H2 Trancekick 20.ogg","public\\samples\\kicks\\H2 Trancekick 28.ogg","public\\samples\\kicks\\H2 Trancekick 34.ogg","public\\samples\\loops\\H2 Closed Hat Loop 160BPM 02.ogg","public\\samples\\loops\\H2 loops 04.ogg","public\\samples\\loops\\H2 Open Hat Loop 160BPM 06.ogg","public\\samples\\loops\\H2 Perc Loop 160BPM 03.ogg","public\\samples\\synths\\H2 Pads 01 C Saw.ogg","public\\samples\\synths\\H2 Pads 13 F Voc.ogg","public\\samples\\synths\\H2 Pads 20 G Saw.ogg","public\\samples\\synths\\H2 Pads 23 D String.ogg"]
@@ -7,7 +7,7 @@ export default defineConfig({
     cacheDir: "../../node_modules/.vite/preprocessor",

     server: {
-        port: 4200,
+        port: 4201,
         host: "localhost"
     },

@@ -215,8 +215,9 @@ export class AudioVisual {
         case ShapeType.Waveform: {
             if (this.#shape.waveform_options!.shape_type == WaveformShape.LineLike) {
                 if (this.#shape.symmetry) {
-                    for (let i = 0; i < this.#shape.points.length - 1; i += 2) {
-                        const mutator = this.#convert_range(frequency_data[i / 2] * this.#fft_multiplier + this.#fft_offset, in_range, out_range)
+                    for (let i = 0; i < this.#shape.points.length; i += 2) {
+                        let mutator = this.#convert_range(frequency_data[i / 2] * this.#fft_multiplier + this.#fft_offset, in_range, out_range)
+                        if (mutator <= 0) mutator = 2
                         if (this.#shape.waveform_options!.orientation == WaveformOrientation.Horizontal) {
                             mutated_points.push({
                                 x: this.#shape.points[i].x,
@@ -262,6 +263,7 @@ export class AudioVisual {

     #convert_range(value: number, r1: number[], r2: number[]) {
         if (!isFinite(value)) return 0
+        if (value < r1[0]) return 0
         return ((value - r1[0]) * (r2[1] - r2[0])) / (r1[1] - r1[0]) + r2[0]
     }

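This is the underflow the commit title refers to: when a frequency bin fell below the input range, #convert_range extrapolated a negative offset, which let the mirrored halves of a symmetric waveform cross the centerline. With the hunk above, input below the range now maps to 0 (and the earlier hunk floors the mutator at 2). A standalone sketch of the clamped mapping, with made-up in/out ranges for illustration:

// Sketch of the clamped range mapping (ranges here are hypothetical).
function convert_range(value: number, r1: number[], r2: number[]): number {
    if (!isFinite(value)) return 0
    if (value < r1[0]) return 0 // underflowing input no longer extrapolates to a negative offset
    return ((value - r1[0]) * (r2[1] - r2[0])) / (r1[1] - r1[0]) + r2[0]
}

// e.g. convert_range(-120, [-80, 0], [0, 250]) now returns 0 instead of -125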
@@ -340,6 +342,7 @@ export class AudioVisual {
         case SmoothingAlgorythm.CatmullRom: {
             if (this.#shape.shape_type == ShapeType.Waveform && this.#shape.symmetry == true) {
                 //adding points so both halfs ends and start at the same center point
+                console.log(arr)
                 const first_half = [{ x: 0, y: this.#canvas_height / 2 }]
                 const second_half = [{ x: 0, y: this.#canvas_height / 2 }]
                 for (let i = 0; i < arr.length - 1; i += 2) {