big visualizer upgrade

Djkato 2023-07-19 00:08:15 +02:00
parent 4e0d491ea0
commit bb9b9c425c
6 changed files with 292 additions and 46 deletions


@@ -15,6 +15,16 @@
     <div class="viz-wrapper">
         <svg id="trapnation-canvas" viewBox="0 0 500 500" preserveAspectRatio="none"></svg>
         <svg id="bar-canvas" viewBox="0 0 500 500" preserveAspectRatio="none"></svg>
+        <svg id="waveform-canvas" viewBox="0 0 500 500" preserveAspectRatio="none">
+            <clipPath id="clip-seek">
+                <rect id="clip-seek-rect" width="500" height="500" />
+            </clipPath>
+            <clipPath id="clip-time">
+                <rect id="clip-time-rect" width="500" height="500" />
+            </clipPath>
+            <use clip-path="url(#clip-seek)" href="#waveform-seek" />
+            <use clip-path="url(#clip-time)" href="#waveform-time" />
+        </svg>
     </div>
     <div class="player-wrapper">
         <audio src="" id="audio"></audio>


@@ -1,12 +1,17 @@
 import { MusicPlayerBuilder } from "@euterpe.js/player";
-import { AudioVisualBuilder, SmoothingAlgorythm, ShapeType } from "@euterpe.js/visualizer"
+import { fft_data } from "./waveform_data";
+import { AudioVisualBuilder, SmoothingAlgorythm, ShapeType, WaveformOrientation, WaveformShape } from "@euterpe.js/visualizer"
 const audio_el = document.querySelector("#audio") as HTMLAudioElement
 const music_player_builder = new MusicPlayerBuilder(audio_el)
 const trapnation_analyser_node = music_player_builder.add_analyser()
 const bar_analyser_node = music_player_builder.add_analyser()
 const music_player = music_player_builder.build()
-music_player.change_volume(1)
+music_player.change_volume(.5)
+const waveform_canvas = document.querySelector("#waveform-canvas") as SVGSVGElement
+const seek_element = document.querySelector("#seek") as HTMLInputElement
+const duration_element = document.querySelector("#duration") as HTMLElement
+const current_time_element = document.querySelector("#current") as HTMLElement
 /**
  * Create the Audio Visualizer
  */
@@ -18,23 +23,84 @@ const trapnation_visual_builder = new AudioVisualBuilder(trapnation_analyser_nod
     .set_fft_time_smoothing(0.6)
     //If not using typescript enums, CatmullRom = number 2
     .set_smoothing_algorythm(SmoothingAlgorythm.CatmullRom)
-const trapnation_visual = trapnation_visual_builder.build(ShapeType.Circle)
+const trapnation_visual = trapnation_visual_builder.build(ShapeType.Circle, false)
 const bar_visual_builder = new AudioVisualBuilder(bar_analyser_node, document.querySelector("#bar-canvas") as SVGSVGElement)
-    .set_fft_data_tresholds({ point_count_i: 50, fft_multiplier_i: 3, fft_offset_i: -30 })
+    .set_fft_data_tresholds({ point_count_i: 50, fft_multiplier_i: 2, fft_offset_i: -100 })
     .set_fft_time_smoothing(0.8)
     .set_smoothing_algorythm(SmoothingAlgorythm.BezierPerpendicular)
-const bar_visual = bar_visual_builder.build(ShapeType.Line)
+const bar_visual = bar_visual_builder.build(ShapeType.Line, false)
+const waveform_visual_builder = new AudioVisualBuilder(bar_analyser_node, waveform_canvas)
+    .set_fft_data_tresholds({ point_count_i: 100, fft_multiplier_i: 1, fft_offset_i: -80 })
+    .set_fft_time_smoothing(0.8)
+    .set_smoothing_algorythm(SmoothingAlgorythm.CatmullRom)
+const waveform_visual = waveform_visual_builder.build(ShapeType.Waveform, true, { fft_data: new Float32Array(fft_data.fft_data), orientation: WaveformOrientation.Horizontal, shape_type: WaveformShape.LineLike })
 trapnation_visual.draw()
 bar_visual.draw()
+waveform_visual.draw_once()
+//Here I create 2 duplicate elements of the waveform, set their opacity to 1/2, map one to current song time, other to seeking on hover
+const waveform_path_seek = waveform_canvas.children[0].cloneNode()
+const waveform_path_time = waveform_canvas.children[0].cloneNode()
+waveform_path_seek.id = "waveform-seek"
+waveform_path_time.id = "waveform-time"
+waveform_path_seek.classList.add("waveform-seek")
+waveform_path_time.classList.add("waveform-time")
+waveform_canvas.appendChild(waveform_path_time)
+waveform_canvas.appendChild(waveform_path_seek)
+/*
+const time_clip_path = document.createElement("clipPath")
+const seek_clip_path = document.createElement("clipPath")
+const seek_clip_rect = document.createElement("rect")
+const time_clip_rect = document.createElement("rect")
+const time_clip_use = document.createElement("use")
+const seek_clip_use = document.createElement("use")
+time_clip_path.id = "clip-time"
+seek_clip_path.id = "clip-seek"
+time_clip_rect.id = "clip-time-rect"
+time_clip_rect.setAttribute("width", "0")
+time_clip_rect.setAttribute("height", "500")
+seek_clip_rect.id = "clip-seek-rect"
+seek_clip_rect.setAttribute("width", "200")
+seek_clip_rect.setAttribute("height", "500")
+time_clip_use.setAttribute("clip-path", "url(#clip-time)")
+time_clip_use.setAttribute("href", "#waveform-time")
+time_clip_use.classList.add("clipping-waveform")
+seek_clip_use.classList.add("clipping-waveform")
+seek_clip_use.setAttribute("clip-path", "url(#clip-seek)")
+seek_clip_use.setAttribute("href", "#waveform-seek")
+waveform_canvas.appendChild(time_clip_path)
+waveform_canvas.appendChild(seek_clip_path)
+waveform_canvas.appendChild(time_clip_use)
+waveform_canvas.appendChild(seek_clip_use)
+seek_clip_path.appendChild(seek_clip_rect)
+time_clip_path.appendChild(time_clip_rect)
+*/
+function convert_range(value: number, r1: number[], r2: number[]) {
+    return ((value - r1[0]) * (r2[1] - r2[0])) / (r1[1] - r1[0]) + r2[0]
+}
+waveform_canvas.addEventListener("mousemove", (e) => {
+    const rect = e.target.getBoundingClientRect()
+    const x = e.clientX - rect.left
+    const resX = convert_range(x, [0, rect.width], [0, waveform_canvas.viewBox.baseVal.width + 40])
+    const polygon = `polygon(0 0, ${resX}px 0, ${resX}px 100%, 0 100%)`
+    document.documentElement.style.setProperty("--clip-seek-path", polygon)
+})
+waveform_canvas.addEventListener("mouseleave", (e) => {
+    const polygon = `polygon(0 0, 0 0, 0 100%, 0 100%)`
+    document.documentElement.style.setProperty("--clip-seek-path", polygon)
+})
 /*
  * The player part
  */
-music_player.try_new_song_async(encodeURI("http://127.0.0.1:4200/nuphory - NVISION (EXTENDED MIX).ogg"))
+music_player.try_new_song_async(encodeURI("http://localhost:4200/nuphory - NVISION (EXTENDED MIX).ogg"))
     .then(() => {
         let is_seeking = false
         document.querySelector("#play")?.addEventListener("click", () => {
@@ -70,15 +136,18 @@ music_player.try_new_song_async(encodeURI("http://127.0.0.1:4200/nuphory - NVISI
         })
         // Subscriptions to AudioContext changes, eg. time..
         music_player.on_duration_formatted((time) => {
-            document.querySelector("#duration").innerHTML = time
-            document.querySelector("#seek").max = "" + music_player.current_song_duration
+            duration_element.innerHTML = time
+            seek_element.max = "" + music_player.current_song_duration
         })
         music_player.on_time_tick_formatted((time) => {
-            document.querySelector("#current").innerHTML = time
+            current_time_element.innerHTML = time
         })
         music_player.on_time_tick((time) => {
             if (is_seeking) return
-            document.querySelector("#seek").value = "" + time
+            seek_element.value = "" + time
+            const x = `${time / music_player.current_song_duration * 100}%`
+            const polygon = `polygon(0 0, ${x} 0, ${x} 100%, 0 100%)`
+            document.documentElement.style.setProperty("--clip-time-path", polygon)
         })
     }, (e) => console.log(e))
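
A quick sanity check of the convert_range helper added above: it is a plain linear remap from one range onto another (the numbers below are illustrative, not values from the demo):

    // convert_range(value, [a, b], [c, d]) = ((value - a) * (d - c)) / (b - a) + c
    // e.g. a cursor 300px into a 600px-wide element, remapped onto the 500-unit
    // viewBox plus the 40-unit overshoot used in the mousemove handler:
    const resX = convert_range(300, [0, 600], [0, 500 + 40]) // 270
    // which the handler writes into --clip-seek-path to reveal the hovered part of #waveform-seek:
    const polygon = `polygon(0 0, ${resX}px 0, ${resX}px 100%, 0 100%)`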


@@ -1,22 +1,23 @@
 #volume{
     transform: rotate(270deg);
+}
+:root {
+    --clip-time-path: polygon("0 0, 0 0, 0 100%, 0 100%");
+    --clip-seek-path: polygon("0 0, 0 0, 0 100%, 0 100%");
 }
 body {
-    width: 100vw;
-    height:100vh;
+    height:100%;
+    padding: 10vh;
     margin: 0;
-    padding: 0;
-    display: flex;
-    justify-content: center;
-    align-items: center;
 }
 .player-wrapper{
-    width:50vw;
+    width:100%;
     display: flex;
     justify-content: space-between;
+    margin-top:10vh;
 }
 .viz-wrapper{
-    width:50vw;
+    width:100%;
     display: flex;
     justify-content: space-between;
 }
@@ -27,4 +28,12 @@ body {
 }
 .wrapper div{
     width:100%;
+}
+.waveform-time {
+    clip-path: var(--clip-time-path);
+    fill: rgba(50,145,200,0.3);
+}
+.waveform-seek {
+    clip-path: var(--clip-seek-path);
+    fill: rgba(50,145,200,0.3);
 }


@@ -0,0 +1,14 @@
+export const fft_data = {
+    "fft_data": [
+        -106.24, -99.73, -100.98, -101.34, -107.01, -92.38, -84.85, -90.28, -93.68, -95.02, -97.16,
+        -96.32, -99.23, -103.13, -85.57, -98.17, -103.27, -107.5, -83.62, -95.23, -97.12, -94.78,
+        -95.93, -101.42, -97.83, -102.42, -111.74, -101.38, -106.8, -111.05, -88.04, -90.88, -97.67,
+        -96.31, -96.69, -102.15, -102.03, -100.51, -107.14, -101.48, -101.6, -106.62, -73.94,
+        -79.53, -92.74, -96.08, -96.26, -100.35, -99.13, -102.03, -107.4, -93.57, -102.31, -102.3,
+        -109.04, -81.85, -92.79, -100.06, -95.79, -96.49, -99.89, -100.27, -102.69, -107.35,
+        -103.94, -104.64, -104.3, -78.82, -84.2, -95.29, -92.57, -93.47, -98.08, -98.9, -101.56,
+        -109.38, -102.01, -102.51, -104.83, -72.18, -76.52, -91.69, -99.97, -96.63, -98.61, -76.97,
+        -90.41, -100.38, -106.77, -102.83, -104.46, -108.59, -80.97, -88.05, -100.77, -79.64, -72.3,
+        -87.96, -92.89, -93.03
+    ],
+}
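
The array above reads like a one-off dump of AnalyserNode.getFloatFrequencyData output (the values are negative decibels). How the snapshot was captured is not part of this commit; one plausible way, sketched here with the visualizer's own on_data subscription from the demo, would be:

    // Hypothetical capture helper, not included in this commit.
    // on_data hands subscribers the visual's current Float32Array of FFT values,
    // which could be logged once and pasted into waveform_data.ts
    // (the committed dump holds only ~100 values, so it was presumably trimmed further).
    bar_visual.on_data((data) => {
        const rounded = Array.from(data).map((v) => +v.toFixed(2))
        console.log(JSON.stringify({ fft_data: rounded }))
    })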


@@ -1,6 +1,6 @@
 {
     "name": "@euterpe.js/visualizer",
-    "version": "1.0.1",
+    "version": "1.0.2",
     "type": "module",
     "description": "Music visualizer based on SVG and AudioContext",
     "main": "./src/index.js",


@@ -7,12 +7,25 @@ export enum SmoothingAlgorythm {
 export enum ShapeType {
     Circle,
     Line,
+    Waveform,
     /*To be Implmeneted
     Custom,
-    Waveform,
     FullSongWaveForm
     */
 }
+export enum WaveformOrientation {
+    Vertical,
+    Horizontal,
+}
+export enum WaveformShape {
+    LineLike,
+    Striped,
+}
+export type WaveformOptions = {
+    fft_data: Float32Array,
+    shape_type: WaveformShape,
+    orientation: WaveformOrientation
+}
 type Point = {
     x: number,
     y: number,
@@ -22,6 +35,8 @@ type Shape = {
     //Algo-rythm, because this is about music. Get it? xd
     smoothing_algorythm: SmoothingAlgorythm
     points: Point[]
+    waveform_options?: WaveformOptions
+    symmetry?: boolean
 }
 export class AudioVisual {
@@ -59,14 +74,20 @@ export class AudioVisual {
         this.#to_fft_range = to_fft_range
         this.#point_count = point_count
-        this.#fft_data = new Float32Array()
+        this.#fft_data = new Float32Array(this.#analyzer_node.frequencyBinCount)
         this.#canvas_width = svg_injecting_element.viewBox.baseVal.width
         this.#canvas_height = svg_injecting_element.viewBox.baseVal.height
     }
-    #get_cured_frequency_data() {
-        this.#fft_data = new Float32Array(this.#buffer_length)
-        this.#analyzer_node.getFloatFrequencyData(this.#fft_data)
+    #get_cured_frequency_data(): Array<number> {
+        let buffer_length_cache
+        if (this.#shape.shape_type == ShapeType.Waveform) {
+            this.#fft_data = this.#shape.waveform_options!.fft_data
+            buffer_length_cache = this.#buffer_length
+            this.#buffer_length = this.#fft_data.length
+        } else {
+            this.#analyzer_node.getFloatFrequencyData(this.#fft_data)
+        }
         const from = Math.round((this.#point_count / 100) * this.#from_fft_range)
         const to = Math.round(this.#buffer_length - (this.#buffer_length / 100) * this.#to_fft_range)
         const squeeze_factor = Math.round((this.#buffer_length - to) / this.#point_count)
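
A quick worked example of the index math above (illustrative values; the real buffer_length, point_count and range settings come from the builder configuration):

    // Suppose buffer_length = 1024, point_count = 100, from_fft_range = 10, to_fft_range = 30:
    const from = Math.round((100 / 100) * 10)              // 10
    const to = Math.round(1024 - (1024 / 100) * 30)        // 717
    const squeeze_factor = Math.round((1024 - 717) / 100)  // 3
    // the cured data then samples every 3rd FFT bin starting at bin 10:
    // cured[i] = fft_data[10 + i * 3] for i = 0..99
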
@@ -127,8 +148,7 @@
     #catmull_rom_smooth(arr: { x: number, y: number }[], k: number) {
         if (k == null) k = 1
         const last = arr.length - 2
-        let path = ""
+        let path = "M" + [arr[0].x, arr[0].y]
         for (let i = 0; i < arr.length - 1; i++) {
@@ -159,7 +179,6 @@
             path += "C" + [cp1x, cp1y, cp2x, cp2y, x2, y2]
         }
-        path += " Z"
         return path
     }
@@ -171,9 +190,10 @@
         switch (this.#shape.shape_type) {
             case ShapeType.Line: {
                 for (let i = 0; i < frequency_data.length - 1; i++) {
+                    const mutator = isFinite(frequency_data[i]) ? this.#convert_range(frequency_data[i] * this.#fft_multiplier + this.#fft_offset, in_range, out_range) : -1 * this.#canvas_height
                     mutated_points.push({
                         x: this.#shape.points[i].x /** ((Math.max(FFTDataArray[i] + 100)) * 4)*/,
-                        y: this.#shape.points[i].y - this.#convert_range(frequency_data[i] * this.#fft_multiplier + this.#fft_offset, in_range, out_range),
+                        y: this.#shape.points[i].y - mutator,
                     })
                 }
                 break
@@ -192,6 +212,50 @@
                 break
             }
+            case ShapeType.Waveform: {
+                if (this.#shape.waveform_options!.shape_type == WaveformShape.LineLike) {
+                    if (this.#shape.symmetry) {
+                        for (let i = 0; i < this.#shape.points.length - 1; i += 2) {
+                            const mutator = this.#convert_range(frequency_data[i / 2] * this.#fft_multiplier + this.#fft_offset, in_range, out_range)
+                            if (this.#shape.waveform_options!.orientation == WaveformOrientation.Horizontal) {
+                                mutated_points.push({
+                                    x: this.#shape.points[i].x,
+                                    y: this.#shape.points[i].y - mutator
+                                })
+                                mutated_points.push({
+                                    x: this.#shape.points[i + 1].x,
+                                    y: this.#shape.points[i + 1].y + mutator
+                                })
+                            } else {
+                                mutated_points.push({
+                                    x: this.#shape.points[i].x + mutator,
+                                    y: this.#shape.points[i].y
+                                })
+                                mutated_points.push({
+                                    x: this.#shape.points[i + 1].x - mutator,
+                                    y: this.#shape.points[i + 1].y
+                                })
+                            }
+                        }
+                    } else {
+                        for (let i = 0; i < frequency_data.length - 1; i++) {
+                            const mutator = this.#convert_range(frequency_data[i] * this.#fft_multiplier + this.#fft_offset, in_range, out_range)
+                            if (this.#shape.waveform_options!.orientation == WaveformOrientation.Horizontal) {
+                                mutated_points.push({
+                                    x: this.#shape.points[i].x,
+                                    y: this.#shape.points[i].y - mutator
+                                })
+                            } else {
+                                mutated_points.push({
+                                    x: this.#shape.points[i].x - mutator,
+                                    y: this.#shape.points[i].y
+                                })
+                            }
+                        }
+                    }
+                }
+                break
+            }
         }
         return mutated_points
     }
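
For the symmetric LineLike waveform above, each FFT bin drives a pair of neighbouring points mirrored around the resting line; roughly, with made-up numbers for a horizontal 500x500 viewBox:

    // resting line at y = 250, mutator = 40 for one bin:
    mutated_points.push({ x: 100, y: 250 - 40 }) // even-indexed point, pushed up to 210
    mutated_points.push({ x: 100, y: 250 + 40 }) // its odd-indexed twin, pushed down to 290
    // the even-indexed points later form the top edge of the waveform, the odd ones the bottom edge
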
@@ -211,16 +275,40 @@
             }
             case ShapeType.Circle: {
                 path = `M ${arr[0].x} ${arr[0].y} `
+                break
+            }
+            case ShapeType.Waveform: {
+                path = `M ${0} ${this.#canvas_height / 2}`
+                break
+            }
         }
         switch (this.#shape.smoothing_algorythm) {
             case SmoothingAlgorythm.Linear: {
-                for (let i = 0; i < arr.length; i++) {
-                    path += `L ${arr[i].x},${arr[i].y} `
-                }
-                if (this.#shape.shape_type == ShapeType.Line) {
-                    path += `L ${this.#canvas_width} ${this.#canvas_height / 2} `
-                    //path += `L ${canvas_width} ${canvas_height} `
+                switch (this.#shape.shape_type) {
+                    case ShapeType.Line: {
+                        for (let i = 0; i < arr.length; i++) {
+                            path += `L ${arr[i].x},${arr[i].y} `
+                        }
+                        if (this.#shape.shape_type == ShapeType.Line) {
+                            path += `L ${this.#canvas_width} ${this.#canvas_height} `
+                            //path += `L ${canvas_width} ${canvas_height} `
+                        }
+                        break
+                    }
+                    case ShapeType.Circle: {
+                        for (let i = 0; i < arr.length; i++) {
+                            path += `L ${arr[i].x},${arr[i].y} `
+                        }
+                        break
+                    }
+                    case ShapeType.Waveform: {
+                        for (let i = 0; i < arr.length; i += 2) {
+                            path += `L ${arr[i].x},${arr[i].y} `
+                        }
+                        for (let i = arr.length - 1; i >= 0; i -= 2) {
+                            path += `L ${arr[i].x},${arr[i].y} `
+                        }
+                    }
                 }
                 path += `Z `
                 break
@@ -250,7 +338,28 @@
                 break
             }
             case SmoothingAlgorythm.CatmullRom: {
-                path = this.#catmull_rom_smooth(arr, 1)
+                if (this.#shape.shape_type == ShapeType.Waveform && this.#shape.symmetry == true) {
+                    //adding points so both halves end and start at the same center point
+                    const first_half = [{ x: 0, y: this.#canvas_height / 2 }]
+                    const second_half = [{ x: 0, y: this.#canvas_height / 2 }]
+                    for (let i = 0; i < arr.length - 1; i += 2) {
+                        first_half.push(arr[i])
+                        second_half.push(arr[i + 1])
+                    }
+                    first_half.push({ x: this.#canvas_width, y: this.#canvas_height / 2 })
+                    second_half.push({ x: this.#canvas_width, y: this.#canvas_height / 2 })
+                    // path += `M ${this.#canvas_width},${this.#canvas_height / 2}`
+                    second_half.reverse()
+                    //path += ` L 0 ${this.#canvas_height / 2}`
+                    path += this.#catmull_rom_smooth(first_half, 1)
+                    //path += ` L ${this.#canvas_width} ${this.#canvas_height / 2}`
+                    path += this.#catmull_rom_smooth(second_half, 1)
+                    //path += `L 0 ${this.#canvas_height / 2}`
+                }
+                else {
+                    path += this.#catmull_rom_smooth(arr, 1)
+                }
+                path += `Z`
                 break
             }
         }
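
Conceptually, the symmetric CatmullRom branch above stitches one closed outline out of two smoothed halves (coordinates shown for a 500x500 viewBox):

    // first_half:  (0, 250) -> even (top-edge) points    -> (500, 250)
    // second_half: (500, 250) -> odd (bottom-edge) points -> (0, 250), thanks to reverse()
    // path = "M 0 250" + catmull_rom_smooth(first_half) + catmull_rom_smooth(second_half) + "Z"
    // both halves start and end on the centre line, so the two curves join into one closed shape
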
@@ -260,9 +369,15 @@
     on_data(fn: ((data: Float32Array) => void)) {
         this.#subscriber_fns.push(fn)
     }
+    /**
+     * Useful for waveforms or shapes that don't need to redraw every frame
+     */
+    draw_once() {
+        this.#svg_injecting_element.innerHTML = this.#create_svg_element()
+        this.#subscriber_fns.forEach((fn) => fn(this.#fft_data))
+    }
     draw() {
-        this.#analyzer_node.getFloatFrequencyData(this.#fft_data)
         this.#svg_injecting_element.innerHTML = this.#create_svg_element()
         this.#subscriber_fns.forEach((fn) => fn(this.#fft_data))
         requestAnimationFrame(this.draw.bind(this))
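
For reference, the demo app in this commit pairs the two entry points like this:

    // animated visuals re-render every frame via requestAnimationFrame:
    trapnation_visual.draw()
    bar_visual.draw()
    // the pre-baked waveform only needs to be rendered once:
    waveform_visual.draw_once()
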
@@ -381,11 +496,11 @@
      * @param shape_type Circle = 0; Line = 1;
      * @returns `new AudioVisual`
      */
-    build(shape_type: ShapeType) {
-        const shape = this.#create_shape(shape_type)
+    build(shape_type: ShapeType, symmetry: boolean, waveform_options?: WaveformOptions) {
+        const shape = this.#create_shape(shape_type, symmetry, waveform_options)
         return new AudioVisual(this.#analyzer_node, this.#svg_injecting_element, shape, this.#buffer_length, this.#fft_multipier, this.#fft_offset, this.#from_fft_range, this.#to_fft_range, this.#point_count)
     }
-    #create_shape(shape_type: ShapeType): Shape {
+    #create_shape(shape_type: ShapeType, symmetry: boolean, waveform_options?: WaveformOptions): Shape {
         const point_amount = this.#get_cured_frequency_data().length
         let new_shape: Shape
         switch (shape_type) {
@@ -394,7 +509,7 @@
             for (let i = 0; i < point_amount; i++) {
                 points.push({
                     x: (this.#canvas_width / point_amount) * i,
-                    y: this.#canvas_height / 2 - (0 / point_amount) * i,
+                    y: 0,
                 })
             }
             new_shape = { shape_type, points, smoothing_algorythm: this.#smoothing_algorythm }
@@ -413,20 +528,49 @@
                 new_shape = { shape_type, points, smoothing_algorythm: this.#smoothing_algorythm }
                 break
             }
+            case ShapeType.Waveform: {
+                if (waveform_options === undefined) {
+                    console.error("Waveform options undefined at shapetype.waveform, please define!")
+                    throw Error("Waveform options undefined at shapetype.waveform, please define!")
+                }
+                const fft_length = this.#get_cured_frequency_data(waveform_options.fft_data).length
+                const points = []
+                for (let i = 0; i < fft_length; i++) {
+                    let x, y
+                    if (waveform_options.shape_type == WaveformShape.LineLike) {
+                        x = (this.#canvas_width / point_amount) * i
+                        y = this.#canvas_height / 2
+                    } else {
+                        throw Error("WaveformShape.Striped not implemented yet")
+                    }
+                    waveform_options.orientation == WaveformOrientation.Horizontal ?
+                        points.push({ x: x, y: y }) :
+                        points.push({ x: y, y: x })
+                    //Double the points needed for symmetry
+                    if (symmetry) {
+                        waveform_options.orientation == WaveformOrientation.Horizontal ?
+                            points.push({ x: x, y: y }) :
+                            points.push({ x: y, y: x })
+                    }
+                }
+                new_shape = { shape_type, points, smoothing_algorythm: this.#smoothing_algorythm, symmetry: symmetry, waveform_options: waveform_options }
+            }
         }
         return new_shape
     }
-    #get_cured_frequency_data() {
-        const fft_data_array = new Float32Array(this.#buffer_length)
-        this.#analyzer_node.getFloatFrequencyData(fft_data_array)
+    #get_cured_frequency_data(fft_data?: Float32Array) {
+        if (!fft_data) {
+            fft_data = new Float32Array(this.#buffer_length)
+            this.#analyzer_node.getFloatFrequencyData(fft_data)
+        }
         const from = Math.round((this.#point_count / 100) * this.#from_fft_range)
         const to = Math.round(this.#buffer_length - (this.#buffer_length / 100) * this.#to_fft_range)
         const squeezeFactor = Math.round((this.#buffer_length - to) / this.#point_count)
         const return_array = new Array(this.#point_count)
         for (let i = 0; i < this.#point_count; i++) {
-            return_array[i] = fft_data_array[from + i * squeezeFactor]
+            return_array[i] = fft_data[from + i * squeezeFactor]
         }
         return return_array
     }