I am building a web-based audio visualizer that lets the user lock the raw-signal display to a chosen frequency, similar to what hardware oscilloscopes do. The goal is that, with the visualizer set to 440 Hz and a 440 Hz sine wave as input (one period every 1/440 s), the waveform stays stationary on the canvas. My initial approach was to shift the graph to the left based on the frequency (e.g., at 440 Hz, move the graph 1/440 s to the left per second), but this proved ineffective.
I also had trouble working out what units the AnalyserNode's time-domain data uses. I believe it may be milliseconds, but I cannot confirm this.
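To make the intended approach concrete, here is a minimal sketch of the offset I am trying to compute (separate from the full code below). It assumes that each index of the time-domain buffer corresponds to one audio sample, i.e. 1 / context.sampleRate seconds — which is exactly the assumption I cannot verify, since the data might be in milliseconds instead. The helper name computeOffsetSamples is just for illustration.

// Sketch only: how far (in buffer indices) to shift the waveform so that a
// wave at targetHz appears stationary.
// Assumption (unverified): one buffer index == one sample at ctx.sampleRate.
function computeOffsetSamples(ctx: AudioContext, targetHz: number, bufferSize: number): number {
    const period = 1 / targetHz;                               // e.g. 1/440 s
    const phase = ctx.currentTime % period;                    // position within the current cycle
    const offsetSamples = Math.round(phase * ctx.sampleRate);  // seconds -> samples
    return offsetSamples % bufferSize;                         // wrap into the buffer
}

The result would then be added to the read index while walking the buffer, like the offsetX used near the end of drawVisualizer() below.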
"use strict";
// Debugging using Oscillator instead of microphone
const USE_OSCILLATOR = true;
// Checking compatibility
if (!window.AudioContext)
    window.AudioContext = window.webkitAudioContext;
if (!navigator.getUserMedia)
    navigator.getUserMedia =
        navigator.mozGetUserMedia ||
        navigator.webkitGetUserMedia ||
        navigator.msGetUserMedia;
// Main Application Class
class App {
    constructor(visualizerElement, optionsElement) {
        this.visualizerElement = visualizerElement;
        this.optionsElement = optionsElement;
        // Create HTML elements
        this.canvas = document.createElement("canvas");
        // Context initialization
        this.context = new AudioContext({
            // Optimize for low latency
            latencyHint: "interactive",
        });
        this.canvasCtx = this.canvas.getContext("2d", {
            // Optimize for low latency and performance
            desynchronized: true,
            alpha: false,
        });
        // Initialize audio nodes
        this.audioAnalyser = this.context.createAnalyser();
        this.audioBuffer = new Uint8Array(this.audioAnalyser.frequencyBinCount);
        this.audioInputStream = null;
        this.audioInputNode = null;
        if (this.canvasCtx === null)
            throw new Error("2D rendering context not supported by browser.");
        this.updateCanvasSize();
        window.addEventListener("resize", () => this.updateCanvasSize());
        this.drawVisualizer();
        this.visualizerElement.appendChild(this.canvas);
        if (USE_OSCILLATOR) {
            let oscillator = this.context.createOscillator();
            oscillator.type = "sine";
            oscillator.frequency.setValueAtTime(440, this.context.currentTime);
            oscillator.connect(this.audioAnalyser);
            oscillator.start();
        }
        else {
            navigator.getUserMedia({ audio: true }, (stream) => {
                this.audioInputStream = stream;
                this.audioInputNode = this.context.createMediaStreamSource(stream);
                this.audioInputNode.channelCountMode = "explicit";
                this.audioInputNode.channelCount = 1;
                this.audioBuffer = new Uint8Array(this.audioAnalyser.frequencyBinCount);
                this.audioInputNode.connect(this.audioAnalyser);
            }, (err) => console.error(err));
        }
    }
    updateCanvasSize() {
        this.canvas.width = window.innerWidth;
        this.canvas.height = window.innerHeight;
        this.canvasCtx?.setTransform(1, 0, 0, -1, 0, this.canvas.height * 0.5);
    }
    drawVisualizer() {
        if (this.canvasCtx === null)
            return;
        const ctx = this.canvasCtx;
        ctx.fillStyle = "black";
        ctx.fillRect(0, -0.5 * this.canvas.height, this.canvas.width, this.canvas.height);
        // Draw FFT
        this.audioAnalyser.getByteFrequencyData(this.audioBuffer);
        const step = this.canvas.width / this.audioBuffer.length;
        const scale = this.canvas.height / (2 * 255);
        ctx.beginPath();
        ctx.moveTo(-step, this.audioBuffer[0] * scale);
        this.audioBuffer.forEach((sample, index) => {
            ctx.lineTo(index * step, scale * sample);
        });
        ctx.strokeStyle = "white";
        ctx.stroke();
        // Identify the highest dominant frequency
        let highestFreqHalfHz = 0;
        {
            /**
             * Highest frequency in 0.5Hz increments
             */
            let highestFreq = NaN;
            let highestFreqAmp = NaN;
            let remSteps = NaN;
            for (let i = this.audioBuffer.length - 1; i >= 0; i--) {
                const sample = this.audioBuffer[i];
                if (sample > 20 && (isNaN(highestFreqAmp) || sample > highestFreqAmp)) {
                    highestFreq = i;
                    highestFreqAmp = sample;
                    if (isNaN(remSteps))
                        remSteps = 500;
                }
                if (!isNaN(remSteps)) {
                    if (remSteps-- < 0)
                        break;
                }
            }
            if (!isNaN(highestFreq)) {
                ctx.beginPath();
                ctx.moveTo(highestFreq * step, 0);
                ctx.lineTo(highestFreq * step, scale * 255);
                ctx.strokeStyle = "green";
                ctx.stroke();
                highestFreqHalfHz = highestFreq;
            }
        }
        // Draw Audio waveform
        this.audioAnalyser.getByteTimeDomainData(this.audioBuffer);
        {
            const bufferSize = this.audioBuffer.length;
            const offsetY = -this.canvas.height * 0.5;
            // Determining offset value
            const offsetX = highestFreqHalfHz == 0
                ? 0
                : bufferSize -
                    Math.round(((this.context.currentTime * 1000) % (1 / 440)) % bufferSize);
            // Rendering the audio graph with the calculated offset
            ctx.beginPath();
            ctx.moveTo(-step, this.audioBuffer[0] * scale + offsetY);
            for (let i = 0; i < bufferSize; i++) {
                const index = (offsetX + i) % bufferSize;
                const sample = this.audioBuffer[index];
                ctx.lineTo(i * step, scale * sample + offsetY);
            }
            ctx.strokeStyle = "white";
            ctx.stroke();
        }
    }
}
window.addEventListener("load", () => {
    const app = new App(document.getElementById("visualizer"), document.getElementById("options"));
    requestAnimationFrame(draw);
    function draw() {
        requestAnimationFrame(draw);
        app.drawVisualizer();
    }
});
html {
    background: black;
}
body {
    width: 100vw;
    height: 100vh;
    margin: 0;
    overflow: hidden;
}
#visualizer {
    position: fixed;
    inset: 0;
}
<!DOCTYPE html>
<html lang="en">
    <head>
        <meta charset="UTF-8">
        <meta name="viewport" content="width=device-width, initial-scale=1.0">
        <title>Equalizer</title>
    </head>
    <body>
        <div id="visualizer"></div>
        <div id="options"></div>
    </body>
</html>
The snippet above shows how the visualizer draws the oscillating graph. For further details, refer to the source here. If everything worked correctly, the waveform would remain motionless on the canvas; with my current offset calculation it does not.