Spaces:
Running
Running
<!DOCTYPE html>
<html lang="en">
<head> | |
<title>FFT Analysis with Visualization and Controls</title> | |
<style> | |
body { font-family: sans-serif; text-align: center; margin-top: 30px; background-color: #f4f4f4; } | |
#controls button { padding: 10px 15px; font-size: 15px; cursor: pointer; margin: 5px; border: none; border-radius: 5px; } | |
#startButton { background-color: #4CAF50; color: white; } | |
#stopButton { background-color: #f44336; color: white; } | |
#startButton:disabled, #stopButton:disabled { background-color: #cccccc; cursor: not-allowed; } | |
#status { margin-top: 15px; color: #555; font-weight: bold; min-height: 20px; } | |
.error { color: red; font-weight: bold; } | |
#visualizerCanvas { display: block; margin: 20px auto; background-color: #000; border: 1px solid #ccc; } | |
#volumeControl { margin-top: 15px; } | |
#logArea { | |
margin: 20px auto; | |
width: 80%; | |
max-width: 600px; | |
height: 100px; | |
overflow-y: scroll; | |
border: 1px solid #ccc; | |
background-color: #fff; | |
text-align: left; | |
padding: 10px; | |
font-size: 12px; | |
font-family: monospace; | |
} | |
label { font-size: 14px; margin-right: 5px; } | |
</style> | |
</head> | |
<body> | |
<h1>FFT Analysis with Visualization and Controls</h1> | |
<p>Real-time microphone input analysis, FFT visualization, and basic sound resynthesis.</p> | |
<div id="controls"> | |
<button id="startButton">Start</button> | |
<button id="stopButton" disabled>Stop</button> | |
</div> | |
<div id="volumeControl"> | |
<label for="volumeSlider">Volume:</label> | |
<input type="range" id="volumeSlider" min="0" max="1" step="0.01" value="0.5" disabled> | |
</div> | |
<div id="status">Waiting...</div> | |
<canvas id="visualizerCanvas" width="600" height="200"></canvas> | |
<div><strong>Log:</strong></div>
<div id="logArea"></div> | |
<script> | |
// --- DOM Elements ---
// Cached references to the controls declared in the markup above.
const startButton = document.getElementById('startButton');
const stopButton = document.getElementById('stopButton');
const statusDiv = document.getElementById('status');
const canvas = document.getElementById('visualizerCanvas');
const canvasCtx = canvas.getContext('2d');
const volumeSlider = document.getElementById('volumeSlider');
const logArea = document.getElementById('logArea');
// --- Web Audio API Variables ---
// Created in startAudio(), torn down in stopAudio(); null/undefined while stopped.
let audioContext;
let analyser;
let microphoneSource;
let mainGainNode;
let animationFrameId = null; // For processAudio loop
let visualizerFrameId = null; // For drawVisualizer loop
// Oscillators built by processAudio() for the current frame; replaced every frame.
let activeOscillators = [];
// --- Configuration ---
const fftSize = 2048;
const frequencyBinCount = fftSize / 2; // analyser.frequencyBinCount
// Reallocated in startAudio() from the analyser's actual bin count.
let frequencyDataArray = new Uint8Array(frequencyBinCount);
const numReconstructedOscillators = 50; // Number of oscillators for resynthesis
// --- Logging Function --- | |
// --- Logging Function ---
// Writes a timestamped entry to both the browser console and the on-page log.
const logMessage = (message, isError = false) => {
  // Mirror the entry to the console at the matching severity.
  if (isError) {
    console.error(message);
  } else {
    console.log(message);
  }
  // Build the on-page log line.
  const entry = document.createElement('div');
  const stamp = new Date().toLocaleTimeString();
  entry.textContent = `[${stamp}] ${message}`;
  if (isError) {
    entry.style.color = 'red';
  }
  logArea.appendChild(entry);
  // Auto-scroll so the newest entry stays visible.
  logArea.scrollTop = logArea.scrollHeight;
};
// --- Visualization Function --- | |
// --- Visualization Function ---
// Renders the current FFT magnitudes as colored vertical bars on the canvas.
// Reschedules itself via requestAnimationFrame until stopAudio() clears `analyser`.
const drawVisualizer = () => {
  if (!analyser) return; // Torn down — stop the loop.
  visualizerFrameId = requestAnimationFrame(drawVisualizer); // Schedule next frame

  // Fetch the latest frequency-domain snapshot (also consumed by processAudio).
  analyser.getByteFrequencyData(frequencyDataArray);

  // Clear canvas
  canvasCtx.fillStyle = '#000'; // Black background
  canvasCtx.fillRect(0, 0, canvas.width, canvas.height);

  // Draw frequency bars.
  const barWidth = (canvas.width / frequencyBinCount) * 2.5; // Adjust multiplier for bar density
  let x = 0;
  // Bound the loop by the canvas width as well: with the 2.5x bar width only a
  // fraction of the bins fit on screen, and the original kept issuing fillRect
  // calls for bars entirely off-canvas every frame.
  for (let i = 0; i < frequencyBinCount && x < canvas.width; i++) {
    const barHeight = frequencyDataArray[i] * (canvas.height / 255); // Scale to canvas height
    // Color gradient: tall (loud) bars shift from blue toward red.
    const hue = (barHeight / canvas.height) * 240;
    canvasCtx.fillStyle = `hsl(${240 - hue}, 100%, 50%)`;
    canvasCtx.fillRect(x, canvas.height - barHeight, barWidth, barHeight);
    x += barWidth + 1; // Advance past the bar plus a 1px gap
  }
};
// --- Audio Processing Function (Resynthesis) --- | |
// --- Audio Processing Function (Resynthesis) ---
// Each frame, rebuilds a bank of sine oscillators from the most recent FFT
// snapshot (fetched by drawVisualizer into frequencyDataArray), crudely
// resynthesizing the microphone input through mainGainNode.
const processAudio = () => {
  if (!analyser) return; // Torn down — stop the loop.
  animationFrameId = requestAnimationFrame(processAudio); // Schedule next frame

  // 1. Tear down last frame's oscillators before building new ones.
  activeOscillators.forEach(osc => {
    try {
      osc.stop();
      osc.disconnect();
    } catch (e) { /* Ignore: node may already be stopped */ }
  });
  activeOscillators = [];

  // 2. Create new oscillators for bins loud enough to matter.
  for (let i = 0; i < numReconstructedOscillators; i++) {
    // Map oscillator slot i onto an FFT bin index (clamped to the last bin).
    const frequencyIndex = Math.min(
      Math.floor(i * (frequencyBinCount / numReconstructedOscillators)),
      frequencyBinCount - 1
    );
    const magnitude = frequencyDataArray[frequencyIndex]; // From visualizer's last fetch
    if (magnitude <= 10) continue; // Magnitude threshold: skip quiet bins

    // Bin index -> frequency in Hz.
    const frequency = (frequencyIndex * audioContext.sampleRate) / fftSize;
    // Only resynthesize audible, representable frequencies (20 Hz .. Nyquist).
    if (frequency <= 20 || frequency >= audioContext.sampleRate / 2) continue;

    // Create the node only after all checks pass: the original allocated one
    // OscillatorNode per slot up front, discarding up to 50 unused nodes per frame.
    const oscillator = audioContext.createOscillator();
    oscillator.type = 'sine';
    try {
      oscillator.frequency.setValueAtTime(frequency, audioContext.currentTime);
    } catch (e) {
      logMessage(`Error setting frequency: ${frequency} Hz. ${e}`, true);
      continue;
    }

    const oscGainNode = audioContext.createGain();
    // Volume based on magnitude, squared for emphasis, scaled down
    oscGainNode.gain.setValueAtTime((magnitude / 255) ** 2 * 0.8, audioContext.currentTime);
    oscillator.connect(oscGainNode);
    oscGainNode.connect(mainGainNode); // Route through the main volume control
    oscillator.start(audioContext.currentTime);
    activeOscillators.push(oscillator);
  }
};
// --- Start Audio Function --- | |
// --- Start Audio Function ---
// Requests microphone access, builds the audio graph
// (mic -> analyser for analysis; oscillators -> main gain -> speakers for
// resynthesis), starts both animation loops, and updates the UI.
// On any failure it logs the error and runs stopAudio() to clean up.
const startAudio = async () => {
  logMessage("Attempting to start audio...");
  if (audioContext) {
    logMessage("AudioContext already active.");
    return;
  }
  // Fail early with a readable message when getUserMedia is unavailable
  // (insecure context or very old browser) instead of a raw TypeError.
  if (!navigator.mediaDevices || !navigator.mediaDevices.getUserMedia) {
    logMessage("getUserMedia is not supported in this context.", true);
    statusDiv.textContent = 'Error: microphone access not supported.';
    statusDiv.classList.add('error');
    return;
  }
  try {
    statusDiv.textContent = 'Accessing microphone...';
    statusDiv.classList.remove('error');
    // 1. Create Audio Context (webkit prefix for older Safari).
    audioContext = new (window.AudioContext || window.webkitAudioContext)();
    logMessage("AudioContext created.");
    // 2. Get Microphone Stream
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    logMessage("Microphone access granted.");
    // 3. Create Nodes
    microphoneSource = audioContext.createMediaStreamSource(stream);
    analyser = audioContext.createAnalyser();
    analyser.fftSize = fftSize;
    mainGainNode = audioContext.createGain();
    // Slider .value is a string — coerce explicitly for the AudioParam.
    mainGainNode.gain.setValueAtTime(Number(volumeSlider.value), audioContext.currentTime);
    // Reinitialize data array based on the analyser's actual bin count.
    frequencyDataArray = new Uint8Array(analyser.frequencyBinCount);
    // 4. Connect Nodes: Mic -> Analyser; resynthesized sound -> Main Gain -> Destination.
    microphoneSource.connect(analyser);
    // Only the *resynthesized* oscillators feed mainGainNode — the raw mic is
    // never routed to the speakers (avoids feedback).
    mainGainNode.connect(audioContext.destination);
    logMessage("Audio nodes connected.");
    // 5. Start Processing and Visualization
    processAudio(); // Start resynthesis loop
    drawVisualizer(); // Start visualization loop
    logMessage("Audio processing and visualization started.");
    // 6. Update UI
    startButton.disabled = true;
    stopButton.disabled = false;
    volumeSlider.disabled = false;
    statusDiv.textContent = 'Running...';
  } catch (err) {
    logMessage(`Error starting audio: ${err.message}`, true);
    statusDiv.textContent = `Error: ${err.message}`;
    statusDiv.classList.add('error');
    stopAudio(); // Clean up on error
  }
};
// --- Stop Audio Function --- | |
// --- Stop Audio Function ---
// Cancels both animation loops, stops and disconnects every node, releases the
// microphone track, closes the AudioContext, and resets the UI. Safe to call
// when only partially started (e.g. from startAudio's error path).
const stopAudio = () => {
  logMessage("Stopping audio...");
  if (animationFrameId) {
    cancelAnimationFrame(animationFrameId);
    animationFrameId = null;
  }
  if (visualizerFrameId) {
    cancelAnimationFrame(visualizerFrameId);
    visualizerFrameId = null;
  }
  // Stop and disconnect oscillators
  activeOscillators.forEach(osc => {
    try {
      osc.stop();
      osc.disconnect();
    } catch (e) { /* Ignore: node may already be stopped */ }
  });
  activeOscillators = [];
  // Disconnect main nodes and drop the references so the next start/stop
  // cycle never operates on stale nodes (the original kept them forever).
  if (analyser) {
    analyser.disconnect();
    analyser = null;
  }
  if (mainGainNode) {
    mainGainNode.disconnect();
    mainGainNode = null;
  }
  if (microphoneSource) {
    microphoneSource.mediaStream.getTracks().forEach(track => track.stop()); // IMPORTANT: release the mic
    microphoneSource.disconnect();
    microphoneSource = null;
  }
  logMessage("Nodes disconnected, microphone track stopped.");
  // Nullify the context *synchronously* before the async close: the original
  // only cleared it inside close().then(), so a quick Start click raced the
  // close and saw a stale "already active" context.
  const closingContext = audioContext;
  audioContext = null;
  if (closingContext && closingContext.state !== 'closed') {
    closingContext.close()
      .then(() => logMessage("AudioContext closed."))
      .catch(err => logMessage(`Error closing AudioContext: ${err.message}`, true));
  }
  // Clear visualizer
  canvasCtx.fillStyle = '#333';
  canvasCtx.fillRect(0, 0, canvas.width, canvas.height);
  canvasCtx.fillStyle = '#fff';
  canvasCtx.textAlign = 'center';
  canvasCtx.fillText("Stopped", canvas.width / 2, canvas.height / 2);
  // Update UI
  startButton.disabled = false;
  stopButton.disabled = true;
  volumeSlider.disabled = true;
  statusDiv.textContent = 'Stopped.';
  logMessage("Audio stopped successfully.");
};
// --- Event Listeners ---
startButton.addEventListener('click', startAudio);
stopButton.addEventListener('click', stopAudio);
volumeSlider.addEventListener('input', (event) => {
  // Guard BOTH nodes: after stopAudio() the context may already be null while
  // a stale gain node reference survives — the original would then throw on
  // `audioContext.currentTime`. Slider .value is a string, so coerce it.
  if (mainGainNode && audioContext) {
    mainGainNode.gain.setValueAtTime(Number(event.target.value), audioContext.currentTime);
  }
});
// Initial canvas state
canvasCtx.fillStyle = '#333';
canvasCtx.fillRect(0, 0, canvas.width, canvas.height);
canvasCtx.fillStyle = '#fff';
canvasCtx.textAlign = 'center';
canvasCtx.fillText("Waiting to start...", canvas.width / 2, canvas.height / 2);
</script> | |
</body> | |
</html> |