<!DOCTYPE html>
<html>
<head>
<title>FFT Analysis with Visualization and Controls</title>
<style>
body { font-family: sans-serif; text-align: center; margin-top: 30px; background-color: #f4f4f4; }
#controls button { padding: 10px 15px; font-size: 15px; cursor: pointer; margin: 5px; border: none; border-radius: 5px; }
#startButton { background-color: #4CAF50; color: white; }
#stopButton { background-color: #f44336; color: white; }
#startButton:disabled, #stopButton:disabled { background-color: #cccccc; cursor: not-allowed; }
#status { margin-top: 15px; color: #555; font-weight: bold; min-height: 20px; }
.error { color: red; font-weight: bold; }
#visualizerCanvas { display: block; margin: 20px auto; background-color: #000; border: 1px solid #ccc; }
#volumeControl { margin-top: 15px; }
#logArea {
margin: 20px auto;
width: 80%;
max-width: 600px;
height: 100px;
overflow-y: scroll;
border: 1px solid #ccc;
background-color: #fff;
text-align: left;
padding: 10px;
font-size: 12px;
font-family: monospace;
}
label { font-size: 14px; margin-right: 5px; }
</style>
</head>
<body>
<h1>FFT Analysis with Visualization and Controls</h1>
<p>Real-time microphone input analysis, FFT visualization, and basic sound resynthesis.</p>
<div id="controls">
<button id="startButton">Start</button>
<button id="stopButton" disabled>Stop</button>
</div>
<div id="volumeControl">
<label for="volumeSlider">Volume:</label>
<input type="range" id="volumeSlider" min="0" max="1" step="0.01" value="0.5" disabled>
</div>
<div id="status">Waiting...</div>
<canvas id="visualizerCanvas" width="600" height="200"></canvas>
<label for="logArea">Log:</label>
<div id="logArea"></div>
<script>
// --- DOM Elements ---
const startButton = document.getElementById('startButton');
const stopButton = document.getElementById('stopButton');
const statusDiv = document.getElementById('status');
const canvas = document.getElementById('visualizerCanvas');
const canvasCtx = canvas.getContext('2d');
const volumeSlider = document.getElementById('volumeSlider');
const logArea = document.getElementById('logArea');
// --- Web Audio API Variables ---
let audioContext;
let analyser;
let microphoneSource;
let mainGainNode;
let animationFrameId = null; // For processAudio loop
let visualizerFrameId = null; // For drawVisualizer loop
let activeOscillators = [];
// --- Configuration ---
const fftSize = 2048;
const frequencyBinCount = fftSize / 2; // analyser.frequencyBinCount
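// Each bin spans sampleRate / fftSize Hz (about 21.5-23.4 Hz at typical 44.1-48 kHz sample rates)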
let frequencyDataArray = new Uint8Array(frequencyBinCount);
const numReconstructedOscillators = 50; // Number of oscillators for resynthesis
// --- Logging Function ---
const logMessage = (message, isError = false) => {
console[isError ? 'error' : 'log'](message);
const logEntry = document.createElement('div');
logEntry.textContent = `[${new Date().toLocaleTimeString()}] ${message}`;
if (isError) {
logEntry.style.color = 'red';
}
logArea.appendChild(logEntry);
logArea.scrollTop = logArea.scrollHeight; // Auto-scroll to bottom
};
// --- Visualization Function ---
const drawVisualizer = () => {
if (!analyser) return;
visualizerFrameId = requestAnimationFrame(drawVisualizer); // Schedule next frame
// Get frequency data
analyser.getByteFrequencyData(frequencyDataArray);
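// frequencyDataArray now holds one 0-255 magnitude per bin (dB values mapped between analyser.minDecibels and maxDecibels)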
// Clear canvas
canvasCtx.fillStyle = '#000'; // Black background
canvasCtx.fillRect(0, 0, canvas.width, canvas.height);
// Draw frequency bars
const barWidth = (canvas.width / frequencyBinCount) * 2.5; // Wider bars for readability; only roughly the lower quarter of the bins fits on the canvas
let barHeight;
let x = 0;
for (let i = 0; i < frequencyBinCount; i++) {
barHeight = frequencyDataArray[i] * (canvas.height / 255); // Scale height
// Color gradient: quiet bins are blue (hue 240), loud bins shift toward red (hue 0)
const hue = (barHeight / canvas.height) * 240;
canvasCtx.fillStyle = `hsl(${240 - hue}, 100%, 50%)`;
canvasCtx.fillRect(x, canvas.height - barHeight, barWidth, barHeight);
x += barWidth + 1; // Move to the next bar position
}
};
// --- Audio Processing Function (Resynthesis) ---
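// Strategy: on every animation frame, discard the previous frame's oscillators and
// rebuild sine oscillators for the strongest of the sampled bins, with each gain
// derived from the bin magnitude. Note: hard-stopping and restarting oscillators
// every frame can produce audible clicks; ramping each gain would sound smoother.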
const processAudio = () => {
if (!analyser) return;
animationFrameId = requestAnimationFrame(processAudio); // Schedule next frame
// 1. FFT Analysis (Data is fetched by drawVisualizer, but could be fetched here too if needed)
// analyser.getByteFrequencyData(frequencyDataArray); // Can be redundant if visualizer runs
// 2. Cleanup Previous Oscillators
activeOscillators.forEach(osc => {
try {
osc.stop();
osc.disconnect();
} catch (e) { /* Ignore */ }
});
activeOscillators = [];
// 3. Create New Oscillators
for (let i = 0; i < numReconstructedOscillators; i++) {
const oscillator = audioContext.createOscillator();
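// Sample roughly every 20th bin (1024 bins / 50 oscillators) across the spectrum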
const frequencyIndex = Math.min(Math.floor(i * (frequencyBinCount / numReconstructedOscillators)), frequencyBinCount - 1);
const magnitude = frequencyDataArray[frequencyIndex]; // Use data from visualizer's last fetch
if (magnitude > 10) { // Magnitude threshold
const frequency = (frequencyIndex * audioContext.sampleRate) / fftSize;
if (frequency > 20 && frequency < audioContext.sampleRate / 2) {
oscillator.type = 'sine';
try {
oscillator.frequency.setValueAtTime(frequency, audioContext.currentTime);
} catch (e) {
logMessage(`Error setting frequency: ${frequency} Hz. ${e}`, true);
continue;
}
const oscGainNode = audioContext.createGain();
// Gain = (normalized magnitude)^2 * 0.8 — squaring emphasizes strong bins while keeping the overall level down
oscGainNode.gain.setValueAtTime((magnitude / 255) ** 2 * 0.8, audioContext.currentTime);
oscillator.connect(oscGainNode);
oscGainNode.connect(mainGainNode); // Connect to main volume control
oscillator.start(audioContext.currentTime);
activeOscillators.push(oscillator);
}
}
}
};
// --- Start Audio Function ---
const startAudio = async () => {
logMessage("Attempting to start audio...");
if (audioContext) {
logMessage("AudioContext already active.");
return;
}
try {
statusDiv.textContent = 'Accessing microphone...';
statusDiv.classList.remove('error');
// 1. Create Audio Context
audioContext = new (window.AudioContext || window.webkitAudioContext)();
logMessage("AudioContext created.");
// 2. Get Microphone Stream
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
logMessage("Microphone access granted.");
// 3. Create Nodes
microphoneSource = audioContext.createMediaStreamSource(stream);
analyser = audioContext.createAnalyser();
analyser.fftSize = fftSize;
mainGainNode = audioContext.createGain();
mainGainNode.gain.setValueAtTime(parseFloat(volumeSlider.value), audioContext.currentTime); // Initial volume (the slider value is a string)
// Reinitialize data array based on actual bin count
frequencyDataArray = new Uint8Array(analyser.frequencyBinCount);
// 4. Connect Nodes: Mic -> Analyser -> Main Gain -> Destination
microphoneSource.connect(analyser);
// Only the *resynthesized* sound (via the oscillators) is routed to mainGainNode; the raw mic signal never reaches the speakers, which avoids feedback
mainGainNode.connect(audioContext.destination);
logMessage("Audio nodes connected.");
// 5. Start Processing and Visualization
processAudio(); // Start resynthesis loop
drawVisualizer(); // Start visualization loop
logMessage("Audio processing and visualization started.");
// 6. Update UI
startButton.disabled = true;
stopButton.disabled = false;
volumeSlider.disabled = false;
statusDiv.textContent = 'Running...';
} catch (err) {
logMessage(`Error starting audio: ${err.message}`, true);
statusDiv.textContent = `Error: ${err.message}`;
statusDiv.classList.add('error');
stopAudio(); // Clean up on error
}
};
// --- Stop Audio Function ---
const stopAudio = () => {
logMessage("Stopping audio...");
if (animationFrameId) {
cancelAnimationFrame(animationFrameId);
animationFrameId = null;
}
if (visualizerFrameId) {
cancelAnimationFrame(visualizerFrameId);
visualizerFrameId = null;
}
// Stop and disconnect oscillators
activeOscillators.forEach(osc => {
try {
osc.stop();
osc.disconnect();
} catch (e) { /* Ignore */ }
});
activeOscillators = [];
// Disconnect main nodes
if (analyser) analyser.disconnect();
if (mainGainNode) mainGainNode.disconnect();
if (microphoneSource) {
microphoneSource.mediaStream.getTracks().forEach(track => track.stop()); // IMPORTANT: Stop mic track
microphoneSource.disconnect();
}
logMessage("Nodes disconnected, microphone track stopped.");
// Close Audio Context
if (audioContext && audioContext.state !== 'closed') {
audioContext.close().then(() => {
logMessage("AudioContext closed.");
});
}
audioContext = null; // Nullify immediately so Start works again right away, whether close() is pending, already done, or never needed
// Clear visualizer
canvasCtx.fillStyle = '#333';
canvasCtx.fillRect(0, 0, canvas.width, canvas.height);
canvasCtx.fillStyle = '#fff';
canvasCtx.textAlign = 'center';
canvasCtx.fillText("Stopped", canvas.width / 2, canvas.height / 2);
// Update UI
startButton.disabled = false;
stopButton.disabled = true;
volumeSlider.disabled = true;
statusDiv.textContent = 'Stopped.';
logMessage("Audio stopped successfully.");
};
// --- Event Listeners ---
startButton.addEventListener('click', startAudio);
stopButton.addEventListener('click', stopAudio);
volumeSlider.addEventListener('input', (event) => {
if (mainGainNode && audioContext) {
mainGainNode.gain.setValueAtTime(parseFloat(event.target.value), audioContext.currentTime);
}
});
// Initial canvas state
canvasCtx.fillStyle = '#333';
canvasCtx.fillRect(0, 0, canvas.width, canvas.height);
canvasCtx.fillStyle = '#fff';
canvasCtx.textAlign = 'center';
canvasCtx.fillText("Waiting to start...", canvas.width / 2, canvas.height / 2);
</script>
</body>
</html>