kimhyunwoo committed on
Commit
390eac0
·
verified ·
1 Parent(s): 6574219

Update index.html

Browse files
Files changed (1) hide show
  1. index.html +205 -128
index.html CHANGED
@@ -1,226 +1,303 @@
1
  <!DOCTYPE html>
2
  <html>
3
  <head>
4
- <title>FFT Analysis and Sine/Cosine Wave Resynthesis (Corrected)</title>
5
  <style>
6
- body { font-family: sans-serif; text-align: center; margin-top: 50px; }
7
- button { padding: 10px 20px; font-size: 16px; cursor: pointer; margin: 5px; }
8
- #status { margin-top: 20px; color: gray; }
 
 
 
9
  .error { color: red; font-weight: bold; }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
10
  </style>
11
  </head>
12
  <body>
13
 
14
- <h1>FFT Analysis and Sine/Cosine Wave Resynthesis</h1>
15
- <p>Takes microphone input, performs FFT analysis, and reconstructs the sound using frequency components.</p>
 
 
 
 
 
 
 
 
 
 
16
 
17
- <button id="startButton">Start Microphone Input and Playback</button>
18
- <button id="stopButton" disabled>Stop</button>
19
  <div id="status">Waiting...</div>
20
 
 
 
 
 
 
21
  <script>
 
 
 
 
 
 
 
 
 
 
22
  let audioContext;
23
  let analyser;
24
- let microphoneSource; // Renamed from 'source' for clarity
25
- let mainGainNode; // Renamed from 'gainNode' for clarity
26
- let animationFrameId = null; // ID for requestAnimationFrame
27
- let activeOscillators = []; // Array to store currently active oscillator nodes
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
28
 
29
- // FFT analysis parameters
30
- const fftSize = 2048; // Number of samples for FFT (power of 2)
31
- // Size of the frequency data array (fftSize / 2)
32
- const frequencyBinCount = fftSize / 2;
33
 
34
- // Array to store FFT analysis results
35
- let frequencyDataArray = new Uint8Array(frequencyBinCount); // Frequency magnitude (0-255)
36
 
37
- // Number of sine/cosine waves to generate for resynthesis
38
- // How many frequency components from the FFT result will be used to reconstruct the sound
39
- const numReconstructedOscillators = 50;
40
 
41
- // Get DOM elements
42
- const startButton = document.getElementById('startButton');
43
- const stopButton = document.getElementById('stopButton');
44
- const statusDiv = document.getElementById('status');
45
 
46
- // ---------- Audio Processing Function ----------
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
47
  const processAudio = () => {
48
- if (!analyser) return; // Exit if analyser is not ready
49
 
50
- // 1. FFT Analysis: Get frequency data
51
- analyser.getByteFrequencyData(frequencyDataArray);
52
 
53
- // 2. Cleanup Previous Oscillators: Stop and disconnect all currently active oscillators
 
 
 
54
  activeOscillators.forEach(osc => {
55
  try {
56
- osc.stop(); // Stop immediately
57
- osc.disconnect(); // Disconnect from all connections
58
- } catch (e) {
59
- // Ignore errors if the oscillator was already stopped or disconnected
60
- // console.warn("Error stopping/disconnecting oscillator:", e);
61
- }
62
  });
63
- activeOscillators = []; // Clear the array
64
 
65
- // 3. Create and Connect New Oscillators based on FFT results
66
- // Analyze frequency components and create a fixed number of oscillators
67
  for (let i = 0; i < numReconstructedOscillators; i++) {
68
  const oscillator = audioContext.createOscillator();
69
-
70
- // Calculate the corresponding frequency index in the data array
71
- // Ensure index doesn't overlap if frequencyBinCount > numReconstructedOscillators
72
  const frequencyIndex = Math.min(Math.floor(i * (frequencyBinCount / numReconstructedOscillators)), frequencyBinCount - 1);
 
73
 
74
- // Get the magnitude (amplitude) of this frequency component
75
- const magnitude = frequencyDataArray[frequencyIndex];
76
-
77
- // Only create an oscillator if the magnitude is above a certain threshold (reduces noise)
78
- if (magnitude > 10) { // Threshold value (adjustable)
79
- // Calculate the actual frequency in Hz
80
  const frequency = (frequencyIndex * audioContext.sampleRate) / fftSize;
81
 
82
- // Optional: Limit the frequency range for playback
83
- if (frequency > 20 && frequency < audioContext.sampleRate / 2) { // Within audible range
84
- oscillator.type = 'sine'; // Waveform type: 'sine', 'square', 'sawtooth', 'triangle', or 'custom'
85
  try {
86
- // Ensure the frequency is valid (must be > 0)
87
- if (frequency > 0) {
88
- oscillator.frequency.setValueAtTime(frequency, audioContext.currentTime);
89
- } else {
90
- continue; // Skip if frequency is invalid
91
- }
92
  } catch (e) {
93
- console.error("Error setting frequency:", e, "Frequency:", frequency);
94
- continue; // Skip this oscillator if there's an error
95
  }
96
 
97
- // Create a gain node for this individual oscillator to control its volume
98
- // Set the volume proportional to the magnitude of the frequency component
99
  const oscGainNode = audioContext.createGain();
100
- // Normalize magnitude (0-255) to a 0-1 range, apply non-linear scaling and overall volume adjustment
101
- oscGainNode.gain.setValueAtTime((magnitude / 255) ** 2 * 0.5, audioContext.currentTime); // Volume control (0.5 is additional damping)
102
 
103
- // Connect: Oscillator -> Individual Gain Node -> Main Gain Node -> Output
104
  oscillator.connect(oscGainNode);
105
- oscGainNode.connect(mainGainNode);
106
 
107
- // Start the oscillator immediately
108
  oscillator.start(audioContext.currentTime);
109
-
110
- // Add the oscillator to the list of active oscillators
111
  activeOscillators.push(oscillator);
112
  }
113
  }
114
  }
115
-
116
- // 4. Request the next frame for continuous processing
117
- animationFrameId = requestAnimationFrame(processAudio);
118
  };
119
 
120
- // ---------- Start Button Click Event Handler ----------
121
- startButton.addEventListener('click', async () => {
122
- try {
123
- if (audioContext) { // If already running, do nothing
124
- console.log("AudioContext is already active.");
125
- return;
126
- }
127
 
 
128
  statusDiv.textContent = 'Accessing microphone...';
129
  statusDiv.classList.remove('error');
130
 
131
  // 1. Create Audio Context
132
  audioContext = new (window.AudioContext || window.webkitAudioContext)();
 
133
 
134
- // 2. Get Microphone Input Stream
135
  const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
 
136
 
137
- // 3. Create and Connect Audio Nodes
138
- microphoneSource = audioContext.createMediaStreamSource(stream); // Microphone source node
139
- analyser = audioContext.createAnalyser(); // Analyser node
140
- analyser.fftSize = fftSize; // Set FFT size
141
- mainGainNode = audioContext.createGain(); // Main volume control node
142
- mainGainNode.gain.setValueAtTime(0.5, audioContext.currentTime); // Set initial volume (0.5)
143
 
144
- // Connect nodes: Microphone Source -> Analyser -> Main Gain -> Destination (Speakers)
145
- microphoneSource.connect(analyser);
146
- // Note: The analyser node itself doesn't output sound directly.
147
- // Connect the source directly to gain if you want to hear the original mic input as well (uncomment below)
148
- // microphoneSource.connect(mainGainNode);
149
- mainGainNode.connect(audioContext.destination); // Connect main gain to speakers
150
-
151
- // Initialize/Reset the frequency data array size based on the analyser
152
  frequencyDataArray = new Uint8Array(analyser.frequencyBinCount);
153
 
154
- // 4. Start Audio Processing Loop
155
- processAudio();
 
 
 
 
 
 
 
 
156
 
157
- // Update UI
158
  startButton.disabled = true;
159
  stopButton.disabled = false;
160
- statusDiv.textContent = 'Microphone input active and playing back...';
 
161
 
162
  } catch (err) {
163
- console.error('Error starting audio:', err);
164
  statusDiv.textContent = `Error: ${err.message}`;
165
  statusDiv.classList.add('error');
166
- // Clean up resources on error
167
- stopAudio();
168
  }
169
- });
170
-
171
- // ---------- Stop Button Click Event Handler ----------
172
- stopButton.addEventListener('click', () => {
173
- stopAudio();
174
- });
175
 
176
- // ---------- Function to Stop Audio and Clean Up Resources ----------
177
  const stopAudio = () => {
 
178
  if (animationFrameId) {
179
- cancelAnimationFrame(animationFrameId); // Stop the animation loop
180
  animationFrameId = null;
181
  }
 
 
 
 
182
 
183
- // Stop and disconnect all active oscillators
184
  activeOscillators.forEach(osc => {
185
  try {
186
  osc.stop();
187
  osc.disconnect();
188
- } catch (e) { /* Ignore errors if already stopped/disconnected */ }
189
  });
190
  activeOscillators = [];
191
 
192
- // Disconnect audio nodes
 
 
193
  if (microphoneSource) {
194
- microphoneSource.disconnect();
195
- // Stop the microphone track(s) in the stream
196
- microphoneSource.mediaStream.getTracks().forEach(track => track.stop());
197
- microphoneSource = null;
198
- }
199
- if (analyser) {
200
- analyser.disconnect();
201
- analyser = null;
202
- }
203
- if (mainGainNode) {
204
- mainGainNode.disconnect();
205
- mainGainNode = null;
206
  }
 
207
 
208
- // Close the Audio Context to release system resources
209
  if (audioContext && audioContext.state !== 'closed') {
210
  audioContext.close().then(() => {
211
- audioContext = null;
212
- console.log("AudioContext closed.");
213
  });
 
 
214
  }
215
 
 
 
 
 
 
 
 
 
 
216
  // Update UI
217
  startButton.disabled = false;
218
  stopButton.disabled = true;
 
219
  statusDiv.textContent = 'Stopped.';
220
- // Reset frequency data array
221
- frequencyDataArray = new Uint8Array(frequencyBinCount);
222
  };
223
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
224
  </script>
225
 
226
  </body>
 
1
  <!DOCTYPE html>
2
  <html>
3
  <head>
4
+ <title>FFT Analysis with Visualization and Controls</title>
5
  <style>
6
+ body { font-family: sans-serif; text-align: center; margin-top: 30px; background-color: #f4f4f4; }
7
+ #controls button { padding: 10px 15px; font-size: 15px; cursor: pointer; margin: 5px; border: none; border-radius: 5px; }
8
+ #startButton { background-color: #4CAF50; color: white; }
9
+ #stopButton { background-color: #f44336; color: white; }
10
+ #startButton:disabled, #stopButton:disabled { background-color: #cccccc; cursor: not-allowed; }
11
+ #status { margin-top: 15px; color: #555; font-weight: bold; min-height: 20px; }
12
  .error { color: red; font-weight: bold; }
13
+ #visualizerCanvas { display: block; margin: 20px auto; background-color: #000; border: 1px solid #ccc; }
14
+ #volumeControl { margin-top: 15px; }
15
+ #logArea {
16
+ margin: 20px auto;
17
+ width: 80%;
18
+ max-width: 600px;
19
+ height: 100px;
20
+ overflow-y: scroll;
21
+ border: 1px solid #ccc;
22
+ background-color: #fff;
23
+ text-align: left;
24
+ padding: 10px;
25
+ font-size: 12px;
26
+ font-family: monospace;
27
+ }
28
+ label { font-size: 14px; margin-right: 5px; }
29
  </style>
30
  </head>
31
  <body>
32
 
33
+ <h1>FFT Analysis with Visualization and Controls</h1>
34
+ <p>Real-time microphone input analysis, FFT visualization, and basic sound resynthesis.</p>
35
+
36
+ <div id="controls">
37
+ <button id="startButton">Start</button>
38
+ <button id="stopButton" disabled>Stop</button>
39
+ </div>
40
+
41
+ <div id="volumeControl">
42
+ <label for="volumeSlider">Volume:</label>
43
+ <input type="range" id="volumeSlider" min="0" max="1" step="0.01" value="0.5" disabled>
44
+ </div>
45
 
 
 
46
  <div id="status">Waiting...</div>
47
 
48
+ <canvas id="visualizerCanvas" width="600" height="200"></canvas>
49
+
50
+ <label for="logArea">Log:</label>
51
+ <div id="logArea"></div>
52
+
53
  <script>
54
+ // --- DOM Elements ---
55
+ const startButton = document.getElementById('startButton');
56
+ const stopButton = document.getElementById('stopButton');
57
+ const statusDiv = document.getElementById('status');
58
+ const canvas = document.getElementById('visualizerCanvas');
59
+ const canvasCtx = canvas.getContext('2d');
60
+ const volumeSlider = document.getElementById('volumeSlider');
61
+ const logArea = document.getElementById('logArea');
62
+
63
+ // --- Web Audio API Variables ---
64
  let audioContext;
65
  let analyser;
66
+ let microphoneSource;
67
+ let mainGainNode;
68
+ let animationFrameId = null; // For processAudio loop
69
+ let visualizerFrameId = null; // For drawVisualizer loop
70
+ let activeOscillators = [];
71
+
72
+ // --- Configuration ---
73
+ const fftSize = 2048;
74
+ const frequencyBinCount = fftSize / 2; // analyser.frequencyBinCount
75
+ let frequencyDataArray = new Uint8Array(frequencyBinCount);
76
+ const numReconstructedOscillators = 50; // Number of oscillators for resynthesis
77
+
78
// --- Logging Function ---
// Appends a timestamped entry to the on-page log area and mirrors it to
// the browser console; error entries are rendered in red.
const logMessage = (message, isError = false) => {
  if (isError) {
    console.error(message);
  } else {
    console.log(message);
  }
  const entry = document.createElement('div');
  const stamp = new Date().toLocaleTimeString();
  entry.textContent = `[${stamp}] ${message}`;
  if (isError) {
    entry.style.color = 'red';
  }
  logArea.appendChild(entry);
  // Keep the newest entry visible.
  logArea.scrollTop = logArea.scrollHeight;
};
89
 
90
// --- Visualization Function ---
// Draws the current FFT magnitudes as colored frequency bars on the
// canvas, then schedules itself on the next animation frame.
const drawVisualizer = () => {
  if (!analyser) return;

  // Schedule the next frame before doing any work.
  visualizerFrameId = requestAnimationFrame(drawVisualizer);

  // Refresh the shared frequency-magnitude buffer (one byte per bin, 0-255).
  analyser.getByteFrequencyData(frequencyDataArray);

  // Wipe the previous frame with a black background.
  canvasCtx.fillStyle = '#000';
  canvasCtx.fillRect(0, 0, canvas.width, canvas.height);

  // Bar width multiplier controls how densely bars are packed.
  const barWidth = (canvas.width / frequencyBinCount) * 2.5;
  let x = 0;

  for (let i = 0; i < frequencyBinCount; i++) {
    // Scale the 0-255 magnitude to the canvas height.
    const barHeight = frequencyDataArray[i] * (canvas.height / 255);

    // Map bar height onto a blue-to-red hue sweep (240 = blue, 0 = red).
    const hue = (barHeight / canvas.height) * 240;
    canvasCtx.fillStyle = `hsl(${240 - hue}, 100%, 50%)`;

    canvasCtx.fillRect(x, canvas.height - barHeight, barWidth, barHeight);

    // Advance to the next bar position (1px gap).
    x += barWidth + 1;
  }
};
120
+
121
// --- Audio Processing Function (Resynthesis) ---
// Rebuilds an approximation of the microphone input each animation frame
// by spawning one sine oscillator per selected FFT bin, with per-bin gain
// proportional to that bin's magnitude.
const processAudio = () => {
  if (!analyser) return;

  animationFrameId = requestAnimationFrame(processAudio); // Schedule next frame

  // 1. FFT Analysis: fetch fresh data here so resynthesis does not depend
  // on the visualizer loop having run first (they were coupled before).
  analyser.getByteFrequencyData(frequencyDataArray);

  // 2. Cleanup previous oscillators
  activeOscillators.forEach(osc => {
    try {
      osc.stop();
      osc.disconnect();
    } catch (e) { /* Ignore: already stopped/disconnected */ }
  });
  activeOscillators = [];

  // 3. Create new oscillators for bins above the noise threshold
  for (let i = 0; i < numReconstructedOscillators; i++) {
    // Map the i-th oscillator slot onto an FFT bin, clamped to the last bin.
    const frequencyIndex = Math.min(Math.floor(i * (frequencyBinCount / numReconstructedOscillators)), frequencyBinCount - 1);
    const magnitude = frequencyDataArray[frequencyIndex];

    if (magnitude <= 10) continue; // Magnitude threshold (reduces noise)

    const frequency = (frequencyIndex * audioContext.sampleRate) / fftSize;

    // Restrict to the audible range below Nyquist.
    if (frequency <= 20 || frequency >= audioContext.sampleRate / 2) continue;

    // FIX: only allocate the oscillator once we know it will be used.
    // The original called createOscillator() every iteration and leaked
    // the unused nodes (up to 50 per frame) when the bin was skipped.
    const oscillator = audioContext.createOscillator();
    oscillator.type = 'sine';
    try {
      oscillator.frequency.setValueAtTime(frequency, audioContext.currentTime);
    } catch (e) {
      logMessage(`Error setting frequency: ${frequency} Hz. ${e}`, true);
      continue;
    }

    // Per-oscillator gain: magnitude normalized to 0-1, squared for
    // emphasis, then scaled down.
    const oscGainNode = audioContext.createGain();
    oscGainNode.gain.setValueAtTime((magnitude / 255) ** 2 * 0.8, audioContext.currentTime); // Adjusted scaling

    oscillator.connect(oscGainNode);
    oscGainNode.connect(mainGainNode); // Connect to main volume control

    oscillator.start(audioContext.currentTime);
    activeOscillators.push(oscillator);
  }
};
172
 
173
// --- Start Audio Function ---
// Requests microphone access, builds the audio graph
// (mic -> analyser; oscillators -> main gain -> speakers), and starts the
// resynthesis and visualization loops. No-op if a context already exists.
const startAudio = async () => {
  logMessage("Attempting to start audio...");
  if (audioContext) {
    logMessage("AudioContext already active.");
    return;
  }

  try {
    statusDiv.textContent = 'Accessing microphone...';
    statusDiv.classList.remove('error');

    // 1. Create Audio Context
    audioContext = new (window.AudioContext || window.webkitAudioContext)();
    logMessage("AudioContext created.");

    // 2. Get Microphone Stream (rejects if permission is denied)
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    logMessage("Microphone access granted.");

    // 3. Create Nodes
    microphoneSource = audioContext.createMediaStreamSource(stream);
    analyser = audioContext.createAnalyser();
    analyser.fftSize = fftSize;
    mainGainNode = audioContext.createGain();
    // FIX: slider values are strings; coerce explicitly before handing the
    // value to the Web Audio API (setValueAtTime expects a float).
    mainGainNode.gain.setValueAtTime(Number(volumeSlider.value), audioContext.currentTime); // Initial volume

    // Reinitialize data array based on the analyser's actual bin count
    frequencyDataArray = new Uint8Array(analyser.frequencyBinCount);

    // 4. Connect Nodes: Mic -> Analyser -> (resynthesis) -> Main Gain -> Destination
    microphoneSource.connect(analyser);
    // Only the *resynthesized* sound (via oscillators) feeds mainGainNode.
    mainGainNode.connect(audioContext.destination);
    logMessage("Audio nodes connected.");

    // 5. Start Processing and Visualization
    processAudio(); // Start resynthesis loop
    drawVisualizer(); // Start visualization loop
    logMessage("Audio processing and visualization started.");

    // 6. Update UI
    startButton.disabled = true;
    stopButton.disabled = false;
    volumeSlider.disabled = false;
    statusDiv.textContent = 'Running...';

  } catch (err) {
    logMessage(`Error starting audio: ${err.message}`, true);
    statusDiv.textContent = `Error: ${err.message}`;
    statusDiv.classList.add('error');
    stopAudio(); // Clean up any partially-built graph on error
  }
};
 
 
 
 
 
227
 
228
// --- Stop Audio Function ---
// Cancels both animation loops, tears down every audio node, releases the
// microphone, closes the AudioContext, and resets the UI. Safe to call
// when nothing is running.
const stopAudio = () => {
  logMessage("Stopping audio...");
  if (animationFrameId) {
    cancelAnimationFrame(animationFrameId);
    animationFrameId = null;
  }
  if (visualizerFrameId) {
    cancelAnimationFrame(visualizerFrameId);
    visualizerFrameId = null;
  }

  // Stop and disconnect oscillators
  activeOscillators.forEach(osc => {
    try {
      osc.stop();
      osc.disconnect();
    } catch (e) { /* Ignore: already stopped/disconnected */ }
  });
  activeOscillators = [];

  // Disconnect main nodes.
  // FIX: also null the references — the original left stale nodes from the
  // closed context reachable, so the processing loops' `!analyser` guards
  // never tripped and a later session could touch dead nodes.
  if (analyser) {
    analyser.disconnect();
    analyser = null;
  }
  if (mainGainNode) {
    mainGainNode.disconnect();
    mainGainNode = null;
  }
  if (microphoneSource) {
    microphoneSource.mediaStream.getTracks().forEach(track => track.stop()); // IMPORTANT: Stop mic track
    microphoneSource.disconnect();
    microphoneSource = null;
  }
  logMessage("Nodes disconnected, microphone track stopped.");

  // Close Audio Context.
  // FIX: clear the reference synchronously — the original nulled it only
  // inside the async .then(), so clicking Start again immediately after
  // Stop hit startAudio's `if (audioContext)` guard and silently failed.
  if (audioContext && audioContext.state !== 'closed') {
    const closingContext = audioContext;
    audioContext = null;
    closingContext.close().then(() => {
      logMessage("AudioContext closed.");
    });
  } else {
    audioContext = null; // Already closed or never opened
  }

  // Clear visualizer
  canvasCtx.fillStyle = '#333';
  canvasCtx.fillRect(0, 0, canvas.width, canvas.height);
  canvasCtx.fillStyle = '#fff';
  canvasCtx.textAlign = 'center';
  canvasCtx.fillText("Stopped", canvas.width / 2, canvas.height / 2);

  // Update UI
  startButton.disabled = false;
  stopButton.disabled = true;
  volumeSlider.disabled = true;
  statusDiv.textContent = 'Stopped.';
  logMessage("Audio stopped successfully.");
};
284
 
285
// --- Event Listeners ---
startButton.addEventListener('click', startAudio);
stopButton.addEventListener('click', stopAudio);
volumeSlider.addEventListener('input', (event) => {
  // FIX: also guard audioContext (it is null after stop — dereferencing
  // .currentTime would throw), and coerce the slider's string value to a
  // number for the Web Audio API.
  if (mainGainNode && audioContext) {
    mainGainNode.gain.setValueAtTime(Number(event.target.value), audioContext.currentTime);
  }
});

// Initial canvas state: placeholder message until the user clicks Start.
canvasCtx.fillStyle = '#333';
canvasCtx.fillRect(0, 0, canvas.width, canvas.height);
canvasCtx.fillStyle = '#fff';
canvasCtx.textAlign = 'center';
canvasCtx.fillText("Waiting to start...", canvas.width / 2, canvas.height / 2);
300
+
301
  </script>
302
 
303
  </body>