Spaces:
Running
Running
Update index.html
Browse files- index.html +205 -128
index.html
CHANGED
@@ -1,226 +1,303 @@
|
|
1 |
<!DOCTYPE html>
|
2 |
<html>
|
3 |
<head>
|
4 |
-
<title>FFT Analysis
|
5 |
<style>
|
6 |
-
body { font-family: sans-serif; text-align: center; margin-top:
|
7 |
-
button { padding: 10px
|
8 |
-
#
|
|
|
|
|
|
|
9 |
.error { color: red; font-weight: bold; }
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
10 |
</style>
|
11 |
</head>
|
12 |
<body>
|
13 |
|
14 |
-
<h1>FFT Analysis
|
15 |
-
<p>
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
16 |
|
17 |
-
<button id="startButton">Start Microphone Input and Playback</button>
|
18 |
-
<button id="stopButton" disabled>Stop</button>
|
19 |
<div id="status">Waiting...</div>
|
20 |
|
|
|
|
|
|
|
|
|
|
|
21 |
<script>
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
22 |
let audioContext;
|
23 |
let analyser;
|
24 |
-
let microphoneSource;
|
25 |
-
let mainGainNode;
|
26 |
-
let animationFrameId = null; //
|
27 |
-
let
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
28 |
|
29 |
-
//
|
30 |
-
const
|
31 |
-
|
32 |
-
const frequencyBinCount = fftSize / 2;
|
33 |
|
34 |
-
|
35 |
-
let frequencyDataArray = new Uint8Array(frequencyBinCount); // Frequency magnitude (0-255)
|
36 |
|
37 |
-
|
38 |
-
|
39 |
-
const numReconstructedOscillators = 50;
|
40 |
|
41 |
-
|
42 |
-
|
43 |
-
|
44 |
-
const statusDiv = document.getElementById('status');
|
45 |
|
46 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
47 |
const processAudio = () => {
|
48 |
-
if (!analyser) return;
|
49 |
|
50 |
-
|
51 |
-
analyser.getByteFrequencyData(frequencyDataArray);
|
52 |
|
53 |
-
//
|
|
|
|
|
|
|
54 |
activeOscillators.forEach(osc => {
|
55 |
try {
|
56 |
-
osc.stop();
|
57 |
-
osc.disconnect();
|
58 |
-
} catch (e) {
|
59 |
-
// Ignore errors if the oscillator was already stopped or disconnected
|
60 |
-
// console.warn("Error stopping/disconnecting oscillator:", e);
|
61 |
-
}
|
62 |
});
|
63 |
-
activeOscillators = [];
|
64 |
|
65 |
-
// 3. Create
|
66 |
-
// Analyze frequency components and create a fixed number of oscillators
|
67 |
for (let i = 0; i < numReconstructedOscillators; i++) {
|
68 |
const oscillator = audioContext.createOscillator();
|
69 |
-
|
70 |
-
// Calculate the corresponding frequency index in the data array
|
71 |
-
// Ensure index doesn't overlap if frequencyBinCount > numReconstructedOscillators
|
72 |
const frequencyIndex = Math.min(Math.floor(i * (frequencyBinCount / numReconstructedOscillators)), frequencyBinCount - 1);
|
|
|
73 |
|
74 |
-
|
75 |
-
const magnitude = frequencyDataArray[frequencyIndex];
|
76 |
-
|
77 |
-
// Only create an oscillator if the magnitude is above a certain threshold (reduces noise)
|
78 |
-
if (magnitude > 10) { // Threshold value (adjustable)
|
79 |
-
// Calculate the actual frequency in Hz
|
80 |
const frequency = (frequencyIndex * audioContext.sampleRate) / fftSize;
|
81 |
|
82 |
-
|
83 |
-
|
84 |
-
oscillator.type = 'sine'; // Waveform type: 'sine', 'square', 'sawtooth', 'triangle', or 'custom'
|
85 |
try {
|
86 |
-
|
87 |
-
|
88 |
-
|
89 |
-
} else {
|
90 |
-
continue; // Skip if frequency is invalid
|
91 |
-
}
|
92 |
} catch (e) {
|
93 |
-
|
94 |
-
continue;
|
95 |
}
|
96 |
|
97 |
-
// Create a gain node for this individual oscillator to control its volume
|
98 |
-
// Set the volume proportional to the magnitude of the frequency component
|
99 |
const oscGainNode = audioContext.createGain();
|
100 |
-
//
|
101 |
-
oscGainNode.gain.setValueAtTime((magnitude / 255) ** 2 * 0.
|
102 |
|
103 |
-
// Connect: Oscillator -> Individual Gain Node -> Main Gain Node -> Output
|
104 |
oscillator.connect(oscGainNode);
|
105 |
-
oscGainNode.connect(mainGainNode);
|
106 |
|
107 |
-
// Start the oscillator immediately
|
108 |
oscillator.start(audioContext.currentTime);
|
109 |
-
|
110 |
-
// Add the oscillator to the list of active oscillators
|
111 |
activeOscillators.push(oscillator);
|
112 |
}
|
113 |
}
|
114 |
}
|
115 |
-
|
116 |
-
// 4. Request the next frame for continuous processing
|
117 |
-
animationFrameId = requestAnimationFrame(processAudio);
|
118 |
};
|
119 |
|
120 |
-
//
|
121 |
-
|
122 |
-
|
123 |
-
|
124 |
-
|
125 |
-
|
126 |
-
|
127 |
|
|
|
128 |
statusDiv.textContent = 'Accessing microphone...';
|
129 |
statusDiv.classList.remove('error');
|
130 |
|
131 |
// 1. Create Audio Context
|
132 |
audioContext = new (window.AudioContext || window.webkitAudioContext)();
|
|
|
133 |
|
134 |
-
// 2. Get Microphone
|
135 |
const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
|
|
|
136 |
|
137 |
-
// 3. Create
|
138 |
-
microphoneSource = audioContext.createMediaStreamSource(stream);
|
139 |
-
analyser = audioContext.createAnalyser();
|
140 |
-
analyser.fftSize = fftSize;
|
141 |
-
mainGainNode = audioContext.createGain();
|
142 |
-
mainGainNode.gain.setValueAtTime(
|
143 |
|
144 |
-
//
|
145 |
-
microphoneSource.connect(analyser);
|
146 |
-
// Note: The analyser node itself doesn't output sound directly.
|
147 |
-
// Connect the source directly to gain if you want to hear the original mic input as well (uncomment below)
|
148 |
-
// microphoneSource.connect(mainGainNode);
|
149 |
-
mainGainNode.connect(audioContext.destination); // Connect main gain to speakers
|
150 |
-
|
151 |
-
// Initialize/Reset the frequency data array size based on the analyser
|
152 |
frequencyDataArray = new Uint8Array(analyser.frequencyBinCount);
|
153 |
|
154 |
-
// 4.
|
155 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
156 |
|
157 |
-
// Update UI
|
158 |
startButton.disabled = true;
|
159 |
stopButton.disabled = false;
|
160 |
-
|
|
|
161 |
|
162 |
} catch (err) {
|
163 |
-
|
164 |
statusDiv.textContent = `Error: ${err.message}`;
|
165 |
statusDiv.classList.add('error');
|
166 |
-
// Clean up
|
167 |
-
stopAudio();
|
168 |
}
|
169 |
-
}
|
170 |
-
|
171 |
-
// ---------- Stop Button Click Event Handler ----------
|
172 |
-
stopButton.addEventListener('click', () => {
|
173 |
-
stopAudio();
|
174 |
-
});
|
175 |
|
176 |
-
//
|
177 |
const stopAudio = () => {
|
|
|
178 |
if (animationFrameId) {
|
179 |
-
cancelAnimationFrame(animationFrameId);
|
180 |
animationFrameId = null;
|
181 |
}
|
|
|
|
|
|
|
|
|
182 |
|
183 |
-
// Stop and disconnect
|
184 |
activeOscillators.forEach(osc => {
|
185 |
try {
|
186 |
osc.stop();
|
187 |
osc.disconnect();
|
188 |
-
} catch (e) { /* Ignore
|
189 |
});
|
190 |
activeOscillators = [];
|
191 |
|
192 |
-
// Disconnect
|
|
|
|
|
193 |
if (microphoneSource) {
|
194 |
-
|
195 |
-
|
196 |
-
microphoneSource.mediaStream.getTracks().forEach(track => track.stop());
|
197 |
-
microphoneSource = null;
|
198 |
-
}
|
199 |
-
if (analyser) {
|
200 |
-
analyser.disconnect();
|
201 |
-
analyser = null;
|
202 |
-
}
|
203 |
-
if (mainGainNode) {
|
204 |
-
mainGainNode.disconnect();
|
205 |
-
mainGainNode = null;
|
206 |
}
|
|
|
207 |
|
208 |
-
// Close
|
209 |
if (audioContext && audioContext.state !== 'closed') {
|
210 |
audioContext.close().then(() => {
|
211 |
-
|
212 |
-
|
213 |
});
|
|
|
|
|
214 |
}
|
215 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
216 |
// Update UI
|
217 |
startButton.disabled = false;
|
218 |
stopButton.disabled = true;
|
|
|
219 |
statusDiv.textContent = 'Stopped.';
|
220 |
-
|
221 |
-
frequencyDataArray = new Uint8Array(frequencyBinCount);
|
222 |
};
|
223 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
224 |
</script>
|
225 |
|
226 |
</body>
|
|
|
1 |
<!DOCTYPE html>
|
2 |
<html>
|
3 |
<head>
|
4 |
+
<title>FFT Analysis with Visualization and Controls</title>
|
5 |
<style>
|
6 |
+
body { font-family: sans-serif; text-align: center; margin-top: 30px; background-color: #f4f4f4; }
|
7 |
+
#controls button { padding: 10px 15px; font-size: 15px; cursor: pointer; margin: 5px; border: none; border-radius: 5px; }
|
8 |
+
#startButton { background-color: #4CAF50; color: white; }
|
9 |
+
#stopButton { background-color: #f44336; color: white; }
|
10 |
+
#startButton:disabled, #stopButton:disabled { background-color: #cccccc; cursor: not-allowed; }
|
11 |
+
#status { margin-top: 15px; color: #555; font-weight: bold; min-height: 20px; }
|
12 |
.error { color: red; font-weight: bold; }
|
13 |
+
#visualizerCanvas { display: block; margin: 20px auto; background-color: #000; border: 1px solid #ccc; }
|
14 |
+
#volumeControl { margin-top: 15px; }
|
15 |
+
#logArea {
|
16 |
+
margin: 20px auto;
|
17 |
+
width: 80%;
|
18 |
+
max-width: 600px;
|
19 |
+
height: 100px;
|
20 |
+
overflow-y: scroll;
|
21 |
+
border: 1px solid #ccc;
|
22 |
+
background-color: #fff;
|
23 |
+
text-align: left;
|
24 |
+
padding: 10px;
|
25 |
+
font-size: 12px;
|
26 |
+
font-family: monospace;
|
27 |
+
}
|
28 |
+
label { font-size: 14px; margin-right: 5px; }
|
29 |
</style>
|
30 |
</head>
|
31 |
<body>
|
32 |
|
33 |
+
<h1>FFT Analysis with Visualization and Controls</h1>
|
34 |
+
<p>Real-time microphone input analysis, FFT visualization, and basic sound resynthesis.</p>
|
35 |
+
|
36 |
+
<div id="controls">
|
37 |
+
<button id="startButton">Start</button>
|
38 |
+
<button id="stopButton" disabled>Stop</button>
|
39 |
+
</div>
|
40 |
+
|
41 |
+
<div id="volumeControl">
|
42 |
+
<label for="volumeSlider">Volume:</label>
|
43 |
+
<input type="range" id="volumeSlider" min="0" max="1" step="0.01" value="0.5" disabled>
|
44 |
+
</div>
|
45 |
|
|
|
|
|
46 |
<div id="status">Waiting...</div>
|
47 |
|
48 |
+
<canvas id="visualizerCanvas" width="600" height="200"></canvas>
|
49 |
+
|
50 |
+
<label for="logArea">Log:</label>
|
51 |
+
<div id="logArea"></div>
|
52 |
+
|
53 |
<script>
|
54 |
+
// --- DOM Elements ---
// Cached references to page controls; the script assumes these element IDs
// exist in the surrounding HTML document.
const startButton = document.getElementById('startButton');
const stopButton = document.getElementById('stopButton');
const statusDiv = document.getElementById('status');
const canvas = document.getElementById('visualizerCanvas');
const canvasCtx = canvas.getContext('2d');
const volumeSlider = document.getElementById('volumeSlider');
const logArea = document.getElementById('logArea');

// --- Web Audio API Variables ---
let audioContext;             // AudioContext; created on Start, closed on Stop
let analyser;                 // AnalyserNode fed by the microphone source
let microphoneSource;         // MediaStreamAudioSourceNode wrapping the mic stream
let mainGainNode;             // Master gain for the resynthesized output volume
let animationFrameId = null;  // For processAudio loop
let visualizerFrameId = null; // For drawVisualizer loop
let activeOscillators = [];   // Oscillators created during the last processAudio pass

// --- Configuration ---
const fftSize = 2048;
const frequencyBinCount = fftSize / 2; // analyser.frequencyBinCount
let frequencyDataArray = new Uint8Array(frequencyBinCount); // 0-255 magnitudes, refreshed each frame
const numReconstructedOscillators = 50; // Number of oscillators for resynthesis
|
77 |
+
|
78 |
+
// --- Logging Function ---
// Writes a timestamped entry to both the browser console and the on-page
// log area. Errors go through console.error and are rendered in red.
const logMessage = (message, isError = false) => {
    if (isError) {
        console.error(message);
    } else {
        console.log(message);
    }

    const entry = document.createElement('div');
    const stamp = new Date().toLocaleTimeString();
    entry.textContent = `[${stamp}] ${message}`;
    if (isError) {
        entry.style.color = 'red';
    }

    logArea.appendChild(entry);
    // Keep the newest entry visible.
    logArea.scrollTop = logArea.scrollHeight;
};
|
89 |
|
90 |
+
// --- Visualization Function ---
// Renders the current FFT magnitude spectrum as vertical bars on the canvas.
// Re-schedules itself via requestAnimationFrame until the analyser is torn down.
const drawVisualizer = () => {
    if (!analyser) return;

    visualizerFrameId = requestAnimationFrame(drawVisualizer); // Schedule next frame

    // Refresh the shared magnitude buffer (0-255 per bin).
    analyser.getByteFrequencyData(frequencyDataArray);

    // Wipe the previous frame with a black background.
    canvasCtx.fillStyle = '#000';
    canvasCtx.fillRect(0, 0, canvas.width, canvas.height);

    const barWidth = (canvas.width / frequencyBinCount) * 2.5; // Bar density factor
    let x = 0;

    for (let i = 0; i < frequencyBinCount; i++) {
        const barHeight = frequencyDataArray[i] * (canvas.height / 255);

        // Hue sweeps from blue (quiet) toward red (loud).
        const hue = (barHeight / canvas.height) * 240;
        canvasCtx.fillStyle = `hsl(${240 - hue}, 100%, 50%)`;

        canvasCtx.fillRect(x, canvas.height - barHeight, barWidth, barHeight);
        x += barWidth + 1; // Advance past this bar plus a 1px gap
    }
};
|
120 |
+
|
121 |
+
// --- Audio Processing Function (Resynthesis) ---
// Rebuilds a bank of sine oscillators each animation frame to approximate the
// spectrum captured by the analyser. Magnitude data comes from the shared
// frequencyDataArray (refreshed by drawVisualizer's last fetch). Loops via
// requestAnimationFrame until stopAudio() cancels animationFrameId.
const processAudio = () => {
    if (!analyser) return;

    animationFrameId = requestAnimationFrame(processAudio); // Schedule next frame

    // 1. Cleanup: silence and detach the oscillators from the previous frame.
    activeOscillators.forEach(osc => {
        try {
            osc.stop();
            osc.disconnect();
        } catch (e) { /* Ignore: already stopped/disconnected */ }
    });
    activeOscillators = [];

    // 2. Resynthesis: one oscillator per sampled bin, created only for bins
    // that pass the thresholds. (Fix: the previous version constructed an
    // OscillatorNode for every slot — 50 per frame — before checking anything,
    // and carried a redundant `frequency > 0` test inside a branch already
    // guarded by `frequency > 20`.)
    for (let i = 0; i < numReconstructedOscillators; i++) {
        // Map oscillator slot i onto a frequency bin, clamped to the last bin.
        const frequencyIndex = Math.min(Math.floor(i * (frequencyBinCount / numReconstructedOscillators)), frequencyBinCount - 1);
        const magnitude = frequencyDataArray[frequencyIndex]; // Use data from visualizer's last fetch
        if (magnitude <= 10) continue; // Magnitude threshold (noise gate)

        // Convert the bin index back to a frequency in Hz.
        const frequency = (frequencyIndex * audioContext.sampleRate) / fftSize;
        // Keep only audible frequencies below the Nyquist limit.
        if (frequency <= 20 || frequency >= audioContext.sampleRate / 2) continue;

        const oscillator = audioContext.createOscillator();
        oscillator.type = 'sine';
        try {
            oscillator.frequency.setValueAtTime(frequency, audioContext.currentTime);
        } catch (e) {
            logMessage(`Error setting frequency: ${frequency} Hz. ${e}`, true);
            continue;
        }

        // Per-oscillator gain: magnitude squared for emphasis, scaled down.
        const oscGainNode = audioContext.createGain();
        oscGainNode.gain.setValueAtTime((magnitude / 255) ** 2 * 0.8, audioContext.currentTime);

        // Oscillator -> individual gain -> main gain (master volume control).
        oscillator.connect(oscGainNode);
        oscGainNode.connect(mainGainNode);

        oscillator.start(audioContext.currentTime);
        activeOscillators.push(oscillator);
    }
};
|
172 |
|
173 |
+
// --- Start Audio Function ---
// Requests microphone access, wires up the Web Audio graph
// (mic -> analyser; resynthesis oscillators -> main gain -> speakers),
// and starts the processing and visualization loops.
// Errors are logged, surfaced in the status div, and trigger cleanup.
const startAudio = async () => {
    logMessage("Attempting to start audio...");
    if (audioContext) {
        logMessage("AudioContext already active.");
        return;
    }

    try {
        statusDiv.textContent = 'Accessing microphone...';
        statusDiv.classList.remove('error');

        // 1. Create Audio Context (webkit prefix for older Safari)
        audioContext = new (window.AudioContext || window.webkitAudioContext)();
        logMessage("AudioContext created.");

        // 2. Get Microphone Stream (may reject: permission denied / no device)
        const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
        logMessage("Microphone access granted.");

        // 3. Create Nodes
        microphoneSource = audioContext.createMediaStreamSource(stream);
        analyser = audioContext.createAnalyser();
        analyser.fftSize = fftSize;
        mainGainNode = audioContext.createGain();
        // Fix: input.value is a string — convert explicitly rather than
        // relying on implicit coercion inside setValueAtTime.
        mainGainNode.gain.setValueAtTime(Number(volumeSlider.value), audioContext.currentTime); // Initial volume

        // Reinitialize data array based on the analyser's actual bin count
        frequencyDataArray = new Uint8Array(analyser.frequencyBinCount);

        // 4. Connect Nodes: Mic -> Analyser; Main Gain -> Destination.
        // Only the *resynthesized* sound (oscillators) feeds mainGainNode,
        // so the raw mic input is analyzed but never played directly.
        microphoneSource.connect(analyser);
        mainGainNode.connect(audioContext.destination);
        logMessage("Audio nodes connected.");

        // 5. Start Processing and Visualization loops
        processAudio(); // Start resynthesis loop
        drawVisualizer(); // Start visualization loop
        logMessage("Audio processing and visualization started.");

        // 6. Update UI
        startButton.disabled = true;
        stopButton.disabled = false;
        volumeSlider.disabled = false;
        statusDiv.textContent = 'Running...';
    } catch (err) {
        logMessage(`Error starting audio: ${err.message}`, true);
        statusDiv.textContent = `Error: ${err.message}`;
        statusDiv.classList.add('error');
        stopAudio(); // Clean up on error
    }
};
|
|
|
|
|
|
|
|
|
|
|
227 |
|
228 |
+
// --- Stop Audio Function ---
// Tears everything down: cancels both animation loops, stops and detaches
// all oscillators, disconnects the graph, stops the microphone track,
// closes the AudioContext, and resets the UI and canvas.
const stopAudio = () => {
    logMessage("Stopping audio...");
    if (animationFrameId) {
        cancelAnimationFrame(animationFrameId);
        animationFrameId = null;
    }
    if (visualizerFrameId) {
        cancelAnimationFrame(visualizerFrameId);
        visualizerFrameId = null;
    }

    // Stop and disconnect oscillators
    activeOscillators.forEach(osc => {
        try {
            osc.stop();
            osc.disconnect();
        } catch (e) { /* Ignore */ }
    });
    activeOscillators = [];

    // Disconnect main nodes and null them out (fix: the previous version left
    // stale references behind, so the guards in startAudio/processAudio and
    // the volume handler could see half-torn-down nodes after a stop).
    if (analyser) {
        analyser.disconnect();
        analyser = null;
    }
    if (mainGainNode) {
        mainGainNode.disconnect();
        mainGainNode = null;
    }
    if (microphoneSource) {
        microphoneSource.mediaStream.getTracks().forEach(track => track.stop()); // IMPORTANT: Stop mic track
        microphoneSource.disconnect();
        microphoneSource = null;
    }
    logMessage("Nodes disconnected, microphone track stopped.");

    // Close Audio Context
    if (audioContext && audioContext.state !== 'closed') {
        audioContext.close().then(() => {
            logMessage("AudioContext closed.");
            audioContext = null; // Ensure it's nullified
        });
    } else {
        audioContext = null; // Ensure it's nullified if already closed or never opened
    }

    // Clear visualizer
    canvasCtx.fillStyle = '#333';
    canvasCtx.fillRect(0, 0, canvas.width, canvas.height);
    canvasCtx.fillStyle = '#fff';
    canvasCtx.textAlign = 'center';
    canvasCtx.fillText("Stopped", canvas.width / 2, canvas.height / 2);

    // Update UI
    startButton.disabled = false;
    stopButton.disabled = true;
    volumeSlider.disabled = true;
    statusDiv.textContent = 'Stopped.';
    logMessage("Audio stopped successfully.");
};
|
284 |
|
285 |
+
// --- Event Listeners ---
startButton.addEventListener('click', startAudio);
stopButton.addEventListener('click', stopAudio);
volumeSlider.addEventListener('input', (event) => {
    // Fix: guard audioContext as well — stopAudio nullifies audioContext
    // (asynchronously, after close()) without nulling mainGainNode, so
    // checking mainGainNode alone could dereference a null audioContext.
    if (mainGainNode && audioContext) {
        // input.value is a string; convert explicitly for the AudioParam API.
        mainGainNode.gain.setValueAtTime(Number(event.target.value), audioContext.currentTime);
    }
});

// Initial canvas state
canvasCtx.fillStyle = '#333';
canvasCtx.fillRect(0, 0, canvas.width, canvas.height);
canvasCtx.fillStyle = '#fff';
canvasCtx.textAlign = 'center';
canvasCtx.fillText("Waiting to start...", canvas.width / 2, canvas.height / 2);
|
300 |
+
|
301 |
</script>
|
302 |
|
303 |
</body>
|