omni-docker / webui / index.html
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>My Web Page</title>
</head>
<body style="background-color: black; color: white;">
<div id="svg-container"></div>
<button id="play-button">play</button>
<main>
<p>Current status: listening</p>
</main>
<script>
// Load the SVG
const svgContainer = document.getElementById('svg-container');
const svgContent = `
<svg width="800" height="600" viewBox="0 0 800 600" xmlns="http://www.w3.org/2000/svg">
<ellipse id="left-eye" cx="340" cy="200" rx="20" ry="20" fill="white"/>
<circle id="left-pupil" cx="340" cy="200" r="8" fill="black"/>
<ellipse id="right-eye" cx="460" cy="200" rx="20" ry="20" fill="white"/>
<circle id="right-pupil" cx="460" cy="200" r="8" fill="black"/>
<path id="upper-lip" d="M 300 300 C 350 284, 450 284, 500 300" stroke="white" stroke-width="10" fill="none"/>
<path id="lower-lip" d="M 300 300 C 350 316, 450 316, 500 300" stroke="white" stroke-width="10" fill="none"/>
</svg>`;
svgContainer.innerHTML = svgContent;
// Set up audio context
const audioContext = new (window.AudioContext || window.webkitAudioContext)();
const analyser = audioContext.createAnalyser();
analyser.fftSize = 256;
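// With fftSize 256, frequencyBinCount is 128 bins covering 0 Hz up to half the sample rate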
// Load audio file
const audio = new Audio('/heath_ledger.mp3');
const source = audioContext.createMediaElementSource(audio);
source.connect(analyser);
analyser.connect(audioContext.destination);
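// Routing source -> analyser -> destination keeps the track audible while we read its spectrum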
// Animation variables
let isAudioPlaying = false;
let lastBlinkTime = 0;
let eyeMovementOffset = { x: 0, y: 0 };
// Idle eye animation function
function animateIdleEyes(timestamp) {
const leftEye = document.getElementById('left-eye');
const rightEye = document.getElementById('right-eye');
const leftPupil = document.getElementById('left-pupil');
const rightPupil = document.getElementById('right-pupil');
const baseEyeX = { left: 340, right: 460 };
const baseEyeY = 200;
// Blink effect
const blinkInterval = 4000 + Math.random() * 2000; // Random blink interval between 4-6 seconds
if (timestamp - lastBlinkTime > blinkInterval) {
leftEye.setAttribute('ry', '2');
rightEye.setAttribute('ry', '2');
// Pupils are <circle> elements, so shrink their 'r' attribute (circles have no 'ry')
leftPupil.setAttribute('r', '0.8');
rightPupil.setAttribute('r', '0.8');
setTimeout(() => {
leftEye.setAttribute('ry', '20');
rightEye.setAttribute('ry', '20');
leftPupil.setAttribute('r', '8');
rightPupil.setAttribute('r', '8');
}, 150);
lastBlinkTime = timestamp;
}
// Subtle eye movement
const movementSpeed = 0.001;
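// timestamp is in milliseconds, so this scale gives a slow side-to-side drift over a few seconds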
eyeMovementOffset.x = Math.sin(timestamp * movementSpeed) * 6;
eyeMovementOffset.y = Math.cos(timestamp * movementSpeed * 1.3) * 1; // Reduced vertical movement
leftEye.setAttribute('cx', baseEyeX.left + eyeMovementOffset.x);
leftEye.setAttribute('cy', baseEyeY + eyeMovementOffset.y);
rightEye.setAttribute('cx', baseEyeX.right + eyeMovementOffset.x);
rightEye.setAttribute('cy', baseEyeY + eyeMovementOffset.y);
leftPupil.setAttribute('cx', baseEyeX.left + eyeMovementOffset.x);
leftPupil.setAttribute('cy', baseEyeY + eyeMovementOffset.y);
rightPupil.setAttribute('cx', baseEyeX.right + eyeMovementOffset.x);
rightPupil.setAttribute('cy', baseEyeY + eyeMovementOffset.y);
}
// Main animation function
function animate(timestamp) {
if (isAudioPlaying) {
const dataArray = new Uint8Array(analyser.frequencyBinCount);
analyser.getByteFrequencyData(dataArray);
// Calculate the average amplitude in the speech frequency range
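// Each FFT bin spans sampleRate / fftSize Hz (about 172 Hz at a 44.1 kHz sample rate)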
const speechRange = dataArray.slice(5, 80); // Adjust based on your needs
const averageAmplitude = speechRange.reduce((a, b) => a + b) / speechRange.length;
// Normalize the amplitude (0-1 range)
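// getByteFrequencyData returns byte values, so dividing by 255 maps the amplitude to 0-1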
const normalizedAmplitude = averageAmplitude / 255;
// Animate mouth
const upperLip = document.getElementById('upper-lip');
const lowerLip = document.getElementById('lower-lip');
const baseY = 300;
const maxMovement = 60;
const newUpperY = baseY - normalizedAmplitude * maxMovement;
const newLowerY = baseY + normalizedAmplitude * maxMovement;
// Adjust control points for more natural movement
const upperControlY1 = newUpperY - 8;
const upperControlY2 = newUpperY - 8;
const lowerControlY1 = newLowerY + 8;
const lowerControlY2 = newLowerY + 8;
upperLip.setAttribute('d', `M 300 ${baseY} C 350 ${upperControlY1}, 450 ${upperControlY2}, 500 ${baseY}`);
lowerLip.setAttribute('d', `M 300 ${baseY} C 350 ${lowerControlY1}, 450 ${lowerControlY2}, 500 ${baseY}`);
// Animate eyes
const leftEye = document.getElementById('left-eye');
const rightEye = document.getElementById('right-eye');
const leftPupil = document.getElementById('left-pupil');
const rightPupil = document.getElementById('right-pupil');
const baseEyeY = 200;
const maxEyeMovement = 10;
const newEyeY = baseEyeY - normalizedAmplitude * maxEyeMovement;
leftEye.setAttribute('cy', newEyeY);
rightEye.setAttribute('cy', newEyeY);
leftPupil.setAttribute('cy', newEyeY);
rightPupil.setAttribute('cy', newEyeY);
} else {
animateIdleEyes(timestamp);
}
requestAnimationFrame(animate);
}
// Start animation (requestAnimationFrame supplies the timestamp argument)
requestAnimationFrame(animate);
// Handle audio play/pause
audio.addEventListener('play', () => {
isAudioPlaying = true;
audioContext.resume();
});
audio.addEventListener('pause', () => {
isAudioPlaying = false;
});
audio.addEventListener('ended', () => {
isAudioPlaying = false;
});
// Play audio (you might want to trigger this with a user interaction)
document.getElementById('play-button').addEventListener('click', () => {
audio.play();
});
</script>
</body>
</html>