<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Universal LLM Playground</title>
<script src="https://cdn.tailwindcss.com"></script>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.4.0/css/all.min.css">
<style>
.gradient-bg {
background: linear-gradient(135deg, #6e8efb, #a777e3);
}
.card-hover:hover {
transform: translateY(-5px);
box-shadow: 0 20px 25px -5px rgba(0, 0, 0, 0.1), 0 10px 10px -5px rgba(0, 0, 0, 0.04);
}
.provider-card {
transition: all 0.3s ease;
border: 2px solid transparent;
}
.provider-card:hover {
border-color: #6e8efb;
}
.provider-card.active {
border-color: #6e8efb;
background-color: #f0f4ff;
}
.tab-content {
display: none;
}
.tab-content.active {
display: block;
animation: fadeIn 0.5s ease-in-out;
}
@keyframes fadeIn {
from { opacity: 0; }
to { opacity: 1; }
}
.code-block {
font-family: 'Courier New', Courier, monospace;
background-color: #2d3748;
color: #f7fafc;
padding: 1rem;
border-radius: 0.375rem;
overflow-x: auto;
}
.model-badge {
font-size: 0.75rem;
padding: 0.25rem 0.5rem;
border-radius: 9999px;
background-color: #e9d8fd;
color: #6b46c1;
}
.response-area {
min-height: 200px;
background-color: #f8fafc;
border: 1px solid #e2e8f0;
border-radius: 0.5rem;
}
.token-counter {
position: absolute;
right: 1rem;
bottom: 1rem;
font-size: 0.75rem;
color: #718096;
}
</style>
</head>
<body class="bg-gray-50">
<!-- Header -->
<header class="gradient-bg text-white py-16">
<div class="container mx-auto px-6 text-center">
<h1 class="text-4xl md:text-5xl font-bold mb-6">Universal LLM Playground</h1>
<p class="text-xl md:text-2xl mb-8 max-w-3xl mx-auto">Connect to any LLM provider with a single interface</p>
<div class="flex justify-center space-x-4">
<a href="#setup" class="bg-white text-purple-600 px-6 py-3 rounded-full font-semibold hover:bg-purple-50 transition">Get Started</a>
<a href="#playground" class="border-2 border-white text-white px-6 py-3 rounded-full font-semibold hover:bg-white hover:text-purple-600 transition">Try It Now</a>
</div>
</div>
</header>
<!-- Setup Section -->
<section id="setup" class="py-16 bg-white">
<div class="container mx-auto px-6">
<h2 class="text-3xl font-bold text-center text-gray-800 mb-12">Easy Setup for Any Provider</h2>
<div class="max-w-6xl mx-auto grid grid-cols-1 md:grid-cols-2 gap-8">
<!-- Local Models -->
<div class="bg-gray-50 p-8 rounded-xl border border-gray-200 card-hover transition">
<div class="flex items-center mb-6">
<div class="bg-purple-100 p-3 rounded-full mr-4">
<i class="fas fa-laptop-code text-purple-600 text-2xl"></i>
</div>
<h3 class="text-2xl font-semibold text-gray-800">Local Models</h3>
</div>
<p class="text-gray-600 mb-6">Run models locally with Ollama, LM Studio, or directly with transformers.</p>
<div class="space-y-4">
<div>
<h4 class="font-medium text-gray-700 mb-2">1. Install Requirements</h4>
<div class="code-block mb-4">
<p># For Ollama</p>
<p>curl -fsSL https://ollama.com/install.sh | sh</p>
<p>ollama pull mistral</p>
<p><br></p>
<p># For Python transformers</p>
<p>pip install torch transformers sentencepiece</p>
</div>
</div>
<div>
<h4 class="font-medium text-gray-700 mb-2">2. Configuration</h4>
<div class="code-block">
<p>local_model = {</p>
<p>    "type": "ollama",  # or "transformers"</p>
<p> "base_url": "http://localhost:11434",</p>
<p> "model": "mistral",</p>
<p> "temperature": 0.7</p>
<p>}</p>
</div>
</div>
</div>
</div>
<!-- OpenAI/Anthropic -->
<div class="bg-gray-50 p-8 rounded-xl border border-gray-200 card-hover transition">
<div class="flex items-center mb-6">
<div class="bg-purple-100 p-3 rounded-full mr-4">
<i class="fas fa-cloud text-purple-600 text-2xl"></i>
</div>
<h3 class="text-2xl font-semibold text-gray-800">Cloud Providers</h3>
</div>
<p class="text-gray-600 mb-6">Connect to OpenAI, Anthropic, Groq, or any API-compatible service.</p>
<div class="space-y-4">
<div>
<h4 class="font-medium text-gray-700 mb-2">1. Get API Keys</h4>
<p class="text-gray-600 mb-2">Create accounts and get API keys from:</p>
<ul class="list-disc pl-5 text-gray-600 space-y-1">
<li><a href="https://platform.openai.com" class="text-purple-600 hover:underline">OpenAI</a></li>
<li><a href="https://console.anthropic.com" class="text-purple-600 hover:underline">Anthropic</a></li>
<li><a href="https://console.groq.com" class="text-purple-600 hover:underline">Groq</a></li>
</ul>
</div>
<div>
<h4 class="font-medium text-gray-700 mb-2">2. Configuration</h4>
<div class="code-block">
<p>openai_config = {</p>
<p> "type": "openai",</p>
<p> "api_key": "sk-your-key-here",</p>
<p> "model": "gpt-4-turbo",</p>
<p> "temperature": 0.7</p>
<p>}</p>
<p><br></p>
<p>anthropic_config = {</p>
<p> "type": "anthropic",</p>
<p> "api_key": "sk-your-key-here",</p>
<p> "model": "claude-3-opus-20240229"</p>
<p>}</p>
</div>
</div>
</div>
</div>
<!-- Hugging Face -->
<div class="bg-gray-50 p-8 rounded-xl border border-gray-200 card-hover transition">
<div class="flex items-center mb-6">
<div class="bg-purple-100 p-3 rounded-full mr-4">
<i class="fas fa-robot text-purple-600 text-2xl"></i>
</div>
<h3 class="text-2xl font-semibold text-gray-800">Hugging Face</h3>
</div>
<p class="text-gray-600 mb-6">Use free Inference API or Pro endpoints for private models.</p>
<div class="space-y-4">
<div>
<h4 class="font-medium text-gray-700 mb-2">1. Get Access</h4>
<ul class="list-disc pl-5 text-gray-600 space-y-1">
<li>Free tier: <a href="https://huggingface.co" class="text-purple-600 hover:underline">Hugging Face account</a></li>
<li>Pro tier: <a href="https://huggingface.co/pro" class="text-purple-600 hover:underline">Hugging Face Pro</a></li>
</ul>
</div>
<div>
<h4 class="font-medium text-gray-700 mb-2">2. Configuration</h4>
<div class="code-block">
<p>hf_config = {</p>
<p> "type": "huggingface",</p>
<p> "api_key": "hf_your_key_here",</p>
<p> "model": "mistralai/Mistral-7B-Instruct-v0.1",</p>
<p> "endpoint": "https://api-inference.huggingface.co/models/",</p>
<p>    "pro_endpoint": False  # set to True for the Pro tier</p>
<p>}</p>
</div>
</div>
</div>
</div>
<!-- Image Models -->
<div class="bg-gray-50 p-8 rounded-xl border border-gray-200 card-hover transition">
<div class="flex items-center mb-6">
<div class="bg-purple-100 p-3 rounded-full mr-4">
<i class="fas fa-image text-purple-600 text-2xl"></i>
</div>
<h3 class="text-2xl font-semibold text-gray-800">Image Generation</h3>
</div>
<p class="text-gray-600 mb-6">Connect to Stable Diffusion, DALL-E, or other image generation models.</p>
<div class="space-y-4">
<div>
<h4 class="font-medium text-gray-700 mb-2">1. Setup Options</h4>
<ul class="list-disc pl-5 text-gray-600 space-y-1">
<li>Local: Stable Diffusion with diffusers</li>
<li>Cloud: DALL·E, Stability AI API, etc.</li>
</ul>
</div>
<div>
<h4 class="font-medium text-gray-700 mb-2">2. Configuration</h4>
<div class="code-block">
<p>image_config = {</p>
<p>    "type": "dalle",  # or "stable_diffusion"</p>
<p>    "api_key": "sk-your-key-here",  # for DALL-E</p>
<p>    "model": "dall-e-3",  # or "stabilityai/stable-diffusion-xl-base-1.0"</p>
<p> "size": "1024x1024"</p>
<p>}</p>
</div>
</div>
</div>
</div>
</div>
<!-- Quick Start -->
<div class="max-w-4xl mx-auto mt-16 bg-purple-50 rounded-xl p-8 border border-purple-200">
<h3 class="text-2xl font-semibold text-purple-800 mb-4">🚀 Quick Start</h3>
<p class="text-gray-700 mb-6">Copy this starter code to begin using any provider:</p>
<div class="code-block mb-6">
<p># Install required packages</p>
<p>pip install requests python-dotenv</p>
<p><br></p>
<p># In your .env file:</p>
<p>OPENAI_API_KEY=your_key_here</p>
<p>ANTHROPIC_API_KEY=your_key_here</p>
<p>HF_API_KEY=your_key_here</p>
<p><br></p>
<p># Basic usage example</p>
<p>from llm_connector import LLMConnector</p>
<p><br></p>
<p>connector = LLMConnector(provider="openai")</p>
<p>response = connector.generate("Explain quantum computing")</p>
<p>print(response)</p>
</div>
<a href="#playground" class="bg-purple-600 text-white px-6 py-3 rounded-full font-semibold hover:bg-purple-700 transition inline-block">Try in Playground</a>
</div>
</div>
</section>
<!-- Playground Section -->
<section id="playground" class="py-16 bg-gray-50">
<div class="container mx-auto px-6">
<h2 class="text-3xl font-bold text-center text-gray-800 mb-12">Interactive Playground</h2>
<div class="max-w-6xl mx-auto bg-white rounded-xl shadow-md overflow-hidden">
<div class="p-8">
<!-- Provider Selection -->
<div class="mb-8">
<h3 class="text-xl font-semibold text-gray-800 mb-4">Select LLM Provider</h3>
<div class="grid grid-cols-2 md:grid-cols-4 gap-4">
<div class="provider-card p-4 rounded-lg cursor-pointer text-center active" onclick="selectProvider('openai')">
<div class="bg-blue-50 p-4 rounded-full inline-block mb-2">
<i class="fab fa-openai text-blue-500 text-2xl"></i>
</div>
<h4 class="font-medium">OpenAI</h4>
<span class="model-badge inline-block mt-1">GPT-4</span>
</div>
<div class="provider-card p-4 rounded-lg cursor-pointer text-center" onclick="selectProvider('anthropic')">
<div class="bg-orange-50 p-4 rounded-full inline-block mb-2">
<i class="fas fa-robot text-orange-500 text-2xl"></i>
</div>
<h4 class="font-medium">Anthropic</h4>
<span class="model-badge inline-block mt-1">Claude 3</span>
</div>
<div class="provider-card p-4 rounded-lg cursor-pointer text-center" onclick="selectProvider('groq')">
<div class="bg-green-50 p-4 rounded-full inline-block mb-2">
<i class="fas fa-bolt text-green-500 text-2xl"></i>
</div>
<h4 class="font-medium">Groq</h4>
<span class="model-badge inline-block mt-1">Mixtral</span>
</div>
<div class="provider-card p-4 rounded-lg cursor-pointer text-center" onclick="selectProvider('huggingface')">
<div class="bg-yellow-50 p-4 rounded-full inline-block mb-2">
<i class="fas fa-h-square text-yellow-500 text-2xl"></i>
</div>
<h4 class="font-medium">Hugging Face</h4>
<span class="model-badge inline-block mt-1">Mistral</span>
</div>
<div class="provider-card p-4 rounded-lg cursor-pointer text-center" onclick="selectProvider('ollama')">
<div class="bg-purple-50 p-4 rounded-full inline-block mb-2">
<i class="fas fa-laptop-code text-purple-500 text-2xl"></i>
</div>
<h4 class="font-medium">Ollama</h4>
<span class="model-badge inline-block mt-1">Local</span>
</div>
<div class="provider-card p-4 rounded-lg cursor-pointer text-center" onclick="selectProvider('transformers')">
<div class="bg-red-50 p-4 rounded-full inline-block mb-2">
<i class="fas fa-code text-red-500 text-2xl"></i>
</div>
<h4 class="font-medium">Transformers</h4>
<span class="model-badge inline-block mt-1">PyTorch</span>
</div>
<div class="provider-card p-4 rounded-lg cursor-pointer text-center" onclick="selectProvider('dalle')">
<div class="bg-pink-50 p-4 rounded-full inline-block mb-2">
<i class="fas fa-image text-pink-500 text-2xl"></i>
</div>
<h4 class="font-medium">DALL·E</h4>
<span class="model-badge inline-block mt-1">Images</span>
</div>
<div class="provider-card p-4 rounded-lg cursor-pointer text-center" onclick="selectProvider('sd')">
<div class="bg-indigo-50 p-4 rounded-full inline-block mb-2">
<i class="fas fa-paint-brush text-indigo-500 text-2xl"></i>
</div>
<h4 class="font-medium">Stable Diffusion</h4>
<span class="model-badge inline-block mt-1">Local</span>
</div>
</div>
</div>
<!-- Configuration Panel -->
<div class="mb-8">
<h3 class="text-xl font-semibold text-gray-800 mb-4">Configuration</h3>
<div class="grid grid-cols-1 md:grid-cols-2 gap-6">
<div>
<label class="block text-gray-700 mb-2">Model</label>
<select class="w-full p-3 border border-gray-300 rounded-lg focus:ring-2 focus:ring-purple-500 focus:border-purple-500">
<option>gpt-4-turbo</option>
<option>gpt-4</option>
<option>gpt-3.5-turbo</option>
</select>
</div>
<div>
<label class="block text-gray-700 mb-2">API Key</label>
<input type="password" class="w-full p-3 border border-gray-300 rounded-lg focus:ring-2 focus:ring-purple-500 focus:border-purple-500" placeholder="sk-...">
</div>
<div>
<label class="block text-gray-700 mb-2">Temperature</label>
<input type="range" min="0" max="1" step="0.1" value="0.7" class="w-full">
<div class="flex justify-between text-sm text-gray-500 mt-1">
<span>Precise</span>
<span>Balanced</span>
<span>Creative</span>
</div>
</div>
<div>
<label class="block text-gray-700 mb-2">Max Tokens</label>
<input type="number" value="1000" class="w-full p-3 border border-gray-300 rounded-lg focus:ring-2 focus:ring-purple-500 focus:border-purple-500">
</div>
</div>
</div>
<!-- Prompt Area -->
<div class="mb-8">
<h3 class="text-xl font-semibold text-gray-800 mb-4">Prompt</h3>
<div class="relative">
<textarea class="w-full p-4 border border-gray-300 rounded-lg focus:ring-2 focus:ring-purple-500 focus:border-purple-500 h-40" placeholder="Enter your prompt here..."></textarea>
<div class="token-counter">0 tokens</div>
</div>
<div class="flex space-x-3 mt-3">
<button class="bg-gray-100 text-gray-700 px-4 py-2 rounded-lg hover:bg-gray-200">
<i class="fas fa-magic mr-2"></i> Enhance
</button>
<button class="bg-gray-100 text-gray-700 px-4 py-2 rounded-lg hover:bg-gray-200">
<i class="fas fa-history mr-2"></i> History
</button>
<button class="bg-gray-100 text-gray-700 px-4 py-2 rounded-lg hover:bg-gray-200">
<i class="fas fa-save mr-2"></i> Save
</button>
</div>
</div>
<!-- Response Area -->
<div>
<div class="flex justify-between items-center mb-4">
<h3 class="text-xl font-semibold text-gray-800">Response</h3>
<div class="flex space-x-2">
<button class="bg-gray-100 text-gray-700 px-3 py-1 rounded-lg text-sm hover:bg-gray-200">
<i class="fas fa-copy mr-1"></i> Copy
</button>
<button class="bg-gray-100 text-gray-700 px-3 py-1 rounded-lg text-sm hover:bg-gray-200">
<i class="fas fa-download mr-1"></i> Save
</button>
</div>
</div>
<div class="response-area p-4">
<p class="text-gray-500 italic">Response will appear here...</p>
</div>
<div class="flex justify-between items-center mt-3 text-sm text-gray-500">
<div>
<span>Tokens: 0</span>
<span class="mx-2">|</span>
<span>Time: 0s</span>
</div>
<div>
<span>Model: gpt-4-turbo</span>
</div>
</div>
</div>
<!-- Action Buttons -->
<div class="flex justify-center space-x-4 mt-8">
<button class="gradient-bg text-white px-8 py-3 rounded-full font-semibold hover:opacity-90 transition flex items-center">
<i class="fas fa-paper-plane mr-2"></i> Generate
</button>
<button class="border-2 border-purple-600 text-purple-600 px-8 py-3 rounded-full font-semibold hover:bg-purple-50 transition flex items-center">
<i class="fas fa-redo mr-2"></i> Regenerate
</button>
</div>
</div>
</div>
<!-- API Examples -->
<div class="max-w-4xl mx-auto mt-16">
<h3 class="text-2xl font-semibold text-gray-800 mb-6">API Usage Examples</h3>
<div class="mb-8">
<div class="flex items-center mb-4">
<div class="bg-blue-100 p-2 rounded-full mr-3">
<i class="fas fa-code text-blue-500"></i>
</div>
<h4 class="text-lg font-medium text-gray-700">Basic Text Generation</h4>
</div>
<div class="code-block">
<p>from llm_connector import LLMConnector</p>
<p><br></p>
<p># Initialize with your preferred provider</p>
<p>llm = LLMConnector(provider="openai", api_key="your_key")</p>
<p><br></p>
<p># Simple generation</p>
<p>response = llm.generate(</p>
<p> prompt="Explain quantum computing",</p>
<p> model="gpt-4-turbo",</p>
<p> temperature=0.7,</p>
<p> max_tokens=1000</p>
<p>)</p>
<p><br></p>
<p>print(response)</p>
</div>
</div>
<div class="mb-8">
<div class="flex items-center mb-4">
<div class="bg-purple-100 p-2 rounded-full mr-3">
<i class="fas fa-image text-purple-500"></i>
</div>
<h4 class="text-lg font-medium text-gray-700">Image Generation</h4>
</div>
<div class="code-block">
<p>from llm_connector import LLMConnector</p>
<p><br></p>
<p># Initialize image generator</p>
<p>image_gen = LLMConnector(provider="dalle", api_key="your_key")</p>
<p><br></p>
<p># Generate image</p>
<p>image_url = image_gen.generate_image(</p>
<p> prompt="A futuristic cityscape at sunset",</p>
<p> model="dall-e-3",</p>
<p> size="1024x1024",</p>
<p> quality="hd"</p>
<p>)</p>
<p><br></p>
<p>print(f"Image generated at: {image_url}")</p>
</div>
</div>
<div class="mb-8">
<div class="flex items-center mb-4">
<div class="bg-green-100 p-2 rounded-full mr-3">
<i class="fas fa-exchange-alt text-green-500"></i>
</div>
<h4 class="text-lg font-medium text-gray-700">Chat Completion</h4>
</div>
<div class="code-block">
<p>from llm_connector import LLMConnector</p>
<p><br></p>
<p># Initialize chat</p>
<p>chat = LLMConnector(provider="anthropic", api_key="your_key")</p>
<p><br></p>
<p># Start conversation</p>
<p>messages = [</p>
<p> {"role": "system", "content": "You are a helpful assistant."},</p>
<p> {"role": "user", "content": "What's the weather today?"}</p>
<p>]</p>
<p><br></p>
<p># Get response</p>
<p>response = chat.chat_complete(</p>
<p> messages=messages,</p>
<p> model="claude-3-opus-20240229",</p>
<p> temperature=0.5</p>
<p>)</p>
<p><br></p>
<p>print(response)</p>
</div>
</div>
<div>
<div class="flex items-center mb-4">
<div class="bg-yellow-100 p-2 rounded-full mr-3">
<i class="fas fa-server text-yellow-500"></i>
</div>
<h4 class="text-lg font-medium text-gray-700">Local Model Setup</h4>
</div>
<div class="code-block">
<p>from llm_connector import LLMConnector</p>
<p><br></p>
<p># Initialize local Ollama model</p>
<p>local_llm = LLMConnector(</p>
<p> provider="ollama",</p>
<p> base_url="http://localhost:11434",</p>
<p> model="mistral"</p>
<p>)</p>
<p><br></p>
<p># Generate text</p>
<p>response = local_llm.generate(</p>
<p> prompt="Explain the theory of relativity",</p>
<p> temperature=0.7</p>
<p>)</p>
<p><br></p>
<p>print(response)</p>
</div>
</div>
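<!-- Illustrative sketch (assumption): the llm_connector package imported in the examples above is not defined on this page, so this shows one possible minimal shape for its text-generation path. It is not the published library. -->
<div class="mt-8">
<div class="flex items-center mb-4">
<div class="bg-gray-100 p-2 rounded-full mr-3">
<i class="fas fa-cube text-gray-500"></i>
</div>
<h4 class="text-lg font-medium text-gray-700">Minimal Connector Sketch (illustrative)</h4>
</div>
<p class="text-gray-600 mb-4">The examples above assume an <code>LLMConnector</code> class. As a rough sketch only, here is one way its text-generation path could look against an OpenAI-compatible chat endpoint; field names, defaults, and environment-variable names are assumptions rather than the actual package:</p>
<div class="code-block">
<p># Sketch only: generate() against an OpenAI-compatible chat endpoint</p>
<p>import os</p>
<p>import requests</p>
<p><br></p>
<p>class LLMConnector:</p>
<p>    def __init__(self, provider="openai", api_key=None, base_url=None, model="gpt-4-turbo"):</p>
<p>        self.provider = provider</p>
<p>        self.api_key = api_key or os.getenv(f"{provider.upper()}_API_KEY")</p>
<p>        self.base_url = base_url or "https://api.openai.com/v1"</p>
<p>        self.model = model</p>
<p><br></p>
<p>    def generate(self, prompt, model=None, temperature=0.7, max_tokens=1000):</p>
<p>        resp = requests.post(</p>
<p>            f"{self.base_url}/chat/completions",</p>
<p>            headers={"Authorization": f"Bearer {self.api_key}"},</p>
<p>            json={</p>
<p>                "model": model or self.model,</p>
<p>                "messages": [{"role": "user", "content": prompt}],</p>
<p>                "temperature": temperature,</p>
<p>                "max_tokens": max_tokens,</p>
<p>            },</p>
<p>            timeout=60,</p>
<p>        )</p>
<p>        resp.raise_for_status()</p>
<p>        return resp.json()["choices"][0]["message"]["content"]</p>
</div>
</div>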
</div>
</div>
</section>
<!-- Features Section -->
<section class="py-16 bg-white">
<div class="container mx-auto px-6">
<h2 class="text-3xl font-bold text-center text-gray-800 mb-12">Key Features</h2>
<div class="max-w-6xl mx-auto grid grid-cols-1 md:grid-cols-3 gap-8">
<div class="bg-gray-50 p-8 rounded-xl border border-gray-200 card-hover transition">
<div class="bg-blue-100 p-4 rounded-full inline-block mb-4">
<i class="fas fa-exchange-alt text-blue-500 text-2xl"></i>
</div>
<h3 class="text-xl font-semibold text-gray-800 mb-3">Provider Agnostic</h3>
<p class="text-gray-600">Switch between different LLM providers with a single line of code. No need to rewrite your application when changing models.</p>
</div>
<div class="bg-gray-50 p-8 rounded-xl border border-gray-200 card-hover transition">
<div class="bg-purple-100 p-4 rounded-full inline-block mb-4">
<i class="fas fa-sliders-h text-purple-500 text-2xl"></i>
</div>
<h3 class="text-xl font-semibold text-gray-800 mb-3">Unified Interface</h3>
<p class="text-gray-600">Consistent API for text generation, chat completion, and image generation across all providers.</p>
</div>
<div class="bg-gray-50 p-8 rounded-xl border border-gray-200 card-hover transition">
<div class="bg-green-100 p-4 rounded-full inline-block mb-4">
<i class="fas fa-lock text-green-500 text-2xl"></i>
</div>
<h3 class="text-xl font-semibold text-gray-800 mb-3">Secure Configuration</h3>
<p class="text-gray-600">Environment variable support for API keys and sensitive configuration. Never hardcode credentials.</p>
</div>
<div class="bg-gray-50 p-8 rounded-xl border border-gray-200 card-hover transition">
<div class="bg-yellow-100 p-4 rounded-full inline-block mb-4">
<i class="fas fa-tachometer-alt text-yellow-500 text-2xl"></i>
</div>
<h3 class="text-xl font-semibold text-gray-800 mb-3">Performance Metrics</h3>
<p class="text-gray-600">Track response times, token usage, and costs across different providers and models.</p>
</div>
<div class="bg-gray-50 p-8 rounded-xl border border-gray-200 card-hover transition">
<div class="bg-red-100 p-4 rounded-full inline-block mb-4">
<i class="fas fa-code-branch text-red-500 text-2xl"></i>
</div>
<h3 class="text-xl font-semibold text-gray-800 mb-3">Fallback Handling</h3>
<p class="text-gray-600">Automatic fallback to alternative models when primary model is unavailable or rate-limited.</p>
</div>
<div class="bg-gray-50 p-8 rounded-xl border border-gray-200 card-hover transition">
<div class="bg-indigo-100 p-4 rounded-full inline-block mb-4">
<i class="fas fa-plug text-indigo-500 text-2xl"></i>
</div>
<h3 class="text-xl font-semibold text-gray-800 mb-3">Extensible Design</h3>
<p class="text-gray-600">Easy to add new providers or customize existing ones with plugin architecture.</p>
</div>
</div>
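<!-- Illustrative sketch (not from the published library): combines the provider-agnostic and fallback patterns described in the cards above, using the LLMConnector interface shown elsewhere on this page. Provider names and environment-variable names are assumptions. -->
<div class="max-w-4xl mx-auto mt-12">
<p class="text-gray-600 mb-4">As a minimal sketch, here is how the provider-agnostic and fallback features could combine, assuming the <code>LLMConnector</code> interface from the examples above and API keys supplied via environment variables:</p>
<div class="code-block">
<p># Illustrative sketch only; adapt provider names and env vars to your setup</p>
<p>import os</p>
<p>from llm_connector import LLMConnector</p>
<p><br></p>
<p>providers = ["openai", "groq", "ollama"]  # primary first, fallbacks after</p>
<p><br></p>
<p>def generate_with_fallback(prompt):</p>
<p>    for name in providers:</p>
<p>        try:</p>
<p>            llm = LLMConnector(provider=name, api_key=os.getenv(f"{name.upper()}_API_KEY"))</p>
<p>            return llm.generate(prompt)</p>
<p>        except Exception as err:  # rate limit, outage, bad key, ...</p>
<p>            print(f"{name} failed ({err}); trying next provider...")</p>
<p>    raise RuntimeError("All providers failed")</p>
</div>
</div>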
</div>
</section>
<!-- Footer -->
<footer class="gradient-bg text-white py-12">
<div class="container mx-auto px-6">
<div class="max-w-4xl mx-auto text-center">
<h2 class="text-2xl font-bold mb-6">Start Building with Any LLM Today</h2>
<p class="text-lg mb-8">Universal interface for all your AI model needs. Switch providers without rewriting your application.</p>
<div class="flex justify-center space-x-4">
<a href="#setup" class="bg-white text-purple-600 px-6 py-3 rounded-full font-semibold hover:bg-purple-50 transition">Get Started</a>
<a href="#playground" class="border-2 border-white text-white px-6 py-3 rounded-full font-semibold hover:bg-white hover:text-purple-600 transition">Try Playground</a>
</div>
<p class="mt-10 text-purple-200">© 2023 Universal LLM Playground. MIT License.</p>
</div>
</div>
</footer>
<script>
// Provider selection (invoked from the inline onclick handlers on the provider cards)
function selectProvider(provider) {
// Remove the active highlight from every provider card
document.querySelectorAll('.provider-card').forEach(card => {
card.classList.remove('active');
});
// Highlight the clicked card; inline handlers expose the current event as window.event
const clickedCard = window.event && window.event.target.closest('.provider-card');
if (clickedCard) clickedCard.classList.add('active');
// Update model dropdown based on provider
const modelSelect = document.querySelector('select');
let options = [];
switch(provider) {
case 'openai':
options = ['gpt-4-turbo', 'gpt-4', 'gpt-3.5-turbo'];
break;
case 'anthropic':
options = ['claude-3-opus-20240229', 'claude-3-sonnet-20240229', 'claude-2.1'];
break;
case 'groq':
options = ['mixtral-8x7b-32768', 'llama2-70b-4096'];
break;
case 'huggingface':
options = ['mistralai/Mistral-7B-Instruct-v0.1', 'meta-llama/Llama-2-7b-chat-hf'];
break;
case 'ollama':
options = ['mistral', 'llama2', 'codellama'];
break;
case 'transformers':
options = ['local/transformers'];
break;
case 'dalle':
options = ['dall-e-3', 'dall-e-2'];
break;
case 'sd':
options = ['stabilityai/stable-diffusion-xl-base-1.0'];
break;
}
// Update select options
modelSelect.innerHTML = '';
options.forEach(opt => {
const option = document.createElement('option');
option.value = opt;
option.textContent = opt;
modelSelect.appendChild(option);
});
// Update API key placeholder
const apiKeyInput = document.querySelector('input[type="password"]');
if (provider === 'openai') {
apiKeyInput.placeholder = 'sk-...';
} else if (provider === 'anthropic') {
apiKeyInput.placeholder = 'sk-ant-...';
} else if (provider === 'huggingface') {
apiKeyInput.placeholder = 'hf_...';
} else {
apiKeyInput.placeholder = 'API key (if required)';
}
}
// Token counter for prompt
const promptTextarea = document.querySelector('textarea');
const tokenCounter = document.querySelector('.token-counter');
promptTextarea.addEventListener('input', () => {
// Simple token estimation (4 chars ≈ 1 token)
const tokenCount = Math.ceil(promptTextarea.value.length / 4);
tokenCounter.textContent = `${tokenCount} tokens`;
});
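// --- Illustrative sketch only: wiring the Generate button ---
// The markup above does not attach a handler to the Generate button. The sketch below
// shows one way to do it for the OpenAI-compatible case, reading the model, API key,
// and prompt from the controls on this page. The endpoint and the selector used to find
// the button are assumptions; calling a provider directly from the browser exposes the
// API key, so treat this as local experimentation only.
const generateButton = document.querySelector('#playground button.gradient-bg');
const responseArea = document.querySelector('.response-area');
if (generateButton && responseArea) {
generateButton.addEventListener('click', async () => {
const model = document.querySelector('select').value;
const apiKey = document.querySelector('input[type="password"]').value;
responseArea.innerHTML = '<p class="text-gray-500 italic">Generating...</p>';
try {
const res = await fetch('https://api.openai.com/v1/chat/completions', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': `Bearer ${apiKey}`
},
body: JSON.stringify({
model: model,
messages: [{ role: 'user', content: promptTextarea.value }]
})
});
const data = await res.json();
responseArea.textContent =
(data.choices && data.choices[0].message.content) || JSON.stringify(data);
} catch (err) {
responseArea.textContent = `Request failed: ${err.message}`;
}
});
}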
// Smooth scrolling for anchor links
document.querySelectorAll('a[href^="#"]').forEach(anchor => {
anchor.addEventListener('click', function (e) {
e.preventDefault();
const target = document.querySelector(this.getAttribute('href'));
if (target) target.scrollIntoView({ behavior: 'smooth' });
});
});
</script>
<p style="border-radius: 8px; text-align: center; font-size: 12px; color: #fff; margin-top: 16px;position: fixed; left: 8px; bottom: 8px; z-index: 10; background: rgba(0, 0, 0, 0.8); padding: 4px 8px;">Made with <img src="https://enzostvs-deepsite.hf.space/logo.svg" alt="DeepSite Logo" style="width: 16px; height: 16px; vertical-align: middle;display:inline-block;margin-right:3px;filter:brightness(0) invert(1);"><a href="https://enzostvs-deepsite.hf.space" style="color: #fff;text-decoration: underline;" target="_blank" >DeepSite</a> - 🧬 <a href="https://enzostvs-deepsite.hf.space?remix=Simultaneous-Orthoganlity-In-Time/new-deep-site" style="color: #fff;text-decoration: underline;" target="_blank" >Remix</a></p></body>
</html>