# CrispChat / app.py
import os
import gradio as gr
import requests
import json
# OpenRouter API key, read from the environment ("" when unset — requests
# made with an empty key will be rejected by the API).
OPENROUTER_API_KEY = os.environ.get("OPENROUTER_API_KEY", "")
# Available models as (display name, OpenRouter model ID) pairs; the display
# name is shown in the UI radio, the ID is sent to the API.
MODELS = [
("Gemini Pro 2.0", "google/gemini-2.0-pro-exp-02-05:free"),
("Llama 3.2 Vision", "meta-llama/llama-3.2-11b-vision-instruct:free")
]
def format_to_message_dict(history):
    """Convert [user, assistant] history pairs into OpenAI-style message dicts.

    Entries that are not length-2 are skipped; empty/None sides of a pair
    produce no message.
    """
    messages = []
    for entry in history:
        if len(entry) != 2:
            continue
        user_text, assistant_text = entry
        if user_text:
            messages.append({"role": "user", "content": user_text})
        if assistant_text:
            messages.append({"role": "assistant", "content": assistant_text})
    return messages
def ask_ai(message, chatbot, model_choice):
    """Send the user's message (plus prior history) to the chosen OpenRouter model.

    Args:
        message: New user message text; whitespace-only input is ignored.
        chatbot: Gradio chat history as a list of [user, assistant] pairs.
        model_choice: Display name selected in the UI, matched against MODELS.

    Returns:
        (updated_history, "") — the empty string clears the input textbox.
        API/network failures are appended to the history as "Error: ..." rows
        rather than raised, so the UI never crashes on a bad request.
    """
    # Ignore empty submissions without touching history or the network.
    if not message.strip():
        return chatbot, ""

    # Map display name -> model ID; fall back to the first model when the
    # choice is unrecognized (same default as before, without a manual scan).
    model_id = dict(MODELS).get(model_choice, MODELS[0][1])

    # Fail fast with a readable message when the key is missing, instead of
    # surfacing an opaque 401 status from the API.
    if not OPENROUTER_API_KEY:
        return chatbot + [[message, "Error: OPENROUTER_API_KEY is not set"]], ""

    # Build the OpenAI-style message list from history, then the new turn.
    messages = format_to_message_dict(chatbot)
    messages.append({"role": "user", "content": message})

    try:
        response = requests.post(
            "https://openrouter.ai/api/v1/chat/completions",
            headers={
                "Content-Type": "application/json",
                "Authorization": f"Bearer {OPENROUTER_API_KEY}",
                "HTTP-Referer": "https://huggingface.co/spaces"
            },
            json={
                "model": model_id,
                "messages": messages,
                "temperature": 0.7,
                "max_tokens": 1000
            },
            timeout=60
        )
        if response.status_code == 200:
            result = response.json()
            # Guard against an empty "choices" list, which previously raised
            # IndexError and was only rescued by the broad except below.
            choices = result.get("choices") or [{}]
            ai_response = choices[0].get("message", {}).get("content", "")
            chatbot = chatbot + [[message, ai_response]]
        else:
            chatbot = chatbot + [[message, f"Error: Status code {response.status_code}"]]
    except Exception as e:
        # Broad catch is deliberate: any network/parse failure becomes a
        # visible chat row instead of crashing the Gradio handler.
        chatbot = chatbot + [[message, f"Error: {str(e)}"]]
    return chatbot, ""
def clear_chat():
    """Reset the conversation: empty history and an empty input textbox."""
    return ([], "")
# Minimal Gradio interface: one chat pane, a message box, a model picker,
# and send/clear buttons.
with gr.Blocks() as demo:
    gr.Markdown("# Simple AI Chat")
    chatbot = gr.Chatbot(height=400)

    with gr.Row():
        message = gr.Textbox(
            placeholder="Type your message here...",
            label="Message",
            lines=2,
        )

    with gr.Row():
        model_choice = gr.Radio(
            [display_name for display_name, _ in MODELS],
            value=MODELS[0][0],
            label="Model",
        )

    with gr.Row():
        send_button = gr.Button("Send")
        reset_button = gr.Button("Clear Chat")

    # Clicking "Send" and pressing Enter in the textbox trigger the same
    # handler with identical wiring.
    for register in (send_button.click, message.submit):
        register(
            fn=ask_ai,
            inputs=[message, chatbot, model_choice],
            outputs=[chatbot, message],
        )

    reset_button.click(
        fn=clear_chat,
        inputs=[],
        outputs=[chatbot, message],
    )

# Launch directly with Gradio's built-in server.
if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0", server_port=7860)