|
import gradio as gr |
|
import requests |
|
|
|
# Chat-completion endpoint of a locally running Ollama server (default port 11434).
OLLAMA_API = "http://localhost:11434/api/chat"
|
|
|
def chat_fn(message, history):
    """Send the conversation to the local Ollama model and return its reply.

    Args:
        message: The user's newest message (plain string).
        history: Prior turns supplied by gr.ChatInterface — either a list of
            (user, assistant) tuples (default mode) or a list of
            {"role", "content"} dicts (messages mode). Both are handled.

    Returns:
        The assistant's reply text, or an "Error: ..." string on any failure
        (this function is the UI boundary, so errors become chat output
        rather than crashing the interface).
    """
    # Rebuild the full conversation so the model has memory of prior turns.
    messages = []
    for turn in history or []:
        if isinstance(turn, dict):
            # Messages-mode history is already in the Ollama/OpenAI shape.
            messages.append({"role": turn["role"], "content": turn["content"]})
        else:
            # Tuple-mode history: (user_message, assistant_message).
            user_msg, bot_msg = turn
            if user_msg:
                messages.append({"role": "user", "content": user_msg})
            if bot_msg:
                messages.append({"role": "assistant", "content": bot_msg})
    messages.append({"role": "user", "content": message})

    payload = {
        "model": "qwen2.5-coder:3b",
        "messages": messages,
        # Ollama's /api/chat streams NDJSON by default; without this flag the
        # body is many JSON objects and response.json() raises.
        "stream": False,
    }
    try:
        # Timeout prevents a hung/unreachable server from blocking the UI forever.
        response = requests.post(OLLAMA_API, json=payload, timeout=120)
        response.raise_for_status()
        data = response.json()
        return data.get("message", {}).get("content", "No response.")
    except Exception as e:
        # Boundary handler: surface any failure (connection, HTTP, JSON) as chat text.
        return f"Error: {e}"
|
|
|
# Guard the launch so importing this module (e.g. for testing) does not
# start a web server; running it as a script behaves exactly as before.
if __name__ == "__main__":
    gr.ChatInterface(chat_fn, title="Chat with Ollama Model").launch()
|
|