Spaces:
Running
Running
File size: 2,757 Bytes
00f5488 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 |
from fastapi import FastAPI, Request, Form, UploadFile, File
from fastapi.templating import Jinja2Templates
from fastapi.responses import HTMLResponse, RedirectResponse
from fastapi.staticfiles import StaticFiles
from dotenv import load_dotenv
import os, io
from PIL import Image
import markdown
import google.generativeai as genai
# Load environment variables from a local .env file, if present.
load_dotenv()
# SECURITY: never hard-code an API key as a fallback in source — a leaked key
# must be rotated. Require the environment variable and fail fast if missing.
API_KEY = os.getenv("GOOGLE_API_KEY")
if not API_KEY:
    raise RuntimeError("GOOGLE_API_KEY environment variable is not set")
genai.configure(api_key=API_KEY)

app = FastAPI()
templates = Jinja2Templates(directory="templates")
app.mount("/static", StaticFiles(directory="static"), name="static")
model = genai.GenerativeModel('gemini-2.0-flash')

# Global chat session and display history.
# NOTE(review): these are shared by every client — the app is effectively
# single-user; per-session storage would be needed for multi-user use.
chat = None
chat_history = []
@app.get("/", response_class=HTMLResponse)
async def root(request: Request):
    """Render the chat page with the accumulated conversation history."""
    context = {
        "request": request,
        "chat_history": chat_history,
    }
    return templates.TemplateResponse("index.html", context)
@app.post("/", response_class=HTMLResponse)
async def handle_input(
    request: Request,
    user_input: str = Form(...),
    image: UploadFile = File(None)
):
    """Handle a chat submission (text plus optional image).

    Appends the user's message and the model's reply to the module-level
    ``chat_history``, then 303-redirects to GET / (Post-Redirect-Get) so a
    browser refresh does not resubmit the form.
    NOTE(review): ``chat``/``chat_history`` are globals shared by all clients.
    """
    global chat, chat_history
    # Lazily start a Gemini chat session on the first message.
    if chat is None:
        chat = model.start_chat(history=[])
    parts = []
    if user_input:
        parts.append(user_input)
    # Text shown on the user's side of the exchange in the UI.
    user_message = user_input
    # Guard content_type against None before startswith (UploadFile may omit it).
    if image and image.content_type and image.content_type.startswith("image/"):
        data = await image.read()
        try:
            img = Image.open(io.BytesIO(data))
            parts.append(img)
            user_message += " [Image uploaded]"  # Indicate image in chat history
        except Exception as e:
            chat_history.append({
                "role": "model",
                "content": markdown.markdown(f"**Error loading image:** {e}")
            })
            return RedirectResponse("/", status_code=303)
    # An empty submission (blank text, no image) has nothing to send — the
    # API call would fail pointlessly, so just redirect back.
    if not parts:
        return RedirectResponse("/", status_code=303)
    # Store user message for display.
    chat_history.append({"role": "user", "content": user_message})
    try:
        # Send message to the Gemini model.
        resp = chat.send_message(parts)
        # Render the reply as HTML so it displays consistently with the
        # markdown-rendered error messages stored below.
        chat_history.append({"role": "model", "content": markdown.markdown(resp.text)})
    except Exception as e:
        chat_history.append({
            "role": "model",
            "content": markdown.markdown(f"**Error:** {e}")
        })
    # Post-Redirect-Get.
    return RedirectResponse("/", status_code=303)
# Reset the conversation: drop the Gemini session and wipe the history.
@app.post("/new")
async def new_chat():
    """Discard the current chat session and redirect back to the chat page."""
    global chat, chat_history
    chat = None
    chat_history[:] = []
    return RedirectResponse("/", status_code=303)