File size: 4,009 Bytes
cbdf3eb 1ece3c6 e1933c4 cbdf3eb 1ece3c6 6581e65 1ece3c6 e1933c4 e4872e8 1ece3c6 cbdf3eb 6991b14 6581e65 cbdf3eb 6581e65 6991b14 e1933c4 6991b14 cbdf3eb 6581e65 1ece3c6 e1933c4 1ece3c6 e1933c4 1ece3c6 6991b14 e1933c4 1ece3c6 6991b14 e1933c4 cbdf3eb 1ece3c6 e1933c4 1ece3c6 e1933c4 1ece3c6 e1933c4 1ece3c6 cbdf3eb 1ece3c6 cbdf3eb 1ece3c6 cbdf3eb 6991b14 e1933c4 396640c |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 |
import os
import io
from io import BytesIO
from fastapi import FastAPI, UploadFile, File, Form
from fastapi.responses import JSONResponse, HTMLResponse
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from huggingface_hub import InferenceClient, login
from PyPDF2 import PdfReader
from docx import Document
from PIL import Image
from routers import ai # conservez vos routes annexes
# ──────────────────────────────────────────────────────────────────────────────
# 1) Hugging Face authentication
# ──────────────────────────────────────────────────────────────────────────────
# The token may live under either of the two conventional env-var names;
# empty strings are treated the same as unset.
_TOKEN_VARS = ("HF_TOKEN", "HUGGINGFACE_HUB_TOKEN")
HF_TOKEN = next((value for value in map(os.getenv, _TOKEN_VARS) if value), None)
if HF_TOKEN is None:
    raise RuntimeError(
        "Variable d'environnement HF_TOKEN absente ; crΓ©ez un jeton Β« Read Β» "
        "sur https://huggingface.co/settings/tokens et exportez-le (voir .env)."
    )
# Authenticates the whole process against the Hugging Face Hub.
login(token=HF_TOKEN)
# ──────────────────────────────────────────────────────────────────────────────
# 2) FastAPI configuration
# ──────────────────────────────────────────────────────────────────────────────
# Port is injected by the hosting platform (e.g. HF Spaces); default 7860.
PORT = int(os.getenv("PORT", 7860))

app = FastAPI(
    title="AI Web App API",
    description="Backend API for AI-powered web application",
    version="1.0.0"
)

# NOTE(review): per the CORS spec, `allow_credentials=True` cannot be combined
# with a literal wildcard origin; Starlette will not echo credentials for "*".
# Restrict `allow_origins` to the real front-end origin(s) if cookies/auth
# headers must cross origins.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# API routes MUST be registered before the root static mount: Starlette matches
# routes in registration order, and a StaticFiles mount at "/" matches every
# path, so anything registered after it is unreachable (shadowed).
app.include_router(ai.router)

# NOTE(review): routes declared later in this module (e.g. /api/health) are
# still registered after this mount and remain shadowed by it — the static
# mount should ideally be the very last registration in the file.
app.mount("/", StaticFiles(directory=".", html=True), name="static")

# Hugging Face inference clients; the token is picked up implicitly from the
# process-wide login() performed above.
summary_client = InferenceClient("facebook/bart-large-cnn")
qa_client = InferenceClient("deepset/roberta-base-squad2")
image_caption_client = InferenceClient("nlpconnect/vit-gpt2-image-captioning")
# ──────────────────────────────────────────────────────────────────────────────
# 3) Utils: text extraction, API routes (unchanged or nearly so)
# ──────────────────────────────────────────────────────────────────────────────
def extract_text_from_pdf(content: bytes) -> str:
    """Return all text extracted from a PDF supplied as raw bytes."""
    pages = PdfReader(io.BytesIO(content)).pages
    # extract_text() can return None (e.g. image-only pages); substitute "".
    chunks = [page.extract_text() or "" for page in pages]
    return "\n".join(chunks).strip()
def extract_text_from_docx(content: bytes) -> str:
    """Return the paragraph text of a .docx document supplied as raw bytes."""
    document = Document(io.BytesIO(content))
    lines = (paragraph.text for paragraph in document.paragraphs)
    return "\n".join(lines).strip()
def process_uploaded_file(file: "UploadFile") -> str:
    """Extract plain text from an uploaded pdf / docx / txt file.

    The file's extension (case-insensitive) selects the extractor.

    Raises:
        ValueError: if the filename is missing, has no extension, or the
            extension is not one of pdf/docx/txt.
    """
    content = file.file.read()
    # UploadFile.filename may legitimately be None; the original code crashed
    # with AttributeError here. An extension-less name ("README") must also
    # fall through to the unsupported-type error rather than be treated as
    # its own extension.
    name = file.filename or ""
    ext = name.rsplit(".", 1)[-1].lower() if "." in name else ""
    if ext == "pdf":
        return extract_text_from_pdf(content)
    if ext == "docx":
        return extract_text_from_docx(content)
    if ext == "txt":
        return content.decode("utf-8").strip()
    raise ValueError("Type de fichier non supportΓ©")
# … (keep your three routes /analyze, /ask, /interpret_image unchanged)
@app.get("/api/health")
async def health_check():
    """Liveness probe: service status, API version, and that an HF token is set."""
    payload = {
        "status": "healthy",
        "version": "1.0.0",
        "hf_token_set": True,
    }
    return payload
# Local development entry point: serve the ASGI app directly with uvicorn.
# (In deployment the app is typically launched as `uvicorn main:app` instead.)
if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=PORT)
|