"""TheryAI API entry point: FastAPI app plus a background keep-alive scheduler."""
from fastapi import FastAPI, APIRouter, Depends, HTTPException, Request, Response
from fastapi.responses import JSONResponse
import schedule
import time
import requests
import threading
import asyncio
import uvicorn
from multiprocessing import Process
from src.llm.routes import router as conversation_router
from src.llm.core.config import settings
from src.llm.agents.conversation_agent import ConversationAgent
# Application instance. Metadata below is what FastAPI serves at /docs, /redoc
# and /openapi.json.
# NOTE(review): debug=True enables Starlette debug tracebacks — confirm this is
# intended for the deployed environment, not just local development.
app = FastAPI(
    title="TheryAI API",
    description="API for TheryAI",
    version="0.1.0",
    docs_url="/docs",
    redoc_url="/redoc",
    openapi_url="/openapi.json",
    debug=True,
)
# Mount the conversation endpoints defined in src/llm/routes.py.
app.include_router(conversation_router)
@app.get("/")
async def home():
    """Root endpoint: return a static welcome payload."""
    greeting = {"message": "Welcome to TheryAI API"}
    return greeting
@app.get("/health")
async def health():
    """Liveness probe: always reports the service as up."""
    status_payload = {"status": "ok"}
    return status_payload
def ping_server(url="https://testys-thery-ai.hf.space", timeout=10.0):
    """Ping the deployed server so the hosting platform keeps it awake.

    Args:
        url: Endpoint to ping. Defaults to the production deployment, so
            existing `schedule` registrations keep working unchanged.
        timeout: Seconds to wait for a response. Without a timeout,
            requests.get can block the scheduler thread indefinitely.

    Returns:
        True if the server answered with a 2xx/3xx status, False otherwise.
        (The return value is ignored by `schedule`, so this stays
        backward compatible.)
    """
    try:
        print("Pinging server")
        response = requests.get(url, timeout=timeout)
        # A 4xx/5xx answer means the app is effectively down; surface it
        # as an error instead of silently treating any response as success.
        response.raise_for_status()
        return True
    except requests.exceptions.RequestException:
        print("Server is down")
        # TODO: send email to admin
        return False
# Register the keep-alive ping to fire every 10 minutes (executed by
# run_schedule's polling loop below).
schedule.every(10).minutes.do(ping_server)
def run_schedule():
    """Poll the `schedule` job queue forever, once per second.

    Intended to run on a daemon thread; it never returns.
    """
    poll_interval_seconds = 1
    while True:
        schedule.run_pending()
        time.sleep(poll_interval_seconds)
# Background worker that drives the keep-alive scheduler. daemon=True so it
# does not block interpreter shutdown.
thread = threading.Thread(target=run_schedule, daemon=True)
thread.start()
def run_fastapi():
    """Serve the app with uvicorn on all interfaces, port 8000.

    The app is passed as the import string "src.api:app" because
    reload=True requires an import string rather than an app object.
    NOTE(review): reload=True spawns a reloader subprocess that re-imports
    this module, which also re-starts the scheduler thread above — confirm
    reload is intended outside local development.
    """
    uvicorn.run(
        "src.api:app",
        host="0.0.0.0",
        port=8000,
        log_level="info",
        reload=True,
    )
if __name__ == "__main__":
    # Start the dev server only when executed directly (not on import).
    # Fix: removed a stray trailing "|" artifact that made this line a
    # syntax error.
    run_fastapi()