Update app.py
app.py CHANGED
```diff
@@ -64,10 +64,10 @@ MAX_HISTORY = 100 # limit memory to avoid crashes
 
 # Generate response stream
 
-async def …
-    token = os.getenv("…
+async def generate_ai_response(chat_id: str, model: str):
+    token = os.getenv("GITHUB_TOKEN")
     if not token:
-        raise HTTPException(…
+        raise HTTPException(status_code=500, detail="GitHub token not configured")
 
     endpoint = "https://models.github.ai/inference"
```
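The diff only shows the opening lines of `generate_ai_response`, and the second hunk's context (`return StreamingResponse(`) suggests the route streams whatever this function yields. Below is a minimal sketch, not the Space's actual code, of how the rest of the body could call the `https://models.github.ai/inference` endpoint; the `AsyncOpenAI` client and the module-level `chat_histories` dict are assumptions, since neither appears in the visible part of the diff.

```python
# Sketch only: stream a reply from the GitHub Models endpoint shown in the diff,
# assuming the OpenAI-compatible async client and a chat_histories dict.
import os

from fastapi import HTTPException
from openai import AsyncOpenAI

# Assumed store: chat_id -> list of {"role": ..., "content": ...} messages.
chat_histories: dict[str, list[dict]] = {}

async def generate_ai_response(chat_id: str, model: str):
    token = os.getenv("GITHUB_TOKEN")
    if not token:
        raise HTTPException(status_code=500, detail="GitHub token not configured")

    endpoint = "https://models.github.ai/inference"
    client = AsyncOpenAI(base_url=endpoint, api_key=token)

    # Request a streamed completion and yield each text delta as it arrives,
    # so the route can wrap this generator in a StreamingResponse.
    stream = await client.chat.completions.create(
        model=model,
        messages=chat_histories.get(chat_id, []),
        stream=True,
    )
    async for chunk in stream:
        delta = chunk.choices[0].delta.content
        if delta:
            yield delta
```

If the function does have this shape, the chat route presumably wraps the generator along the lines of `StreamingResponse(generate_ai_response(chat_id, model), media_type="text/plain")`.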
```diff
@@ -131,12 +131,12 @@ return StreamingResponse(
 # Optional: reset chat history
 
 @app.post("/reset")
-async def …
-    if …
-        …
-        return {"message": f"Chat {…
+async def reset_chat(chat_id: str = Query(..., description="ID of chat to reset")):
+    if chat_id in chat_histories:
+        chat_histories[chat_id].clear()
+        return {"message": f"Chat {chat_id} history reset."}
     else:
-        raise HTTPException(…
+        raise HTTPException(status_code=404, detail="Chat ID not found")
 
 # For ASGI servers like Uvicorn
```
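The new `reset_chat` signature relies on FastAPI's `Query` (so `Query` must be imported from `fastapi`) and on the `chat_histories` mapping assumed in the sketch above. A quick example of calling the updated endpoint once the Space is running; the base URL and chat ID here are placeholders, not values from the diff.

```python
# Exercise the /reset endpoint; placeholders for URL and chat_id.
import httpx

resp = httpx.post("http://localhost:8000/reset", params={"chat_id": "demo-chat"})
print(resp.status_code, resp.json())
# Expected: 200 {"message": "Chat demo-chat history reset."} if the chat exists,
# or 404 {"detail": "Chat ID not found"} otherwise (FastAPI's default error body).
```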