Update app.py
app.py CHANGED

@@ -17,7 +17,6 @@ else:
 
 
 model = ollama.pull('llama3.2')
-status = f"Status{model.status}"
 
 class ChatRequest(BaseModel):
     message: str
@@ -27,6 +26,7 @@ class ChatRequest(BaseModel):
     top_p: float = 0.95
 
 class ChatResponse(BaseModel):
+    model_status: str
     response: str
 
 @app.post("/chat", response_model=ChatResponse)
@@ -48,6 +48,6 @@ async def chat(request: ChatRequest):
 
 
         # return {"response": response.choices[0].message.content}
-        return {"model_status": status, "response": response}
+        return {"model_status": model.status, "response": response}
     except Exception as e:
         raise HTTPException(status_code=500, detail=str(e))
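For context, here is a minimal sketch of the relevant section of app.py as it stands after this commit. Everything the diff does not show is an assumption for illustration only: the FastAPI app object, any ChatRequest fields elided between the hunks, and the ollama.chat call that produces response are guesses, not taken from the repository.

# Minimal sketch of app.py after this commit; unshown parts are assumptions.
import ollama
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel

app = FastAPI()  # assumed; defined somewhere before the shown hunks

# Pull the model at startup; the returned object exposes a `status` field,
# which the endpoint now returns directly instead of a pre-formatted string.
model = ollama.pull('llama3.2')

class ChatRequest(BaseModel):
    message: str
    # ... other fields elided in the diff ...
    top_p: float = 0.95

class ChatResponse(BaseModel):
    model_status: str   # added by this commit
    response: str

@app.post("/chat", response_model=ChatResponse)
async def chat(request: ChatRequest):
    try:
        # Assumed call; the diff does not show how `response` is produced.
        result = ollama.chat(
            model='llama3.2',
            messages=[{'role': 'user', 'content': request.message}],
        )
        response = result['message']['content']
        # return {"response": response.choices[0].message.content}
        return {"model_status": model.status, "response": response}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))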