Update app.py
Browse files
app.py
CHANGED
@@ -32,19 +32,21 @@ class ChatResponse(BaseModel):
|
|
32 |
@app.post("/chat", response_model=ChatResponse)
|
33 |
async def chat(request: ChatRequest):
|
34 |
try:
|
35 |
-
messages = [
|
36 |
-
|
37 |
-
|
38 |
-
]
|
39 |
-
|
40 |
-
response = client.chat_completion(
|
41 |
-
|
42 |
-
|
43 |
-
|
44 |
-
|
45 |
-
)
|
|
|
|
|
46 |
|
47 |
# return {"response": response.choices[0].message.content}
|
48 |
-
return {"response":
|
49 |
except Exception as e:
|
50 |
raise HTTPException(status_code=500, detail=str(e))
|
|
|
@app.post("/chat", response_model=ChatResponse)
async def chat(request: ChatRequest):
    """Chat endpoint: forward the caller's message to the local Ollama model.

    Builds the conversation from the incoming ChatRequest (system message +
    user message), calls the `llama3.2` model, and returns the assistant's
    reply text. Any backend failure surfaces as HTTP 500 with the error text.

    Args:
        request: ChatRequest carrying `message`, `system_message`, and the
            sampling parameters `max_tokens`, `temperature`, `top_p`
            (fields evidenced by the previous implementation of this route).

    Returns:
        dict validating against ChatResponse: {"response": <assistant text>}.

    Raises:
        HTTPException: 500 wrapping any error from the model backend.
    """
    try:
        # Use the actual request payload instead of a hard-coded 'Hello!'
        # prompt, so the endpoint answers what the caller asked.
        messages = [
            {"role": "system", "content": request.system_message},
            {"role": "user", "content": request.message},
        ]
        # Ollama's Python client takes sampling parameters via `options`;
        # `num_predict` is its name for the max-token budget.
        response = ollama.chat(
            model='llama3.2',
            messages=messages,
            options={
                "temperature": request.temperature,
                "top_p": request.top_p,
                "num_predict": request.max_tokens,
            },
        )
        # Extract just the assistant text so the payload validates against
        # ChatResponse. NOTE(review): dropped `model_status` — `status` was
        # not defined in this scope (NameError at runtime) and is not part
        # of the declared response_model.
        return {"response": response["message"]["content"]}
    except Exception as e:
        # Chain the cause so the original traceback is preserved in logs.
        raise HTTPException(status_code=500, detail=str(e)) from e
|