Drag2121 committed
Commit 15ec193 · 1 Parent(s): 4481d28
Files changed (1)
  1. app.py +1 -16
app.py CHANGED
@@ -4,7 +4,7 @@ from pydantic import BaseModel
 from langchain_ollama import ChatOllama
 from langchain.schema import StrOutputParser
 from langchain.prompts import ChatPromptTemplate
-from fastapi.responses import StreamingResponse
+
 import logging
 from functools import lru_cache
 
@@ -45,21 +45,6 @@ async def ask_question(question: Question):
         logger.error(f"Error in /ask endpoint: {str(e)}")
         raise HTTPException(status_code=500, detail=str(e))
 
-@app.post("/ask_streaming")
-async def ask_question_stream(question: Question):
-    try:
-        logger.info(f"Received question for streaming: {question.text}")
-        chain = get_chain()
-
-        async def generate():
-            async for chunk in chain.astream({"question": question.text}):
-                yield chunk + "\n"
-
-        return StreamingResponse(generate(), media_type="text/plain")
-    except Exception as e:
-        logger.error(f"Error in /ask_stream endpoint: {str(e)}")
-        raise HTTPException(status_code=500, detail=str(e))
-
 
 @app.on_event("startup")
 async def startup_event():
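For context, here is a minimal sketch of the non-streaming path that remains in app.py after this commit, assuming a conventional prompt → ChatOllama → StrOutputParser chain. Only Question, get_chain(), the /ask route's error handling, and the startup hook are visible in the diff; the handler body, the prompt text, and the model name below are assumptions for illustration.

```python
# Hedged sketch of the surviving non-streaming flow; not the repository's exact code.
import logging
from functools import lru_cache

from fastapi import FastAPI, HTTPException
from pydantic import BaseModel
from langchain_ollama import ChatOllama
from langchain.schema import StrOutputParser
from langchain.prompts import ChatPromptTemplate

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

app = FastAPI()


class Question(BaseModel):
    text: str


@lru_cache
def get_chain():
    # Prompt wording and model name are assumptions, not taken from the repo.
    prompt = ChatPromptTemplate.from_template("Answer concisely: {question}")
    llm = ChatOllama(model="llama3")
    return prompt | llm | StrOutputParser()


@app.post("/ask")
async def ask_question(question: Question):
    try:
        logger.info(f"Received question: {question.text}")
        chain = get_chain()
        # With /ask_streaming removed, the full answer is returned in one response
        # via ainvoke() instead of being yielded chunk by chunk from astream().
        answer = await chain.ainvoke({"question": question.text})
        return {"answer": answer}
    except Exception as e:
        logger.error(f"Error in /ask endpoint: {str(e)}")
        raise HTTPException(status_code=500, detail=str(e))
```

Clients that previously consumed the text/plain chunk stream from /ask_streaming would now wait for the single response from /ask.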
 