abdullahalioo committed on
Commit 387e225 · verified · 1 Parent(s): a62c68b

Update app.py

Files changed (1)
  1. app.py +39 -8
app.py CHANGED
@@ -1,12 +1,13 @@
 import os
 from fastapi import FastAPI, HTTPException
-from fastapi.responses import StreamingResponse
+from fastapi.responses import StreamingResponse, Response
 from openai import AsyncOpenAI
 
 app = FastAPI()
 
 async def generate_ai_response(prompt: str):
     # Configuration for unofficial GitHub AI endpoint
+    global token
     token = os.getenv("GITHUB_TOKEN")
     if not token:
         raise HTTPException(status_code=500, detail="GitHub token not configured")
@@ -36,20 +37,50 @@ async def generate_ai_response(prompt: str):
         yield f"Error: {str(err)}"
         raise HTTPException(status_code=500, detail="AI generation failed")
 
+class CustomStreamingResponse(Response):
+    def __init__(self, content, token, media_type="text/event-stream", status_code=200):
+        super().__init__(content=content, media_type=media_type, status_code=status_code)
+        self.token = token
+
+    async def __call__(self, scope, receive, send):
+        await send({
+            "type": "http.response.start",
+            "status": self.status_code,
+            "headers": [
+                (b"content-type", self.media_type.encode()),
+                (b"x-token-value", self.token.encode())
+            ]
+        })
+        async for chunk in self.body_iterator:
+            await send({
+                "type": "http.response.body",
+                "body": chunk.encode() if isinstance(chunk, str) else chunk,
+                "more_body": True
+            })
+        await send({
+            "type": "http.response.body",
+            "body": b"",
+            "more_body": False
+        })
+
 @app.post("/generate")
 async def generate_response(prompt: str):
     if not prompt:
         raise HTTPException(status_code=400, detail="Prompt cannot be empty")
 
-    return StreamingResponse(
-        generate_ai_response(prompt),
+    global token
+    return CustomStreamingResponse(
+        content=generate_ai_response(prompt),
+        token=token,
+        media_type="text/event-stream"
     )
 
-def get_app():
-    return app
-
-@app.post("/get-token")
+@app.get("/get-token")  # New endpoint to return the token
 async def get_token():
     global token
-    return {"token": token}
+    if not token:
+        raise HTTPException(status_code=500, detail="GitHub token not configured")
+    return {"token": token}
+
+def get_app():
+    return app
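
Note that the committed CustomStreamingResponse iterates over self.body_iterator, which its __init__ never assigns, and Starlette's Response.__init__ expects str/bytes content rather than an async generator. The same idea — streaming the generator while exposing the token in an x-token-value header — can be sketched with the built-in StreamingResponse instead. This is an illustrative sketch, not the code from this commit; the placeholder generator stands in for the OpenAI-backed one in app.py.

# Minimal sketch (not the committed code): StreamingResponse accepts an async
# generator directly and extra headers, so no custom ASGI __call__ is needed.
import os

from fastapi import FastAPI, HTTPException
from fastapi.responses import StreamingResponse

app = FastAPI()

async def generate_ai_response(prompt: str):
    # Illustrative placeholder for the OpenAI-backed generator in app.py.
    for word in prompt.split():
        yield word + " "

@app.post("/generate")
async def generate_response(prompt: str):
    if not prompt:
        raise HTTPException(status_code=400, detail="Prompt cannot be empty")
    token = os.getenv("GITHUB_TOKEN")
    if not token:
        raise HTTPException(status_code=500, detail="GitHub token not configured")
    return StreamingResponse(
        generate_ai_response(prompt),
        media_type="text/event-stream",
        headers={"x-token-value": token},
    )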
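
A hypothetical client-side check of the new behaviour (the base URL and prompt below are illustrative, and the server is assumed to be running locally) reads the x-token-value header and the streamed chunks from POST /generate, then calls the new GET /get-token endpoint:

# Hypothetical client: prompt is sent as a query parameter, matching the
# signature async def generate_response(prompt: str).
import asyncio

import httpx

BASE = "http://localhost:8000"  # assumed local dev server

async def main():
    async with httpx.AsyncClient() as client:
        async with client.stream(
            "POST", f"{BASE}/generate", params={"prompt": "hello"}
        ) as resp:
            print("x-token-value:", resp.headers.get("x-token-value"))
            async for chunk in resp.aiter_text():
                print(chunk, end="", flush=True)
        token_resp = await client.get(f"{BASE}/get-token")
        print(token_resp.json())

asyncio.run(main())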