abdullahalioo committed on
Commit
f7c0abb
·
verified ·
1 Parent(s): 744ffd6

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +58 -0
app.py ADDED
@@ -0,0 +1,58 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ from fastapi import FastAPI, HTTPException
3
+ from fastapi.responses import StreamingResponse
4
+ from openai import AsyncOpenAI
5
+ from pydantic import BaseModel
6
+ import asyncio
7
+
8
# Initialize FastAPI app at module level so ASGI servers (uvicorn etc.)
# can import `app` directly.
app = FastAPI()
10
+
11
# Define request body model for the prompt
class PromptRequest(BaseModel):
    """JSON request body for the /generate endpoint."""

    # The user's text prompt, forwarded verbatim to the model.
    prompt: str
14
+
15
# Initialize OpenAI client.
# GITHUB_TOKEN authenticates against the GitHub Models inference endpoint.
token = os.getenv("GITHUB_TOKEN")
if not token:
    # Fail fast at import time so the server never starts unconfigured.
    raise ValueError("GITHUB_TOKEN environment variable not set")
endpoint = "https://models.github.ai/inference"
model = "openai/gpt-4.1-mini"
# AsyncOpenAI is reused across requests; it manages its own connection pool.
client = AsyncOpenAI(base_url=endpoint, api_key=token)
22
+
23
# Async generator that relays model output to the client chunk-by-chunk.
async def stream_response(prompt: str):
    """Yield text fragments from a streaming chat completion for *prompt*.

    Any failure (connection, auth, model error) is surfaced as a final
    text chunk of the form ``Error: ...`` rather than raised, so the
    HTTP stream terminates gracefully instead of aborting mid-response.
    """
    try:
        completion = await client.chat.completions.create(
            model=model,
            stream=True,
            temperature=1.0,
            top_p=1.0,
            messages=[
                {"role": "system", "content": "You are a helpful assistant."},
                {"role": "user", "content": prompt},
            ],
        )

        # Relay each delta as soon as it arrives; skip keep-alive chunks
        # that carry no choices.
        async for part in completion:
            if not part.choices:
                continue
            delta_text = part.choices[0].delta.content or ""
            yield delta_text

    except Exception as err:
        yield f"Error: {err}"
46
+
47
# Endpoint to handle prompt and stream response
@app.post("/generate")
async def generate_response(request: PromptRequest):
    """Stream the model's reply to *request.prompt* as plain text.

    Returns a ``StreamingResponse`` backed by :func:`stream_response`.
    NOTE(review): once streaming has started, errors inside the generator
    are emitted as text chunks (see stream_response) — they can no longer
    be turned into an HTTP error status here.
    """
    try:
        return StreamingResponse(
            stream_response(request.prompt),
            media_type="text/plain",
        )
    except Exception as err:
        # Defensive: constructing the response should not normally fail,
        # but surface anything unexpected as a clean 500.  Chain the cause
        # with `from err` so the original traceback is preserved in logs.
        raise HTTPException(status_code=500, detail=f"Server error: {err}") from err
58
+