samu committed on
Commit
ed5b42d
·
1 Parent(s): fefb5c9
backend/__pycache__/main.cpython-310.pyc CHANGED
Binary files a/backend/__pycache__/main.cpython-310.pyc and b/backend/__pycache__/main.cpython-310.pyc differ
 
backend/main.py CHANGED
@@ -1,14 +1,26 @@
1
  from fastapi import FastAPI, HTTPException
2
  from fastapi.responses import JSONResponse
 
3
  from pydantic import BaseModel
4
  from backend.utils import generate_completions
5
  from backend import config
6
  from backend.database import get_db_connection
7
  import psycopg2
8
  from psycopg2.extras import RealDictCursor
 
 
9
 
10
  app = FastAPI()
11
 
 
 
 
 
 
 
 
 
 
12
  # Dependency to get database connection
13
  async def get_db():
14
  conn = await get_db_connection()
@@ -17,9 +29,17 @@ async def get_db():
17
  finally:
18
  conn.close()
19
 
 
 
 
 
 
 
 
 
20
  class GenerationRequest(BaseModel):
21
  user_id: int
22
- query: str
23
 
24
  class MetadataRequest(BaseModel):
25
  query: str
@@ -64,6 +84,24 @@ async def generate_flashcards(data: GenerationRequest):
64
  except Exception as e:
65
  raise HTTPException(status_code=500, detail=str(e))
66
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
67
 
68
  @app.post("/generate/exercises")
69
  async def generate_exercises(data: GenerationRequest):
 
1
  from fastapi import FastAPI, HTTPException
2
  from fastapi.responses import JSONResponse
3
+ from fastapi.middleware.cors import CORSMiddleware
4
  from pydantic import BaseModel
5
  from backend.utils import generate_completions
6
  from backend import config
7
  from backend.database import get_db_connection
8
  import psycopg2
9
  from psycopg2.extras import RealDictCursor
10
+ from typing import Union, List, Literal
11
+
12
 
13
  app = FastAPI()
14
 
15
# Add CORS middleware
# NOTE(review): allow_origins=["*"] combined with allow_credentials=True makes
# Starlette echo back any request origin with credentials allowed — acceptable
# for development, but origins should be pinned down before production.
app.add_middleware(
    CORSMiddleware,
    allow_credentials=True,
    allow_origins=["*"],  # Allows all origins
    allow_methods=["*"],  # Allows all methods
    allow_headers=["*"],  # Allows all headers
)
23
+
24
  # Dependency to get database connection
25
  async def get_db():
26
  conn = await get_db_connection()
 
29
  finally:
30
  conn.close()
31
 
32
+ # class GenerationRequest(BaseModel):
33
+ # user_id: int
34
+ # query: str
35
+
36
class Message(BaseModel):
    """One turn of a chat conversation submitted by the API client."""

    # Only end-user and assistant turns are accepted here; the system
    # instructions are injected server-side (see generate_completions).
    role: Literal["user", "assistant"]
    content: str
40
class GenerationRequest(BaseModel):
    """Request body for the /generate/* endpoints."""

    user_id: int
    # Either a single plain-text query or a full chat history; a list is
    # flattened into one prompt string downstream in generate_completions.
    query: Union[str, List[Message]]
43
 
44
  class MetadataRequest(BaseModel):
45
  query: str
 
84
  except Exception as e:
85
  raise HTTPException(status_code=500, detail=str(e))
86
 
87
+ # @app.post("/generate/flashcards")
88
+ # async def generate_flashcards(data: GenerationRequest):
89
+ # try:
90
+ # response = await generate_completions.get_completions(
91
+ # data.query,
92
+ # config.flashcard_mode_instructions
93
+ # )
94
+ # return JSONResponse(
95
+ # content={
96
+ # "data": response,
97
+ # "type": "flashcards",
98
+ # "status": "success"
99
+ # },
100
+ # status_code=200
101
+ # )
102
+ # except Exception as e:
103
+ # raise HTTPException(status_code=500, detail=str(e))
104
+
105
 
106
  @app.post("/generate/exercises")
107
  async def generate_exercises(data: GenerationRequest):
backend/utils/__pycache__/generate_completions.cpython-310.pyc CHANGED
Binary files a/backend/utils/__pycache__/generate_completions.cpython-310.pyc and b/backend/utils/__pycache__/generate_completions.cpython-310.pyc differ
 
backend/utils/generate_completions.py CHANGED
@@ -2,9 +2,10 @@ from openai import AsyncOpenAI, OpenAI
2
  import asyncio
3
  import json
4
  from typing import AsyncIterator
5
- from typing import Union, List, Dict
6
  from dotenv import load_dotenv
7
  import os
 
8
  load_dotenv()
9
 
10
  # Initialize the async client
@@ -13,6 +14,14 @@ client = AsyncOpenAI(
13
  api_key=os.getenv("API_KEY"),
14
  )
15
 
 
 
 
 
 
 
 
 
16
  def process_input(data: Union[str, List[Dict[str, str]]]) -> Union[str, List[Dict[str, str]]]:
17
  """
18
  Processes input to either uppercase a string or modify the 'content' field
@@ -32,19 +41,59 @@ def process_input(data: Union[str, List[Dict[str, str]]]) -> Union[str, List[Dic
32
  raise TypeError("Input must be a string or a list of dictionaries with a 'content' field")
33
 
34
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
35
  async def get_completions(
36
  prompt: Union[str, List[Dict[str, str]]],
37
  instructions: str
38
  ) -> str:
39
- processed_prompt = process_input(prompt) # Ensures the input format is correct
 
 
 
 
 
 
 
40
 
41
  if isinstance(processed_prompt, str):
42
- messages = [
43
- {"role": "system", "content": instructions},
44
- {"role": "user", "content": processed_prompt}
45
- ]
46
  elif isinstance(processed_prompt, list):
47
- messages = [{"role": "system", "content": instructions}] + processed_prompt
 
 
 
 
 
 
 
 
 
 
48
  else:
49
  raise TypeError("Unexpected processed input type.")
50
 
@@ -54,5 +103,4 @@ async def get_completions(
54
  response_format={"type": "json_object"}
55
  )
56
 
57
- output: str = response.choices[0].message.content
58
- return output
 
2
  import asyncio
3
  import json
4
  from typing import AsyncIterator
5
+ from typing import Union, List, Dict, Literal
6
  from dotenv import load_dotenv
7
  import os
8
+ from pydantic import BaseModel
9
  load_dotenv()
10
 
11
  # Initialize the async client
 
14
  api_key=os.getenv("API_KEY"),
15
  )
16
 
17
+ class Message(BaseModel):
18
+ role: Literal["user", "assistant"]
19
+ content: str
20
+
21
+ # Helper function to flatten chat messages into a single string prompt
22
+ def flatten_messages(messages: List[Message]) -> str:
23
+ return "\n".join([f"{m.role}: {m.content}" for m in messages])
24
+
25
  def process_input(data: Union[str, List[Dict[str, str]]]) -> Union[str, List[Dict[str, str]]]:
26
  """
27
  Processes input to either uppercase a string or modify the 'content' field
 
41
  raise TypeError("Input must be a string or a list of dictionaries with a 'content' field")
42
 
43
 
44
+ # async def get_completions(
45
+ # prompt: Union[str, List[Dict[str, str]]],
46
+ # instructions: str
47
+ # ) -> str:
48
+ # processed_prompt = process_input(prompt) # Ensures the input format is correct
49
+
50
+ # if isinstance(processed_prompt, str):
51
+ # messages = [
52
+ # {"role": "system", "content": instructions},
53
+ # {"role": "user", "content": processed_prompt}
54
+ # ]
55
+ # elif isinstance(processed_prompt, list):
56
+ # messages = [{"role": "system", "content": instructions}] + processed_prompt
57
+ # else:
58
+ # raise TypeError("Unexpected processed input type.")
59
+
60
+ # response = await client.chat.completions.create(
61
+ # model=os.getenv("MODEL"),
62
+ # messages=messages,
63
+ # response_format={"type": "json_object"}
64
+ # )
65
+
66
+ # output: str = response.choices[0].message.content
67
+ # return output
68
+
69
  async def get_completions(
70
  prompt: Union[str, List[Dict[str, str]]],
71
  instructions: str
72
  ) -> str:
73
+ if isinstance(prompt, list):
74
+ formatted_query = flatten_messages(prompt)
75
+ else:
76
+ formatted_query = prompt
77
+
78
+ processed_prompt = process_input(formatted_query)
79
+
80
+ messages = [{"role": "system", "content": instructions}]
81
 
82
  if isinstance(processed_prompt, str):
83
+ messages.append({"role": "user", "content": processed_prompt})
84
+
 
 
85
  elif isinstance(processed_prompt, list):
86
+ # Only keep the history for context and append the latest user query at the end
87
+ history = processed_prompt[:-1]
88
+ last_user_msg = processed_prompt[-1]
89
+
90
+ # Optional: Validate that the last message is from the user
91
+ if last_user_msg.get("role") != "user":
92
+ raise ValueError("Last message must be from the user.")
93
+
94
+ messages += history
95
+ messages.append(last_user_msg)
96
+
97
  else:
98
  raise TypeError("Unexpected processed input type.")
99
 
 
103
  response_format={"type": "json_object"}
104
  )
105
 
106
+ return response.choices[0].message.content # adjust based on your client