Update app.py
app.py
CHANGED
@@ -37,12 +37,20 @@ async def chat(request: ChatRequest):
     try:
         if request.model_choice == "google":
             client = genai.Client(api_key=google_api_key)
-
-
-
-
-
-
+
+            if len(request.chat_history) > 10:
+                summarize_prompt = f"""Please summarize the following chat history concisely, focusing on the key points and main topics discussed. Avoid
+                unnecessary details and provide a clear, straightforward summary. {request.chat_history[:-10]}"""  # summarize everything except last k items
+                summary_response = client.models.generate_content(
+                    model="gemini-2.0-flash",
+                    contents=summarize_prompt,
+                    config=GenerateContentConfig(
+                        system_instruction=["You are a helpful assistant who is an expert at summarization."]
+                    ),
+                )
+                request.chat_history = request.chat_history[-10:]  # keep last k items
+                request.chat_history.insert(0, {"role": "user", "parts": [{"text": f"Here is a summary of this conversation so far: {summary_response.text}"}]})
+
 
             response = client.models.generate_content(
                 model="gemini-2.0-flash",
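For reference, the change above compacts the request's chat history once it grows past 10 turns: everything older than the last 10 messages is summarized with a gemini-2.0-flash call, and that summary is prepended as a single user turn. Below is a minimal, self-contained sketch of the same windowing pattern; the helper name compact_history and the dummy summarizer are illustrative only and are not part of app.py.

# Hypothetical sketch of the history-compaction pattern in this commit.
# compact_history and the dummy summarizer are illustrative names, not code from app.py.
K = 10  # number of most recent turns kept verbatim

def compact_history(chat_history, summarize):
    """Shrink a Gemini-style history ([{"role": ..., "parts": [{"text": ...}]}]) to at most K+1 turns."""
    if len(chat_history) <= K:
        return chat_history
    # In app.py, summarize() is a client.models.generate_content call to gemini-2.0-flash.
    summary = summarize(chat_history[:-K])   # summarize everything except the last K turns
    recent = chat_history[-K:]               # keep the last K turns as-is
    return [{"role": "user",
             "parts": [{"text": f"Here is a summary of this conversation so far: {summary}"}]},
            *recent]

# Runs without an API key thanks to a dummy summarizer:
history = [{"role": "user", "parts": [{"text": f"message {i}"}]} for i in range(14)]
print(len(compact_history(history, lambda old: f"{len(old)} earlier messages")))  # -> 11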