Update app.py
app.py CHANGED
@@ -222,33 +222,55 @@ def generate_matrix_with_gpt(matrix_type, file_contents):
     data = data[1:]
 
     return pd.DataFrame(data, columns=headers)
+
+@app.callback(
+    Output('chat-output', 'children'),
+    Output('matrix-preview', 'children', allow_duplicate=True),
+    Input('btn-send-chat', 'n_clicks'),
+    State('chat-input', 'value'),
+    prevent_initial_call=True
 )
 def update_matrix_via_chat(n_clicks, chat_input):
-    global current_matrix
+    global current_matrix, matrix_type
     if not chat_input or current_matrix is None:
         raise PreventUpdate
 
     prompt = f"Update the following {matrix_type} based on this instruction: {chat_input}\n\n"
-    prompt += current_matrix.to_string()
+    prompt += current_matrix.to_string(index=False)
+    prompt += "\n\nProvide the updated matrix using '|' to separate columns. Do not include any separator lines, headers, or formatting characters. Start directly with the column names separated by '|'."
 
     response = openai.ChatCompletion.create(
         model="gpt-3.5-turbo",
         messages=[
-            {"role": "system", "content": "You are a helpful assistant that updates project management matrices."},
+            {"role": "system", "content": "You are a helpful assistant that updates project management matrices without any formatting or separator lines."},
             {"role": "user", "content": prompt}
         ]
     )
 
     updated_matrix_text = response.choices[0].message.content
-
-
-    lines
-
-
+    print("Raw updated matrix text from GPT:", updated_matrix_text) # For debugging
+
+    # Filter out any lines that don't contain the '|' character
+    lines = [line.strip() for line in updated_matrix_text.strip().split('\n') if '|' in line]
+
+    # Parse the matrix text
+    data = [line.split('|') for line in lines]
+
+    # Strip whitespace from each cell
+    data = [[cell.strip() for cell in row] for row in data]
+
+    # Ensure all rows have the same number of columns
+    max_columns = max(len(row) for row in data)
+    data = [row + [''] * (max_columns - len(row)) for row in data]
+
+    # Use the first row as headers, and the rest as data
+    headers = data[0]
+    data = data[1:]
+
     current_matrix = pd.DataFrame(data, columns=headers)
 
     return f"Matrix updated based on: {chat_input}", dbc.Table.from_dataframe(current_matrix, striped=True, bordered=True, hover=True)
-
+
 @app.callback(
     Output("download-matrix", "data"),
     Input("btn-download", "n_clicks"),
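For reference, the parsing step this commit adds can be exercised on its own. Below is a minimal sketch of that logic, assuming only pandas; the helper name parse_pipe_matrix and the sample reply are illustrative and not part of app.py.

# Minimal sketch of the pipe-delimited parsing added in this commit.
# parse_pipe_matrix is a hypothetical helper name, not defined in app.py.
import pandas as pd

def parse_pipe_matrix(text: str) -> pd.DataFrame:
    # Keep only lines that actually contain '|' (drops chatty prose or separator rows)
    lines = [line.strip() for line in text.strip().split('\n') if '|' in line]
    # Split each line into cells and strip surrounding whitespace
    data = [[cell.strip() for cell in line.split('|')] for line in lines]
    # Pad short rows so every row has the same number of columns
    max_columns = max(len(row) for row in data)
    data = [row + [''] * (max_columns - len(row)) for row in data]
    # First row becomes the header, the rest become the data rows
    return pd.DataFrame(data[1:], columns=data[0])

# Example: an extra chatty first line gets filtered out before parsing
reply = "Here is the matrix:\nTask | Owner | Status\nDesign | Ana | Done\nBuild | Raj | In progress"
print(parse_pipe_matrix(reply))

Padding short rows to max_columns keeps pd.DataFrame from failing on ragged model output. Note also that the new callback writes to 'matrix-preview' with allow_duplicate=True, which is why it sets prevent_initial_call=True; Dash requires that flag on callbacks with duplicate outputs.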