Update app.py via AI Editor
app.py (CHANGED)
@@ -1,5 +1,5 @@
 import dash
-from dash import dcc, html, Input, Output, State, callback
+from dash import dcc, html, Input, Output, State, callback, ALL
 import dash_bootstrap_components as dbc
 import base64
 import io
@@ -7,18 +7,59 @@ import pandas as pd
 import openai
 import os
 import time
+import uuid
+import threading
+import tempfile
+import shutil
+import logging
+from flask import request
 from dash.exceptions import PreventUpdate
 import PyPDF2
 import docx
 import chardet

+# --- Logging Setup ---
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger("maiko_matrix_app")
+
+# --- App and Session Setup ---
 app = dash.Dash(__name__, external_stylesheets=[dbc.themes.BOOTSTRAP])
+server = app.server

 openai.api_key = os.environ.get('OPENAI_API_KEY')

+# Session management globals
+SESSION_DATA = {}
+SESSION_LOCKS = {}
+
+def get_session_id():
+    if 'session-id' in request.cookies:
+        return request.cookies['session-id']
+    # Generate new session id (uuid4) if not present
+    session_id = str(uuid.uuid4())
+    logger.info(f"Generated new session id: {session_id}")
+    return session_id
+
+def get_session_data():
+    session_id = get_session_id()
+    if session_id not in SESSION_DATA:
+        SESSION_DATA[session_id] = {
+            'uploaded_files': {},  # filename -> temp file path
+            'file_texts': {},      # filename -> extracted text
+            'current_matrix': None,
+            'matrix_type': None,
+            'temp_dir': tempfile.mkdtemp(prefix=f"maiko_{session_id}_"),
+        }
+        SESSION_LOCKS[session_id] = threading.Lock()
+    return SESSION_DATA[session_id], SESSION_LOCKS[session_id]
+
+def cleanup_session_tempdirs():
+    # Called optionally on server shutdown for cleanup
+    for sess in SESSION_DATA.values():
+        try:
+            shutil.rmtree(sess['temp_dir'])
+        except Exception as e:
+            logger.warning(f"Failed to cleanup tempdir: {e}")

 matrix_types = {
     "Project Deliverables Matrix": "Generate a project deliverables matrix all presumed and actual deliverables based on tasks, requirements and scope.",
@@ -40,73 +81,101 @@ matrix_types = {
     "SWOT Matrix": "Create a matrix analyzing Strengths, Weaknesses, Opportunities, and Threats."
 }

+# --- UI Layout ---
 app.layout = dbc.Container([
     dbc.Row([
         dbc.Col([
+            dbc.Card([
+                dbc.CardBody([
+                    html.H4("Project Artifacts", className="mb-3 mt-1"),
+                    dcc.Upload(
+                        id='upload-files',
+                        children=html.Div([
+                            'Drag and Drop or ',
+                            html.A('Select Files')
+                        ]),
+                        style={
+                            'width': '100%',
+                            'height': '60px',
+                            'lineHeight': '60px',
+                            'borderWidth': '1px',
+                            'borderStyle': 'dashed',
+                            'borderRadius': '5px',
+                            'textAlign': 'center',
+                            'margin': '10px 0'
+                        },
+                        multiple=True
+                    ),
+                    html.Div(id='file-list'),
+                    html.Hr(),
+                    html.Div([
+                        dbc.Button(
+                            matrix_label,
+                            id={'type': 'matrix-btn', 'index': matrix_label},
+                            color="link",
+                            className="mb-2 w-100 text-left custom-button",
+                            style={'overflow': 'hidden', 'text-overflow': 'ellipsis', 'white-space': 'nowrap'}
+                        ) for matrix_label in matrix_types.keys()
+                    ])
+                ])
+            ], className="mb-2")
+        ], width=3, style={'minWidth': '260px', 'background': '#f8f9fa', 'height': '100vh', 'position': 'fixed', 'overflowY': 'auto'}),
         dbc.Col([
+            dbc.Row([
+                dbc.Col([
+                    html.H2("Maiko Project Matrix Generator", className="mb-3 mt-2")
+                ])
+            ]),
+            dbc.Row([
+                dbc.Col([
+                    dbc.Card([
+                        dbc.CardBody([
+                            dcc.Loading(
+                                id="loading",
+                                type="default",
+                                children=[
+                                    html.Div(id="loading-output"),
+                                    html.Div(id='matrix-preview', className="border p-3 mb-3"),
+                                    dbc.Button("Download Matrix", id="btn-download", color="success", className="mt-3"),
+                                    dcc.Download(id="download-matrix"),
+                                ]
+                            )
+                        ])
+                    ])
+                ])
+            ]),
             html.Hr(),
+            dbc.Row([
+                dbc.Col([
+                    dbc.Card([
+                        dbc.CardBody([
+                            dcc.Loading(
+                                id="chat-loading",
+                                type="default",
+                                children=[
+                                    dbc.Textarea(id="chat-input", placeholder="Chat with Maiko to update matrix...", className="mb-2", style={'width': '100%', 'wordWrap': 'break-word'}),
+                                    dbc.Button("Send", id="btn-send-chat", color="primary", className="mb-3"),
+                                    html.Div(id="chat-output")
+                                ]
+                            )
+                        ])
+                    ])
+                ])
+            ])
+        ], width=9, style={'marginLeft': '30%'})
     ])
-], fluid=True)
+], fluid=True, style={'padding': '0'})

+
+# --- File Parsing ---
+def parse_file_content(file_path, filename):
     try:
+        with open(file_path, "rb") as f:
+            decoded = f.read()
         if filename.endswith('.pdf'):
             with io.BytesIO(decoded) as pdf_file:
                 reader = PyPDF2.PdfReader(pdf_file)
-                return ' '.join([page.extract_text() for page in reader.pages])
+                return ' '.join([page.extract_text() or "" for page in reader.pages])
         elif filename.endswith('.docx'):
             with io.BytesIO(decoded) as docx_file:
                 doc = docx.Document(docx_file)
@@ -117,7 +186,7 @@ def parse_file_content(contents, filename):
         else:
             return "Unsupported file format"
     except Exception as e:
+        logger.exception(f"Error processing file {filename}: {str(e)}")
         return "Error processing file"

 def truncate_filename(filename, max_length=24):
@@ -126,102 +195,137 @@ def truncate_filename(filename, max_length=24):
     else:
         return filename[:max_length - 3] + '...'

+def get_file_cards(file_dict):
+    cards = []
+    for name in file_dict:
+        cards.append(
+            dbc.Card(
+                dbc.CardBody(
+                    dbc.Row([
+                        dbc.Col(
+                            html.Span(
+                                truncate_filename(name),
+                                title=name,
+                                style={
+                                    'display': 'inline-block',
+                                    'overflow': 'hidden',
+                                    'textOverflow': 'ellipsis',
+                                    'whiteSpace': 'nowrap',
+                                    'maxWidth': '90%',
+                                    'verticalAlign': 'middle',
+                                }
+                            ),
+                            width='auto',
+                            style={'display': 'flex', 'alignItems': 'center', 'padding': '0'}
+                        ),
+                        dbc.Col(
+                            dbc.Button(
+                                "Delete",
+                                id={'type': 'remove-file', 'index': name},
+                                color="danger",
+                                size="sm",
+                                style={'marginLeft': 'auto', 'float': 'right'}
+                            ),
+                            width='auto',
+                            style={'display': 'flex', 'alignItems': 'center', 'justifyContent': 'flex-end', 'padding': '0'}
+                        ),
+                    ],
+                    justify="between",
+                    align="center",
+                    style={"margin": "0", "padding": "0"}
+                    ),
+                    style={'padding': '6px 8px', 'margin': '0', 'display': 'flex', 'alignItems': 'center', 'background': 'none', 'boxShadow': 'none'}
+                ),
+                style={'border': 'none', 'boxShadow': 'none', 'background': 'none', 'marginBottom': '2px'}
+            )
+        )
+    return cards
+
+# --- Callbacks ---
+
 @app.callback(
     Output('file-list', 'children'),
     Input('upload-files', 'contents'),
     State('upload-files', 'filename'),
+    prevent_initial_call=True
 )
-def update_output(list_of_contents, list_of_names
+def update_output(list_of_contents, list_of_names):
+    session_data, lock = get_session_data()
+    logger.info("Uploading files...")
     if list_of_contents is not None:
-                                style={
-                                    'display': 'inline-block',
-                                    'overflow': 'hidden',
-                                    'textOverflow': 'ellipsis',
-                                    'whiteSpace': 'nowrap',
-                                    'maxWidth': '90%',
-                                    'verticalAlign': 'middle',
-                                }
-                            ),
-                            width='auto',
-                            style={'display': 'flex', 'alignItems': 'center', 'padding': '0'}
-                        ),
-                        dbc.Col(
-                            dbc.Button(
-                                "Delete",
-                                id={'type': 'remove-file', 'index': name},
-                                color="danger",
-                                size="sm",
-                                style={'marginLeft': 'auto', 'float': 'right'}
-                            ),
-                            width='auto',
-                            style={'display': 'flex', 'alignItems': 'center', 'justifyContent': 'flex-end', 'padding': '0'}
-                        ),
-                    ],
-                    justify="between",
-                    align="center",
-                    style={"margin": "0", "padding": "0"}
-                    ),
-                    style={'padding': '6px 8px', 'margin': '0', 'display': 'flex', 'alignItems': 'center', 'background': 'none', 'boxShadow': 'none'}
-                ),
-                style={'border': 'none', 'boxShadow': 'none', 'background': 'none', 'marginBottom': '2px'}
-            )
-        )
-        if existing_files is None:
-            existing_files = []
-        return existing_files + new_files
-    return existing_files
+        with lock:
+            for content, name in zip(list_of_contents, list_of_names):
+                content_type, content_string = content.split(',')
+                decoded = base64.b64decode(content_string)
+                temp_path = os.path.join(session_data['temp_dir'], name)
+                with open(temp_path, 'wb') as f:
+                    f.write(decoded)
+                session_data['uploaded_files'][name] = temp_path
+                session_data['file_texts'][name] = parse_file_content(temp_path, name)
+            logger.info(f"Files after upload: {list(session_data['uploaded_files'].keys())}")
+        return get_file_cards(session_data['uploaded_files'])
+    raise PreventUpdate

 @app.callback(
     Output('file-list', 'children', allow_duplicate=True),
-    Input({'type': 'remove-file', 'index':
+    Input({'type': 'remove-file', 'index': ALL}, 'n_clicks'),
     State('file-list', 'children'),
     prevent_initial_call=True
 )
-def remove_file(n_clicks,
+def remove_file(n_clicks, existing_cards):
+    session_data, lock = get_session_data()
     ctx = dash.callback_context
     if not ctx.triggered:
         raise PreventUpdate
-    # Find which button was pressed
     triggered_id = ctx.triggered[0]['prop_id'].split('.')[0]
-    # triggered_id is a dict-like string, e.g. "{'type':'remove-file','index':'filename'}"
-    # Safely eval to dict (since dash handles this)
     import ast
     try:
         triggered_id_dict = ast.literal_eval(triggered_id)
         removed_file = triggered_id_dict['index']
     except Exception:
         raise PreventUpdate
+    with lock:
+        if removed_file in session_data['uploaded_files']:
+            try:
+                os.remove(session_data['uploaded_files'][removed_file])
+            except Exception as e:
+                logger.warning(f"Failed to delete temp file {removed_file}: {e}")
+        session_data['uploaded_files'].pop(removed_file, None)
+        session_data['file_texts'].pop(removed_file, None)
+        logger.info(f"Files after deletion: {list(session_data['uploaded_files'].keys())}")
+    return get_file_cards(session_data['uploaded_files'])
+
+@app.callback(
+    Output('matrix-preview', 'children'),
+    Output('loading-output', 'children'),
+    [Input({'type': 'matrix-btn', 'index': matrix_label}, 'n_clicks') for matrix_label in matrix_types.keys()],
+    prevent_initial_call=True
+)
+def generate_matrix(*args):
+    session_data, lock = get_session_data()
+    ctx = dash.callback_context
+    if not ctx.triggered:
+        raise PreventUpdate
+    button_id = ctx.triggered[0]['prop_id'].split('.')[0]
+    import ast
+    try:
+        triggered = ast.literal_eval(button_id)
+        matrix_type = triggered['index']
+    except Exception:
+        raise PreventUpdate
+
+    if not session_data['uploaded_files']:
+        return html.Div("Please upload project artifacts before generating a matrix."), ""
+    file_contents = list(session_data['file_texts'].values())
+    with lock:
         try:
-            filtered_files.append(file_card)
-    return filtered_files
+            session_data['matrix_type'] = matrix_type
+            session_data['current_matrix'] = generate_matrix_with_gpt(matrix_type, file_contents)
+            logger.info(f"{matrix_type} generated for session.")
+            return dbc.Table.from_dataframe(session_data['current_matrix'], striped=True, bordered=True, hover=True), f"{matrix_type} generated"
+        except Exception as e:
+            logger.exception(f"Error generating matrix: {str(e)}")
+            return html.Div(f"Error generating matrix: {str(e)}"), "Error"

 def generate_matrix_with_gpt(matrix_type, file_contents):
     prompt = f"""Generate a {matrix_type} based on the following project artifacts:
@@ -240,53 +344,22 @@ Item1A|Item1B|Item1C
 Item2A|Item2B|Item2C
 Now, generate the {matrix_type}:
 """
     response = openai.ChatCompletion.create(
         model="gpt-4-turbo",
         messages=[
-            {"role": "system", "content": "You are a precise matrix generator that outputs only the requested matrix without any additional text. Based on the files uploaded, as the project manager you perform the analysis and make appropriate assumptions to populate the matrix like roles, tasks, timelines, logically
+            {"role": "system", "content": "You are a precise matrix generator that outputs only the requested matrix without any additional text. Based on the files uploaded, as the project manager you perform the analysis and make appropriate assumptions to populate the matrix like roles, tasks, timelines, logically sequencing the matrix etc."},
             {"role": "user", "content": prompt}
         ]
     )
     matrix_text = response.choices[0].message.content.strip()
+    logger.info(f"Raw matrix text from GPT: {matrix_text[:200]}...")  # log only first 200 chars
     lines = [line.strip() for line in matrix_text.split('\n') if '|' in line]
     data = [line.split('|') for line in lines]
     data = [[cell.strip() for cell in row] for row in data]
     headers = data[0]
     data = data[1:]
     return pd.DataFrame(data, columns=headers)

-@app.callback(
-    Output('matrix-preview', 'children'),
-    Output('loading-output', 'children'),
-    [Input(f'btn-{matrix_type.lower().replace(" ", "-")}', 'n_clicks') for matrix_type in matrix_types.keys()],
-    prevent_initial_call=True
-)
-def generate_matrix(*args):
-    global current_matrix, matrix_type
-    ctx = dash.callback_context
-    if not ctx.triggered:
-        raise PreventUpdate
-    button_id = ctx.triggered[0]['prop_id'].split('.')[0]
-    matrix_type = button_id.replace('btn-', '').replace('-', ' ').title()
-    if not uploaded_files:
-        return html.Div("Please upload project artifacts before generating a matrix."), ""
-    file_contents = list(uploaded_files.values())
-    try:
-        current_matrix = generate_matrix_with_gpt(matrix_type, file_contents)
-        return dbc.Table.from_dataframe(current_matrix, striped=True, bordered=True, hover=True), f"{matrix_type} generated"
-    except Exception as e:
-        print(f"Error generating matrix: {str(e)}")
-        return html.Div(f"Error generating matrix: {str(e)}"), "Error"
 @app.callback(
     Output('chat-output', 'children'),
     Output('matrix-preview', 'children', allow_duplicate=True),
@@ -295,13 +368,14 @@ def generate_matrix(*args):
     prevent_initial_call=True
 )
 def update_matrix_via_chat(n_clicks, chat_input):
-    if not chat_input or current_matrix is None:
+    session_data, lock = get_session_data()
+    if not chat_input or session_data['current_matrix'] is None or session_data['matrix_type'] is None:
         raise PreventUpdate
+    matrix_type = session_data['matrix_type']
+    with lock:
+        prompt = f"""Update the following {matrix_type} based on this instruction: {chat_input}
 Current matrix:
-{current_matrix.to_string(index=False)}
+{session_data['current_matrix'].to_string(index=False)}
 Instructions:
 1. Provide ONLY the updated matrix as a table.
 2. Use ONLY pipe symbols (|) to separate columns.
@@ -312,46 +386,39 @@ Instructions:
 7. Each subsequent row should represent a single item in the matrix.
 Now, provide the updated {matrix_type}:
 """
-    headers = data[0]
-    data = data[1:]
+        response = openai.ChatCompletion.create(
+            model="gpt-4-turbo",
+            messages=[
+                {"role": "system", "content": "You are a precise matrix updater that outputs only the requested matrix without any additional text. You will make assumptions as a project manager to produce the matrix based on the limited information provided"},
+                {"role": "user", "content": prompt}
+            ]
+        )
+        updated_matrix_text = response.choices[0].message.content.strip()
+        logger.info(f"Raw updated matrix text from GPT: {updated_matrix_text[:200]}...")
+        lines = [line.strip() for line in updated_matrix_text.split('\n') if '|' in line]
+        data = [line.split('|') for line in lines]
+        data = [[cell.strip() for cell in row] for row in data]
+        headers = data[0]
+        data = data[1:]
+        session_data['current_matrix'] = pd.DataFrame(data, columns=headers)
+        return f"Matrix updated based on: {chat_input}", dbc.Table.from_dataframe(session_data['current_matrix'], striped=True, bordered=True, hover=True)

-    current_matrix = pd.DataFrame(data, columns=headers)
-    return f"Matrix updated based on: {chat_input}", dbc.Table.from_dataframe(current_matrix, striped=True, bordered=True, hover=True)
 @app.callback(
     Output("download-matrix", "data"),
     Input("btn-download", "n_clicks"),
     prevent_initial_call=True
 )
 def download_matrix(n_clicks):
-    if current_matrix is None:
+    session_data, lock = get_session_data()
+    if session_data['current_matrix'] is None or session_data['matrix_type'] is None:
         raise PreventUpdate
     output = io.BytesIO()
     with pd.ExcelWriter(output, engine='xlsxwriter') as writer:
-        current_matrix.to_excel(writer, sheet_name='Sheet1', index=False)
-    return dcc.send_bytes(output.getvalue(), f"{matrix_type}.xlsx")
+        session_data['current_matrix'].to_excel(writer, sheet_name='Sheet1', index=False)
+    logger.info(f"Matrix downloaded: {session_data['matrix_type']}")
+    return dcc.send_bytes(output.getvalue(), f"{session_data['matrix_type']}.xlsx")

 if __name__ == '__main__':
     print("Starting the Dash application...")
-    app.run(debug=
+    app.run(debug=True, host='0.0.0.0', port=7860, threaded=True)
     print("Dash application has finished running.")
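One detail the diff leaves open is where the session-id cookie read by get_session_id() gets set; if nothing sets it, every request falls through to a fresh uuid4. A sketch of one possible way to set it on the underlying Flask server (an assumption for illustration, not part of this commit):

# Hypothetical addition (not in the commit): persist the per-browser session id
# as a cookie so later callbacks hit the same SESSION_DATA entry.
@server.after_request
def set_session_cookie(response):
    if 'session-id' not in request.cookies:
        response.set_cookie('session-id', str(uuid.uuid4()), httponly=True)
    return response

On the very first request, get_session_data() will still have keyed its entry under a separately generated id, so a real implementation would need to share that id with the response (for example via flask.g); that wiring is left out of the sketch.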