Update app.py
app.py CHANGED
@@ -8,6 +8,9 @@ import openai
 import os
 import time
 from dash.exceptions import PreventUpdate
+import PyPDF2
+import docx
+import chardet
 
 # Initialize the Dash app
 app = dash.Dash(__name__, external_stylesheets=[dbc.themes.BOOTSTRAP])
@@ -16,7 +19,7 @@ app = dash.Dash(__name__, external_stylesheets=[dbc.themes.BOOTSTRAP])
 openai.api_key = os.environ.get('OPENAI_API_KEY')
 
 # Global variables
-uploaded_files =
+uploaded_files = {}
 current_matrix = None
 matrix_type = None
 
@@ -92,6 +95,27 @@ app.layout = dbc.Container([
     ])
 ], fluid=True)
 
+def parse_file_content(contents, filename):
+    content_type, content_string = contents.split(',')
+    decoded = base64.b64decode(content_string)
+    try:
+        if filename.endswith('.pdf'):
+            with io.BytesIO(decoded) as pdf_file:
+                reader = PyPDF2.PdfReader(pdf_file)
+                return ' '.join([page.extract_text() for page in reader.pages])
+        elif filename.endswith('.docx'):
+            with io.BytesIO(decoded) as docx_file:
+                doc = docx.Document(docx_file)
+                return ' '.join([para.text for para in doc.paragraphs])
+        elif filename.endswith('.txt') or filename.endswith('.rtf'):
+            encoding = chardet.detect(decoded)['encoding']
+            return decoded.decode(encoding)
+        else:
+            return "Unsupported file format"
+    except Exception as e:
+        print(f"Error processing file {filename}: {str(e)}")
+        return "Error processing file"
+
 @app.callback(
     Output('file-list', 'children'),
     Input('upload-files', 'contents'),
@@ -99,18 +123,57 @@ app.layout = dbc.Container([
     State('file-list', 'children')
 )
 def update_output(list_of_contents, list_of_names, existing_files):
+    global uploaded_files
     if list_of_contents is not None:
-        new_files = [
+        new_files = []
+        for i, (content, name) in enumerate(zip(list_of_contents, list_of_names)):
+            file_content = parse_file_content(content, name)
+            uploaded_files[name] = file_content
+            new_files.append(html.Div([
+                html.Button('×', id={'type': 'remove-file', 'index': name}, style={'marginRight': '5px', 'fontSize': '10px'}),
+                html.Span(name)
+            ]))
         if existing_files is None:
             existing_files = []
         return existing_files + new_files
     return existing_files
 
+@app.callback(
+    Output('file-list', 'children', allow_duplicate=True),
+    Input({'type': 'remove-file', 'index': dash.ALL}, 'n_clicks'),
+    State('file-list', 'children'),
+    prevent_initial_call=True
+)
+def remove_file(n_clicks, existing_files):
+    global uploaded_files
+    ctx = dash.callback_context
+    if not ctx.triggered:
+        raise PreventUpdate
+    removed_file = ctx.triggered[0]['prop_id'].split(',')[0].split(':')[-1].strip('}')
+    uploaded_files.pop(removed_file, None)
+    return [file for file in existing_files if file['props']['children'][1]['props']['children'] != removed_file]
+
+def generate_matrix_with_gpt(matrix_type, file_contents):
+    prompt = f"Generate a {matrix_type} based on the following project artifacts:\n\n"
+    prompt += "\n\n".join(file_contents)
+    prompt += f"\n\nCreate a {matrix_type} in a format that can be represented as a pandas DataFrame."
+
+    response = openai.ChatCompletion.create(
+        model="gpt-3.5-turbo",
+        messages=[
+            {"role": "system", "content": "You are a helpful assistant that generates project management matrices."},
+            {"role": "user", "content": prompt}
+        ]
+    )
+
+    matrix_text = response.choices[0].message.content
+    # Parse the matrix_text into a pandas DataFrame
+    # This is a simplified parsing, you might need to adjust based on the actual output format
+    lines = matrix_text.strip().split('\n')
+    headers = lines[0].split('|')
+    data = [line.split('|') for line in lines[2:]]
+    return pd.DataFrame(data, columns=headers)
+
 @app.callback(
     Output('matrix-preview', 'children'),
     Output('loading-output', 'children'),
@@ -125,13 +188,12 @@ def generate_matrix(*args):
     button_id = ctx.triggered[0]['prop_id'].split('.')[0]
     matrix_type = button_id.replace('btn-', '').replace('-', ' ').title()
 
+    if not uploaded_files:
+        return html.Div("Please upload project artifacts before generating a matrix."), ""
+
+    file_contents = list(uploaded_files.values())
+
+    current_matrix = generate_matrix_with_gpt(matrix_type, file_contents)
 
     return dbc.Table.from_dataframe(current_matrix, striped=True, bordered=True, hover=True), f"{matrix_type} generated"
 
@@ -144,15 +206,29 @@ def generate_matrix(*args):
 )
 def update_matrix_via_chat(n_clicks, chat_input):
     global current_matrix
-    if not chat_input:
+    if not chat_input or current_matrix is None:
         raise PreventUpdate
 
+    prompt = f"Update the following {matrix_type} based on this instruction: {chat_input}\n\n"
+    prompt += current_matrix.to_string()
+
+    response = openai.ChatCompletion.create(
+        model="gpt-3.5-turbo",
+        messages=[
+            {"role": "system", "content": "You are a helpful assistant that updates project management matrices."},
+            {"role": "user", "content": prompt}
+        ]
+    )
+
+    updated_matrix_text = response.choices[0].message.content
+    # Parse the updated_matrix_text into a pandas DataFrame
+    # This is a simplified parsing, you might need to adjust based on the actual output format
+    lines = updated_matrix_text.strip().split('\n')
+    headers = lines[0].split('|')
+    data = [line.split('|') for line in lines[2:]]
+    current_matrix = pd.DataFrame(data, columns=headers)
 
-    return
+    return f"Matrix updated based on: {chat_input}", dbc.Table.from_dataframe(current_matrix, striped=True, bordered=True, hover=True)
 
 @app.callback(
     Output("download-matrix", "data"),