import base64
import io
import json
import os

import chardet
import dash
import dash_bootstrap_components as dbc
import docx
import openai
import pandas as pd
import PyPDF2
from dash import dcc, html, Input, Output, State
from dash.exceptions import PreventUpdate

app = dash.Dash(__name__, external_stylesheets=[dbc.themes.BOOTSTRAP])

# The API key is read from the environment; this module targets the legacy
# (pre-1.0) openai SDK, which is configured at module level.
openai.api_key = os.environ.get('OPENAI_API_KEY')

# In-memory state shared by the callbacks below. Module-level globals are
# shared across every browser session, so this is only suitable for a
# single-user deployment.
uploaded_files = {}
current_matrix = None
matrix_type = None

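# Matrix types offered as sidebar buttons; each value is a short description
# of what the generated matrix should contain.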
matrix_types = {
    "Communications Plan Matrix": "Create a matrix showing stakeholders, communication methods, frequency, and responsibilities.",
    "Project Kick-off Matrix": "Generate a matrix outlining key project details, goals, team roles, and initial timelines.",
    "Decision Matrix": "Develop a matrix for evaluating options against criteria, with weighted scores.",
    "Lessons Learned Matrix": "Create a matrix capturing project experiences, challenges, solutions, and recommendations.",
    "Key Performance Indicator Matrix": "Generate a matrix of KPIs, their targets, actual performance, and status.",
    "Prioritization Matrix": "Develop a matrix for ranking tasks or features based on importance and urgency.",
    "Risk Matrix": "Create a matrix assessing potential risks, their likelihood, impact, and mitigation strategies.",
    "RACI Matrix": "Generate a matrix showing team members and their roles (Responsible, Accountable, Consulted, Informed) for each task.",
    "Project Schedule Matrix": "Develop a matrix showing project phases, tasks, durations, and dependencies.",
    "Quality Control Matrix": "Create a matrix outlining quality standards, testing methods, and acceptance criteria.",
    "Requirements Traceability Matrix": "Generate a matrix linking requirements to their sources, test cases, and status.",
    "Sprint Planning Matrix": "Develop a matrix for sprint tasks, story points, assignees, and status.",
    "Test Traceability Matrix": "Create a matrix linking test cases to requirements, execution status, and results.",
    "Sprint Backlog": "Generate a matrix of user stories, tasks, estimates, and priorities for the sprint.",
    "Sprint Retrospective": "Develop a matrix capturing what went well, what didn't, and action items from the sprint.",
    "SWOT Matrix": "Create a matrix analyzing Strengths, Weaknesses, Opportunities, and Threats."
}

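# Two-column layout: artifact upload and matrix buttons on the left; matrix
# preview, download, and chat-based editing on the right.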
app.layout = dbc.Container([
    dbc.Row([
        dbc.Col([
            html.H4("Project Artifacts", className="mt-3 mb-4"),
            dcc.Upload(
                id='upload-files',
                children=html.Div([
                    'Drag and Drop or ',
                    html.A('Select Files')
                ]),
                style={
                    'width': '100%',
                    'height': '60px',
                    'lineHeight': '60px',
                    'borderWidth': '1px',
                    'borderStyle': 'dashed',
                    'borderRadius': '5px',
                    'textAlign': 'center',
                    'margin': '10px 0'
                },
                multiple=True
            ),
            html.Div(id='file-list'),
            html.Hr(),
            html.Div([
                dbc.Button(
                    mt,
                    id=f'btn-{mt.lower().replace(" ", "-")}',
                    color="primary",
                    className="mb-2 w-100",
                    # Dash style keys must be camelCase to be applied by React.
                    style={'overflow': 'hidden', 'textOverflow': 'ellipsis', 'whiteSpace': 'nowrap'}
                ) for mt in matrix_types
            ])
        ], width=3),
        dbc.Col([
            html.Div(id='matrix-preview', className="border p-3 mb-3"),
            dcc.Loading(
                id="loading-indicator",
                type="dot",
                children=[html.Div(id="loading-output")]
            ),
            dbc.Button("Download Matrix", id="btn-download", color="success", className="mt-3"),
            dcc.Download(id="download-matrix"),
            html.Hr(),
            dbc.Input(id="chat-input", type="text", placeholder="Chat with GPT to update matrix...", className="mb-2"),
            dbc.Button("Send", id="btn-send-chat", color="primary", className="mb-3"),
            html.Div(id="chat-output")
        ], width=9)
    ])
], fluid=True)

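# Extract plain text from an uploaded file so it can be used as context for
# the language model. Supports PDF, DOCX, and text-based uploads.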
def parse_file_content(contents, filename):
    content_type, content_string = contents.split(',')
    decoded = base64.b64decode(content_string)
    try:
        if filename.endswith('.pdf'):
            with io.BytesIO(decoded) as pdf_file:
                reader = PyPDF2.PdfReader(pdf_file)
                return ' '.join([page.extract_text() or '' for page in reader.pages])
        elif filename.endswith('.docx'):
            with io.BytesIO(decoded) as docx_file:
                doc = docx.Document(docx_file)
                return ' '.join([para.text for para in doc.paragraphs])
        elif filename.endswith('.txt') or filename.endswith('.rtf'):
            # Guess the encoding; fall back to UTF-8 if chardet cannot tell.
            encoding = chardet.detect(decoded)['encoding'] or 'utf-8'
            return decoded.decode(encoding, errors='replace')
        else:
            return "Unsupported file format"
    except Exception as e:
        print(f"Error processing file {filename}: {str(e)}")
        return "Error processing file"

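# Parse newly uploaded files, cache their text in `uploaded_files`, and render
# the file list with a remove button next to each entry.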
@app.callback(
    Output('file-list', 'children'),
    Input('upload-files', 'contents'),
    State('upload-files', 'filename'),
    State('file-list', 'children')
)
def update_output(list_of_contents, list_of_names, existing_files):
    global uploaded_files
    if list_of_contents is not None:
        new_files = []
        for content, name in zip(list_of_contents, list_of_names):
            file_content = parse_file_content(content, name)
            uploaded_files[name] = file_content
            new_files.append(html.Div([
                html.Button('×', id={'type': 'remove-file', 'index': name}, style={'marginRight': '5px', 'fontSize': '10px'}),
                html.Span(name)
            ]))
        if existing_files is None:
            existing_files = []
        return existing_files + new_files
    return existing_files

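# Remove a file from the cache and the on-screen list when its '×' button is
# clicked (pattern-matching callback).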
@app.callback(
    Output('file-list', 'children', allow_duplicate=True),
    Input({'type': 'remove-file', 'index': dash.ALL}, 'n_clicks'),
    State('file-list', 'children'),
    prevent_initial_call=True
)
def remove_file(n_clicks, existing_files):
    global uploaded_files
    ctx = dash.callback_context
    # Ignore spurious triggers, e.g. when new remove buttons are first rendered.
    if not ctx.triggered or not ctx.triggered[0]['value']:
        raise PreventUpdate
    # The prop_id of a pattern-matching input is the JSON-encoded component id
    # followed by ".n_clicks"; decode it to recover the filename.
    triggered_id = json.loads(ctx.triggered[0]['prop_id'].rsplit('.', 1)[0])
    removed_file = triggered_id['index']
    uploaded_files.pop(removed_file, None)
    return [file for file in existing_files if file['props']['children'][1]['props']['children'] != removed_file]

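# Build a prompt from the uploaded artifacts, ask GPT for the requested matrix
# as a Markdown table, and convert the reply into a pandas DataFrame.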
def generate_matrix_with_gpt(matrix_type, file_contents):
    prompt = f"Generate a {matrix_type} based on the following project artifacts:\n\n"
    prompt += "\n\n".join(file_contents)
    prompt += f"\n\n{matrix_types.get(matrix_type, '')}"
    prompt += f"\n\nReturn the {matrix_type} as a Markdown table so it can be loaded into a pandas DataFrame."

    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[
            {"role": "system", "content": "You are a helpful assistant that generates project management matrices."},
            {"role": "user", "content": prompt}
        ]
    )

    matrix_text = response.choices[0].message.content
    return parse_matrix_response(matrix_text)


def parse_matrix_response(matrix_text):
    """Convert a Markdown-style pipe table returned by the model into a DataFrame."""
    table_lines = [line for line in matrix_text.strip().split('\n') if '|' in line]
    rows = [[cell.strip() for cell in line.strip().strip('|').split('|')]
            for line in table_lines]
    headers = rows[0]
    # Drop the |---|---| separator row and any row that does not match the header width.
    data = [row for row in rows[1:]
            if len(row) == len(headers) and not set(''.join(row)) <= set('-: ')]
    return pd.DataFrame(data, columns=headers)

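# One callback serves every matrix button: work out which button fired, then
# generate and preview the corresponding matrix.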
@app.callback(
    Output('matrix-preview', 'children'),
    Output('loading-output', 'children'),
    [Input(f'btn-{mt.lower().replace(" ", "-")}', 'n_clicks') for mt in matrix_types],
    prevent_initial_call=True
)
def generate_matrix(*args):
    global current_matrix, matrix_type
    ctx = dash.callback_context
    if not ctx.triggered:
        raise PreventUpdate
    button_id = ctx.triggered[0]['prop_id'].split('.')[0]
    # Map the triggered button id back to the exact matrix type name
    # (e.g. 'btn-raci-matrix' -> 'RACI Matrix').
    matrix_type = next(mt for mt in matrix_types
                       if f'btn-{mt.lower().replace(" ", "-")}' == button_id)

    if not uploaded_files:
        return html.Div("Please upload project artifacts before generating a matrix."), ""

    file_contents = list(uploaded_files.values())
    current_matrix = generate_matrix_with_gpt(matrix_type, file_contents)

    return dbc.Table.from_dataframe(current_matrix, striped=True, bordered=True, hover=True), f"{matrix_type} generated"

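# Apply a free-text instruction to the current matrix: send the instruction
# plus the existing table to GPT and replace the preview with the result.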
@app.callback(
    Output('chat-output', 'children'),
    Output('matrix-preview', 'children', allow_duplicate=True),
    Input('btn-send-chat', 'n_clicks'),
    State('chat-input', 'value'),
    prevent_initial_call=True
)
def update_matrix_via_chat(n_clicks, chat_input):
    global current_matrix
    if not chat_input or current_matrix is None:
        raise PreventUpdate

    prompt = f"Update the following {matrix_type} based on this instruction: {chat_input}\n\n"
    prompt += current_matrix.to_string()
    prompt += "\n\nReturn the updated matrix as a Markdown table."

    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=[
            {"role": "system", "content": "You are a helpful assistant that updates project management matrices."},
            {"role": "user", "content": prompt}
        ]
    )

    updated_matrix_text = response.choices[0].message.content
    current_matrix = parse_matrix_response(updated_matrix_text)

    return f"Matrix updated based on: {chat_input}", dbc.Table.from_dataframe(current_matrix, striped=True, bordered=True, hover=True)

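# Export the current matrix as an Excel workbook via dcc.Download.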
@app.callback(
    Output("download-matrix", "data"),
    Input("btn-download", "n_clicks"),
    prevent_initial_call=True
)
def download_matrix(n_clicks):
    global current_matrix, matrix_type
    if current_matrix is None:
        raise PreventUpdate

    output = io.BytesIO()
    with pd.ExcelWriter(output, engine='xlsxwriter') as writer:
        current_matrix.to_excel(writer, sheet_name='Sheet1', index=False)

    return dcc.send_bytes(output.getvalue(), f"{matrix_type}.xlsx")

if __name__ == '__main__':
    print("Starting the Dash application...")
    app.run(debug=True, host='0.0.0.0', port=7860)
    print("Dash application has finished running.")