copy df error in logs fix
weekly.py
CHANGED
@@ -9,6 +9,9 @@ def generate_weekly_report(uploaded_files):
         st.error("No files uploaded. Please upload CSV files for analysis.")
         return
 
+    # Set pandas option to use Copy-on-Write
+    pd.options.mode.copy_on_write = True
+
     combined_data = pd.DataFrame()
     for uploaded_file in uploaded_files:
         data = preprocess_uploaded_file(uploaded_file)
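The change above opts the whole report into pandas Copy-on-Write before any DataFrame is created, so every slice taken from combined_data behaves as an independent frame and writing to a slice no longer produces the "setting a value on a copy of a slice" warnings that were showing up in the logs. A minimal sketch of the behaviour, with toy data standing in for the uploaded CSVs (only the 'Status' and 'File Date' column names are taken from this file):

import pandas as pd

# Same option the commit enables; CoW is opt-in on pandas 2.x.
pd.options.mode.copy_on_write = True

# Invented stand-in for combined_data.
df = pd.DataFrame({"Status": ["PASSED", "FAILED"],
                   "File Date": ["2024-01-01", "2024-01-02"]})

subset = df[df["Status"] == "FAILED"]
subset["Date"] = subset["File Date"]  # no SettingWithCopyWarning; parent stays untouched

print(df.columns.tolist())     # ['Status', 'File Date'] - df is unchanged
print(subset["Date"].iloc[0])  # '2024-01-02'

Turning the option on makes the copy semantics predictable; the rewrite of the 'Date' assignment itself happens in the next hunk.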
@@ -33,15 +36,19 @@ def generate_weekly_report(uploaded_files):
         st.error("No data found in the uploaded files. Please check the file contents.")
         return
 
-
+    # Create a boolean mask for failed data
+    failed_mask = combined_data['Status'] == 'FAILED'
+
+    # Use .loc to set the 'Date' column for failed data
+    combined_data.loc[failed_mask, 'Date'] = combined_data.loc[failed_mask, 'File Date']
+
+    # Filter failed data
+    failed_data = combined_data[failed_mask]
 
     if failed_data.empty:
         st.warning("No failed scenarios found in the uploaded data.")
         return
 
-    # Use 'File Date' for grouping
-    failed_data['Date'] = failed_data['File Date']
-
     # UI for selecting environments and functional areas
     environments = combined_data['Environment'].unique()
     selected_environments = st.multiselect("Select Environments", options=environments, default=environments)
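The hunk above removes the pattern that triggered the copy warnings: the old code built failed_data as a slice of combined_data (the removed definition itself was not preserved in the rendered hunk) and then assigned failed_data['Date'] on that slice. The new code computes a boolean mask, writes the 'Date' column through the parent frame with .loc, and only then takes the failed_data subset, so no assignment ever targets a copy of a slice. A small before/after sketch on invented rows (the 'Status', 'File Date' and 'Date' names come from the diff; the values and the commented-out old definition are assumptions):

import pandas as pd

combined_data = pd.DataFrame({
    "Status": ["PASSED", "FAILED", "FAILED"],
    "File Date": ["2024-01-01", "2024-01-02", "2024-01-03"],
})

# Old pattern (assignment on a slice, the likely source of the warnings):
#   failed_data = combined_data[combined_data["Status"] == "FAILED"]
#   failed_data["Date"] = failed_data["File Date"]

# New pattern: write through the parent with .loc, then slice.
failed_mask = combined_data["Status"] == "FAILED"
combined_data.loc[failed_mask, "Date"] = combined_data.loc[failed_mask, "File Date"]
failed_data = combined_data[failed_mask]

print(failed_data[["File Date", "Date"]])

One side effect of the new order: combined_data itself now carries a 'Date' column that is NaN for rows whose Status is not 'FAILED', whereas before only the failed slice had it.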
@@ -82,6 +89,9 @@ def generate_weekly_report(uploaded_files):
     date_range = pd.date_range(start=start_date, end=end_date)
     daily_failures = daily_failures.reindex(date_range, fill_value=0)
 
+    # Convert all columns to int64 to avoid Arrow serialization issues
+    daily_failures = daily_failures.astype('int64')
+
     # Y-axis scaling option
     y_axis_scale = st.radio("Y-axis Scaling", ["Fixed", "Dynamic"])
 
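The final hunk forces the reindexed failure counts to int64 right before they reach the Streamlit widgets. Streamlit serializes DataFrames through Arrow, and the in-code comment says the cast is there to avoid Arrow serialization issues; a plausible cause is that the grouped counts come back with an object or mixed dtype after the grouping and reindex, which Arrow cannot map to a clean column type. A sketch with an invented daily_failures frame (the environment column names are made up; only the reindex and astype calls mirror the diff):

import pandas as pd

# Stand-in for the grouped failure counts; assume the upstream grouping left
# object-typed cells.
daily_failures = pd.DataFrame(
    {"QA": [1, 2], "UAT": [0, 3]},
    index=pd.to_datetime(["2024-01-01", "2024-01-03"]),
).astype("object")

date_range = pd.date_range(start="2024-01-01", end="2024-01-04")
daily_failures = daily_failures.reindex(date_range, fill_value=0)

# Same normalization as the diff: every column becomes a plain int64.
daily_failures = daily_failures.astype("int64")
print(daily_failures.dtypes)  # QA int64, UAT int64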