bhagwandas committed
Commit c2295d1 · verified · 1 parent: 8f0ab4e

Update app.py

Files changed (1): app.py  +66 -22
app.py CHANGED
@@ -1,30 +1,52 @@
+# app.py - FactoryRAG+: Condition Monitoring with Dashboard, PDF Export, Anomaly Detection & Digital Twin
+
 import streamlit as st
 import pandas as pd
 import numpy as np
+import matplotlib.pyplot as plt
 from sentence_transformers import SentenceTransformer
 from transformers import pipeline
+from sklearn.ensemble import IsolationForest
+import base64
+from io import BytesIO
+from fpdf import FPDF
 
-# Page setup
-st.set_page_config(page_title="FactoryRAG - Upload Logs", layout="wide")
-st.title("🏭 FactoryRAG: Human-Centric AI for Sensor Log Analysis")
+# Streamlit config
+st.set_page_config(page_title="FactoryRAG+ - Smart Sensor Twin", layout="wide")
+st.title("🏭 FactoryRAG+: Smart Dashboard with AI Monitoring, PDF Reporting & Digital Twin")
 
 # Load models
 EMBED_MODEL = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
 GEN_MODEL = pipeline('text2text-generation', model='google/flan-t5-base')
 
-# File uploader
-uploaded_file = st.sidebar.file_uploader("📂 Upload your sensor CSV log file", type=["csv"])
+# Upload data
+uploaded_file = st.sidebar.file_uploader("📂 Upload your condition monitoring CSV", type=["csv"])
 
 if uploaded_file:
     df = pd.read_csv(uploaded_file)
-    st.success("✅ File uploaded and loaded!")
-    st.write("📊 Sensor Data Snapshot:", df.head())
+    numeric_cols = df.select_dtypes(include=np.number).columns.tolist()
+    st.success("✅ Data loaded successfully!")
+
+    st.subheader("📊 Sensor Snapshot")
+    st.dataframe(df.head())
+
+    # --- Multi-signal Dashboard ---
+    st.subheader("📈 Sensor Dashboard")
+    selected_cols = st.multiselect("Select signals to visualize", numeric_cols, default=numeric_cols[:3])
+    fig, ax = plt.subplots(len(selected_cols), 1, figsize=(8, 2 * len(selected_cols)))
+    if len(selected_cols) == 1:
+        ax = [ax]
+    for i, col in enumerate(selected_cols):
+        ax[i].plot(df[col], label=col)
+        ax[i].set_ylabel(col)
+        ax[i].legend()
+    st.pyplot(fig)
 
-    # Convert to chunks
+    # --- Convert Logs to Chunks ---
    def convert_to_chunks(df):
         chunks = []
         for idx, row in df.iterrows():
-            sentence = f"Log entry {idx}: " + ", ".join([f"{col}: {row[col]:.2f}" for col in df.columns])
+            sentence = f"[Log {idx}] " + ", ".join([f"{col}: {row[col]:.2f}" for col in numeric_cols])
             chunks.append(sentence)
         return chunks
 
@@ -32,25 +54,47 @@ if uploaded_file:
     st.session_state.chunks = convert_to_chunks(df)
     st.session_state.embeddings = EMBED_MODEL.encode(st.session_state.chunks)
 
-    # User query
-    query = st.text_input("🔍 Ask something about the sensor logs:")
+    # --- Anomaly Detection ---
+    st.subheader("🚨 Anomaly Detection (Isolation Forest)")
+    iso = IsolationForest(contamination=0.02)
+    anomaly_labels = iso.fit_predict(df[numeric_cols])
+    df['anomaly'] = ['❌' if x == -1 else '' for x in anomaly_labels]
+    st.write("Anomaly Flags:")
+    st.dataframe(df[df['anomaly'] == '❌'].head(5))
 
+    # --- Technical Question Answering ---
+    st.subheader("🧠 Ask Expert Questions")
+    query = st.text_input("Ask a question like 'Where is instability?' or 'Are anomalies visible?'")
     if query:
         query_vec = EMBED_MODEL.encode([query])[0]
-        scores = np.dot(st.session_state.embeddings, query_vec)
-        top_idxs = np.argsort(scores)[-3:][::-1]
+        sims = np.dot(st.session_state.embeddings, query_vec)
+        top_idxs = np.argsort(sims)[-3:][::-1]
         context = "\n".join([st.session_state.chunks[i] for i in top_idxs])
-
-        prompt = f"Answer based on the following logs:\n{context}\n\nQuestion: {query}"
+        prompt = f"Context:\n{context}\n\nAs a reliability engineer, answer: {query}"
         response = GEN_MODEL(prompt, max_length=256)[0]['generated_text']
-
         st.subheader("🤖 FactoryGPT Answer")
-        st.write(response)
+        st.markdown(response)
+
+    # --- Digital Twin Metrics ---
+    st.subheader("🧪 Digital Twin Summary")
+    twin_report = ""
+    for col in selected_cols:
+        max_v = df[col].max()
+        min_v = df[col].min()
+        mean_v = df[col].mean()
+        twin_report += f"{col}\n→ Max: {max_v:.2f}, Min: {min_v:.2f}, Avg: {mean_v:.2f}\n\n"
+    st.code(twin_report)
 
-        st.markdown("### 🧑‍🏭 Human Feedback")
-        st.radio("Is this answer acceptable?", ["Approve", "Correct", "Escalate"], horizontal=True)
+    # --- PDF Export ---
+    st.subheader("📤 Export Digital Twin Report as PDF")
+    pdf = FPDF()
+    pdf.add_page()
+    pdf.set_font("Arial", size=12)
+    pdf.multi_cell(0, 10, f"FactoryRAG+ Digital Twin Report\n\nSelected Signals: {', '.join(selected_cols)}\n\n" + twin_report)
+    pdf_bytes = pdf.output(dest='S').encode('latin1')
+    b64 = base64.b64encode(pdf_bytes).decode()
+    href = f'<a href="data:application/octet-stream;base64,{b64}" download="digital_twin_report.pdf">📄 Download PDF Report</a>'
+    st.markdown(href, unsafe_allow_html=True)
 
-    with st.expander("📄 Retrieved Log Context"):
-        st.code(context)
 else:
-    st.info("👈 Please upload your sensor log file (CSV) to begin.")
+    st.info("👈 Upload a sensor log CSV file to explore digital twin analysis, waveform charts, anomaly detection, and PDF export.")
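
The question-answering block added here ranks log chunks by a raw dot product between the stored embeddings and the query vector; that ranking equals cosine similarity only when the encoder's outputs are unit-length. A minimal sketch that makes the normalization explicit (the `chunks` list and the example query are illustrative, not from the commit):

```python
import numpy as np
from sentence_transformers import SentenceTransformer

# Minimal sketch: cosine-similarity retrieval over log chunks.
# `chunks` stands in for the output of convert_to_chunks(df).
chunks = ["[Log 0] temp: 71.20, vib: 0.03", "[Log 1] temp: 98.40, vib: 0.91"]

model = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")
emb = model.encode(chunks, normalize_embeddings=True)              # unit-length rows
query_vec = model.encode(["Where is instability?"], normalize_embeddings=True)[0]

sims = emb @ query_vec                    # dot product of unit vectors = cosine similarity
top_idxs = np.argsort(sims)[-3:][::-1]    # three best-matching chunks, best first
context = "\n".join(chunks[i] for i in top_idxs)
```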
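The PDF export writes `twin_report` with FPDF's built-in Arial font and then encodes the output as Latin-1, but `twin_report` contains the '→' arrow (U+2192), which Latin-1 cannot represent, so the export can fail with an encoding error depending on the installed fpdf version. A minimal sketch of one workaround, sanitizing the text before it reaches FPDF (`report_text` and `to_latin1` are illustrative stand-ins, not from the commit):

```python
from fpdf import FPDF

def to_latin1(text: str) -> str:
    """Map known non-Latin-1 symbols to ASCII and drop anything else."""
    text = text.replace("\u2192", "->")  # the arrow used in twin_report
    return text.encode("latin-1", errors="replace").decode("latin-1")

report_text = "temp_sensor\n\u2192 Max: 98.40, Min: 71.20, Avg: 84.80\n"  # illustrative

pdf = FPDF()
pdf.add_page()
pdf.set_font("Arial", size=12)
pdf.multi_cell(0, 10, to_latin1(report_text))
pdf_bytes = pdf.output(dest="S").encode("latin-1")  # mirrors the committed code (PyFPDF 1.x behaviour)
```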
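The unchanged line hidden between the two hunks (new line 53) is not shown, so it is unclear whether the chunking and embedding step is guarded; as written in the visible lines, `EMBED_MODEL.encode` would run on every Streamlit rerun. A minimal sketch of caching that work, assuming Streamlit's `st.cache_data` / `st.cache_resource` decorators are available (function names are illustrative):

```python
import streamlit as st
from sentence_transformers import SentenceTransformer

# Minimal sketch: compute chunk embeddings once per distinct chunk list
# instead of on every Streamlit rerun. Names are illustrative.
@st.cache_resource
def load_embedder():
    return SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")

@st.cache_data
def embed_chunks(chunks: tuple):
    # a tuple hashes cleanly as the cache key
    return load_embedder().encode(list(chunks))

# usage inside the app:
# st.session_state.embeddings = embed_chunks(tuple(st.session_state.chunks))
```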