bhagwandas committed on
Commit
6141da1
Β·
verified Β·
1 Parent(s): 3dacf24

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +35 -40
app.py CHANGED
@@ -1,61 +1,56 @@
1
- # app.py - FactoryRAG: Q&A over Sensor Logs (Streamlit + Hugging Face)
2
-
3
  import streamlit as st
4
  import pandas as pd
5
  import numpy as np
6
  from sentence_transformers import SentenceTransformer
7
  from transformers import pipeline
8
 
9
- # Set page config
10
- st.set_page_config(page_title="FactoryRAG - Sensor Logs", layout="wide")
11
  st.title("🏭 FactoryRAG: Human-Centric AI for Sensor Log Analysis")
12
 
13
  # Load models
14
  EMBED_MODEL = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
15
  GEN_MODEL = pipeline('text2text-generation', model='google/flan-t5-base')
16
 
17
- # Load CSV sensor log file
18
- @st.cache_data
19
- def load_logs():
20
- df = pd.read_csv("01-04T184148_000_mode1.csv")
21
- return df
22
-
23
- # Convert log rows to natural language chunks
24
- def convert_to_chunks(df):
25
- chunks = []
26
- for idx, row in df.iterrows():
27
- sentence = f"Log entry {idx}: " + ", ".join([f"{col}: {row[col]:.2f}" for col in df.columns])
28
- chunks.append(sentence)
29
- return chunks
30
 
31
- # Load and embed logs
32
- df = load_logs()
33
- st.write("πŸ“Š Sensor Data Snapshot:", df.head())
 
34
 
35
- if 'chunks' not in st.session_state:
36
- st.session_state.chunks = convert_to_chunks(df)
37
- st.session_state.embeddings = EMBED_MODEL.encode(st.session_state.chunks)
 
 
 
 
38
 
39
- # Ask a question
40
- query = st.text_input("πŸ” Ask something about the sensor logs:")
 
41
 
42
- if query:
43
- query_vec = EMBED_MODEL.encode([query])[0]
44
- scores = np.dot(st.session_state.embeddings, query_vec)
45
- top_idxs = np.argsort(scores)[-3:][::-1]
46
- context = "\n".join([st.session_state.chunks[i] for i in top_idxs])
47
 
48
- prompt = f"Answer based on the following logs:\n{context}\n\nQuestion: {query}"
49
- response = GEN_MODEL(prompt, max_length=256)[0]['generated_text']
 
 
 
50
 
51
- st.subheader("πŸ€– FactoryGPT Answer")
52
- st.write(response)
53
 
54
- st.markdown("### πŸ§‘β€πŸ­ Human Feedback")
55
- st.radio("Is this answer acceptable?", ["Approve", "Correct", "Escalate"], horizontal=True)
56
 
57
- with st.expander("πŸ“„ Retrieved Log Context"):
58
- st.code(context)
59
 
60
- elif len(df) == 0:
61
- st.warning("⚠️ No data loaded. Please check your CSV file.")
 
 
 
 
 
1
  import streamlit as st
2
  import pandas as pd
3
  import numpy as np
4
  from sentence_transformers import SentenceTransformer
5
  from transformers import pipeline
6
 
7
+ # Page setup
8
+ st.set_page_config(page_title="FactoryRAG - Upload Logs", layout="wide")
9
  st.title("🏭 FactoryRAG: Human-Centric AI for Sensor Log Analysis")
10
 
11
  # Load models
12
  EMBED_MODEL = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
13
  GEN_MODEL = pipeline('text2text-generation', model='google/flan-t5-base')
14
 
15
+ # File uploader
16
+ uploaded_file = st.sidebar.file_uploader("πŸ“‚ Upload your sensor CSV log file", type=["csv"])
 
 
 
 
 
 
 
 
 
 
 
17
 
18
+ if uploaded_file:
19
+ df = pd.read_csv(uploaded_file)
20
+ st.success("βœ… File uploaded and loaded!")
21
+ st.write("πŸ“Š Sensor Data Snapshot:", df.head())
22
 
23
+ # Convert to chunks
24
+ def convert_to_chunks(df):
25
+ chunks = []
26
+ for idx, row in df.iterrows():
27
+ sentence = f"Log entry {idx}: " + ", ".join([f"{col}: {row[col]:.2f}" for col in df.columns])
28
+ chunks.append(sentence)
29
+ return chunks
30
 
31
+ if 'chunks' not in st.session_state:
32
+ st.session_state.chunks = convert_to_chunks(df)
33
+ st.session_state.embeddings = EMBED_MODEL.encode(st.session_state.chunks)
34
 
35
+ # User query
36
+ query = st.text_input("πŸ” Ask something about the sensor logs:")
 
 
 
37
 
38
+ if query:
39
+ query_vec = EMBED_MODEL.encode([query])[0]
40
+ scores = np.dot(st.session_state.embeddings, query_vec)
41
+ top_idxs = np.argsort(scores)[-3:][::-1]
42
+ context = "\n".join([st.session_state.chunks[i] for i in top_idxs])
43
 
44
+ prompt = f"Answer based on the following logs:\n{context}\n\nQuestion: {query}"
45
+ response = GEN_MODEL(prompt, max_length=256)[0]['generated_text']
46
 
47
+ st.subheader("πŸ€– FactoryGPT Answer")
48
+ st.write(response)
49
 
50
+ st.markdown("### πŸ§‘β€πŸ­ Human Feedback")
51
+ st.radio("Is this answer acceptable?", ["Approve", "Correct", "Escalate"], horizontal=True)
52
 
53
+ with st.expander("πŸ“„ Retrieved Log Context"):
54
+ st.code(context)
55
+ else:
56
+ st.info("πŸ‘ˆ Please upload your sensor log file (CSV) to begin.")