File size: 2,100 Bytes
5ea6795
 
 
 
 
 
6141da1
 
5ea6795
 
 
 
 
 
6141da1
 
5ea6795
6141da1
 
 
 
5ea6795
6141da1
 
 
 
 
 
 
5ea6795
6141da1
 
 
5ea6795
6141da1
 
5ea6795
6141da1
 
 
 
 
5ea6795
6141da1
 
5ea6795
6141da1
 
5ea6795
6141da1
 
5ea6795
6141da1
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
import streamlit as st
import pandas as pd
import numpy as np
from sentence_transformers import SentenceTransformer
from transformers import pipeline

# Page setup
st.set_page_config(page_title="FactoryRAG - Upload Logs", layout="wide")
st.title("🏭 FactoryRAG: Human-Centric AI for Sensor Log Analysis")

# Load models once per server process. Streamlit re-executes this whole
# script on every widget interaction; without caching, both models would be
# reloaded from disk on every keystroke. cache_resource keeps one shared copy.
@st.cache_resource
def _load_models():
    """Return (sentence embedder, text2text generation pipeline), cached across reruns."""
    embedder = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
    generator = pipeline('text2text-generation', model='google/flan-t5-base')
    return embedder, generator

EMBED_MODEL, GEN_MODEL = _load_models()

# File uploader
uploaded_file = st.sidebar.file_uploader("πŸ“‚ Upload your sensor CSV log file", type=["csv"])

if uploaded_file:
    df = pd.read_csv(uploaded_file)
    st.success("βœ… File uploaded and loaded!")
    st.write("πŸ“Š Sensor Data Snapshot:", df.head())

    # Convert each row into one natural-language sentence for embedding.
    def convert_to_chunks(df):
        """Render every row of *df* as a descriptive log sentence.

        Numeric values are formatted to two decimals; non-numeric values
        (timestamps, status labels) are kept verbatim — the bare ':.2f'
        format spec would raise on any non-numeric column.
        """
        chunks = []
        for idx, row in df.iterrows():
            parts = []
            for col in df.columns:
                val = row[col]
                try:
                    parts.append(f"{col}: {float(val):.2f}")
                except (TypeError, ValueError):
                    parts.append(f"{col}: {val}")
            chunks.append(f"Log entry {idx}: " + ", ".join(parts))
        return chunks

    # Re-chunk and re-embed whenever a *different* file is uploaded.
    # Keying only on the presence of 'chunks' would silently keep serving
    # the stale index from the first upload of the session.
    if st.session_state.get('indexed_file') != uploaded_file.name:
        st.session_state.chunks = convert_to_chunks(df)
        st.session_state.embeddings = EMBED_MODEL.encode(st.session_state.chunks)
        st.session_state.indexed_file = uploaded_file.name

    # User query
    query = st.text_input("πŸ” Ask something about the sensor logs:")

    if query:
        query_vec = EMBED_MODEL.encode([query])[0]
        # Rank by cosine similarity: normalize both sides so vector
        # magnitude does not bias retrieval (MiniLM embeddings are not
        # normalized by default, so a raw dot product favours long chunks).
        emb = st.session_state.embeddings
        emb_unit = emb / np.linalg.norm(emb, axis=1, keepdims=True)
        query_unit = query_vec / np.linalg.norm(query_vec)
        scores = emb_unit @ query_unit
        top_idxs = np.argsort(scores)[-3:][::-1]
        context = "\n".join([st.session_state.chunks[i] for i in top_idxs])

        prompt = f"Answer based on the following logs:\n{context}\n\nQuestion: {query}"
        response = GEN_MODEL(prompt, max_length=256)[0]['generated_text']

        st.subheader("πŸ€– FactoryGPT Answer")
        st.write(response)

        st.markdown("### πŸ§‘β€πŸ­ Human Feedback")
        # NOTE(review): the selected feedback value is not captured or
        # persisted anywhere — wire it to storage if feedback should count.
        st.radio("Is this answer acceptable?", ["Approve", "Correct", "Escalate"], horizontal=True)

        with st.expander("πŸ“„ Retrieved Log Context"):
            st.code(context)
else:
    st.info("πŸ‘ˆ Please upload your sensor log file (CSV) to begin.")