File size: 3,687 Bytes
0cd7d19
c2295d1
5ea6795
 
 
 
 
c2295d1
5ea6795
0cd7d19
 
 
 
 
 
 
 
 
 
5ea6795
 
 
 
 
0cd7d19
 
 
5ea6795
6141da1
 
c2295d1
0cd7d19
c2295d1
0cd7d19
 
c2295d1
 
0cd7d19
6141da1
 
 
0cd7d19
 
6141da1
5ea6795
b365915
 
 
 
 
5ea6795
c2295d1
0cd7d19
c2295d1
0cd7d19
 
 
 
 
 
ff73cbe
 
 
 
 
0cd7d19
ff73cbe
 
 
 
0cd7d19
ff73cbe
 
 
 
 
 
 
 
 
 
0cd7d19
 
ff73cbe
 
0cd7d19
ff73cbe
6141da1
0cd7d19
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
# app.py - FactoryRAG+: Fancy Lite Version with Animation & Chatbot Only

import html

import numpy as np
import pandas as pd
import streamlit as st
from sentence_transformers import SentenceTransformer
from sklearn.ensemble import IsolationForest
from transformers import pipeline

# Page config with emoji + layout
st.set_page_config(page_title="FactoryRAG+ Assistant", page_icon="🧠", layout="wide")

# Animated header
st.markdown("""
    <h1 style='text-align: center; color: #3498db; font-size: 48px;'>
        🏭 FactoryRAG+ <span style="font-size: 28px;">| AI Assistant for Smart Sensors</span>
    </h1>
    <hr style='border-top: 2px solid #bbb;' />
""", unsafe_allow_html=True)


# Streamlit re-executes this script on every user interaction. Without
# caching, both models would be re-initialized (and potentially re-downloaded)
# on each rerun. cache_resource builds them once per server process.
@st.cache_resource
def _load_models():
    """Return (sentence embedder, text2text generator), cached across reruns."""
    embedder = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
    generator = pipeline('text2text-generation', model='google/flan-t5-base')
    return embedder, generator


EMBED_MODEL, GEN_MODEL = _load_models()

# Sidebar upload
st.sidebar.markdown("### 📂 Upload Sensor Log")
uploaded_file = st.sidebar.file_uploader("Upload a CSV sensor file", type=["csv"])

if uploaded_file:
    df = pd.read_csv(uploaded_file)
    numeric_cols = df.select_dtypes(include=np.number).columns.tolist()

    # Guard: a log with no numeric columns can be neither embedded nor scanned.
    if not numeric_cols:
        st.error("The uploaded CSV contains no numeric sensor columns.")
        st.stop()

    st.success("✅ Data uploaded successfully!")

    # Animated section
    st.markdown("### 🔍 Sensor Log Preview")
    st.dataframe(df.head())

    # Convert each row into a one-line text "log entry" so it can be embedded.
    def convert_to_chunks(frame, cols=None):
        """Return one text line per row of *frame*.

        cols: columns to render; defaults to the frame's numeric columns
        (keeps the original closure-based behavior backward-compatible).
        """
        if cols is None:
            cols = frame.select_dtypes(include=np.number).columns.tolist()
        return [
            f"[Log {idx}] " + ", ".join(f"{col}: {row[col]:.2f}" for col in cols)
            for idx, row in frame.iterrows()
        ]

    # Re-embed only when a *different* file arrives. The previous check
    # ("'chunks' not in session_state") kept stale chunks/embeddings from an
    # earlier upload, so retrieval answered questions about the wrong log.
    if st.session_state.get('embedded_file') != uploaded_file.name:
        chunks = convert_to_chunks(df, numeric_cols)
        st.session_state.chunks = chunks
        st.session_state.embeddings = np.asarray(EMBED_MODEL.encode(chunks))
        st.session_state.embedded_file = uploaded_file.name

    # --- Anomaly Detection ---
    st.markdown("### 🚨 Real-Time Anomaly Scanner")
    # random_state pins the forest so the same file always flags the same rows.
    iso = IsolationForest(contamination=0.02, random_state=42)
    labels = iso.fit_predict(df[numeric_cols])
    df['anomaly'] = ['❌ Anomaly' if x == -1 else '✅ Normal' for x in labels]
    st.dataframe(df[df['anomaly'].str.contains("❌")].head())

    # --- Chatbot Assistant ---
    st.markdown("### 💬 Ask FactoryGPT")
    roles = {
        "Operator": "You are a machine operator. Provide practical insights and safety warnings.",
        "Maintenance": "You are a maintenance technician. Suggest inspections and likely causes of sensor anomalies.",
        "Engineer": "You are a control systems engineer. Offer analytical interpretations and system-level advice."
    }
    role = st.selectbox("👷 Select your role", list(roles.keys()))

    if 'chat_history' not in st.session_state:
        st.session_state.chat_history = []

    user_input = st.text_input("🗨️ Ask about the sensor log...", key="chat_input")

    if user_input:
        # Retrieve the 3 log lines most similar to the question. Cosine
        # similarity rather than a raw dot product, so vector magnitude
        # cannot dominate the ranking (these embeddings are not normalized
        # by default).
        query_vec = EMBED_MODEL.encode([user_input])[0]
        emb = np.asarray(st.session_state.embeddings)
        norms = np.linalg.norm(emb, axis=1) * np.linalg.norm(query_vec)
        sims = emb @ query_vec / np.maximum(norms, 1e-12)
        top_idxs = np.argsort(sims)[-3:][::-1]
        context = "\n".join(st.session_state.chunks[i] for i in top_idxs)
        system_prompt = roles[role]
        full_prompt = f"{system_prompt}\n\nSensor Context:\n{context}\n\nUser Question: {user_input}"
        reply = GEN_MODEL(full_prompt, max_length=256)[0]['generated_text']

        st.session_state.chat_history.append((f"👤 You ({role})", user_input))
        st.session_state.chat_history.append(("🤖 FactoryGPT", reply))

    # Show the last 10 turns. Escape the message text: this markdown is
    # rendered with unsafe_allow_html=True, and user/model text must not be
    # interpreted as HTML (script injection).
    for speaker, msg in st.session_state.chat_history[-10:]:
        st.markdown(
            f"<div style='margin-bottom: 10px;'><b>{speaker}:</b> {html.escape(msg)}</div>",
            unsafe_allow_html=True,
        )

else:
    st.info("👈 Please upload a sensor CSV file to begin.")