File size: 4,226 Bytes
75b6b4d
c2295d1
5ea6795
 
 
 
 
c2295d1
5ea6795
75b6b4d
d3a8296
75b6b4d
d3a8296
 
 
0cd7d19
75b6b4d
0cd7d19
7fdcd7c
75b6b4d
7fdcd7c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
75b6b4d
7fdcd7c
 
75b6b4d
 
7fdcd7c
 
0cd7d19
5ea6795
 
 
 
 
7fdcd7c
 
5ea6795
6141da1
 
c2295d1
7fdcd7c
c2295d1
7fdcd7c
 
c2295d1
75b6b4d
6141da1
7fdcd7c
5ea6795
b365915
 
 
 
 
5ea6795
75b6b4d
7fdcd7c
c2295d1
0cd7d19
7fdcd7c
 
 
0cd7d19
75b6b4d
d3a8296
ff73cbe
7fdcd7c
 
 
ff73cbe
7fdcd7c
 
ff73cbe
 
 
 
7fdcd7c
ff73cbe
 
 
 
 
 
 
7fdcd7c
ff73cbe
 
0cd7d19
 
ff73cbe
 
0cd7d19
ff73cbe
6141da1
7fdcd7c
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
# app.py - FactoryGPT 5.0: Predictive Maintenance + Role Chat (No 3D Map)

import streamlit as st
import pandas as pd
import numpy as np
from sentence_transformers import SentenceTransformer
from transformers import pipeline
from sklearn.ensemble import IsolationForest

# Page setup — must run before any other st.* call in the script.
PAGE_CONFIG = {
    "page_title": "FactoryGPT 5.0 – Predict, Perfect, and Connect",
    "page_icon": "🧠",
    "layout": "wide",
}
st.set_page_config(**PAGE_CONFIG)

# Dark-mode styling injected once: page font/background, input widgets,
# and dataframe chrome. Raw HTML requires unsafe_allow_html.
_DARK_CSS = """
    <style>
    html, body, [class*="css"] {
        font-family: 'Segoe UI', sans-serif;
        background-color: #0f1117;
        color: #f0f0f0;
    }
    .stTextInput>div>div>input,
    .stSelectbox>div>div>div>div {
        background-color: #1a1c23;
        color: #fff;
    }
    .stDataFrame .blank {
        background-color: #0f1117 !important;
    }
    </style>
"""
st.markdown(_DARK_CSS, unsafe_allow_html=True)

# Centered title banner (raw HTML, hence unsafe_allow_html).
_TITLE_HTML = """
    <div style='text-align: center;'>
        <h1 style='color: #58a6ff;'>🏭 FactoryGPT 5.0 – Predict, Perfect, and Connect</h1>
        <p style='color: #bbb;'>AI-Powered Predictive Maintenance | Human-in-the-Loop Decision Support</p>
        <hr style='border-top: 2px solid #888;'>
    </div>
"""
st.markdown(_TITLE_HTML, unsafe_allow_html=True)

# Load models once per session. Streamlit re-executes the whole script on
# every widget interaction; without caching, both models were re-downloaded /
# re-instantiated on each rerun, which is extremely slow.
@st.cache_resource
def _load_models():
    """Return (sentence embedder, text2text generator), cached for the session."""
    embedder = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
    generator = pipeline('text2text-generation', model='google/flan-t5-base')
    return embedder, generator

EMBED_MODEL, GEN_MODEL = _load_models()

# File upload — the sole data entry point for the app.
# Fixed mojibake in the label: "πŸ“‚" was UTF-8 for 📂 decoded with the wrong codec.
uploaded_file = st.sidebar.file_uploader("📂 Upload your sensor CSV", type=["csv"])

# Main app flow: anomaly labelling + role-based RAG chat over the uploaded CSV.
# Fixes vs. original: repaired mojibake in all user-facing strings; RAG index
# is rebuilt when a *different* file is uploaded (previously stale chunks
# survived a new upload); IsolationForest is seeded for reproducible labels;
# retrieval uses cosine similarity instead of a raw dot product; empty
# numeric-column case no longer crashes fit_predict.
if uploaded_file:
    df = pd.read_csv(uploaded_file)
    numeric_cols = df.select_dtypes(include=np.number).columns.tolist()

    # Both the anomaly model and the RAG chunks need numeric signals.
    if not numeric_cols:
        st.error("❌ No numeric sensor columns found in the uploaded CSV.")
        st.stop()

    st.success("✅ Sensor log loaded!")

    st.markdown("### 🧾 Sensor Log Preview")
    st.dataframe(df.head(), use_container_width=True)

    # --- RAG embeddings -----------------------------------------------------
    def convert_to_chunks(frame, cols):
        """Serialize each row into one text snippet for embedding/retrieval."""
        return [
            f"[Log {i}] " + ", ".join(f"{col}: {row[col]:.2f}" for col in cols)
            for i, row in frame.iterrows()
        ]

    # Re-embed only when the uploaded file actually changes; the original
    # cached unconditionally, so a second upload kept the first file's index.
    file_key = (getattr(uploaded_file, "name", ""), len(df), tuple(numeric_cols))
    if st.session_state.get("rag_key") != file_key:
        chunks = convert_to_chunks(df, numeric_cols)
        st.session_state.chunks = chunks
        st.session_state.embeddings = EMBED_MODEL.encode(chunks)
        st.session_state.rag_key = file_key

    # --- Equipment condition via Isolation Forest ---------------------------
    st.markdown("### ⚙️ Equipment Condition Status")
    # random_state pins the forest so reruns give stable labels for same data.
    iso = IsolationForest(contamination=0.02, random_state=42)
    labels = iso.fit_predict(df[numeric_cols])  # -1 = anomaly, 1 = inlier
    df['status'] = ['❌ No Function' if x == -1 else '✅ Functional' for x in labels]
    df['maintenance'] = ['🔧 Needs Maintenance' if x == -1 else '🟢 Stable' for x in labels]
    st.dataframe(df[['status', 'maintenance'] + numeric_cols].head(), use_container_width=True)

    # --- Role-based assistant ----------------------------------------------
    st.markdown("### 💬 Role-Based Chat Assistant")
    roles = {
        "Operator": "You are a machine operator. Check if equipment is running properly. If not, flag it immediately.",
        "Maintenance": "You are a maintenance technician. Assess faulty logs and provide service insights.",
        "Engineer": "You are a systems engineer. Offer data-backed advice and failure diagnostics."
    }

    role = st.selectbox("👷 Choose your role", list(roles.keys()))

    if 'chat_history' not in st.session_state:
        st.session_state.chat_history = []

    user_input = st.text_input("🗨️ Ask FactoryGPT about machine status or maintenance needs")

    if user_input:
        query_vec = EMBED_MODEL.encode([user_input])[0]
        emb = np.asarray(st.session_state.embeddings)
        # Cosine similarity: the raw dot product used before biases retrieval
        # toward high-norm embeddings. Zero norms are clamped to avoid /0.
        denom = np.linalg.norm(emb, axis=1) * max(np.linalg.norm(query_vec), 1e-12)
        sims = (emb @ query_vec) / np.where(denom == 0, 1.0, denom)
        top_idxs = np.argsort(sims)[-3:][::-1]  # three most relevant log rows
        context = "\n".join(st.session_state.chunks[i] for i in top_idxs)
        system_prompt = roles[role]
        full_prompt = f"{system_prompt}\n\nSensor Log Context:\n{context}\n\nUser Question: {user_input}"
        reply = GEN_MODEL(full_prompt, max_length=256)[0]['generated_text']

        st.session_state.chat_history.append((f"👤 You ({role})", user_input))
        st.session_state.chat_history.append(("🤖 FactoryGPT", reply))

    # Render only the ten most recent messages to keep the page compact.
    for speaker, msg in st.session_state.chat_history[-10:]:
        st.markdown(f"<div style='margin-bottom: 10px;'><b>{speaker}:</b> {msg}</div>", unsafe_allow_html=True)

else:
    st.info("👈 Upload a CSV file with sensor logs to begin.")