# app.py - FactoryGPT 5.0: Predictive Maintenance + Role Chat (No 3D Map)
import streamlit as st
import pandas as pd
import numpy as np
from sentence_transformers import SentenceTransformer
from transformers import pipeline
from sklearn.ensemble import IsolationForest
# Page setup
st.set_page_config(
    page_title="FactoryGPT 5.0 – Predict, Perfect, and Connect",
    page_icon="🧠",
    layout="wide"
)
# Dark mode CSS
st.markdown("""
<style>
html, body, [class*="css"] {
    font-family: 'Segoe UI', sans-serif;
    background-color: #0f1117;
    color: #f0f0f0;
}
.stTextInput>div>div>input,
.stSelectbox>div>div>div>div {
    background-color: #1a1c23;
    color: #fff;
}
.stDataFrame .blank {
    background-color: #0f1117 !important;
}
</style>
""", unsafe_allow_html=True)
# Title
st.markdown("""
<div style='text-align: center;'>
    <h1 style='color: #58a6ff;'>🏭 FactoryGPT 5.0 – Predict, Perfect, and Connect</h1>
    <p style='color: #bbb;'>AI-Powered Predictive Maintenance | Human-in-the-Loop Decision Support</p>
    <hr style='border-top: 2px solid #888;'>
</div>
""", unsafe_allow_html=True)
# Load models
EMBED_MODEL = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
GEN_MODEL = pipeline('text2text-generation', model='google/flan-t5-base')
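# all-MiniLM-L6-v2 produces 384-dimensional sentence embeddings used for retrieval;
# flan-t5-base is a small instruction-tuned text2text model that writes the replies.
# NOTE: wrapping these two loaders in @st.cache_resource would keep Streamlit from
# reloading both models on every rerun.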
# File upload
uploaded_file = st.sidebar.file_uploader("📂 Upload your sensor CSV", type=["csv"])
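# Only numeric columns are used downstream (anomaly detection and log chunking);
# any text or categorical columns in the CSV are ignored by the model.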
if uploaded_file:
    df = pd.read_csv(uploaded_file)
    numeric_cols = df.select_dtypes(include=np.number).columns.tolist()
    st.success("✅ Sensor log loaded!")
    st.markdown("### 🧾 Sensor Log Preview")
    st.dataframe(df.head(), use_container_width=True)
    # RAG Embeddings
    def convert_to_chunks(df):
        return [f"[Log {i}] " + ", ".join([f"{col}: {row[col]:.2f}" for col in numeric_cols]) for i, row in df.iterrows()]
    if 'chunks' not in st.session_state or 'embeddings' not in st.session_state:
        chunks = convert_to_chunks(df)
        embeddings = EMBED_MODEL.encode(chunks)
        st.session_state.chunks = chunks
        st.session_state.embeddings = embeddings
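    # The chunks and their embeddings are computed once and cached in st.session_state,
    # so chat queries below can retrieve similar rows without re-encoding the whole log
    # on every rerun. Note that uploading a different CSV in the same session will not
    # refresh this cache.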
    # Equipment condition via Isolation Forest
    st.markdown("### ⚙️ Equipment Condition Status")
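    # IsolationForest treats roughly 2% of rows as outliers (contamination=0.02);
    # fit_predict returns -1 for anomalous rows and 1 for normal ones.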
    iso = IsolationForest(contamination=0.02)
    labels = iso.fit_predict(df[numeric_cols])
    df['status'] = ['❌ No Function' if x == -1 else '✅ Functional' for x in labels]
    df['maintenance'] = ['🔧 Needs Maintenance' if x == -1 else '🟢 Stable' for x in labels]
    st.dataframe(df[['status', 'maintenance'] + numeric_cols].head(), use_container_width=True)
    # Role-based Assistant
    st.markdown("### 💬 Role-Based Chat Assistant")
    roles = {
        "Operator": "You are a machine operator. Check if equipment is running properly. If not, flag it immediately.",
        "Maintenance": "You are a maintenance technician. Assess faulty logs and provide service insights.",
        "Engineer": "You are a systems engineer. Offer data-backed advice and failure diagnostics."
    }
    role = st.selectbox("👷 Choose your role", list(roles.keys()))
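    # The selected role's description becomes the system prompt that is prepended to
    # every question sent to the language model below.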
    if 'chat_history' not in st.session_state:
        st.session_state.chat_history = []
    user_input = st.text_input("🗨️ Ask FactoryGPT about machine status or maintenance needs")
    if user_input:
        query_vec = EMBED_MODEL.encode([user_input])[0]
        sims = np.dot(st.session_state.embeddings, query_vec)
        top_idxs = np.argsort(sims)[-3:][::-1]
        context = "\n".join([st.session_state.chunks[i] for i in top_idxs])
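        # Dot-product similarity against the cached row embeddings selects the 3 most
        # relevant log entries as context. The embeddings are not normalized, so this is
        # raw dot-product rather than cosine similarity; passing normalize_embeddings=True
        # to encode() would make the two equivalent.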
        system_prompt = roles[role]
        full_prompt = f"{system_prompt}\n\nSensor Log Context:\n{context}\n\nUser Question: {user_input}"
        reply = GEN_MODEL(full_prompt, max_length=256)[0]['generated_text']
        st.session_state.chat_history.append((f"👤 You ({role})", user_input))
        st.session_state.chat_history.append(("🤖 FactoryGPT", reply))
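    # Render the 10 most recent messages; chat_history lives in st.session_state, so it
    # persists across reruns while the browser tab stays open.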
    for speaker, msg in st.session_state.chat_history[-10:]:
        st.markdown(f"<div style='margin-bottom: 10px;'><b>{speaker}:</b> {msg}</div>", unsafe_allow_html=True)
else:
    st.info("📁 Upload a CSV file with sensor logs to begin.")