# app.py - FactoryGPT: Humanization for the Factories of the Future in Industry 5.0 - Predict, Perfect, and Connect
import streamlit as st
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sentence_transformers import SentenceTransformer
from transformers import pipeline
from sklearn.ensemble import IsolationForest
import base64
from io import BytesIO
from fpdf import FPDF
st.set_page_config(page_title="FactoryRAG+ - Smart Sensor Twin", layout="wide")
st.title("FactoryRAG+: Smart Dashboard with AI Monitoring, PDF Reporting & Digital Twin")
# Load models
EMBED_MODEL = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
GEN_MODEL = pipeline('text2text-generation', model='google/flan-t5-base')
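# Note: both models are re-instantiated on every Streamlit rerun. A cached variant is
# sketched below (an assumption, not part of the original app; requires a Streamlit
# version that provides st.cache_resource):
#
#   @st.cache_resource
#   def load_models():
#       embed = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
#       gen = pipeline('text2text-generation', model='google/flan-t5-base')
#       return embed, gen
#
#   EMBED_MODEL, GEN_MODEL = load_models()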
# Upload data
uploaded_file = st.sidebar.file_uploader("Upload your condition monitoring CSV", type=["csv"])
if uploaded_file:
    df = pd.read_csv(uploaded_file)
    numeric_cols = df.select_dtypes(include=np.number).columns.tolist()
    st.success("Data loaded successfully!")
    st.subheader("Sensor Snapshot")
    st.dataframe(df.head())
    # --- Multi-signal Dashboard (Collapsible) ---
    with st.expander("Expand to View Sensor Dashboard", expanded=False):
        selected_cols = st.multiselect("Select signals to visualize", numeric_cols, default=numeric_cols[:3])
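        # One stacked subplot per selected signal. With a single selection,
        # plt.subplots returns a lone Axes object rather than an array, hence the
        # list wrap below. selected_cols is also reused later for the digital twin
        # summary and the PDF report.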
        fig, ax = plt.subplots(len(selected_cols), 1, figsize=(8, 2 * len(selected_cols)))
        if len(selected_cols) == 1:
            ax = [ax]
        for i, col in enumerate(selected_cols):
            ax[i].plot(df[col], label=col)
            ax[i].set_ylabel(col)
            ax[i].legend()
        st.pyplot(fig)
    # --- Convert Logs to Chunks ---
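    # Each sensor row is serialized into one text "chunk" that later serves as the
    # retrieval corpus for the chatbot. With hypothetical columns temp and rpm, a
    # chunk would look like: "[Log 0] temp: 71.30, rpm: 1480.00".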
    def convert_to_chunks(df):
        chunks = []
        for idx, row in df.iterrows():
            sentence = f"[Log {idx}] " + ", ".join([f"{col}: {row[col]:.2f}" for col in numeric_cols])
            chunks.append(sentence)
        return chunks
    if 'chunks' not in st.session_state or 'embeddings' not in st.session_state:
        chunks = convert_to_chunks(df)
        embeddings = EMBED_MODEL.encode(chunks)
        st.session_state.chunks = chunks
        st.session_state.embeddings = embeddings
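    # Chunks and embeddings are computed once per browser session and cached in
    # st.session_state. Uploading a different CSV in the same session will not refresh
    # them unless these keys are cleared (for example by also storing and comparing the
    # uploaded file's name, a possible extension not implemented here).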
    # --- Anomaly Detection ---
    st.subheader("Anomaly Detection (Isolation Forest)")
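    # IsolationForest is fit on all numeric columns; fit_predict returns -1 for
    # outliers and 1 for inliers. contamination=0.02 assumes roughly 2% of the rows
    # are anomalous, so the flag rate follows that setting rather than the data.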
    iso = IsolationForest(contamination=0.02)
    anomaly_labels = iso.fit_predict(df[numeric_cols])
    df['anomaly'] = ['⚠️' if x == -1 else '' for x in anomaly_labels]
    st.dataframe(df[df['anomaly'] == '⚠️'].head(5))
    # --- Digital Twin Summary ---
    st.subheader("Digital Twin Summary")
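    # The "digital twin" here is a lightweight statistical snapshot: max, min and mean
    # per selected signal, reported as plain text and reused verbatim in the PDF below.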
twin_report = ""
for col in selected_cols:
max_v = df[col].max()
min_v = df[col].min()
mean_v = df[col].mean()
twin_report += f"{col}\nβ Max: {max_v:.2f}, Min: {min_v:.2f}, Avg: {mean_v:.2f}\n\n"
st.code(twin_report)
    # --- PDF Export ---
    st.subheader("Export Digital Twin Report as PDF")
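    # This block assumes the classic fpdf 1.x package, where pdf.output(dest='S')
    # returns a latin-1 string; with the newer fpdf2 library, output() returns a
    # bytearray and the .encode('latin1') call below would fail.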
    pdf = FPDF()
    pdf.add_page()
    pdf.set_font("Arial", size=12)
    pdf.multi_cell(0, 10, f"FactoryRAG+ Digital Twin Report\n\nSelected Signals: {', '.join(selected_cols)}\n\n" + twin_report)
    pdf_bytes = pdf.output(dest='S').encode('latin1')
    b64 = base64.b64encode(pdf_bytes).decode()
    href = f'<a href="data:application/octet-stream;base64,{b64}" download="digital_twin_report.pdf">Download PDF Report</a>'
    st.markdown(href, unsafe_allow_html=True)
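    # The download link embeds the whole PDF as a base64 data URI rendered with
    # unsafe_allow_html. On Streamlit versions that provide st.download_button, that
    # widget would be a simpler alternative (a suggestion, not used in the original app).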
    # --- Role-based Factory Assistant Chatbot ---
    st.subheader("Factory Assistant Chat")
    roles = {
        "Operator": "You are a machine operator. Provide practical insights and safety warnings.",
        "Maintenance": "You are a maintenance technician. Suggest inspections and likely causes of sensor anomalies.",
        "Engineer": "You are a control systems engineer. Offer analytical interpretations and system-level advice."
    }
    role = st.selectbox("Choose your role: Operator, Maintenance, or Engineer", list(roles.keys()))
    if 'chat_history' not in st.session_state:
        st.session_state.chat_history = []

    user_input = st.text_input("Ask FactoryGPT anything (based on uploaded sensor logs):", key="chat_input")
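    # Retrieval-augmented answering: the question is embedded with the same model as
    # the log chunks and scored by dot product against the cached embeddings. If the
    # model outputs unit-normalized vectors (the usual behaviour of all-MiniLM-L6-v2),
    # this equals cosine similarity; otherwise it is an unnormalized score. The three
    # highest-scoring chunks become the context passed to FLAN-T5 together with the
    # selected role's persona prompt.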
    if user_input:
        query_vec = EMBED_MODEL.encode([user_input])[0]
        sims = np.dot(st.session_state.embeddings, query_vec)
        top_idxs = np.argsort(sims)[-3:][::-1]
        context = "\n".join([st.session_state.chunks[i] for i in top_idxs])
        system_prompt = roles[role]
        full_prompt = f"{system_prompt}\n\nSensor Context:\n{context}\n\nUser Question: {user_input}"
        reply = GEN_MODEL(full_prompt, max_length=256)[0]['generated_text']
        st.session_state.chat_history.append(("You", user_input))
        st.session_state.chat_history.append((f"{role} - FactoryGPT", reply))
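    # Only the last 10 chat messages are rendered below. Note that flan-t5-base was
    # trained with a 512-token input window, so very long sensor contexts may degrade
    # answer quality.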
    for speaker, msg in st.session_state.chat_history[-10:]:
        st.markdown(f"**{speaker}:** {msg}")
else:
    st.info("Upload a sensor log CSV file to explore digital twin analysis, chatbot Q&A, waveform charts, anomaly detection, and PDF export.")