Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -1,52 +1,46 @@
|
|
1 |
-
# app.py -
|
2 |
|
3 |
import streamlit as st
|
4 |
import pandas as pd
|
5 |
import numpy as np
|
6 |
-
import matplotlib.pyplot as plt
|
7 |
from sentence_transformers import SentenceTransformer
|
8 |
from transformers import pipeline
|
9 |
from sklearn.ensemble import IsolationForest
|
10 |
-
import base64
|
11 |
-
from io import BytesIO
|
12 |
-
from fpdf import FPDF
|
13 |
|
14 |
-
|
15 |
-
st.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
16 |
|
17 |
# Load models
|
18 |
EMBED_MODEL = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
|
19 |
GEN_MODEL = pipeline('text2text-generation', model='google/flan-t5-base')
|
20 |
|
21 |
-
#
|
22 |
-
|
|
|
23 |
|
24 |
if uploaded_file:
|
25 |
df = pd.read_csv(uploaded_file)
|
26 |
numeric_cols = df.select_dtypes(include=np.number).columns.tolist()
|
27 |
-
st.success("β
Data
|
28 |
|
29 |
-
|
|
|
30 |
st.dataframe(df.head())
|
31 |
|
32 |
-
#
|
33 |
-
with st.expander("π Expand to View Sensor Dashboard", expanded=False):
|
34 |
-
selected_cols = st.multiselect("Select signals to visualize", numeric_cols, default=numeric_cols[:3])
|
35 |
-
fig, ax = plt.subplots(len(selected_cols), 1, figsize=(8, 2 * len(selected_cols)))
|
36 |
-
if len(selected_cols) == 1:
|
37 |
-
ax = [ax]
|
38 |
-
for i, col in enumerate(selected_cols):
|
39 |
-
ax[i].plot(df[col], label=col)
|
40 |
-
ax[i].set_ylabel(col)
|
41 |
-
ax[i].legend()
|
42 |
-
st.pyplot(fig)
|
43 |
-
|
44 |
-
# --- Convert Logs to Chunks ---
|
45 |
def convert_to_chunks(df):
|
46 |
chunks = []
|
47 |
for idx, row in df.iterrows():
|
48 |
-
|
49 |
-
chunks.append(
|
50 |
return chunks
|
51 |
|
52 |
if 'chunks' not in st.session_state or 'embeddings' not in st.session_state:
|
@@ -56,46 +50,25 @@ if uploaded_file:
|
|
56 |
st.session_state.embeddings = embeddings
|
57 |
|
58 |
# --- Anomaly Detection ---
|
59 |
-
st.
|
60 |
iso = IsolationForest(contamination=0.02)
|
61 |
-
|
62 |
-
df['anomaly'] = ['β' if x == -1 else '' for x in
|
63 |
-
st.dataframe(df[df['anomaly']
|
64 |
-
|
65 |
-
# ---
|
66 |
-
st.
|
67 |
-
twin_report = ""
|
68 |
-
for col in selected_cols:
|
69 |
-
max_v = df[col].max()
|
70 |
-
min_v = df[col].min()
|
71 |
-
mean_v = df[col].mean()
|
72 |
-
twin_report += f"{col}\nβ Max: {max_v:.2f}, Min: {min_v:.2f}, Avg: {mean_v:.2f}\n\n"
|
73 |
-
st.code(twin_report)
|
74 |
-
|
75 |
-
# --- PDF Export ---
|
76 |
-
st.subheader("π€ Export Digital Twin Report as PDF")
|
77 |
-
pdf = FPDF()
|
78 |
-
pdf.add_page()
|
79 |
-
pdf.set_font("Arial", size=12)
|
80 |
-
pdf.multi_cell(0, 10, f"FactoryRAG+ Digital Twin Report\n\nSelected Signals: {', '.join(selected_cols)}\n\n" + twin_report)
|
81 |
-
pdf_bytes = pdf.output(dest='S').encode('latin1')
|
82 |
-
b64 = base64.b64encode(pdf_bytes).decode()
|
83 |
-
href = f'<a href="data:application/octet-stream;base64,{b64}" download="digital_twin_report.pdf">π Download PDF Report</a>'
|
84 |
-
st.markdown(href, unsafe_allow_html=True)
|
85 |
-
|
86 |
-
# --- Role-based Factory Assistant Chatbot ---
|
87 |
-
st.subheader("π¬ Factory Assistant Chat")
|
88 |
roles = {
|
89 |
"Operator": "You are a machine operator. Provide practical insights and safety warnings.",
|
90 |
"Maintenance": "You are a maintenance technician. Suggest inspections and likely causes of sensor anomalies.",
|
91 |
"Engineer": "You are a control systems engineer. Offer analytical interpretations and system-level advice."
|
92 |
}
|
93 |
-
role = st.selectbox("
|
94 |
|
95 |
if 'chat_history' not in st.session_state:
|
96 |
st.session_state.chat_history = []
|
97 |
|
98 |
-
user_input = st.text_input("Ask
|
99 |
|
100 |
if user_input:
|
101 |
query_vec = EMBED_MODEL.encode([user_input])[0]
|
@@ -106,11 +79,11 @@ if uploaded_file:
|
|
106 |
full_prompt = f"{system_prompt}\n\nSensor Context:\n{context}\n\nUser Question: {user_input}"
|
107 |
reply = GEN_MODEL(full_prompt, max_length=256)[0]['generated_text']
|
108 |
|
109 |
-
st.session_state.chat_history.append(("You", user_input))
|
110 |
-
st.session_state.chat_history.append((
|
111 |
|
112 |
for speaker, msg in st.session_state.chat_history[-10:]:
|
113 |
-
st.markdown(f"
|
114 |
|
115 |
else:
|
116 |
-
st.info("π
|
|
|
1 |
+
# app.py - FactoryRAG+: Fancy Lite Version with Animation & Chatbot Only
|
2 |
|
3 |
import streamlit as st
|
4 |
import pandas as pd
|
5 |
import numpy as np
|
|
|
6 |
from sentence_transformers import SentenceTransformer
|
7 |
from transformers import pipeline
|
8 |
from sklearn.ensemble import IsolationForest
|
|
|
|
|
|
|
9 |
|
10 |
+
# Page config with emoji + layout
|
11 |
+
st.set_page_config(page_title="FactoryRAG+ Assistant", page_icon="π§ ", layout="wide")
|
12 |
+
|
13 |
+
# Animated header
|
14 |
+
st.markdown("""
|
15 |
+
<h1 style='text-align: center; color: #3498db; font-size: 48px;'>
|
16 |
+
π FactoryRAG+ <span style="font-size: 28px;">| AI Assistant for Smart Sensors</span>
|
17 |
+
</h1>
|
18 |
+
<hr style='border-top: 2px solid #bbb;' />
|
19 |
+
""", unsafe_allow_html=True)
|
20 |
|
21 |
# Load models
|
22 |
EMBED_MODEL = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
|
23 |
GEN_MODEL = pipeline('text2text-generation', model='google/flan-t5-base')
|
24 |
|
25 |
+
# Sidebar upload
|
26 |
+
st.sidebar.markdown("### π Upload Sensor Log")
|
27 |
+
uploaded_file = st.sidebar.file_uploader("Upload a CSV sensor file", type=["csv"])
|
28 |
|
29 |
if uploaded_file:
|
30 |
df = pd.read_csv(uploaded_file)
|
31 |
numeric_cols = df.select_dtypes(include=np.number).columns.tolist()
|
32 |
+
st.success("β
Data uploaded successfully!")
|
33 |
|
34 |
+
# Animated section
|
35 |
+
st.markdown("### π Sensor Log Preview")
|
36 |
st.dataframe(df.head())
|
37 |
|
38 |
+
# Convert to text chunks
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
39 |
def convert_to_chunks(df):
    """Convert each row of a sensor-log DataFrame into a retrievable text chunk.

    Each chunk is a single string of the form
    "[Log <idx>] col1: v1, col2: v2, ..." listing the numeric columns of
    that row, formatted to two decimal places. These strings are what get
    embedded for retrieval by the chat assistant.

    Args:
        df: DataFrame of sensor readings. Only numeric columns are included
            in the chunk text; non-numeric columns are ignored.

    Returns:
        list[str]: one formatted log line per DataFrame row (empty list for
        an empty DataFrame).
    """
    # Derive the numeric columns from `df` itself instead of reading the
    # enclosing scope's `numeric_cols` free variable. The outer variable was
    # computed from the same DataFrame in exactly this way, so behavior is
    # unchanged — but the function no longer raises NameError when called
    # outside the upload branch.
    numeric_cols = df.select_dtypes(include=np.number).columns.tolist()
    chunks = []
    for idx, row in df.iterrows():
        log_text = f"[Log {idx}] " + ", ".join(
            f"{col}: {row[col]:.2f}" for col in numeric_cols
        )
        chunks.append(log_text)
    return chunks
|
45 |
|
46 |
if 'chunks' not in st.session_state or 'embeddings' not in st.session_state:
|
|
|
50 |
st.session_state.embeddings = embeddings
|
51 |
|
52 |
# --- Anomaly Detection ---
|
53 |
+
st.markdown("### π¨ Real-Time Anomaly Scanner")
|
54 |
iso = IsolationForest(contamination=0.02)
|
55 |
+
labels = iso.fit_predict(df[numeric_cols])
|
56 |
+
df['anomaly'] = ['β Anomaly' if x == -1 else 'β
Normal' for x in labels]
|
57 |
+
st.dataframe(df[df['anomaly'].str.contains("β")].head())
|
58 |
+
|
59 |
+
# --- Chatbot Assistant ---
|
60 |
+
st.markdown("### π¬ Ask FactoryGPT")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
61 |
roles = {
|
62 |
"Operator": "You are a machine operator. Provide practical insights and safety warnings.",
|
63 |
"Maintenance": "You are a maintenance technician. Suggest inspections and likely causes of sensor anomalies.",
|
64 |
"Engineer": "You are a control systems engineer. Offer analytical interpretations and system-level advice."
|
65 |
}
|
66 |
+
role = st.selectbox("π· Select your role", list(roles.keys()))
|
67 |
|
68 |
if 'chat_history' not in st.session_state:
|
69 |
st.session_state.chat_history = []
|
70 |
|
71 |
+
user_input = st.text_input("π¨οΈ Ask about the sensor log...", key="chat_input")
|
72 |
|
73 |
if user_input:
|
74 |
query_vec = EMBED_MODEL.encode([user_input])[0]
|
|
|
79 |
full_prompt = f"{system_prompt}\n\nSensor Context:\n{context}\n\nUser Question: {user_input}"
|
80 |
reply = GEN_MODEL(full_prompt, max_length=256)[0]['generated_text']
|
81 |
|
82 |
+
st.session_state.chat_history.append((f"π€ You ({role})", user_input))
|
83 |
+
st.session_state.chat_history.append(("π€ FactoryGPT", reply))
|
84 |
|
85 |
for speaker, msg in st.session_state.chat_history[-10:]:
|
86 |
+
st.markdown(f"<div style='margin-bottom: 10px;'><b>{speaker}:</b> {msg}</div>", unsafe_allow_html=True)
|
87 |
|
88 |
else:
|
89 |
+
st.info("π Please upload a sensor CSV file to begin.")
|