bhagwandas committed · verified
Commit 0cd7d19 · 1 Parent(s): bcf8dac

Update app.py

Files changed (1)
  1. app.py +33 -60
app.py CHANGED
@@ -1,52 +1,46 @@
-# app.py - FactoryGPT: FactoryGPT Humanization for the Factories of the Future in Industry 5.0 – Predict, Perfect, and Connect
+# app.py - FactoryRAG+: Fancy Lite Version with Animation & Chatbot Only
 
 import streamlit as st
 import pandas as pd
 import numpy as np
-import matplotlib.pyplot as plt
 from sentence_transformers import SentenceTransformer
 from transformers import pipeline
 from sklearn.ensemble import IsolationForest
-import base64
-from io import BytesIO
-from fpdf import FPDF
 
-st.set_page_config(page_title="FactoryRAG+ - Smart Sensor Twin", layout="wide")
-st.title("🏭 FactoryRAG+: Smart Dashboard with AI Monitoring, PDF Reporting & Digital Twin")
+# Page config with emoji + layout
+st.set_page_config(page_title="FactoryRAG+ Assistant", page_icon="🧠", layout="wide")
+
+# Animated header
+st.markdown("""
+<h1 style='text-align: center; color: #3498db; font-size: 48px;'>
+🏭 FactoryRAG+ <span style="font-size: 28px;">| AI Assistant for Smart Sensors</span>
+</h1>
+<hr style='border-top: 2px solid #bbb;' />
+""", unsafe_allow_html=True)
 
 # Load models
 EMBED_MODEL = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
 GEN_MODEL = pipeline('text2text-generation', model='google/flan-t5-base')
 
-# Upload data
-uploaded_file = st.sidebar.file_uploader("📂 Upload your condition monitoring CSV", type=["csv"])
+# Sidebar upload
+st.sidebar.markdown("### 📂 Upload Sensor Log")
+uploaded_file = st.sidebar.file_uploader("Upload a CSV sensor file", type=["csv"])
 
 if uploaded_file:
     df = pd.read_csv(uploaded_file)
     numeric_cols = df.select_dtypes(include=np.number).columns.tolist()
-    st.success("✅ Data loaded successfully!")
+    st.success("✅ Data uploaded successfully!")
 
-    st.subheader("📊 Sensor Snapshot")
+    # Animated section
+    st.markdown("### 🔍 Sensor Log Preview")
     st.dataframe(df.head())
 
-    # --- Multi-signal Dashboard (Collapsible) ---
-    with st.expander("📈 Expand to View Sensor Dashboard", expanded=False):
-        selected_cols = st.multiselect("Select signals to visualize", numeric_cols, default=numeric_cols[:3])
-        fig, ax = plt.subplots(len(selected_cols), 1, figsize=(8, 2 * len(selected_cols)))
-        if len(selected_cols) == 1:
-            ax = [ax]
-        for i, col in enumerate(selected_cols):
-            ax[i].plot(df[col], label=col)
-            ax[i].set_ylabel(col)
-            ax[i].legend()
-        st.pyplot(fig)
-
-    # --- Convert Logs to Chunks ---
+    # Convert to text chunks
     def convert_to_chunks(df):
         chunks = []
         for idx, row in df.iterrows():
-            sentence = f"[Log {idx}] " + ", ".join([f"{col}: {row[col]:.2f}" for col in numeric_cols])
-            chunks.append(sentence)
+            log_text = f"[Log {idx}] " + ", ".join([f"{col}: {row[col]:.2f}" for col in numeric_cols])
+            chunks.append(log_text)
         return chunks
 
     if 'chunks' not in st.session_state or 'embeddings' not in st.session_state:
@@ -56,46 +50,25 @@ if uploaded_file:
         st.session_state.embeddings = embeddings
 
    # --- Anomaly Detection ---
-    st.subheader("🚨 Anomaly Detection (Isolation Forest)")
+    st.markdown("### 🚨 Real-Time Anomaly Scanner")
     iso = IsolationForest(contamination=0.02)
-    anomaly_labels = iso.fit_predict(df[numeric_cols])
-    df['anomaly'] = ['❌' if x == -1 else '' for x in anomaly_labels]
-    st.dataframe(df[df['anomaly'] == '❌'].head(5))
-
-    # --- Digital Twin Summary ---
-    st.subheader("🧪 Digital Twin Summary")
-    twin_report = ""
-    for col in selected_cols:
-        max_v = df[col].max()
-        min_v = df[col].min()
-        mean_v = df[col].mean()
-        twin_report += f"{col}\n→ Max: {max_v:.2f}, Min: {min_v:.2f}, Avg: {mean_v:.2f}\n\n"
-    st.code(twin_report)
-
-    # --- PDF Export ---
-    st.subheader("📤 Export Digital Twin Report as PDF")
-    pdf = FPDF()
-    pdf.add_page()
-    pdf.set_font("Arial", size=12)
-    pdf.multi_cell(0, 10, f"FactoryRAG+ Digital Twin Report\n\nSelected Signals: {', '.join(selected_cols)}\n\n" + twin_report)
-    pdf_bytes = pdf.output(dest='S').encode('latin1')
-    b64 = base64.b64encode(pdf_bytes).decode()
-    href = f'<a href="data:application/octet-stream;base64,{b64}" download="digital_twin_report.pdf">📄 Download PDF Report</a>'
-    st.markdown(href, unsafe_allow_html=True)
-
-    # --- Role-based Factory Assistant Chatbot ---
-    st.subheader("💬 Factory Assistant Chat")
+    labels = iso.fit_predict(df[numeric_cols])
+    df['anomaly'] = ['❌ Anomaly' if x == -1 else '✅ Normal' for x in labels]
+    st.dataframe(df[df['anomaly'].str.contains("❌")].head())
+
+    # --- Chatbot Assistant ---
+    st.markdown("### 💬 Ask FactoryGPT")
     roles = {
         "Operator": "You are a machine operator. Provide practical insights and safety warnings.",
         "Maintenance": "You are a maintenance technician. Suggest inspections and likely causes of sensor anomalies.",
         "Engineer": "You are a control systems engineer. Offer analytical interpretations and system-level advice."
     }
-    role = st.selectbox("👤 Choose your role: Operator, Maintenance, or Engineer", list(roles.keys()))
+    role = st.selectbox("👷 Select your role", list(roles.keys()))
 
     if 'chat_history' not in st.session_state:
         st.session_state.chat_history = []
 
-    user_input = st.text_input("Ask FactoryGPT anything (based on uploaded sensor logs):", key="chat_input")
+    user_input = st.text_input("🗨️ Ask about the sensor log...", key="chat_input")
 
     if user_input:
         query_vec = EMBED_MODEL.encode([user_input])[0]
@@ -106,11 +79,11 @@ if uploaded_file:
         full_prompt = f"{system_prompt}\n\nSensor Context:\n{context}\n\nUser Question: {user_input}"
         reply = GEN_MODEL(full_prompt, max_length=256)[0]['generated_text']
 
-        st.session_state.chat_history.append(("You", user_input))
-        st.session_state.chat_history.append((f"{role} - FactoryGPT", reply))
+        st.session_state.chat_history.append((f"👤 You ({role})", user_input))
+        st.session_state.chat_history.append(("🤖 FactoryGPT", reply))
 
     for speaker, msg in st.session_state.chat_history[-10:]:
-        st.markdown(f"**{speaker}:** {msg}")
+        st.markdown(f"<div style='margin-bottom: 10px;'><b>{speaker}:</b> {msg}</div>", unsafe_allow_html=True)
 
 else:
-    st.info("👈 Upload a sensor log CSV file to explore digital twin analysis, chatbot Q&A, waveform charts, anomaly detection, and PDF export.")
+    st.info("👈 Please upload a sensor CSV file to begin.")
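
The retrieval step that builds the Sensor Context passed to full_prompt falls in the unchanged lines between the hunks and is therefore not shown above. Below is a minimal sketch of how that kind of lookup is typically wired with sentence-transformers embeddings and NumPy; the helper name top_k_chunks and the parameter k are hypothetical illustrations, not identifiers taken from app.py.

import numpy as np
from sentence_transformers import SentenceTransformer

embed_model = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')

def top_k_chunks(query, chunks, chunk_embeddings, k=3):
    # Embed the query, rank the stored log chunks by cosine similarity, return the best k.
    query_vec = embed_model.encode([query])[0]
    emb = np.asarray(chunk_embeddings)
    sims = emb @ query_vec / (np.linalg.norm(emb, axis=1) * np.linalg.norm(query_vec) + 1e-10)
    top_idx = np.argsort(sims)[::-1][:k]
    return [chunks[i] for i in top_idx]

# Example use with the names app.py keeps in st.session_state:
# context = "\n".join(top_k_chunks(user_input, st.session_state.chunks, st.session_state.embeddings))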