bhagwandas committed on
Commit
5f49e69
·
verified ·
1 Parent(s): 75b6b4d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +46 -28
app.py CHANGED
@@ -1,4 +1,4 @@
1
- # app.py - FactoryGPT 5.0: Predictive Maintenance + Role Chat (No 3D Map)
2
 
3
  import streamlit as st
4
  import pandas as pd
@@ -9,7 +9,7 @@ from sklearn.ensemble import IsolationForest
9
 
10
  # Page setup
11
  st.set_page_config(
12
- page_title="FactoryGPT 5.0 – Predict, Perfect, and Connect",
13
  page_icon="🧠",
14
  layout="wide"
15
  )
@@ -33,20 +33,20 @@ st.markdown("""
33
  </style>
34
  """, unsafe_allow_html=True)
35
 
36
- # Title
37
  st.markdown("""
38
  <div style='text-align: center;'>
39
- <h1 style='color: #58a6ff;'>🏭 FactoryGPT 5.0 – Predict, Perfect, and Connect</h1>
40
- <p style='color: #bbb;'>AI-Powered Predictive Maintenance | Human-in-the-Loop Decision Support</p>
41
  <hr style='border-top: 2px solid #888;'>
42
  </div>
43
  """, unsafe_allow_html=True)
44
 
45
- # Load models
46
  EMBED_MODEL = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
47
  GEN_MODEL = pipeline('text2text-generation', model='google/flan-t5-base')
48
 
49
- # File upload
50
  uploaded_file = st.sidebar.file_uploader("πŸ“‚ Upload your sensor CSV", type=["csv"])
51
 
52
  if uploaded_file:
@@ -54,12 +54,12 @@ if uploaded_file:
54
  numeric_cols = df.select_dtypes(include=np.number).columns.tolist()
55
  st.success("βœ… Sensor log loaded!")
56
 
57
- st.markdown("### 🧾 Sensor Log Preview")
58
  st.dataframe(df.head(), use_container_width=True)
59
 
60
- # RAG Embeddings
61
  def convert_to_chunks(df):
62
- return [f"[Log {i}] " + ", ".join([f"{col}: {row[col]:.2f}" for col in numeric_cols]) for i, row in df.iterrows()]
63
 
64
  if 'chunks' not in st.session_state or 'embeddings' not in st.session_state:
65
  chunks = convert_to_chunks(df)
@@ -67,43 +67,61 @@ if uploaded_file:
67
  st.session_state.chunks = chunks
68
  st.session_state.embeddings = embeddings
69
 
70
- # Equipment condition via Isolation Forest
71
- st.markdown("### βš™οΈ Equipment Condition Status")
72
  iso = IsolationForest(contamination=0.02)
73
  labels = iso.fit_predict(df[numeric_cols])
74
- df['status'] = ['❌ No Function' if x == -1 else 'βœ… Functional' for x in labels]
75
- df['maintenance'] = ['πŸ”§ Needs Maintenance' if x == -1 else '🟒 Stable' for x in labels]
76
- st.dataframe(df[['status', 'maintenance'] + numeric_cols].head(), use_container_width=True)
77
 
78
- # Role-based Assistant
79
- st.markdown("### πŸ’¬ Role-Based Chat Assistant")
80
  roles = {
81
- "Operator": "You are a machine operator. Check if equipment is running properly. If not, flag it immediately.",
82
- "Maintenance": "You are a maintenance technician. Assess faulty logs and provide service insights.",
83
- "Engineer": "You are a systems engineer. Offer data-backed advice and failure diagnostics."
 
 
 
 
 
 
 
 
 
84
  }
85
 
86
- role = st.selectbox("πŸ‘· Choose your role", list(roles.keys()))
87
 
88
  if 'chat_history' not in st.session_state:
89
  st.session_state.chat_history = []
90
 
91
- user_input = st.text_input("πŸ—¨οΈ Ask FactoryGPT about machine status or maintenance needs")
92
 
93
  if user_input:
 
94
  query_vec = EMBED_MODEL.encode([user_input])[0]
95
  sims = np.dot(st.session_state.embeddings, query_vec)
96
- top_idxs = np.argsort(sims)[-3:][::-1]
97
  context = "\n".join([st.session_state.chunks[i] for i in top_idxs])
98
- system_prompt = roles[role]
99
- full_prompt = f"{system_prompt}\n\nSensor Log Context:\n{context}\n\nUser Question: {user_input}"
100
- reply = GEN_MODEL(full_prompt, max_length=256)[0]['generated_text']
101
 
 
 
 
 
 
 
 
 
 
 
102
  st.session_state.chat_history.append((f"πŸ‘€ You ({role})", user_input))
103
  st.session_state.chat_history.append(("πŸ€– FactoryGPT", reply))
104
 
 
105
  for speaker, msg in st.session_state.chat_history[-10:]:
106
- st.markdown(f"<div style='margin-bottom: 10px;'><b>{speaker}:</b> {msg}</div>", unsafe_allow_html=True)
107
 
108
  else:
109
- st.info("πŸ‘ˆ Upload a CSV file with sensor logs to begin.")
 
1
+ # app.py - FactoryGPT 5.0: Predictive Maintenance with Technical Expert GPT
2
 
3
  import streamlit as st
4
  import pandas as pd
 
9
 
10
  # Page setup
11
  st.set_page_config(
12
+ page_title="FactoryGPT 5.0 – Predictive Maintenance Expert",
13
  page_icon="🧠",
14
  layout="wide"
15
  )
 
33
  </style>
34
  """, unsafe_allow_html=True)
35
 
36
+ # Header
37
  st.markdown("""
38
  <div style='text-align: center;'>
39
+ <h1 style='color: #58a6ff;'>🏭 FactoryGPT 5.0 – Predictive Maintenance Assistant</h1>
40
+ <p style='color: #bbb;'>Sensor-Driven Diagnostics | Role-Based Expert Assistant | Industry 5.0 Ready</p>
41
  <hr style='border-top: 2px solid #888;'>
42
  </div>
43
  """, unsafe_allow_html=True)
44
 
45
+ # Load NLP models
46
  EMBED_MODEL = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
47
  GEN_MODEL = pipeline('text2text-generation', model='google/flan-t5-base')
48
 
49
+ # Upload data
50
  uploaded_file = st.sidebar.file_uploader("πŸ“‚ Upload your sensor CSV", type=["csv"])
51
 
52
  if uploaded_file:
 
54
  numeric_cols = df.select_dtypes(include=np.number).columns.tolist()
55
  st.success("βœ… Sensor log loaded!")
56
 
57
+ st.markdown("### πŸ“Š Sensor Data Preview")
58
  st.dataframe(df.head(), use_container_width=True)
59
 
60
# Prepare vector chunks
def convert_to_chunks(df, cols=None):
    """Serialize each sensor-log row into one text chunk for embedding.

    Args:
        df: DataFrame of sensor readings.
        cols: columns to render, in order. Defaults to all numeric columns
            of ``df`` — exactly what the module-level ``numeric_cols`` the
            original closed over was computed from, so default behavior is
            unchanged while the hidden global dependency is removed.

    Returns:
        list[str]: one ``"[Entry i] col: val, ..."`` line per row,
        values formatted to 2 decimal places.
    """
    if cols is None:
        cols = df.select_dtypes(include=np.number).columns.tolist()
    return [
        f"[Entry {i}] " + ", ".join(f"{col}: {row[col]:.2f}" for col in cols)
        for i, row in df.iterrows()
    ]
63
 
64
  if 'chunks' not in st.session_state or 'embeddings' not in st.session_state:
65
  chunks = convert_to_chunks(df)
 
67
  st.session_state.chunks = chunks
68
  st.session_state.embeddings = embeddings
69
 
70
# Predict machine status: unsupervised anomaly detection over the numeric
# sensor columns, flagging ~2% of rows (contamination) as faulty.
st.markdown("### 🔍 Equipment Status Prediction")
# random_state pins the forest so the set of flagged rows does not change
# between Streamlit reruns of the same uploaded file (the unseeded original
# could show different "Faulty" rows on every interaction).
iso = IsolationForest(contamination=0.02, random_state=42)
labels = iso.fit_predict(df[numeric_cols])  # -1 = anomaly, 1 = inlier
df['status'] = ['❌ Faulty' if x == -1 else '✅ OK' for x in labels]
df['maintenance_flag'] = ['🔧 Action Required' if x == -1 else '🟢 Normal' for x in labels]
st.dataframe(df[['status', 'maintenance_flag'] + numeric_cols].head(), use_container_width=True)
77
 
78
+ # Role-based assistant
79
+ st.markdown("### πŸ’¬ Role-Based Technical Assistant")
80
# Persona instructions for the role-based assistant. Keys feed the role
# selector; the chosen value becomes the ROLE CONTEXT of the LLM prompt.
_OPERATOR_PROMPT = (
    "You are a senior machine operator. Based on sensor logs, identify abnormal conditions in real-time operation. "
    "Explain deviations clearly and suggest next steps to the floor team."
)
_MAINTENANCE_PROMPT = (
    "You are a certified maintenance technician. Analyze anomalous sensor readings, reference maintenance schedules, "
    "and recommend immediate actions or predictive interventions with justification."
)
_ENGINEER_PROMPT = (
    "You are a reliability and control systems engineer. Provide a detailed root cause analysis from the sensor logs, "
    "explain what failure modes may be emerging (e.g., torque drift, PID lag), and propose technical mitigations."
)
roles = {
    "Operator": _OPERATOR_PROMPT,
    "Maintenance": _MAINTENANCE_PROMPT,
    "Engineer": _ENGINEER_PROMPT,
}
94
 
95
# --- Role-based chat assistant -------------------------------------------
role = st.selectbox("👀 Select your role for technical insights", list(roles.keys()))

if 'chat_history' not in st.session_state:
    st.session_state.chat_history = []

user_input = st.text_input("🧠 Ask FactoryGPT a technical question about the machine behavior")

if user_input:
    # Retrieve relevant context from logs: rank every cached log chunk
    # against the query. MiniLM embeddings are not unit-length, so a raw
    # dot product favours longer vectors regardless of relevance; normalise
    # both sides to get cosine similarity (the model's intended metric),
    # then keep the 5 best matches, most similar first.
    query_vec = EMBED_MODEL.encode([user_input])[0]
    emb = np.asarray(st.session_state.embeddings, dtype=float)
    emb = emb / np.maximum(np.linalg.norm(emb, axis=1, keepdims=True), 1e-12)
    q = query_vec / max(np.linalg.norm(query_vec), 1e-12)
    sims = emb @ q
    top_idxs = np.argsort(sims)[-5:][::-1]
    context = "\n".join(st.session_state.chunks[i] for i in top_idxs)

    # Prompt engineering: role persona + retrieved log context + instruction.
    system_prompt = (
        f"ROLE CONTEXT: {roles[role]}\n\n"
        f"DATA CONTEXT:\n{context}\n\n"
        f"INSTRUCTION:\nPlease respond with a technical explanation that includes sensor-based reasoning, terminology, and if possible, an action plan.\n"
    )
    full_prompt = f"{system_prompt}\nUser Query: {user_input}"
    # truncation=True keeps over-long prompts inside flan-t5's encoder
    # input limit instead of failing on large retrieved contexts.
    reply = GEN_MODEL(full_prompt, max_length=350, truncation=True)[0]['generated_text']

    # Record conversation
    st.session_state.chat_history.append((f"👀 You ({role})", user_input))
    st.session_state.chat_history.append(("🤖 FactoryGPT", reply))

# Show the last 10 chat messages, oldest first.
for speaker, msg in st.session_state.chat_history[-10:]:
    st.markdown(f"<div style='margin-bottom: 10px;'><b>{speaker}:</b><br>{msg}</div>", unsafe_allow_html=True)
125
 
126
  else:
127
+ st.info("πŸ‘ˆ Please upload a CSV file containing numeric sensor logs to begin analysis.")