# app.py - FactoryRAG+: Fancy Lite Version with Animation & Chatbot Only

import streamlit as st
import pandas as pd
import numpy as np
from sentence_transformers import SentenceTransformer
from transformers import pipeline
from sklearn.ensemble import IsolationForest

# Page config with emoji + layout
st.set_page_config(page_title="FactoryRAG+ Assistant", page_icon="🧠", layout="wide")

# Animated header
st.markdown("""
<div style="text-align: center;">
    <h1>🏭 FactoryRAG+ | AI Assistant for Smart Sensors</h1>
</div>
""", unsafe_allow_html=True)
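# --- How to run (a sketch; exact package versions are not pinned here) ---
# Install the packages imported above, e.g.:
#   pip install streamlit pandas numpy scikit-learn sentence-transformers transformers torch
# then launch with:
#   streamlit run app.py
# The sidebar uploader expects a CSV whose numeric columns are sensor channels.
# The column names below are purely illustrative; the code only requires that
# some columns be numeric:
#   timestamp,temperature,vibration,pressure
#   2024-01-01 00:00:00,71.2,0.03,101.4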

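# NOTE: Streamlit reruns this whole script on every interaction, so the two
# model loads below are repeated on each rerun. A common workaround (sketch
# only, not wired into the app; assumes a Streamlit version that provides
# st.cache_resource, and load_models() is a hypothetical helper) is:
#
# @st.cache_resource
# def load_models():
#     embedder = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
#     generator = pipeline('text2text-generation', model='google/flan-t5-base')
#     return embedder, generator
#
# EMBED_MODEL, GEN_MODEL = load_models()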
# Load models
EMBED_MODEL = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
GEN_MODEL = pipeline('text2text-generation', model='google/flan-t5-base')

# Sidebar upload
st.sidebar.markdown("### 📂 Upload Sensor Log")
uploaded_file = st.sidebar.file_uploader("Upload a CSV sensor file", type=["csv"])

if uploaded_file:
    df = pd.read_csv(uploaded_file)
    numeric_cols = df.select_dtypes(include=np.number).columns.tolist()
    st.success("✅ Data uploaded successfully!")

    # Animated section
    st.markdown("### 🔍 Sensor Log Preview")
    st.dataframe(df.head())

    # Convert to text chunks
    def convert_to_chunks(df):
        chunks = []
        for idx, row in df.iterrows():
            log_text = f"[Log {idx}] " + ", ".join([f"{col}: {row[col]:.2f}" for col in numeric_cols])
            chunks.append(log_text)
        return chunks

    if 'chunks' not in st.session_state or 'embeddings' not in st.session_state:
        chunks = convert_to_chunks(df)
        embeddings = EMBED_MODEL.encode(chunks)
        st.session_state.chunks = chunks
        st.session_state.embeddings = embeddings

    # --- Anomaly Detection ---
    st.markdown("### 🚨 Real-Time Anomaly Scanner")
    iso = IsolationForest(contamination=0.02)
    labels = iso.fit_predict(df[numeric_cols])
    df['anomaly'] = ['❌ Anomaly' if x == -1 else '✅ Normal' for x in labels]
    st.dataframe(df[df['anomaly'].str.contains("❌")].head())

    # --- Chatbot Assistant ---
    st.markdown("### 💬 Ask FactoryGPT")
    roles = {
        "Operator": "You are a machine operator. Provide practical insights and safety warnings.",
        "Maintenance": "You are a maintenance technician. Suggest inspections and likely causes of sensor anomalies.",
        "Engineer": "You are a control systems engineer. Offer analytical interpretations and system-level advice."
    }
    role = st.selectbox("👷 Select your role", list(roles.keys()))

    if 'chat_history' not in st.session_state:
        st.session_state.chat_history = []

    user_input = st.text_input("🗨️ Ask about the sensor log...", key="chat_input")

    if user_input:
        query_vec = EMBED_MODEL.encode([user_input])[0]
        sims = np.dot(st.session_state.embeddings, query_vec)
        top_idxs = np.argsort(sims)[-3:][::-1]
        context = "\n".join([st.session_state.chunks[i] for i in top_idxs])
        system_prompt = roles[role]
        full_prompt = f"{system_prompt}\n\nSensor Context:\n{context}\n\nUser Question: {user_input}"
        reply = GEN_MODEL(full_prompt, max_length=256)[0]['generated_text']
        st.session_state.chat_history.append((f"👤 You ({role})", user_input))
        st.session_state.chat_history.append(("🤖 FactoryGPT", reply))

    for speaker, msg in st.session_state.chat_history[-10:]:
        st.markdown(f"<div><b>{speaker}:</b> {msg}</div>", unsafe_allow_html=True)
else:
    st.info("👈 Please upload a sensor CSV file to begin.")