# app.py - FactoryRAG+: Fancy Lite Version with Animation & Chatbot Only
import streamlit as st
import pandas as pd
import numpy as np
from sentence_transformers import SentenceTransformer
from transformers import pipeline
from sklearn.ensemble import IsolationForest
# Page config with emoji + layout
st.set_page_config(page_title="FactoryRAG+ Assistant", page_icon="🧠", layout="wide")

# Animated header
st.markdown("""
    <h1 style='text-align: center; color: #3498db; font-size: 48px;'>
        🏭 FactoryRAG+ <span style="font-size: 28px;">| AI Assistant for Smart Sensors</span>
    </h1>
    <hr style='border-top: 2px solid #bbb;' />
""", unsafe_allow_html=True)
# Load models
EMBED_MODEL = SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')
GEN_MODEL = pipeline('text2text-generation', model='google/flan-t5-base')
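# all-MiniLM-L6-v2 produces 384-dim sentence embeddings used for retrieval,
# while flan-t5-base generates the chat replies. Streamlit re-runs this script
# on every interaction, so both models are rebuilt on each rerun; wrapping the
# loaders in st.cache_resource is a common way to avoid that cost.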
# Sidebar upload
st.sidebar.markdown("### 📁 Upload Sensor Log")
uploaded_file = st.sidebar.file_uploader("Upload a CSV sensor file", type=["csv"])
if uploaded_file:
    df = pd.read_csv(uploaded_file)
    numeric_cols = df.select_dtypes(include=np.number).columns.tolist()
    st.success("✅ Data uploaded successfully!")

    # Animated section
    st.markdown("### 📊 Sensor Log Preview")
    st.dataframe(df.head())
    # Convert to text chunks
    def convert_to_chunks(df):
        chunks = []
        for idx, row in df.iterrows():
            log_text = f"[Log {idx}] " + ", ".join([f"{col}: {row[col]:.2f}" for col in numeric_cols])
            chunks.append(log_text)
        return chunks
    if 'chunks' not in st.session_state or 'embeddings' not in st.session_state:
        chunks = convert_to_chunks(df)
        embeddings = EMBED_MODEL.encode(chunks)
        st.session_state.chunks = chunks
        st.session_state.embeddings = embeddings
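    # The chunk texts and their embeddings form the retrieval index for the
    # chatbot below. They are kept in st.session_state so the (potentially slow)
    # encoding step runs only once per session instead of on every rerun.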
    # --- Anomaly Detection ---
    st.markdown("### 🚨 Real-Time Anomaly Scanner")
    iso = IsolationForest(contamination=0.02)
    labels = iso.fit_predict(df[numeric_cols])
    df['anomaly'] = ['⚠️ Anomaly' if x == -1 else '✅ Normal' for x in labels]
    st.dataframe(df[df['anomaly'].str.contains("Anomaly")].head())
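    # IsolationForest is an unsupervised outlier detector; contamination=0.02
    # tells it to treat roughly 2% of the rows as anomalous. fit_predict returns
    # -1 for outliers and 1 for inliers, which the labels above map to text.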
    # --- Chatbot Assistant ---
    st.markdown("### 💬 Ask FactoryGPT")
    roles = {
        "Operator": "You are a machine operator. Provide practical insights and safety warnings.",
        "Maintenance": "You are a maintenance technician. Suggest inspections and likely causes of sensor anomalies.",
        "Engineer": "You are a control systems engineer. Offer analytical interpretations and system-level advice."
    }
    role = st.selectbox("👷 Select your role", list(roles.keys()))

    if 'chat_history' not in st.session_state:
        st.session_state.chat_history = []

    user_input = st.text_input("🗨️ Ask about the sensor log...", key="chat_input")
    if user_input:
        query_vec = EMBED_MODEL.encode([user_input])[0]
        sims = np.dot(st.session_state.embeddings, query_vec)
        top_idxs = np.argsort(sims)[-3:][::-1]
        context = "\n".join([st.session_state.chunks[i] for i in top_idxs])
        system_prompt = roles[role]
        full_prompt = f"{system_prompt}\n\nSensor Context:\n{context}\n\nUser Question: {user_input}"
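        # Retrieval-then-generation: the dot-product scores rank every log chunk
        # against the question, the top-3 chunks become the "Sensor Context", and
        # FLAN-T5 answers in the persona chosen above (output capped at 256 tokens).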
        reply = GEN_MODEL(full_prompt, max_length=256)[0]['generated_text']
        st.session_state.chat_history.append((f"👤 You ({role})", user_input))
        st.session_state.chat_history.append(("🤖 FactoryGPT", reply))
    for speaker, msg in st.session_state.chat_history[-10:]:
        st.markdown(f"<div style='margin-bottom: 10px;'><b>{speaker}:</b> {msg}</div>", unsafe_allow_html=True)
else:
    st.info("📁 Please upload a sensor CSV file to begin.")