Update app.py
f6b4c75 verified
import streamlit as st
import pandas as pd
import plotly.express as px
from datetime import datetime, timedelta
from simple_salesforce import Salesforce
from transformers import pipeline
from reportlab.lib.pagesizes import letter
from reportlab.platypus import SimpleDocTemplate, Table, TableStyle, Paragraph
from reportlab.lib import colors
from reportlab.lib.styles import getSampleStyleSheet
from utils import fetch_salesforce_data, detect_anomalies, generate_pdf_report
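# Assumed helper signatures from utils.py (not shown in this file), inferred from the call sites below:
#   fetch_salesforce_data(sf, soql)  -> list of record dicts returned by the SOQL query
#   detect_anomalies(text, detector) -> label string such as "POSITIVE" / "NEGATIVE"
#   generate_pdf_report(df, lab_site, equipment_type, date_range) -> path to the generated PDF file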
# Streamlit app configuration
st.set_page_config(page_title="LabOps Dashboard", layout="wide")
# Salesforce authentication (credentials are loaded from Streamlit secrets)
sf = Salesforce(
    username=st.secrets["sf_username"],
    password=st.secrets["sf_password"],
    security_token=st.secrets["sf_security_token"]
)
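# Note: sf_username, sf_password, and sf_security_token are assumed to be defined in
# .streamlit/secrets.toml or in the hosting platform's secrets settings.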
# Initialize Hugging Face anomaly detection pipeline
anomaly_detector = pipeline("text-classification", model="bert-base-uncased", tokenizer="bert-base-uncased")
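# Note: bert-base-uncased is a base checkpoint without a fine-tuned classification head,
# so the labels it produces here are effectively untrained; a task-specific fine-tuned
# model would normally be substituted for real anomaly detection.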
def main():
    st.title("Multi-Device LabOps Dashboard")
    # Filters
    col1, col2, col3 = st.columns(3)
    with col1:
        lab_site = st.selectbox("Select Lab Site", ["All", "Lab1", "Lab2", "Lab3"])
    with col2:
        equipment_type = st.selectbox("Equipment Type", ["All", "Cell Analyzer", "Weight Log", "UV Verification"])
    with col3:
        date_range = st.date_input("Date Range", [datetime.now() - timedelta(days=7), datetime.now()])
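    # Note: st.date_input can return a single date while a range is still being picked;
    # the query below assumes both a start and an end date have been selected.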
    # Fetch data from Salesforce
    # Assumes Log_Timestamp__c is a DateTime field, so the SOQL filter uses unquoted
    # ISO 8601 literals; a plain Date field would use bare YYYY-MM-DD values instead.
    query = f"""
        SELECT Equipment__c, Log_Timestamp__c, Status__c, Usage_Count__c
        FROM SmartLog__c
        WHERE Log_Timestamp__c >= {date_range[0].strftime('%Y-%m-%dT00:00:00Z')}
        AND Log_Timestamp__c <= {date_range[1].strftime('%Y-%m-%dT23:59:59Z')}
    """
    if lab_site != "All":
        query += f" AND Lab__c = '{lab_site}'"
    if equipment_type != "All":
        query += f" AND Equipment_Type__c = '{equipment_type}'"
    data = fetch_salesforce_data(sf, query)
    df = pd.DataFrame(data)
    if df.empty:
        st.warning("No data available for the selected filters.")
        return
    # Detect anomalies using Hugging Face
    df["Anomaly"] = df["Status__c"].apply(lambda x: detect_anomalies(x, anomaly_detector))
    # Device Cards
    st.subheader("Device Status")
    for equipment in df["Equipment__c"].unique():
        device_data = df[df["Equipment__c"] == equipment]
        # Most recent log for this device (assumes records arrive in timestamp order)
        latest_log = device_data.iloc[-1]
        anomaly = "⚠️ Anomaly" if latest_log["Anomaly"] == "POSITIVE" else "✅ Normal"
        st.markdown(f"""
        **{equipment}** | Health: {latest_log['Status__c']} | Usage: {latest_log['Usage_Count__c']} | Last Log: {latest_log['Log_Timestamp__c']} | {anomaly}
        """)
    # Usage Chart
    st.subheader("Usage Trends")
    fig = px.line(df, x="Log_Timestamp__c", y="Usage_Count__c", color="Equipment__c", title="Daily Usage Trends")
    st.plotly_chart(fig, use_container_width=True)
    # Downtime Chart
    downtime_df = df[df["Status__c"] == "Down"]
    if not downtime_df.empty:
        fig_downtime = px.histogram(downtime_df, x="Log_Timestamp__c", color="Equipment__c", title="Downtime Patterns")
        st.plotly_chart(fig_downtime, use_container_width=True)
    # AMC Reminders
    st.subheader("AMC Reminders")
    # NEXT_N_DAYS:14 spans the next 14 days; using <= also matches dates already in the
    # past, so overdue contracts appear in the reminders as well.
    amc_query = "SELECT Equipment__c, AMC_Expiry_Date__c FROM Equipment__c WHERE AMC_Expiry_Date__c <= NEXT_N_DAYS:14"
    amc_data = fetch_salesforce_data(sf, amc_query)
    for record in amc_data:
        st.write(f"Equipment {record['Equipment__c']} - AMC Expiry: {record['AMC_Expiry_Date__c']}")
    # Export PDF
    if st.button("Export PDF Report"):
        pdf_file = generate_pdf_report(df, lab_site, equipment_type, date_range)
        with open(pdf_file, "rb") as f:
            st.download_button("Download PDF", f, file_name="LabOps_Report.pdf")
if __name__ == "__main__":
    main()