Spaces: Build error
import os

import streamlit as st
from PIL import Image
from transformers import pipeline

@st.cache_resource
def load_pipeline():
    """Load the Hugging Face image-classification pipeline (cached across Streamlit reruns)."""
    try:
        # The first call downloads the model weights from the Hugging Face Hub.
        return pipeline("image-classification", model="dima806/pneumonia_chest_xray_image_detection")
    except Exception as e:
        st.error(f"Error loading pipeline: {e}")
        return None

def classify_image_with_pipeline(pipe, image):
    """Classify an image using the pipeline."""
    try:
        results = pipe(image)
        return results
    except Exception as e:
        st.error(f"Error classifying image: {e}")
        return None

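# Note: the image-classification pipeline returns a list of {"label": ..., "score": ...}
# dicts, sorted by descending score, e.g. (illustrative values only; the actual label
# names come from the model's config):
# [{"label": "PNEUMONIA", "score": 0.97}, {"label": "NORMAL", "score": 0.03}]
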
# Streamlit App
st.title("Pneumonia Chest X-ray Image Detection")
st.markdown(
    """
    This app detects signs of pneumonia in chest X-ray images using a pre-trained Hugging Face model.
    """
)

# File uploader
uploaded_file = st.file_uploader("Upload a chest X-ray image", type=["jpg", "jpeg", "png"])

if uploaded_file:
    # Convert to RGB so grayscale X-rays have the 3 channels the model expects.
    image = Image.open(uploaded_file).convert("RGB")
    st.image(image, caption="Uploaded Chest X-ray", use_column_width=True)

    # Load the (cached) model pipeline
    pipe = load_pipeline()
    if pipe:
        st.write("Classifying the image...")
        results = classify_image_with_pipeline(pipe, image)
        if results:
            st.write("### Classification Results:")
            for result in results:
                st.write(f"**Label:** {result['label']} | **Score:** {result['score']:.4f}")
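            # Optional sketch: results come back sorted by score, so results[0] is
            # the top prediction; st.success just highlights it more prominently.
            top_prediction = results[0]
            st.success(f"Top prediction: {top_prediction['label']} ({top_prediction['score']:.2%})")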

# Optional: Add Groq API integration if applicable
if os.getenv("GROQ_API_KEY"):
    from groq import Groq

    client = Groq(api_key=os.environ.get("GROQ_API_KEY"))
    st.sidebar.markdown("### Groq API Integration")
    question = st.sidebar.text_input("Ask a question about pneumonia or X-ray diagnosis:")
    if question:
        try:
            chat_completion = client.chat.completions.create(
                messages=[
                    {
                        "role": "user",
                        "content": question,
                    }
                ],
                model="llama-3.3-70b-versatile",
            )
            st.sidebar.write("**Groq API Response:**")
            st.sidebar.write(chat_completion.choices[0].message.content)
        except Exception as e:
            st.sidebar.error(f"Error using Groq API: {e}")
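
The Space status above shows a build error. The build log is not included here, but a common cause on Spaces is a missing or incomplete requirements.txt; assuming that is the issue (an assumption, not something this page confirms), a minimal manifest covering the imports in this app would look roughly like:

# requirements.txt (sketch; pin versions as needed)
streamlit
transformers
torch
Pillow
groq

torch is listed on the assumption that it is the backend used by the transformers pipeline, and groq is only needed if the optional sidebar integration is enabled. Locally, the app starts with streamlit run app.py.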