# Streamlit app: mental-health counseling chat backed by a local GGUF Llama model.
import streamlit as st
from transformers import AutoTokenizer
from llama_cpp import Llama
#from transformers import pipeline
#from peft import PeftModel, PeftConfig
from transformers import AutoModelForCausalLM
from datasets import load_dataset
# Direct URL of the background artwork rendered behind the app.
flower_image_url = "https://i.postimg.cc/hG2FG85D/2.png"

# CSS that pins a blurred, slightly transparent copy of the image behind
# every Streamlit widget. The fixed, z-index:-1 div painted below is the
# element these rules target.
background_css = f"""
<style>
/* Container for background */
html, body {{
    margin: 0;
    padding: 0;
    overflow: hidden;
}}
[data-testid="stAppViewContainer"] {{
    position: relative;
    z-index: 1; /* Ensure UI elements are above the background */
}}
/* Blurred background image */
.blurred-background {{
    position: fixed;
    top: 0;
    left: 0;
    width: 100%;
    height: 100%;
    z-index: -1; /* Send background image behind all UI elements */
    background-image: url("{flower_image_url}");
    background-size: cover;
    background-position: center;
    filter: blur(10px); /* Adjust blur ratio here */
    opacity: 0.8; /* Optional: Add slight transparency for a subtle effect */
}}
</style>
"""

# Inject the stylesheet, then the div it decorates.
st.markdown(background_css, unsafe_allow_html=True)
st.markdown('<div class="blurred-background"></div>', unsafe_allow_html=True)
# Filesystem path of the GGUF model file (placeholder — point at the real file).
MODEL_PATH = "/path/to/QuantFactory/Mental-Health-FineTuned-Mistral-7B-Instruct-v0.2-GGUF"


@st.cache_resource
def load_llama_model():
    """Load the GGUF chat model once per session.

    Returns the Llama instance, or None (after surfacing the error in the
    UI) when the model cannot be loaded.
    """
    try:
        # n_threads controls CPU parallelism; tune it for the host machine.
        model = Llama(model_path=MODEL_PATH, n_threads=8)
    except Exception as exc:
        st.error(f"Error loading model: {exc}")
        return None
    return model


llama_model = load_llama_model()
@st.cache_resource
def load_counseling_dataset():
    """Download and cache the counseling Q&A dataset shown as examples."""
    return load_dataset("Amod/mental_health_counseling_conversations")


dataset = load_counseling_dataset()
# --- Streamlit page body -------------------------------------------------
st.title("Mental Health Counseling Chat")
st.markdown("""
Welcome to the Mental Health Counseling Chat application.
This platform is designed to provide supportive, positive, and encouraging responses based on mental health counseling expertise.
""")


def _example_field(example, *keys, default="N/A"):
    """Return the first present key's value from a dataset row.

    NOTE(review): the Amod dataset appears to use capitalized column names
    ("Context"/"Response") on the Hub — the original lowercase-only lookup
    would always fall back to "N/A". Trying both spellings keeps the old
    fallback while fixing the display; confirm against the dataset card.
    """
    for key in keys:
        if key in example:
            return example[key]
    return default


# Bail out early if the model failed to load; nothing below can work.
if llama_model is None:
    st.error("The text generation model could not be loaded. Please check the model path and configuration.")
else:
    # Optional peek at the dataset for context/inspiration.
    if st.checkbox("Show Example Questions and Answers from Dataset"):
        # Fixed seed so the same 3 samples are shown across reruns.
        sample = dataset["train"].shuffle(seed=42).select(range(3))
        for example in sample:
            st.markdown(f"**Question:** {_example_field(example, 'context', 'Context')}")
            st.markdown(f"**Answer:** {_example_field(example, 'response', 'Response')}")
            st.markdown("---")

    # User input for mental health concerns
    user_input = st.text_area("Your question or concern:", placeholder="Type here...")

    if st.button("Get Supportive Response"):
        # Use the stripped text consistently (the original tested the
        # stripped value but prompted with the raw one).
        question = user_input.strip()
        if question:
            try:
                # Single-turn prompt; generation stops at a newline or a
                # new "User:" turn.
                prompt = f"User: {question}\nCounselor:"
                response = llama_model(prompt, max_tokens=200, stop=["\n", "User:"])
                st.subheader("Counselor's Response:")
                st.write(response["choices"][0]["text"].strip())
            except Exception as e:
                st.error(f"An error occurred while generating the response: {e}")
        else:
            st.error("Please enter a question or concern to receive a response.")

# Sidebar resources
st.sidebar.header("Additional Mental Health Resources")
st.sidebar.markdown("""
- [Mental Health Foundation](https://www.mentalhealth.org)
- [Mind](https://www.mind.org.uk)
- [National Suicide Prevention Lifeline](https://suicidepreventionlifeline.org)
""")
st.sidebar.info("This application is not a replacement for professional counseling. If you are in crisis, please seek professional help immediately.")