sabahat-shakeel committed on
Commit ef3db06 · verified · 1 Parent(s): d0c0025

Create app.py

Files changed (1)
  1. app.py +53 -0
app.py ADDED
@@ -0,0 +1,53 @@
+ import streamlit as st
+ from transformers import AutoModelForCausalLM, AutoTokenizer
+
+ # Load the model and tokenizer once and cache them across Streamlit reruns
+ @st.cache_resource
+ def load_model_and_tokenizer():
+     model_name = "TheBloke/Mistral-7B-Instruct-v0.2-GPTQ"
+     model = AutoModelForCausalLM.from_pretrained(
+         model_name,
+         device_map="auto",
+         trust_remote_code=False,
+         revision="main"
+     )
+     tokenizer = AutoTokenizer.from_pretrained(model_name, use_fast=True)
+     return model, tokenizer
+
+ model, tokenizer = load_model_and_tokenizer()
+
+ # Build the [INST] ... [/INST] prompt format expected by Mistral-Instruct
+ def generate_prompt(comment):
+     instructions = """Virtual Psychologist communicates with empathy and understanding, focusing on mental health support and providing advice within its expertise. \
+ It actively listens, acknowledges emotions, and avoids overly clinical or technical language unless specifically requested. \
+ It reacts to feedback with warmth and adjusts its tone to match the individual's needs, offering encouragement and validation as appropriate. \
+ Responses are tailored in length and tone to ensure a supportive and conversational experience.
+ """
+     return f"[INST] {instructions} \n{comment} \n[/INST]"
+
+ # Generate a model response for the user's comment
+ def get_response(comment):
+     prompt = generate_prompt(comment)
+     inputs = tokenizer(prompt, return_tensors="pt", truncation=True)  # no padding needed for a single prompt
+     outputs = model.generate(
+         input_ids=inputs["input_ids"].to(model.device),  # send inputs to the device chosen by device_map
+         attention_mask=inputs["attention_mask"].to(model.device),
+         max_new_tokens=140,
+         pad_token_id=tokenizer.eos_token_id
+     )
+     response = tokenizer.decode(outputs[0], skip_special_tokens=True)
+     return response.split("[/INST]")[-1].strip()
+
+ # Streamlit app layout
+ st.title("Virtual Psychologist")
+ st.markdown("This virtual psychologist offers empathetic responses to your comments or questions. Enter your message below.")
+
+ user_input = st.text_input("Your Comment/Question:", placeholder="Type here...")
+
+ if user_input:
+     with st.spinner("Generating response..."):
+         response = get_response(user_input)
+     st.write("### Response:")
+     st.write(response)
+
+ st.markdown("Built with ❤️ using [Hugging Face Transformers](https://huggingface.co/transformers/) and [Streamlit](https://streamlit.io/).")
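For reference, a minimal sketch of how the committed get_response helper could be exercised outside the Streamlit UI, assuming the environment has the GPTQ loading dependencies (e.g. optimum/auto-gptq) and a GPU available; the example prompt is purely illustrative and not part of the commit:

# Hypothetical local smoke test; importing app.py also executes its Streamlit
# calls, which only emit "bare mode" warnings when no Streamlit server is running.
from app import get_response

print(get_response("I've been feeling anxious about work lately."))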