hashirehtisham committed on
Commit 71f0663 · verified · 1 Parent(s): bf9d890

Delete app.py

Files changed (1)
app.py +0 -77
app.py DELETED
@@ -1,77 +0,0 @@
- import streamlit as st
- from huggingface_hub import InferenceClient
-
- """
- Developed by Hashir Ehtisham
- """
- client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
-
- def respond(
-     message,
-     history: list[tuple[str, str]],
-     system_message,
-     max_tokens,
-     temperature,
-     top_p,
- ):
-     messages = [{"role": "system", "content": system_message}]
-
-     for val in history:
-         if val[0]:
-             messages.append({"role": "user", "content": val[0]})
-         if val[1]:
-             messages.append({"role": "assistant", "content": val[1]})
-
-     messages.append({"role": "user", "content": message})
-
-     response = ""
-
-     for message in client.chat_completion(
-         messages,
-         max_tokens=max_tokens,
-         stream=True,
-         temperature=temperature,
-         top_p=top_p,
-     ):
-         token = message.choices[0].delta.content
-         response += token
-
-     return response
-
- def send_message():
-     message = st.session_state["new_message"]
-     if message:
-         st.session_state.history.append((message, ""))
-         response = respond(
-             message=message,
-             history=st.session_state.history,
-             system_message=st.session_state.system_message,
-             max_tokens=st.session_state.max_tokens,
-             temperature=st.session_state.temperature,
-             top_p=st.session_state.top_p,
-         )
-         st.session_state.history[-1] = (message, response)
-         st.session_state["new_message"] = ""
-
- # Streamlit UI
- st.title("Emotional Support Chatbot")
- st.write("Hello! I'm here to support you emotionally and answer any questions. How are you feeling today?")
-
- system_message = st.text_input("System message", value="You are a friendly Emotional Support Chatbot.", key="system_message")
-
- with st.expander("Settings"):
-     max_tokens_options = [64, 128, 256, 512, 1024, 2048]
-     temperature_options = [0.1, 0.3, 0.5, 0.7, 1.0, 1.5, 2.0, 3.0, 4.0]
-     top_p_options = [0.1, 0.2, 0.3, 0.5, 0.7, 0.8, 0.9, 0.95, 1.0]
-
-     max_tokens = st.selectbox("Max new tokens", options=max_tokens_options, index=max_tokens_options.index(512), key="max_tokens")
-     temperature = st.selectbox("Temperature", options=temperature_options, index=temperature_options.index(0.7), key="temperature")
-     top_p = st.selectbox("Top-p (nucleus sampling)", options=top_p_options, index=top_p_options.index(0.95), key="top_p")
-
- if 'history' not in st.session_state:
-     st.session_state.history = []
-
- # Display chat history above the message input
- st.text_area("Chat History", value="\n".join([f"User: {h[0]}\nBot: {h[1]}" for h in st.session_state.history]), height=400, key="chat_history")
-
- message = st.text_input("Your message", key="new_message", on_change=send_message)
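
Note on the deleted respond() helper: it streams chunks from InferenceClient.chat_completion and concatenates each delta into a single string. Below is a minimal, self-contained sketch of that pattern, assuming the same Zephyr endpoint; the "if token" guard is an added assumption that some streamed chunks (for example the final one carrying only a finish reason) may have no content, which the deleted code did not check.

from huggingface_hub import InferenceClient

client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")

def stream_reply(messages, max_tokens=512, temperature=0.7, top_p=0.95):
    # Accumulate the streamed completion into one string, as respond() did.
    reply = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        if token:  # assumed guard: a chunk may carry no content
            reply += token
    return reply

if __name__ == "__main__":
    history = [
        {"role": "system", "content": "You are a friendly Emotional Support Chatbot."},
        {"role": "user", "content": "I had a rough day."},
    ]
    print(stream_reply(history))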
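
Note on the deleted UI wiring: the message box is created with key "new_message" and on_change=send_message, so submitting the field runs the callback before the script reruns; the callback reads the typed text from st.session_state, records the exchange in the history, and clears the widget. Rewriting a widget's session_state entry is generally only permitted inside such a callback. The stripped-down sketch below shows that pattern, with echo_reply as a hypothetical stand-in for the real model call.

import streamlit as st

if "history" not in st.session_state:
    st.session_state.history = []

def echo_reply(text):
    # Hypothetical stand-in for the deleted respond() call.
    return f"I hear you: {text}"

def send_message():
    # Runs on submit, before the next rerun, so the widget value can be reset here.
    text = st.session_state["new_message"]
    if text:
        st.session_state.history.append((text, echo_reply(text)))
        st.session_state["new_message"] = ""  # clear the input box

st.title("Emotional Support Chatbot")
st.text_area(
    "Chat History",
    value="\n".join(f"User: {u}\nBot: {b}" for u, b in st.session_state.history),
    height=400,
)
st.text_input("Your message", key="new_message", on_change=send_message)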