# app.py

import streamlit as st
from models import demo_qwen, demo_r1, demo_zero
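
# `models.py` is assumed to expose three ready-made Gradio demos, one per
# DeepSeek model. A minimal sketch of what it might contain (the gr.load()
# paths are assumptions, not confirmed here):
#
#   import gradio as gr
#   demo_qwen = gr.load("models/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B")
#   demo_r1   = gr.load("models/deepseek-ai/DeepSeek-R1")
#   demo_zero = gr.load("models/deepseek-ai/DeepSeek-R1-Zero")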

st.set_page_config(page_title="DeepSeek Chatbot", layout="centered")

# A helper function to pick the correct Gradio interface
def select_demo(model_name):
    demos = {
        "DeepSeek-R1-Distill-Qwen-32B": demo_qwen,
        "DeepSeek-R1": demo_r1,
        "DeepSeek-R1-Zero": demo_zero,
    }
    # Fall back to the distilled Qwen demo for unknown names
    return demos.get(model_name, demo_qwen)

# Title of the Streamlit app
st.title("DeepSeek Chatbot")

# Parameter selection (these controls could also be moved into st.sidebar)
st.subheader("Model and Parameters")

model_name = st.selectbox(
    "Select Model",
    ["DeepSeek-R1-Distill-Qwen-32B", "DeepSeek-R1", "DeepSeek-R1-Zero"]
)

# Optional parameter: System message
system_message = st.text_area(
    "System Message",
    value="You are a friendly Chatbot created by ruslanmv.com",
    height=80
)

# Optional parameter: max new tokens
max_new_tokens = st.slider(
    "Max new tokens",
    min_value=1,
    max_value=4000,
    value=512,
    step=1
)

# Optional parameter: temperature
temperature = st.slider(
    "Temperature",
    min_value=0.10,
    max_value=4.00,
    value=0.80,
    step=0.05
)

# Optional parameter: top-p
top_p = st.slider(
    "Top-p (nucleus sampling)",
    min_value=0.10,
    max_value=1.00,
    value=0.90,
    step=0.05
)

# A text area for user input
st.subheader("Chat")
user_prompt = st.text_area("Your message:", value="", height=100)

if "chat_history" not in st.session_state:
    st.session_state["chat_history"] = []

# Button to send user prompt
if st.button("Send"):
    if user_prompt.strip():
        # Retrieve the correct Gradio demo
        demo = select_demo(model_name)
        
        # The call below assumes `demo.predict()` accepts a text prompt plus
        # generation parameters. The exact signature depends entirely on how
        # the interface is defined in models.py. For example:
        #
        #   - Multiple ordered inputs:
        #       response = demo.predict(system_message, user_prompt, max_new_tokens, temperature, top_p)
        #   - A single string input:
        #       combined_prompt = f"System: {system_message}\nUser: {user_prompt}"
        #       response = demo.predict(combined_prompt, max_new_tokens, temperature, top_p)
        #
        # For illustration, assume the simple signature
        #   demo.predict(prompt: str, max_new_tokens: int, temperature: float, top_p: float)
        # and prepend the system message to the prompt:

        combined_prompt = f"{system_message}\n\nUser: {user_prompt}"
        
        try:
            response = demo.predict(
                combined_prompt,
                max_new_tokens,
                temperature,
                top_p
            )
        except Exception as e:
            response = f"Error: {e}"
        
        st.session_state["chat_history"].append(("User", user_prompt))
        st.session_state["chat_history"].append(("Assistant", response))
        st.rerun()  # refresh the page so the new messages render immediately

# Display conversation (iterating an empty history is simply a no-op)
for role, text in st.session_state["chat_history"]:
    st.markdown(f"**{role}:** {text}")
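
# To try the app locally (assuming Streamlit and the model dependencies
# are installed):
#
#   streamlit run app.py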