import gradio as gr
from huggingface_hub import InferenceClient

"""
For more information on huggingface_hub Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
"""
client = InferenceClient("Qwen/QwQ-32B")


def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
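    """Rebuild the OpenAI-style message list from the chat history and stream the reply.

    `history` is a list of (user, assistant) tuples supplied by gr.ChatInterface;
    the function yields the accumulated response so the UI updates as tokens arrive.
    """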
    messages = [{"role": "system", "content": system_message}]

    for val in history:
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
        if val[1]:
            messages.append({"role": "assistant", "content": val[1]})

    messages.append({"role": "user", "content": message})

    response = ""

    # Stream the completion chunk by chunk; use a separate loop variable so it
    # does not shadow the `message` parameter above.
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content

        # Some streamed chunks carry no content (e.g. role-only deltas); skip them.
        if token:
            response += token
        yield response


# Define custom CSS for Metro Design with a white background and red accents
metro_design_css = """
* {
    margin: 0;
    padding: 0;
    box-sizing: border-box;
    font-family: 'Segoe UI', Tahoma, Geneva, Verdana, sans-serif;
}

.gradio-container {
    background-color: #ffffff; /* White background */
    padding: 20px;
    border-radius: 8px;
    box-shadow: 0 0 10px rgba(0, 0, 0, 0.1); /* Soft shadow */
}

.gradio-button {
    background-color: #D32F2F; /* Metro red color */
    color: white;
    border: none;
    border-radius: 5px;
    padding: 12px 25px;
    font-size: 16px;
    cursor: pointer;
    transition: background-color 0.3s ease;
}

.gradio-button:hover {
    background-color: #C62828; /* Darker red */
}

.gradio-slider {
    background-color: #f1f1f1;
    border-radius: 8px;
    border: 1px solid #ddd;
    padding: 10px;
}

.gradio-textbox {
    background-color: #ffffff;
    border: 1px solid #ddd;
    border-radius: 8px;
    padding: 10px;
    font-size: 14px;
    width: 100%;
    transition: border-color 0.3s ease;
}

.gradio-textbox:focus {
    border-color: #D32F2F; /* Red color */
}

.gradio-label {
    font-size: 14px;
    font-weight: bold;
    color: #333;
}

.gradio-interface .gradio-input-container {
    margin-bottom: 20px;
}

.gradio-chat {
    background-color: #ffffff; /* White background for chat messages */
    border: 1px solid #ddd;
    border-radius: 8px;
    padding: 15px;
}

.gradio-chat .gradio-message {
    background-color: #f1f1f1; /* Light gray for chat messages */
    border-radius: 8px;
    padding: 10px;
    margin-bottom: 10px;
}

.gradio-chat .gradio-user-message {
    background-color: #D32F2F; /* Red color for user messages */
    color: white;
}

.gradio-chat .gradio-assistant-message {
    background-color: #eeeeee; /* Lighter color for assistant's messages */
}
"""

# Build the Gradio chat interface and apply the custom CSS
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="Здравствуйте. Отвечай кратко(не пиши вступление, умозаключения итп) и сразу пиши начинай с этого ответ: Предварительный диагноз:(диагноз), Операция: (Если требуется, только название), Лечение: (Кратко, очень). Не пиши воду. Только по факту на 3 пункта отвечай. Не отходи от этого шаблона", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-p (nucleus sampling)",
        ),
    ],
    css=metro_design_css,  # Applying the custom CSS
)
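# gr.ChatInterface renders generator output incrementally; on older Gradio versions
# you may need to enable queuing explicitly (demo.queue()) for streaming to work.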


if __name__ == "__main__":
    demo.launch()