MasterGuda committed on
Commit
1b50b80
·
verified ·
1 Parent(s): d3c0b61

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +73 -73
app.py CHANGED
@@ -1,73 +1,73 @@
1
- import gradio as gr
2
- import json
3
- from openai import OpenAI
4
-
5
-
6
- def stream_chat(
7
- message,
8
- history: list[tuple[str, str]],
9
- system_message,
10
- max_tokens,
11
- temperature,
12
- top_p,
13
- base_url,
14
- api_key,
15
- model_name,
16
- ):
17
-
18
- messages = [{"role": "system", "content": system_message}]
19
-
20
- for val in history:
21
- if val[0]:
22
- messages.append({"role": "user", "content": val[0]})
23
- if val[1]:
24
- messages.append({"role": "assistant", "content": val[1]})
25
-
26
- messages.append({"role": "user", "content": message})
27
-
28
- reply = ""
29
-
30
- client = OpenAI(
31
- base_url=base_url,
32
- api_key=api_key
33
- )
34
-
35
- # 发送带有流式输出的请求
36
- for chunk in client.chat.completions.create(
37
- model=model_name,
38
- messages=messages,
39
- max_tokens=max_tokens,
40
- temperature=temperature,
41
- top_p=top_p,
42
- stream=True # 启用流式输出
43
- ):
44
- chunk_message = chunk.choices[0].delta.content
45
- if chunk_message is not None:
46
- reply += chunk_message
47
- else:
48
- reply += ""
49
- yield reply
50
-
51
-
52
- chatapp = gr.ChatInterface(
53
- stream_chat,
54
- additional_inputs=[
55
- gr.Textbox(value="你是一个乐于助人的AI助手.", label="System message"),
56
- gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
57
- gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
58
- gr.Slider(
59
- minimum=0.1,
60
- maximum=1.0,
61
- value=0.95,
62
- step=0.05,
63
- label="Top-p (nucleus sampling)",
64
- ),
65
- gr.Textbox(value="", label="基础URL", type="text"),
66
- gr.Textbox(value="", label="API Key", type="password"),
67
- gr.Textbox(value="", label="模型名称", type="text"),
68
- ]
69
- )
70
-
71
- if __name__ == "__main__":
72
- chatapp.launch()
73
-
 
1
+ import gradio as gr
2
+ import json
3
+ from openai import OpenAI
4
+
5
+
6
def stream_chat(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
    base_url,
    api_key,
    model_name,
):
    """Stream a chat completion from an OpenAI-compatible API endpoint.

    Args:
        message: The latest user message.
        history: Prior (user, assistant) turn pairs from the chat UI.
        system_message: System prompt placed at the head of the conversation.
        max_tokens: Maximum number of new tokens to generate.
        temperature: Sampling temperature.
        top_p: Nucleus-sampling probability mass.
        base_url: Base URL of the OpenAI-compatible server.
        api_key: API key for the server.
        model_name: Model identifier to request.

    Yields:
        The accumulated assistant reply after each streamed chunk, so the
        UI can render the response incrementally.
    """
    messages = [{"role": "system", "content": system_message}]

    # Replay prior turns so the model sees the full conversation context.
    # Empty entries (e.g. a pending assistant slot) are skipped.
    for user_turn, assistant_turn in history:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})

    messages.append({"role": "user", "content": message})

    client = OpenAI(
        base_url=base_url,
        api_key=api_key,
    )

    reply = ""

    # Send the request with streaming enabled and yield the growing reply.
    for chunk in client.chat.completions.create(
        model=model_name,
        messages=messages,
        max_tokens=max_tokens,
        temperature=temperature,
        top_p=top_p,
        stream=True,  # enable server-side streaming
    ):
        # delta.content may be None on role/finish chunks; skip those
        # instead of the original's dead `else: reply += ""` branch.
        delta = chunk.choices[0].delta.content
        if delta:
            reply += delta
        yield reply
50
+
51
+
52
# Extra controls shown below the chat box; their values are forwarded to
# stream_chat after (message, history) in the same order as listed here.
_additional_inputs = [
    gr.Textbox(value="你是一个乐于助人的AI助手.", label="System message"),
    gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
    gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
    gr.Slider(
        minimum=0.1,
        maximum=1.0,
        value=0.95,
        step=0.05,
        label="Top-p (nucleus sampling)",
    ),
    gr.Textbox(value="", label="基础URL", type="text"),
    gr.Textbox(value="", label="API Key", type="password"),
    gr.Textbox(value="", label="模型名称", type="text"),
]

# Chat UI wired to the streaming handler above.
chatapp = gr.ChatInterface(
    stream_chat,
    additional_inputs=_additional_inputs,
)
70
+
71
if __name__ == "__main__":
    # Run the app directly; share=True also exposes a temporary public URL.
    chatapp.launch(share=True)
73
+