NeverlandPeter committed
Commit bc0729f · 1 Parent(s): 389b14a
Files changed (1)
  1. app.py +32 -18
app.py CHANGED
@@ -6,6 +6,7 @@ from pynvml import *
 nvmlInit()
 gpu_h = nvmlDeviceGetHandleByIndex(0)
 ctx_limit = 1024
+gen_limit = 500
 title = "RWKV-x060-World-7B-v2.1-20240507-ctx4096"
 
 os.environ["RWKV_JIT_ON"] = '1'
@@ -17,14 +18,27 @@ model = RWKV(model=model_path, strategy='cuda fp16i8 *8 -> cuda fp16')
 from rwkv.utils import PIPELINE, PIPELINE_ARGS
 pipeline = PIPELINE(model, "rwkv_vocab_v20230424")
 
-def generate_prompt(instruction):
-    instruction = instruction.strip().replace('\r\n','\n')
-    instruction = re.sub(r'\n+', '\n', instruction)
-    return f"User: {instruction}\n\nAssistant:"""
+def generate_prompt(instruction, input=""):
+    instruction = instruction.strip().replace('\r\n','\n').replace('\n\n','\n')
+    input = input.strip().replace('\r\n','\n').replace('\n\n','\n')
+    if input:
+        return f"""Instruction: {instruction}
+
+Input: {input}
+
+Response:"""
+    else:
+        return f"""User: hi
+
+Assistant: Hi. I am your assistant and I will provide expert full response in full details. Please feel free to ask any question and I will always answer it.
+
+User: {instruction}
+
+Assistant:"""
 
 def evaluate(
     ctx,
-    token_count=500,
+    token_count=gen_limit,
     temperature=1.0,
     top_p=0.3,
     presencePenalty = 0.3,
@@ -73,27 +87,27 @@ def evaluate(
     yield out_str.strip()
 
 examples = [
-    ["Assistant: How can we craft an engaging story featuring vampires on Mars? Let's think step by step and provide an expert response.", 500, 1, 0.3, 0, 1],
-    ["Assistant: How can we persuade Elon Musk to follow you on Twitter? Let's think step by step and provide an expert response.", 500, 1, 0.3, 0, 1],
-    [generate_prompt("東京で訪れるべき素晴らしい場所とその紹介をいくつか挙げてください。"), 500, 1, 0.3, 0, 1],
-    [generate_prompt("Write a story using the following information.", "A man named Alex chops a tree down."), 500, 1, 0.3, 0, 1],
-    ["A few light taps upon the pane made her turn to the window. It had begun to snow again.", 500, 1, 0.3, 0, 1],
+    ["Assistant: How can we craft an engaging story featuring vampires on Mars? Let's think step by step and provide an expert response.", gen_limit, 1, 0.3, 0, 1],
+    ["Assistant: How can we persuade Elon Musk to follow you on Twitter? Let's think step by step and provide an expert response.", gen_limit, 1, 0.3, 0, 1],
+    [generate_prompt("東京で訪れるべき素晴らしい場所とその紹介をいくつか挙げてください。"), gen_limit, 1, 0.3, 0, 1],
+    [generate_prompt("Write a story using the following information.", "A man named Alex chops a tree down."), gen_limit, 1, 0.3, 0, 1],
+    ["A few light taps upon the pane made her turn to the window. It had begun to snow again.", gen_limit, 1, 0.3, 0, 1],
     ['''Edward: I am Edward Elric from Fullmetal Alchemist.
 
 User: Hello Edward. What have you been up to recently?
 
-Edward:''', 500, 1, 0.3, 0, 1],
+Edward:''', gen_limit, 1, 0.3, 0, 1],
     ['''Japanese: 春の初め、桜の花が満開になる頃、小さな町の片隅にある古びた神社の境内は、特別な雰囲気に包まれていた。
 
-English:''', 500, 1, 0.3, 0, 1],
-    ["En una pequeña aldea escondida entre las montañas de Andalucía, donde las calles aún conservaban el eco de antiguas leyendas, vivía un joven llamado Alejandro.", 500, 1, 0.3, 0, 1],
-    ["Dans le cœur battant de Paris, sous le ciel teinté d'un crépuscule d'or et de pourpre, se tenait une petite librairie oubliée par le temps.", 500, 1, 0.3, 0, 1],
-    ["في تطور مذهل وغير مسبوق، أعلنت السلطات المحلية في العاصمة عن اكتشاف أثري قد يغير مجرى التاريخ كما نعرفه.", 500, 1, 0.3, 0, 1],
+English:''', gen_limit, 1, 0.3, 0, 1],
+    ["En una pequeña aldea escondida entre las montañas de Andalucía, donde las calles aún conservaban el eco de antiguas leyendas, vivía un joven llamado Alejandro.", gen_limit, 1, 0.3, 0, 1],
+    ["Dans le cœur battant de Paris, sous le ciel teinté d'un crépuscule d'or et de pourpre, se tenait une petite librairie oubliée par le temps.", gen_limit, 1, 0.3, 0, 1],
+    ["في تطور مذهل وغير مسبوق، أعلنت السلطات المحلية في العاصمة عن اكتشاف أثري قد يغير مجرى التاريخ كما نعرفه.", gen_limit, 1, 0.3, 0, 1],
     ['''“当然可以,大宇宙不会因为这五公斤就不坍缩了。”关一帆说,他还有一个没说出来的想法:也许大宇宙真的会因为相差一个原子的质量而由封闭转为开放。大自然的精巧有时超出想象,比如生命的诞生,就需要各项宇宙参数在几亿亿分之一精度上的精确配合。但程心仍然可以留下她的生态球,因为在那无数文明创造的无数小宇宙中,肯定有相当一部分不响应回归运动的号召,所以,大宇宙最终被夺走的质量至少有几亿吨,甚至可能是几亿亿亿吨。
 但愿大宇宙能够忽略这个误差。
 程心和关一帆进入了飞船,智子最后也进来了。她早就不再穿那身华丽的和服了,她现在身着迷彩服,再次成为一名轻捷精悍的战士,她的身上佩带着许多武器和生存装备,最引人注目的是那把插在背后的武士刀。
 “放心,我在,你们就在!”智子对两位人类朋友说。
-聚变发动机启动了,推进器发出幽幽的蓝光,''', 500, 1, 0.3, 0, 1],
+聚变发动机启动了,推进器发出幽幽的蓝光,''', gen_limit, 1, 0.3, 0, 1],
 ]
 
 ##########################################################################
@@ -105,7 +119,7 @@ with gr.Blocks(title=title) as demo:
     with gr.Row():
         with gr.Column():
             prompt = gr.Textbox(lines=2, label="Prompt", value="Assistant: How can we craft an engaging story featuring vampires on Mars? Let's think step by step and provide an expert response.")
-            token_count = gr.Slider(10, 500, label="Max Tokens", step=10, value=500)
+            token_count = gr.Slider(10, gen_limit, label="Max Tokens", step=10, value=gen_limit)
            temperature = gr.Slider(0.2, 2.0, label="Temperature", step=0.1, value=1.0)
            top_p = gr.Slider(0.0, 1.0, label="Top P", step=0.05, value=0.3)
            presence_penalty = gr.Slider(0.0, 1.0, label="Presence Penalty", step=0.1, value=0)
@@ -114,7 +128,7 @@ with gr.Blocks(title=title) as demo:
     with gr.Row():
         submit = gr.Button("Submit", variant="primary")
         clear = gr.Button("Clear", variant="secondary")
-    output = gr.Textbox(label="Output", lines=50)
+    output = gr.Textbox(label="Output", lines=30)
     data = gr.Dataset(components=[prompt, token_count, temperature, top_p, presence_penalty, count_penalty], samples=examples, samples_per_page=50, label="Example Instructions", headers=["Prompt", "Max Tokens", "Temperature", "Top P", "Presence Penalty", "Count Penalty"])
     submit.click(evaluate, [prompt, token_count, temperature, top_p, presence_penalty, count_penalty], [output])
     clear.click(lambda: None, [], [output])
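
For reference, a minimal self-contained sketch of what the reworked generate_prompt produces after this commit. The function body mirrors the added lines in the diff above; the sample calls at the end are illustrative only and are not part of app.py.

# Sketch: behaviour of the new generate_prompt (body copied from the diff above).
# Note the parameter named `input` shadows the Python builtin, as in the commit.
def generate_prompt(instruction, input=""):
    instruction = instruction.strip().replace('\r\n','\n').replace('\n\n','\n')
    input = input.strip().replace('\r\n','\n').replace('\n\n','\n')
    if input:
        # Instruction plus an input passage -> Instruction/Input/Response format
        return f"""Instruction: {instruction}

Input: {input}

Response:"""
    else:
        # Instruction only -> chat-style prompt primed with a short greeting exchange
        return f"""User: hi

Assistant: Hi. I am your assistant and I will provide expert full response in full details. Please feel free to ask any question and I will always answer it.

User: {instruction}

Assistant:"""

if __name__ == "__main__":
    # Ends with "User: ... / Assistant:" (chat format); the instruction is illustrative
    print(generate_prompt("Tell me about ravens."))
    # Ends with "Instruction: ... / Input: ... / Response:" (instruct format)
    print(generate_prompt("Write a story using the following information.",
                          "A man named Alex chops a tree down."))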
 
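A second sketch shows the intent of the new gen_limit constant: one value now sets both the default token_count in evaluate() and the range and default of the "Max Tokens" slider. This is not the full app.py; evaluate() is stubbed out (the real one streams tokens from the RWKV pipeline), the countPenalty default is assumed, and the layout containers are omitted.

# Minimal Gradio sketch of the gen_limit wiring; not the full demo.
import gradio as gr

gen_limit = 500  # single constant replacing the hard-coded 500s

def evaluate(ctx, token_count=gen_limit, temperature=1.0, top_p=0.3,
             presencePenalty=0.3, countPenalty=0.3):  # countPenalty default is an assumption
    # Stub: the real evaluate() streams tokens from the RWKV pipeline.
    yield f"(would generate up to {token_count} tokens for: {ctx[:40]}...)"

with gr.Blocks(title="gen_limit sketch") as demo:
    prompt = gr.Textbox(lines=2, label="Prompt")
    token_count = gr.Slider(10, gen_limit, label="Max Tokens", step=10, value=gen_limit)
    temperature = gr.Slider(0.2, 2.0, label="Temperature", step=0.1, value=1.0)
    top_p = gr.Slider(0.0, 1.0, label="Top P", step=0.05, value=0.3)
    presence_penalty = gr.Slider(0.0, 1.0, label="Presence Penalty", step=0.1, value=0)
    count_penalty = gr.Slider(0.0, 1.0, label="Count Penalty", step=0.1, value=1)
    output = gr.Textbox(label="Output", lines=30)
    submit = gr.Button("Submit", variant="primary")
    clear = gr.Button("Clear", variant="secondary")
    submit.click(evaluate, [prompt, token_count, temperature, top_p, presence_penalty, count_penalty], [output])
    clear.click(lambda: None, [], [output])

if __name__ == "__main__":
    demo.launch()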