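# Mixtral 8x7B RPG — a Gradio Space that plays a role-playing Game Master,
# driven by mistralai/Mixtral-8x7B-Instruct-v0.1 through the Hugging Face
# Inference API. Prompt templates (GAME_MASTER, COMPRESS_HISTORY) are
# imported from the accompanying prompts module.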
from huggingface_hub import InferenceClient
import gradio as gr
import random

from prompts import GAME_MASTER, COMPRESS_HISTORY

client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
def format_prompt(message, history):
    prompt = ""
    '''
    prompt = "<s>"
    for user_prompt, bot_response in history:
        prompt += f"[INST] {user_prompt} [/INST]"
        prompt += f" {bot_response}</s> "
    '''
    prompt += f"[INST] {message} [/INST]"
    return prompt
# Default sampling parameters
temperature = 0.9
top_p = 0.95
repetition_penalty = 1.0
def compress_history(history, temperature=temperature, top_p=top_p, repetition_penalty=repetition_penalty):
    # Ask the model to summarize the running history so the prompt stays short
    formatted_prompt = COMPRESS_HISTORY.format(history=history)
    generate_kwargs = dict(
        temperature=temperature,
        max_new_tokens=1024,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        do_sample=True,
        seed=random.randint(1, 99999999999),
        #seed=42,
    )
    stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
    output = ""
    for response in stream:
        output += response.token.text
    return output
MAX_HISTORY = 100  # line count above which the chat history gets summarized
def generate(prompt, history, max_new_tokens, temperature=temperature, top_p=top_p, repetition_penalty=repetition_penalty):
    temperature = float(temperature)
    if temperature < 1e-2:
        temperature = 1e-2
    top_p = float(top_p)
    generate_kwargs = dict(
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        do_sample=True,
        seed=random.randint(1, 99999999999),
        #seed=42,
    )

    # Count the lines accumulated in the chat history so far
    cnt = 0
    for ea in history:
        print(ea)
        for l in ea:
            print(l)
            cnt += len(l.split("\n"))
    print(f'cnt:: {cnt}')

    # Summarize the history once it grows past MAX_HISTORY lines;
    # compress_history takes (history, temperature, top_p, repetition_penalty)
    if cnt > MAX_HISTORY:
        history = compress_history(str(history), temperature, top_p, repetition_penalty)

    formatted_prompt = format_prompt(f"{GAME_MASTER.format(history=history)}, {prompt}", history)
    stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
    output = ""
    for response in stream:
        output += response.token.text
        # Stream the partial reply; after compression, history is a plain string,
        # so only prepend it while it is still a list of (user, bot) pairs
        if isinstance(history, list) and history:
            yield "", history + [(prompt, output)]
        else:
            yield "", [(prompt, output)]

    # Log any numbered options the Game Master offered
    lines = output.strip().strip("\n").split("\n")
    #history=""
    for line in lines:
        if line.startswith(("1. ", "2. ", "3. ", "4. ", "5. ")):
            print(line)

    if isinstance(history, list) and history:
        return "", history + [(prompt, output)]
    else:
        return "", [(prompt, output)]
def clear_fn():
    return None, None
with gr.Blocks() as app:
    gr.HTML("""<center><h1>Mixtral 8x7B RPG</h1><h3>Role Playing Game Master</h3></center>""")
    chatbot = gr.Chatbot(label="Mixtral 8x7B Chatbot", show_copy_button=True)
    with gr.Row():
        with gr.Column(scale=3):
            prompt = gr.Textbox(label="Prompt")
        with gr.Column(scale=1):
            button = gr.Button()
            #models_dd=gr.Dropdown(choices=[m for m in return_list],interactive=True)
    with gr.Row():
        stop_button = gr.Button("Stop")
        clear_btn = gr.Button("Clear")
    with gr.Row():
        tokens = gr.Slider(label="Max new tokens", value=1048, minimum=0, maximum=1048*10, step=64, interactive=True, info="The maximum number of new tokens")
    json_out = gr.JSON()
    e_box = gr.Textbox()
    #text=gr.JSON()
    #inp_query.change(search_models,inp_query,models_dd)
    #test_b=test_btn.click(itt,url,e_box)

    clear_btn.click(clear_fn, None, [prompt, chatbot])
    go = button.click(generate, [prompt, chatbot, tokens], [prompt, chatbot])
    stop_button.click(None, None, None, cancels=[go])

app.launch(show_api=False)
'''
examples = [
    ["Start the Game", None, None, None, None, None],
    ["Start a Game based in the year 1322", None, None, None, None, None],
]
gr.ChatInterface(
    fn=generate,
    chatbot=gr.Chatbot(show_label=False, show_share_button=False, show_copy_button=True, likeable=True, layout="panel"),
    additional_inputs=additional_inputs,
    title="Mixtral RPG Game Master",
    examples=examples,
    concurrency_limit=20,
).launch(share=True, show_api=True)
'''