# --- Hugging Face file-viewer residue (not part of the program); kept as comments ---
# vericudebuget's picture
# Update app.py
# 4489ef8 verified
# raw
# history blame
# 4.5 kB
from huggingface_hub import InferenceClient
import gradio as gr
from pathlib import Path
import datetime
# Initialize the InferenceClient
# Hugging Face Inference API client pinned to the Mixtral-8x7B instruct model.
# All generation in this app is streamed through this single shared client.
client = InferenceClient(
"mistralai/Mixtral-8x7B-Instruct-v0.1"
)
def format_prompt(message, history, system_prompt):
    """Assemble a Mixtral-style instruction prompt string.

    The system prompt is prepended once, then every prior (user, bot)
    exchange is wrapped in ``[INST] ... [/INST]`` / ``</s>`` markers,
    and finally the new user message is appended as the open turn.
    """
    pieces = ["<s>"]
    for past_user, past_bot in history:
        pieces.append(f"[INST] {past_user} [/INST]")
        pieces.append(f" {past_bot}</s> ")
    pieces.append(f"[INST] {message} [/INST]")
    # System prompt goes first, exactly once.
    return system_prompt + "".join(pieces)
def generate(
    prompt, history, user_system_prompt, temperature=0.9, max_new_tokens=9048, top_p=0.95, repetition_penalty=1.0
):
    """Stream a Mixtral completion, yielding the growing reply text.

    Args:
        prompt: The latest user message.
        history: List of (user, bot) turn pairs from the Gradio chatbot.
        user_system_prompt: Extra system instructions from the UI textbox.
        temperature, max_new_tokens, top_p, repetition_penalty: sampling knobs.

    Yields:
        The cumulative generated text after each streamed token, so the
        Gradio UI can render a live-updating message.
    """
    # Compute the timestamp BEFORE building the system prompt so the
    # {formatted_time} placeholder can actually be filled in (previously it
    # was computed afterwards and never interpolated at all).
    now = datetime.datetime.now()
    formatted_time = now.strftime("%H.%M.%S, %B, %Y")
    system_prompt = '''Instructions: Everything else is from the user. You are Milo, an AI assistant created by ConvoLite in 2024 (he/him). Be friendly and empathetic, matching the user's tone and vibe and be very zesty. For better UX, the current time is {formatted_time}, but if it`s not relevant, ignore it. Focus on understanding their perspective and providing caring, contextual responses - no generic platitudes. Keep it conversational, not overly formal. (If the user asks what is your favorite anime or show, you respond as if you really did watch it. If you don`t have knowledge about a show/anime/etc you say that you have not watched it) Be friendly and funny, use ASCII emojis (list of ascii emojis: whatever: ¯\\\_(ツ)\_/¯ ; ƪ(‾ε‾")ʃ ; ┐( ̄ー ̄)┌ ; ╮(^▽^)╭ ; ┐( ˘\_˘)┌ L(´▽`L ) ; (゜▽゜;) ; (´▽\`ʃƪ) ; (⁰▿⁰) ; (˘▾˘) ; Happy: ( ¨̮ ) ; (ؑᵒᵕؑ̇ᵒ)◞✧ ; (˘▾˘) ; (˘▾˘) ; (ノ゚▽゚)ノ (ノ\*゜▽゜\*) ; ƪ(˘⌣˘)┐ ; ƪ(˘⌣˘)ʃ ; (‾⌣‾) ; (‾⌣‾) ; (˘▾˘) sad: ( ≧Д≦) ; ( p\_q) ; (;\*△\*;) ; ((´д`)) ; (゚´Д`゚)゚ random: ( ͡° ͜ʖ ͡°) ; ▄︻̷̿┻̿═━一 ; ʕ•ᴥ•ʔ ; (ง ͠° ͟ل͜ ͡°)ง ; ༼ つ ◕\_◕ ༽つ) Have Bible based values. Do not talk about yourself unless asked. Never respond to a message with 'as an AI' or something similar. Don't talk a lot!, Give good relatable advice that can help. Be creative. Be really fun to talk with, be friendly, and if the user wants you to role-play a real-life or fictional situation, be willing to do so. Be very excited and have that something that makes people like you and keep talking with you. Be extremely relatable.'''
    # Substitute via str.replace (not str.format) so the emoticon characters
    # in the prompt cannot trip brace parsing.
    system_prompt = system_prompt.replace("{formatted_time}", formatted_time)
    # Honour the UI-provided system prompt; it was previously accepted but
    # silently ignored even though the textbox is wired to this parameter.
    if user_system_prompt:
        system_prompt += f" {user_system_prompt}"
    # The endpoint rejects temperatures at/near zero; clamp to a small floor.
    temperature = max(float(temperature), 1e-2)
    top_p = float(top_p)
    generate_kwargs = dict(
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        do_sample=True,
        seed=42,  # fixed seed: reproducible sampling across calls
    )
    formatted_prompt = format_prompt(f"{prompt}", history, system_prompt)
    stream = client.text_generation(
        formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False
    )
    output = ""
    # Accumulate token-by-token and yield the running text for live display.
    for response in stream:
        output += response.token.text
        yield output
# Extra controls shown under the chat box; Gradio passes their values as the
# positional arguments after (prompt, history) in generate():
# user_system_prompt, temperature, max_new_tokens, top_p, repetition_penalty.
# NOTE(review): the Top-p slider default (0.90) and Repetition penalty default
# (1.2) differ from generate()'s own defaults (0.95 / 1.0) — confirm intended.
additional_inputs = [
gr.Textbox(label="System Prompt", max_lines=1, interactive=True),
gr.Slider(label="Temperature", value=0.9, minimum=0.0, maximum=1.0, step=0.05, interactive=True, info="Higher values produce more diverse outputs"),
gr.Slider(label="Max new tokens", value=9048, minimum=256, maximum=9048, step=64, interactive=True, info="The maximum numbers of new tokens"),
gr.Slider(label="Top-p (nucleus sampling)", value=0.90, minimum=0.0, maximum=1, step=0.05, interactive=True, info="Higher values sample more low-probability tokens"),
gr.Slider(label="Repetition penalty", value=1.2, minimum=1.0, maximum=2.0, step=0.05, interactive=True, info="Penalize repeated tokens")
]
# (user avatar, bot avatar) image URLs for the chat widget.
avatar_images = ("https://i.postimg.cc/pXjKKVXG/user-circle.png", "https://i.postimg.cc/qq04Yz93/CL3.png")
# Wire the streaming generate() function into a Gradio chat UI and start the
# server. launch() blocks; show_api=False hides the auto-generated API page.
gr.ChatInterface(
fn=generate,
chatbot=gr.Chatbot(show_label=True, show_share_button=False, show_copy_button=True, likeable=True, layout="panel", height="auto", avatar_images=avatar_images),
additional_inputs=additional_inputs,
title="ConvoLite",
submit_btn="➢",
retry_btn="Retry",
undo_btn="↩ Undo",
clear_btn="Clear (New chat)",
stop_btn="Stop ▢",
concurrency_limit=20,  # max simultaneous generation requests
theme=gr.themes.Soft(primary_hue=gr.themes.colors.cyan),
).launch(show_api=False)