vericudebuget committed on
Commit
ea4dc0b
·
verified ·
1 Parent(s): 96ba47c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -3
app.py CHANGED
@@ -1,6 +1,7 @@
1
  from huggingface_hub import InferenceClient
2
  import gradio as gr
3
  import datetime
 
4
 
5
  # Initialize the InferenceClient
6
  client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
@@ -26,7 +27,7 @@ def generate(prompt, history, system_prompt, temperature=0.9, max_new_tokens=904
26
  )
27
  now = datetime.datetime.now()
28
  formatted_time = now.strftime("%H:%M:%S, %B %d, %Y")
29
- system_prompt = f"System time: {formatted_time}"
30
  formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
31
  stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
32
  output = ""
@@ -42,9 +43,11 @@ additional_inputs = [
42
  gr.Slider(label="Repetition penalty", value=1.2, minimum=1.0, maximum=2.0, step=0.05, interactive=True, info="Penalize repeated tokens")
43
  ]
44
 
 
 
45
  gr.ChatInterface(
46
- fn=generate,
47
- chatbot=gr.Chatbot(show_label=True, show_share_button=False, show_copy_button=True, likeable=True, layout="panel", height="auto"),
48
  additional_inputs=additional_inputs,
49
  title="ConvoLite",
50
  submit_btn="➢",
 
1
  from huggingface_hub import InferenceClient
2
  import gradio as gr
3
  import datetime
4
+ from pathlib import Path
5
 
6
  # Initialize the InferenceClient
7
  client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
 
27
  )
28
  now = datetime.datetime.now()
29
  formatted_time = now.strftime("%H:%M:%S, %B %d, %Y")
30
+ system_prompt = f"System time: {formatted_time}. System time: {formatted_time}. Instructions: Everything else is from the user. You are Milo, an AI assistant created by ConvoLite in 2024 (he/him). Be friendly and empathetic, matching the user's tone. Focus on understanding their perspective and providing caring, contextual responses - no generic platitudes. Keep it conversational, not overly formal."
31
  formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
32
  stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
33
  output = ""
 
43
  gr.Slider(label="Repetition penalty", value=1.2, minimum=1.0, maximum=2.0, step=0.05, interactive=True, info="Penalize repeated tokens")
44
  ]
45
 
46
+ avatar_images = ("https://i.postimg.cc/pXjKKVXG/user-circle.png", "https://i.postimg.cc/qq04Yz93/CL3.png")
47
+
48
  gr.ChatInterface(
49
+ fn=chat,
50
+ chatbot=gr.Chatbot(show_label=True, show_share_button=False, show_copy_button=True, likeable=True, layout="panel", height="auto", avatar_images=avatar_images),
51
  additional_inputs=additional_inputs,
52
  title="ConvoLite",
53
  submit_btn="➢",