import gradio as gr
from huggingface_hub import InferenceClient
"""
For more information on `huggingface_hub` Inference API support, please check the docs:
https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
"""
client = InferenceClient("unsloth/Llama-3.2-1B-Instruct")
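# respond() below is a generator: Gradio's ChatInterface consumes the partial strings
# it yields and streams them into the chat window as they arrive.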
def respond(
    message,
    history: list[tuple[str, str]] = None,  # Default history to None to avoid a mutable default argument
):
    if history is None:
        history = []

    # System message describing the assistant's role
    system_message = (
        "You are a Dietician Assistant specializing in providing general guidance on diet, "
        "nutrition, and healthy eating habits. Answer questions thoroughly with scientifically "
        "backed advice, practical tips, and easy-to-understand explanations. Keep in mind that "
        "your role is to assist, not replace a registered dietitian, so kindly remind users to "
        "consult a professional for personalized advice when necessary."
    )

    # Sampling parameters for the chat completion
    max_tokens = 512
    temperature = 0.7
    top_p = 0.95

    # Initialize the message chain with the system message
    messages = [{"role": "system", "content": system_message}]

    # Add previous turns from the history to the message chain
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})

    # Append the new user message
    messages.append({"role": "user", "content": message})
    response = ""
    # Generate the response in a streaming fashion
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        if token:  # the final chunk may carry an empty or None delta
            response += token
        yield response
def default_message():
    """Return the initial assistant greeting shown when the app loads."""
    return [(None, "Hi there! I'm your Dietician Assistant, here to help with general advice "
                   "on diet, nutrition, and healthy eating habits. Let's explore your questions.")]
# Set up the Gradio ChatInterface, pre-populating the chat window with the default greeting
with gr.Blocks() as demo:
    gr.ChatInterface(
        respond,
        chatbot=gr.Chatbot(value=default_message()),
    )
if __name__ == "__main__":
    demo.launch()