import gradio as gr

# Load the model using gr.load (the model is hosted on Hugging Face); the
# returned demo can be called like a function to query it
model = gr.load("models/meta-llama/Llama-3.2-1B")
# Generate a response and append the exchange to the chat history; with
# type='messages', the Chatbot expects a list of {"role", "content"} dicts
def chatbot_response(input_text, history):
    response = model(input_text)
    history = (history or []) + [
        {"role": "user", "content": input_text},
        {"role": "assistant", "content": response},
    ]
    return history
# Create the Gradio Blocks interface
with gr.Blocks() as demo:
    # Use type='messages' for the Chatbot to avoid the deprecation warning
    chatbot = gr.Chatbot(type='messages')
    with gr.Row():
        # The .style method is no longer available; pass options directly to the constructor
        txt = gr.Textbox(show_label=False, placeholder="Enter your message and press Enter")
    # Pass the textbox value and the current history to the response function,
    # then display the updated history in the Chatbot
    txt.submit(chatbot_response, [txt, chatbot], chatbot)
# Launch the Gradio interface
demo.launch()
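
If you don't need the explicit Blocks layout, the same behaviour can be written more compactly with Gradio's built-in gr.ChatInterface, which manages the Chatbot, the input Textbox, and the message history for you. A minimal sketch, assuming the same model loaded via gr.load and a hypothetical respond helper:

import gradio as gr

# The loaded demo is used as a plain function that returns the generated text
model = gr.load("models/meta-llama/Llama-3.2-1B")

# gr.ChatInterface calls this with (message, history) and only needs the new
# reply back; history handling and the input box are built in
def respond(message, history):
    return model(message)

gr.ChatInterface(respond, type="messages").launch()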