import gradio as gr
from huggingface_hub import login
from transformers import pipeline

# Authenticate with the Hugging Face Hub (required for gated or private models).
# Replace the placeholder below with your own access token.
login(token="your_huggingface_token")

# Load the DeepSeek LLM 7B chat model through the text-generation pipeline.
model_name = "deepseek-ai/deepseek-llm-7b-chat"
pipe = pipeline("text-generation", model=model_name, tokenizer=model_name)
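# Note: on a GPU machine you could also pass device_map="auto" to pipeline()
# (requires the accelerate package) so the 7B weights are placed on the
# available devices automatically; the call above uses the default placement.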


def chat_with_ai(prompt):
    # Generate a continuation of the prompt. return_full_text=False makes the
    # pipeline return only the newly generated text instead of prompt + reply.
    response = pipe(prompt, max_new_tokens=100, do_sample=True, return_full_text=False)
    return response[0]["generated_text"]
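
# Optional sanity check before launching the UI (uncomment to try it):
# print(chat_with_ai("Hello! Who are you?"))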

# Build the Gradio interface.
with gr.Blocks() as demo:
    gr.Markdown("# 🤖 AI Chatbot powered by DeepSeek")

    chatbot = gr.Chatbot()
    msg = gr.Textbox(label="Type your message:")
    clear = gr.Button("Clear")

    # Append the (user message, bot reply) pair to the chat history
    # and clear the input box.
    def respond(message, chat_history):
        response = chat_with_ai(message)
        chat_history.append((message, response))
        return "", chat_history

    # Wire up events: submitting the textbox sends the message;
    # the Clear button empties the chat history.
    msg.submit(respond, [msg, chatbot], [msg, chatbot])
    clear.click(lambda: [], None, chatbot)

demo.launch()
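
# If you need a temporary public URL (e.g. to share the demo), one option is
# demo.launch(share=True) instead of the plain launch() call above.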