# Hugging Face Space: Mistral-7B-Instruct chat demo
# (web-page status header "Spaces / Sleeping" removed from scraped copy)
import gradio as gr
from transformers import pipeline
import torch
from huggingface_hub import login
import os

# Authenticate with the Hugging Face Hub: the Mistral weights are gated,
# so a valid token must be supplied via the HF_TOKEN environment variable.
# Only call login() when a token is present — login(token=None) falls back
# to an interactive prompt, which fails in a headless Space.
hf_token = os.getenv("HF_TOKEN")
if hf_token:
    login(token=hf_token)

# Instruction-tuned Mistral 7B; bfloat16 halves memory versus float32
# while keeping the dynamic range needed for inference.
model_id = "mistralai/Mistral-7B-Instruct-v0.2"
print("\n\nDownloading model...\n\n")
pipe = pipeline("text-generation", model=model_id, torch_dtype=torch.bfloat16)
print("\n\nModel Initialized successfully!!\n\n")
def interact(message: str, history: list) -> str:
    """Chat callback for gr.ChatInterface(type="messages").

    Parameters
    ----------
    message : str
        The latest user message.
    history : list
        Prior turns as ``{"role": ..., "content": ...}`` dicts
        (the openai-style format Gradio uses with type="messages").

    Returns
    -------
    str
        The assistant's reply text for the new turn.
    """
    # Copy so we never mutate the history list that Gradio owns.
    chat_history = list(history)
    chat_history.append({"role": "user", "content": message})
    response = pipe(chat_history)
    print("\n\nResponse: ", response, end="\n\n")
    # The text-generation pipeline returns [{"generated_text": [...messages...]}];
    # the last message in that list is the newly generated assistant turn.
    return response[0]["generated_text"][-1]["content"]
# Wire the chat callback into a Gradio chat UI. type="messages" matches
# the role/content dict format that interact() appends to the history.
interface = gr.ChatInterface(
    fn=interact,
    type="messages",
    title="Mistral Model Interface",
)
# Start the Gradio server (blocks until shutdown).
interface.launch()