"""Streamlit chat app for querying GPT-2 XL and Mistral-7B-Instruct via the Hugging Face API."""
import os
import time

import requests
import streamlit as st
from huggingface_hub import InferenceClient
# --- Hugging Face API configuration ---
# Token is read from the environment; None when unset (requests are then unauthenticated).
API_TOKEN = os.environ.get("HUGGINGFACE_API_TOKEN")

# Raw Inference API endpoint used for GPT-2 XL text generation.
GPT2XL_API_URL = "https://api-inference.huggingface.co/models/openai-community/gpt2-xl"

# Chat-capable model served through the InferenceClient chat API.
MISTRAL_MODEL_NAME = "mistralai/Mistral-7B-Instruct-v0.3"

# Shared client for chat-completion calls (used by query_from_mistral).
client = InferenceClient(api_key=API_TOKEN)
# Query GPT-2 XL through the raw Inference API, waiting out model cold starts.
def query_from_gpt2xl(text: str) -> str:
    """Send *text* to the GPT-2 XL inference endpoint and return the generated text.

    Retries (sleeping for the server-suggested wait) while the model is still
    loading, which the API reports as ``{"error": "... loading ...", "estimated_time": N}``.

    Raises:
        RuntimeError: if the API reports an error other than "model loading".
    """
    headers = {"Authorization": f"Bearer {API_TOKEN}"}
    while True:
        response = requests.post(GPT2XL_API_URL, headers=headers, json={"inputs": text})
        response_data = response.json()
        # Success responses are a list; error responses are a dict with an "error" key.
        if isinstance(response_data, dict) and "error" in response_data:
            error_msg = str(response_data["error"])
            if "loading" in error_msg:
                wait_time = response_data.get("estimated_time", 10)
                st.info(f"Model is loading. Waiting for {wait_time:.2f} seconds...")
                time.sleep(wait_time)
                continue
            # The original fell through to response_data[0] here and crashed with an
            # opaque KeyError/TypeError; surface the API error message instead.
            raise RuntimeError(f"Hugging Face API error: {error_msg}")
        return response_data[0]["generated_text"]
# Query Mistral via the chat-completions client.
def query_from_mistral(text: str):
    """Return Mistral-7B-Instruct's reply to *text* via the chat-completion API."""
    chat_history = [{"role": "user", "content": text}]
    completion = client.chat.completions.create(
        model=MISTRAL_MODEL_NAME,
        messages=chat_history,
        max_tokens=500,
    )
    first_choice = completion.choices[0]
    return first_choice.message["content"]
def main() -> None:
    """Render the chat UI: model picker, input form, and conversation history."""
    st.set_page_config(page_title="Multi-Model Chat", layout="centered")
    # NOTE(review): the original title read "π€" — mojibake of a multi-byte emoji,
    # presumably the robot face; restored here.
    st.title("🤖 Multi-Model Chat")
    st.markdown("Chat with either **GPT-2 XL** or **Mistral-7B-Instruct** via Hugging Face API.")

    # Conversation history must survive Streamlit reruns, so it lives in session state.
    if "messages" not in st.session_state:
        st.session_state.messages = []

    model_choice = st.selectbox("Select a model:", ["GPT-2 XL", "Mistral-7B-Instruct"])

    with st.form(key="chat_form", clear_on_submit=True):
        user_input = st.text_input("You:", "")
        submit = st.form_submit_button("Send")

    if submit and user_input:
        st.session_state.messages.append({"role": "user", "content": user_input})
        with st.spinner("Fetching response..."):
            try:
                if model_choice == "GPT-2 XL":
                    response = query_from_gpt2xl(user_input)
                else:
                    # Only other selectbox option is "Mistral-7B-Instruct"; using
                    # `else` guarantees `response` is always bound on this path
                    # (the original `elif` left it potentially undefined).
                    response = query_from_mistral(user_input)
                st.session_state.messages.append({"role": "bot", "content": response})
            except Exception as e:
                # Surface API/network failures in the UI rather than crashing the app.
                st.error(f"Error: {e}")

    # Replay the full conversation, oldest first.
    for message in st.session_state.messages:
        if message["role"] == "user":
            st.markdown(f"**You:** {message['content']}")
        elif message["role"] == "bot":
            st.markdown(f"**Bot:** {message['content']}")
# Script entry point: launch the Streamlit app.
if __name__ == "__main__":
    main()