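"""Streamlit chat app that queries GPT-2 XL or Mistral-7B-Instruct through the
Hugging Face Inference API."""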
import streamlit as st
import time
import requests
import os
from huggingface_hub import InferenceClient

# Hugging Face API Setup
API_TOKEN = os.environ.get("HUGGINGFACE_API_TOKEN")
GPT2XL_API_URL = "https://api-inference.huggingface.co/models/openai-community/gpt2-xl"
MISTRAL_MODEL_NAME = "mistralai/Mistral-7B-Instruct-v0.3"
client = InferenceClient(api_key=API_TOKEN)
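# Both endpoints authenticate with the HUGGINGFACE_API_TOKEN environment variable;
# requests may be rejected or rate-limited if it is missing or invalid.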

# Query GPT-2 XL, retrying while the model is still loading on the Inference API
def query_from_gpt2xl(text: str):
    headers = {"Authorization": f"Bearer {API_TOKEN}"}
    while True:
        response = requests.post(GPT2XL_API_URL, headers=headers, json={"inputs": text})
        response_data = response.json()
        if isinstance(response_data, dict) and "error" in response_data:
            if "loading" in response_data["error"]:
                # Model is cold: wait for the estimated warm-up time and retry
                wait_time = response_data.get("estimated_time", 10)
                st.info(f"Model is loading. Waiting for {wait_time:.2f} seconds...")
                time.sleep(wait_time)
                continue
            # Surface any other API error instead of failing on the list index below
            raise RuntimeError(response_data["error"])
        return response_data[0]["generated_text"]

# Query Mistral via the chat-completion API
def query_from_mistral(text: str):
    messages = [{"role": "user", "content": text}]
    completion = client.chat.completions.create(
        model=MISTRAL_MODEL_NAME,
        messages=messages,
        max_tokens=500,
    )
    return completion.choices[0].message.content

def main():
    st.set_page_config(page_title="Multi-Model Chat", layout="centered")
    st.title("🤖 Multi-Model Chat")
    st.markdown("Chat with either **GPT-2 XL** or **Mistral-7B-Instruct** via Hugging Face API.")

    if "messages" not in st.session_state:
        st.session_state.messages = []

    model_choice = st.selectbox("Select a model:", ["GPT-2 XL", "Mistral-7B-Instruct"])

    with st.form(key="chat_form", clear_on_submit=True):
        user_input = st.text_input("You:", "")
        submit = st.form_submit_button("Send")

    if submit and user_input:
        st.session_state.messages.append({"role": "user", "content": user_input})
        with st.spinner("Fetching response..."):
            try:
                if model_choice == "GPT-2 XL":
                    response = query_from_gpt2xl(user_input)
                elif model_choice == "Mistral-7B-Instruct":
                    response = query_from_mistral(user_input)
                st.session_state.messages.append({"role": "bot", "content": response})
            except Exception as e:
                st.error(f"Error: {e}")

    for message in st.session_state.messages:
        if message["role"] == "user":
            st.markdown(f"**You:** {message['content']}")
        elif message["role"] == "bot":
            st.markdown(f"**Bot:** {message['content']}")

if __name__ == "__main__":
    main()