# Streamlit app: conversational chatbot using internlm/internlm2_5-7b via AirLLM.
# (Removed stray "Spaces:" / "Build error" lines pasted in from the hosting UI.)
import os

import streamlit as st
import torch
from airllm import AutoModel
from dotenv import load_dotenv
from transformers import AutoTokenizer, AutoModelForCausalLM

# Load environment variables from a local .env file, if present.
load_dotenv()

# Hugging Face API token; None when the variable is unset.
# NOTE(review): presumably needed for gated model downloads — confirm AirLLM
# picks this up, since it is never passed explicitly below.
api_token = os.getenv("HUGGINGFACEHUB_API_TOKEN")

# Maximum number of tokens kept from the user's input when tokenizing.
MAX_LENGTH = 128


@st.cache_resource
def _load_model():
    """Load the AirLLM model once and reuse it across Streamlit reruns.

    Streamlit re-executes the whole script on every interaction; without
    caching, the 7B model would be reloaded from disk on each button press.
    """
    return AutoModel.from_pretrained("internlm/internlm2_5-7b")


model = _load_model()
# Streamlit page-level configuration (must run before other st.* calls).
# The original icon "π€" was a mojibake artifact of the robot emoji.
st.set_page_config(
    page_title="Conversational Chatbot with internlm2_5-7b-chat and AirLLM",
    page_icon="🤖",
    layout="wide",
    initial_sidebar_state="expanded",
)

# App title shown at the top of the main pane.
st.title("Conversational Chatbot with internlm2_5-7b-chat and AirLLM")
# Sidebar configuration: let the user pick a color theme.
st.sidebar.header("Chatbot Configuration")
theme = st.sidebar.selectbox("Choose a theme", ["Default", "Dark", "Light"])

# Per-theme colors: (pane background, text color, sidebar background).
# "Default" is deliberately absent — it keeps Streamlit's built-in styling.
_THEME_COLORS = {
    "Dark": ("#2E2E2E", "#FFFFFF", "#333333"),
    "Light": ("#FFFFFF", "#000000", "#F5F5F5"),
}

# Inject the theme CSS once, instead of duplicating the <style> block per theme.
if theme in _THEME_COLORS:
    pane_bg, text_color, sidebar_bg = _THEME_COLORS[theme]
    st.markdown(
        f"""
        <style>
        .reportview-container {{
            background: {pane_bg};
            color: {text_color};
        }}
        .sidebar .sidebar-content {{
            background: {sidebar_bg};
        }}
        </style>
        """,
        unsafe_allow_html=True,
    )
# Chat input and output.
user_input = st.text_input("You: ", "")

if st.button("Send"):
    if user_input:
        # Tokenize the user's message; truncation caps runaway prompt sizes.
        input_tokens = model.tokenizer(
            user_input,
            return_tensors="pt",
            return_attention_mask=False,
            truncation=True,
            max_length=MAX_LENGTH,
            padding=False,
        )

        # BUG FIX: the original called .cuda() unconditionally, which raises
        # on CPU-only hosts. Move the tensor to the GPU only when one exists.
        input_ids = input_tokens["input_ids"]
        if torch.cuda.is_available():
            input_ids = input_ids.cuda()

        # Generate a short continuation of the prompt.
        generation_output = model.generate(
            input_ids,
            max_new_tokens=20,
            use_cache=True,
            return_dict_in_generate=True,
        )

        # Decode, dropping special tokens (e.g. <s>, </s>) from the display.
        response = model.tokenizer.decode(
            generation_output.sequences[0], skip_special_tokens=True
        )
        st.text_area("Bot:", value=response, height=200, max_chars=None)
    else:
        st.warning("Please enter a message.")
# Footer: short "About" blurb pinned to the sidebar.
st.sidebar.markdown(
    """
    ### About
    This is a conversational chatbot built using the internlm2_5-7b-chat model and AirLLM.
    """
)