# SimpleChatbot / app.py
import os

import numpy as np
import streamlit as st
from dotenv import load_dotenv
from openai import OpenAI

# Load environment variables (e.g. HUGGINGFACEHUB_API_TOKEN) from a .env file
load_dotenv()
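# Expected .env contents next to this script (hypothetical token value):
#   HUGGINGFACEHUB_API_TOKEN=hf_xxxxxxxxxxxxxxxx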
# Initialize the client (OpenAI-compatible, pointed at the Hugging Face Inference API)
client = OpenAI(
    base_url="https://api-inference.huggingface.co/v1",
    api_key=os.environ.get('HUGGINGFACEHUB_API_TOKEN')  # Replace with your token
)
# Supported models: display name -> Hugging Face repo ID
model_links = {
    "Meta-Llama-3-8B": "meta-llama/Meta-Llama-3-8B-Instruct"
}
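# More models could be registered here, e.g. (hypothetical entry):
#   "Mistral-7B": "mistralai/Mistral-7B-Instruct-v0.2",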
# Info about each model, displayed in the sidebar
model_info = {
    "Meta-Llama-3-8B": {
        'description': """The Llama 3 model is a **Large Language Model (LLM)** that's able to have question-and-answer interactions.\n
\nIt was created by the [**Meta AI**](https://llama.meta.com/) team and has over **8 billion parameters.**\n""",
        'logo': 'Llama_logo.png'
    }
}
# Random dog images for error message
random_dog = ["0f476473-2d8b-415e-b944-483768418a95.jpg", "1bd75c81-f1d7-4e55-9310-a27595fa8762.jpg"]
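# Each filename resolves to an image at https://random.dog/<filename> (used by the error handler below)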
def reset_conversation():
    '''Resets Conversation'''
    st.session_state.conversation = []
    st.session_state.messages = []
    return None
# Define the available models
models = list(model_links.keys())
# Create the sidebar with the dropdown for model selection
selected_model = st.sidebar.selectbox("Select Model", models)
# Custom description for SciMom
st.sidebar.write("Built for my mom, with love. This model is pretrained with textbooks of Science NCERT.")
st.sidebar.write("Model used: Meta Llama, trained using: Docker AutoTrain.")
# Create a temperature slider
temp_values = st.sidebar.slider('Select a temperature value', 0.0, 1.0, 0.5)
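# Temperature near 0.0 gives more deterministic replies; near 1.0, more varied ones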
# Add reset button to clear conversation
st.sidebar.button('Reset Chat', on_click=reset_conversation)
# Create model description
st.sidebar.write(f"You're now chatting with **{selected_model}**")
st.sidebar.markdown(model_info[selected_model]['description'])
st.sidebar.image(model_info[selected_model]['logo'])
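# The logo file (Llama_logo.png) is assumed to live in the app's working directory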
st.sidebar.markdown("*Generated content may be inaccurate or false.*")
if "prev_option" not in st.session_state:
st.session_state.prev_option = selected_model
if st.session_state.prev_option != selected_model:
st.session_state.messages = []
st.session_state.prev_option = selected_model
reset_conversation()
# Pull in the model we want to use
repo_id = model_links[selected_model]
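# Note: repo_id is kept for reference; the completion call below passes model_links[selected_model] directly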
st.subheader(f'AI - {selected_model}')
# Set a default model
if selected_model not in st.session_state:
    st.session_state[selected_model] = model_links[selected_model]
# Initialize chat history
if "messages" not in st.session_state:
    st.session_state.messages = []
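# Streamlit reruns this whole script on every interaction, so the chat
# history has to be replayed from session_state each time (loop below)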
# Display chat messages from history on app rerun
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])
# Accept user input
if prompt := st.chat_input(f"Hi I'm {selected_model}, ask me a question"):

    # Display user message in chat message container
    with st.chat_message("user"):
        st.markdown(prompt)
    st.session_state.messages.append({"role": "user", "content": prompt})

    # Display assistant response in chat message container
    with st.chat_message("assistant"):
        try:
            # Stream the completion; st.write_stream renders chunks as they
            # arrive and returns the full concatenated response text
            stream = client.chat.completions.create(
                model=model_links[selected_model],
                messages=[
                    {"role": m["role"], "content": m["content"]}
                    for m in st.session_state.messages
                ],
                temperature=temp_values,
                stream=True,
                max_tokens=3000,
            )
            response = st.write_stream(stream)
        except Exception as e:
            # Fall back to a friendly error message plus a random dog picture
            response = "😵‍💫 Looks like something went wrong! Here's a random pic of a 🐶:"
            st.write(response)
            random_dog_pick = 'https://random.dog/' + random_dog[np.random.randint(len(random_dog))]
            st.image(random_dog_pick)
            st.write("This was the error message:")
            st.write(e)

    st.session_state.messages.append({"role": "assistant", "content": response})
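# To run locally (assuming these are the only dependencies):
#   pip install streamlit openai python-dotenv numpy
#   streamlit run app.py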