import os

import streamlit as st
from dotenv import load_dotenv

# Load variables from a local .env file into the process environment
# BEFORE anything reads os.environ below.
load_dotenv()

from groq import Groq

# Set up Streamlit app.
# NOTE(fix): st.set_page_config must be the first Streamlit command executed;
# previously st.warning/st.stop could run before it when the key was missing.
st.set_page_config(page_title="LLaMA 3x", page_icon="🦙", layout="wide")

# Load environment variables
GROQ_API_KEY = os.environ.get('GROQ_API_KEY')
# Default system prompt; the user can edit it in the sidebar.
PRE_PROMPT = "You are a helpful assistant. You do not respond as 'User' or pretend to be 'User'. You only respond once as Assistant."

# Fail fast with a visible message if the API key is not configured.
if not GROQ_API_KEY:
    st.warning("Please add your Groq API key to the .env file.")
    st.stop()

# Connect to Groq
client = Groq(api_key=GROQ_API_KEY)
def render_app():
    """Render the chat UI.

    Draws the sidebar (model hyperparameters, editable pre-prompt, clear/logout
    buttons), replays the chat history from session state, and, on new user
    input, calls the Groq chat-completions API and displays the reply.

    Reads module-level globals: ``st``, ``client``, ``PRE_PROMPT``.
    Returns None; all output goes through Streamlit.
    """
    # reduce font sizes for input text boxes (placeholder CSS — currently empty)
    custom_css = """
"""
    st.markdown(custom_css, unsafe_allow_html=True)

    # Left sidebar menu
    st.sidebar.header("LLaMA 3x")

    # Set config for a cleaner menu, footer & background (placeholder — empty):
    hide_streamlit_style = """
"""
    st.markdown(hide_streamlit_style, unsafe_allow_html=True)

    # container for the chat history
    response_container = st.container()
    # container for the user's text input
    container = st.container()

    # Set up/Initialize Session State variables (first run only; the sidebar
    # widgets below overwrite the hyperparameter entries on every rerun).
    if 'chat_dialogue' not in st.session_state:
        st.session_state['chat_dialogue'] = []
    if 'temperature' not in st.session_state:
        st.session_state['temperature'] = 0.1
    if 'top_p' not in st.session_state:
        st.session_state['top_p'] = 0.9
    if 'max_seq_len' not in st.session_state:
        # FIX: was 512, inconsistent with the slider default (2048) below.
        st.session_state['max_seq_len'] = 2048
    if 'pre_prompt' not in st.session_state:
        st.session_state['pre_prompt'] = PRE_PROMPT

    # Model hyperparameters
    st.session_state['temperature'] = st.sidebar.slider('Temperature:', min_value=0.01, max_value=5.0, value=0.1, step=0.01)
    st.session_state['top_p'] = st.sidebar.slider('Top P:', min_value=0.01, max_value=1.0, value=0.9, step=0.01)
    st.session_state['max_seq_len'] = st.sidebar.slider('Max Sequence Length:', min_value=64, max_value=4096, value=2048, step=8)

    # Editable system prompt; fall back to the default when left empty.
    NEW_P = st.sidebar.text_area('Prompt before the chat starts. Edit here if desired:', PRE_PROMPT, height=60)
    if NEW_P != PRE_PROMPT and NEW_P is not None and NEW_P != "":
        st.session_state['pre_prompt'] = NEW_P + "\n\n"
    else:
        st.session_state['pre_prompt'] = PRE_PROMPT

    btn_col1, btn_col2 = st.sidebar.columns(2)

    # Add the "Clear Chat History" button to the sidebar
    def clear_history():
        st.session_state['chat_dialogue'] = []

    btn_col1.button("Clear History",
                    use_container_width=True,
                    on_click=clear_history)

    # add logout button
    def logout():
        # FIX: pop() instead of del — 'user_info' may not exist in session
        # state, and del would raise KeyError on the callback.
        st.session_state.pop('user_info', None)

    btn_col2.button("Logout",
                    use_container_width=True,
                    on_click=logout)

    # add links to relevant resources for users to select
    st.sidebar.write(" ")
    st.sidebar.write(" ")
    st.sidebar.markdown("*Made with ❤️ by Asman. Not associated with Meta Platforms, Inc.*")

    # Display chat messages from history on app rerun
    for message in st.session_state.chat_dialogue:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    # Accept user input
    if prompt := st.chat_input("Message LLaMA 3x...."):
        # Add user message to chat history
        st.session_state.chat_dialogue.append({"role": "user", "content": prompt})
        # Display user message in chat message container
        with st.chat_message("user"):
            st.markdown(prompt)
        with st.chat_message("assistant"):
            message_placeholder = st.empty()
            full_response = ""
            # FIX: the (editable) pre-prompt was previously never sent to the
            # model; prepend it as the system message so it takes effect.
            messages = [{"role": "system", "content": st.session_state['pre_prompt']}]
            messages += [{"role": msg["role"], "content": msg["content"]}
                         for msg in st.session_state.chat_dialogue]
            chat_completion = client.chat.completions.create(
                messages=messages,
                model="llama3-70b-8192",
                temperature=st.session_state['temperature'],
                top_p=st.session_state['top_p'],
                max_tokens=st.session_state['max_seq_len'],
            )
            full_response = chat_completion.choices[0].message.content
            message_placeholder.markdown(full_response)
        # Add assistant response to chat history
        st.session_state.chat_dialogue.append({"role": "assistant", "content": full_response})
# Entry point: Streamlit re-executes this script top-to-bottom on every
# user interaction, so render_app() redraws the whole UI each rerun.
render_app()