# Streamlit chat application: LLaMA 3 models served via the Groq API.
import os

import streamlit as st
from dotenv import load_dotenv
from groq import Groq

# Populate os.environ from .env BEFORE reading GROQ_API_KEY below.
load_dotenv()

GROQ_API_KEY = os.environ.get('GROQ_API_KEY')
# Default system prompt; the sidebar lets users edit it per session.
PRE_PROMPT = "You are a helpful assistant. You do not respond as 'User' or pretend to be 'User'. You only respond once as Assistant."

# Fail fast with a visible hint when the API key is missing.
if not GROQ_API_KEY:
    st.warning("Please add your Groq API key to the .env file.")
    st.stop()

# Module-level Groq client, shared by every rerun of the script.
client = Groq(api_key=GROQ_API_KEY)

# Page chrome must be configured before any other Streamlit call renders.
st.set_page_config(page_title="LLaMA 3x", page_icon="🦙", layout="wide")
def render_app():
    """Render the chat UI: sidebar controls, chat history, and input box.

    Reads/writes ``st.session_state`` keys: ``chat_dialogue`` (list of
    ``{"role", "content"}`` dicts), ``pre_prompt``, ``temperature``,
    ``top_p``, ``max_seq_len``; ``user_info`` is cleared on logout.
    """
    # Reduce font sizes for the input text boxes.
    custom_css = """
    <style>
    .stTextArea textarea {font-size: 13px;}
    div[data-baseweb="select"] > div {font-size: 13px !important;}
    </style>
    """
    st.markdown(custom_css, unsafe_allow_html=True)

    # Left sidebar menu.
    st.sidebar.header("LLaMA 3x")

    # Hide Streamlit's default hamburger menu and footer for a cleaner look.
    hide_streamlit_style = """
    <style>
    #MainMenu {visibility: hidden;}
    footer {visibility: hidden;}
    </style>
    """
    st.markdown(hide_streamlit_style, unsafe_allow_html=True)

    # Initialize session state on first run only. The hyperparameter keys
    # are owned by the sliders below (which previously clobbered seeded
    # defaults that even disagreed with them, e.g. max_seq_len 512 vs 2048),
    # so only the chat history and system prompt need explicit defaults.
    if 'chat_dialogue' not in st.session_state:
        st.session_state['chat_dialogue'] = []
    if 'pre_prompt' not in st.session_state:
        st.session_state['pre_prompt'] = PRE_PROMPT

    # Model hyperparameters — the sliders are the single source of truth.
    st.session_state['temperature'] = st.sidebar.slider(
        'Temperature:', min_value=0.01, max_value=5.0, value=0.1, step=0.01)
    st.session_state['top_p'] = st.sidebar.slider(
        'Top P:', min_value=0.01, max_value=1.0, value=0.9, step=0.01)
    st.session_state['max_seq_len'] = st.sidebar.slider(
        'Max Sequence Length:', min_value=64, max_value=4096, value=2048, step=8)

    new_prompt = st.sidebar.text_area(
        'Prompt before the chat starts. Edit here if desired:', PRE_PROMPT, height=60)
    # Truthiness covers both "" and None; only a real edit replaces the default.
    if new_prompt and new_prompt != PRE_PROMPT:
        st.session_state['pre_prompt'] = new_prompt + "\n\n"
    else:
        st.session_state['pre_prompt'] = PRE_PROMPT

    btn_col1, btn_col2 = st.sidebar.columns(2)

    def clear_history():
        # Drop the entire conversation.
        st.session_state['chat_dialogue'] = []

    btn_col1.button("Clear History", use_container_width=True, on_click=clear_history)

    def logout():
        # pop() with a default avoids a KeyError when no user is logged in
        # (the original `del` crashed if 'user_info' was never set).
        st.session_state.pop('user_info', None)

    btn_col2.button("Logout", use_container_width=True, on_click=logout)

    st.sidebar.write(" ")
    st.sidebar.write(" ")
    st.sidebar.markdown("*Made with ❤️ by Asman. Not associated with Meta Platforms, Inc.*")

    # Replay chat history on every app rerun.
    for message in st.session_state.chat_dialogue:
        with st.chat_message(message["role"]):
            st.markdown(message["content"])

    # Accept user input.
    if prompt := st.chat_input("Message LLaMA 3x...."):
        # Add the user message to history and echo it immediately.
        st.session_state.chat_dialogue.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.markdown(prompt)

        with st.chat_message("assistant"):
            message_placeholder = st.empty()
            # Send the editable pre-prompt as the system message — previously
            # it was stored in session state but never included in the
            # request, so the model ignored it entirely.
            messages = [{"role": "system", "content": st.session_state['pre_prompt']}]
            messages.extend(
                {"role": msg["role"], "content": msg["content"]}
                for msg in st.session_state.chat_dialogue
            )
            try:
                chat_completion = client.chat.completions.create(
                    messages=messages,
                    model="llama3-70b-8192",
                    temperature=st.session_state['temperature'],
                    top_p=st.session_state['top_p'],
                    max_tokens=st.session_state['max_seq_len'],
                )
            except Exception as err:
                # Surface API/network failures in the UI instead of a traceback.
                st.error(f"Groq API request failed: {err}")
                return
            full_response = chat_completion.choices[0].message.content
            message_placeholder.markdown(full_response)
            # Add the assistant response to the chat history.
            st.session_state.chat_dialogue.append(
                {"role": "assistant", "content": full_response})
render_app()