removed Llama2 as an option
app.py
CHANGED
@@ -39,7 +39,7 @@ st.markdown("<h2 class='title'>GovBuddy</h2>", unsafe_allow_html=True)
 # inference_server = "https://api-inference.huggingface.co/models/codellama/CodeLlama-13b-hf"
 # inference_server = "https://api-inference.huggingface.co/models/pandasai/bamboo-llm"
 
-model_name = st.sidebar.selectbox("Select LLM:", ["llama3","mixtral", "
+model_name = st.sidebar.selectbox("Select LLM:", ["llama3","mixtral", "gemma"])
 
 questions = ('Custom Prompt',
     'Plot the monthly average PM2.5 for the year 2023.',
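
For context, here is a minimal sketch of how the selected model_name could be mapped to a Hugging Face Inference API endpoint, replacing the hard-coded inference_server URLs that are commented out above. The MODEL_ENDPOINTS mapping and the specific model repositories are illustrative assumptions, not code from the GovBuddy app itself.

import streamlit as st

# Hypothetical mapping from the sidebar choice to an inference endpoint
# (assumed for illustration; the app's actual wiring is not shown in this diff).
MODEL_ENDPOINTS = {
    "llama3": "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-3-8B-Instruct",
    "mixtral": "https://api-inference.huggingface.co/models/mistralai/Mixtral-8x7B-Instruct-v0.1",
    "gemma": "https://api-inference.huggingface.co/models/google/gemma-7b-it",
}

# Build the selectbox from the mapping so the option list and endpoints stay in sync.
model_name = st.sidebar.selectbox("Select LLM:", list(MODEL_ENDPOINTS))
inference_server = MODEL_ENDPOINTS[model_name]
st.sidebar.caption(f"Endpoint: {inference_server}")

Driving the option list from a single mapping means dropping a model (as this commit does for Llama2) is a one-line change and cannot leave a stale endpoint behind.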