Update app.py
app.py CHANGED
@@ -92,8 +92,8 @@ retriever_tool = RetrieverTool(docs_processed)
 huggingface_token = os.getenv("HUGGINGFACE_TOKEN")
 
 hf_hub_download(
-    repo_id="bartowski/google_gemma-3-
-    filename="google_gemma-3-
+    repo_id="bartowski/google_gemma-3-4b-it-GGUF",
+    filename="google_gemma-3-4b-it-Q4_K_M.gguf",
     local_dir="./models",
 )
 hf_hub_download(

@@ -303,10 +303,10 @@ demo = gr.ChatInterface(
     additional_inputs=[
         gr.Dropdown(
             choices=[
-                "google_gemma-3-
+                "google_gemma-3-4b-it-Q4_K_M.gguf",
                 "google_gemma-3-1b-it-Q5_K_M.gguf",
             ],
-            value="google_gemma-3-
+            value="google_gemma-3-4b-it-Q4_K_M.gguf",
             label="Model",
             info="Select the AI model to use for chat",
         ),
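For context, a minimal sketch of how the updated download call might be used end to end, assuming the app loads the GGUF file with llama-cpp-python (the loading code is not part of this diff, and n_ctx is a hypothetical setting, not taken from app.py):

import os

from huggingface_hub import hf_hub_download
from llama_cpp import Llama  # assumption: GGUF files are served via llama-cpp-python

# Fetch the quantized Gemma 3 4B weights referenced in the diff.
huggingface_token = os.getenv("HUGGINGFACE_TOKEN")
model_path = hf_hub_download(
    repo_id="bartowski/google_gemma-3-4b-it-GGUF",
    filename="google_gemma-3-4b-it-Q4_K_M.gguf",
    local_dir="./models",
    token=huggingface_token,
)

# Load the downloaded file for local inference.
llm = Llama(model_path=model_path, n_ctx=4096)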
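And a sketch of how the Dropdown value reaches the chat handler under the usual gr.ChatInterface wiring: each additional input is passed to the handler after (message, history). The handler name respond and its body are hypothetical; only the Dropdown configuration comes from this diff.

import gradio as gr

def respond(message, history, model_filename):
    # The selected GGUF filename arrives as an extra argument;
    # real inference against that file is not shown in this diff.
    return f"(placeholder reply using {model_filename})"

demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Dropdown(
            choices=[
                "google_gemma-3-4b-it-Q4_K_M.gguf",
                "google_gemma-3-1b-it-Q5_K_M.gguf",
            ],
            value="google_gemma-3-4b-it-Q4_K_M.gguf",
            label="Model",
            info="Select the AI model to use for chat",
        ),
    ],
)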