Update app.py
app.py (CHANGED)

@@ -11,8 +11,8 @@ import gradio as gr
 from huggingface_hub import hf_hub_download
 from ui import css, PLACEHOLDER
 
-hf_hub_download(repo_id="bartowski/dolphin-2.9.1-yi-1.5-34b-GGUF", filename="dolphin-2.9.1-yi-1.5-34b-Q6_K.gguf", local_dir = "./models")
-
+# hf_hub_download(repo_id="bartowski/dolphin-2.9.1-yi-1.5-34b-GGUF", filename="dolphin-2.9.1-yi-1.5-34b-Q6_K.gguf", local_dir = "./models")
+hf_hub_download(repo_id="crusoeai/dolphin-2.9.1-llama-3-70b-GGUF", filename="dolphin-2.9.1-llama-3-70b.Q3_K_M.gguf", local_dir = "./models")
 # hf_hub_download(repo_id="kroonen/dolphin-2.9.2-Phi-3-Medium-GGUF", filename="dolphin-2.9.2-Phi-3-Medium-Q6_K.gguf", local_dir = "./models")
 hf_hub_download(repo_id="cognitivecomputations/dolphin-2.9.2-qwen2-72b-gguf", filename="qwen2-Q3_K_M.gguf", local_dir = "./models")
 
@@ -76,9 +76,7 @@ demo = gr.ChatInterface(
     respond,
     additional_inputs=[
         gr.Dropdown([
-                'dolphin-2.9.1-yi-1.5-34b-Q6_K.gguf',
                 'dolphin-2.9.1-llama-3-70b.Q3_K_M.gguf',
-                'dolphin-2.9.2-Phi-3-Medium-Q6_K.gguf',
                 'qwen2-Q3_K_M.gguf'
             ], value="qwen2-Q3_K_M.gguf", label="Model"),
         gr.Slider(minimum=1, maximum=8192, value=8192, step=1, label="Max tokens"),
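
For context, the dropdown holds bare GGUF filenames while the hf_hub_download calls place those files under ./models, so the two changed sections must stay in sync: every filename offered in the dropdown needs a matching (uncommented) download. The sketch below shows one way a respond callback could consume these inputs. It is an illustration only, not the Space's actual code: llama-cpp-python, the _loaded cache, and the (user, assistant) history format are assumptions, and only the two additional inputs visible in the diff (model filename, max tokens) are handled. The one confirmed detail is that gr.ChatInterface passes the additional_inputs values to the callback after message and history.

# Illustrative sketch only -- not taken from the Space's app.py.
# Assumptions: inference runs through llama-cpp-python, history arrives as
# (user, assistant) pairs, and only the model dropdown and max-tokens slider
# from the diff are wired up.
from llama_cpp import Llama

_loaded = {}  # hypothetical cache so each GGUF file is loaded at most once


def respond(message, history, model, max_tokens):
    # gr.ChatInterface appends the additional_inputs values (the dropdown's
    # model filename and the slider's token limit) after message and history.
    if model not in _loaded:
        # The dropdown value is a bare filename; hf_hub_download stored the
        # file under ./models, so the full path is assembled here.
        _loaded[model] = Llama(model_path=f"./models/{model}", n_ctx=8192)
    llm = _loaded[model]

    # Convert Gradio's (user, assistant) history pairs into the OpenAI-style
    # message list that create_chat_completion expects.
    messages = []
    for user_msg, assistant_msg in history:
        messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    result = llm.create_chat_completion(messages=messages, max_tokens=max_tokens)
    return result["choices"][0]["message"]["content"]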