Update app.py
app.py CHANGED

@@ -13,12 +13,8 @@ from ui import css, PLACEHOLDER
 
 llm = None
 llm_model = None
-
-
-hf_hub_download(repo_id="bartowski/cognitivecomputations_Dolphin3.0-R1-Mistral-24B-GGUF", filename="cognitivecomputations_Dolphin3.0-R1-Mistral-24B-Q8_0.gguf", local_dir = "./models")
-# hf_hub_download(repo_id="mradermacher/Dolphin3.0-Mistral-24B-GGUF", filename="Dolphin3.0-Mistral-24B.Q8_0.gguf", local_dir = "./models")
-# hf_hub_download(repo_id="kroonen/dolphin-2.9.2-Phi-3-Medium-GGUF", filename="dolphin-2.9.2-Phi-3-Medium-Q6_K.gguf", local_dir = "./models")
-hf_hub_download(repo_id="cognitivecomputations/dolphin-2.9.2-qwen2-72b-gguf", filename="qwen2-Q3_K_M.gguf", local_dir = "./models")
+hf_hub_download(repo_id="baconnier/Napoleon_24B_V0.2-Q8_0-GGUF", filename="napoleon_24b_v0.2-q8_0.gguf", local_dir = "./models")
+hf_hub_download(repo_id="baconnier/Napoleon_24B_V0.1-Q8_0-GGUF", filename="napoleon_24b_v0.2-q8_0.gguf", local_dir = "./models")
 
 @spaces.GPU(duration=120)
 def respond(
@@ -85,9 +81,9 @@ demo = gr.ChatInterface(
     respond,
     additional_inputs=[
         gr.Dropdown([
-                '
-                '
-            ], value="
+                'napoleon_24b_v0.2-q8_0.gguf',
+                'napoleon_24b_v0.1-q8_0.gguf'
+            ], value="napoleon_24b_v0.2-q8_0.gguf", label="Model"),
         gr.Slider(minimum=1, maximum=8192, value=8192, step=1, label="Max tokens"),
         gr.Slider(minimum=0.05, maximum=4.0, value=0.6, step=0.1, label="Temperature"),
         gr.Slider(
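Note on the new download calls: the second hf_hub_download points at the Napoleon_24B_V0.1-Q8_0-GGUF repo but reuses the v0.2 filename, while the dropdown below lists 'napoleon_24b_v0.1-q8_0.gguf'. If the V0.1 repo actually ships a file named napoleon_24b_v0.1-q8_0.gguf (an assumption, not verified here), the call as committed would not fetch it, and the second dropdown entry would point at a file that never lands in ./models. A minimal sketch of what the pair of calls was presumably meant to be:

from huggingface_hub import hf_hub_download

# Presumed intent: download each quantised GGUF under the same filename
# that the gr.Dropdown below exposes to the user.
hf_hub_download(repo_id="baconnier/Napoleon_24B_V0.2-Q8_0-GGUF",
                filename="napoleon_24b_v0.2-q8_0.gguf",
                local_dir="./models")
hf_hub_download(repo_id="baconnier/Napoleon_24B_V0.1-Q8_0-GGUF",
                filename="napoleon_24b_v0.1-q8_0.gguf",  # assumption: v0.1 repo uses the v0.1 file name
                local_dir="./models")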
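For context, the llm / llm_model globals kept as context in the first hunk suggest the usual llama-cpp-python pattern for this kind of Space: respond() lazily (re)loads the GGUF whose filename the dropdown passes in. A rough sketch under that assumption (the helper name and the n_gpu_layers / n_ctx values are illustrative, not taken from app.py):

from llama_cpp import Llama

llm = None
llm_model = None

def ensure_model(model: str) -> Llama:
    # Hypothetical helper: reload the weights only when the selected file changes.
    global llm, llm_model
    if llm is None or llm_model != model:
        llm = Llama(
            model_path=f"./models/{model}",  # where hf_hub_download placed the file
            n_gpu_layers=-1,                 # assumption: offload all layers to the GPU
            n_ctx=8192,                      # matches the "Max tokens" slider ceiling
        )
        llm_model = model
    return llm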