baconnier commited on
Commit
df67f78
·
verified ·
1 Parent(s): 43f7448

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -9
app.py CHANGED
@@ -13,12 +13,8 @@ from ui import css, PLACEHOLDER
13
 
14
  llm = None
15
  llm_model = None
16
- # hf_hub_download(repo_id="bartowski/dolphin-2.9.1-yi-1.5-34b-GGUF", filename="dolphin-2.9.1-yi-1.5-34b-Q6_K.gguf", local_dir = "./models")
17
- # hf_hub_download(repo_id="crusoeai/dolphin-2.9.1-llama-3-70b-GGUF", filename="dolphin-2.9.1-llama-3-70b.Q3_K_M.gguf", local_dir = "./models")
18
- hf_hub_download(repo_id="bartowski/cognitivecomputations_Dolphin3.0-R1-Mistral-24B-GGUF", filename="cognitivecomputations_Dolphin3.0-R1-Mistral-24B-Q8_0.gguf", local_dir = "./models")
19
- # hf_hub_download(repo_id="mradermacher/Dolphin3.0-Mistral-24B-GGUF", filename="Dolphin3.0-Mistral-24B.Q8_0.gguf", local_dir = "./models")
20
- # hf_hub_download(repo_id="kroonen/dolphin-2.9.2-Phi-3-Medium-GGUF", filename="dolphin-2.9.2-Phi-3-Medium-Q6_K.gguf", local_dir = "./models")
21
- hf_hub_download(repo_id="cognitivecomputations/dolphin-2.9.2-qwen2-72b-gguf", filename="qwen2-Q3_K_M.gguf", local_dir = "./models")
22
 
23
  @spaces.GPU(duration=120)
24
  def respond(
@@ -85,9 +81,9 @@ demo = gr.ChatInterface(
85
  respond,
86
  additional_inputs=[
87
  gr.Dropdown([
88
- 'cognitivecomputations_Dolphin3.0-R1-Mistral-24B-Q8_0.gguf',
89
- 'qwen2-Q3_K_M.gguf'
90
- ], value="cognitivecomputations_Dolphin3.0-R1-Mistral-24B-Q8_0.gguf", label="Model"),
91
  gr.Slider(minimum=1, maximum=8192, value=8192, step=1, label="Max tokens"),
92
  gr.Slider(minimum=0.05, maximum=4.0, value=0.6, step=0.1, label="Temperature"),
93
  gr.Slider(
 
13
 
14
  llm = None
15
  llm_model = None
16
+ hf_hub_download(repo_id="baconnier/Napoleon_24B_V0.2-Q8_0-GGUF", filename="napoleon_24b_v0.2-q8_0.gguf", local_dir = "./models")
17
+ hf_hub_download(repo_id="baconnier/Napoleon_24B_V0.1-Q8_0-GGUF", filename="napoleon_24b_v0.1-q8_0.gguf", local_dir = "./models")
 
 
 
 
18
 
19
  @spaces.GPU(duration=120)
20
  def respond(
 
81
  respond,
82
  additional_inputs=[
83
  gr.Dropdown([
84
+ 'napoleon_24b_v0.2-q8_0.gguf',
85
+ 'napoleon_24b_v0.1-q8_0.gguf'
86
+ ], value="napoleon_24b_v0.2-q8_0.gguf", label="Model"),
87
  gr.Slider(minimum=1, maximum=8192, value=8192, step=1, label="Max tokens"),
88
  gr.Slider(minimum=0.05, maximum=4.0, value=0.6, step=0.1, label="Temperature"),
89
  gr.Slider(