remove under 3b models
app.py CHANGED
@@ -37,21 +37,6 @@ st.markdown("""
 REQUIRED_SPACE_BYTES = 5 * 1024 ** 3  # 5 GB
 
 MODELS = {
-    "Qwen2.5-0.5B-Instruct (Q4_K_M)": {
-        "repo_id": "Qwen/Qwen2.5-0.5B-Instruct-GGUF",
-        "filename": "qwen2.5-0.5b-instruct-q4_k_m.gguf",
-        "description": "Qwen2.5-0.5B-Instruct (Q4_K_M)"
-    },
-    "Gemma-3.1B-it (Q4_K_M)": {
-        "repo_id": "unsloth/gemma-3-1b-it-GGUF",
-        "filename": "gemma-3-1b-it-Q4_K_M.gguf",
-        "description": "Gemma-3.1B-it (Q4_K_M)"
-    },
-    "Qwen2.5-1.5B-Instruct (Q4_K_M)": {
-        "repo_id": "Qwen/Qwen2.5-1.5B-Instruct-GGUF",
-        "filename": "qwen2.5-1.5b-instruct-q4_k_m.gguf",
-        "description": "Qwen2.5-1.5B-Instruct (Q4_K_M)"
-    },
     "Qwen2.5-3B-Instruct (Q4_K_M)": {
         "repo_id": "Qwen/Qwen2.5-3B-Instruct-GGUF",
         "filename": "qwen2.5-3b-instruct-q4_k_m.gguf",
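
The MODELS table maps a display name to a GGUF repo and filename on the Hugging Face Hub; this commit simply drops the sub-3B entries and keeps only Qwen2.5-3B-Instruct. The rest of app.py is not shown in this diff, so the snippet below is only a minimal sketch of how such entries are typically consumed: a free-space check against REQUIRED_SPACE_BYTES followed by huggingface_hub's hf_hub_download. The download_model helper is hypothetical, not the Space's actual code.

    # Minimal sketch (assumed usage, not taken from app.py itself).
    import shutil
    from huggingface_hub import hf_hub_download

    REQUIRED_SPACE_BYTES = 5 * 1024 ** 3  # 5 GB, as in the diff

    MODELS = {
        "Qwen2.5-3B-Instruct (Q4_K_M)": {
            "repo_id": "Qwen/Qwen2.5-3B-Instruct-GGUF",
            "filename": "qwen2.5-3b-instruct-q4_k_m.gguf",
            "description": "Qwen2.5-3B-Instruct (Q4_K_M)",
        },
    }

    def download_model(name: str, cache_dir: str = ".") -> str:
        """Hypothetical helper: verify free disk space, then fetch the GGUF file."""
        free = shutil.disk_usage(cache_dir).free
        if free < REQUIRED_SPACE_BYTES:
            raise RuntimeError(f"Need {REQUIRED_SPACE_BYTES} bytes free, only {free} available")
        entry = MODELS[name]
        # Returns the local path of the downloaded (or cached) GGUF file.
        return hf_hub_download(repo_id=entry["repo_id"], filename=entry["filename"])

Under this assumed structure, removing the 0.5B, 1B, and 1.5B entries only changes which keys the UI can offer; the download path and the 5 GB space requirement stay the same.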