Upload 2 files
- llmenv.py +24 -0
- requirements.txt +1 -1
llmenv.py
CHANGED

@@ -145,6 +145,30 @@ llm_models = {
     #"": ["", MessagesFormatterType.OPEN_CHAT],
     #"": ["", MessagesFormatterType.CHATML],
     #"": ["", MessagesFormatterType.PHI_3],
+    "Grey-12b.Q4_K_M.gguf": ["mradermacher/Grey-12b-GGUF", MessagesFormatterType.MISTRAL],
+    "Gemma-2-Ataraxy-Remix-9B.Q4_K_M.gguf": ["mradermacher/Gemma-2-Ataraxy-Remix-9B-GGUF", MessagesFormatterType.ALPACA],
+    "Gemmasutra-9B-v1.1.Q4_K_M.gguf": ["mradermacher/Gemmasutra-9B-v1.1-GGUF", MessagesFormatterType.ALPACA],
+    "Gemma-2-Ataraxy-Doppel-9B.Q4_K_M.gguf": ["mradermacher/Gemma-2-Ataraxy-Doppel-9B-GGUF", MessagesFormatterType.ALPACA],
+    "Gemma-2-Ataraxy-Advanced-9B.Q4_K_M.gguf": ["mradermacher/Gemma-2-Ataraxy-Advanced-9B-GGUF", MessagesFormatterType.ALPACA],
+    "lwd-Mirau-RP-14B.Q4_K_M.gguf": ["mradermacher/lwd-Mirau-RP-14B-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Driftwood-12B.Q4_K_M.gguf": ["mradermacher/Driftwood-12B-GGUF", MessagesFormatterType.MISTRAL],
+    "Hush-Qwen2.5-7B-RP-v1.3-1M.Q5_K_M.gguf": ["mradermacher/Hush-Qwen2.5-7B-RP-v1.3-1M-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Hush-Qwen2.5-7B-v1.4.i1-Q5_K_M.gguf": ["mradermacher/Hush-Qwen2.5-7B-v1.4-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "L3-Urvashi-8B-Slerp.i1-Q5_K_M.gguf": ["mradermacher/L3-Urvashi-8B-Slerp-i1-GGUF", MessagesFormatterType.LLAMA_3],
+    "Gemma-2-gemmama-9b.Q4_K_M.gguf": ["mradermacher/Gemma-2-gemmama-9b-GGUF", MessagesFormatterType.ALPACA],
+    "Llama-3.1-Distilled.Q5_K_M.gguf": ["mradermacher/Llama-3.1-Distilled-GGUF", MessagesFormatterType.LLAMA_3],
+    "Hush-Qwen2.5-7B-v1.1.Q5_K_M.gguf": ["mradermacher/Hush-Qwen2.5-7B-v1.1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Hush-Qwen2.5-7B-RP-v1.3.Q5_K_M.gguf": ["mradermacher/Hush-Qwen2.5-7B-RP-v1.3-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Hush-Qwen2.5-7B-RP-v1.4-1M.Q5_K_M.gguf": ["mradermacher/Hush-Qwen2.5-7B-RP-v1.4-1M-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Qlast.Q5_K_M.gguf": ["mradermacher/Qlast-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "MN-Ephemeros-12B.Q4_K_M.gguf": ["mradermacher/MN-Ephemeros-12B-GGUF", MessagesFormatterType.MISTRAL],
+    "Sombrero-Opus-14B-Sm2.Q4_K_S.gguf": ["mradermacher/Sombrero-Opus-14B-Sm2-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Sombrero-Opus-14B-Sm4.Q4_K_M.gguf": ["mradermacher/Sombrero-Opus-14B-Sm4-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Sombrero-Opus-14B-Sm5.Q4_K_M.gguf": ["mradermacher/Sombrero-Opus-14B-Sm5-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Hush-Qwen2.5-7B-RP-1M.i1-Q4_K_S.gguf": ["mradermacher/Hush-Qwen2.5-7B-RP-1M-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Qwen-1M-DeepSeek-R1-14B.Q4_K_M.gguf": ["mradermacher/Qwen-1M-DeepSeek-R1-14B-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Captain_BMO-12B-ChatMLified.Q4_K_M.gguf": ["mradermacher/Captain_BMO-12B-ChatMLified-GGUF", MessagesFormatterType.CHATML],
+    "recoilme-gemma-2-9B-v0.2.Q4_K_M.gguf": ["mradermacher/recoilme-gemma-2-9B-v0.2-GGUF", MessagesFormatterType.ALPACA],
     "Viper-Coder-v1.7-Vsm6.Q4_K_M.gguf": ["mradermacher/Viper-Coder-v1.7-Vsm6-GGUF", MessagesFormatterType.OPEN_CHAT],
     "Llama31-8B-oci-text2sql-no-think.Q5_K_M.gguf": ["mradermacher/Llama31-8B-oci-text2sql-no-think-GGUF", MessagesFormatterType.LLAMA_3],
     "Llama31-8B-oci-text2sql.Q5_K_M.gguf": ["mradermacher/Llama31-8B-oci-text2sql-GGUF", MessagesFormatterType.LLAMA_3],
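Each `llm_models` entry maps a GGUF filename to a two-element list: the Hugging Face repo that hosts the file and the `MessagesFormatterType` chat template to apply. Below is a minimal, hypothetical sketch of how such an entry could be resolved into a working agent with `huggingface_hub`, `llama-cpp-python`, and `llama-cpp-agent`; the `load_agent` helper and the `n_ctx`/`n_gpu_layers` values are illustrative assumptions, not code from this Space.

```python
# Hypothetical usage sketch (not part of this commit): resolve one llm_models
# entry to a ready-to-use agent. load_agent and the n_ctx / n_gpu_layers
# values are assumptions for illustration only.
from huggingface_hub import hf_hub_download
from llama_cpp import Llama
from llama_cpp_agent import LlamaCppAgent, MessagesFormatterType
from llama_cpp_agent.providers import LlamaCppPythonProvider

llm_models = {
    "Grey-12b.Q4_K_M.gguf": ["mradermacher/Grey-12b-GGUF", MessagesFormatterType.MISTRAL],
}

def load_agent(filename: str) -> LlamaCppAgent:
    repo_id, formatter = llm_models[filename]
    # Fetch the quantized GGUF weights from the mapped Hugging Face repo.
    model_path = hf_hub_download(repo_id=repo_id, filename=filename)
    llm = Llama(model_path=model_path, n_ctx=4096, n_gpu_layers=-1)
    provider = LlamaCppPythonProvider(llm)
    # The second list element selects the chat template for the model family.
    return LlamaCppAgent(provider, predefined_messages_formatter_type=formatter)

agent = load_agent("Grey-12b.Q4_K_M.gguf")
print(agent.get_chat_response("Hello!"))
```

Pinning a formatter per entry matters because a mismatched chat template (for example, a Mistral template on a Gemma-2 model) typically degrades generation quality, so each new model is registered together with the template it expects.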
requirements.txt
CHANGED

@@ -5,7 +5,7 @@ scikit-build-core
 https://github.com/abetlen/llama-cpp-python/releases/download/v0.3.4-cu124/llama_cpp_python-0.3.4-cp310-cp310-linux_x86_64.whl
 git+https://github.com/Maximilian-Winter/llama-cpp-agent
 pybind11>=2.12
-torch>=2.2.0
+#torch>=2.2.0
 torchvision
 accelerate
 transformers<=4.48.3