Upload 4 files
- constants.py +6 -1
- env.py +1 -0
- llmdolphin.py +51 -0
- requirements.txt +1 -1
constants.py CHANGED

@@ -35,6 +35,8 @@ LOAD_DIFFUSERS_FORMAT_MODEL = [
     'John6666/noobai-xl-nai-xl-vpredtestversion-sdxl',
     'John6666/chadmix-noobai075-illustrious01-v10-sdxl',
     'OnomaAIResearch/Illustrious-xl-early-release-v0',
+    'John6666/illustriousxl-mmmix-v50-sdxl',
+    'John6666/illustrious-pencil-xl-v200-sdxl',
     'John6666/obsession-illustriousxl-v21-sdxl',
     'eienmojiki/Anything-XL',
     'eienmojiki/Starry-XL-v5.2',
@@ -113,6 +115,7 @@ LOAD_DIFFUSERS_FORMAT_MODEL = [
 DIFFUSERS_FORMAT_LORAS = [
     "nerijs/animation2k-flux",
     "XLabs-AI/flux-RealismLora",
+    "Shakker-Labs/FLUX.1-dev-LoRA-Logo-Design",
 ]

 DOWNLOAD_EMBEDS = [
@@ -288,7 +291,9 @@ MODEL_TYPE_CLASS = {
     "diffusers:FluxPipeline": "FLUX",
 }

-POST_PROCESSING_SAMPLER = ["Use same sampler"] +
+POST_PROCESSING_SAMPLER = ["Use same sampler"] + [
+    name_s for name_s in scheduler_names if "Auto-Loader" not in name_s
+]

 SUBTITLE_GUI = (
     "### This demo uses [diffusers](https://github.com/huggingface/diffusers)"
env.py CHANGED

@@ -142,6 +142,7 @@ LOAD_DIFFUSERS_FORMAT_MODEL = [
     'Raelina/Raemu-XL-V4',
     'Raelina/Raehoshi-illust-XL',
     'Raelina/Raehoshi-illust-xl-2',
+    'Raelina/Raehoshi-Illust-XL-2.1',
     "camenduru/FLUX.1-dev-diffusers",
     "black-forest-labs/FLUX.1-schnell",
     "sayakpaul/FLUX.1-merged",
llmdolphin.py CHANGED

@@ -75,6 +75,57 @@ llm_models = {
     "ChatWaifu_22B_v2.0_preview.Q4_K_S.gguf": ["mradermacher/ChatWaifu_22B_v2.0_preview-GGUF", MessagesFormatterType.MISTRAL],
     "ChatWaifu_v1.4.Q5_K_M.gguf": ["mradermacher/ChatWaifu_v1.4-GGUF", MessagesFormatterType.MISTRAL],
     "ChatWaifu_v1.3.1.Q4_K_M.gguf": ["mradermacher/ChatWaifu_v1.3.1-GGUF", MessagesFormatterType.MISTRAL],
+    "Fraken-Maid-TW-Slerp.i1-Q5_K_M.gguf": ["mradermacher/Fraken-Maid-TW-Slerp-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "KunoichiLake-2x7b.Q4_K_M.gguf": ["mradermacher/KunoichiLake-2x7b-GGUF", MessagesFormatterType.MISTRAL],
+    "Llama-3.1-SuperNova-8B-Lite_TIES_with_Base-Q4_K_M.gguf": ["bartowski/Llama-3.1-SuperNova-8B-Lite_TIES_with_Base-GGUF", MessagesFormatterType.LLAMA_3],
+    "blossom-v4-qwen-14b.Q4_K_M.gguf": ["mradermacher/blossom-v4-qwen-14b-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Cookie_7B.Q5_K_M.gguf": ["mradermacher/Cookie_7B-GGUF", MessagesFormatterType.MISTRAL],
+    "Sejong-Qwen-v5.Q5_K_M.gguf": ["mradermacher/Sejong-Qwen-v5-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Triunvirato-7b.i1-Q5_K_M.gguf": ["mradermacher/Triunvirato-7b-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "Qwen2.5-7B-CyberRombos.i1-Q5_K_M.gguf": ["mradermacher/Qwen2.5-7B-CyberRombos-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "G2-9B-Aletheia-v1.i1-Q4_K_M.gguf": ["mradermacher/G2-9B-Aletheia-v1-i1-GGUF", MessagesFormatterType.ALPACA],
+    "Qwen2.5-14B-UpToDate.i1-Q4_K_M.gguf": ["mradermacher/Qwen2.5-14B-UpToDate-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Mistral-T5-7B-v1.Q4_K_M.gguf": ["mradermacher/Mistral-T5-7B-v1-GGUF", MessagesFormatterType.MISTRAL],
+    "NM-StarUnleashed.i1-Q4_K_M.gguf": ["mradermacher/NM-StarUnleashed-i1-GGUF", MessagesFormatterType.CHATML],
+    "WestLake-7B-v2-laser.i1-Q5_K_M.gguf": ["mradermacher/WestLake-7B-v2-laser-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "Top-Western-Maid-7B.i1-Q4_K_M.gguf": ["mradermacher/Top-Western-Maid-7B-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "MarcoHermes.i1-Q4_K_M.gguf": ["mradermacher/MarcoHermes-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "Mayoroya.i1-Q4_K_M.gguf": ["mradermacher/Mayoroya-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "NeuraLake-m7-v2-7B.i1-Q5_K_M.gguf": ["mradermacher/NeuraLake-m7-v2-7B-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "Loyal-Macaroni-Maid-7B.Q5_K_M.gguf": ["mradermacher/Loyal-Macaroni-Maid-7B-GGUF", MessagesFormatterType.MISTRAL],
+    "NeuraLake-m7-7B.i1-Q5_K_M.gguf": ["mradermacher/NeuraLake-m7-7B-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "Nero-7B-slerp.i1-Q5_K_M.gguf": ["mradermacher/Nero-7B-slerp-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "MarcoroCapy-7B.Q5_K_M.gguf": ["mradermacher/MarcoroCapy-7B-GGUF", MessagesFormatterType.MISTRAL],
+    "KunaiBeagle-Hermes-7b.Q5_K_M.gguf": ["mradermacher/KunaiBeagle-Hermes-7b-GGUF", MessagesFormatterType.MISTRAL],
+    "CapyLake-7B-v2-laser.Q5_K_M.gguf": ["mradermacher/CapyLake-7B-v2-laser-GGUF", MessagesFormatterType.MISTRAL],
+    "OmniBeagleMBX-v3-7B.Q5_K_M.gguf": ["mradermacher/OmniBeagleMBX-v3-7B-GGUF", MessagesFormatterType.MISTRAL],
+    "Omnarch-7B.Q5_K_M.gguf": ["mradermacher/Omnarch-7B-GGUF", MessagesFormatterType.MISTRAL],
+    "FoFoNet-SuperMayo-MBX-slerp.Q5_K_M.gguf": ["mradermacher/FoFoNet-SuperMayo-MBX-slerp-GGUF", MessagesFormatterType.MISTRAL],
+    "culturalmixer.Q5_K_M.gguf": ["mradermacher/culturalmixer-GGUF", MessagesFormatterType.MISTRAL],
+    "Qwen2.5-7B-task2.i1-Q5_K_M.gguf": ["mradermacher/Qwen2.5-7B-task2-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "FoFoNet-SuperMBX-slerp.Q5_K_M.gguf": ["mradermacher/FoFoNet-SuperMBX-slerp-GGUF", MessagesFormatterType.MISTRAL],
+    "Llama-3.2-3B-Apex.i1-Q5_K_M.gguf": ["mradermacher/Llama-3.2-3B-Apex-i1-GGUF", MessagesFormatterType.LLAMA_3],
+    "lumimaid-8B-autotrain.i1-Q5_K_M.gguf": ["mradermacher/lumimaid-8B-autotrain-i1-GGUF", MessagesFormatterType.LLAMA_3],
+    "NeuralHermes-2.5-AshhLimaRP-Mistral-7B.Q5_K_M.gguf": ["mradermacher/NeuralHermes-2.5-AshhLimaRP-Mistral-7B-GGUF", MessagesFormatterType.LLAMA_3],
+    "CrystalMistralv2.5.Q5_K_M.gguf": ["mradermacher/CrystalMistralv2.5-GGUF", MessagesFormatterType.MISTRAL],
+    "Llama-3-Aetheric-Hermes-Lexi-Smaug-8B.Q5_K_M.gguf": ["mradermacher/Llama-3-Aetheric-Hermes-Lexi-Smaug-8B-GGUF", MessagesFormatterType.LLAMA_3],
+    "Qwen2.5-3B-Loki.Q5_K_M.gguf": ["mradermacher/Qwen2.5-3B-Loki-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "L3.2-Rogue-Creative-Instruct-Uncensored-Abliterated-7B.Q5_K_M.gguf": ["mradermacher/L3.2-Rogue-Creative-Instruct-Uncensored-Abliterated-7B-GGUF", MessagesFormatterType.LLAMA_3],
+    "G2-9B-Sugarquill-v0.i1-Q4_K_M.gguf": ["mradermacher/G2-9B-Sugarquill-v0-i1-GGUF", MessagesFormatterType.ALPACA],
+    "medius-erebus-magnum-14b.i1-Q4_K_M.gguf": ["mradermacher/medius-erebus-magnum-14b-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "L3.1-Start-10B.Q4_K_M.gguf": ["mradermacher/L3.1-Start-10B-GGUF", MessagesFormatterType.LLAMA_3],
+    "cybertron-v4-qw7B-MGS-Q5_K_M.gguf": ["bartowski/cybertron-v4-qw7B-MGS-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "CompassJudger-1-14B-Instruct.Q4_K_M.gguf": ["mradermacher/CompassJudger-1-14B-Instruct-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "FIxtral.0.2.Q5_K_M.gguf": ["mradermacher/FIxtral.0.2-GGUF", MessagesFormatterType.MISTRAL],
+    "Starcannon-Unleashed-12B-v1.0.Q4_K_M.gguf": ["mradermacher/Starcannon-Unleashed-12B-v1.0-GGUF", MessagesFormatterType.MISTRAL],
+    "Rombos-Qwen2.5-7B-Inst-BaseMerge-TIES.Q5_K_M.gguf": ["mradermacher/Rombos-Qwen2.5-7B-Inst-BaseMerge-TIES-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "WIP_Damascus-8B-TIES.i1-Q5_K_M.gguf": ["mradermacher/WIP_Damascus-8B-TIES-i1-GGUF", MessagesFormatterType.LLAMA_3],
+    "Qwen2.5_7B_IST_StoryGen_vanilla.i1-Q5_K_M.gguf": ["mradermacher/Qwen2.5_7B_IST_StoryGen_vanilla-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "Supernova-Blackhole_V0.1.i1-Q5_K_M.gguf": ["mradermacher/Supernova-Blackhole_V0.1-i1-GGUF", MessagesFormatterType.LLAMA_3],
+    "Gemmaslerp2-9B.i1-Q4_K_M.gguf": ["mradermacher/Gemmaslerp2-9B-i1-GGUF", MessagesFormatterType.ALPACA],
+    "Eidolon-v3-14B.i1-Q4_K_M.gguf": ["mradermacher/Eidolon-v3-14B-i1-GGUF", MessagesFormatterType.OPEN_CHAT],
+    "MN-WORDSTORM-pt4-RCM-Cliffhanger-18.5B-Instruct.i1-Q4_K_M.gguf": ["mradermacher/MN-WORDSTORM-pt4-RCM-Cliffhanger-18.5B-Instruct-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "MN-WORDSTORM-pt5-RCM-Extra-Intense-18.5B-Instruct.i1-Q4_K_M.gguf": ["mradermacher/MN-WORDSTORM-pt5-RCM-Extra-Intense-18.5B-Instruct-i1-GGUF", MessagesFormatterType.MISTRAL],
     "UnslopNemo-12B-v4.1.i1-Q4_K_M.gguf": ["mradermacher/UnslopNemo-12B-v4.1-i1-GGUF", MessagesFormatterType.MISTRAL],
     "MFANN-Llama3.1-Abliterated-Slerp-V3.2.Q5_K_M.gguf": ["mradermacher/MFANN-Llama3.1-Abliterated-Slerp-V3.2-GGUF", MessagesFormatterType.LLAMA_3],
     "MFANN-Llama3.1-Abliterated-Slerp-TIES.Q5_K_M.gguf": ["mradermacher/MFANN-Llama3.1-Abliterated-Slerp-TIES-GGUF", MessagesFormatterType.LLAMA_3],
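Each new llm_models entry follows the same pattern as the existing ones: the GGUF filename is the key, and the value pairs the Hugging Face repo hosting that file with the MessagesFormatterType prompt template to apply. A hedged sketch of how such an entry can be resolved into a local file; fetch_model is a hypothetical helper, not code from llmdolphin.py, and the MessagesFormatterType import path follows llama-cpp-agent's examples and may differ from what the app uses:

    from huggingface_hub import hf_hub_download
    # Assumption: top-level export as in llama-cpp-agent examples; adjust if the app imports it differently.
    from llama_cpp_agent import MessagesFormatterType

    # One entry copied from the diff above: key = GGUF filename, value = [repo id, prompt format].
    llm_models = {
        "KunoichiLake-2x7b.Q4_K_M.gguf": ["mradermacher/KunoichiLake-2x7b-GGUF", MessagesFormatterType.MISTRAL],
    }

    def fetch_model(name: str):
        """Hypothetical helper: download the GGUF file and return (local_path, formatter)."""
        repo_id, formatter = llm_models[name]
        local_path = hf_hub_download(repo_id=repo_id, filename=name)
        return local_path, formatter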
requirements.txt CHANGED

@@ -4,7 +4,7 @@ diffusers
 invisible_watermark
 transformers
 xformers
-git+https://github.com/R3gm/stablepy.git@
+git+https://github.com/R3gm/stablepy.git@ed51089 # -b refactor_sampler_fix
 torch==2.2.0
 gdown
 opencv-python
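The stablepy requirement is now pinned to the short commit ed51089 instead of floating on the bare git URL; the trailing "# -b refactor_sampler_fix" is a requirements-file comment (pip ignores everything after a whitespace-preceded #), so it only documents which branch the commit comes from. The same pin can be installed on its own with: pip install "git+https://github.com/R3gm/stablepy.git@ed51089"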