| type (stringclasses, 1 value) | id (stringlengths 5-122) | num_branches (int64, 1-1.76k) | branches (listlengths 1-1.76k) | main_branch_size (int64, 0-32,943B) |
|---|---|---|---|---|
| model | unrahul/CodeLlama-7b-hf-asym_int5 | 1 | main | 5,070,510,647 |
| model | AlignmentResearch/robust_llm_pythia-14m_niki-052_imdb_gcg_seed-0 | 31 | adv-training-round-29, adv-training-round-28, adv-training-round-27, adv-training-round-26, adv-training-round-25, adv-training-round-24, adv-training-round-23, adv-training-round-22, adv-training-round-21, adv-training-round-20, adv-training-round-19, adv-training-round-18, adv-training-round-17, adv-training-round-16, adv-training-round-15, adv-training-round-14, adv-training-round-13, adv-training-round-12, adv-training-round-11, adv-training-round-10, adv-training-round-9, adv-training-round-8, adv-training-round-7, adv-training-round-6, adv-training-round-5, adv-training-round-4, adv-training-round-3, adv-training-round-2, adv-training-round-1, adv-training-round-0, main | 1,519 |
| model | unrahul/CodeLlama-7b-hf-fp4 | 1 | main | 358,705,673 |
| model | unrahul/Mistral-7B-Instruct-v0.2-fp4 | 1 | main | 4,305,148,414 |
| model | AlignmentResearch/robust_llm_pythia-70m_niki-052_imdb_gcg_seed-1 | 31 | adv-training-round-29, adv-training-round-28, adv-training-round-27, adv-training-round-26, adv-training-round-25, adv-training-round-24, adv-training-round-23, adv-training-round-22, adv-training-round-21, adv-training-round-20, adv-training-round-19, adv-training-round-18, adv-training-round-17, adv-training-round-16, adv-training-round-15, adv-training-round-14, adv-training-round-13, adv-training-round-12, adv-training-round-11, adv-training-round-10, adv-training-round-9, adv-training-round-8, adv-training-round-7, adv-training-round-6, adv-training-round-5, adv-training-round-4, adv-training-round-3, adv-training-round-2, adv-training-round-1, adv-training-round-0, main | 1,519 |
| model | Kukedlc/NeuralLLaMa-3-8b-ORPO-v0.2 | 1 | main | 16,069,758,171 |
| model | unrahul/Mistral-7B-Instruct-v0.2-bf16 | 1 | main | 9,945,356,712 |
| model | Gerard-1705/bertin_base_climate_detection_spa_v2 | 1 | main | 502,199,553 |
| model | AlignmentResearch/robust_llm_pythia-70m_niki-052_imdb_gcg_seed-0 | 31 | adv-training-round-29, adv-training-round-28, adv-training-round-27, adv-training-round-26, adv-training-round-25, adv-training-round-24, adv-training-round-23, adv-training-round-22, adv-training-round-21, adv-training-round-20, adv-training-round-19, adv-training-round-18, adv-training-round-17, adv-training-round-16, adv-training-round-15, adv-training-round-14, adv-training-round-13, adv-training-round-12, adv-training-round-11, adv-training-round-10, adv-training-round-9, adv-training-round-8, adv-training-round-7, adv-training-round-6, adv-training-round-5, adv-training-round-4, adv-training-round-3, adv-training-round-2, adv-training-round-1, adv-training-round-0, main | 1,519 |
| model | RichardErkhov/ise-uiuc_-_Magicoder-S-DS-6.7B-gguf | 1 | main | 81,607,091,903 |
| model | yifanxie/malachite-gibbon-step2 | 1 | main | 5,034,192,015 |
| model | zaq-hack/Llama-3-Lumimaid-8B-v0.1-OAS-8bpw-exl2 | 1 | main | 8,341,070,977 |
| model | EthanRhys/Gerald-Robotnik | 1 | main | 67,888,856 |
| model | thorirhrafn/icellama_domar_finetune_v3 | 1 | main | 268,517,141 |
| model | unrahul/CodeLlama-7b-hf-sym_int8 | 1 | main | 7,548,188,154 |
| model | VoTrongTinh/vietnamese-correction-v2 | 1 | main | 1,519 |
| model | AndySilver/alelanza1 | 1 | main | 256,057,862 |
| model | unrahul/Mistral-7B-Instruct-v0.2-asym_int4 | 1 | main | 4,527,348,384 |
| model | LinxuanPastel/SuneoV2 | 1 | main | 106,189,345 |
| model | AlignmentResearch/robust_llm_pythia-160m_niki-052_imdb_gcg_seed-1 | 31 | adv-training-round-29, adv-training-round-28, adv-training-round-27, adv-training-round-26, adv-training-round-25, adv-training-round-24, adv-training-round-23, adv-training-round-22, adv-training-round-21, adv-training-round-20, adv-training-round-19, adv-training-round-18, adv-training-round-17, adv-training-round-16, adv-training-round-15, adv-training-round-14, adv-training-round-13, adv-training-round-12, adv-training-round-11, adv-training-round-10, adv-training-round-9, adv-training-round-8, adv-training-round-7, adv-training-round-6, adv-training-round-5, adv-training-round-4, adv-training-round-3, adv-training-round-2, adv-training-round-1, adv-training-round-0, main | 1,519 |
| model | unrahul/Mistral-7B-Instruct-v0.2-fp8_e4m3 | 1 | main | 8,082,574,103 |
| model | dmusingu/Llama-3-8B-sft-lora-ultrachat | 1 | main | 227,284,377 |
| model | Ho-ot/pegasus-fyp | 1 | main | 2,285,754,637 |
| model | AlignmentResearch/robust_llm_pythia-160m_niki-052_imdb_gcg_seed-2 | 31 | adv-training-round-29, adv-training-round-28, adv-training-round-27, adv-training-round-26, adv-training-round-25, adv-training-round-24, adv-training-round-23, adv-training-round-22, adv-training-round-21, adv-training-round-20, adv-training-round-19, adv-training-round-18, adv-training-round-17, adv-training-round-16, adv-training-round-15, adv-training-round-14, adv-training-round-13, adv-training-round-12, adv-training-round-11, adv-training-round-10, adv-training-round-9, adv-training-round-8, adv-training-round-7, adv-training-round-6, adv-training-round-5, adv-training-round-4, adv-training-round-3, adv-training-round-2, adv-training-round-1, adv-training-round-0, main | 1,519 |
| model | unrahul/Mistral-7B-Instruct-v0.2-nf4 | 1 | main | 4,305,148,414 |
| model | herutriana44/ReVa_AI_Vaccine_Design | 1 | main | 365,998,464 |
| model | TheRealheavy/SniperCalm | 1 | main | 221,212,898 |
| model | dongxiat/ggml-PhoWhisper-base | 1 | main | 147,953,122 |
| model | dongxiat/ggml-PhoWhisper-small | 1 | main | 487,603,626 |
| model | RichardErkhov/Locutusque_-_Orca-2-13b-SFT-v6-4bits | 1 | main | 7,795,763,459 |
| model | AriaRahmati/Stage12AMTPart3 | 1 | main | 472,342,984 |
| model | unrahul/Mistral-7B-Instruct-v0.2-sym_int5 | 1 | main | 5,193,977,579 |
| model | Fischerboot/idontknowhatimdoinhere | 1 | main | 21,465,426,450 |
| model | Soondra/llama-model.bin | 1 | main | 1,957 |
| model | Karimdigou/__ | 1 | main | 1,615 |
| model | unrahul/CodeLlama-7b-hf-sym_int4 | 1 | main | 4,038,114,430 |
| model | ULRs/xlm-roberta-large-pos-tagging-ur | 1 | main | 2,257,666,060 |
| model | qymyz/wav2vec2-russian-base-til-demo-colab | 1 | main | 1,262,071,194 |
| model | unrahul/Mistral-7B-Instruct-v0.2-fp8 | 1 | main | 8,082,574,098 |
| model | saaduddinM/Gemma7B_add_large | 1 | main | 22,057,318 |
| model | Ketansomewhere/Lung_Ultrasound_Diffusion_720p | 1 | main | 454,746,277 |
| model | kaaang-g/noffbot-v3_01 | 1 | main | 47,017,539 |
| model | egoist000/yelp_roberta_sentiment_analysis | 1 | main | 502,250,464 |
| model | hitevaibh/mistral-finetuned-samsum | 1 | main | 29,584,275 |
| model | abc88767/4sc2 | 1 | main | 3,295,853,121 |
| model | unrahul/Mistral-7B-Instruct-v0.2-sym_int8 | 1 | main | 8,082,574,103 |
| model | unrahul/Mistral-7B-Instruct-v0.2-asym_int5 | 1 | main | 5,416,176,909 |
| model | abc88767/3sc3 | 1 | main | 3,295,853,121 |
| model | unrahul/Mistral-7B-Instruct-v0.2-sym_int4 | 1 | main | 4,305,148,419 |
| model | unrahul/Mistral-7B-Instruct-v0.2-mixed_fp8 | 1 | main | 8,082,574,104 |
| model | RichardErkhov/Locutusque_-_Orca-2-13b-SFT-v6-gguf | 1 | main | 157,102,556,830 |
| model | Fischerboot/Mistral-CatMacaroni-Uncensored-11b-Moistral-11B-SLERP | 1 | main | 1,519 |
| model | unrahul/Mistral-7B-Instruct-v0.2-mixed_fp4 | 1 | main | 4,305,148,420 |
| model | simmo/legal-llama-3 | 1 | main | 5,097,705,539 |
| model | Sumegh20/Text_Summarization_test | 1 | main | 1,519 |
| model | abc88767/5c3 | 1 | main | 3,295,853,120 |
| model | unrahul/Mistral-7B-Instruct-v0.2-nf3 | 1 | main | 3,416,349,126 |
| model | mradermacher/SilverMaiden-7B-slerp-GGUF | 1 | main | 73,415,913,120 |
| model | BlackRose104/Dolphinmix_v.1 | 1 | main | 5,675,026,730 |
| model | AlignmentResearch/robust_llm_pythia-410m_niki-052_imdb_gcg_seed-1 | 31 | adv-training-round-29, adv-training-round-28, adv-training-round-27, adv-training-round-26, adv-training-round-25, adv-training-round-24, adv-training-round-23, adv-training-round-22, adv-training-round-21, adv-training-round-20, adv-training-round-19, adv-training-round-18, adv-training-round-17, adv-training-round-16, adv-training-round-15, adv-training-round-14, adv-training-round-13, adv-training-round-12, adv-training-round-11, adv-training-round-10, adv-training-round-9, adv-training-round-8, adv-training-round-7, adv-training-round-6, adv-training-round-5, adv-training-round-4, adv-training-round-3, adv-training-round-2, adv-training-round-1, adv-training-round-0, main | 1,519 |
| model | unrahul/Mistral-7B-Instruct-v0.2-fp16 | 1 | main | 14,485,892,845 |
| model | andricValdez/bert-base-cased-finetuned-pan24 | 1 | main | 434,163,684 |
| model | shkna1368/mt5-base-finetuned-mt5-poem | 1 | main | 1,519 |
| model | lanhf/mamba-qa-vn | 1 | main | 558,753,906 |
| model | OPT21/llama3-test-function-calling | 1 | main | 74,443,874 |
| model | BlackRose104/LoRAs | 1 | main | 1,663,639,896 |
| model | PrunaAI/refuelai-Llama-3-Refueled-GGUF-smashed | 1 | main | 88,794,963,776 |
| model | LiquAId/whisper-tiny-german-V2-HanNeurAI | 1 | main | 153,173,976 |
| model | lokkkko/df | 1 | main | 1,519 |
| model | Felladrin/gguf-Phi-3-mini-4k-instruct | 1 | main | 34,612,833,504 |
| model | blockblockblock/Dark-Miqu-70B-bpw4.2-exl2 | 1 | main | 36,670,179,765 |
| model | AlignmentResearch/robust_llm_pythia-410m_niki-052_imdb_gcg_seed-0 | 31 | adv-training-round-29, adv-training-round-28, adv-training-round-27, adv-training-round-26, adv-training-round-25, adv-training-round-24, adv-training-round-23, adv-training-round-22, adv-training-round-21, adv-training-round-20, adv-training-round-19, adv-training-round-18, adv-training-round-17, adv-training-round-16, adv-training-round-15, adv-training-round-14, adv-training-round-13, adv-training-round-12, adv-training-round-11, adv-training-round-10, adv-training-round-9, adv-training-round-8, adv-training-round-7, adv-training-round-6, adv-training-round-5, adv-training-round-4, adv-training-round-3, adv-training-round-2, adv-training-round-1, adv-training-round-0, main | 1,519 |
| model | puimar/Meta-Llama-3-8B-Instruct-4bit | 1 | main | 1,519 |
| model | LeroyDyer/LCARS_AI_StarTrek_Computer | 1 | main | 14,485,816,485 |
| model | DUAL-GPO/phi-2-gpo-newSFT-b0.001-15k-i1 | 1 | main | 171,307,159 |
| model | AlignmentResearch/robust_llm_pythia-410m_niki-052_imdb_gcg_seed-2 | 31 | adv-training-round-29, adv-training-round-28, adv-training-round-27, adv-training-round-26, adv-training-round-25, adv-training-round-24, adv-training-round-23, adv-training-round-22, adv-training-round-21, adv-training-round-20, adv-training-round-19, adv-training-round-18, adv-training-round-17, adv-training-round-16, adv-training-round-15, adv-training-round-14, adv-training-round-13, adv-training-round-12, adv-training-round-11, adv-training-round-10, adv-training-round-9, adv-training-round-8, adv-training-round-7, adv-training-round-6, adv-training-round-5, adv-training-round-4, adv-training-round-3, adv-training-round-2, adv-training-round-1, adv-training-round-0, main | 1,519 |
| model | LinxuanPastel/SuneoV333 | 1 | main | 106,183,676 |
| model | reach-vb/parler-expresso-spk-24-epc-g-12-w-100-v2 | 1 | main | 15,073,945,654 |
| model | jh51664jh/randommodel | 1 | main | 1,730 |
| model | kotokounity/Choi-Jungeun | 1 | main | 84,454,841 |
| model | bamartin1618/XiJinping | 1 | main | 11,272,381,514 |
| model | Duleesha/DepressionDetection | 1 | main | 134,242,328 |
| model | unrahul/TinyLlama-1.1B-Chat-v1.0-sym_int4 | 1 | main | 814,468,131 |
| model | unrahul/TinyLlama-1.1B-Chat-v1.0-asym_int4 | 1 | main | 846,793,764 |
| model | unrahul/TinyLlama-1.1B-Chat-v1.0-sym_int5 | 1 | main | 943,770,659 |
| model | unrahul/TinyLlama-1.1B-Chat-v1.0-asym_int5 | 1 | main | 976,096,292 |
| model | unrahul/TinyLlama-1.1B-Chat-v1.0-sym_int8 | 1 | main | 1,364,003,875 |
| model | unrahul/TinyLlama-1.1B-Chat-v1.0-nf3 | 1 | main | 685,165,598 |
| model | unrahul/TinyLlama-1.1B-Chat-v1.0-nf4 | 1 | main | 814,468,126 |
| model | unrahul/TinyLlama-1.1B-Chat-v1.0-fp4 | 1 | main | 814,468,126 |
| model | unrahul/TinyLlama-1.1B-Chat-v1.0-fp8 | 1 | main | 1,364,003,870 |
| model | unrahul/TinyLlama-1.1B-Chat-v1.0-fp8_e4m3 | 1 | main | 1,364,003,875 |
| model | unrahul/TinyLlama-1.1B-Chat-v1.0-fp8_e5m2 | 1 | main | 1,364,003,875 |
| model | unrahul/TinyLlama-1.1B-Chat-v1.0-fp16 | 1 | main | 2,202,517,087 |
| model | DUAL-GPO/phi-2-gpo-newSFT-b0.001-30k-i1 | 1 | main | 171,411,832 |
| model | unrahul/TinyLlama-1.1B-Chat-v1.0-bf16 | 1 | main | 2,202,517,088 |
| model | unrahul/TinyLlama-1.1B-Chat-v1.0-mixed_fp4 | 1 | main | 814,468,132 |
| model | unrahul/TinyLlama-1.1B-Chat-v1.0-mixed_fp8 | 1 | main | 1,364,003,876 |
| model | unrahul/dolly-v2-3b-sym_int4 | 1 | main | 1,925,255,627 |
| model | unrahul/dolly-v2-3b-asym_int4 | 1 | main | 2,007,921,228 |
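
Reading the table: each row describes one Hugging Face Hub model repository, `branches` lists its git branches, and `main_branch_size` appears to be the total size of the `main` revision in bytes. The pipeline that produced this dataset is not documented here, so the following is only a minimal, non-authoritative sketch of how one row could be recomputed with the `huggingface_hub` client, under the assumption that `main_branch_size` is the sum of file sizes on `main`:

```python
from huggingface_hub import HfApi

api = HfApi()
repo_id = "unrahul/CodeLlama-7b-hf-asym_int5"  # any id from the table

# Branch names for the repo (the `branches` column).
refs = api.list_repo_refs(repo_id, repo_type="model")
branches = [branch.name for branch in refs.branches]

# Assumption: `main_branch_size` is the sum of file sizes (bytes) at the
# `main` revision; `files_metadata=True` populates per-file sizes.
info = api.model_info(repo_id, revision="main", files_metadata=True)
main_branch_size = sum(f.size or 0 for f in info.siblings)

row = {
    "type": "model",
    "id": repo_id,
    "num_branches": len(branches),
    "branches": branches,
    "main_branch_size": main_branch_size,
}
print(row)
```

The recomputed values may drift from the table above as repositories change on the Hub; the figures shown are a snapshot.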