| type (stringclasses 1) | id (string, length 5–122) | num_branches (int64, 1–1.76k) | branches (list, length 1–1.76k) | main_branch_size (int64, 0–32,943B) |
|---|---|---|---|---|
| model | gbueno86/Meta-Llama-3-120b-LumiLumimaid-GGUF | 1 | ["main"] | 118,340,526,776 |
| model | jproboszcz/opus-mt-en-pl | 1 | ["main"] | 224,965,417 |
| model | lfmatosmelo/gpt2-sm-metaphor-list-source-domain-prediction | 1 | ["main"] | 501,178,549 |
| model | PlayerBPlaytime/other-models | 1 | ["main"] | 464,183,863 |
| model | zainalikhokhar/llama2_default | 1 | ["main"] | 7,480 |
| model | stafdif/Monna2 | 1 | ["main"] | 19,000,431 |
| model | bhanuk31/spacy-setfit-finetuned-de | 1 | ["main"] | 197,533,251 |
| model | lfmatosmelo/gpt2-sm-metaphor-list-target-domain-prediction | 1 | ["main"] | 501,178,549 |
| model | aniruddh10124/code-search-net-tokenizer | 1 | ["main"] | 3,469,031 |
| model | OnlyCheeini/outputs | 1 | ["main"] | 170,132,208 |
| model | Elalimy/english_spelling_correction | 1 | ["main"] | 561,366,560 |
| model | EthanRhys/Blaze-the-Cat-Current | 1 | ["main"] | 64,362,611 |
| model | RichardErkhov/PygmalionAI_-_pygmalion-2-13b-gguf | 1 | ["main"] | 157,102,108,378 |
| model | lfmatosmelo/gpt2-sm-lcc-en-subset-source-domain-prediction | 1 | ["main"] | 501,178,549 |
| model | RichardErkhov/h2oai_-_h2o-danube2-1.8b-base-4bits | 1 | ["main"] | 1,268,154,659 |
| model | RichardErkhov/h2oai_-_h2o-danube2-1.8b-base-8bits | 1 | ["main"] | 1,999,712,971 |
| model | abdullahT/Tweet_sentiment | 1 | ["main"] | 134,276,012 |
| model | waxberry/model_out | 1 | ["main"] | 14,571,894,066 |
| model | lfmatosmelo/gpt2-sm-lcc-en-subset-target-domain-prediction | 1 | ["main"] | 501,178,549 |
| model | RichardErkhov/h2oai_-_h2o-danube2-1.8b-base-gguf | 1 | ["main"] | 22,389,019,164 |
| model | Miyuutsu/CuteCore_XL | 1 | ["main"] | 48,566,366,277 |
| model | nicknochnack/manimadapter | 1 | ["main"] | 1,519 |
| model | blockblockblock/Dark-Miqu-70B-bpw4.6-exl2 | 1 | ["main"] | 40,096,536,977 |
| model | kingron/rons_models | 1 | ["main"] | 1,519 |
| model | L010101/opt-6.7b-lora | 1 | ["main"] | 27,287,496 |
| model | lfmatosmelo/gpt2-sm-lcc-en-subset-source-lexeme-prediction | 1 | ["main"] | 501,178,549 |
| model | ilanaliouchouche/gte-base-lazy-teacher | 1 | ["main"] | 550,514,476 |
| model | vg055/xlm-roberta-base-finetuned-IberAuTexTification2024-5fCV-2epo-task2-v1 | 1 | ["main"] | 1,134,493,285 |
| model | AlignmentResearch/robust_llm_pythia-410m_niki-046_enronspam_random-token-1280_seed-1 | 31 | ["adv-training-round-29" … "adv-training-round-0", "main"] | 1,519 |
| model | MhmdSyd/AceGPT-V1.5-13B-Chat_GGUF | 1 | ["main"] | 66,223,783,708 |
| model | lfmatosmelo/gpt2-sm-lcc-en-subset-target-lexeme-prediction | 1 | ["main"] | 501,178,549 |
| model | solidrust/DeepMagic-Coder-7b-Alt-AWQ | 1 | ["main"] | 3,894,965,019 |
| model | crrodrvi/First_Order_Motion | 1 | ["main"] | 832,427,426 |
| model | lfmatosmelo/gpt2-sm-trofi-classification | 1 | ["main"] | 501,178,449 |
| model | lfmatosmelo/gpt2-sm-vua-pos-classification | 1 | ["main"] | 501,178,453 |
| model | mdosama39/banglat5-finetuned-headlineBT5_1000_batch8_Normal | 1 | ["main"] | 994,232,717 |
| model | lfmatosmelo/gpt2-sm-vua-verb-classification | 1 | ["main"] | 501,178,455 |
| model | kapliff89/distilbert-base-uncased-finetuned-imdb | 1 | ["main"] | 268,919,569 |
| model | raiyan007/whisper-base-common16.1 | 1 | ["main"] | 292,629,614 |
| model | iujinasena/tinyllama-1_1b-conv | 1 | ["main"] | 2,202,471,508 |
| model | jason89923/emo_lora_model | 1 | ["main"] | 167,835,068 |
| model | anandanand84/phi3-4k-lowercase | 1 | ["main"] | 15,286,213,561 |
| model | solidrust/Everyone-LLM-7b-Base-AWQ | 1 | ["main"] | 4,153,175,879 |
| model | jadapeart/inspirobot | 1 | ["main"] | 2,287 |
| model | bdpoff/phi-3-mini-json-editor | 1 | ["main"] | 7,642,792,852 |
| model | meidoroido/riyonaAI | 1 | ["main"] | 1,353,806,440 |
| model | paulo037/stable-code-instruct-3b-spider | 1 | ["main"] | 27,550,763,821 |
| model | RichardErkhov/Locutusque_-_Orca-2-13b-SFT_v5-4bits | 1 | ["main"] | 7,795,761,543 |
| model | abc88767/2c21 | 1 | ["main"] | 3,295,853,121 |
| model | darmendarizp/prediction_sentiment | 1 | ["main"] | 1,519 |
| model | solidrust/llama-3-stinky-v2-8B-AWQ | 1 | ["main"] | 5,737,143,396 |
| model | yifanxie/overjoyed-koel-1-1-1 | 1 | ["main"] | 5,034,193,117 |
| model | paulrojasg/bert-finetuned-ner-1 | 1 | ["main"] | 432,084,832 |
| model | EthanRhys/Omochao-Current | 1 | ["main"] | 59,770,558 |
| model | nem012/r327b | 1 | ["main"] | 14,485,820,103 |
| model | RichardErkhov/Locutusque_-_Orca-2-13b-SFT_v5-gguf | 1 | ["main"] | 157,102,554,914 |
| model | Zeus04/lora_model | 1 | ["main"] | 176,971,840 |
| model | RichardErkhov/MediaTek-Research_-_Breeze-7B-Instruct-v1_0-gguf | 1 | ["main"] | 91,064,115,602 |
| model | awang11/models | 1 | ["main"] | 1,519 |
| model | harshal-11/Llama-7b-PoliticalBias-Finetune | 1 | ["main"] | 1,519 |
| model | Zannatul/google-vit-base-patch16-384-in21k-batch_16_epoch_3_classes_24_final_withAug_12th_May | 1 | ["main"] | 1,519 |
| model | johnatanebonilla/parse | 1 | ["main"] | 1,519 |
| model | OwOpeepeepoopoo/test11c | 1 | ["main"] | 3,295,853,123 |
| model | AlignmentResearch/robust_llm_pythia-410m_niki-046_enronspam_random-token-1280_seed-2 | 31 | ["adv-training-round-29" … "adv-training-round-0", "main"] | 1,519 |
| model | DataScientist1122/t5-finetune-medquad | 1 | ["main"] | 892,470,668 |
| model | roshinishetty333/llama-2-7b-lora-tuned | 1 | ["main"] | 33,578,974 |
| model | solidrust/Llama-3-Soliloquy-8B-v2-AWQ | 1 | ["main"] | 5,737,143,596 |
| model | Druluth/Llama-2-7b-chat-mto-5epocas | 1 | ["main"] | 13,479,328,408 |
| model | awang11/vggsound_diffusion | 1 | ["main"] | 14,453,027,682 |
| model | abdullahT/AT-Llama-2-7b-chat-Tweet-Sentiment | 1 | ["main"] | 134,242,395 |
| model | mdosama39/banglat5-finetuned-headlineBT5_1000_WithIp_Normal | 1 | ["main"] | 994,232,718 |
| model | abc88767/22c21 | 1 | ["main"] | 3,295,853,063 |
| model | mudler/LocalAI-Llama3-8b-Function-Call-v0.2 | 1 | ["main"] | 16,069,720,983 |
| model | yifanxie/ubiquitous-urchin-1 | 1 | ["main"] | 5,034,192,002 |
| model | zzczczzc/flash | 1 | ["main"] | 120,634,431 |
| model | IntervitensInc/kek_mk4 | 1 | ["main"] | 3,295,875,156 |
| model | quangtqv/crypto_Gliner_12_5_2024 | 1 | ["main"] | 1,782,045,113 |
| model | sebylg5/Cry | 1 | ["main"] | 1,519 |
| model | Xindun/Ma | 1 | ["main"] | 1,550 |
| model | Druluth/Llama-2-7b-chat-mto-12epocas | 1 | ["main"] | 13,479,328,408 |
| model | mudler/LocalAI-Llama3-8b-Function-Call-v0.2-GGUF | 1 | ["main"] | 61,533,633,051 |
| model | sreguyal/results | 1 | ["main"] | 1,519 |
| model | RichardErkhov/beomi_-_Llama-3-Open-Ko-8B-Instruct-preview-gguf | 1 | ["main"] | 99,035,555,816 |
| model | kanaluvu/bloomz-1b1-finetuned | 1 | ["main"] | 1,563,994,107 |
| model | Xindun/your_model | 1 | ["main"] | 16,069,725,302 |
| model | abc88767/3sc21 | 1 | ["main"] | 3,295,853,063 |
| model | mudler/Asinello-Minerva-3B-v0.1 | 1 | ["main"] | 5,791,445,551 |
| model | diegodayan/repo_name | 1 | ["main"] | 1,519 |
| model | Chastimban/asdASD | 1 | ["main"] | 1,552 |
| model | AlignmentResearch/robust_llm_pythia-1b_niki-046_enronspam_random-token-1280_seed-1 | 31 | ["adv-training-round-29" … "adv-training-round-0", "main"] | 1,519 |
| model | thorirhrafn/GPT1B_domar_RLHF_e1 | 1 | ["main"] | 50,345,320 |
| model | Druluth/Llama-2-7b-chat-mto-2epocas | 1 | ["main"] | 13,479,328,408 |
| model | johnatanebonilla/lama-2-7b-parse_AnCora_sample-GGUF | 1 | ["main"] | 1,519 |
| model | Selinebee/Shybaker | 1 | ["main"] | 1,519 |
| model | mudler/Asinello-Minerva-3B-v0.1-GGUF | 1 | ["main"] | 21,906,837,707 |
| model | furrutiav/boolqa_probing_vae_embds | 1 | ["main"] | 9,294 |
| model | GraydientPlatformAPI/epicrealism7 | 1 | ["main"] | 6,941,387,302 |
| model | Yuki20/unsloth_Alpaca_8b_120s | 1 | ["main"] | 167,835,064 |
| model | GraydientPlatformAPI/fantasticmix-k2 | 1 | ["main"] | 6,941,387,302 |
| model | ivykopal/english_adapter_wikiann_prompt_100k | 1 | ["main"] | 310,620 |
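The rows above are only a preview of the underlying table. As a quick orientation to the schema (`type`, `id`, `num_branches`, `branches`, `main_branch_size`), here is a minimal sketch of how the same columns could be queried with the `datasets` library. The dataset id below is a hypothetical placeholder, since the preview does not show the actual repository name, and the column names are assumed to match the header above.

```python
# Minimal sketch: load the repo-metadata table and run two simple queries
# over the columns shown in the preview.
from datasets import load_dataset

# HYPOTHETICAL placeholder -- replace with the real dataset repository id.
DATASET_ID = "some-org/hub-repo-metadata"

ds = load_dataset(DATASET_ID, split="train")

# Repos with more than one branch (e.g. the AlignmentResearch
# adversarial-training runs, which carry 31 branches each).
multi_branch = ds.filter(lambda row: row["num_branches"] > 1)
print(multi_branch["id"])

# Total size across all main branches, in the same unit as main_branch_size.
total_size = sum(ds["main_branch_size"])
print(f"total main-branch size: {total_size:,}")
```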