| Column | Type |
|---|---|
| sha | null |
| last_modified | null |
| library_name | string (154 classes) |
| text | string (1–900k chars) |
| metadata | string (2–348k chars) |
| pipeline_tag | string (45 classes) |
| id | string (5–122 chars) |
| tags | list (1–1.84k items) |
| created_at | string (25 chars) |
| arxiv | list (0–201 items) |
| languages | list (0–1.83k items) |
| tags_str | string (17–9.34k chars) |
| text_str | string (0–389k chars) |
| text_lists | list (0–722 items) |
| processed_texts | list (1–723 items) |
| tokens_length | list (1–723 items) |
| input_texts | list (1–61 items) |
| embeddings | list (768 floats) |

Each sample row below lists these columns in order, separated by `|`.
null | null |
transformers
|
This model is a pre-trained **XLNet** with 12 layers.
It accompanies the paper: SBERT-WK: A Sentence Embedding Method by Dissecting BERT-based Word Models.
Project Page: [SBERT-WK](https://github.com/BinWang28/SBERT-WK-Sentence-Embedding)
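A minimal sketch of loading this checkpoint with the Hugging Face `transformers` library, assuming only the model id shown in this row; the SBERT-WK sentence-embedding procedure itself lives in the linked project repository and is not reproduced here:

```python
# Minimal sketch: load the 12-layer XLNet checkpoint named in this row and
# expose per-layer hidden states, which SBERT-WK dissects into a sentence embedding.
import torch
from transformers import AutoTokenizer, AutoModel

tokenizer = AutoTokenizer.from_pretrained("binwang/xlnet-base-cased")
model = AutoModel.from_pretrained("binwang/xlnet-base-cased", output_hidden_states=True)

inputs = tokenizer("A short example sentence.", return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)

# One hidden-state tensor per layer (embedding layer + 12 transformer layers).
print(len(outputs.hidden_states), outputs.hidden_states[-1].shape)
```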
|
{}
|
text-generation
|
binwang/xlnet-base-cased
|
[
"transformers",
"pytorch",
"safetensors",
"xlnet",
"text-generation",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #safetensors #xlnet #text-generation #autotrain_compatible #endpoints_compatible #region-us
|
This model is a pre-trained XLNet with 12 layers.
It accompanies the paper: SBERT-WK: A Sentence Embedding Method by Dissecting BERT-based Word Models.
Project Page: SBERT-WK
|
[] |
[
"TAGS\n#transformers #pytorch #safetensors #xlnet #text-generation #autotrain_compatible #endpoints_compatible #region-us \n"
] |
[
42
] |
[
"passage: TAGS\n#transformers #pytorch #safetensors #xlnet #text-generation #autotrain_compatible #endpoints_compatible #region-us \n"
] |
[768-dimensional embedding vector] |
null | null |
transformers
|
[bioformer-8L](https://huggingface.co/bioformers/bioformer-8L) fine-tuned on the [BC2GM](https://doi.org/10.1186/gb-2008-9-s2-s2) dataset for 10 epochs.
This fine-tuned model can be used for NER of genes/proteins.
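A minimal sketch of running that NER step with the `transformers` pipeline API; the example sentence is illustrative, not taken from BC2GM:

```python
# Minimal sketch: gene/protein NER with the fine-tuned checkpoint named in this row.
from transformers import pipeline

ner = pipeline(
    "token-classification",
    model="bioformers/bioformer-8L-bc2gm",
    aggregation_strategy="simple",  # merge sub-word pieces into whole entity spans
)

text = "Mutations in BRCA1 and BRCA2 increase the risk of breast cancer."
for entity in ner(text):
    print(entity["entity_group"], entity["word"], round(entity["score"], 3))
```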
|
{"language": ["en"], "license": "apache-2.0", "pipeline_tag": "token-classification"}
|
token-classification
|
bioformers/bioformer-8L-bc2gm
|
[
"transformers",
"pytorch",
"safetensors",
"bert",
"token-classification",
"en",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #safetensors #bert #token-classification #en #license-apache-2.0 #autotrain_compatible #endpoints_compatible #region-us
|
bioformer-8L fine-tuned on the BC2GM dataset for 10 epochs.
This fine-tuned model can be used for NER of genes/proteins.
|
[] |
[
"TAGS\n#transformers #pytorch #safetensors #bert #token-classification #en #license-apache-2.0 #autotrain_compatible #endpoints_compatible #region-us \n"
] |
[
52
] |
[
"passage: TAGS\n#transformers #pytorch #safetensors #bert #token-classification #en #license-apache-2.0 #autotrain_compatible #endpoints_compatible #region-us \n"
] |
[768-dimensional embedding vector] |
null | null |
transformers
|
[bioformer-cased-v1.0](https://huggingface.co/bioformers/bioformer-cased-v1.0) fine-tuned on the [MNLI](https://cims.nyu.edu/~sbowman/multinli/) dataset for 2 epochs.
The fine-tuning process was performed on two NVIDIA GeForce GTX 1080 Ti GPUs (11GB). The parameters are:
```
max_seq_length=512
per_device_train_batch_size=16
total train batch size (w. parallel, distributed & accumulation) = 32
learning_rate=3e-5
```
## Evaluation results
eval_accuracy = 0.803973
## Speed
In our experiments, the inference speed of Bioformer is 3x as fast as BERT-base/BioBERT/PubMedBERT, and is 40% faster than DistilBERT.
## More information
The Multi-Genre Natural Language Inference Corpus is a crowdsourced collection of sentence pairs with textual entailment annotations. Given a premise sentence and a hypothesis sentence, the task is to predict whether the premise entails the hypothesis (entailment), contradicts the hypothesis (contradiction), or neither (neutral). The premise sentences are gathered from ten different sources, including transcribed speech, fiction, and government reports. The authors of the benchmark use the standard test set, for which they obtained private labels from the RTE authors, and evaluate on both the matched (in-domain) and mismatched (cross-domain) sections. They also use and recommend the SNLI corpus as 550k examples of auxiliary training data. (source: https://huggingface.co/datasets/glue)
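A minimal sketch of NLI-style inference with this checkpoint via the `transformers` text-classification pipeline; the premise/hypothesis pair is illustrative, and the label names returned depend on the checkpoint's config (they may appear as LABEL_0/1/2 rather than entailment/neutral/contradiction):

```python
# Minimal sketch: premise/hypothesis classification with the MNLI fine-tuned
# checkpoint named in this row. Label naming depends on the model's config.
from transformers import pipeline

nli = pipeline("text-classification", model="bioformers/bioformer-8L-mnli")

premise = "The patient was treated with aspirin after the diagnosis."
hypothesis = "The patient received medication."
print(nli({"text": premise, "text_pair": hypothesis}))
```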
|
{}
|
text-classification
|
bioformers/bioformer-8L-mnli
|
[
"transformers",
"pytorch",
"safetensors",
"bert",
"text-classification",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #safetensors #bert #text-classification #autotrain_compatible #endpoints_compatible #region-us
|
bioformer-cased-v1.0 fine-tuned on the MNLI dataset for 2 epochs.
The fine-tuning process was performed on two NVIDIA GeForce GTX 1080 Ti GPUs (11GB). The parameters are:
## Evaluation results
eval_accuracy = 0.803973
## Speed
In our experiments, the inference speed of Bioformer is 3x as fast as BERT-base/BioBERT/PubMedBERT, and is 40% faster than DistilBERT.
## More information
The Multi-Genre Natural Language Inference Corpus is a crowdsourced collection of sentence pairs with textual entailment annotations. Given a premise sentence and a hypothesis sentence, the task is to predict whether the premise entails the hypothesis (entailment), contradicts the hypothesis (contradiction), or neither (neutral). The premise sentences are gathered from ten different sources, including transcribed speech, fiction, and government reports. The authors of the benchmark use the standard test set, for which they obtained private labels from the RTE authors, and evaluate on both the matched (in-domain) and mismatched (cross-domain) sections. They also use and recommend the SNLI corpus as 550k examples of auxiliary training data. (source: URL
|
[
"## Evaluation results\n\neval_accuracy = 0.803973",
"## Speed\n\nIn our experiments, the inference speed of Bioformer is 3x as fast as BERT-base/BioBERT/PubMedBERT, and is 40% faster than DistilBERT.",
"## More information\nThe Multi-Genre Natural Language Inference Corpus is a crowdsourced collection of sentence pairs with textual entailment annotations. Given a premise sentence and a hypothesis sentence, the task is to predict whether the premise entails the hypothesis (entailment), contradicts the hypothesis (contradiction), or neither (neutral). The premise sentences are gathered from ten different sources, including transcribed speech, fiction, and government reports. The authors of the benchmark use the standard test set, for which they obtained private labels from the RTE authors, and evaluate on both the matched (in-domain) and mismatched (cross-domain) section. They also uses and recommend the SNLI corpus as 550k examples of auxiliary training data. (source: URL"
] |
[
"TAGS\n#transformers #pytorch #safetensors #bert #text-classification #autotrain_compatible #endpoints_compatible #region-us \n",
"## Evaluation results\n\neval_accuracy = 0.803973",
"## Speed\n\nIn our experiments, the inference speed of Bioformer is 3x as fast as BERT-base/BioBERT/PubMedBERT, and is 40% faster than DistilBERT.",
"## More information\nThe Multi-Genre Natural Language Inference Corpus is a crowdsourced collection of sentence pairs with textual entailment annotations. Given a premise sentence and a hypothesis sentence, the task is to predict whether the premise entails the hypothesis (entailment), contradicts the hypothesis (contradiction), or neither (neutral). The premise sentences are gathered from ten different sources, including transcribed speech, fiction, and government reports. The authors of the benchmark use the standard test set, for which they obtained private labels from the RTE authors, and evaluate on both the matched (in-domain) and mismatched (cross-domain) section. They also uses and recommend the SNLI corpus as 550k examples of auxiliary training data. (source: URL"
] |
[
41,
15,
45,
187
] |
[
"passage: TAGS\n#transformers #pytorch #safetensors #bert #text-classification #autotrain_compatible #endpoints_compatible #region-us \n## Evaluation results\n\neval_accuracy = 0.803973## Speed\n\nIn our experiments, the inference speed of Bioformer is 3x as fast as BERT-base/BioBERT/PubMedBERT, and is 40% faster than DistilBERT.## More information\nThe Multi-Genre Natural Language Inference Corpus is a crowdsourced collection of sentence pairs with textual entailment annotations. Given a premise sentence and a hypothesis sentence, the task is to predict whether the premise entails the hypothesis (entailment), contradicts the hypothesis (contradiction), or neither (neutral). The premise sentences are gathered from ten different sources, including transcribed speech, fiction, and government reports. The authors of the benchmark use the standard test set, for which they obtained private labels from the RTE authors, and evaluate on both the matched (in-domain) and mismatched (cross-domain) section. They also uses and recommend the SNLI corpus as 550k examples of auxiliary training data. (source: URL"
] |
[768-dimensional embedding vector] |
null | null |
transformers
|
[bioformer-8L](https://huggingface.co/bioformers/bioformer-8L) fine-tuned on the [NCBI Disease](https://doi.org/10.1016/j.jbi.2013.12.006) dataset for 10 epochs.
This fine-tuned model can be used for named entity recognition (NER) of diseases.
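As a quick usage sketch (not part of the original card's evaluation), the snippet below loads this checkpoint through the `transformers` token-classification pipeline; the example sentence and the `aggregation_strategy` choice are illustrative assumptions.
```python
# Minimal sketch: disease NER with this checkpoint via the transformers pipeline.
# The input sentence is illustrative; aggregation_strategy="simple" merges
# word-piece tokens into entity spans (label names come from the model config).
from transformers import pipeline

ner = pipeline(
    "token-classification",
    model="bioformers/bioformer-8L-ncbi-disease",
    aggregation_strategy="simple",
)

text = "The patient was diagnosed with type 2 diabetes mellitus and hypertension."
for entity in ner(text):
    print(entity["entity_group"], entity["word"], round(entity["score"], 3))
```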
|
{"language": ["en"], "license": "apache-2.0"}
|
token-classification
|
bioformers/bioformer-8L-ncbi-disease
|
[
"transformers",
"pytorch",
"safetensors",
"bert",
"token-classification",
"en",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #safetensors #bert #token-classification #en #license-apache-2.0 #autotrain_compatible #endpoints_compatible #region-us
|
bioformer-8L fine-tuned on the NCBI Disease dataset for 10 epochs.
This fine-tuned model can be used for named entity recognition (NER) of diseases.
|
[] |
[
"TAGS\n#transformers #pytorch #safetensors #bert #token-classification #en #license-apache-2.0 #autotrain_compatible #endpoints_compatible #region-us \n"
] |
[
52
] |
[
"passage: TAGS\n#transformers #pytorch #safetensors #bert #token-classification #en #license-apache-2.0 #autotrain_compatible #endpoints_compatible #region-us \n"
] |
[
-0.059595197439193726,
0.10631763190031052,
-0.007629459258168936,
0.0244675874710083,
0.07692740112543106,
-0.009490981698036194,
0.1120014414191246,
0.09707333892583847,
0.016386302188038826,
-0.058308474719524384,
0.14559929072856903,
0.2322559505701065,
-0.02104523777961731,
0.09640208631753922,
-0.0772639587521553,
-0.18908371031284332,
0.11321170628070831,
0.031629133969545364,
-0.027390873059630394,
0.11348471790552139,
0.11655383557081223,
-0.059609029442071915,
0.03608550876379013,
-0.010012947022914886,
-0.050080087035894394,
0.005900037940591574,
0.06558991968631744,
-0.1263318806886673,
0.10472848266363144,
0.010667749680578709,
0.1416918933391571,
0.048578500747680664,
-0.010780079290270805,
-0.17827226221561432,
0.01685168221592903,
0.04325312748551369,
-0.05701589956879616,
0.06061201170086861,
0.06445474177598953,
-0.042393025010824203,
-0.03439084067940712,
0.04937899485230446,
0.013264483772218227,
0.043994951993227005,
-0.06446723639965057,
-0.2367478609085083,
-0.07986585795879364,
0.09175827354192734,
0.08877503871917725,
0.06225430965423584,
0.04686594381928444,
0.2166328728199005,
-0.1363559067249298,
0.07040996849536896,
0.10097770392894745,
-0.3378860056400299,
0.008846734650433064,
0.06439774483442307,
0.011660018935799599,
-0.020830940455198288,
-0.017887143418192863,
0.01634209230542183,
0.05087721347808838,
0.013740013353526592,
0.07426079362630844,
-0.04597567394375801,
-0.12318819016218185,
0.018208418041467667,
-0.07754059135913849,
-0.061774663627147675,
0.1946762651205063,
0.0051047285087406635,
0.01380863320082426,
-0.016571272164583206,
-0.09115388244390488,
0.03032490611076355,
-0.010035322979092598,
0.015539965592324734,
0.015580558218061924,
0.07992756366729736,
0.07526838034391403,
0.02796023152768612,
-0.13954363763332367,
0.0038283425383269787,
-0.19779418408870697,
0.1512414664030075,
0.03748834878206253,
0.08667577058076859,
-0.12928499281406403,
0.06433986872434616,
0.04397663101553917,
-0.1079297736287117,
0.013299165293574333,
-0.08847339451313019,
0.10585624724626541,
0.006872594356536865,
-0.0216996930539608,
0.10459595918655396,
0.14807040989398956,
0.24554948508739471,
0.008490723557770252,
0.0006220643990673125,
-0.03798950836062431,
0.09819090366363525,
-0.02439701557159424,
0.06169116124510765,
0.004773916210979223,
0.0029138021636754274,
0.12058168649673462,
-0.08234737813472748,
0.07648736983537674,
-0.015147379599511623,
-0.09438566118478775,
-0.008250034414231777,
0.07007094472646713,
0.14284799993038177,
0.03614111989736557,
0.050720322877168655,
-0.05599583312869072,
0.025464754551649094,
0.17661522328853607,
-0.0716419592499733,
0.0030076471157372,
0.008878698572516441,
0.04219090938568115,
0.02228664979338646,
0.021391183137893677,
0.030783358961343765,
-0.03333854302763939,
0.11593122035264969,
-0.056953441351652145,
-0.034543007612228394,
-0.020107511430978775,
-0.018121900036931038,
0.08384237438440323,
-0.09292449057102203,
0.07103262841701508,
-0.17778323590755463,
-0.12789620459079742,
0.054290201514959335,
0.05843212455511093,
0.03949759155511856,
-0.06971442699432373,
0.0627439096570015,
-0.020450809970498085,
-0.011543171480298042,
-0.09382392466068268,
-0.04808494448661804,
-0.08693333715200424,
0.06649096310138702,
-0.06748238950967789,
-0.005129937082529068,
-0.12065643072128296,
0.036540694534778595,
-0.15144887566566467,
0.020572766661643982,
-0.0577455498278141,
-0.0837194174528122,
-0.11143656820058823,
0.1909424215555191,
-0.049468033015728,
-0.051477499306201935,
0.023392388597130775,
-0.0006517790025100112,
-0.04389803856611252,
0.10010425001382828,
-0.05719350650906563,
-0.047356847673654556,
0.19699619710445404,
-0.15346479415893555,
-0.19086122512817383,
0.0805756002664566,
0.018746525049209595,
-0.01342928409576416,
0.0699700340628624,
0.12589630484580994,
0.09753774106502533,
-0.04877757653594017,
0.05581682175397873,
0.1286648064851761,
-0.10490857809782028,
-0.16889147460460663,
0.03976103290915489,
-0.03041401319205761,
-0.1507398933172226,
0.056760575622320175,
-0.027175601571798325,
0.09410244226455688,
-0.02349107526242733,
-0.08472321182489395,
-0.06348812580108643,
-0.061779458075761795,
0.03498120605945587,
0.023742884397506714,
0.03657194599509239,
-0.08156696707010269,
-0.011193453334271908,
-0.047948677092790604,
0.04690099135041237,
0.04283229634165764,
0.023988088592886925,
-0.10232236236333847,
0.08006595820188522,
0.0033755141776055098,
0.012584766373038292,
-0.12195257097482681,
-0.06227247044444084,
0.021048063412308693,
-0.02183537557721138,
-0.04614357277750969,
0.06171775981783867,
0.05298782140016556,
-0.04034341126680374,
0.003946306183934212,
-0.04714684933423996,
0.14881442487239838,
0.0766487866640091,
-0.011257418431341648,
-0.13094469904899597,
0.030676545575261116,
-0.054146356880664825,
0.03355014696717262,
-0.01124284416437149,
0.023657727986574173,
0.0611737035214901,
0.12396371364593506,
-0.02489505149424076,
0.09484673291444778,
-0.03664865344762802,
-0.000142398159368895,
-0.05330050364136696,
-0.009033212438225746,
0.11087857931852341,
0.03429315239191055,
-0.059645961970090866,
0.1477213203907013,
-0.07704055309295654,
0.34404459595680237,
0.19459190964698792,
-0.16285039484500885,
0.05102197453379631,
-0.005693621933460236,
-0.03512963280081749,
-0.014240749180316925,
0.026536915451288223,
0.03738819435238838,
-0.024659400805830956,
0.009891473688185215,
0.13615724444389343,
-0.05331426113843918,
-0.05181577056646347,
-0.004111792892217636,
-0.07191430032253265,
-0.01862928457558155,
0.034959230571985245,
0.11031261831521988,
-0.18773338198661804,
0.1949310451745987,
0.33231258392333984,
-0.010407802648842335,
0.02769985795021057,
-0.09829138219356537,
0.023424124345183372,
0.04188505932688713,
-0.010857561603188515,
-0.025153521448373795,
0.029730141162872314,
-0.1020900160074234,
0.02931899018585682,
0.09101535379886627,
0.037710871547460556,
0.027564043179154396,
-0.14617547392845154,
-0.04594927281141281,
0.0005768557311967015,
0.002950448077172041,
-0.022592024877667427,
0.05258798226714134,
0.004609347321093082,
0.09558837860822678,
-0.03956138715147972,
-0.15612749755382538,
0.13214664161205292,
0.0009200698114000261,
-0.06517138332128525,
0.1568019688129425,
-0.1592644453048706,
-0.2499362677335739,
-0.09997566789388657,
-0.11091836541891098,
-0.016624461859464645,
0.011807420291006565,
0.10858207195997238,
-0.05040875822305679,
-0.07083499431610107,
0.005267674569040537,
-0.08259241282939911,
0.011951121501624584,
0.04637390747666359,
-0.022241583094000816,
0.06026598438620567,
0.03687203302979469,
-0.10821034759283066,
-0.05574595555663109,
0.007621741853654385,
-0.06301957368850708,
0.09565864503383636,
-0.03548980504274368,
0.0565580315887928,
0.1262890100479126,
0.009045802988111973,
0.004031754098832607,
-0.03719327971339226,
0.13278941810131073,
-0.02155284211039543,
0.024742111563682556,
0.21494249999523163,
-0.05969719961285591,
0.08979412168264389,
0.16621504724025726,
0.04078880324959755,
-0.04664229974150658,
0.014198161661624908,
-0.05004282668232918,
-0.08011195808649063,
-0.23994417488574982,
-0.11594166606664658,
-0.06807024031877518,
0.06262688338756561,
0.044930994510650635,
0.09121332317590714,
0.1185266375541687,
0.10531827062368393,
-0.012906715273857117,
-0.04558880999684334,
-0.002682699589058757,
0.056127361953258514,
0.21051685512065887,
-0.010734698735177517,
0.11484015733003616,
-0.09072810411453247,
-0.07946362346410751,
0.09703280031681061,
0.08375775814056396,
0.10550329834222794,
0.11143471300601959,
-0.01877782680094242,
0.07917140424251556,
0.2380746752023697,
0.09401649981737137,
0.14397072792053223,
0.0060878172516822815,
-0.028690529987215996,
-0.031617820262908936,
-0.011649219319224358,
-0.05533507838845253,
0.025410039350390434,
-0.046027153730392456,
-0.08183522522449493,
-0.02575746551156044,
-0.12770137190818787,
0.08474810421466827,
0.18414148688316345,
0.031717922538518906,
-0.16124606132507324,
0.019827721640467644,
0.08781170845031738,
-0.009311218746006489,
-0.026490410789847374,
0.11675476282835007,
-0.0708768367767334,
-0.0788249745965004,
0.10491454601287842,
-0.047420185059309006,
0.097123883664608,
-0.005846098996698856,
0.05215346813201904,
-0.004947523586452007,
-0.08903983980417252,
0.06545103341341019,
0.1297169029712677,
-0.23861956596374512,
0.21504756808280945,
-0.015931053087115288,
-0.02445845864713192,
-0.08135662227869034,
0.013686065562069416,
0.057181499898433685,
0.22974897921085358,
0.11625958979129791,
0.02436232939362526,
-0.11592810600996017,
-0.07512018829584122,
-0.07763414829969406,
0.043618958443403244,
0.01129867136478424,
0.0014053157065063715,
-0.05197974294424057,
-0.07176268100738525,
-0.01478944718837738,
0.019009049981832504,
-0.003704957664012909,
-0.042290687561035156,
-0.1050403043627739,
0.0327347069978714,
0.10741148889064789,
0.04168447479605675,
-0.08872824907302856,
-0.029534634202718735,
-0.1355004757642746,
0.17636659741401672,
-0.1088709831237793,
-0.0793815478682518,
-0.08503847569227219,
-0.15265773236751556,
0.05100608617067337,
-0.058765411376953125,
0.08026053011417389,
-0.08279121667146683,
0.006286883261054754,
-0.044234324246644974,
-0.18449975550174713,
0.10897718369960785,
-0.16726337373256683,
-0.07496177405118942,
-0.04734254628419876,
0.14166449010372162,
-0.09050673991441727,
-0.0010082157095894217,
0.04015079513192177,
0.002123621990904212,
-0.07761029154062271,
-0.11402487754821777,
-0.009571169503033161,
0.006474085617810488,
0.06321746110916138,
-0.010682391002774239,
-0.07764094322919846,
-0.0885096862912178,
0.02230287902057171,
-0.024300487712025642,
0.1825769692659378,
0.22545233368873596,
-0.08529490232467651,
0.12561868131160736,
0.23147578537464142,
-0.039628904312849045,
-0.3182145059108734,
-0.16395695507526398,
-0.16413220763206482,
-0.11094354093074799,
0.00946718268096447,
-0.08942510932683945,
0.16214993596076965,
0.07534264028072357,
-0.1102471724152565,
0.08382916450500488,
-0.13942518830299377,
-0.061759982258081436,
0.2562066614627838,
0.014020739123225212,
0.3100128769874573,
-0.1261322945356369,
-0.06293918192386627,
-0.04347341135144234,
-0.15158842504024506,
0.11952613294124603,
-0.09458768367767334,
0.018279504030942917,
-0.0028460531029850245,
-0.014908469282090664,
-0.0105310482904315,
-0.07361216843128204,
0.1151808649301529,
-0.032727889716625214,
0.05070047825574875,
-0.11032845079898834,
-0.009167310781776905,
0.06449837982654572,
-0.020154280588030815,
0.047452960163354874,
-0.10327593237161636,
0.054281916469335556,
-0.02973555028438568,
-0.01783146895468235,
-0.06745825707912445,
0.11551990360021591,
0.0035355251748114824,
-0.08237697184085846,
-0.02713950350880623,
-0.014035383239388466,
0.0032904164399951696,
-0.019548678770661354,
0.23817157745361328,
0.0533728264272213,
0.10620903968811035,
0.1415189802646637,
0.07724647223949432,
-0.19441430270671844,
-0.004892610013484955,
-0.0880972146987915,
-0.09032591432332993,
0.08275473117828369,
-0.08097861707210541,
0.07521849125623703,
0.09847386181354523,
-0.053555700927972794,
0.030957749113440514,
0.07722925394773483,
0.014392501674592495,
-0.07189072668552399,
0.13734029233455658,
-0.19336922466754913,
-0.0029275917913764715,
0.016645127907395363,
0.11135733127593994,
0.05746306851506233,
0.13255950808525085,
0.1132577583193779,
0.011508218944072723,
-0.03902217000722885,
0.00994537491351366,
0.038947444409132004,
-0.06459175050258636,
0.05133867263793945,
0.06503661721944809,
0.028675276786088943,
-0.1153540387749672,
0.0938890129327774,
0.005607145372778177,
-0.13422808051109314,
-0.049310486763715744,
0.03707799315452576,
-0.16499435901641846,
-0.1251576989889145,
0.012925009243190289,
0.042720671743154526,
-0.0956338495016098,
-0.13257832825183868,
-0.03245978429913521,
-0.1568082720041275,
0.050797589123249054,
0.13024407625198364,
0.13169004023075104,
0.10422632843255997,
0.014499234035611153,
-0.048640262335538864,
0.013356425799429417,
0.008865008130669594,
-0.06856595724821091,
0.030806060880422592,
-0.16222138702869415,
-0.05038931965827942,
0.02064080908894539,
0.09524060785770416,
-0.05986732617020607,
-0.0012853862717747688,
-0.10596384853124619,
0.03725794330239296,
-0.12649501860141754,
-0.01643923483788967,
-0.09022706001996994,
0.004841700196266174,
0.03337377309799194,
-0.09527548402547836,
-0.01847096160054207,
0.017560835927724838,
-0.10123640298843384,
0.012896465137600899,
0.01566278003156185,
0.04947790503501892,
-0.11697810143232346,
-0.055427901446819305,
0.08668754994869232,
-0.023936275392770767,
0.11237453669309616,
0.06576550751924515,
-0.07070298492908478,
0.09061338752508163,
-0.15505056083202362,
-0.09907937049865723,
0.10854674130678177,
0.03758507966995239,
0.035369597375392914,
-0.014077352359890938,
0.002548821968957782,
0.12766733765602112,
-0.041569482535123825,
0.039211712777614594,
0.045912981033325195,
-0.13729402422904968,
-0.0176172386854887,
0.01935543492436409,
-0.1300545185804367,
0.0009233326418325305,
-0.14588460326194763,
0.1408264935016632,
-0.0297873355448246,
0.20152471959590912,
-0.008935026824474335,
0.02818414941430092,
-0.06230417266488075,
0.01030105259269476,
-0.0493222177028656,
-0.1846861094236374,
-0.135090634226799,
-0.015834858641028404,
-0.04791738837957382,
-0.019033024087548256,
0.25928908586502075,
0.004040257539600134,
-0.04947160929441452,
0.07740054279565811,
0.02997889183461666,
0.021803302690386772,
0.03991980105638504,
0.21982316672801971,
0.04124981909990311,
-0.022249987348914146,
-0.09143567830324173,
-0.01731627993285656,
0.006730312015861273,
-0.17266257107257843,
0.058640364557504654,
0.10775097459554672,
0.027582576498389244,
0.03858624026179314,
0.043156567960977554,
0.0037038924638181925,
-0.13953903317451477,
-0.18088886141777039,
-0.013304623775184155,
0.06269197165966034,
0.03547661751508713,
0.09343685954809189,
0.13970057666301727,
-0.009570972062647343,
0.0016306423349305987,
-0.0555904395878315,
0.0031276573427021503,
-0.1811908781528473,
-0.11354430019855499,
-0.07511179894208908,
-0.1033962294459343,
0.008128470741212368,
-0.030051978304982185,
-0.038698285818099976,
0.10348473489284515,
0.050203386694192886,
-0.05385258421301842,
0.045468322932720184,
-0.02600432001054287,
0.009103916585445404,
0.016581980511546135,
0.01494691614061594,
-0.030824335291981697,
-0.0011615883558988571,
-0.0636662170290947,
-0.12034732103347778,
-0.0118890181183815,
-0.052707038819789886,
0.0007633204804733396,
-0.03248484060168266,
0.056867413222789764,
-0.09264763444662094,
-0.062434274703264236,
-0.05244990438222885,
0.022352026775479317,
-0.02624649740755558,
0.1205274686217308,
-0.0007312627276405692,
0.04951237887144089,
0.09091947227716446,
0.15412627160549164,
-0.06287951022386551,
-0.17018520832061768,
-0.053630657494068146,
0.23211349546909332,
0.021259645000100136,
0.0738338977098465,
0.030710235238075256,
0.02688610553741455,
-0.04032766819000244,
0.28390538692474365,
0.2609561085700989,
-0.02433340810239315,
0.05970100313425064,
-0.03252284228801727,
0.008852454833686352,
0.023858537897467613,
0.12555477023124695,
0.13468500971794128,
0.20234768092632294,
-0.07987125217914581,
-0.020072948187589645,
-0.041369445621967316,
0.012630997225642204,
-0.1578705608844757,
0.04768473282456398,
-0.025302475318312645,
-0.05368791148066521,
-0.04729656130075455,
0.08546026051044464,
-0.06392765045166016,
0.09854686260223389,
0.015809010714292526,
-0.08856336772441864,
-0.04839025065302849,
0.00021682998340111226,
0.21112284064292908,
-0.007045868318527937,
0.019623814150691032,
-0.03936074301600456,
-0.05413379892706871,
0.07122472673654556,
-0.011057602241635323,
-0.1692773848772049,
-0.06059876084327698,
0.07795844227075577,
0.023028375580906868,
0.1936575025320053,
0.015643781051039696,
0.07010204344987869,
0.08995462954044342,
0.04785418510437012,
-0.11184735596179962,
0.08989690989255905,
0.03424299880862236,
-0.08333216607570648,
-0.015863552689552307,
-0.13479553163051605,
-0.012995379976928234,
-0.04742743447422981,
0.016247572377324104,
-0.1095607653260231,
0.03379101678729057,
-0.014433617703616619,
-0.06995736062526703,
-0.04177592322230339,
0.058106571435928345,
-0.050807029008865356,
0.05923372134566307,
-0.00640771072357893,
-0.027699260041117668,
-0.0522528700530529,
-0.06646668165922165,
0.009650778956711292,
0.027686018496751785,
-0.17377161979675293,
-0.056983496993780136,
-0.02521589770913124,
0.0053129009902477264,
0.07856842130422592,
0.024405689910054207,
-0.044164109975099564,
-0.03560051694512367,
-0.08570306748151779,
0.0035788060631603003,
-0.1742836982011795,
0.013486085459589958,
0.06408070772886276,
0.02360478974878788,
-0.007453749421983957,
-0.036508526653051376,
-0.007974712178111076,
0.01947420835494995,
-0.08894628286361694,
-0.09263118356466293
] |
null | null |
transformers
|
[bioformer-8L](https://huggingface.co/bioformers/bioformer-8L) fine-tuned on the [QNLI](https://huggingface.co/datasets/glue) dataset for 2 epochs.
The fine-tuning process was performed on two NVIDIA GeForce GTX 1080 Ti GPUs (11 GB each). The parameters are:
```
max_seq_length=512
per_device_train_batch_size=16
total train batch size (w. parallel, distributed & accumulation) = 32
learning_rate=3e-5
```
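For orientation only, here is a hedged sketch of how the settings above could be plugged into the standard `transformers` Trainer API; the card does not specify the original training script, and the `output_dir` value is an illustrative assumption.
```python
# Sketch (assumptions noted above), not the authors' exact training script:
# fine-tune bioformer-8L on GLUE/QNLI with the hyperparameters listed on this card.
from datasets import load_dataset
from transformers import (AutoModelForSequenceClassification, AutoTokenizer,
                          Trainer, TrainingArguments)

model_name = "bioformers/bioformer-8L"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name, num_labels=2)

qnli = load_dataset("glue", "qnli")

def preprocess(batch):
    # QNLI pairs a question with a single context sentence.
    return tokenizer(batch["question"], batch["sentence"],
                     truncation=True, max_length=512)

encoded = qnli.map(preprocess, batched=True)

args = TrainingArguments(
    output_dir="bioformer-8L-qnli",   # assumption: illustrative output path
    per_device_train_batch_size=16,   # with 2 GPUs this gives a total batch size of 32
    learning_rate=3e-5,
    num_train_epochs=2,
)

trainer = Trainer(
    model=model,
    args=args,
    train_dataset=encoded["train"],
    eval_dataset=encoded["validation"],
    tokenizer=tokenizer,              # enables dynamic padding via the default collator
)
trainer.train()
```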
## Evaluation results
eval_accuracy = 0.883397
## More information
The QNLI (Question-answering NLI) dataset is a Natural Language Inference dataset automatically derived from the Stanford Question Answering Dataset v1.1 (SQuAD). SQuAD v1.1 consists of question-paragraph pairs, where one of the sentences in the paragraph (drawn from Wikipedia) contains the answer to the corresponding question (written by an annotator). The dataset was converted into sentence pair classification by forming a pair between each question and each sentence in the corresponding context, and filtering out pairs with low lexical overlap between the question and the context sentence. The task is to determine whether the context sentence contains the answer to the question. This modified version of the original task removes the requirement that the model select the exact answer, but also removes the simplifying assumptions that the answer is always present in the input and that lexical overlap is a reliable cue. The QNLI dataset is part of the GLUE benchmark.
(source: https://paperswithcode.com/dataset/qnli)
Original GLUE paper: https://arxiv.org/abs/1804.07461
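To make the sentence-pair formulation above concrete, a small inference sketch follows; the question/sentence pair is invented, and the human-readable label names depend on the `id2label` mapping stored in the checkpoint's config.
```python
# Sketch: classifying whether a context sentence answers a question (QNLI-style).
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

model_name = "bioformers/bioformer-8L-qnli"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name)

question = "What enzyme does aspirin inhibit?"
sentence = "Aspirin irreversibly inhibits cyclooxygenase."

inputs = tokenizer(question, sentence, return_tensors="pt", truncation=True)
with torch.no_grad():
    logits = model(**inputs).logits

pred = logits.argmax(dim=-1).item()
# e.g. "entailment" vs. "not_entailment" (or LABEL_0/LABEL_1, depending on the config)
print(model.config.id2label[pred])
```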
|
{"language": ["en"], "license": "apache-2.0"}
|
text-classification
|
bioformers/bioformer-8L-qnli
|
[
"transformers",
"pytorch",
"safetensors",
"bert",
"text-classification",
"en",
"arxiv:1804.07461",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"1804.07461"
] |
[
"en"
] |
TAGS
#transformers #pytorch #safetensors #bert #text-classification #en #arxiv-1804.07461 #license-apache-2.0 #autotrain_compatible #endpoints_compatible #region-us
|
bioformer-8L fine-tuned on the QNLI dataset for 2 epochs.
The fine-tuning process was performed on two NVIDIA GeForce GTX 1080 Ti GPUs (11GB). The parameters are:
## Evaluation results
eval_accuracy = 0.883397
## More information
The QNLI (Question-answering NLI) dataset is a Natural Language Inference dataset automatically derived from the Stanford Question Answering Dataset v1.1 (SQuAD). SQuAD v1.1 consists of question-paragraph pairs, where one of the sentences in the paragraph (drawn from Wikipedia) contains the answer to the corresponding question (written by an annotator). The dataset was converted into sentence pair classification by forming a pair between each question and each sentence in the corresponding context, and filtering out pairs with low lexical overlap between the question and the context sentence. The task is to determine whether the context sentence contains the answer to the question. This modified version of the original task removes the requirement that the model select the exact answer, but also removes the simplifying assumptions that the answer is always present in the input and that lexical overlap is a reliable cue. The QNLI dataset is part of the GLUE benchmark.
(source: URL
Original GLUE paper: URL
|
[
"## Evaluation results\neval_accuracy = 0.883397",
"## More information\nThe QNLI (Question-answering NLI) dataset is a Natural Language Inference dataset automatically derived from the Stanford Question Answering Dataset v1.1 (SQuAD). SQuAD v1.1 consists of question-paragraph pairs, where one of the sentences in the paragraph (drawn from Wikipedia) contains the answer to the corresponding question (written by an annotator). The dataset was converted into sentence pair classification by forming a pair between each question and each sentence in the corresponding context, and filtering out pairs with low lexical overlap between the question and the context sentence. The task is to determine whether the context sentence contains the answer to the question. This modified version of the original task removes the requirement that the model select the exact answer, but also removes the simplifying assumptions that the answer is always present in the input and that lexical overlap is a reliable cue. The QNLI dataset is part of GLEU benchmark.\n(source: URL\n\nOriginal GLUE paper: URL"
] |
[
"TAGS\n#transformers #pytorch #safetensors #bert #text-classification #en #arxiv-1804.07461 #license-apache-2.0 #autotrain_compatible #endpoints_compatible #region-us \n",
"## Evaluation results\neval_accuracy = 0.883397",
"## More information\nThe QNLI (Question-answering NLI) dataset is a Natural Language Inference dataset automatically derived from the Stanford Question Answering Dataset v1.1 (SQuAD). SQuAD v1.1 consists of question-paragraph pairs, where one of the sentences in the paragraph (drawn from Wikipedia) contains the answer to the corresponding question (written by an annotator). The dataset was converted into sentence pair classification by forming a pair between each question and each sentence in the corresponding context, and filtering out pairs with low lexical overlap between the question and the context sentence. The task is to determine whether the context sentence contains the answer to the question. This modified version of the original task removes the requirement that the model select the exact answer, but also removes the simplifying assumptions that the answer is always present in the input and that lexical overlap is a reliable cue. The QNLI dataset is part of GLEU benchmark.\n(source: URL\n\nOriginal GLUE paper: URL"
] |
[
59,
15,
233
] |
[
"passage: TAGS\n#transformers #pytorch #safetensors #bert #text-classification #en #arxiv-1804.07461 #license-apache-2.0 #autotrain_compatible #endpoints_compatible #region-us \n## Evaluation results\neval_accuracy = 0.883397## More information\nThe QNLI (Question-answering NLI) dataset is a Natural Language Inference dataset automatically derived from the Stanford Question Answering Dataset v1.1 (SQuAD). SQuAD v1.1 consists of question-paragraph pairs, where one of the sentences in the paragraph (drawn from Wikipedia) contains the answer to the corresponding question (written by an annotator). The dataset was converted into sentence pair classification by forming a pair between each question and each sentence in the corresponding context, and filtering out pairs with low lexical overlap between the question and the context sentence. The task is to determine whether the context sentence contains the answer to the question. This modified version of the original task removes the requirement that the model select the exact answer, but also removes the simplifying assumptions that the answer is always present in the input and that lexical overlap is a reliable cue. The QNLI dataset is part of GLEU benchmark.\n(source: URL\n\nOriginal GLUE paper: URL"
] |
[
-0.050867728888988495,
0.06072592735290527,
-0.0007133181788958609,
0.06442955881357193,
0.002042328240349889,
0.008119173347949982,
0.05529621243476868,
0.07119231671094894,
0.028423160314559937,
0.13526418805122375,
0.04243595153093338,
0.0856427550315857,
0.06659512221813202,
0.018964502960443497,
-0.018784550949931145,
-0.03576007857918739,
0.03429596498608589,
-0.027083124965429306,
0.05512522906064987,
0.10983002185821533,
0.03643789514899254,
-0.10741573572158813,
0.019735757261514664,
-0.032871048897504807,
-0.040826037526130676,
-0.001698456471785903,
0.031315580010414124,
-0.020684869959950447,
0.1042894795536995,
0.11159855127334595,
0.03091319464147091,
0.0737430527806282,
0.0011513311183080077,
-0.19640333950519562,
0.012413945980370045,
0.023863688111305237,
0.0011902136029675603,
0.02348863147199154,
-0.015406129881739616,
0.013903908431529999,
-0.12206780910491943,
-0.025333993136882782,
0.022497108206152916,
0.014154458418488503,
-0.09699764847755432,
-0.02487429603934288,
-0.09916654974222183,
0.03302540257573128,
0.10906153172254562,
0.07957884669303894,
-0.047940559685230255,
0.1791331022977829,
-0.10941364616155624,
0.07576705515384674,
0.1135973110795021,
-0.2511182725429535,
-0.00900151114910841,
0.04522060602903366,
0.01205355953425169,
0.0768776684999466,
-0.07077868282794952,
0.02987777628004551,
0.010757187381386757,
-0.015280759893357754,
0.03477583825588226,
-0.0843881368637085,
-0.1529913693666458,
0.008141831494867802,
-0.11021428555250168,
-0.01524991076439619,
0.20710761845111847,
0.0012318259105086327,
-0.053220655769109726,
-0.09096948057413101,
-0.024872321635484695,
0.14482338726520538,
0.042138971388339996,
-0.04800178483128548,
0.012122816406190395,
-0.023658018559217453,
0.012354901060461998,
-0.08642034977674484,
-0.07357626408338547,
-0.10903804749250412,
-0.14231465756893158,
0.07417050004005432,
0.015756860375404358,
0.042747728526592255,
-0.07038019597530365,
0.13765588402748108,
-0.0712532177567482,
-0.11489936709403992,
-0.0571574866771698,
0.004178470466285944,
-0.1494438499212265,
-0.01625385694205761,
-0.007702738977968693,
-0.0692318007349968,
0.016486171633005142,
0.16795013844966888,
-0.06442337483167648,
0.016823776066303253,
-0.08983512967824936,
0.05885019525885582,
0.12493210285902023,
0.23532356321811676,
-0.04077143594622612,
-0.09788785129785538,
0.062048960477113724,
0.019065001979470253,
0.06836589425802231,
-0.05083829164505005,
-0.03526332229375839,
-0.06053348630666733,
0.0677429735660553,
0.11347740143537521,
0.13904623687267303,
0.014970721676945686,
-0.04654388129711151,
-0.017330026254057884,
-0.011326013132929802,
-0.13497070968151093,
-0.02254294417798519,
0.02009187638759613,
-0.08135953545570374,
-0.060006868094205856,
0.011435936205089092,
0.0035361081827431917,
-0.08746392279863358,
-0.01869248040020466,
-0.09343492239713669,
-0.018526040017604828,
0.0006792755448259413,
-0.1407395452260971,
0.024786869063973427,
-0.07286564260721207,
-0.06400585174560547,
-0.06959613412618637,
-0.1702708750963211,
-0.10756267607212067,
-0.038740284740924835,
-0.043736640363931656,
-0.03908136859536171,
-0.06456499546766281,
0.07241785526275635,
-0.05626285448670387,
-0.027879886329174042,
-0.06057324633002281,
-0.01552379596978426,
0.05687534064054489,
0.028584975749254227,
0.07500649988651276,
-0.02793159894645214,
0.03839874267578125,
-0.12679477035999298,
0.041008736938238144,
-0.08619076013565063,
0.14403648674488068,
-0.05616626515984535,
-0.0021228701807558537,
-0.07583823800086975,
0.0011027607833966613,
0.011904294602572918,
-0.018999962136149406,
0.0016669424949213862,
0.15701095759868622,
-0.15603268146514893,
-0.023427795618772507,
0.18655937910079956,
-0.09933065623044968,
-0.15779589116573334,
0.07834691554307938,
-0.04233243688941002,
0.16351954638957977,
0.14511029422283173,
0.1704394519329071,
0.13671959936618805,
-0.06585733592510223,
-0.1234409511089325,
0.01049723569303751,
-0.03561786934733391,
0.13996127247810364,
0.05589234456419945,
0.004037912003695965,
0.005922036245465279,
0.030145181342959404,
-0.16676834225654602,
-0.014968836680054665,
0.012482917867600918,
-0.07986778020858765,
-0.0011131978826597333,
-0.020500194281339645,
-0.0027227168902754784,
-0.02307438850402832,
-0.016165800392627716,
0.04948928579688072,
-0.05019169673323631,
-0.018077539280056953,
0.014076941646635532,
-0.10103069245815277,
0.018371986225247383,
-0.04084155708551407,
0.13349834084510803,
-0.05061745643615723,
0.02834661491215229,
-0.16181422770023346,
-0.17255118489265442,
0.0634584054350853,
0.05942780524492264,
0.049077995121479034,
-0.015649519860744476,
-0.03212861716747284,
0.021716229617595673,
0.016735678538680077,
-0.0010189813328906894,
-0.0303743127733469,
-0.034796420484781265,
-0.061443865299224854,
-0.0634695291519165,
0.016343945637345314,
-0.011640675365924835,
0.10154368728399277,
-0.039211004972457886,
0.0006197771290317178,
-0.06274688243865967,
-0.06084344908595085,
0.012903990224003792,
0.011279172264039516,
0.005840871017426252,
0.01827886700630188,
-0.05196287855505943,
-0.018754707649350166,
0.0030907930340617895,
0.007244202774018049,
-0.07209577411413193,
0.11690793931484222,
-0.18917834758758545,
-0.16788458824157715,
0.0185532383620739,
-0.030077390372753143,
-0.07909736782312393,
-0.11985351890325546,
-0.047713227570056915,
-0.027759848162531853,
-0.14807240664958954,
-0.06585858762264252,
0.18312104046344757,
0.04964390769600868,
0.026277340948581696,
-0.11486835777759552,
-0.07052837312221527,
-0.042476776987314224,
0.008314241655170918,
0.010914023034274578,
0.047908514738082886,
0.04661297798156738,
-0.0674123764038086,
0.060883719474077225,
-0.017620619386434555,
-0.03469904884696007,
0.12502028048038483,
-0.019678214564919472,
-0.06780266016721725,
-0.07236401736736298,
0.07081678509712219,
-0.02353237196803093,
0.10622485727071762,
-0.005847678519785404,
0.019952477887272835,
0.06104680150747299,
0.014441991224884987,
-0.019675685092806816,
-0.11714916676282883,
0.021878981962800026,
0.052555620670318604,
-0.055324915796518326,
-0.10711868852376938,
-0.040414921939373016,
0.012827398255467415,
0.0964425802230835,
0.0160139799118042,
0.09073466807603836,
-0.034689150750637054,
-0.04546329379081726,
-0.14222343266010284,
0.19519351422786713,
-0.07182533293962479,
-0.3455769121646881,
-0.08131416141986847,
0.07478837668895721,
-0.03353245556354523,
-0.02518436498939991,
0.031051281839609146,
0.013621285557746887,
-0.026626182720065117,
-0.09054111689329147,
0.08961819112300873,
0.04614381864666939,
-0.08584863692522049,
-0.09528795629739761,
0.02807675302028656,
-0.09102916717529297,
-0.1154542788863182,
-0.0055811223573982716,
-0.044886842370033264,
-0.09525147825479507,
0.06060776486992836,
-0.055159252136945724,
0.11272051185369492,
0.10203521698713303,
-0.000704489357303828,
-0.004281119909137487,
-0.0755600780248642,
0.21572993695735931,
-0.08934104442596436,
0.12776944041252136,
0.06635584682226181,
-0.017374280840158463,
0.02877575345337391,
0.14231805503368378,
-0.052536774426698685,
-0.04192919656634331,
0.03890085965394974,
0.07003267109394073,
-0.03925569728016853,
-0.2193257212638855,
-0.0708470493555069,
-0.017770355567336082,
0.043456997722387314,
0.0038993596099317074,
0.009522772394120693,
0.029620438814163208,
0.031926870346069336,
-0.05085703730583191,
0.013343014754354954,
0.009942888282239437,
0.04147728160023689,
0.15683282911777496,
-0.0339636392891407,
0.15573708713054657,
-0.020246319472789764,
0.004587221425026655,
0.06915069371461868,
0.11320009082555771,
0.2223491668701172,
-0.057358939200639725,
0.0957789197564125,
0.09734509140253067,
0.13727690279483795,
-0.027951959520578384,
0.051221515983343124,
-0.03501781448721886,
0.021146897226572037,
-0.01530955359339714,
-0.09888843446969986,
-0.0026904165279120207,
0.0408756248652935,
0.07262735068798065,
0.000611678056884557,
-0.02729814499616623,
0.004065067507326603,
0.06520957499742508,
0.14964431524276733,
0.03943534567952156,
-0.06522170454263687,
-0.041952356696128845,
0.04101003333926201,
0.061379365622997284,
-0.008081788197159767,
0.040723878890275955,
0.11174000054597855,
-0.08612217009067535,
-0.02090352214872837,
-0.003198839258402586,
0.12910470366477966,
-0.06078493222594261,
-0.010687611065804958,
-0.11110042035579681,
-0.1403750628232956,
-0.02253819815814495,
0.13576464354991913,
-0.18739451467990875,
0.09264741092920303,
0.0699707642197609,
0.011042606085538864,
-0.05197550728917122,
-0.025128794834017754,
-0.050600092858076096,
0.05182594060897827,
0.20005358755588531,
0.004907455760985613,
0.023667654022574425,
-0.1333443522453308,
-0.0777563601732254,
0.07205818593502045,
0.012400978244841099,
-0.01800675503909588,
0.10002988576889038,
0.008711647242307663,
0.05171307548880577,
-0.014674666337668896,
0.0884178876876831,
-0.05110635980963707,
-0.1494164913892746,
-0.008124464191496372,
0.03183763846755028,
0.002205710392445326,
-0.006026079412549734,
-0.039177391678094864,
0.10495265573263168,
0.15656223893165588,
-0.18614712357521057,
-0.07704706490039825,
-0.10546151548624039,
0.03956963121891022,
0.08103348314762115,
-0.11499223858118057,
-0.055315904319286346,
-0.04292077198624611,
0.05030228570103645,
-0.049955565482378006,
-0.081572026014328,
0.03301042690873146,
-0.055493470281362534,
-0.05853192135691643,
-0.05217049643397331,
0.09493350982666016,
0.08817601948976517,
0.09431762993335724,
0.05213797837495804,
0.023283060640096664,
-0.04971837252378464,
-0.15093402564525604,
-0.00949362013489008,
0.0635451003909111,
0.055711496621370316,
0.07019517570734024,
-0.1283356100320816,
-0.05338471382856369,
-0.11261211335659027,
0.04000226780772209,
0.19765785336494446,
0.12283803522586823,
-0.106939397752285,
0.1343023031949997,
0.17507749795913696,
-0.10813863575458527,
-0.2501404583454132,
0.004077848978340626,
-0.0032921854872256517,
0.03599397465586662,
0.09695660322904587,
-0.13020960986614227,
0.06199699267745018,
0.04931517690420151,
-0.005742708221077919,
0.05820786580443382,
-0.28410202264785767,
-0.07153572887182236,
0.104161337018013,
0.04517161101102829,
0.12024673074483871,
-0.11882158368825912,
-0.03220456838607788,
0.036186642944812775,
-0.030935650691390038,
0.1853695660829544,
-0.08799125254154205,
0.06900607794523239,
-0.011201472021639347,
0.0323188342154026,
0.03266459330916405,
-0.037371329963207245,
0.08145337551832199,
0.00794199202209711,
0.03521833196282387,
-0.00311055569909513,
-0.06989892572164536,
0.10307945311069489,
-0.05597115308046341,
0.1460990011692047,
0.013168929144740105,
0.12394023686647415,
-0.020659325644373894,
-0.051105938851833344,
-0.08263303339481354,
0.07049451768398285,
-0.07275841385126114,
-0.07664157450199127,
-0.10606243461370468,
0.06384709477424622,
0.09314537048339844,
0.01602347567677498,
0.01521360594779253,
-0.047906000167131424,
0.05336642265319824,
0.09285396337509155,
0.05473650246858597,
0.016117094084620476,
-0.15120519697666168,
0.008064025081694126,
0.01928097940981388,
0.09229135513305664,
-0.020098509266972542,
0.07354526221752167,
0.1251663863658905,
0.08309733867645264,
0.11316892504692078,
0.048226673156023026,
-0.0767584964632988,
0.023783313110470772,
-0.030147923156619072,
-0.1642804592847824,
-0.03996848315000534,
-0.02667928673326969,
-0.07595393806695938,
-0.08581158518791199,
-0.07333191484212875,
0.09230928868055344,
-0.005720602814108133,
-0.017420832067728043,
0.02541780099272728,
0.05134540796279907,
0.05913563817739487,
0.1962260603904724,
0.045679740607738495,
0.025967048481106758,
-0.05826669931411743,
0.07785087078809738,
0.12022144347429276,
-0.08172549307346344,
0.04896616190671921,
0.011964760720729828,
-0.09489960223436356,
-0.020945679396390915,
-0.05840307101607323,
0.0029865039978176355,
-0.07214469462633133,
-0.07187201082706451,
-0.07901835441589355,
-0.03566671162843704,
-0.001018816838040948,
0.1435256004333496,
-0.0007033048896118999,
0.10453367233276367,
-0.016760116443037987,
-0.05460677295923233,
-0.020911656320095062,
0.15040241181850433,
-0.05828025937080383,
0.0045441570691764355,
0.020031454041600227,
0.032156944274902344,
0.0820213183760643,
0.14169247448444366,
-0.04958970099687576,
-0.13285328447818756,
-0.17028595507144928,
0.03144519776105881,
-0.05023328587412834,
0.06132850795984268,
-0.012598772533237934,
-0.0379769429564476,
-0.03551799803972244,
-0.011965937912464142,
-0.012980730272829533,
0.03749840706586838,
0.014768533408641815,
-0.0006555926520377398,
-0.05968034639954567,
0.05476554110646248,
-0.21309201419353485,
-0.04858817905187607,
0.075434111058712,
-0.0149122579023242,
0.071912482380867,
-0.012436221353709698,
-0.03951379656791687,
0.013705611228942871,
-0.09275564551353455,
-0.018820153549313545,
-0.0392092689871788,
0.06913196295499802,
0.020402079448103905,
-0.1587667167186737,
0.004954170435667038,
0.006495937239378691,
-0.03802719712257385,
0.028861917555332184,
-0.007582925725728273,
-0.08927629142999649,
0.06152850016951561,
-0.060417454689741135,
-0.1043144017457962,
-0.054262544959783554,
0.04641128331422806,
-0.04369937255978584,
0.03337136283516884,
0.08369893580675125,
-0.0378432460129261,
0.07680220156908035,
-0.14917993545532227,
-0.056646041572093964,
0.054296690970659256,
0.05336011201143265,
-0.07152824103832245,
-0.06696055084466934,
0.029735112562775612,
-0.004275255370885134,
0.15838445723056793,
0.0353463813662529,
0.054073516279459,
0.0034419612493366003,
0.054206132888793945,
0.059238970279693604,
-0.05178452283143997,
0.05688418447971344,
0.030105996876955032,
-0.028514772653579712,
0.054846830666065216,
0.0415751151740551,
-0.08236199617385864,
-0.029594935476779938,
0.17221280932426453,
-0.0270890761166811,
0.07799894362688065,
0.024109503254294395,
0.0592631995677948,
-0.053088605403900146,
0.005495191551744938,
-0.03665145859122276,
-0.020425552502274513,
-0.03139317035675049,
0.009412331506609917,
0.15658816695213318,
0.10214167088270187,
-0.12196823209524155,
0.12211944162845612,
0.05921813100576401,
-0.11603023111820221,
-0.1449151337146759,
-0.12155748903751373,
-0.05351186543703079,
-0.0009112873813137412,
-0.0096169114112854,
-0.14442454278469086,
-0.01880492828786373,
0.01938708685338497,
0.011619742028415203,
-0.04582427069544792,
0.12286567687988281,
-0.06801575422286987,
-0.13788272440433502,
0.017345884814858437,
0.014651237055659294,
0.06323114037513733,
-0.013281554915010929,
0.08935263007879257,
0.0641700029373169,
0.035675905644893646,
0.06329162418842316,
0.09439659118652344,
0.0945572480559349,
0.008897515945136547,
-0.07332189381122589,
-0.03145352005958557,
-0.04178585857152939,
0.004372710827738047,
0.024637462571263313,
0.13235971331596375,
0.051132313907146454,
-0.002888729562982917,
0.00888080894947052,
0.22902034223079681,
-0.0016011056723073125,
-0.028805561363697052,
-0.10706312209367752,
0.16529357433319092,
-0.009330544620752335,
-0.022335439920425415,
0.026385704055428505,
-0.10970252752304077,
-0.008492402732372284,
0.21719318628311157,
0.09124716371297836,
-0.020952310413122177,
-0.016909966245293617,
-0.02611560933291912,
0.017840595915913582,
0.048452284187078476,
0.07410548627376556,
0.029191426932811737,
0.37816891074180603,
-0.07592317461967468,
-0.017307091504335403,
-0.05876766890287399,
-0.023807326331734657,
-0.030783142894506454,
0.037996865808963776,
0.028733130544424057,
0.026686949655413628,
-0.11797896772623062,
0.08896677196025848,
0.0060081081464886665,
-0.0917564108967781,
-0.0158600565046072,
-0.08976667374372482,
-0.08421758562326431,
-0.006658976431936026,
0.0031726814340800047,
-0.1085645854473114,
0.016890401020646095,
0.010688717477023602,
0.026022057980298996,
0.10715902596712112,
-0.011424393393099308,
-0.1099880263209343,
-0.04593227803707123,
0.0826859101653099,
0.016610726714134216,
0.12662211060523987,
-0.012924287468194962,
0.1073879674077034,
0.06964219361543655,
0.007013141177594662,
-0.02315324917435646,
0.11509857326745987,
0.051068954169750214,
-0.05395954102277756,
0.01592671312391758,
0.07368951290845871,
0.059737347066402435,
0.10923490673303604,
0.11951272934675217,
-0.0035790824331343174,
0.05954721197485924,
-0.10874099284410477,
-0.011268489994108677,
-0.19801320135593414,
0.03730587288737297,
-0.04180896654725075,
0.1414753794670105,
0.09128870069980621,
-0.00948562566190958,
0.008396906778216362,
-0.09144952893257141,
-0.015258350409567356,
-0.015607333742082119,
0.11667428910732269,
0.0037639844231307507,
-0.09611795842647552,
0.07116653025150299,
0.0026157493703067303,
-0.019520776346325874,
-0.2944202125072479,
-0.02712058089673519,
0.047063734382390976,
-0.05299074202775955,
0.09039638936519623,
0.10172910988330841,
0.019038835540413857,
0.020770207047462463,
-0.057070884853601456,
-0.26343101263046265,
0.05438931658864021,
0.0745774507522583,
-0.08245920389890671,
-0.09952165186405182
] |
null | null |
transformers
|
[bioformer-8L](https://huggingface.co/bioformers/bioformer-8L) fine-tuned on the [SQuAD1](https://rajpurkar.github.io/SQuAD-explorer) dataset for 3 epochs.
The fine-tuning process was performed on a single P100 GPU (16GB). The hyperparameters are:
```
max_seq_length=512
per_device_train_batch_size=16
gradient_accumulation_steps=1
total train batch size (w. parallel, distributed & accumulation) = 16
learning_rate=3e-5
num_train_epochs=3
```
## Evaluation results
```
"eval_exact_match": 78.55250709555345
"eval_f1": 85.91482799690257
```
Bioformer's performance is on par with [DistilBERT](https://arxiv.org/pdf/1910.01108.pdf) (EM/F1: 77.7/85.8),
although Bioformer was pretrained only on biomedical texts.
## Speed
In our experiments, the inference speed of Bioformer is 3x as fast as BERT-base/BioBERT/PubMedBERT, and is 40% faster than DistilBERT.
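As a usage illustration (the context and question below are invented biomedical examples, not taken from SQuAD), the checkpoint can be queried through the standard `question-answering` pipeline:
```python
# Sketch: extractive question answering with this checkpoint.
from transformers import pipeline

qa = pipeline("question-answering", model="bioformers/bioformer-8L-squad1")

context = ("Metformin is a first-line medication for the treatment of type 2 "
           "diabetes, particularly in people who are overweight.")
result = qa(question="What is metformin used to treat?", context=context)
print(result["answer"], round(result["score"], 3))
```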
|
{"language": ["en"], "license": "apache-2.0", "pipeline_tag": "question-answering"}
|
question-answering
|
bioformers/bioformer-8L-squad1
|
[
"transformers",
"pytorch",
"safetensors",
"bert",
"question-answering",
"en",
"arxiv:1910.01108",
"license:apache-2.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"1910.01108"
] |
[
"en"
] |
TAGS
#transformers #pytorch #safetensors #bert #question-answering #en #arxiv-1910.01108 #license-apache-2.0 #endpoints_compatible #region-us
|
bioformer-8L fine-tuned on the SQuAD1 dataset for 3 epochs.
The fine-tuning process was performed on a single P100 GPU (16GB). The hyperparameters are:
## Evaluation results
Bioformer's performance is on par with DistilBERT (EM/F1: 77.7/85.8),
although Bioformer was pretrained only on biomedical texts.
## Speed
In our experiments, the inference speed of Bioformer is 3x as fast as BERT-base/BioBERT/PubMedBERT, and is 40% faster than DistilBERT.
|
[
"## Evaluation results\n\n\n\nBioformer's performance is on par with DistilBERT (EM/F1: 77.7/85.8), \nalthough Bioformer was pretrained only on biomedical texts.",
"## Speed\nIn our experiments, the inference speed of Bioformer is 3x as fast as BERT-base/BioBERT/PubMedBERT, and is 40% faster than DistilBERT."
] |
[
"TAGS\n#transformers #pytorch #safetensors #bert #question-answering #en #arxiv-1910.01108 #license-apache-2.0 #endpoints_compatible #region-us \n",
"## Evaluation results\n\n\n\nBioformer's performance is on par with DistilBERT (EM/F1: 77.7/85.8), \nalthough Bioformer was pretrained only on biomedical texts.",
"## Speed\nIn our experiments, the inference speed of Bioformer is 3x as fast as BERT-base/BioBERT/PubMedBERT, and is 40% faster than DistilBERT."
] |
[
52,
42,
45
] |
[
"passage: TAGS\n#transformers #pytorch #safetensors #bert #question-answering #en #arxiv-1910.01108 #license-apache-2.0 #endpoints_compatible #region-us \n## Evaluation results\n\n\n\nBioformer's performance is on par with DistilBERT (EM/F1: 77.7/85.8), \nalthough Bioformer was pretrained only on biomedical texts.## Speed\nIn our experiments, the inference speed of Bioformer is 3x as fast as BERT-base/BioBERT/PubMedBERT, and is 40% faster than DistilBERT."
] |
[
-0.019239705055952072,
0.04117913544178009,
-0.0034886342473328114,
0.00858395267277956,
-0.03463754430413246,
-0.02597624808549881,
-0.026189126074314117,
0.1510728895664215,
0.06564942002296448,
0.10182034969329834,
0.1532437652349472,
0.07900135219097137,
-0.0044415732845664024,
0.16049428284168243,
-0.06339624524116516,
-0.053049977868795395,
0.08684936910867691,
0.06103181838989258,
-0.04565024748444557,
0.022407270967960358,
0.037121109664440155,
-0.1851140260696411,
0.06415702402591705,
0.02887064591050148,
0.024171549826860428,
0.024958014488220215,
0.12059763818979263,
-0.005566926673054695,
0.06389108300209045,
-0.030123455449938774,
0.04857448861002922,
0.06905729323625565,
0.04262101277709007,
0.0052858819253742695,
0.02606925182044506,
-0.0018136315047740936,
0.1338556408882141,
0.13824738562107086,
-0.0059454478323459625,
0.09311094880104065,
0.004626544192433357,
0.0374889075756073,
0.004164434038102627,
0.0711665228009224,
-0.008353658951818943,
-0.18255357444286346,
-0.04944971948862076,
-0.05350280553102493,
-0.057144299149513245,
0.026586759835481644,
-0.03366546332836151,
0.13791407644748688,
-0.15654054284095764,
0.0719367116689682,
0.1912122517824173,
-0.1892673373222351,
-0.08548107743263245,
-0.15532398223876953,
0.293415904045105,
0.02080904133617878,
-0.11969876289367676,
0.02587161958217621,
0.08433142304420471,
0.025650715455412865,
0.10160478204488754,
-0.04492448270320892,
0.09620732814073563,
0.031806960701942444,
-0.08951940387487411,
0.13643258810043335,
0.17419327795505524,
-0.022370927035808563,
-0.1402764469385147,
0.03641738370060921,
-0.1216159388422966,
0.09063125401735306,
-0.01254013180732727,
-0.10388688743114471,
0.040780141949653625,
-0.06308320164680481,
-0.08403988182544708,
0.016020923852920532,
-0.02908562682569027,
-0.11395128816366196,
-0.08472313731908798,
0.2622491121292114,
0.06181612238287926,
0.08018842339515686,
0.08015424758195877,
0.07535078376531601,
0.1795801967382431,
-0.12974491715431213,
-0.022403649985790253,
-0.05849175155162811,
-0.009648889303207397,
-0.03610513359308243,
-0.07629263401031494,
0.10361810028553009,
0.10494452714920044,
0.16793686151504517,
0.04792894423007965,
0.013688824139535427,
0.02092297188937664,
-0.006311101373285055,
-0.06141524016857147,
0.07724782079458237,
-0.09459662437438965,
-0.031241141259670258,
0.05812560021877289,
0.12456658482551575,
-0.04622673988342285,
0.06807460635900497,
0.0018452754011377692,
-0.014534837566316128,
0.0679686963558197,
0.013837688602507114,
-0.17134658992290497,
-0.04086732119321823,
-0.11482369899749756,
0.02952185645699501,
0.06725548207759857,
-0.06448081135749817,
-0.04751358926296234,
0.02548231929540634,
-0.1154356524348259,
0.1508178412914276,
0.04322250187397003,
-0.0754549577832222,
0.0009293461916968226,
0.04036198928952217,
-0.1591162383556366,
-0.04449347034096718,
-0.037400033324956894,
-0.07470813393592834,
-0.021269507706165314,
0.12688663601875305,
0.02628372609615326,
-0.07284383475780487,
-0.04729224368929863,
0.06765954941511154,
0.01773209683597088,
-0.13316018879413605,
-0.015334416180849075,
0.03175662085413933,
-0.005569624248892069,
0.07729212194681168,
-0.09178116917610168,
0.0817030668258667,
-0.08614891767501831,
0.15911322832107544,
0.08061717450618744,
0.16091462969779968,
-0.07604274153709412,
0.02444625273346901,
-0.1497579663991928,
-0.004088204819709063,
-0.1796792447566986,
-0.030778756365180016,
-0.1532900631427765,
-0.03489697724580765,
-0.14285412430763245,
-0.09182365983724594,
-0.10405241698026657,
-0.029783019796013832,
0.0770932212471962,
0.12203659117221832,
-0.15523064136505127,
-0.026949021965265274,
0.09741610288619995,
0.07104330509901047,
-0.0820627361536026,
0.1480122208595276,
-0.05060704052448273,
0.07705535739660263,
0.09446638077497482,
0.21297526359558105,
-0.08159788697957993,
-0.23048867285251617,
0.016311192885041237,
0.03754745423793793,
0.027026239782571793,
-0.034075621515512466,
0.15207690000534058,
0.03673700988292694,
-0.004321548622101545,
0.0621781088411808,
-0.05470134690403938,
0.20883138477802277,
-0.08879110217094421,
0.02245195396244526,
-0.004841163754463196,
-0.12371977418661118,
0.09768268465995789,
-0.018870536237955093,
0.012236865237355232,
-0.07871375977993011,
0.03182677924633026,
0.09895210713148117,
0.09034207463264465,
-0.11320161819458008,
-0.04511900246143341,
-0.01328047551214695,
0.0033650195691734552,
-0.0711086317896843,
-0.016356639564037323,
-0.06268775463104248,
-0.034279681742191315,
0.07208022475242615,
-0.052535079419612885,
-0.052734408527612686,
0.14510993659496307,
0.02403455600142479,
0.060095056891441345,
-0.016799092292785645,
0.014154743403196335,
0.02668578550219536,
0.009564470499753952,
-0.08792508393526077,
-0.04362083971500397,
0.104981929063797,
-0.03591771423816681,
-0.03646526113152504,
-0.0337432362139225,
-0.018180955201387405,
-0.0226072296500206,
0.11959245800971985,
-0.04301940277218819,
-0.027698952704668045,
0.008743665181100368,
-0.06271707266569138,
-0.07666060328483582,
-0.010273799300193787,
0.03816784918308258,
-0.017551323398947716,
-0.0009810527553781867,
0.12177234143018723,
0.19765955209732056,
0.21123433113098145,
0.027488986030220985,
0.09903687983751297,
-0.046577341854572296,
-0.007067509926855564,
-0.046609316021203995,
-0.027098016813397408,
-0.11707891523838043,
-0.08840696513652802,
0.07707379758358002,
0.02698841132223606,
0.18294982612133026,
-0.12476524710655212,
-0.08123590052127838,
0.027054954320192337,
-0.051348939538002014,
0.12433579564094543,
0.08210544288158417,
0.04685388505458832,
-0.14650285243988037,
0.04660988971590996,
0.2678586542606354,
0.01706613600254059,
0.14274346828460693,
-0.046922340989112854,
-0.15924079716205597,
-0.015963314101099968,
-0.014041442424058914,
-0.07725226879119873,
0.14863461256027222,
-0.007489679381251335,
0.06325021386146545,
0.004629023373126984,
-0.0175539031624794,
-0.018299967050552368,
-0.06587382405996323,
0.0016515129245817661,
-0.033143822103738785,
-0.04598642885684967,
-0.03568841144442558,
0.012501712888479233,
-0.02754884958267212,
0.10965811461210251,
0.0016307756304740906,
-0.15138936042785645,
-0.012382125481963158,
0.06773702800273895,
-0.013556678779423237,
0.20645421743392944,
-0.0277385376393795,
-0.05723997578024864,
-0.04430312663316727,
-0.11198964715003967,
0.048053428530693054,
0.020713601261377335,
0.023035362362861633,
-0.05063096433877945,
-0.07000706344842911,
0.02121935412287712,
-0.0351567268371582,
-0.01626358926296234,
0.04805590584874153,
0.03222043067216873,
-0.025310717523097992,
0.12260721623897552,
-0.08608518540859222,
-0.026853052899241447,
-0.08995650708675385,
0.11224786192178726,
0.004523939918726683,
-0.052404433488845825,
0.08220764249563217,
0.036149367690086365,
-0.06304475665092468,
-0.07846149057149887,
-0.03288661316037178,
0.2236398309469223,
0.039406873285770416,
0.00023425287508871406,
0.19222170114517212,
0.03122766874730587,
0.06119762361049652,
-0.026422835886478424,
0.04022243246436119,
-0.08539731055498123,
0.04962584748864174,
0.021073661744594574,
0.061972957104444504,
-0.18795551359653473,
-0.001114869024604559,
-0.00023136039089877158,
-0.016666484996676445,
0.04201614111661911,
-0.002061712322756648,
-0.09113403409719467,
0.008449805900454521,
0.025360632687807083,
0.03193550556898117,
-0.01952592469751835,
0.0052335807122290134,
0.06633366644382477,
-0.08098268508911133,
0.22719605267047882,
0.09388910233974457,
-0.14022839069366455,
0.05715532973408699,
0.026478426530957222,
0.13621622323989868,
-0.026428665965795517,
0.056832119822502136,
-0.037597645074129105,
0.12128622084856033,
0.03943142294883728,
0.20996922254562378,
-0.0288232434540987,
-0.07417496293783188,
-0.12334126979112625,
-0.03120136633515358,
-0.11787796020507812,
-0.03542688861489296,
-0.06664159893989563,
-0.01532308291643858,
-0.02828536368906498,
0.05761023238301277,
0.07825569808483124,
0.11421222239732742,
0.06956713646650314,
-0.22267529368400574,
-0.09477759152650833,
0.03381993621587753,
-0.0504022091627121,
-0.09413966536521912,
0.06202775985002518,
0.2270985245704651,
0.03401494026184082,
-0.07514629513025284,
-0.06088133528828621,
0.06025567278265953,
0.09523741155862808,
0.09256178885698318,
-0.0036341254599392414,
0.021591607481241226,
-0.009282339364290237,
0.06427936255931854,
-0.12255872786045074,
0.05302029848098755,
-0.012814385816454887,
0.04016280546784401,
0.00561881810426712,
0.013579503633081913,
0.032059621065855026,
0.0728701800107956,
0.12768429517745972,
0.006577154155820608,
0.08615222573280334,
-0.11736086010932922,
-0.2760993242263794,
0.042527321726083755,
0.05547073483467102,
0.09938773512840271,
0.1064828559756279,
-0.05560872331261635,
0.010855944827198982,
-0.006296874489635229,
-0.07300767302513123,
-0.14545060694217682,
-0.012209154665470123,
0.029952244833111763,
-0.09537821263074875,
0.01962418667972088,
-0.06692583113908768,
-0.0731092318892479,
-0.020927228033542633,
0.13630443811416626,
-0.09697706252336502,
-0.03931102529168129,
-0.13028329610824585,
0.08262570947408676,
0.15766876935958862,
-0.06513147801160812,
-0.030268676578998566,
-0.05657146871089935,
0.04124446213245392,
0.041428565979003906,
-0.10099402070045471,
0.01892450824379921,
-0.09398936480283737,
-0.0960509330034256,
-0.13349218666553497,
0.0419602207839489,
-0.062153834849596024,
0.12134722620248795,
0.06486448645591736,
-0.054213304072618484,
-0.1847383975982666,
-0.0718957930803299,
0.03653424233198166,
-0.10380254685878754,
0.03789913281798363,
-0.047831907868385315,
-0.1977539360523224,
0.009812166914343834,
-0.050993911921978,
0.04772870987653732,
0.06759950518608093,
0.179973304271698,
-0.01596771366894245,
0.05022011697292328,
0.24134832620620728,
0.008940275758504868,
-0.27286040782928467,
-0.0410856269299984,
0.12425323575735092,
-0.01859491504728794,
-0.05594895780086517,
-0.10792788863182068,
0.2642689347267151,
0.20108257234096527,
-0.0007345626945607364,
0.02774820476770401,
-0.018679283559322357,
-0.09704703837633133,
0.04257144406437874,
0.014810534194111824,
0.3557019531726837,
-0.1134929358959198,
-0.03143911063671112,
-0.04889264702796936,
-0.09875411540269852,
0.0165720097720623,
-0.07891496270895004,
0.08335572481155396,
-0.07101652771234512,
-0.015935534611344337,
-0.016368048265576363,
-0.0881085991859436,
0.10262742638587952,
-0.08771420270204544,
0.0024863306898623705,
0.06898211687803268,
-0.09896191209554672,
0.043455030769109726,
0.03346041589975357,
0.16442067921161652,
-0.007622691337019205,
0.02474403567612171,
0.025408700108528137,
-0.020694822072982788,
-0.09241316467523575,
-0.004113335628062487,
0.04971093311905861,
-0.04697458818554878,
-0.10563525557518005,
0.08181675523519516,
-0.03629915416240692,
-0.005637915804982185,
0.05579724535346031,
-0.03523693606257439,
-0.13729241490364075,
0.041506193578243256,
0.041261956095695496,
-0.1652388572692871,
-0.03896509110927582,
0.05558346211910248,
-0.0446978360414505,
0.012212296947836876,
-0.16021472215652466,
0.03839075565338135,
0.164388045668602,
-0.051141221076250076,
0.0768834799528122,
0.0680597797036171,
-0.04772605746984482,
0.014559399336576462,
0.007461877539753914,
-0.20115920901298523,
-0.03547871485352516,
-0.011699938215315342,
-0.03374345600605011,
-0.14001920819282532,
0.06103367358446121,
0.1209256649017334,
-0.07454933971166611,
-0.03650711849331856,
-0.0454745888710022,
-0.005009789485484362,
-0.021985642611980438,
0.32063373923301697,
0.07992903143167496,
0.06778443604707718,
-0.07478360831737518,
-0.02542594075202942,
-0.0128274941816926,
0.08877963572740555,
-0.03950588405132294,
-0.061658427119255066,
-0.11131986230611801,
0.011587404645979404,
-0.09022638201713562,
0.14737600088119507,
-0.027245398610830307,
0.05181436240673065,
-0.2504567503929138,
-0.11990755051374435,
0.05737008526921272,
0.1882690042257309,
0.03708679974079132,
-0.053147297352552414,
-0.12705537676811218,
-0.0329422801733017,
-0.010471872985363007,
0.13523609936237335,
-0.08282577991485596,
0.0736750066280365,
0.004203686024993658,
0.15980342030525208,
-0.12362752854824066,
0.018127523362636566,
-0.1112082302570343,
0.0737060010433197,
-0.12226466089487076,
-0.0019199119415134192,
-0.27253541350364685,
-0.020131215453147888,
0.05522643029689789,
-0.08681139349937439,
0.023273861035704613,
-0.07314477115869522,
0.03877531364560127,
0.026962274685502052,
-0.06788846850395203,
0.04281340539455414,
0.06939021497964859,
0.006516376510262489,
-0.1326993703842163,
-0.059786371886730194,
0.030326835811138153,
-0.05396554246544838,
-0.002055640332400799,
0.06873446702957153,
0.05405637249350548,
-0.008859154768288136,
0.036224737763404846,
-0.13122765719890594,
0.102481909096241,
0.0848095715045929,
-0.011852121911942959,
-0.06440623104572296,
0.00941009446978569,
0.03714446723461151,
0.056918248534202576,
-0.017660124227404594,
0.020629368722438812,
-0.01927940919995308,
-0.10703737288713455,
0.01971438154578209,
-0.039570827037096024,
-0.08582571893930435,
-0.0695762112736702,
0.07285647094249725,
0.16710823774337769,
0.16526614129543304,
-0.01257476955652237,
-0.06438609212636948,
-0.13170035183429718,
0.007428019307553768,
0.018018607050180435,
-0.2599913775920868,
-0.1555403620004654,
-0.05545978620648384,
0.06323758512735367,
0.024400092661380768,
0.13897624611854553,
-0.026127230376005173,
0.01947212964296341,
-0.02091566100716591,
-0.05024801939725876,
0.10404578596353531,
0.02625458687543869,
0.23110172152519226,
0.05398644134402275,
-0.001834757043980062,
-0.08207985758781433,
0.006820402108132839,
0.13369949162006378,
0.11287838965654373,
0.15164485573768616,
0.19433064758777618,
0.032821398228406906,
0.08150383830070496,
-0.03751835599541664,
0.03126133233308792,
-0.036609429866075516,
-0.11440963298082352,
0.04353826120495796,
-0.06240679696202278,
0.05512996390461922,
0.1993301659822464,
-0.020860567688941956,
-0.10620729625225067,
-0.0726129412651062,
-0.11943534761667252,
-0.09607718884944916,
-0.12371440976858139,
0.25294119119644165,
-0.05516195297241211,
-0.004347274079918861,
-0.04606464505195618,
-0.17227524518966675,
0.07152864336967468,
-0.06477817893028259,
0.002610418014228344,
0.014594296924769878,
0.13943515717983246,
-0.029654555022716522,
-0.06571381539106369,
0.0765066146850586,
0.035144999623298645,
-0.018355343490839005,
0.013618615455925465,
-0.055551618337631226,
-0.017167698591947556,
0.04487530514597893,
0.05952486768364906,
-0.056864187121391296,
-0.06541867554187775,
-0.09035379439592361,
-0.05460166186094284,
-0.03887780383229256,
-0.05847998335957527,
-0.030527830123901367,
-0.05754838511347771,
0.022387083619832993,
0.05613650754094124,
-0.003760205814614892,
-0.007458334323018789,
0.09470674395561218,
-0.06922651827335358,
0.004176161717623472,
-0.08735093474388123,
0.24985674023628235,
0.07953976839780807,
0.14122380316257477,
0.016453443095088005,
0.007257699966430664,
-0.023646827787160873,
0.3694702386856079,
-0.1400342434644699,
-0.18115538358688354,
-0.07337018847465515,
0.06455211341381073,
-0.004141310229897499,
-0.00024839057005010545,
0.1285228729248047,
0.07618872076272964,
0.027381962165236473,
-0.05253414437174797,
-0.014697856269776821,
-0.03397822380065918,
-0.09821411967277527,
-0.2045556902885437,
-0.05026552081108093,
0.08275074511766434,
0.0572042390704155,
-0.15579122304916382,
-0.06815911829471588,
0.14127294719219208,
-0.040898896753787994,
-0.08381213992834091,
-0.0613405779004097,
-0.0724281594157219,
-0.05917651951313019,
-0.023622214794158936,
-0.11399981379508972,
-0.008140997029840946,
-0.0929008200764656,
0.0859602689743042,
0.16338199377059937,
-0.01697504334151745,
-0.10253672301769257,
0.008113715797662735,
0.12853623926639557,
-0.03585952892899513,
0.08774873614311218,
0.05969011038541794,
0.08707627654075623,
0.07015352696180344,
-0.04201404005289078,
-0.11858904361724854,
0.12209869921207428,
-0.022119037806987762,
-0.17194023728370667,
0.014758618548512459,
0.06789103150367737,
0.04300081729888916,
0.02800978533923626,
-0.053123489022254944,
-0.01931707374751568,
-0.03582124039530754,
0.027519024908542633,
-0.11600884050130844,
-0.048815518617630005,
0.02498897723853588,
-0.0038353961426764727,
0.0667271688580513,
0.12357648462057114,
-0.03528165817260742,
-0.01930011622607708,
-0.11815342307090759,
0.053245797753334045,
0.076402448117733,
0.037993207573890686,
-0.03706842660903931,
-0.18577906489372253,
-0.06607142835855484,
0.048368994146585464,
0.011838503181934357,
-0.25834235548973083,
-0.02586214989423752,
-0.09996611624956131,
0.06310722976922989,
-0.043430641293525696,
0.0621979646384716,
-0.011086670681834221,
0.063321053981781,
0.032226819545030594,
-0.14605730772018433,
-0.013530979864299297,
-0.006866766139864922,
-0.06228933855891228,
-0.014561912976205349
] |
null | null |
transformers
|
**_NOTE: `bioformer-cased-v1.0` has been renamed to `bioformer-8L`. All links to `bioformer-cased-v1.0` will automatically redirect to `bioformer-8L`, including git operations. However, to avoid confusion, we recommend updating any existing local clones to point to the new repository URL._**
Bioformer-8L is a lightweight BERT model for biomedical text mining. Bioformer-8L uses a biomedical vocabulary and is pre-trained from scratch only on biomedical domain corpora. Our experiments show that Bioformer-8L is 3x as fast as BERT-base, and achieves comparable or even better performance than BioBERT/PubMedBERT on downstream NLP tasks.
Bioformer-8L has 8 layers (transformer blocks) with a hidden embedding size of 512, and the number of self-attention heads is 8. Its total number of parameters is 42,820,610.
**The usage of Bioformer-8L is the same as a standard BERT model. The documentation of BERT can be found [here](https://huggingface.co/docs/transformers/model_doc/bert).**
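As a quick sanity check (this snippet is illustrative and not part of the original card), the checkpoint can be loaded with the standard `AutoTokenizer`/`AutoModel` classes; the example sentence is arbitrary:
```python
from transformers import AutoTokenizer, AutoModel

tokenizer = AutoTokenizer.from_pretrained("bioformers/bioformer-8L")
model = AutoModel.from_pretrained("bioformers/bioformer-8L")

# Encode a biomedical sentence and obtain contextual embeddings
inputs = tokenizer("Metformin is a first-line treatment for type 2 diabetes.", return_tensors="pt")
outputs = model(**inputs)
print(outputs.last_hidden_state.shape)  # (1, sequence_length, 512); the hidden size is 512

# Parameter count of the loaded encoder (the card reports 42,820,610 parameters in total)
print(sum(p.numel() for p in model.parameters()))
```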
## Vocabulary of Bioformer-8L
Bioformer-8L uses a cased WordPiece vocabulary trained from a biomedical corpus, which included all PubMed abstracts (33 million, as of Feb 1, 2021) and 1 million PMC full-text articles. PMC has 3.6 million articles but we down-sampled them to 1 million such that the total sizes of PubMed abstracts and PMC full-text articles are approximately equal. To mitigate the out-of-vocabulary issue and include special symbols (e.g. male and female symbols) in biomedical literature, we trained Bioformer’s vocabulary from the Unicode text of the two resources. The vocabulary size of Bioformer-8L is 32768 (2^15), which is similar to that of the original BERT.
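As an illustration (not from the original card), the vocabulary size can be checked directly from the released tokenizer, and the segmentation of a biomedical phrase inspected:
```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("bioformers/bioformer-8L")
print(tokenizer.vocab_size)  # expected: 32768 (2**15)

# Inspect how a biomedical phrase is segmented by the cased WordPiece vocabulary
print(tokenizer.tokenize("acetylcholinesterase inhibitors reduce amyloid plaques"))
```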
## Pre-training of Bioformer-8L
Bioformer-8L was pre-trained from scratch on the same corpus as the vocabulary (33 million PubMed abstracts + 1 million PMC full-text articles). For the masked language modeling (MLM) objective, we used whole-word masking with a masking rate of 15%. There are debates on whether the next sentence prediction (NSP) objective could improve the performance on downstream tasks. We include it in our pre-training experiment in case the prediction of the next sentence is needed by end-users. Sentence segmentation of all training text was performed using [SciSpacy](https://allenai.github.io/scispacy/).
Pre-training of Bioformer-8L was performed on a single Cloud TPU device (TPUv2, 8 cores, 8GB memory per core). The maximum input sequence length was fixed to 512, and the batch size was set to 256. We pre-trained Bioformer-8L for 2 million steps, which took about 8.3 days.
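Pre-training itself was done with the TPU setup described above; the snippet below is only a rough sketch of how a comparable whole-word-masking MLM batch (15% masking rate, 512-token sequences) could be produced with the Hugging Face data collator, not a reproduction of the actual pre-training pipeline:
```python
from transformers import AutoTokenizer, DataCollatorForWholeWordMask

tokenizer = AutoTokenizer.from_pretrained("bioformers/bioformer-8L")

# Whole-word masking with the 15% masking rate described above
collator = DataCollatorForWholeWordMask(tokenizer=tokenizer, mlm_probability=0.15)

encoded = tokenizer(
    ["Insulin regulates glucose uptake in muscle and adipose tissue."],
    truncation=True, max_length=512,
)
batch = collator([{"input_ids": ids} for ids in encoded["input_ids"]])
print(batch["input_ids"].shape, batch["labels"].shape)
```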
## Usage
Prerequisites: python3, pytorch, transformers and datasets
We have tested the following commands on Python v3.9.16, PyTorch v1.13.1+cu117, Datasets v2.9.0 and Transformers v4.26.
To install pytorch, please refer to instructions [here](https://pytorch.org/get-started/locally).
To install the `transformers` and `datasets` libraries:
```
pip install transformers
pip install datasets
```
### Filling mask
```
from transformers import pipeline
unmasker8L = pipeline('fill-mask', model='bioformers/bioformer-8L')
unmasker8L("[MASK] refers to a group of diseases that affect how the body uses blood sugar (glucose)")
unmasker16L = pipeline('fill-mask', model='bioformers/bioformer-16L')
unmasker16L("[MASK] refers to a group of diseases that affect how the body uses blood sugar (glucose)")
```
Output of `bioformer-8L`:
```
[{'score': 0.3207533359527588,
'token': 13473,
'token_str': 'Diabetes',
'sequence': 'Diabetes refers to a group of diseases that affect how the body uses blood sugar ( glucose )'},
{'score': 0.19234347343444824,
'token': 17740,
'token_str': 'Obesity',
'sequence': 'Obesity refers to a group of diseases that affect how the body uses blood sugar ( glucose )'},
{'score': 0.09200277179479599,
'token': 10778,
'token_str': 'T2DM',
'sequence': 'T2DM refers to a group of diseases that affect how the body uses blood sugar ( glucose )'},
{'score': 0.08494312316179276,
'token': 2228,
'token_str': 'It',
'sequence': 'It refers to a group of diseases that affect how the body uses blood sugar ( glucose )'},
{'score': 0.0412776917219162,
'token': 22263,
 'token_str': 'Hypertension',
'sequence': 'Hypertension refers to a group of diseases that affect how the body uses blood sugar ( glucose )'}]
```
Output of `bioformer-16L`:
```
[{'score': 0.7262957692146301,
'token': 13473,
'token_str': 'Diabetes',
'sequence': 'Diabetes refers to a group of diseases that affect how the body uses blood sugar ( glucose )'},
{'score': 0.124954953789711,
'token': 10778,
'token_str': 'T2DM',
'sequence': 'T2DM refers to a group of diseases that affect how the body uses blood sugar ( glucose )'},
{'score': 0.04062706232070923,
'token': 2228,
'token_str': 'It',
'sequence': 'It refers to a group of diseases that affect how the body uses blood sugar ( glucose )'},
{'score': 0.022694870829582214,
'token': 17740,
'token_str': 'Obesity',
'sequence': 'Obesity refers to a group of diseases that affect how the body uses blood sugar ( glucose )'},
{'score': 0.009743048809468746,
'token': 13960,
'token_str': 'T2D',
'sequence': 'T2D refers to a group of diseases that affect how the body uses blood sugar ( glucose )'}]
```
## Awards
Bioformer-8L achieved top performance (highest micro-F1 score) in the BioCreative VII COVID-19 multi-label topic classification challenge (https://doi.org/10.1093/database/baac069)
## Links
[Bioformer-16L](https://huggingface.co/bioformers/bioformer-16L)
## Acknowledgment
Training and evaluation of Bioformer-8L is supported by the Google TPU Research Cloud (TRC) program, the Intramural Research Program of the National Library of Medicine (NLM), National Institutes of Health (NIH), and NIH/NLM grants LM012895 and 1K99LM014024-01.
## Questions
If you have any questions, please submit an issue here: https://github.com/WGLab/bioformer/issues
You can also send an email to Li Fang ([email protected], https://fangli80.github.io/).
## Citation
You can cite our preprint on arXiv:
Fang L, Chen Q, Wei C-H, Lu Z, Wang K: Bioformer: an efficient transformer language model for biomedical text mining. arXiv preprint arXiv:2302.01588 (2023). DOI: https://doi.org/10.48550/arXiv.2302.01588
BibTeX format:
```
@ARTICLE{fangli2023bioformer,
author = {{Fang}, Li and {Chen}, Qingyu and {Wei}, Chih-Hsuan and {Lu}, Zhiyong and {Wang}, Kai},
title = "{Bioformer: an efficient transformer language model for biomedical text mining}",
journal = {arXiv preprint arXiv:2302.01588},
year = {2023}
}
```
|
{"language": ["en"], "license": "apache-2.0", "pipeline_tag": "fill-mask"}
|
fill-mask
|
bioformers/bioformer-8L
|
[
"transformers",
"pytorch",
"tf",
"safetensors",
"bert",
"fill-mask",
"en",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #tf #safetensors #bert #fill-mask #en #license-apache-2.0 #autotrain_compatible #endpoints_compatible #region-us
|
_NOTE: 'bioformer-cased-v1.0' has been renamed to 'bioformer-8L'. All links to 'bioformer-cased-v1.0' will automatically redirect to 'bioformer-8L', including git operations. However, to avoid confusion, we recommend updating any existing local clones to point to the new repository URL._
Bioformer-8L is a lightweight BERT model for biomedical text mining. Bioformer-8L uses a biomedical vocabulary and is pre-trained from scratch only on biomedical domain corpora. Our experiments show that Bioformer-8L is 3x as fast as BERT-base, and achieves comparable or even better performance than BioBERT/PubMedBERT on downstream NLP tasks.
Bioformer-8L has 8 layers (transformer blocks) with a hidden embedding size of 512, and the number of self-attention heads is 8. Its total number of parameters is 42,820,610.
The usage of Bioformer-8L is the same as a standard BERT model. The documentation of BERT can be found here.
## Vocabulary of Bioformer-8L
Bioformer-8L uses a cased WordPiece vocabulary trained from a biomedical corpus, which included all PubMed abstracts (33 million, as of Feb 1, 2021) and 1 million PMC full-text articles. PMC has 3.6 million articles but we down-sampled them to 1 million such that the total size of PubMed abstracts and PMC full-text articles are approximately equal. To mitigate the out-of-vocabulary issue and include special symbols (e.g. male and female symbols) in biomedical literature, we trained Bioformer’s vocabulary from the Unicode text of the two resources. The vocabulary size of Bioformer-8L is 32768 (2^15), which is similar to that of the original BERT.
## Pre-training of Bioformer-8L
Bioformer-8L was pre-trained from scratch on the same corpus as the vocabulary (33 million PubMed abstracts + 1 million PMC full-text articles). For the masked language modeling (MLM) objective, we used whole-word masking with a masking rate of 15%. There are debates on whether the next sentence prediction (NSP) objective could improve the performance on downstream tasks. We include it in our pre-training experiment in case the prediction of the next sentence is needed by end-users. Sentence segmentation of all training text was performed using SciSpacy.
Pre-training of Bioformer-8L was performed on a single Cloud TPU device (TPUv2, 8 cores, 8GB memory per core). The maximum input sequence length was fixed to 512, and the batch size was set to 256. We pre-trained Bioformer-8L for 2 million steps, which took about 8.3 days.
## Usage
Prerequisites: python3, pytorch, transformers and datasets
We have tested the following commands on Python v3.9.16, PyTorch v1.13.1+cu117, Datasets v2.9.0 and Transformers v4.26.
To install pytorch, please refer to instructions here.
To install the 'transformers' and 'datasets' libraries:
### Filling mask
Output of 'bioformer-8L':
Output of 'bioformer-16L':
## Awards
Bioformer-8L achieved top performance (highest micro-F1 score) in the BioCreative VII COVID-19 multi-label topic classification challenge (URL
## Links
Bioformer-16L
## Acknowledgment
Training and evaluation of Bioformer-8L is supported by the Google TPU Research Cloud (TRC) program, the Intramural Research Program of the National Library of Medicine (NLM), National Institutes of Health (NIH), and NIH/NLM grants LM012895 and 1K99LM014024-01.
## Questions
If you have any questions, please submit an issue here: URL
You can also send an email to Li Fang (fangli9@URL, URL
You can cite our preprint on arXiv:
Fang L, Chen Q, Wei C-H, Lu Z, Wang K: Bioformer: an efficient transformer language model for biomedical text mining. arXiv preprint arXiv:2302.01588 (2023). DOI: URL
BibTeX format:
|
[
"## Vocabulary of Bioformer-8L\nBioformer-8L uses a cased WordPiece vocabulary trained from a biomedical corpus, which included all PubMed abstracts (33 million, as of Feb 1, 2021) and 1 million PMC full-text articles. PMC has 3.6 million articles but we down-sampled them to 1 million such that the total size of PubMed abstracts and PMC full-text articles are approximately equal. To mitigate the out-of-vocabulary issue and include special symbols (e.g. male and female symbols) in biomedical literature, we trained Bioformer’s vocabulary from the Unicode text of the two resources. The vocabulary size of Bioformer-8L is 32768 (2^15), which is similar to that of the original BERT.",
"## Pre-training of Bioformer-8L\nBioformer-8L was pre-trained from scratch on the same corpus as the vocabulary (33 million PubMed abstracts + 1 million PMC full-text articles). For the masked language modeling (MLM) objective, we used whole-word masking with a masking rate of 15%. There are debates on whether the next sentence prediction (NSP) objective could improve the performance on downstream tasks. We include it in our pre-training experiment in case the prediction of the next sentence is needed by end-users. Sentence segmentation of all training text was performed using SciSpacy.\n\nPre-training of Bioformer-8L was performed on a single Cloud TPU device (TPUv2, 8 cores, 8GB memory per core). The maximum input sequence length was fixed to 512, and the batch size was set to 256. We pre-trained Bioformer-8L for 2 million steps, which took about 8.3 days.",
"## Usage\n\nPrerequisites: python3, pytorch, transformers and datasets\n\nWe have tested the following commands on Python v3.9.16, PyTorch v1.13.1+cu117, Datasets v2.9.0 and Transformers v4.26.\n\nTo install pytorch, please refer to instructions here.\n\nTo install the 'transformers' and 'datasets' library:",
"### Filling mask\n\n\n\nOutput of 'bioformer-8L':\n\n\n\nOutput of 'bioformer-16L':",
"## Awards\nBioformer-8L achieved top performance (highest micro-F1 score) in the BioCreative VII COVID-19 multi-label topic classification challenge (URL",
"## Links\n\nBioformer-16L",
"## Acknowledgment\n\nTraining and evaluation of Bioformer-8L is supported by the Google TPU Research Cloud (TRC) program, the Intramural Research Program of the National Library of Medicine (NLM), National Institutes of Health (NIH), and NIH/NLM grants LM012895 and 1K99LM014024-01.",
"## Questions\nIf you have any questions, please submit an issue here: URL\n\nYou can also send an email to Li Fang (fangli9@URL, URL\n\n\nYou can cite our preprint on arXiv:\n\nFang L, Chen Q, Wei C-H, Lu Z, Wang K: Bioformer: an efficient transformer language model for biomedical text mining. arXiv preprint arXiv:2302.01588 (2023). DOI: URL\n\n\nBibTeX format:"
] |
[
"TAGS\n#transformers #pytorch #tf #safetensors #bert #fill-mask #en #license-apache-2.0 #autotrain_compatible #endpoints_compatible #region-us \n",
"## Vocabulary of Bioformer-8L\nBioformer-8L uses a cased WordPiece vocabulary trained from a biomedical corpus, which included all PubMed abstracts (33 million, as of Feb 1, 2021) and 1 million PMC full-text articles. PMC has 3.6 million articles but we down-sampled them to 1 million such that the total size of PubMed abstracts and PMC full-text articles are approximately equal. To mitigate the out-of-vocabulary issue and include special symbols (e.g. male and female symbols) in biomedical literature, we trained Bioformer’s vocabulary from the Unicode text of the two resources. The vocabulary size of Bioformer-8L is 32768 (2^15), which is similar to that of the original BERT.",
"## Pre-training of Bioformer-8L\nBioformer-8L was pre-trained from scratch on the same corpus as the vocabulary (33 million PubMed abstracts + 1 million PMC full-text articles). For the masked language modeling (MLM) objective, we used whole-word masking with a masking rate of 15%. There are debates on whether the next sentence prediction (NSP) objective could improve the performance on downstream tasks. We include it in our pre-training experiment in case the prediction of the next sentence is needed by end-users. Sentence segmentation of all training text was performed using SciSpacy.\n\nPre-training of Bioformer-8L was performed on a single Cloud TPU device (TPUv2, 8 cores, 8GB memory per core). The maximum input sequence length was fixed to 512, and the batch size was set to 256. We pre-trained Bioformer-8L for 2 million steps, which took about 8.3 days.",
"## Usage\n\nPrerequisites: python3, pytorch, transformers and datasets\n\nWe have tested the following commands on Python v3.9.16, PyTorch v1.13.1+cu117, Datasets v2.9.0 and Transformers v4.26.\n\nTo install pytorch, please refer to instructions here.\n\nTo install the 'transformers' and 'datasets' library:",
"### Filling mask\n\n\n\nOutput of 'bioformer-8L':\n\n\n\nOutput of 'bioformer-16L':",
"## Awards\nBioformer-8L achieved top performance (highest micro-F1 score) in the BioCreative VII COVID-19 multi-label topic classification challenge (URL",
"## Links\n\nBioformer-16L",
"## Acknowledgment\n\nTraining and evaluation of Bioformer-8L is supported by the Google TPU Research Cloud (TRC) program, the Intramural Research Program of the National Library of Medicine (NLM), National Institutes of Health (NIH), and NIH/NLM grants LM012895 and 1K99LM014024-01.",
"## Questions\nIf you have any questions, please submit an issue here: URL\n\nYou can also send an email to Li Fang (fangli9@URL, URL\n\n\nYou can cite our preprint on arXiv:\n\nFang L, Chen Q, Wei C-H, Lu Z, Wang K: Bioformer: an efficient transformer language model for biomedical text mining. arXiv preprint arXiv:2302.01588 (2023). DOI: URL\n\n\nBibTeX format:"
] |
[
54,
180,
217,
90,
25,
36,
6,
77,
105
] |
[
"passage: TAGS\n#transformers #pytorch #tf #safetensors #bert #fill-mask #en #license-apache-2.0 #autotrain_compatible #endpoints_compatible #region-us \n## Vocabulary of Bioformer-8L\nBioformer-8L uses a cased WordPiece vocabulary trained from a biomedical corpus, which included all PubMed abstracts (33 million, as of Feb 1, 2021) and 1 million PMC full-text articles. PMC has 3.6 million articles but we down-sampled them to 1 million such that the total size of PubMed abstracts and PMC full-text articles are approximately equal. To mitigate the out-of-vocabulary issue and include special symbols (e.g. male and female symbols) in biomedical literature, we trained Bioformer’s vocabulary from the Unicode text of the two resources. The vocabulary size of Bioformer-8L is 32768 (2^15), which is similar to that of the original BERT.## Pre-training of Bioformer-8L\nBioformer-8L was pre-trained from scratch on the same corpus as the vocabulary (33 million PubMed abstracts + 1 million PMC full-text articles). For the masked language modeling (MLM) objective, we used whole-word masking with a masking rate of 15%. There are debates on whether the next sentence prediction (NSP) objective could improve the performance on downstream tasks. We include it in our pre-training experiment in case the prediction of the next sentence is needed by end-users. Sentence segmentation of all training text was performed using SciSpacy.\n\nPre-training of Bioformer-8L was performed on a single Cloud TPU device (TPUv2, 8 cores, 8GB memory per core). The maximum input sequence length was fixed to 512, and the batch size was set to 256. We pre-trained Bioformer-8L for 2 million steps, which took about 8.3 days."
] |
[
-0.05647857487201691,
0.04730188474059105,
-0.003238373203203082,
0.0005547406617552042,
0.05931759253144264,
0.014831350184977055,
0.0699847936630249,
0.11069706082344055,
-0.05027611181139946,
0.09666968137025833,
0.05334116145968437,
-0.09076055884361267,
0.011165192350745201,
0.07687756419181824,
0.07306980341672897,
-0.21482212841510773,
0.02185959741473198,
0.009632393717765808,
-0.02446516416966915,
0.048767708241939545,
0.08768858015537262,
-0.08642172068357468,
0.06263744831085205,
-0.02116345800459385,
-0.02338218316435814,
0.027758540585637093,
0.05033230409026146,
-0.026180898770689964,
0.07581744343042374,
0.03472740948200226,
0.03179331496357918,
-0.026121938601136208,
0.06915981322526932,
-0.1460651308298111,
0.008137760683894157,
0.05994565039873123,
0.0638478547334671,
0.07519423961639404,
0.06616920977830887,
0.03948454186320305,
0.24183312058448792,
-0.057599008083343506,
0.05116041377186775,
0.05690186470746994,
-0.07434912025928497,
-0.1204221323132515,
-0.05733355134725571,
0.03977401927113533,
0.051981233060359955,
0.040922749787569046,
0.002004158915951848,
0.09722121059894562,
-0.01767977885901928,
0.10269976407289505,
0.13909724354743958,
-0.2583554983139038,
-0.017227962613105774,
0.03885020315647125,
0.07873210310935974,
0.07327034324407578,
-0.031721990555524826,
0.049825381487607956,
0.018369214609265327,
0.0789099708199501,
0.09162585437297821,
-0.012966342270374298,
0.11182309687137604,
-0.03427456319332123,
-0.09158243238925934,
-0.03283106908202171,
0.14238522946834564,
-0.03734631463885307,
-0.07273189723491669,
-0.0404244139790535,
-0.0676618441939354,
0.012696655467152596,
-0.02046099118888378,
-0.026849811896681786,
0.02960529737174511,
-0.010379510931670666,
0.005320896860212088,
-0.0015102755278348923,
-0.08255930244922638,
-0.060487501323223114,
-0.0845576673746109,
0.16608035564422607,
0.028060998767614365,
0.05765914171934128,
-0.00982193648815155,
0.013023835606873035,
-0.033441316336393356,
-0.07055815309286118,
-0.030454084277153015,
-0.006003032438457012,
-0.05218348652124405,
-0.00006322836998151615,
-0.11001438647508621,
-0.13112594187259674,
-0.0017350666457787156,
0.0015029072528705,
-0.12528587877750397,
0.005514480639249086,
0.058461129665374756,
0.04370946064591408,
0.03473169729113579,
0.041665881872177124,
-0.06296686083078384,
-0.035521991550922394,
0.03917013108730316,
-0.03052925318479538,
0.011006215587258339,
0.023890603333711624,
-0.034440867602825165,
0.06397980451583862,
0.051552724093198776,
0.007744495291262865,
-0.06413497775793076,
-0.018644046038389206,
-0.008076922968029976,
-0.03684881329536438,
0.045440807938575745,
-0.13013064861297607,
-0.011950886808335781,
-0.021614912897348404,
-0.022685175761580467,
0.05959828197956085,
0.05681739002466202,
-0.0956173688173294,
-0.13186821341514587,
0.03675656393170357,
-0.07937928289175034,
-0.01924077235162258,
-0.11923597007989883,
-0.16194158792495728,
0.036248091608285904,
-0.012072049081325531,
-0.06039776653051376,
-0.10524279624223709,
-0.08723483979701996,
-0.011824794113636017,
0.06496730446815491,
-0.02473570592701435,
-0.015892108902335167,
0.00832926481962204,
0.03891896829009056,
-0.008847471326589584,
0.012606976553797722,
0.044493190944194794,
-0.03029564581811428,
0.042020026594400406,
-0.007781840395182371,
0.11291781812906265,
-0.06849610060453415,
0.0254661962389946,
-0.051520198583602905,
0.03641768917441368,
-0.18996918201446533,
0.022619836032390594,
0.0008195413392968476,
-0.04352688416838646,
-0.0700124055147171,
-0.0779254138469696,
-0.1197190210223198,
0.0366256944835186,
0.07789668440818787,
0.05263533443212509,
-0.03594564273953438,
-0.0547022670507431,
0.13639914989471436,
-0.07252505421638489,
-0.06424874067306519,
0.12004567682743073,
0.0009150457917712629,
0.021340010687708855,
0.11789458990097046,
0.09685774147510529,
-0.011669108644127846,
-0.07252487540245056,
-0.021250084042549133,
-0.018004847690463066,
0.0084345992654562,
-0.03868947923183441,
0.06895334273576736,
-0.006166399922221899,
0.012538881041109562,
0.04293142259120941,
0.013681802898645401,
-0.005441705696284771,
-0.0541616827249527,
0.0072435359470546246,
-0.022612914443016052,
-0.03687627613544464,
-0.08471216261386871,
-0.0019448577659204602,
0.00018285959959030151,
-0.0802716463804245,
-0.012121873907744884,
0.11066190898418427,
0.06385055929422379,
-0.08672450482845306,
0.03266063705086708,
-0.03402756527066231,
0.01972925290465355,
-0.17176571488380432,
0.017097027972340584,
-0.1934753954410553,
-0.02540956437587738,
0.0260922834277153,
0.03519617021083832,
0.08015784621238708,
0.13057942688465118,
0.02340632490813732,
0.09093829989433289,
-0.10104870051145554,
0.0767258033156395,
-0.02013753168284893,
-0.031374741345644,
-0.08831365406513214,
-0.11927537620067596,
-0.021041907370090485,
-0.07165506482124329,
-0.07532698661088943,
-0.025978628545999527,
0.0009519081213511527,
-0.1040373146533966,
0.021968906745314598,
0.008429571986198425,
-0.08180440962314606,
0.060901425778865814,
0.02372286282479763,
-0.02803855575621128,
0.004636105615645647,
0.044378891587257385,
-0.000057635075791040435,
-0.07322853803634644,
0.11211536079645157,
-0.12978599965572357,
0.07336485385894775,
0.0587700791656971,
0.05024193972349167,
-0.028635293245315552,
0.02083554118871689,
-0.03079756163060665,
-0.017088599503040314,
-0.1142508015036583,
-0.09799781441688538,
0.19165052473545074,
-0.0028630183078348637,
0.12482423335313797,
-0.15012376010417938,
-0.043383270502090454,
0.027873529121279716,
-0.016745198518037796,
0.006060605403035879,
0.050511520355939865,
0.09444164484739304,
-0.17833328247070312,
0.023507196456193924,
0.009772163815796375,
0.03903365507721901,
0.12005937844514847,
0.035128120332956314,
-0.13048098981380463,
-0.029507825151085854,
-0.013055847026407719,
-0.0006891240482218564,
0.10524368286132812,
-0.05544046685099602,
-0.002607973525300622,
0.052026644349098206,
0.051108211278915405,
0.05904045328497887,
-0.07168512046337128,
0.07387780398130417,
0.04005969315767288,
-0.04808102920651436,
-0.08392138034105301,
-0.059693288058042526,
-0.02104051411151886,
0.1507248431444168,
0.0176868736743927,
0.010308756493031979,
-0.06270425021648407,
0.00926261581480503,
-0.12525732815265656,
0.16668352484703064,
-0.07477319985628128,
-0.17828917503356934,
-0.13313840329647064,
0.022627349942922592,
-0.03885498270392418,
0.035734813660383224,
-0.029368923977017403,
-0.02063385583460331,
-0.09120073169469833,
-0.10294923186302185,
0.012136255390942097,
-0.00009730811143526807,
0.024318793788552284,
0.01893654465675354,
0.03946426138281822,
0.06543977558612823,
-0.1497955322265625,
-0.03399718180298805,
-0.06496802717447281,
0.012232253327965736,
0.025974562391638756,
-0.07145505398511887,
0.08231870830059052,
0.15184229612350464,
-0.08161191642284393,
-0.006284262519329786,
-0.0024780461098998785,
0.14294669032096863,
-0.01027845498174429,
0.08692336827516556,
0.1400902271270752,
0.04051213338971138,
-0.0033350721932947636,
-0.0139293959364295,
0.04631861299276352,
-0.07688821852207184,
0.06410123407840729,
0.0316736102104187,
-0.09422212094068527,
-0.20869873464107513,
-0.09396646916866302,
-0.06668093800544739,
-0.05786175653338432,
0.044425200670957565,
0.030776606872677803,
-0.10700797289609909,
-0.0018584877252578735,
-0.0655524805188179,
0.01925249956548214,
-0.007194675039499998,
0.0582234151661396,
-0.05371427908539772,
-0.029801268130540848,
0.14268748462200165,
-0.030558839440345764,
-0.006088683847337961,
0.0861029401421547,
-0.03630661219358444,
0.2189813107252121,
-0.053341079503297806,
0.05582864210009575,
0.06854171305894852,
-0.06975404173135757,
0.058860912919044495,
0.17715387046337128,
-0.0913403183221817,
0.0024087983183562756,
-0.05091790854930878,
-0.05512043833732605,
-0.06813450902700424,
0.027161961421370506,
-0.0358300507068634,
0.022589679807424545,
-0.06805279850959778,
0.015578560531139374,
0.024313990026712418,
0.27306097745895386,
0.09135624766349792,
-0.16677020490169525,
-0.07723553478717804,
0.0005070467595942318,
-0.05657513067126274,
-0.04675658047199249,
0.03229048475623131,
0.25904902815818787,
-0.0022102813236415386,
0.040432803332805634,
-0.0014883686089888215,
0.10000474750995636,
-0.002532828599214554,
0.02493470534682274,
-0.041690174490213394,
0.05175608769059181,
-0.027990220114588737,
0.0625104233622551,
-0.11959223449230194,
0.1468684822320938,
0.05147970840334892,
0.09634846448898315,
-0.055392492562532425,
-0.0010506205726414919,
0.036726679652929306,
0.053707681596279144,
0.042199406772851944,
0.06592162698507309,
-0.1700114756822586,
-0.07101406902074814,
-0.11234010756015778,
0.05386070907115936,
0.08148440718650818,
0.03737431392073631,
0.14241057634353638,
0.009046888910233974,
-0.012131870724260807,
-0.008345754817128181,
-0.05575382336974144,
-0.09325359016656876,
-0.09352068603038788,
0.0032231623772531748,
-0.04555753991007805,
-0.11643202602863312,
-0.04910683259367943,
-0.023658057674765587,
0.039175860583782196,
0.239582359790802,
-0.04517200589179993,
-0.07681500166654587,
-0.11510397493839264,
0.1097952276468277,
0.04781597480177879,
-0.04502210393548012,
-0.02234012633562088,
0.027659567072987556,
0.12339358776807785,
-0.017351046204566956,
-0.12294497340917587,
0.04368738457560539,
-0.07563475519418716,
-0.10733857750892639,
-0.04887120798230171,
0.1165316253900528,
0.004857705906033516,
0.11302129179239273,
0.04282164201140404,
0.040441595017910004,
-0.004859707318246365,
-0.07745257019996643,
0.033444322645664215,
0.06296445429325104,
0.08448321372270584,
0.06876217573881149,
-0.2544267773628235,
0.04440424218773842,
-0.002865708200260997,
-0.016908513382077217,
0.0704406201839447,
0.13482069969177246,
-0.03448592126369476,
0.07135191559791565,
0.23869876563549042,
-0.12177781760692596,
-0.21995556354522705,
-0.008795633912086487,
0.004373318050056696,
0.07321462780237198,
0.039369724690914154,
-0.14521893858909607,
0.11812121421098709,
0.09202193468809128,
0.04347614571452141,
0.030252350494265556,
-0.20276521146297455,
-0.1097523421049118,
-0.023485977202653885,
0.08052937686443329,
0.23805172741413116,
-0.06892695277929306,
0.007733769714832306,
-0.08687324076890945,
0.03806999325752258,
0.01310500968247652,
-0.04598580673336983,
0.15622767806053162,
-0.06470426172018051,
-0.0527455098927021,
0.03020847961306572,
-0.054410241544246674,
0.078106589615345,
-0.03837408870458603,
0.06804675608873367,
0.008981588296592236,
0.08526475727558136,
0.11817196011543274,
-0.0029323091730475426,
0.20116986334323883,
-0.060041703283786774,
0.035167545080184937,
-0.05759149789810181,
-0.08550821244716644,
-0.0739060714840889,
-0.03533024713397026,
-0.013118213042616844,
-0.03427616134285927,
-0.0707414522767067,
0.04089277982711792,
0.055160850286483765,
-0.0052501182071864605,
-0.017453886568546295,
-0.11342751234769821,
0.07543738186359406,
0.14290130138397217,
0.1325322836637497,
-0.1665903776884079,
-0.06248600408434868,
0.035760167986154556,
-0.011357193812727928,
0.07074503600597382,
-0.12285779416561127,
0.045807644724845886,
0.11257238686084747,
0.02485079877078533,
0.06003151834011078,
0.05768425390124321,
-0.07467415928840637,
-0.011414892040193081,
0.07950355857610703,
-0.09346848726272583,
-0.15780243277549744,
-0.018214648589491844,
-0.006569727323949337,
-0.1450493186712265,
-0.029898272827267647,
0.15196581184864044,
-0.059963323175907135,
-0.00623444514349103,
-0.007654376793652773,
0.03842717409133911,
-0.06637902557849884,
0.24731716513633728,
0.03995361551642418,
0.034163206815719604,
-0.05319090932607651,
0.06007146090269089,
0.03081858716905117,
-0.07747679203748703,
0.0324631929397583,
0.09397128224372864,
-0.09039246290922165,
-0.030661767348647118,
0.0506158284842968,
0.11130103468894958,
0.06573013216257095,
-0.04077887907624245,
-0.15698690712451935,
-0.13890942931175232,
0.10638384521007538,
0.13509786128997803,
0.027765851467847824,
0.02337314561009407,
-0.06783393025398254,
-0.01972651481628418,
-0.10843084752559662,
0.07665351033210754,
-0.0165941771119833,
0.05605710670351982,
0.03809383511543274,
0.1456160992383957,
-0.02249453216791153,
-0.006569833494722843,
-0.057837001979351044,
-0.013380654156208038,
-0.08960765600204468,
0.005462180823087692,
-0.10083895921707153,
-0.051129668951034546,
-0.014573557302355766,
-0.07642802596092224,
-0.01584363728761673,
0.0458294041454792,
-0.043499525636434555,
0.03156158700585365,
-0.045577649027109146,
-0.031208304688334465,
-0.048092618584632874,
0.012103994376957417,
-0.04642881453037262,
-0.029806990176439285,
-0.008008559234440327,
-0.06272988021373749,
0.04399272799491882,
-0.015813814476132393,
-0.03695839270949364,
-0.04785973206162453,
-0.014944681897759438,
-0.016390521079301834,
0.06577616184949875,
0.0836901068687439,
-0.03460899740457535,
-0.14825479686260223,
0.04870401322841644,
0.05147591978311539,
0.020855695009231567,
0.02649286389350891,
0.09580843150615692,
-0.06729970127344131,
0.022352276369929314,
-0.039631038904190063,
-0.04707813262939453,
-0.08631773293018341,
-0.03234296664595604,
0.02261306904256344,
0.104240283370018,
0.11878068745136261,
-0.06711115688085556,
-0.00827014073729515,
-0.21845844388008118,
0.02047768421471119,
0.02947084605693817,
-0.04819831624627113,
-0.07392558455467224,
-0.004166546743363142,
0.05643976107239723,
0.04018831253051758,
0.14826267957687378,
-0.0014619346475228667,
-0.038236215710639954,
0.045380085706710815,
0.04962170869112015,
0.03893796354532242,
-0.04438243806362152,
0.006591493729501963,
-0.017242131754755974,
-0.05626489967107773,
-0.008401450701057911,
0.06661660969257355,
0.039099570363759995,
0.11014515161514282,
0.21923306584358215,
0.1034790500998497,
0.20721766352653503,
0.07917051017284393,
-0.05527476966381073,
0.042932718992233276,
-0.018072504550218582,
0.0036871950142085552,
0.05076208710670471,
0.0964534804224968,
0.04854203015565872,
-0.01520540937781334,
0.18485668301582336,
-0.17095820605754852,
0.12904301285743713,
-0.027596691623330116,
-0.054895441979169846,
-0.09110874682664871,
-0.06003830209374428,
-0.042247120290994644,
-0.015063599683344364,
-0.028371309861540794,
-0.16309396922588348,
-0.001250561559572816,
0.037801168859004974,
0.042564984411001205,
-0.016734588891267776,
0.06346610188484192,
-0.20184798538684845,
-0.02086479589343071,
0.019740479066967964,
-0.0009383754804730415,
0.05238322541117668,
0.06403037905693054,
-0.013254736550152302,
0.04153582081198692,
0.007607725448906422,
0.09720874577760696,
0.05071108043193817,
0.03972256928682327,
-0.003838800359517336,
-0.004605071619153023,
-0.050400007516145706,
-0.0027130600064992905,
-0.08230458945035934,
0.06454978883266449,
0.23617105185985565,
0.016486262902617455,
-0.08460436016321182,
0.02614653669297695,
0.10660914331674576,
-0.05847310274839401,
-0.09120649844408035,
-0.14435432851314545,
0.19467788934707642,
0.09014090150594711,
0.006516754627227783,
0.0016480616759508848,
-0.036588333547115326,
-0.07939663529396057,
0.20972506701946259,
0.11078439652919769,
-0.04149923846125603,
-0.02832930162549019,
-0.029666010290384293,
-0.006472856272011995,
0.015293888747692108,
0.13567979633808136,
0.03816381096839905,
0.2551726996898651,
-0.0061024087481200695,
0.03772706910967827,
-0.059257905930280685,
0.025512930005788803,
-0.08648017048835754,
0.04541819915175438,
0.04652182385325432,
-0.0029429979622364044,
-0.04316763952374458,
0.01811608485877514,
0.045811377465724945,
-0.0712333396077156,
-0.020681535825133324,
-0.08300351351499557,
-0.07863814383745193,
-0.0018045254983007908,
-0.06303607672452927,
-0.029712462797760963,
0.0746898502111435,
-0.012451655231416225,
0.07527042925357819,
0.10391604900360107,
-0.00768169155344367,
-0.14290277659893036,
-0.11282923817634583,
0.09728410094976425,
-0.043517179787158966,
0.10899100452661514,
0.035153333097696304,
0.04527326300740242,
0.05325532704591751,
-0.026066675782203674,
-0.0803687795996666,
0.08712252974510193,
-0.05861065536737442,
-0.05644959211349487,
0.06720977276563644,
0.14831826090812683,
-0.01727835275232792,
0.055498573929071426,
0.012227458879351616,
-0.022149743512272835,
0.0038302969187498093,
-0.023815782740712166,
-0.03739720955491066,
-0.060628555715084076,
0.0779985561966896,
-0.0646219328045845,
0.12727878987789154,
0.19729557633399963,
0.02380332536995411,
0.005853376816958189,
-0.04369956627488136,
0.05556359887123108,
0.031134262681007385,
0.026165610179305077,
-0.020926445722579956,
-0.21467015147209167,
-0.0017790294950827956,
-0.07913914322853088,
-0.021640995517373085,
-0.3840865194797516,
-0.11842560023069382,
0.0031477203592658043,
-0.04885798320174217,
-0.0390116386115551,
0.08526867628097534,
0.0431947223842144,
0.04369639232754707,
-0.06427272409200668,
-0.14255976676940918,
0.055368710309267044,
0.06428089737892151,
-0.12361331284046173,
-0.08901997655630112
] |
null | null |
transformers
|
# BlueBert-Base, Uncased, PubMed and MIMIC-III
## Model description
A BERT model pre-trained on PubMed abstracts and clinical notes ([MIMIC-III](https://mimic.physionet.org/)).
## Intended uses & limitations
#### How to use
Please see https://github.com/ncbi-nlp/bluebert
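The repository above has the full instructions; as a minimal sketch (not from the official documentation), the checkpoint hosted on the Hub can also be loaded directly with `transformers`:
```python
from transformers import AutoTokenizer, AutoModel

model_id = "bionlp/bluebert_pubmed_mimic_uncased_L-12_H-768_A-12"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModel.from_pretrained(model_id)

# Encode a (lowercased, preprocessed) clinical sentence; see "Training procedure" below
inputs = tokenizer("the patient was admitted with acute renal failure .", return_tensors="pt")
outputs = model(**inputs)
print(outputs.last_hidden_state.shape)  # (1, sequence_length, 768)
```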
## Training data
We provide [preprocessed PubMed texts](https://ftp.ncbi.nlm.nih.gov/pub/lu/Suppl/NCBI-BERT/pubmed_uncased_sentence_nltk.txt.tar.gz) that were used to pre-train the BlueBERT models.
The corpus contains ~4000M words extracted from the [PubMed ASCII code version](https://www.ncbi.nlm.nih.gov/research/bionlp/APIs/BioC-PubMed/).
Pre-trained model: https://huggingface.co/bert-base-uncased
## Training procedure
* lowercasing the text
* removing special characters outside the range `\x00`-`\x7F`
* tokenizing the text using the [NLTK Treebank tokenizer](https://www.nltk.org/_modules/nltk/tokenize/treebank.html)
Below is a code snippet for more details.
```python
import re
from nltk.tokenize import TreebankWordTokenizer

# value: a raw input string (e.g. one PubMed sentence)
value = value.lower()                                 # lowercase the text
value = re.sub(r'[\r\n]+', ' ', value)                # collapse line breaks into spaces
value = re.sub(r'[^\x00-\x7F]+', ' ', value)          # replace non-ASCII characters with spaces
tokenized = TreebankWordTokenizer().tokenize(value)   # NLTK Treebank tokenization
sentence = ' '.join(tokenized)
sentence = re.sub(r"\s's\b", "'s", sentence)          # re-attach possessive 's
```
### BibTeX entry and citation info
```bibtex
@InProceedings{peng2019transfer,
author = {Yifan Peng and Shankai Yan and Zhiyong Lu},
title = {Transfer Learning in Biomedical Natural Language Processing: An Evaluation of BERT and ELMo on Ten Benchmarking Datasets},
booktitle = {Proceedings of the 2019 Workshop on Biomedical Natural Language Processing (BioNLP 2019)},
year = {2019},
pages = {58--65},
}
```
### Acknowledgments
This work was supported by the Intramural Research Programs of the National Institutes of Health, National Library of
Medicine and Clinical Center. This work was supported by the National Library of Medicine of the National Institutes of Health under award number 4R00LM013001-01.
We are also grateful to the authors of BERT and ELMo for making the data and code publicly available.
We would like to thank Dr Sun Kim for processing the PubMed texts.
### Disclaimer
This tool shows the results of research conducted in the Computational Biology Branch, NCBI. The information produced
on this website is not intended for direct diagnostic use or medical decision-making without review and oversight
by a clinical professional. Individuals should not change their health behavior solely on the basis of information
produced on this website. NIH does not independently verify the validity or utility of the information produced
by this tool. If you have questions about the information produced on this website, please see a health care
professional. More information about NCBI's disclaimer policy is available.
|
{"language": ["en"], "license": "cc0-1.0", "tags": ["bert", "bluebert"], "datasets": ["PubMed", "MIMIC-III"]}
| null |
bionlp/bluebert_pubmed_mimic_uncased_L-12_H-768_A-12
|
[
"transformers",
"pytorch",
"jax",
"bert",
"bluebert",
"en",
"dataset:PubMed",
"dataset:MIMIC-III",
"license:cc0-1.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #bert #bluebert #en #dataset-PubMed #dataset-MIMIC-III #license-cc0-1.0 #endpoints_compatible #region-us
|
# BlueBert-Base, Uncased, PubMed and MIMIC-III
## Model description
A BERT model pre-trained on PubMed abstracts and clinical notes (MIMIC-III).
## Intended uses & limitations
#### How to use
Please see URL
## Training data
We provide preprocessed PubMed texts that were used to pre-train the BlueBERT models.
The corpus contains ~4000M words extracted from the PubMed ASCII code version.
Pre-trained model: URL
## Training procedure
* lowercasing the text
* removing special characters outside the range '\x00'-'\x7F'
* tokenizing the text using the NLTK Treebank tokenizer
Below is a code snippet for more details.
### BibTeX entry and citation info
### Acknowledgments
This work was supported by the Intramural Research Programs of the National Institutes of Health, National Library of
Medicine and Clinical Center. This work was supported by the National Library of Medicine of the National Institutes of Health under award number 4R00LM013001-01.
We are also grateful to the authors of BERT and ELMo for making the data and code publicly available.
We would like to thank Dr Sun Kim for processing the PubMed texts.
### Disclaimer
This tool shows the results of research conducted in the Computational Biology Branch, NCBI. The information produced
on this website is not intended for direct diagnostic use or medical decision-making without review and oversight
by a clinical professional. Individuals should not change their health behavior solely on the basis of information
produced on this website. NIH does not independently verify the validity or utility of the information produced
by this tool. If you have questions about the information produced on this website, please see a health care
professional. More information about NCBI's disclaimer policy is available.
|
[
"# BlueBert-Base, Uncased, PubMed and MIMIC-III",
"## Model description\n\nA BERT model pre-trained on PubMed abstracts and clinical notes (MIMIC-III).",
"## Intended uses & limitations",
"#### How to use\n\nPlease see URL",
"## Training data\n\nWe provide preprocessed PubMed texts that were used to pre-train the BlueBERT models. \nThe corpus contains ~4000M words extracted from the PubMed ASCII code version. \n\nPre-trained model: URL",
"## Training procedure\n\n* lowercasing the text\n* removing speical chars '\\x00'-'\\x7F'\n* tokenizing the text using the NLTK Treebank tokenizer\n\nBelow is a code snippet for more details.",
"### BibTeX entry and citation info",
"### Acknowledgments\n\nThis work was supported by the Intramural Research Programs of the National Institutes of Health, National Library of\nMedicine and Clinical Center. This work was supported by the National Library of Medicine of the National Institutes of Health under award number 4R00LM013001-01.\n\nWe are also grateful to the authors of BERT and ELMo to make the data and codes publicly available.\n\nWe would like to thank Dr Sun Kim for processing the PubMed texts.",
"### Disclaimer\n\nThis tool shows the results of research conducted in the Computational Biology Branch, NCBI. The information produced\non this website is not intended for direct diagnostic use or medical decision-making without review and oversight\nby a clinical professional. Individuals should not change their health behavior solely on the basis of information\nproduced on this website. NIH does not independently verify the validity or utility of the information produced\nby this tool. If you have questions about the information produced on this website, please see a health care\nprofessional. More information about NCBI's disclaimer policy is available."
] |
[
"TAGS\n#transformers #pytorch #jax #bert #bluebert #en #dataset-PubMed #dataset-MIMIC-III #license-cc0-1.0 #endpoints_compatible #region-us \n",
"# BlueBert-Base, Uncased, PubMed and MIMIC-III",
"## Model description\n\nA BERT model pre-trained on PubMed abstracts and clinical notes (MIMIC-III).",
"## Intended uses & limitations",
"#### How to use\n\nPlease see URL",
"## Training data\n\nWe provide preprocessed PubMed texts that were used to pre-train the BlueBERT models. \nThe corpus contains ~4000M words extracted from the PubMed ASCII code version. \n\nPre-trained model: URL",
"## Training procedure\n\n* lowercasing the text\n* removing speical chars '\\x00'-'\\x7F'\n* tokenizing the text using the NLTK Treebank tokenizer\n\nBelow is a code snippet for more details.",
"### BibTeX entry and citation info",
"### Acknowledgments\n\nThis work was supported by the Intramural Research Programs of the National Institutes of Health, National Library of\nMedicine and Clinical Center. This work was supported by the National Library of Medicine of the National Institutes of Health under award number 4R00LM013001-01.\n\nWe are also grateful to the authors of BERT and ELMo to make the data and codes publicly available.\n\nWe would like to thank Dr Sun Kim for processing the PubMed texts.",
"### Disclaimer\n\nThis tool shows the results of research conducted in the Computational Biology Branch, NCBI. The information produced\non this website is not intended for direct diagnostic use or medical decision-making without review and oversight\nby a clinical professional. Individuals should not change their health behavior solely on the basis of information\nproduced on this website. NIH does not independently verify the validity or utility of the information produced\nby this tool. If you have questions about the information produced on this website, please see a health care\nprofessional. More information about NCBI's disclaimer policy is available."
] |
[
53,
19,
26,
9,
8,
52,
56,
11,
109,
130
] |
[
"passage: TAGS\n#transformers #pytorch #jax #bert #bluebert #en #dataset-PubMed #dataset-MIMIC-III #license-cc0-1.0 #endpoints_compatible #region-us \n# BlueBert-Base, Uncased, PubMed and MIMIC-III## Model description\n\nA BERT model pre-trained on PubMed abstracts and clinical notes (MIMIC-III).## Intended uses & limitations#### How to use\n\nPlease see URL## Training data\n\nWe provide preprocessed PubMed texts that were used to pre-train the BlueBERT models. \nThe corpus contains ~4000M words extracted from the PubMed ASCII code version. \n\nPre-trained model: URL## Training procedure\n\n* lowercasing the text\n* removing speical chars '\\x00'-'\\x7F'\n* tokenizing the text using the NLTK Treebank tokenizer\n\nBelow is a code snippet for more details.### BibTeX entry and citation info### Acknowledgments\n\nThis work was supported by the Intramural Research Programs of the National Institutes of Health, National Library of\nMedicine and Clinical Center. This work was supported by the National Library of Medicine of the National Institutes of Health under award number 4R00LM013001-01.\n\nWe are also grateful to the authors of BERT and ELMo to make the data and codes publicly available.\n\nWe would like to thank Dr Sun Kim for processing the PubMed texts.### Disclaimer\n\nThis tool shows the results of research conducted in the Computational Biology Branch, NCBI. The information produced\non this website is not intended for direct diagnostic use or medical decision-making without review and oversight\nby a clinical professional. Individuals should not change their health behavior solely on the basis of information\nproduced on this website. NIH does not independently verify the validity or utility of the information produced\nby this tool. If you have questions about the information produced on this website, please see a health care\nprofessional. More information about NCBI's disclaimer policy is available."
] |
[
-0.007672597654163837,
0.28489142656326294,
-0.006376971956342459,
0.003260997124016285,
0.04170110821723938,
-0.007052050903439522,
0.056028518825769424,
0.13582441210746765,
-0.0020174614619463682,
0.16862145066261292,
0.008054436184465885,
0.1137438490986824,
0.0946488305926323,
0.11032941192388535,
0.050831377506256104,
-0.22647273540496826,
0.04239482060074806,
0.009692962281405926,
0.1650891900062561,
0.0476178303360939,
0.031684305518865585,
-0.05013490840792656,
0.03654412180185318,
-0.018859706819057465,
-0.01579943299293518,
-0.011615617200732231,
0.0008849713485687971,
0.006328279618173838,
0.05639534443616867,
-0.005043424665927887,
0.004624849651008844,
0.008717616088688374,
0.05609659105539322,
-0.24439965188503265,
0.003029635176062584,
0.0451732762157917,
0.008345618844032288,
0.10258062183856964,
-0.02706332318484783,
-0.05262565612792969,
0.18875165283679962,
-0.14901353418827057,
0.04492686316370964,
0.029453162103891373,
-0.0899457260966301,
-0.1997559815645218,
-0.12287593632936478,
0.16093257069587708,
0.026753781363368034,
0.04097543656826019,
0.008801350370049477,
0.11644936352968216,
-0.015168789774179459,
0.01630871184170246,
0.1224406361579895,
-0.20657199621200562,
0.011298045516014099,
0.05663788318634033,
0.05765974149107933,
0.11665995419025421,
-0.09694631397724152,
0.008675944060087204,
-0.02789580635726452,
-0.015474112704396248,
0.05366456136107445,
-0.012981174513697624,
0.0716557726264,
0.004352751187980175,
-0.08077636361122131,
-0.04097404703497887,
0.06771118193864822,
-0.04956880211830139,
-0.0805642306804657,
-0.116547591984272,
-0.033856749534606934,
0.06423202902078629,
0.054512083530426025,
-0.05736232176423073,
0.06775880604982376,
-0.02150304801762104,
0.09804260730743408,
-0.02576642483472824,
-0.047346603125333786,
-0.025508107617497444,
-0.024549268186092377,
0.06979676336050034,
-0.00008894260099623352,
0.03413057699799538,
0.03458625450730324,
0.07560135424137115,
0.02066364884376526,
-0.07055214047431946,
-0.09553135931491852,
-0.03897245600819588,
-0.05662068724632263,
-0.008037982508540154,
0.04022543132305145,
-0.036680445075035095,
0.04583723470568657,
0.1910894215106964,
-0.06262079626321793,
-0.01997269317507744,
-0.07571297883987427,
-0.02637750655412674,
0.10841802507638931,
0.02197413705289364,
-0.08189939707517624,
-0.059373144060373306,
-0.0372837670147419,
0.02466288022696972,
0.10017605870962143,
-0.015456592664122581,
0.0010551137384027243,
0.006921823136508465,
-0.03159477934241295,
0.08464263379573822,
0.004042144399136305,
-0.026636891067028046,
-0.06138654798269272,
-0.016642624512314796,
0.23667237162590027,
-0.08771218359470367,
-0.0063697160221636295,
-0.0028952565044164658,
0.030319510027766228,
0.04740477353334427,
0.07547244429588318,
-0.021524600684642792,
-0.048549190163612366,
0.1230035200715065,
-0.08384434878826141,
-0.006267966236919165,
-0.04318397864699364,
-0.029645511880517006,
0.06612046808004379,
-0.0719883143901825,
-0.02573094144463539,
-0.06167168542742729,
-0.005544808227568865,
-0.04971599951386452,
-0.01125978771597147,
-0.037411272525787354,
0.0796806588768959,
0.01315371971577406,
0.04279567301273346,
-0.008906284347176552,
-0.00598976481705904,
-0.03726539760828018,
-0.036784827709198,
0.028155013918876648,
-0.12073840945959091,
0.06417969614267349,
-0.06514738500118256,
-0.029596950858831406,
-0.09556498378515244,
0.03460565581917763,
-0.09381811320781708,
-0.020304687321186066,
-0.06458385288715363,
-0.01909458637237549,
-0.1046249195933342,
-0.0016927275573834777,
-0.07488804310560226,
-0.04362994432449341,
0.019619211554527283,
0.09474140405654907,
-0.12711161375045776,
-0.029009489342570305,
0.21165715157985687,
-0.09960469603538513,
-0.04185165837407112,
0.0721702128648758,
-0.022511502727866173,
0.011775895021855831,
0.08330007642507553,
0.20063737034797668,
0.08251765370368958,
-0.14649340510368347,
-0.1333903968334198,
-0.08927217125892639,
-0.05291609466075897,
0.05685709789395332,
0.042529381811618805,
-0.08033651858568192,
0.02157830074429512,
0.01971893198788166,
-0.08404400199651718,
-0.08551269769668579,
-0.011670855805277824,
0.007318724878132343,
0.008354080840945244,
-0.0742703229188919,
-0.010189009830355644,
0.013228926807641983,
-0.05370281636714935,
0.0024611749686300755,
-0.03187454119324684,
0.05716312676668167,
0.09958011656999588,
-0.012997070327401161,
0.01894993521273136,
-0.06740587949752808,
-0.041283730417490005,
0.0004269183846190572,
-0.011585269123315811,
-0.15204860270023346,
0.06757160276174545,
0.05727725103497505,
-0.1305433064699173,
0.055554334074258804,
-0.06673290580511093,
-0.01912674307823181,
0.06966172158718109,
-0.042501240968704224,
0.014799542725086212,
0.024214891716837883,
0.005501607432961464,
-0.0601503923535347,
-0.10485472530126572,
-0.03600040450692177,
-0.024797286838293076,
0.06168297678232193,
-0.11879552900791168,
0.022404732182621956,
-0.0034234817139804363,
0.13456588983535767,
0.05161619558930397,
-0.11979936063289642,
0.08338714390993118,
-0.004058727994561195,
0.025595737621188164,
-0.0038366199005395174,
-0.011547228321433067,
-0.011225159280002117,
0.008980310522019863,
0.02143818326294422,
-0.15376946330070496,
-0.16622483730316162,
-0.012894177809357643,
0.1483854502439499,
0.02579273097217083,
-0.029438262805342674,
-0.04435308277606964,
0.0014329705154523253,
-0.11003394424915314,
-0.10292646288871765,
0.09306950122117996,
0.007250893395394087,
0.028637930750846863,
-0.025352919474244118,
-0.06671962887048721,
-0.03133898973464966,
-0.021695943549275398,
-0.030845336616039276,
0.014448452740907669,
0.018225068226456642,
-0.15689687430858612,
0.020311269909143448,
0.0051088337786495686,
0.12088733166456223,
0.19828245043754578,
0.03706464543938637,
-0.12212219834327698,
-0.07944227010011673,
-0.059334591031074524,
0.05150968208909035,
0.11646745353937149,
-0.026247670873999596,
0.04676618427038193,
0.054944977164268494,
-0.0016438509337604046,
0.013401038013398647,
-0.005007839761674404,
0.02563881129026413,
-0.002400154946371913,
-0.018744267523288727,
-0.049842555075883865,
-0.018306313082575798,
-0.022782891988754272,
0.12725479900836945,
0.042872101068496704,
0.1656804084777832,
-0.04719126224517822,
-0.03570137545466423,
-0.10028792917728424,
0.11537149548530579,
-0.11211913824081421,
-0.21742849051952362,
-0.1539539396762848,
0.02024836465716362,
0.037443798035383224,
0.020978165790438652,
-0.008458171039819717,
-0.05695578083395958,
-0.061506982892751694,
-0.12424984574317932,
-0.010254578664898872,
0.059107162058353424,
-0.08144477009773254,
-0.029150264337658882,
0.036592841148376465,
0.025428904220461845,
-0.11355935037136078,
0.010348550044000149,
-0.05523210018873215,
0.0022221573162823915,
0.006745389197021723,
-0.025009194388985634,
0.08265526592731476,
0.11411280930042267,
0.047207754105329514,
-0.03900700435042381,
0.03485465049743652,
0.07930777966976166,
-0.05869175121188164,
0.12295252829790115,
0.07427946478128433,
-0.004954935517162085,
0.04222681745886803,
0.08999104052782059,
0.03775041177868843,
-0.05146482586860657,
0.046009909361600876,
0.023091698065400124,
-0.012133692391216755,
-0.2746995687484741,
-0.0540870800614357,
-0.03325439989566803,
-0.041524969041347504,
-0.00003985929652117193,
0.04186741262674332,
0.09263722598552704,
0.0038477499037981033,
-0.07452118396759033,
0.019547302275896072,
-0.00764929223805666,
0.07199609279632568,
0.037102289497852325,
0.02894783765077591,
0.07681997120380402,
-0.06699863076210022,
0.048062052577733994,
0.1111246719956398,
-0.0024704679381102324,
0.15640594065189362,
0.009185904636979103,
0.21351593732833862,
0.10867957770824432,
0.06318312138319016,
0.06042878329753876,
0.05850483104586601,
0.048131827265024185,
0.049048759043216705,
0.00526147335767746,
-0.08683522790670395,
-0.07272768020629883,
0.02500038780272007,
-0.058024078607559204,
-0.0030655711889266968,
0.024830641224980354,
-0.09214557707309723,
0.03967292979359627,
0.13280488550662994,
0.07388453185558319,
-0.06981638818979263,
-0.08657332509756088,
0.05629459023475647,
-0.06724882125854492,
-0.09485939145088196,
-0.029883576557040215,
0.1497209370136261,
-0.0945156067609787,
0.04188232123851776,
-0.0007281510625034571,
0.06079382821917534,
-0.13925500214099884,
0.01684199832379818,
-0.04546351358294487,
-0.03844716027379036,
-0.0592491440474987,
0.056429240852594376,
-0.08639531582593918,
0.06906983256340027,
0.028303319588303566,
0.0765964537858963,
-0.06950672715902328,
0.014032242819666862,
0.0006195663590915501,
0.09459052979946136,
0.11723463982343674,
0.03951139748096466,
-0.014400538057088852,
-0.016481894999742508,
-0.11100757122039795,
-0.018521524965763092,
0.11522077769041061,
-0.15612836182117462,
0.07742111384868622,
0.013728349469602108,
-0.012587465345859528,
-0.09644239395856857,
-0.09739670902490616,
-0.18178462982177734,
-0.1412612348794937,
0.11229749023914337,
-0.06522243469953537,
0.1222328469157219,
-0.028175974264740944,
-0.027564559131860733,
0.09198758006095886,
0.09102347493171692,
-0.23125307261943817,
-0.053551580756902695,
-0.1558283269405365,
0.016265051439404488,
0.0920948013663292,
-0.05281699076294899,
0.0014532292261719704,
0.004649054724723101,
0.1192033663392067,
-0.00727112265303731,
-0.1142301857471466,
-0.042248085141181946,
-0.04690481349825859,
-0.19239439070224762,
-0.07175500690937042,
0.15708312392234802,
0.12489563226699829,
0.06207086890935898,
0.005784391425549984,
0.10388118028640747,
0.036383256316185,
-0.07901492714881897,
0.07769197970628738,
0.21877844631671906,
0.15186117589473724,
0.05508774518966675,
-0.09934623539447784,
-0.09406648576259613,
-0.09313042461872101,
-0.03873863071203232,
-0.006234432570636272,
0.13441269099712372,
-0.05374174192547798,
0.15407389402389526,
0.16970805823802948,
-0.15109948813915253,
-0.16586890816688538,
-0.03542647510766983,
-0.01888485811650753,
-0.013500337488949299,
0.10743580758571625,
-0.2519918382167816,
0.09473995864391327,
0.13307777047157288,
-0.003264998085796833,
-0.018175555393099785,
-0.08931740373373032,
-0.07653441280126572,
-0.054474640637636185,
0.07487720251083374,
-0.033071257174015045,
-0.10141561180353165,
-0.11004684865474701,
0.03226660564541817,
-0.16182221472263336,
0.15848924219608307,
-0.0777125358581543,
0.031188840046525,
-0.08266515284776688,
-0.03347949683666229,
0.05790824070572853,
-0.03333306685090065,
0.09657550603151321,
0.018131207674741745,
0.06120575591921806,
-0.07002156227827072,
-0.049721941351890564,
0.041127465665340424,
-0.04430774971842766,
0.1164977103471756,
0.0520431324839592,
0.02460348792374134,
-0.09161288291215897,
-0.030095793306827545,
-0.09498877078294754,
-0.0558997206389904,
-0.061068929731845856,
-0.09166828542947769,
-0.12320312112569809,
0.10352121293544769,
0.05627685785293579,
-0.000810901983641088,
0.09557541459798813,
-0.11319228261709213,
0.05232219770550728,
0.10023441910743713,
0.18818753957748413,
0.01480093877762556,
0.06537862867116928,
-0.00019996984337922186,
-0.07343018054962158,
0.02489413507282734,
-0.15906572341918945,
0.03301693871617317,
0.10736016929149628,
0.06301625818014145,
0.09602268040180206,
-0.002393794944509864,
-0.17277118563652039,
-0.02879193052649498,
0.0751740112900734,
-0.1237097680568695,
-0.18038246035575867,
0.018297135829925537,
0.005402880255132914,
-0.11808972805738449,
0.020210329443216324,
0.0904625803232193,
-0.021881965920329094,
-0.036450717598199844,
-0.0006567696109414101,
0.07362498342990875,
-0.009819979779422283,
0.0753943994641304,
0.05126035213470459,
0.011863254941999912,
-0.06112390756607056,
0.06419005990028381,
0.1445275843143463,
0.0043587093241512775,
-0.0037828125059604645,
0.06855329871177673,
-0.06952079385519028,
-0.04888978227972984,
-0.07344858348369598,
0.04400406405329704,
0.06871302425861359,
-0.07098542153835297,
0.02134678326547146,
-0.03109569288790226,
0.008966058492660522,
0.18474425375461578,
-0.045035626739263535,
0.09702830761671066,
-0.009830251336097717,
0.02902742475271225,
-0.09812702238559723,
0.07591691613197327,
-0.08755198866128922,
0.06516138464212418,
0.057476289570331573,
0.028350386768579483,
-0.004485853016376495,
-0.029605945572257042,
-0.025555552914738655,
-0.01804892159998417,
-0.0688687413930893,
-0.05468963831663132,
-0.1221328005194664,
-0.01496676355600357,
-0.07195236533880234,
-0.08208072185516357,
-0.04768115282058716,
-0.022040430456399918,
0.029963912442326546,
-0.00000832407567941118,
0.01955513097345829,
-0.02942601777613163,
0.007778164930641651,
0.07583095133304596,
-0.1508917361497879,
0.0297428946942091,
0.07270117849111557,
-0.0731726661324501,
0.11320051550865173,
-0.041016191244125366,
0.0005434202612377703,
0.022185659036040306,
-0.08801073580980301,
0.028917985036969185,
-0.01166113093495369,
0.08240088820457458,
-0.004059983417391777,
-0.12614445388317108,
-0.05828571692109108,
-0.013808498159050941,
-0.060652103275060654,
0.04066718742251396,
0.06855908781290054,
-0.04653715342283249,
0.06838880479335785,
0.027050122618675232,
0.0010983238462358713,
-0.08106013387441635,
0.011681891977787018,
0.048953596502542496,
-0.01842726394534111,
0.07841720432043076,
-0.019567975774407387,
-0.002340521663427353,
-0.1377251297235489,
-0.02938166819512844,
0.02849600836634636,
-0.008048759773373604,
-0.025159651413559914,
-0.010170920751988888,
0.03505256399512291,
0.07192801684141159,
0.14245566725730896,
-0.08981052041053772,
-0.06289850920438766,
0.04473133385181427,
0.03778911754488945,
-0.011689643375575542,
-0.02961929328739643,
0.036805108189582825,
0.029716666787862778,
-0.05726531893014908,
-0.0010826662182807922,
-0.04473939910531044,
-0.0685148537158966,
-0.01144483219832182,
0.1556077003479004,
0.11162428557872772,
0.15730030834674835,
-0.06768147647380829,
-0.05132339149713516,
-0.002171772764995694,
-0.1600559502840042,
-0.06945602595806122,
-0.005748175084590912,
-0.03418194502592087,
0.004090669099241495,
-0.0014573335647583008,
0.14036020636558533,
-0.1654360443353653,
0.09069783985614777,
0.017596235498785973,
-0.07018358260393143,
-0.07417075335979462,
-0.19221477210521698,
0.011500732973217964,
0.013772934675216675,
-0.03508714959025383,
-0.10979392379522324,
0.10748369246721268,
0.12876099348068237,
0.0003582900098990649,
0.013301325961947441,
0.07388409227132797,
-0.07420632988214493,
0.019379211589694023,
0.038268379867076874,
0.004521660041064024,
-0.0180826298892498,
-0.06688473373651505,
0.10141598433256149,
0.012512018904089928,
0.01136852242052555,
0.10367652773857117,
0.09991702437400818,
0.07128202170133591,
-0.04517984017729759,
-0.013766402378678322,
-0.11036288738250732,
0.0625167191028595,
-0.036494769155979156,
-0.04962988570332527,
0.19565318524837494,
0.12314503639936447,
-0.0003075340355280787,
0.02797412872314453,
0.20553822815418243,
-0.013939072377979755,
-0.002271217992529273,
-0.11404544115066528,
0.07589488476514816,
0.025703828781843185,
0.0022840574383735657,
0.006452797446399927,
-0.1017652302980423,
0.036300256848335266,
0.14526575803756714,
0.0006781985866837204,
0.06894414126873016,
0.02043280377984047,
0.002427761908620596,
0.028130315244197845,
0.06159484013915062,
0.11201316863298416,
0.022899825125932693,
0.23100297152996063,
-0.0105661666020751,
0.046676572412252426,
-0.03576965257525444,
-0.04280508682131767,
-0.07943946123123169,
0.020871734246611595,
-0.05557077005505562,
-0.044929735362529755,
-0.04903688654303551,
0.05962243303656578,
-0.08685039728879929,
-0.20621514320373535,
0.0784551277756691,
-0.06272900849580765,
-0.069573312997818,
-0.03928481042385101,
-0.007055043242871761,
-0.08796579390764236,
-0.007286147680133581,
0.0597989484667778,
0.009662795811891556,
0.2880169749259949,
0.010373464785516262,
-0.057777103036642075,
-0.041030850261449814,
0.035198286175727844,
-0.10055617243051529,
0.20349960029125214,
0.036105453968048096,
0.00504318717867136,
0.04188361391425133,
-0.021037522703409195,
-0.15279024839401245,
0.1327103227376938,
-0.03775670751929283,
-0.01772109977900982,
0.008064967580139637,
0.1657577008008957,
0.06514346599578857,
0.10482396930456161,
0.03969550505280495,
-0.03288998827338219,
0.01808200031518936,
0.09534458816051483,
-0.023474207147955894,
-0.07744248956441879,
0.09404170513153076,
-0.0633256733417511,
0.11550069600343704,
0.06604772061109543,
-0.0264765415340662,
0.03405130282044411,
-0.0432092659175396,
0.02800295501947403,
0.03306903690099716,
0.10271750390529633,
0.014070089906454086,
-0.12331303209066391,
0.05337245762348175,
-0.07149547338485718,
0.05585994943976402,
-0.201334610581398,
-0.037075795233249664,
0.014810649678111076,
-0.02754898928105831,
-0.05670749396085739,
0.06682228296995163,
0.03612780570983887,
0.026545489206910133,
-0.05366295203566551,
-0.1312427669763565,
0.038695164024829865,
0.10614920407533646,
-0.07739134877920151,
-0.02971014752984047
] |
null | null |
transformers
|
# BlueBert-Base, Uncased, PubMed and MIMIC-III
## Model description
A BERT model pre-trained on PubMed abstracts and clinical notes ([MIMIC-III](https://mimic.physionet.org/)).
## Intended uses & limitations
#### How to use
Please see https://github.com/ncbi-nlp/bluebert
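For a quick start outside the original repository, the checkpoint can also be loaded with the Hugging Face `transformers` library. The snippet below is a minimal sketch (not from the NCBI repo); it assumes the model id shown resolves on the Hugging Face Hub, and the example sentence is illustrative only.

```python
from transformers import AutoTokenizer, AutoModel

model_id = "bionlp/bluebert_pubmed_mimic_uncased_L-24_H-1024_A-16"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModel.from_pretrained(model_id)

# Input should follow the preprocessing described under "Training procedure" below.
text = "the patient was admitted with community-acquired pneumonia ."
inputs = tokenizer(text, return_tensors="pt")
outputs = model(**inputs)
print(outputs.last_hidden_state.shape)  # (1, sequence_length, 1024)
```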
## Training data
We provide [preprocessed PubMed texts](https://ftp.ncbi.nlm.nih.gov/pub/lu/Suppl/NCBI-BERT/pubmed_uncased_sentence_nltk.txt.tar.gz) that were used to pre-train the BlueBERT models.
The corpus contains ~4000M words extracted from the [PubMed ASCII code version](https://www.ncbi.nlm.nih.gov/research/bionlp/APIs/BioC-PubMed/).
Pre-trained model: https://huggingface.co/bert-large-uncased
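As a rough sanity check on the training data, the archive linked above can be inspected locally. The sketch below is a convenience only, not part of the official pipeline; it assumes the tarball has already been downloaded into the working directory under its original file name.

```python
import tarfile

archive = "pubmed_uncased_sentence_nltk.txt.tar.gz"  # downloaded from the FTP link above

n_sentences = 0
n_words = 0
with tarfile.open(archive, "r:gz") as tar:
    for member in tar.getmembers():
        if not member.isfile():
            continue
        # One preprocessed sentence per line; count lines and whitespace-separated words.
        for raw_line in tar.extractfile(member):
            n_sentences += 1
            n_words += len(raw_line.split())

print(f"{n_sentences:,} sentences, {n_words:,} words")  # word count should be on the order of ~4,000M
```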
## Training procedure
* lowercasing the text
* removing special characters outside the `\x00`-`\x7F` (ASCII) range
* tokenizing the text using the [NLTK Treebank tokenizer](https://www.nltk.org/_modules/nltk/tokenize/treebank.html)
Below is a code snippet for more details.
```python
import re
from nltk.tokenize import TreebankWordTokenizer

value = value.lower()                                # lowercase
value = re.sub(r'[\r\n]+', ' ', value)               # collapse line breaks into spaces
value = re.sub(r'[^\x00-\x7F]+', ' ', value)         # replace non-ASCII characters with spaces
tokenized = TreebankWordTokenizer().tokenize(value)  # NLTK Treebank word tokenization
sentence = ' '.join(tokenized)
sentence = re.sub(r"\s's\b", "'s", sentence)         # re-attach possessive 's to its noun
```
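Applied to an illustrative sentence such as `The patient's ECG was normal.`, these steps yield `the patient 's ecg was normal .` after tokenization, and the final substitution re-attaches the possessive to give `the patient's ecg was normal .`.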
### BibTeX entry and citation info
```bibtex
@InProceedings{peng2019transfer,
author = {Yifan Peng and Shankai Yan and Zhiyong Lu},
title = {Transfer Learning in Biomedical Natural Language Processing: An Evaluation of BERT and ELMo on Ten Benchmarking Datasets},
booktitle = {Proceedings of the 2019 Workshop on Biomedical Natural Language Processing (BioNLP 2019)},
year = {2019},
pages = {58--65},
}
```
### Acknowledgments
This work was supported by the Intramural Research Programs of the National Institutes of Health, National Library of
Medicine and Clinical Center. This work was supported by the National Library of Medicine of the National Institutes of Health under award number 4R00LM013001-01.
We are also grateful to the authors of BERT and ELMo for making their data and code publicly available.
We would like to thank Dr Sun Kim for processing the PubMed texts.
### Disclaimer
This tool shows the results of research conducted in the Computational Biology Branch, NCBI. The information produced
on this website is not intended for direct diagnostic use or medical decision-making without review and oversight
by a clinical professional. Individuals should not change their health behavior solely on the basis of information
produced on this website. NIH does not independently verify the validity or utility of the information produced
by this tool. If you have questions about the information produced on this website, please see a health care
professional. More information about NCBI's disclaimer policy is available.
|
{"language": ["en"], "license": "cc0-1.0", "tags": ["bert", "bluebert"], "datasets": ["PubMed", "MIMIC-III"]}
| null |
bionlp/bluebert_pubmed_mimic_uncased_L-24_H-1024_A-16
|
[
"transformers",
"pytorch",
"jax",
"bert",
"bluebert",
"en",
"dataset:PubMed",
"dataset:MIMIC-III",
"license:cc0-1.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #bert #bluebert #en #dataset-PubMed #dataset-MIMIC-III #license-cc0-1.0 #endpoints_compatible #region-us
|
# BlueBert-Base, Uncased, PubMed and MIMIC-III
## Model description
A BERT model pre-trained on PubMed abstracts and clinical notes (MIMIC-III).
## Intended uses & limitations
#### How to use
Please see URL
## Training data
We provide preprocessed PubMed texts that were used to pre-train the BlueBERT models.
The corpus contains ~4000M words extracted from the PubMed ASCII code version.
Pre-trained model: URL
## Training procedure
* lowercasing the text
* removing special characters outside the '\x00'-'\x7F' (ASCII) range
* tokenizing the text using the NLTK Treebank tokenizer
Below is a code snippet for more details.
### BibTeX entry and citation info
### Acknowledgments
This work was supported by the Intramural Research Programs of the National Institutes of Health, National Library of
Medicine and Clinical Center. This work was supported by the National Library of Medicine of the National Institutes of Health under award number 4R00LM013001-01.
We are also grateful to the authors of BERT and ELMo for making their data and code publicly available.
We would like to thank Dr Sun Kim for processing the PubMed texts.
### Disclaimer
This tool shows the results of research conducted in the Computational Biology Branch, NCBI. The information produced
on this website is not intended for direct diagnostic use or medical decision-making without review and oversight
by a clinical professional. Individuals should not change their health behavior solely on the basis of information
produced on this website. NIH does not independently verify the validity or utility of the information produced
by this tool. If you have questions about the information produced on this website, please see a health care
professional. More information about NCBI's disclaimer policy is available.
|
[
"# BlueBert-Base, Uncased, PubMed and MIMIC-III",
"## Model description\n\nA BERT model pre-trained on PubMed abstracts and clinical notes (MIMIC-III).",
"## Intended uses & limitations",
"#### How to use\n\nPlease see URL",
"## Training data\n\nWe provide preprocessed PubMed texts that were used to pre-train the BlueBERT models. \nThe corpus contains ~4000M words extracted from the PubMed ASCII code version. \n\nPre-trained model: URL",
"## Training procedure\n\n* lowercasing the text\n* removing speical chars '\\x00'-'\\x7F'\n* tokenizing the text using the NLTK Treebank tokenizer\n\nBelow is a code snippet for more details.",
"### BibTeX entry and citation info",
"### Acknowledgments\n\nThis work was supported by the Intramural Research Programs of the National Institutes of Health, National Library of\nMedicine and Clinical Center. This work was supported by the National Library of Medicine of the National Institutes of Health under award number 4R00LM013001-01.\n\nWe are also grateful to the authors of BERT and ELMo to make the data and codes publicly available.\n\nWe would like to thank Dr Sun Kim for processing the PubMed texts.",
"### Disclaimer\n\nThis tool shows the results of research conducted in the Computational Biology Branch, NCBI. The information produced\non this website is not intended for direct diagnostic use or medical decision-making without review and oversight\nby a clinical professional. Individuals should not change their health behavior solely on the basis of information\nproduced on this website. NIH does not independently verify the validity or utility of the information produced\nby this tool. If you have questions about the information produced on this website, please see a health care\nprofessional. More information about NCBI's disclaimer policy is available."
] |
[
"TAGS\n#transformers #pytorch #jax #bert #bluebert #en #dataset-PubMed #dataset-MIMIC-III #license-cc0-1.0 #endpoints_compatible #region-us \n",
"# BlueBert-Base, Uncased, PubMed and MIMIC-III",
"## Model description\n\nA BERT model pre-trained on PubMed abstracts and clinical notes (MIMIC-III).",
"## Intended uses & limitations",
"#### How to use\n\nPlease see URL",
"## Training data\n\nWe provide preprocessed PubMed texts that were used to pre-train the BlueBERT models. \nThe corpus contains ~4000M words extracted from the PubMed ASCII code version. \n\nPre-trained model: URL",
"## Training procedure\n\n* lowercasing the text\n* removing speical chars '\\x00'-'\\x7F'\n* tokenizing the text using the NLTK Treebank tokenizer\n\nBelow is a code snippet for more details.",
"### BibTeX entry and citation info",
"### Acknowledgments\n\nThis work was supported by the Intramural Research Programs of the National Institutes of Health, National Library of\nMedicine and Clinical Center. This work was supported by the National Library of Medicine of the National Institutes of Health under award number 4R00LM013001-01.\n\nWe are also grateful to the authors of BERT and ELMo to make the data and codes publicly available.\n\nWe would like to thank Dr Sun Kim for processing the PubMed texts.",
"### Disclaimer\n\nThis tool shows the results of research conducted in the Computational Biology Branch, NCBI. The information produced\non this website is not intended for direct diagnostic use or medical decision-making without review and oversight\nby a clinical professional. Individuals should not change their health behavior solely on the basis of information\nproduced on this website. NIH does not independently verify the validity or utility of the information produced\nby this tool. If you have questions about the information produced on this website, please see a health care\nprofessional. More information about NCBI's disclaimer policy is available."
] |
[
53,
19,
26,
9,
8,
52,
56,
11,
109,
130
] |
[
"passage: TAGS\n#transformers #pytorch #jax #bert #bluebert #en #dataset-PubMed #dataset-MIMIC-III #license-cc0-1.0 #endpoints_compatible #region-us \n# BlueBert-Base, Uncased, PubMed and MIMIC-III## Model description\n\nA BERT model pre-trained on PubMed abstracts and clinical notes (MIMIC-III).## Intended uses & limitations#### How to use\n\nPlease see URL## Training data\n\nWe provide preprocessed PubMed texts that were used to pre-train the BlueBERT models. \nThe corpus contains ~4000M words extracted from the PubMed ASCII code version. \n\nPre-trained model: URL## Training procedure\n\n* lowercasing the text\n* removing speical chars '\\x00'-'\\x7F'\n* tokenizing the text using the NLTK Treebank tokenizer\n\nBelow is a code snippet for more details.### BibTeX entry and citation info### Acknowledgments\n\nThis work was supported by the Intramural Research Programs of the National Institutes of Health, National Library of\nMedicine and Clinical Center. This work was supported by the National Library of Medicine of the National Institutes of Health under award number 4R00LM013001-01.\n\nWe are also grateful to the authors of BERT and ELMo to make the data and codes publicly available.\n\nWe would like to thank Dr Sun Kim for processing the PubMed texts.### Disclaimer\n\nThis tool shows the results of research conducted in the Computational Biology Branch, NCBI. The information produced\non this website is not intended for direct diagnostic use or medical decision-making without review and oversight\nby a clinical professional. Individuals should not change their health behavior solely on the basis of information\nproduced on this website. NIH does not independently verify the validity or utility of the information produced\nby this tool. If you have questions about the information produced on this website, please see a health care\nprofessional. More information about NCBI's disclaimer policy is available."
] |
[
-0.007672597654163837,
0.28489142656326294,
-0.006376971956342459,
0.003260997124016285,
0.04170110821723938,
-0.007052050903439522,
0.056028518825769424,
0.13582441210746765,
-0.0020174614619463682,
0.16862145066261292,
0.008054436184465885,
0.1137438490986824,
0.0946488305926323,
0.11032941192388535,
0.050831377506256104,
-0.22647273540496826,
0.04239482060074806,
0.009692962281405926,
0.1650891900062561,
0.0476178303360939,
0.031684305518865585,
-0.05013490840792656,
0.03654412180185318,
-0.018859706819057465,
-0.01579943299293518,
-0.011615617200732231,
0.0008849713485687971,
0.006328279618173838,
0.05639534443616867,
-0.005043424665927887,
0.004624849651008844,
0.008717616088688374,
0.05609659105539322,
-0.24439965188503265,
0.003029635176062584,
0.0451732762157917,
0.008345618844032288,
0.10258062183856964,
-0.02706332318484783,
-0.05262565612792969,
0.18875165283679962,
-0.14901353418827057,
0.04492686316370964,
0.029453162103891373,
-0.0899457260966301,
-0.1997559815645218,
-0.12287593632936478,
0.16093257069587708,
0.026753781363368034,
0.04097543656826019,
0.008801350370049477,
0.11644936352968216,
-0.015168789774179459,
0.01630871184170246,
0.1224406361579895,
-0.20657199621200562,
0.011298045516014099,
0.05663788318634033,
0.05765974149107933,
0.11665995419025421,
-0.09694631397724152,
0.008675944060087204,
-0.02789580635726452,
-0.015474112704396248,
0.05366456136107445,
-0.012981174513697624,
0.0716557726264,
0.004352751187980175,
-0.08077636361122131,
-0.04097404703497887,
0.06771118193864822,
-0.04956880211830139,
-0.0805642306804657,
-0.116547591984272,
-0.033856749534606934,
0.06423202902078629,
0.054512083530426025,
-0.05736232176423073,
0.06775880604982376,
-0.02150304801762104,
0.09804260730743408,
-0.02576642483472824,
-0.047346603125333786,
-0.025508107617497444,
-0.024549268186092377,
0.06979676336050034,
-0.00008894260099623352,
0.03413057699799538,
0.03458625450730324,
0.07560135424137115,
0.02066364884376526,
-0.07055214047431946,
-0.09553135931491852,
-0.03897245600819588,
-0.05662068724632263,
-0.008037982508540154,
0.04022543132305145,
-0.036680445075035095,
0.04583723470568657,
0.1910894215106964,
-0.06262079626321793,
-0.01997269317507744,
-0.07571297883987427,
-0.02637750655412674,
0.10841802507638931,
0.02197413705289364,
-0.08189939707517624,
-0.059373144060373306,
-0.0372837670147419,
0.02466288022696972,
0.10017605870962143,
-0.015456592664122581,
0.0010551137384027243,
0.006921823136508465,
-0.03159477934241295,
0.08464263379573822,
0.004042144399136305,
-0.026636891067028046,
-0.06138654798269272,
-0.016642624512314796,
0.23667237162590027,
-0.08771218359470367,
-0.0063697160221636295,
-0.0028952565044164658,
0.030319510027766228,
0.04740477353334427,
0.07547244429588318,
-0.021524600684642792,
-0.048549190163612366,
0.1230035200715065,
-0.08384434878826141,
-0.006267966236919165,
-0.04318397864699364,
-0.029645511880517006,
0.06612046808004379,
-0.0719883143901825,
-0.02573094144463539,
-0.06167168542742729,
-0.005544808227568865,
-0.04971599951386452,
-0.01125978771597147,
-0.037411272525787354,
0.0796806588768959,
0.01315371971577406,
0.04279567301273346,
-0.008906284347176552,
-0.00598976481705904,
-0.03726539760828018,
-0.036784827709198,
0.028155013918876648,
-0.12073840945959091,
0.06417969614267349,
-0.06514738500118256,
-0.029596950858831406,
-0.09556498378515244,
0.03460565581917763,
-0.09381811320781708,
-0.020304687321186066,
-0.06458385288715363,
-0.01909458637237549,
-0.1046249195933342,
-0.0016927275573834777,
-0.07488804310560226,
-0.04362994432449341,
0.019619211554527283,
0.09474140405654907,
-0.12711161375045776,
-0.029009489342570305,
0.21165715157985687,
-0.09960469603538513,
-0.04185165837407112,
0.0721702128648758,
-0.022511502727866173,
0.011775895021855831,
0.08330007642507553,
0.20063737034797668,
0.08251765370368958,
-0.14649340510368347,
-0.1333903968334198,
-0.08927217125892639,
-0.05291609466075897,
0.05685709789395332,
0.042529381811618805,
-0.08033651858568192,
0.02157830074429512,
0.01971893198788166,
-0.08404400199651718,
-0.08551269769668579,
-0.011670855805277824,
0.007318724878132343,
0.008354080840945244,
-0.0742703229188919,
-0.010189009830355644,
0.013228926807641983,
-0.05370281636714935,
0.0024611749686300755,
-0.03187454119324684,
0.05716312676668167,
0.09958011656999588,
-0.012997070327401161,
0.01894993521273136,
-0.06740587949752808,
-0.041283730417490005,
0.0004269183846190572,
-0.011585269123315811,
-0.15204860270023346,
0.06757160276174545,
0.05727725103497505,
-0.1305433064699173,
0.055554334074258804,
-0.06673290580511093,
-0.01912674307823181,
0.06966172158718109,
-0.042501240968704224,
0.014799542725086212,
0.024214891716837883,
0.005501607432961464,
-0.0601503923535347,
-0.10485472530126572,
-0.03600040450692177,
-0.024797286838293076,
0.06168297678232193,
-0.11879552900791168,
0.022404732182621956,
-0.0034234817139804363,
0.13456588983535767,
0.05161619558930397,
-0.11979936063289642,
0.08338714390993118,
-0.004058727994561195,
0.025595737621188164,
-0.0038366199005395174,
-0.011547228321433067,
-0.011225159280002117,
0.008980310522019863,
0.02143818326294422,
-0.15376946330070496,
-0.16622483730316162,
-0.012894177809357643,
0.1483854502439499,
0.02579273097217083,
-0.029438262805342674,
-0.04435308277606964,
0.0014329705154523253,
-0.11003394424915314,
-0.10292646288871765,
0.09306950122117996,
0.007250893395394087,
0.028637930750846863,
-0.025352919474244118,
-0.06671962887048721,
-0.03133898973464966,
-0.021695943549275398,
-0.030845336616039276,
0.014448452740907669,
0.018225068226456642,
-0.15689687430858612,
0.020311269909143448,
0.0051088337786495686,
0.12088733166456223,
0.19828245043754578,
0.03706464543938637,
-0.12212219834327698,
-0.07944227010011673,
-0.059334591031074524,
0.05150968208909035,
0.11646745353937149,
-0.026247670873999596,
0.04676618427038193,
0.054944977164268494,
-0.0016438509337604046,
0.013401038013398647,
-0.005007839761674404,
0.02563881129026413,
-0.002400154946371913,
-0.018744267523288727,
-0.049842555075883865,
-0.018306313082575798,
-0.022782891988754272,
0.12725479900836945,
0.042872101068496704,
0.1656804084777832,
-0.04719126224517822,
-0.03570137545466423,
-0.10028792917728424,
0.11537149548530579,
-0.11211913824081421,
-0.21742849051952362,
-0.1539539396762848,
0.02024836465716362,
0.037443798035383224,
0.020978165790438652,
-0.008458171039819717,
-0.05695578083395958,
-0.061506982892751694,
-0.12424984574317932,
-0.010254578664898872,
0.059107162058353424,
-0.08144477009773254,
-0.029150264337658882,
0.036592841148376465,
0.025428904220461845,
-0.11355935037136078,
0.010348550044000149,
-0.05523210018873215,
0.0022221573162823915,
0.006745389197021723,
-0.025009194388985634,
0.08265526592731476,
0.11411280930042267,
0.047207754105329514,
-0.03900700435042381,
0.03485465049743652,
0.07930777966976166,
-0.05869175121188164,
0.12295252829790115,
0.07427946478128433,
-0.004954935517162085,
0.04222681745886803,
0.08999104052782059,
0.03775041177868843,
-0.05146482586860657,
0.046009909361600876,
0.023091698065400124,
-0.012133692391216755,
-0.2746995687484741,
-0.0540870800614357,
-0.03325439989566803,
-0.041524969041347504,
-0.00003985929652117193,
0.04186741262674332,
0.09263722598552704,
0.0038477499037981033,
-0.07452118396759033,
0.019547302275896072,
-0.00764929223805666,
0.07199609279632568,
0.037102289497852325,
0.02894783765077591,
0.07681997120380402,
-0.06699863076210022,
0.048062052577733994,
0.1111246719956398,
-0.0024704679381102324,
0.15640594065189362,
0.009185904636979103,
0.21351593732833862,
0.10867957770824432,
0.06318312138319016,
0.06042878329753876,
0.05850483104586601,
0.048131827265024185,
0.049048759043216705,
0.00526147335767746,
-0.08683522790670395,
-0.07272768020629883,
0.02500038780272007,
-0.058024078607559204,
-0.0030655711889266968,
0.024830641224980354,
-0.09214557707309723,
0.03967292979359627,
0.13280488550662994,
0.07388453185558319,
-0.06981638818979263,
-0.08657332509756088,
0.05629459023475647,
-0.06724882125854492,
-0.09485939145088196,
-0.029883576557040215,
0.1497209370136261,
-0.0945156067609787,
0.04188232123851776,
-0.0007281510625034571,
0.06079382821917534,
-0.13925500214099884,
0.01684199832379818,
-0.04546351358294487,
-0.03844716027379036,
-0.0592491440474987,
0.056429240852594376,
-0.08639531582593918,
0.06906983256340027,
0.028303319588303566,
0.0765964537858963,
-0.06950672715902328,
0.014032242819666862,
0.0006195663590915501,
0.09459052979946136,
0.11723463982343674,
0.03951139748096466,
-0.014400538057088852,
-0.016481894999742508,
-0.11100757122039795,
-0.018521524965763092,
0.11522077769041061,
-0.15612836182117462,
0.07742111384868622,
0.013728349469602108,
-0.012587465345859528,
-0.09644239395856857,
-0.09739670902490616,
-0.18178462982177734,
-0.1412612348794937,
0.11229749023914337,
-0.06522243469953537,
0.1222328469157219,
-0.028175974264740944,
-0.027564559131860733,
0.09198758006095886,
0.09102347493171692,
-0.23125307261943817,
-0.053551580756902695,
-0.1558283269405365,
0.016265051439404488,
0.0920948013663292,
-0.05281699076294899,
0.0014532292261719704,
0.004649054724723101,
0.1192033663392067,
-0.00727112265303731,
-0.1142301857471466,
-0.042248085141181946,
-0.04690481349825859,
-0.19239439070224762,
-0.07175500690937042,
0.15708312392234802,
0.12489563226699829,
0.06207086890935898,
0.005784391425549984,
0.10388118028640747,
0.036383256316185,
-0.07901492714881897,
0.07769197970628738,
0.21877844631671906,
0.15186117589473724,
0.05508774518966675,
-0.09934623539447784,
-0.09406648576259613,
-0.09313042461872101,
-0.03873863071203232,
-0.006234432570636272,
0.13441269099712372,
-0.05374174192547798,
0.15407389402389526,
0.16970805823802948,
-0.15109948813915253,
-0.16586890816688538,
-0.03542647510766983,
-0.01888485811650753,
-0.013500337488949299,
0.10743580758571625,
-0.2519918382167816,
0.09473995864391327,
0.13307777047157288,
-0.003264998085796833,
-0.018175555393099785,
-0.08931740373373032,
-0.07653441280126572,
-0.054474640637636185,
0.07487720251083374,
-0.033071257174015045,
-0.10141561180353165,
-0.11004684865474701,
0.03226660564541817,
-0.16182221472263336,
0.15848924219608307,
-0.0777125358581543,
0.031188840046525,
-0.08266515284776688,
-0.03347949683666229,
0.05790824070572853,
-0.03333306685090065,
0.09657550603151321,
0.018131207674741745,
0.06120575591921806,
-0.07002156227827072,
-0.049721941351890564,
0.041127465665340424,
-0.04430774971842766,
0.1164977103471756,
0.0520431324839592,
0.02460348792374134,
-0.09161288291215897,
-0.030095793306827545,
-0.09498877078294754,
-0.0558997206389904,
-0.061068929731845856,
-0.09166828542947769,
-0.12320312112569809,
0.10352121293544769,
0.05627685785293579,
-0.000810901983641088,
0.09557541459798813,
-0.11319228261709213,
0.05232219770550728,
0.10023441910743713,
0.18818753957748413,
0.01480093877762556,
0.06537862867116928,
-0.00019996984337922186,
-0.07343018054962158,
0.02489413507282734,
-0.15906572341918945,
0.03301693871617317,
0.10736016929149628,
0.06301625818014145,
0.09602268040180206,
-0.002393794944509864,
-0.17277118563652039,
-0.02879193052649498,
0.0751740112900734,
-0.1237097680568695,
-0.18038246035575867,
0.018297135829925537,
0.005402880255132914,
-0.11808972805738449,
0.020210329443216324,
0.0904625803232193,
-0.021881965920329094,
-0.036450717598199844,
-0.0006567696109414101,
0.07362498342990875,
-0.009819979779422283,
0.0753943994641304,
0.05126035213470459,
0.011863254941999912,
-0.06112390756607056,
0.06419005990028381,
0.1445275843143463,
0.0043587093241512775,
-0.0037828125059604645,
0.06855329871177673,
-0.06952079385519028,
-0.04888978227972984,
-0.07344858348369598,
0.04400406405329704,
0.06871302425861359,
-0.07098542153835297,
0.02134678326547146,
-0.03109569288790226,
0.008966058492660522,
0.18474425375461578,
-0.045035626739263535,
0.09702830761671066,
-0.009830251336097717,
0.02902742475271225,
-0.09812702238559723,
0.07591691613197327,
-0.08755198866128922,
0.06516138464212418,
0.057476289570331573,
0.028350386768579483,
-0.004485853016376495,
-0.029605945572257042,
-0.025555552914738655,
-0.01804892159998417,
-0.0688687413930893,
-0.05468963831663132,
-0.1221328005194664,
-0.01496676355600357,
-0.07195236533880234,
-0.08208072185516357,
-0.04768115282058716,
-0.022040430456399918,
0.029963912442326546,
-0.00000832407567941118,
0.01955513097345829,
-0.02942601777613163,
0.007778164930641651,
0.07583095133304596,
-0.1508917361497879,
0.0297428946942091,
0.07270117849111557,
-0.0731726661324501,
0.11320051550865173,
-0.041016191244125366,
0.0005434202612377703,
0.022185659036040306,
-0.08801073580980301,
0.028917985036969185,
-0.01166113093495369,
0.08240088820457458,
-0.004059983417391777,
-0.12614445388317108,
-0.05828571692109108,
-0.013808498159050941,
-0.060652103275060654,
0.04066718742251396,
0.06855908781290054,
-0.04653715342283249,
0.06838880479335785,
0.027050122618675232,
0.0010983238462358713,
-0.08106013387441635,
0.011681891977787018,
0.048953596502542496,
-0.01842726394534111,
0.07841720432043076,
-0.019567975774407387,
-0.002340521663427353,
-0.1377251297235489,
-0.02938166819512844,
0.02849600836634636,
-0.008048759773373604,
-0.025159651413559914,
-0.010170920751988888,
0.03505256399512291,
0.07192801684141159,
0.14245566725730896,
-0.08981052041053772,
-0.06289850920438766,
0.04473133385181427,
0.03778911754488945,
-0.011689643375575542,
-0.02961929328739643,
0.036805108189582825,
0.029716666787862778,
-0.05726531893014908,
-0.0010826662182807922,
-0.04473939910531044,
-0.0685148537158966,
-0.01144483219832182,
0.1556077003479004,
0.11162428557872772,
0.15730030834674835,
-0.06768147647380829,
-0.05132339149713516,
-0.002171772764995694,
-0.1600559502840042,
-0.06945602595806122,
-0.005748175084590912,
-0.03418194502592087,
0.004090669099241495,
-0.0014573335647583008,
0.14036020636558533,
-0.1654360443353653,
0.09069783985614777,
0.017596235498785973,
-0.07018358260393143,
-0.07417075335979462,
-0.19221477210521698,
0.011500732973217964,
0.013772934675216675,
-0.03508714959025383,
-0.10979392379522324,
0.10748369246721268,
0.12876099348068237,
0.0003582900098990649,
0.013301325961947441,
0.07388409227132797,
-0.07420632988214493,
0.019379211589694023,
0.038268379867076874,
0.004521660041064024,
-0.0180826298892498,
-0.06688473373651505,
0.10141598433256149,
0.012512018904089928,
0.01136852242052555,
0.10367652773857117,
0.09991702437400818,
0.07128202170133591,
-0.04517984017729759,
-0.013766402378678322,
-0.11036288738250732,
0.0625167191028595,
-0.036494769155979156,
-0.04962988570332527,
0.19565318524837494,
0.12314503639936447,
-0.0003075340355280787,
0.02797412872314453,
0.20553822815418243,
-0.013939072377979755,
-0.002271217992529273,
-0.11404544115066528,
0.07589488476514816,
0.025703828781843185,
0.0022840574383735657,
0.006452797446399927,
-0.1017652302980423,
0.036300256848335266,
0.14526575803756714,
0.0006781985866837204,
0.06894414126873016,
0.02043280377984047,
0.002427761908620596,
0.028130315244197845,
0.06159484013915062,
0.11201316863298416,
0.022899825125932693,
0.23100297152996063,
-0.0105661666020751,
0.046676572412252426,
-0.03576965257525444,
-0.04280508682131767,
-0.07943946123123169,
0.020871734246611595,
-0.05557077005505562,
-0.044929735362529755,
-0.04903688654303551,
0.05962243303656578,
-0.08685039728879929,
-0.20621514320373535,
0.0784551277756691,
-0.06272900849580765,
-0.069573312997818,
-0.03928481042385101,
-0.007055043242871761,
-0.08796579390764236,
-0.007286147680133581,
0.0597989484667778,
0.009662795811891556,
0.2880169749259949,
0.010373464785516262,
-0.057777103036642075,
-0.041030850261449814,
0.035198286175727844,
-0.10055617243051529,
0.20349960029125214,
0.036105453968048096,
0.00504318717867136,
0.04188361391425133,
-0.021037522703409195,
-0.15279024839401245,
0.1327103227376938,
-0.03775670751929283,
-0.01772109977900982,
0.008064967580139637,
0.1657577008008957,
0.06514346599578857,
0.10482396930456161,
0.03969550505280495,
-0.03288998827338219,
0.01808200031518936,
0.09534458816051483,
-0.023474207147955894,
-0.07744248956441879,
0.09404170513153076,
-0.0633256733417511,
0.11550069600343704,
0.06604772061109543,
-0.0264765415340662,
0.03405130282044411,
-0.0432092659175396,
0.02800295501947403,
0.03306903690099716,
0.10271750390529633,
0.014070089906454086,
-0.12331303209066391,
0.05337245762348175,
-0.07149547338485718,
0.05585994943976402,
-0.201334610581398,
-0.037075795233249664,
0.014810649678111076,
-0.02754898928105831,
-0.05670749396085739,
0.06682228296995163,
0.03612780570983887,
0.026545489206910133,
-0.05366295203566551,
-0.1312427669763565,
0.038695164024829865,
0.10614920407533646,
-0.07739134877920151,
-0.02971014752984047
] |
null | null |
transformers
|
# BlueBert-Base, Uncased, PubMed
## Model description
A BERT model pre-trained on PubMed abstracts
## Intended uses & limitations
#### How to use
Please see https://github.com/ncbi-nlp/bluebert
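Beyond the original repository, the checkpoint can also be loaded with the Hugging Face `transformers` library. The sketch below is an illustration only; it assumes the Hub id shown loads with the standard BERT classes, and it extracts a mean-pooled sentence embedding.

```python
import torch
from transformers import AutoTokenizer, AutoModel

model_id = "bionlp/bluebert_pubmed_uncased_L-12_H-768_A-12"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModel.from_pretrained(model_id)

# Illustrative sentence, already lowercased/tokenized as in "Training procedure" below.
sentence = "metformin is a first-line treatment for type 2 diabetes ."
inputs = tokenizer(sentence, return_tensors="pt")
with torch.no_grad():
    hidden = model(**inputs).last_hidden_state            # (1, seq_len, 768)
mask = inputs["attention_mask"].unsqueeze(-1)             # (1, seq_len, 1)
embedding = (hidden * mask).sum(dim=1) / mask.sum(dim=1)  # average over real tokens
print(embedding.shape)  # torch.Size([1, 768])
```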
## Training data
We provide [preprocessed PubMed texts](https://ftp.ncbi.nlm.nih.gov/pub/lu/Suppl/NCBI-BERT/pubmed_uncased_sentence_nltk.txt.tar.gz) that were used to pre-train the BlueBERT models.
The corpus contains ~4000M words extracted from the [PubMed ASCII code version](https://www.ncbi.nlm.nih.gov/research/bionlp/APIs/BioC-PubMed/).
Pre-trained model: https://huggingface.co/bert-base-uncased
## Training procedure
* lowercasing the text
* removing special characters outside the `\x00`-`\x7F` (ASCII) range
* tokenizing the text using the [NLTK Treebank tokenizer](https://www.nltk.org/_modules/nltk/tokenize/treebank.html)
Below is a code snippet for more details.
```python
import re
from nltk.tokenize import TreebankWordTokenizer

value = value.lower()                                # lowercase
value = re.sub(r'[\r\n]+', ' ', value)               # collapse line breaks into spaces
value = re.sub(r'[^\x00-\x7F]+', ' ', value)         # replace non-ASCII characters with spaces
tokenized = TreebankWordTokenizer().tokenize(value)  # NLTK Treebank word tokenization
sentence = ' '.join(tokenized)
sentence = re.sub(r"\s's\b", "'s", sentence)         # re-attach possessive 's to its noun
```
### BibTeX entry and citation info
```bibtex
@InProceedings{peng2019transfer,
author = {Yifan Peng and Shankai Yan and Zhiyong Lu},
title = {Transfer Learning in Biomedical Natural Language Processing: An Evaluation of BERT and ELMo on Ten Benchmarking Datasets},
booktitle = {Proceedings of the 2019 Workshop on Biomedical Natural Language Processing (BioNLP 2019)},
year = {2019},
pages = {58--65},
}
```
|
{"language": ["en"], "license": "cc0-1.0", "tags": ["bluebert"], "datasets": ["pubmed"]}
| null |
bionlp/bluebert_pubmed_uncased_L-12_H-768_A-12
|
[
"transformers",
"pytorch",
"bluebert",
"en",
"dataset:pubmed",
"license:cc0-1.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #bluebert #en #dataset-pubmed #license-cc0-1.0 #endpoints_compatible #region-us
|
# BlueBert-Base, Uncased, PubMed
## Model description
A BERT model pre-trained on PubMed abstracts
## Intended uses & limitations
#### How to use
Please see URL
## Training data
We provide preprocessed PubMed texts that were used to pre-train the BlueBERT models.
The corpus contains ~4000M words extracted from the PubMed ASCII code version.
Pre-trained model: URL
## Training procedure
* lowercasing the text
* removing special characters outside the '\x00'-'\x7F' (ASCII) range
* tokenizing the text using the NLTK Treebank tokenizer
Below is a code snippet for more details.
### BibTeX entry and citation info
|
[
"# BlueBert-Base, Uncased, PubMed",
"## Model description\n\nA BERT model pre-trained on PubMed abstracts",
"## Intended uses & limitations",
"#### How to use\n\nPlease see URL",
"## Training data\n\nWe provide preprocessed PubMed texts that were used to pre-train the BlueBERT models. \nThe corpus contains ~4000M words extracted from the PubMed ASCII code version. \n\nPre-trained model: URL",
"## Training procedure\n\n* lowercasing the text\n* removing speical chars '\\x00'-'\\x7F'\n* tokenizing the text using the NLTK Treebank tokenizer\n\nBelow is a code snippet for more details.",
"### BibTeX entry and citation info"
] |
[
"TAGS\n#transformers #pytorch #bluebert #en #dataset-pubmed #license-cc0-1.0 #endpoints_compatible #region-us \n",
"# BlueBert-Base, Uncased, PubMed",
"## Model description\n\nA BERT model pre-trained on PubMed abstracts",
"## Intended uses & limitations",
"#### How to use\n\nPlease see URL",
"## Training data\n\nWe provide preprocessed PubMed texts that were used to pre-train the BlueBERT models. \nThe corpus contains ~4000M words extracted from the PubMed ASCII code version. \n\nPre-trained model: URL",
"## Training procedure\n\n* lowercasing the text\n* removing speical chars '\\x00'-'\\x7F'\n* tokenizing the text using the NLTK Treebank tokenizer\n\nBelow is a code snippet for more details.",
"### BibTeX entry and citation info"
] |
[
40,
14,
16,
9,
8,
52,
56,
11
] |
[
"passage: TAGS\n#transformers #pytorch #bluebert #en #dataset-pubmed #license-cc0-1.0 #endpoints_compatible #region-us \n# BlueBert-Base, Uncased, PubMed## Model description\n\nA BERT model pre-trained on PubMed abstracts## Intended uses & limitations#### How to use\n\nPlease see URL## Training data\n\nWe provide preprocessed PubMed texts that were used to pre-train the BlueBERT models. \nThe corpus contains ~4000M words extracted from the PubMed ASCII code version. \n\nPre-trained model: URL## Training procedure\n\n* lowercasing the text\n* removing speical chars '\\x00'-'\\x7F'\n* tokenizing the text using the NLTK Treebank tokenizer\n\nBelow is a code snippet for more details.### BibTeX entry and citation info"
] |
[
-0.032565776258707047,
0.1432485282421112,
-0.001035172725096345,
0.052044063806533813,
0.08335095643997192,
-0.02759615331888199,
0.10668426752090454,
0.10914885997772217,
-0.12407352030277252,
0.0752844363451004,
0.121692955493927,
0.15657950937747955,
0.04071548208594322,
0.11867568641901016,
-0.05854639410972595,
-0.3453459143638611,
0.054730694741010666,
0.09559285640716553,
0.13332077860832214,
0.09460862725973129,
0.08879342675209045,
-0.04551278427243233,
0.06409647315740585,
-0.03738102689385414,
-0.08381420373916626,
-0.03353690728545189,
-0.014308705925941467,
-0.030760008841753006,
0.0805777981877327,
0.050085507333278656,
-0.010736918076872826,
0.036360275000333786,
0.03971417620778084,
-0.23604093492031097,
0.021613141521811485,
0.027213070541620255,
-0.023678071796894073,
0.1226692721247673,
-0.002913402859121561,
-0.030903253704309464,
0.16614268720149994,
-0.07400580495595932,
0.028691722080111504,
0.0427950918674469,
-0.13067345321178436,
-0.08656968176364899,
-0.09006927907466888,
0.1700519621372223,
0.1178998127579689,
0.019237350672483444,
0.004418142605572939,
0.14271435141563416,
-0.06396567076444626,
0.08548415452241898,
0.17636249959468842,
-0.29796743392944336,
-0.023086844012141228,
0.14227354526519775,
0.16917572915554047,
0.08989657461643219,
-0.08465850353240967,
0.022941824048757553,
0.08067718148231506,
0.022997522726655006,
0.01559118926525116,
-0.07327493280172348,
0.029947273433208466,
-0.0016869938699528575,
-0.1596657782793045,
-0.046334996819496155,
0.09026394039392471,
-0.005979602225124836,
-0.1053515076637268,
-0.07241314649581909,
-0.05393006280064583,
0.0510779470205307,
-0.001144012319855392,
-0.0036790689919143915,
0.025201715528964996,
0.007720819674432278,
0.01415867730975151,
-0.15514162182807922,
-0.05116172134876251,
-0.09419191628694534,
0.03136828914284706,
0.15907077491283417,
0.0342375785112381,
0.033616695553064346,
-0.06268128007650375,
0.11335895955562592,
-0.08033198863267899,
-0.091804638504982,
-0.04935867339372635,
-0.07492175698280334,
0.06497898697853088,
-0.018568074330687523,
-0.06041065976023674,
-0.07567004859447479,
0.018242981284856796,
0.2723148465156555,
0.0349125899374485,
-0.005948527250438929,
-0.07634620368480682,
0.08451887965202332,
0.03904052451252937,
0.0816490650177002,
-0.07432355731725693,
-0.013603372499346733,
0.06977618485689163,
0.0791563168168068,
0.06608501076698303,
0.0029693646356463432,
-0.12726716697216034,
-0.056228987872600555,
0.02867891453206539,
0.06453505903482437,
0.0074700526893138885,
0.023736845701932907,
-0.02545429766178131,
-0.06532015651464462,
0.13058719038963318,
-0.06319474428892136,
0.0057249972596764565,
-0.036784328520298004,
-0.010966042056679726,
-0.06060638651251793,
0.07182373851537704,
0.0026479712687432766,
-0.0964825376868248,
0.098696768283844,
-0.06808558851480484,
0.037479691207408905,
-0.057175565510988235,
-0.11483284831047058,
-0.0005327460821717978,
-0.19882449507713318,
0.007232644595205784,
-0.06310116499662399,
-0.18658322095870972,
-0.036035362631082535,
0.04356428608298302,
-0.010030874982476234,
0.016389112919569016,
-0.02456323429942131,
-0.013649115338921547,
-0.057623304426670074,
-0.040052443742752075,
-0.020722869783639908,
-0.032389115542173386,
0.06832366436719894,
-0.05545586720108986,
0.041120950132608414,
-0.19855356216430664,
0.02148432843387127,
-0.10832169651985168,
0.024550529196858406,
-0.15452462434768677,
0.06007012352347374,
-0.055654022842645645,
-0.01323305070400238,
-0.10406995564699173,
-0.06348727643489838,
-0.026132026687264442,
0.02704540826380253,
0.0787687599658966,
0.10861511528491974,
-0.18176662921905518,
-0.005445956718176603,
0.2915910482406616,
-0.08090104907751083,
-0.04892907291650772,
0.11780527234077454,
-0.08319035917520523,
0.07034043967723846,
0.10405193269252777,
0.22980351746082306,
0.017786765471100807,
-0.10351716727018356,
0.019651224836707115,
0.04273428022861481,
-0.0246109701693058,
-0.03723737224936485,
0.04349958896636963,
-0.08729755878448486,
-0.13153105974197388,
0.004652522969990969,
-0.13622629642486572,
-0.04253296181559563,
-0.022963842377066612,
-0.017628733068704605,
0.00212555262260139,
-0.07288195937871933,
0.019376717507839203,
0.003004814498126507,
0.016288328915834427,
-0.005853644572198391,
-0.05425185337662697,
0.1411164104938507,
0.07328827679157257,
-0.03798144683241844,
0.06479512155056,
-0.025654688477516174,
0.02527468465268612,
-0.11580285429954529,
0.003038342110812664,
-0.18936802446842194,
0.15956328809261322,
0.03776320442557335,
-0.01258594449609518,
0.04988681524991989,
-0.021409064531326294,
0.010702354833483696,
0.06898067891597748,
-0.08092793077230453,
-0.002118125557899475,
0.016568830236792564,
-0.0194692425429821,
-0.16013182699680328,
-0.12313668429851532,
-0.07365354150533676,
-0.050576213747262955,
-0.04740529507398605,
-0.18186664581298828,
0.08626622706651688,
-0.049163222312927246,
0.05484411120414734,
-0.01582428067922592,
-0.01374462153762579,
0.06873723119497299,
0.037510164082050323,
-0.016675787046551704,
-0.05441029742360115,
0.057550471276044846,
0.04879848659038544,
-0.08546838909387589,
0.04723741486668587,
-0.11410976201295853,
-0.09037069231271744,
0.0842788964509964,
0.05695563182234764,
-0.005013499408960342,
-0.022680552676320076,
-0.055102840065956116,
-0.01606469415128231,
-0.0817440077662468,
-0.046553753316402435,
0.1565428376197815,
0.015414589084684849,
0.1571391075849533,
-0.12076523154973984,
-0.0616789385676384,
-0.030700936913490295,
-0.07500655949115753,
0.015170382335782051,
0.07131016999483109,
0.03850214555859566,
-0.16557317972183228,
0.06795185804367065,
-0.036196980625391006,
-0.0483657531440258,
0.20025290548801422,
0.0010038268519565463,
-0.07289169728755951,
-0.041880104690790176,
0.032325781881809235,
0.02376396767795086,
0.14285026490688324,
-0.018960827961564064,
-0.005224443972110748,
0.04086006060242653,
0.012282232753932476,
0.058082517236471176,
-0.11330534517765045,
-0.016689728945493698,
0.006443836726248264,
-0.025060473009943962,
-0.036239560693502426,
0.006992855109274387,
-0.03522097319364548,
0.10935693979263306,
0.0429953895509243,
0.03650175407528877,
0.04714803397655487,
-0.0003441545704845339,
-0.1020563617348671,
0.22022569179534912,
-0.08868429809808731,
-0.16138646006584167,
-0.14599651098251343,
0.006970349699258804,
-0.007277173455804586,
0.03623972460627556,
0.05198915675282478,
-0.0388248972594738,
-0.009933371096849442,
-0.08075737953186035,
0.009357634000480175,
-0.03348839282989502,
-0.04548808932304382,
-0.08864015340805054,
0.0004242435679771006,
0.006965065374970436,
-0.14688631892204285,
-0.013756179250776768,
-0.05920032784342766,
0.0093708960339427,
-0.01657509058713913,
-0.08725400269031525,
0.07617079466581345,
0.09501710534095764,
-0.03969310596585274,
0.006869584787636995,
-0.03669176623225212,
0.19373753666877747,
0.006432206369936466,
0.012314465828239918,
0.07846566289663315,
-0.07540418207645416,
0.04424662888050079,
0.07924014329910278,
0.012300867587327957,
-0.07155691832304001,
0.020697368308901787,
0.005969990976154804,
-0.04889143630862236,
-0.25448864698410034,
-0.05245745927095413,
-0.03444699943065643,
0.08639992773532867,
0.08771464973688126,
0.060896359384059906,
0.11958467215299606,
0.07315933704376221,
-0.05294926464557648,
0.11671233922243118,
0.01524159125983715,
0.15368473529815674,
-0.07748283445835114,
0.006126164458692074,
0.07906617969274521,
-0.061701517552137375,
0.014777714386582375,
0.09138525277376175,
0.0005636248970404267,
0.14003482460975647,
0.009781097993254662,
0.11998902261257172,
0.11528633534908295,
0.036369819194078445,
0.08366386592388153,
0.1058075949549675,
-0.03558655455708504,
0.041470255702733994,
-0.014437876641750336,
-0.07193686068058014,
-0.07833657413721085,
0.01980351097881794,
-0.053543951362371445,
0.061860259622335434,
-0.046944908797740936,
-0.02447429485619068,
0.021564628928899765,
0.20897896587848663,
0.03823181986808777,
-0.18949460983276367,
-0.08323539048433304,
0.03862617909908295,
-0.03109188750386238,
-0.12455331534147263,
0.030217932537198067,
0.09998827427625656,
-0.11741809546947479,
0.015975603833794594,
-0.05311710759997368,
0.1364365667104721,
-0.09090901911258698,
0.0323493666946888,
-0.06564600020647049,
0.0024403759744018316,
-0.05830205976963043,
0.0833297148346901,
-0.32345789670944214,
0.17062628269195557,
0.048944566398859024,
0.09521299600601196,
-0.025075022131204605,
0.004741260316222906,
0.0163884237408638,
0.07189661264419556,
0.1465008407831192,
0.013499541208148003,
0.10012179613113403,
-0.022007033228874207,
-0.10320966690778732,
-0.005955467000603676,
0.08018215000629425,
-0.08606602996587753,
0.053612567484378815,
0.01593593694269657,
0.027522090822458267,
-0.040331728756427765,
-0.11629807204008102,
-0.22272679209709167,
-0.13109803199768066,
0.027434978634119034,
-0.048428863286972046,
0.07563630491495132,
-0.040425509214401245,
-0.03235386684536934,
0.13786649703979492,
0.15101781487464905,
-0.12566140294075012,
-0.06900713592767715,
-0.12825924158096313,
0.06514589488506317,
0.05924367532134056,
-0.040671639144420624,
0.026446234434843063,
-0.02326749823987484,
0.04832243546843529,
-0.08258765935897827,
-0.09190002083778381,
0.058498021215200424,
-0.07849347591400146,
-0.14630712568759918,
-0.03358946368098259,
0.1714974045753479,
0.1050625815987587,
0.03502912446856499,
0.03796513378620148,
0.01477876864373684,
0.0059294626116752625,
-0.10891075432300568,
0.006045982241630554,
0.08827506750822067,
0.16740165650844574,
0.0423189178109169,
-0.135157510638237,
-0.04462696984410286,
-0.03827010467648506,
0.02133158966898918,
0.14627082645893097,
0.19559930264949799,
-0.050802092999219894,
0.10432732105255127,
0.20258721709251404,
-0.15136601030826569,
-0.16985084116458893,
0.024515772238373756,
-0.006279297638684511,
0.01667688600718975,
-0.03644126281142235,
-0.20479243993759155,
0.047836508601903915,
0.07570286095142365,
-0.01785963959991932,
-0.022166676819324493,
-0.16539260745048523,
-0.10011713951826096,
0.10671192407608032,
0.08499083667993546,
0.14615651965141296,
-0.1393333375453949,
-0.0991736650466919,
-0.05446843430399895,
-0.19887304306030273,
0.1457996517419815,
-0.08664074540138245,
0.10846824198961258,
-0.05925489589571953,
-0.047834545373916626,
0.034455813467502594,
-0.04700199514627457,
0.12753599882125854,
0.05197352170944214,
0.06558509916067123,
-0.06759515404701233,
-0.14044590294361115,
0.09914463013410568,
-0.07520484924316406,
0.15764528512954712,
-0.06992218643426895,
0.061024706810712814,
-0.16769270598888397,
-0.033096153289079666,
-0.09020764380693436,
-0.002895382000133395,
-0.05770270153880119,
-0.0923520028591156,
-0.04990623518824577,
0.04309935122728348,
0.04859190434217453,
0.041685376316308975,
0.17318831384181976,
-0.06367357820272446,
0.054770130664110184,
0.06627572327852249,
0.19346016645431519,
0.06638212502002716,
0.00854884646832943,
-0.003222923493012786,
-0.057076346129179,
0.11646459996700287,
-0.15452560782432556,
0.010311892256140709,
0.09133384376764297,
0.0676913633942604,
0.12918613851070404,
0.056965604424476624,
-0.07221376895904541,
-0.0004539572400972247,
-0.009221842512488365,
-0.17486566305160522,
-0.12906798720359802,
-0.005652719177305698,
0.028681667521595955,
-0.08325659483671188,
0.07664373517036438,
0.09411962330341339,
-0.061266858130693436,
-0.07757758349180222,
-0.010074038989841938,
0.025719884783029556,
-0.0913379117846489,
0.09000822901725769,
0.11573977023363113,
0.05285476893186569,
-0.08584675937891006,
0.06284522265195847,
0.05095129460096359,
0.04557303711771965,
0.04884328693151474,
0.0649239644408226,
-0.09008731693029404,
-0.0544377863407135,
0.049870431423187256,
0.1279681771993637,
0.004065630491822958,
-0.034399863332509995,
-0.059508875012397766,
-0.03095143660902977,
0.03769479691982269,
0.24703431129455566,
0.05098516494035721,
0.036797117441892624,
-0.054435744881629944,
0.012373032979667187,
-0.15962283313274384,
0.042039334774017334,
-0.011260006576776505,
0.06076040118932724,
0.0072171990759670734,
0.053505789488554,
-0.017036639153957367,
0.024788955226540565,
-0.033326976001262665,
-0.005515484604984522,
-0.17509698867797852,
-0.009598412550985813,
-0.2453565150499344,
0.0404634065926075,
-0.07267244160175323,
-0.033802345395088196,
-0.08436667919158936,
-0.00796198658645153,
0.030442439019680023,
0.02058904990553856,
-0.07582994550466537,
-0.008607336319983006,
-0.002040069317445159,
0.029620684683322906,
-0.09124191105365753,
0.017216745764017105,
-0.0023171233478933573,
-0.029962530359625816,
0.07831431180238724,
-0.005293817259371281,
-0.0014222266618162394,
-0.009533651173114777,
-0.09422683715820312,
-0.0032225940376520157,
0.026092153042554855,
-0.031189268454909325,
-0.0011357305338606238,
-0.011813472956418991,
0.005085271317511797,
-0.025022558867931366,
0.0019968191627413034,
0.007380209397524595,
0.13748246431350708,
-0.10374588519334793,
0.0920829325914383,
0.0048494404181838036,
-0.00047722909948788583,
-0.04439966008067131,
0.07455641776323318,
0.008848773315548897,
0.0786178857088089,
0.11778096854686737,
-0.05333786830306053,
0.023348679766058922,
-0.11547961831092834,
-0.005427532363682985,
0.003531152382493019,
-0.09057998657226562,
-0.08854302763938904,
-0.04690864682197571,
0.021487727761268616,
0.023700661957263947,
0.16264064610004425,
0.008296261541545391,
-0.036221299320459366,
0.016416925936937332,
0.03964073583483696,
0.07130388915538788,
-0.010919073596596718,
0.23353460431098938,
-0.025249822065234184,
-0.05208692327141762,
0.017752278596162796,
0.08054269105195999,
0.003454718505963683,
-0.0021873589139431715,
0.12345481663942337,
0.1443176418542862,
0.05030902847647667,
0.07046309113502502,
-0.008926404640078545,
0.02394578605890274,
-0.13765211403369904,
-0.07515531778335571,
0.11863207817077637,
0.03694436326622963,
-0.03384555131196976,
0.13067252933979034,
0.1257045567035675,
-0.15639802813529968,
0.06121983006596565,
0.09686338156461716,
-0.08489343523979187,
-0.15683774650096893,
-0.18975703418254852,
-0.00585548160597682,
0.007152298931032419,
-0.01294865645468235,
-0.13734716176986694,
-0.017255494371056557,
0.11792097985744476,
0.0021357897203415632,
-0.013163461349904537,
0.05118545889854431,
-0.032969992607831955,
-0.04641472175717354,
0.09093383699655533,
-0.03468465059995651,
0.039901621639728546,
-0.09464476257562637,
0.03509778156876564,
0.04578292369842529,
-0.07139609009027481,
0.061151906847953796,
0.01448318362236023,
0.06331706047058105,
0.018984336405992508,
-0.03382333368062973,
-0.08998307585716248,
0.03374247997999191,
0.05800392106175423,
0.08629558235406876,
0.2315007597208023,
0.07322410494089127,
-0.09164629131555557,
0.031092196702957153,
0.22232452034950256,
-0.018795689567923546,
-0.07223622500896454,
-0.09035579115152359,
0.17424596846103668,
0.05984210968017578,
0.007782594300806522,
-0.030294284224510193,
-0.09167385846376419,
0.08061935007572174,
0.20757870376110077,
0.14956101775169373,
0.03467890992760658,
0.009497946128249168,
0.02420778013765812,
0.009072725661098957,
0.1369893103837967,
0.08436508476734161,
0.04073069617152214,
0.16855023801326752,
-0.06846866011619568,
-0.029810482636094093,
-0.0013285111635923386,
-0.07704672962427139,
-0.07403644174337387,
0.02308877371251583,
0.022769730538129807,
-0.09467359632253647,
-0.030434755608439445,
0.05771997570991516,
-0.14507979154586792,
-0.062116120010614395,
-0.05176067724823952,
-0.09330932050943375,
-0.10056782513856888,
-0.04630579054355621,
-0.004940628539770842,
-0.022038200870156288,
0.05096861720085144,
0.0097368648275733,
-0.012082521803677082,
0.16583706438541412,
-0.030796246603131294,
-0.12280738353729248,
-0.06644376367330551,
0.04358479380607605,
-0.06339152902364731,
0.14863431453704834,
-0.004786082077771425,
0.028745699673891068,
0.08193977177143097,
0.007604834623634815,
-0.0907958447933197,
0.06843239068984985,
-0.04185095056891441,
0.02258402481675148,
0.04340328648686409,
0.08212126791477203,
-0.008171199820935726,
0.08271679282188416,
0.006071192212402821,
-0.1178164929151535,
-0.005307093262672424,
-0.0023410851135849953,
-0.11452129483222961,
-0.14205312728881836,
0.07190581411123276,
-0.07233569025993347,
0.11937106400728226,
0.12687668204307556,
-0.07425834238529205,
-0.02050125226378441,
-0.07003717869520187,
0.035310711711645126,
0.07630129158496857,
0.01770094409584999,
-0.012155843898653984,
-0.09609217196702957,
-0.002486639190465212,
0.004504182375967503,
0.009813432581722736,
-0.29436901211738586,
-0.033069293946027756,
-0.0668133944272995,
-0.05702054500579834,
-0.11268255859613419,
0.059505049139261246,
0.095759816467762,
0.05091598257422447,
-0.023653706535696983,
-0.1333695352077484,
-0.010974916629493237,
0.08531268686056137,
-0.1110900342464447,
-0.09578975290060043
] |
null | null |
transformers
|
# BlueBert-Base, Uncased, PubMed
## Model description
A BERT model pre-trained on PubMed abstracts.
## Intended uses & limitations
#### How to use
Please see https://github.com/ncbi-nlp/bluebert
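For a quick start without the original codebase, the checkpoint can also be loaded through the Transformers library. The snippet below is a minimal sketch (an assumption about standard Transformers usage, not part of the original instructions) for extracting contextual embeddings with the `bionlp/bluebert_pubmed_uncased_L-24_H-1024_A-16` checkpoint:

```python
# Minimal sketch: load the checkpoint for feature extraction (not from the original repo docs).
from transformers import AutoTokenizer, AutoModel

model_id = "bionlp/bluebert_pubmed_uncased_L-24_H-1024_A-16"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModel.from_pretrained(model_id)

inputs = tokenizer("Aspirin inhibits platelet aggregation.", return_tensors="pt")
outputs = model(**inputs)
print(outputs.last_hidden_state.shape)  # (batch size, sequence length, hidden size)
```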
## Training data
We provide [preprocessed PubMed texts](https://ftp.ncbi.nlm.nih.gov/pub/lu/Suppl/NCBI-BERT/pubmed_uncased_sentence_nltk.txt.tar.gz) that were used to pre-train the BlueBERT models.
The corpus contains ~4000M words extracted from the [PubMed ASCII code version](https://www.ncbi.nlm.nih.gov/research/bionlp/APIs/BioC-PubMed/).
Pre-trained model: https://huggingface.co/bert-large-uncased
## Training procedure
* lowercasing the text
* removing special characters outside the `\x00`-`\x7F` (ASCII) range
* tokenizing the text using the [NLTK Treebank tokenizer](https://www.nltk.org/_modules/nltk/tokenize/treebank.html)
Below is a code snippet for more details.
```python
import re
from nltk.tokenize import TreebankWordTokenizer

# `value` holds one raw PubMed abstract as a plain string
value = value.lower()
value = re.sub(r'[\r\n]+', ' ', value)        # collapse line breaks into spaces
value = re.sub(r'[^\x00-\x7F]+', ' ', value)  # drop non-ASCII characters
tokenized = TreebankWordTokenizer().tokenize(value)
sentence = ' '.join(tokenized)
sentence = re.sub(r"\s's\b", "'s", sentence)  # re-attach possessive 's split off by the tokenizer
```
### BibTeX entry and citation info
```bibtex
@InProceedings{peng2019transfer,
author = {Yifan Peng and Shankai Yan and Zhiyong Lu},
title = {Transfer Learning in Biomedical Natural Language Processing: An Evaluation of BERT and ELMo on Ten Benchmarking Datasets},
booktitle = {Proceedings of the 2019 Workshop on Biomedical Natural Language Processing (BioNLP 2019)},
year = {2019},
pages = {58--65},
}
```
### Acknowledgments
This work was supported by the Intramural Research Programs of the National Institutes of Health, National Library of
Medicine and Clinical Center. This work was supported by the National Library of Medicine of the National Institutes of Health under award number 4R00LM013001-01.
We are also grateful to the authors of BERT and ELMo for making their data and code publicly available.
We would like to thank Dr Sun Kim for processing the PubMed texts.
### Disclaimer
This tool shows the results of research conducted in the Computational Biology Branch, NCBI. The information produced
on this website is not intended for direct diagnostic use or medical decision-making without review and oversight
by a clinical professional. Individuals should not change their health behavior solely on the basis of information
produced on this website. NIH does not independently verify the validity or utility of the information produced
by this tool. If you have questions about the information produced on this website, please see a health care
professional. More information about NCBI's disclaimer policy is available.
|
{"language": ["en"], "license": "cc0-1.0", "tags": ["bert", "bluebert"], "datasets": ["PubMed"]}
| null |
bionlp/bluebert_pubmed_uncased_L-24_H-1024_A-16
|
[
"transformers",
"pytorch",
"jax",
"bert",
"bluebert",
"en",
"dataset:PubMed",
"license:cc0-1.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #bert #bluebert #en #dataset-PubMed #license-cc0-1.0 #endpoints_compatible #region-us
|
# BlueBert-Base, Uncased, PubMed
## Model description
A BERT model pre-trained on PubMed abstracts.
## Intended uses & limitations
#### How to use
Please see URL
## Training data
We provide preprocessed PubMed texts that were used to pre-train the BlueBERT models.
The corpus contains ~4000M words extracted from the PubMed ASCII code version.
Pre-trained model: URL
## Training procedure
* lowercasing the text
* removing speical chars '\x00'-'\x7F'
* tokenizing the text using the NLTK Treebank tokenizer
Below is a code snippet for more details.
### BibTeX entry and citation info
### Acknowledgments
This work was supported by the Intramural Research Programs of the National Institutes of Health, National Library of
Medicine and Clinical Center. This work was supported by the National Library of Medicine of the National Institutes of Health under award number 4R00LM013001-01.
We are also grateful to the authors of BERT and ELMo to make the data and codes publicly available.
We would like to thank Dr Sun Kim for processing the PubMed texts.
### Disclaimer
This tool shows the results of research conducted in the Computational Biology Branch, NCBI. The information produced
on this website is not intended for direct diagnostic use or medical decision-making without review and oversight
by a clinical professional. Individuals should not change their health behavior solely on the basis of information
produced on this website. NIH does not independently verify the validity or utility of the information produced
by this tool. If you have questions about the information produced on this website, please see a health care
professional. More information about NCBI's disclaimer policy is available.
|
[
"# BlueBert-Base, Uncased, PubMed",
"## Model description\n\nA BERT model pre-trained on PubMed abstracts.",
"## Intended uses & limitations",
"#### How to use\n\nPlease see URL",
"## Training data\n\nWe provide preprocessed PubMed texts that were used to pre-train the BlueBERT models. \nThe corpus contains ~4000M words extracted from the PubMed ASCII code version. \n\nPre-trained model: URL",
"## Training procedure\n\n* lowercasing the text\n* removing speical chars '\\x00'-'\\x7F'\n* tokenizing the text using the NLTK Treebank tokenizer\n\nBelow is a code snippet for more details.",
"### BibTeX entry and citation info",
"### Acknowledgments\n\nThis work was supported by the Intramural Research Programs of the National Institutes of Health, National Library of\nMedicine and Clinical Center. This work was supported by the National Library of Medicine of the National Institutes of Health under award number 4R00LM013001-01.\n\nWe are also grateful to the authors of BERT and ELMo to make the data and codes publicly available.\n\nWe would like to thank Dr Sun Kim for processing the PubMed texts.",
"### Disclaimer\n\nThis tool shows the results of research conducted in the Computational Biology Branch, NCBI. The information produced\non this website is not intended for direct diagnostic use or medical decision-making without review and oversight\nby a clinical professional. Individuals should not change their health behavior solely on the basis of information\nproduced on this website. NIH does not independently verify the validity or utility of the information produced\nby this tool. If you have questions about the information produced on this website, please see a health care\nprofessional. More information about NCBI's disclaimer policy is available."
] |
[
"TAGS\n#transformers #pytorch #jax #bert #bluebert #en #dataset-PubMed #license-cc0-1.0 #endpoints_compatible #region-us \n",
"# BlueBert-Base, Uncased, PubMed",
"## Model description\n\nA BERT model pre-trained on PubMed abstracts.",
"## Intended uses & limitations",
"#### How to use\n\nPlease see URL",
"## Training data\n\nWe provide preprocessed PubMed texts that were used to pre-train the BlueBERT models. \nThe corpus contains ~4000M words extracted from the PubMed ASCII code version. \n\nPre-trained model: URL",
"## Training procedure\n\n* lowercasing the text\n* removing speical chars '\\x00'-'\\x7F'\n* tokenizing the text using the NLTK Treebank tokenizer\n\nBelow is a code snippet for more details.",
"### BibTeX entry and citation info",
"### Acknowledgments\n\nThis work was supported by the Intramural Research Programs of the National Institutes of Health, National Library of\nMedicine and Clinical Center. This work was supported by the National Library of Medicine of the National Institutes of Health under award number 4R00LM013001-01.\n\nWe are also grateful to the authors of BERT and ELMo to make the data and codes publicly available.\n\nWe would like to thank Dr Sun Kim for processing the PubMed texts.",
"### Disclaimer\n\nThis tool shows the results of research conducted in the Computational Biology Branch, NCBI. The information produced\non this website is not intended for direct diagnostic use or medical decision-making without review and oversight\nby a clinical professional. Individuals should not change their health behavior solely on the basis of information\nproduced on this website. NIH does not independently verify the validity or utility of the information produced\nby this tool. If you have questions about the information produced on this website, please see a health care\nprofessional. More information about NCBI's disclaimer policy is available."
] |
[
45,
14,
17,
9,
8,
52,
56,
11,
109,
130
] |
[
"passage: TAGS\n#transformers #pytorch #jax #bert #bluebert #en #dataset-PubMed #license-cc0-1.0 #endpoints_compatible #region-us \n# BlueBert-Base, Uncased, PubMed## Model description\n\nA BERT model pre-trained on PubMed abstracts.## Intended uses & limitations#### How to use\n\nPlease see URL## Training data\n\nWe provide preprocessed PubMed texts that were used to pre-train the BlueBERT models. \nThe corpus contains ~4000M words extracted from the PubMed ASCII code version. \n\nPre-trained model: URL## Training procedure\n\n* lowercasing the text\n* removing speical chars '\\x00'-'\\x7F'\n* tokenizing the text using the NLTK Treebank tokenizer\n\nBelow is a code snippet for more details.### BibTeX entry and citation info### Acknowledgments\n\nThis work was supported by the Intramural Research Programs of the National Institutes of Health, National Library of\nMedicine and Clinical Center. This work was supported by the National Library of Medicine of the National Institutes of Health under award number 4R00LM013001-01.\n\nWe are also grateful to the authors of BERT and ELMo to make the data and codes publicly available.\n\nWe would like to thank Dr Sun Kim for processing the PubMed texts.### Disclaimer\n\nThis tool shows the results of research conducted in the Computational Biology Branch, NCBI. The information produced\non this website is not intended for direct diagnostic use or medical decision-making without review and oversight\nby a clinical professional. Individuals should not change their health behavior solely on the basis of information\nproduced on this website. NIH does not independently verify the validity or utility of the information produced\nby this tool. If you have questions about the information produced on this website, please see a health care\nprofessional. More information about NCBI's disclaimer policy is available."
] |
[
-0.009972762316465378,
0.242561936378479,
-0.00659784534946084,
0.013092860579490662,
0.06205746531486511,
0.005284602753818035,
0.0624539740383625,
0.129481703042984,
0.0027574331033974886,
0.14823220670223236,
0.0168942678719759,
0.07972858846187592,
0.1079392209649086,
0.010273542255163193,
0.009613659232854843,
-0.1682787388563156,
0.01322938036173582,
0.028656216338276863,
0.17575334012508392,
0.055301833897829056,
0.05251295864582062,
-0.049976646900177,
0.06340251117944717,
-0.0022831475362181664,
-0.006439019925892353,
-0.027464307844638824,
0.013549371622502804,
0.001319805160164833,
0.08779682964086533,
0.030573109164834023,
0.02108795940876007,
0.015137912705540657,
0.04422109201550484,
-0.21768410503864288,
0.0023198348935693502,
0.04243422672152519,
-0.019411873072385788,
0.09184969216585159,
0.010254060849547386,
-0.04627609997987747,
0.13170763850212097,
-0.10469314455986023,
0.04159977659583092,
0.03093680739402771,
-0.11713190376758575,
-0.1724236160516739,
-0.12973052263259888,
0.14123167097568512,
0.022095220163464546,
0.059534333646297455,
0.005711809266358614,
0.12363108992576599,
0.008640690706670284,
0.026424061506986618,
0.11712276190519333,
-0.13677243888378143,
0.00965789146721363,
0.024393051862716675,
0.08003538846969604,
0.13856175541877747,
-0.07934331148862839,
0.013838675804436207,
0.00562524376437068,
-0.007337919436395168,
0.07736723124980927,
-0.03595873713493347,
0.04697798192501068,
0.013643178157508373,
-0.10161063820123672,
-0.04893272742629051,
0.09967383742332458,
-0.036662228405475616,
-0.09635061770677567,
-0.07921718806028366,
-0.007085001096129417,
0.08889022469520569,
0.07307833433151245,
-0.044027771800756454,
0.054813578724861145,
-0.048857755959033966,
0.09620044380426407,
-0.04667871072888374,
-0.0663875862956047,
-0.049387168139219284,
0.014008082449436188,
0.07806187868118286,
0.024459993466734886,
0.0245214831084013,
0.013722922652959824,
0.08074270933866501,
-0.07666415721178055,
-0.057857125997543335,
-0.07439683377742767,
-0.031030863523483276,
0.0194399394094944,
-0.024494092911481857,
0.016403868794441223,
-0.08133117109537125,
0.033707208931446075,
0.15248644351959229,
-0.04931081086397171,
-0.01360945962369442,
-0.08983571082353592,
-0.012271985411643982,
0.09783671051263809,
0.06020711362361908,
-0.07427936047315598,
-0.035832479596138,
-0.0013987217098474503,
0.013766673393547535,
0.04226832464337349,
-0.0038772900588810444,
0.002642608480527997,
0.006881420034915209,
0.024062518030405045,
0.07862301915884018,
-0.0007021232158876956,
-0.02710721641778946,
-0.07618675380945206,
-0.008060735650360584,
0.22680336236953735,
-0.07949340343475342,
0.015023671090602875,
0.03805069997906685,
0.0045499037951231,
0.00673656864091754,
0.08196406066417694,
-0.01995701715350151,
-0.060214970260858536,
0.10335050523281097,
-0.07365776598453522,
-0.003136923536658287,
-0.035129398107528687,
-0.10741382092237473,
0.05159812048077583,
-0.09539981186389923,
-0.011202841065824032,
-0.10418368130922318,
-0.032937757670879364,
-0.08998693525791168,
0.009866530075669289,
-0.05640862137079239,
0.07549059391021729,
0.02087107114493847,
0.03642011433839798,
-0.010781540535390377,
-0.018709782510995865,
-0.01892382651567459,
-0.02704952098429203,
0.04284852743148804,
-0.08240877091884613,
0.030100496485829353,
-0.07631964981555939,
0.008318319916725159,
-0.07443073391914368,
0.02284286729991436,
-0.05145587399601936,
-0.0343320369720459,
-0.07819110155105591,
-0.034571193158626556,
-0.10894344002008438,
-0.025959694758057594,
-0.024355929344892502,
-0.0379558764398098,
0.06855005025863647,
0.09493080526590347,
-0.10388006269931793,
-0.009941431693732738,
0.16574063897132874,
-0.08026713877916336,
-0.07712802290916443,
0.09563009440898895,
-0.03495791181921959,
0.06213020905852318,
0.0693817287683487,
0.13265518844127655,
0.05376741290092468,
-0.17769776284694672,
-0.11741498112678528,
-0.1114492416381836,
-0.0176515132188797,
0.031509481370449066,
0.04513312876224518,
-0.05911397188901901,
-0.02144731394946575,
-0.020287683233618736,
-0.08644964545965195,
-0.07812425494194031,
-0.028538458049297333,
0.013448488898575306,
0.019609056413173676,
-0.05285355821251869,
-0.030283547937870026,
0.01010249275714159,
-0.03868405148386955,
-0.02435513213276863,
-0.031396009027957916,
0.01722933165729046,
0.05917758122086525,
-0.02130785956978798,
0.040010929107666016,
-0.06902162730693817,
-0.013983246870338917,
-0.039802439510822296,
-0.016534797847270966,
-0.13742855191230774,
0.050534818321466446,
0.07467204332351685,
-0.10806089639663696,
0.06703227013349533,
-0.07551731914281845,
-0.017503930255770683,
0.06552845984697342,
-0.03897678107023239,
0.0031944834627211094,
-0.008302442729473114,
0.0011945657897740602,
-0.08864660561084747,
-0.09998214244842529,
-0.0020269479136914015,
-0.035748790949583054,
0.0018269841093569994,
-0.1464734971523285,
0.030208822339773178,
0.017132876440882683,
0.06393144279718399,
0.059180330485105515,
-0.09183249622583389,
0.07201892137527466,
0.03835592791438103,
0.0017396969487890601,
0.0070039513520896435,
0.005183633882552385,
-0.012724418193101883,
-0.05231424793601036,
0.0795067548751831,
-0.1383344531059265,
-0.18564823269844055,
-0.01556328497827053,
0.09767427295446396,
-0.016103044152259827,
0.00696810195222497,
-0.023147201165556908,
-0.022197892889380455,
-0.09884438663721085,
-0.11860432475805283,
0.1639375239610672,
0.01511833630502224,
0.0478031262755394,
-0.049732450395822525,
-0.061274539679288864,
-0.019721783697605133,
-0.05713609233498573,
-0.024011997506022453,
0.05043654516339302,
0.049581378698349,
-0.15060389041900635,
0.04107550159096718,
-0.00015718476788606495,
0.05982569605112076,
0.1334613859653473,
0.049151461571455,
-0.11094807833433151,
-0.04419893026351929,
-0.045248404145240784,
0.06793804466724396,
0.09157409518957138,
-0.026905691251158714,
0.01424112357199192,
0.060673341155052185,
0.011645596474409103,
0.01934242621064186,
-0.05247069522738457,
0.0513225682079792,
0.015612240880727768,
-0.012526072561740875,
-0.042249102145433426,
-0.009069454856216908,
-0.0320599228143692,
0.11334104090929031,
0.02878238447010517,
0.13712364435195923,
-0.03065192885696888,
-0.027985936030745506,
-0.11997252702713013,
0.12559446692466736,
-0.06181838735938072,
-0.19286863505840302,
-0.16989359259605408,
-0.04719869792461395,
0.039436813443899155,
0.009437399916350842,
0.03634488955140114,
-0.03433730825781822,
-0.08771554380655289,
-0.10963505506515503,
0.011236615478992462,
0.03360693156719208,
-0.0994202047586441,
-0.05931251868605614,
0.008014350198209286,
0.013401873409748077,
-0.11393587291240692,
-0.0010992051102221012,
-0.020565124228596687,
0.007000413723289967,
0.039185140281915665,
-0.016761956736445427,
0.09873969107866287,
0.06768140941858292,
0.032624680548906326,
-0.04773872345685959,
0.011276699602603912,
0.0981489047408104,
-0.05146705359220505,
0.10556574165821075,
0.11109315603971481,
-0.020961226895451546,
0.06393250077962875,
0.061083268374204636,
0.028804443776607513,
-0.07383256405591965,
0.05426471307873726,
0.03427205979824066,
-0.01937219314277172,
-0.2979229688644409,
-0.04811682179570198,
-0.027328573167324066,
-0.008563104085624218,
0.013160529546439648,
0.03499957174062729,
0.06912276148796082,
0.027429722249507904,
-0.025031674653291702,
0.030907941982150078,
-0.012109355069696903,
0.10655832290649414,
0.022225219756364822,
0.02220171131193638,
0.0708017498254776,
-0.03918334096670151,
0.0738050565123558,
0.11729893088340759,
0.009322994388639927,
0.17902792990207672,
0.011195730417966843,
0.20548208057880402,
0.09387937933206558,
0.03939763456583023,
0.04545416682958603,
0.08719310909509659,
-0.00674481550231576,
0.05879265442490578,
-0.009551381692290306,
-0.06472111493349075,
-0.07420919835567474,
0.036650098860263824,
-0.03131536766886711,
-0.030510718002915382,
0.00021683424711227417,
-0.12009337544441223,
-0.00791650079190731,
0.12040546536445618,
0.04158362001180649,
-0.09948652982711792,
-0.07724778354167938,
0.06243637949228287,
-0.07550453394651413,
-0.11667107045650482,
-0.03158409148454666,
0.1308564841747284,
-0.10895344614982605,
0.010736250318586826,
-0.015321433544158936,
0.09875979274511337,
-0.13413766026496887,
0.0018479377031326294,
-0.07433634996414185,
-0.05541916564106941,
-0.06797204166650772,
0.05980833247303963,
-0.13994860649108887,
0.08683285117149353,
0.025788890197873116,
0.06645102798938751,
-0.05674970522522926,
0.00043405068572610617,
-0.015389877371490002,
0.08931519836187363,
0.13001450896263123,
0.029600415378808975,
0.05647338926792145,
-0.012984144501388073,
-0.10607833415269852,
0.001325748860836029,
0.11249025166034698,
-0.15488186478614807,
0.09009400010108948,
0.019565308466553688,
0.021598869934678078,
-0.07895628362894058,
-0.061620134860277176,
-0.1654689908027649,
-0.1169271320104599,
0.08349082618951797,
-0.05456816405057907,
0.1620735377073288,
-0.005745080299675465,
-0.028024809435009956,
0.02269270084798336,
0.09749142825603485,
-0.1999492347240448,
-0.0600736141204834,
-0.1337854266166687,
-0.005556456744670868,
0.06501886248588562,
-0.052547793835401535,
-0.0024811155162751675,
-0.010018768720328808,
0.049065038561820984,
-0.02493216097354889,
-0.03837835416197777,
-0.0033630153629928827,
-0.06822104752063751,
-0.19750262796878815,
-0.07614567130804062,
0.12471165508031845,
0.1385277509689331,
0.07123854756355286,
-0.0078080883249640465,
0.09091668576002121,
-0.007266663480550051,
-0.07495227456092834,
0.06988322734832764,
0.19859956204891205,
0.19430668652057648,
0.04764570668339729,
-0.10115563124418259,
-0.07833407074213028,
-0.10913380235433578,
-0.0742587223649025,
0.02161378413438797,
0.15249493718147278,
-0.02302938513457775,
0.11754067987203598,
0.2000795304775238,
-0.144046351313591,
-0.1577949821949005,
-0.025659741833806038,
-0.0015912916278466582,
0.015473579056560993,
0.037327732890844345,
-0.22739532589912415,
0.02543884702026844,
0.09553276002407074,
-0.00415725726634264,
-0.0077216592617332935,
-0.02524302527308464,
-0.07562444359064102,
-0.0579925961792469,
0.10502734780311584,
-0.011436950415372849,
-0.11812696605920792,
-0.09785959869623184,
0.034022945910692215,
-0.22425879538059235,
0.15084534883499146,
-0.02560311183333397,
0.051925282925367355,
-0.07894361019134521,
-0.007292160764336586,
0.04398282244801521,
-0.0472104474902153,
0.09043067693710327,
0.015615061856806278,
0.058494433760643005,
-0.05194534361362457,
-0.029479598626494408,
0.048953864723443985,
-0.05137839540839195,
0.14087653160095215,
0.034659191966056824,
0.03621385991573334,
-0.11741911619901657,
-0.04476267844438553,
-0.086262546479702,
-0.026480043306946754,
-0.055972058326005936,
-0.08811455219984055,
-0.09330619126558304,
0.09176643192768097,
0.06211381033062935,
-0.011901716701686382,
0.08978674560785294,
-0.11284825205802917,
0.031645312905311584,
0.12531408667564392,
0.21946337819099426,
0.04717309772968292,
0.050040245056152344,
0.002298018429428339,
-0.07546903938055038,
0.05426168069243431,
-0.15221482515335083,
0.04244588688015938,
0.10947132110595703,
0.045083217322826385,
0.12598110735416412,
-0.0047251153737306595,
-0.16207140684127808,
0.007696866989135742,
0.046456195414066315,
-0.1427742838859558,
-0.15909352898597717,
0.011386016383767128,
0.008306356146931648,
-0.10289885848760605,
0.0370778925716877,
0.11750344932079315,
-0.03667902573943138,
-0.044893741607666016,
-0.02505074068903923,
0.08791693300008774,
-0.010384166613221169,
0.07648465037345886,
0.060345184057950974,
0.006636342965066433,
-0.03254052996635437,
0.046366240829229355,
0.12368091195821762,
-0.037296101450920105,
0.01812267303466797,
0.07560359686613083,
-0.056021690368652344,
-0.03910961374640465,
-0.05534239113330841,
0.10697460174560547,
-0.0020568715408444405,
-0.09154683351516724,
0.0163634791970253,
-0.020919879898428917,
0.006127736996859312,
0.18123868107795715,
-0.043546486645936966,
0.08080659061670303,
-0.006257131230086088,
0.02262759953737259,
-0.12170194089412689,
0.0699407085776329,
-0.04994597285985947,
0.048733118921518326,
0.03955511003732681,
0.0030190495308488607,
0.0013940862845629454,
-0.024797607213258743,
-0.016883239150047302,
0.012105342000722885,
-0.08134301751852036,
-0.06218457594513893,
-0.12473109364509583,
-0.0034516577143222094,
-0.0318341962993145,
-0.044947996735572815,
-0.044255148619413376,
-0.02574910596013069,
0.031115753576159477,
0.021122049540281296,
0.007687844801694155,
-0.02690659649670124,
-0.003973247017711401,
0.09542521834373474,
-0.1346300095319748,
0.016741501167416573,
0.07498716562986374,
-0.061544619500637054,
0.10800395905971527,
-0.0372963547706604,
-0.011723426170647144,
0.005579684861004353,
-0.08228806406259537,
0.09152814000844955,
-0.013580193743109703,
0.0575692281126976,
0.010108797810971737,
-0.1568678468465805,
-0.04980282858014107,
-0.033648502081632614,
-0.0642869770526886,
0.02806228958070278,
0.0854729637503624,
-0.04496874660253525,
0.0844370573759079,
0.036346107721328735,
-0.010538988746702671,
-0.0661943256855011,
0.023311417549848557,
-0.013416959904134274,
0.03572874143719673,
0.06690459698438644,
-0.009847162291407585,
0.03161855414509773,
-0.0979067012667656,
-0.009353243745863438,
0.04048917070031166,
0.010289317928254604,
-0.02966511994600296,
-0.02761578932404518,
0.045939892530441284,
0.05067553371191025,
0.12954942882061005,
-0.06023627892136574,
-0.07444154471158981,
0.027041401714086533,
0.03258385881781578,
-0.02429327555000782,
-0.061048299074172974,
0.01828855462372303,
-0.024881863966584206,
-0.06564082950353622,
-0.0224788598716259,
-0.018270427361130714,
-0.07115039974451065,
-0.04530232399702072,
0.16369011998176575,
0.12680616974830627,
0.13452371954917908,
-0.05516068637371063,
-0.03295564278960228,
-0.002741021104156971,
-0.15777872502803802,
-0.08303295820951462,
0.041336141526699066,
-0.02794780395925045,
-0.026605041697621346,
0.09537038952112198,
0.12408725172281265,
-0.15467298030853271,
0.07810227572917938,
0.02458297647535801,
-0.06636154651641846,
-0.06690260767936707,
-0.1625891625881195,
-0.024463407695293427,
0.01700489968061447,
-0.008286205120384693,
-0.12020324915647507,
0.11633578687906265,
0.08191687613725662,
0.0065017626620829105,
0.01778150163590908,
0.05780461058020592,
-0.042468003928661346,
-0.018249306827783585,
0.045138586312532425,
0.006233357358723879,
-0.006191282533109188,
-0.07123462855815887,
0.09500943869352341,
0.00872815866023302,
0.03346731513738632,
0.08763908594846725,
0.10518929362297058,
0.07063115388154984,
-0.023207010701298714,
-0.009751379489898682,
-0.10140326619148254,
0.030301177874207497,
-0.009651419706642628,
-0.01338917762041092,
0.2002401202917099,
0.07398470491170883,
0.009162590838968754,
0.04256215691566467,
0.18069227039813995,
-0.03318275138735771,
0.02175017260015011,
-0.09817393124103546,
0.14567525684833527,
0.0026605005841702223,
-0.0019977265037596226,
0.012432915158569813,
-0.10688932240009308,
0.060761552304029465,
0.1694641262292862,
-0.019997764378786087,
0.046026479452848434,
0.015454770997166634,
-0.007870892062783241,
0.029472162947058678,
0.05037838593125343,
0.0879836231470108,
0.04140274226665497,
0.2122718095779419,
-0.024005435407161713,
0.04088687524199486,
-0.058044590055942535,
0.006200222298502922,
-0.049966905266046524,
0.08264423906803131,
-0.05293884500861168,
-0.04683183878660202,
-0.06661255657672882,
0.04992826282978058,
-0.03759230673313141,
-0.26734253764152527,
0.04583965241909027,
-0.06179976835846901,
-0.04847806692123413,
-0.030032170936465263,
-0.014785662293434143,
-0.07490009069442749,
0.024089844897389412,
0.06514539569616318,
0.001335570472292602,
0.23351064324378967,
0.025695132091641426,
-0.05366655811667442,
0.008984958752989769,
0.03758111596107483,
-0.05084159970283508,
0.18442846834659576,
0.048678383231163025,
0.04862567037343979,
0.06955289095640182,
-0.030642105266451836,
-0.12222011387348175,
0.15086014568805695,
-0.034096039831638336,
-0.005008975509554148,
0.01558753103017807,
0.15846285223960876,
0.05814812332391739,
0.10457809269428253,
0.02512400783598423,
-0.04881741851568222,
0.029659375548362732,
0.08154221624135971,
-0.052567243576049805,
-0.10615748167037964,
0.06678364425897598,
-0.07793556898832321,
0.12312344461679459,
0.08644773066043854,
-0.04037511721253395,
0.012585083954036236,
-0.053465817123651505,
0.035473570227622986,
0.024468937888741493,
0.09261269122362137,
0.021055102348327637,
-0.1462676078081131,
0.06175852194428444,
-0.013010337948799133,
0.07352740317583084,
-0.2447599172592163,
-0.02227633260190487,
0.005334625951945782,
-0.0025814296677708626,
-0.05632486939430237,
0.04177007079124451,
0.015181978233158588,
0.008127149194478989,
-0.049141671508550644,
-0.1885983645915985,
0.024017736315727234,
0.1051139310002327,
-0.07631627470254898,
-0.026414217427372932
] |
null | null |
transformers
|
## Malayalam news classifier
### Overview
This model is trained on top of [MalayalamBert](https://huggingface.co/eliasedwin7/MalayalamBERT) for the task of classifying Malayalam news headlines. Presently, the following news categories are supported:
* Business
* Sports
* Entertainment
### Dataset
The dataset used for training this model can be found [here](https://www.kaggle.com/disisbig/malyalam-news-dataset).
### Using the model with HF pipeline
```python
from transformers import pipeline
news_headline = "ക്രിപ്റ്റോ ഇടപാടുകളുടെ വിവരങ്ങൾ ആവശ്യപ്പെട്ട് ആദായനികുതി വകുപ്പ് നോട്ടീസയച്ചു"
model = pipeline(task="text-classification", model="bipin/malayalam-news-classifier")
model(news_headline)
# Output
# [{'label': 'business', 'score': 0.9979357123374939}]
```
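If the pipeline abstraction is not wanted, the same prediction can be reproduced with the lower-level classes. The sketch below is an assumption based on the standard sequence-classification head exposed by the checkpoint; label names come from `model.config.id2label`:

```python
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

model_name = "bipin/malayalam-news-classifier"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name)

news_headline = "ക്രിപ്റ്റോ ഇടപാടുകളുടെ വിവരങ്ങൾ ആവശ്യപ്പെട്ട് ആദായനികുതി വകുപ്പ് നോട്ടീസയച്ചു"
inputs = tokenizer(news_headline, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits

# softmax over the class logits reproduces the pipeline's score
probs = torch.softmax(logits, dim=-1)
pred_id = int(probs.argmax(dim=-1))
print(model.config.id2label[pred_id], float(probs[0, pred_id]))
```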
### Contact
For feedback and questions, feel free to get in touch via Twitter: [@bkrish_](https://twitter.com/bkrish_)
|
{"license": "mit", "tags": ["text-classification", "roberta", "malayalam", "pytorch"], "widget": [{"text": "2032 \u0d12\u0d33\u0d3f\u0d2e\u0d4d\u0d2a\u0d3f\u0d15\u0d4d\u200c\u0d38\u0d3f\u0d28\u0d4d \u0d2c\u0d4d\u0d30\u0d3f\u0d38\u0d4d\u200c\u0d2c\u0d46\u0d2f\u0d4d\u0d28\u0d4d\u200d \u0d35\u0d47\u0d26\u0d3f\u0d2f\u0d3e\u0d15\u0d41\u0d02; \u0d17\u0d46\u0d2f\u0d3f\u0d02\u0d38\u0d3f\u0d28\u0d4d \u0d35\u0d47\u0d26\u0d3f\u0d2f\u0d3e\u0d15\u0d41\u0d28\u0d4d\u0d28 \u0d2e\u0d42\u0d28\u0d4d\u0d28\u0d3e\u0d2e\u0d24\u0d4d\u0d24\u0d46 \u0d13\u0d38\u0d4d\u200c\u0d1f\u0d4d\u0d30\u0d47\u0d32\u0d3f\u0d2f\u0d28\u0d4d\u200d \u0d28\u0d17\u0d30\u0d02"}]}
|
text-classification
|
bipin/malayalam-news-classifier
|
[
"transformers",
"pytorch",
"roberta",
"text-classification",
"malayalam",
"license:mit",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #roberta #text-classification #malayalam #license-mit #autotrain_compatible #endpoints_compatible #region-us
|
## Malayalam news classifier
### Overview
This model is trained on top of MalayalamBert for the task of classifying malayalam news headlines. Presently, the following news categories are supported:
* Business
* Sports
* Entertainment
### Dataset
The dataset used for training this model can be found here.
### Using the model with HF pipeline
### Contact
For feedback and questions, feel free to contact via twitter @bkrish_
|
[
"## Malayalam news classifier",
"### Overview\n\nThis model is trained on top of MalayalamBert for the task of classifying malayalam news headlines. Presently, the following news categories are supported:\n\n* Business\n* Sports\n* Entertainment",
"### Dataset\n\nThe dataset used for training this model can be found here.",
"### Using the model with HF pipeline",
"### Contact\n\nFor feedback and questions, feel free to contact via twitter @bkrish_"
] |
[
"TAGS\n#transformers #pytorch #roberta #text-classification #malayalam #license-mit #autotrain_compatible #endpoints_compatible #region-us \n",
"## Malayalam news classifier",
"### Overview\n\nThis model is trained on top of MalayalamBert for the task of classifying malayalam news headlines. Presently, the following news categories are supported:\n\n* Business\n* Sports\n* Entertainment",
"### Dataset\n\nThe dataset used for training this model can be found here.",
"### Using the model with HF pipeline",
"### Contact\n\nFor feedback and questions, feel free to contact via twitter @bkrish_"
] |
[
46,
5,
46,
17,
11,
19
] |
[
"passage: TAGS\n#transformers #pytorch #roberta #text-classification #malayalam #license-mit #autotrain_compatible #endpoints_compatible #region-us \n## Malayalam news classifier### Overview\n\nThis model is trained on top of MalayalamBert for the task of classifying malayalam news headlines. Presently, the following news categories are supported:\n\n* Business\n* Sports\n* Entertainment### Dataset\n\nThe dataset used for training this model can be found here.### Using the model with HF pipeline### Contact\n\nFor feedback and questions, feel free to contact via twitter @bkrish_"
] |
[
-0.03577365726232529,
-0.09659107774496078,
-0.0009895449038594961,
0.03769412636756897,
0.1468673199415207,
-0.0006949059315957129,
0.12144555151462555,
0.08519086241722107,
-0.044376738369464874,
-0.09724986553192139,
0.11390210688114166,
0.04985933378338814,
0.021076194941997528,
0.1712225079536438,
0.0005005057901144028,
-0.3476299047470093,
-0.04881134256720543,
0.001768935238942504,
-0.024063048884272575,
0.14426009356975555,
0.12382996082305908,
-0.040569476783275604,
0.08359183371067047,
0.02257659286260605,
-0.12817226350307465,
0.05539627745747566,
-0.04779526963829994,
-0.09507957845926285,
0.12649932503700256,
0.00503295985981822,
0.06929458677768707,
0.06967243552207947,
0.06258738040924072,
-0.10369852185249329,
0.07406916469335556,
-0.03902166709303856,
-0.016837814822793007,
0.023684101179242134,
-0.01010120939463377,
-0.005647488869726658,
0.2385547161102295,
-0.03142797574400902,
0.0424262136220932,
0.01816781796514988,
-0.1176472008228302,
-0.08300480246543884,
-0.07084260135889053,
0.03133745491504669,
0.1598915457725525,
0.15138092637062073,
-0.062193889170885086,
0.14452117681503296,
-0.15535582602024078,
0.09043853729963303,
0.028395678848028183,
-0.23739971220493317,
-0.021076884120702744,
0.11215685307979584,
-0.013273051008582115,
0.010273450054228306,
-0.03317992389202118,
0.08105156570672989,
0.0020344650838524103,
0.053614821285009384,
0.023617316037416458,
-0.07752121984958649,
-0.021503865718841553,
-0.04499103128910065,
-0.04608994722366333,
0.0476960688829422,
0.13688139617443085,
-0.036109670996665955,
-0.008131123147904873,
-0.05976562947034836,
0.054473910480737686,
0.00666154595091939,
-0.04082779213786125,
0.04230792075395584,
-0.06930729746818542,
0.01658516377210617,
-0.0006857865955680609,
-0.007960665971040726,
-0.10200225561857224,
-0.015353209339082241,
-0.07391127943992615,
0.061152663081884384,
0.01457864511758089,
0.0274411141872406,
-0.20992615818977356,
0.08768730610609055,
0.0664663314819336,
-0.14319847524166107,
0.005981402471661568,
-0.075685515999794,
0.013972237706184387,
0.007686145603656769,
-0.013911724090576172,
-0.038752395659685135,
0.02799381874501705,
-0.06673294305801392,
-0.04922236502170563,
0.038792725652456284,
0.04232202097773552,
0.027739817276597023,
0.06579341739416122,
0.01618420146405697,
-0.13069508969783783,
-0.02272716350853443,
0.09633351862430573,
-0.01989203691482544,
0.02171211875975132,
0.009736103937029839,
-0.05419330671429634,
0.04162098467350006,
-0.01247416902333498,
0.04926923289895058,
-0.07516039907932281,
0.17818735539913177,
0.017969882115721703,
-0.007903636433184147,
0.017636148259043694,
-0.07307995855808258,
-0.0823194682598114,
-0.018226556479930878,
-0.09696190059185028,
-0.0762791782617569,
0.060172125697135925,
0.032615408301353455,
0.011866770684719086,
0.1479882448911667,
-0.045228056609630585,
-0.020422814413905144,
-0.02954701893031597,
-0.05470505729317665,
0.0014327216194942594,
-0.13220344483852386,
0.04344025254249573,
-0.10212717950344086,
-0.23972360789775848,
0.01136633288115263,
0.01604798436164856,
-0.07151571661233902,
-0.08616631478071213,
-0.06335852295160294,
-0.021389901638031006,
-0.005660129711031914,
-0.022704895585775375,
0.12966416776180267,
-0.06470300257205963,
0.08258560299873352,
-0.014366958290338516,
0.11972340941429138,
-0.0825338065624237,
0.053669579327106476,
-0.0702480599284172,
0.03217209875583649,
-0.009663548320531845,
0.15014785528182983,
-0.1142023503780365,
0.09755411744117737,
-0.032457783818244934,
-0.0518544502556324,
-0.045563310384750366,
0.048033107072114944,
0.011635635048151016,
0.16516995429992676,
-0.12649370729923248,
-0.08695642650127411,
0.043638862669467926,
-0.030957695096731186,
-0.15607592463493347,
0.019402405247092247,
-0.0908142700791359,
0.2339058220386505,
0.08298202604055405,
0.076615110039711,
0.09114155173301697,
-0.036250848323106766,
0.024790089577436447,
0.04185612499713898,
-0.07322069257497787,
0.0034961956553161144,
0.044903066009283066,
0.07784761488437653,
-0.08744784444570541,
0.09191230684518814,
-0.00771327642723918,
0.10938166081905365,
-0.0727948471903801,
-0.023001056164503098,
0.07760702073574066,
-0.025560444220900536,
0.05309556797146797,
0.046283405274152756,
0.16872543096542358,
-0.011791995726525784,
-0.05223557725548744,
0.1060614064335823,
0.0896480605006218,
0.02397218905389309,
-0.04005984589457512,
-0.1510007679462433,
0.04315723851323128,
-0.0016041091876104474,
0.039092447608709335,
-0.15365438163280487,
-0.005435221828520298,
-0.05237192660570145,
0.1106593906879425,
0.1241750717163086,
0.0385991595685482,
-0.005741252098232508,
-0.027575472369790077,
-0.044438205659389496,
0.04509653151035309,
0.0843501165509224,
0.03841549530625343,
-0.04747265577316284,
-0.14231401681900024,
0.08370643854141235,
-0.04234369844198227,
0.2252844125032425,
-0.13082686066627502,
0.025221623480319977,
0.15393401682376862,
0.09753890335559845,
0.006668883841484785,
0.09017273783683777,
0.10053692758083344,
0.11195261031389236,
0.038584236055612564,
-0.01402406394481659,
0.10310070216655731,
0.002604719251394272,
-0.07241739332675934,
0.0831792950630188,
-0.05476238578557968,
0.06368044018745422,
0.10986527800559998,
-0.09853857755661011,
-0.038906291127204895,
0.08831224590539932,
-0.03771844506263733,
-0.018466953188180923,
-0.02962898463010788,
0.14832240343093872,
0.181566521525383,
0.046613700687885284,
0.17438644170761108,
-0.05016171187162399,
-0.09780728816986084,
0.008982157334685326,
-0.07837998867034912,
0.04520796239376068,
0.04666561633348465,
0.0740494504570961,
-0.28416708111763,
0.06909357756376266,
0.015329469926655293,
0.05564573034644127,
0.2778698801994324,
-0.010581615380942822,
0.0499701164662838,
-0.01976834423840046,
-0.11644387245178223,
-0.07962414622306824,
0.10070712864398956,
-0.17388933897018433,
-0.01396607980132103,
-0.002543211216107011,
-0.04743237793445587,
0.08280354738235474,
-0.04435691609978676,
-0.08399771898984909,
0.006615950260311365,
0.042235538363456726,
-0.14913888275623322,
0.20421749353408813,
-0.03489188849925995,
0.052454832941293716,
0.009676958434283733,
-0.016691511496901512,
0.05500997602939606,
-0.03736231103539467,
-0.13258035480976105,
0.19936738908290863,
-0.06051070988178253,
-0.3161873519420624,
-0.12339948117733002,
-0.1648217886686325,
-0.0077984994277358055,
-0.05924127623438835,
0.06759561598300934,
-0.13768215477466583,
-0.018818387761712074,
-0.0031332294456660748,
-0.01406654343008995,
0.07377778738737106,
0.001291983644478023,
-0.06088361516594887,
0.012882784008979797,
-0.025185467675328255,
-0.0869952067732811,
-0.01686810702085495,
-0.040067605674266815,
-0.05686040595173836,
0.05263683199882507,
-0.11864631623029709,
0.006240708753466606,
0.10206524282693863,
-0.05401703342795372,
0.08214619755744934,
-0.03979906067252159,
0.23365609347820282,
-0.12864698469638824,
0.0645255520939827,
0.04191664978861809,
-0.03406146913766861,
0.023651055991649628,
0.23004558682441711,
0.04811907559633255,
-0.0334591343998909,
0.06804748624563217,
0.026237135753035545,
-0.029719889163970947,
-0.2011002153158188,
-0.11887223273515701,
-0.028328947722911835,
-0.009952899068593979,
-0.10141757130622864,
0.025335075333714485,
0.06979318708181381,
0.05138390511274338,
0.06290236115455627,
0.05051945149898529,
0.03749451786279678,
0.04849805682897568,
0.16653504967689514,
0.004831136204302311,
0.08504987508058548,
-0.09036003053188324,
0.001705470960587263,
0.07855433225631714,
-0.07172352820634842,
0.15936917066574097,
-0.011916682124137878,
-0.04029349610209465,
0.16047176718711853,
0.05257835239171982,
0.16066975891590118,
0.01822345331311226,
-0.10080580413341522,
-0.022785617038607597,
-0.006383923348039389,
0.012279057875275612,
-0.01948397420346737,
-0.0370347760617733,
-0.09609577059745789,
-0.06690319627523422,
-0.011547846719622612,
0.06002990901470184,
0.009870093315839767,
0.11873365938663483,
0.037606511265039444,
-0.12873412668704987,
-0.07258091866970062,
0.008144826628267765,
-0.047415778040885925,
-0.04408938065171242,
-0.008031095378100872,
-0.0200003981590271,
-0.1579550951719284,
0.09321150928735733,
-0.025040477514266968,
0.10931049287319183,
-0.0445743054151535,
0.04569818824529648,
-0.02748613804578781,
-0.17944012582302094,
-0.07659486681222916,
0.14173920452594757,
-0.18365202844142914,
0.38890913128852844,
0.020990721881389618,
0.03306083753705025,
-0.017470261082053185,
-0.08703794330358505,
0.08370968699455261,
0.257642537355423,
0.060505714267492294,
0.009005073457956314,
0.04841103404760361,
-0.21486857533454895,
-0.02542111836373806,
0.04516316577792168,
-0.006930340547114611,
-0.0032927868887782097,
0.07838792353868484,
0.021759536117315292,
0.0017871332820504904,
-0.016929062083363533,
-0.07191510498523712,
-0.1108374297618866,
-0.05582102760672569,
0.013461518101394176,
-0.09998206794261932,
0.059082165360450745,
-0.06552518904209137,
-0.09255661070346832,
0.04352573677897453,
-0.039090897887945175,
-0.10962355136871338,
-0.1347474902868271,
-0.08513212203979492,
0.1434938758611679,
-0.10495959222316742,
-0.11623115092515945,
0.008131406269967556,
0.011576907709240913,
0.020789196714758873,
0.024980826303362846,
-0.0705198422074318,
0.041399698704481125,
-0.001627136953175068,
-0.10015740990638733,
0.010306209325790405,
0.08833327144384384,
0.07959797978401184,
-0.00941736064851284,
0.06584581732749939,
-0.007536349818110466,
0.034256719052791595,
-0.11170109361410141,
-0.04322288930416107,
-0.04636484757065773,
-0.0006813372019678354,
0.032533932477235794,
-0.046971678733825684,
-0.1365661323070526,
-0.05387505516409874,
-0.05986872687935829,
0.19005286693572998,
0.22416165471076965,
-0.05937029793858528,
0.1546185314655304,
0.18739822506904602,
-0.08239410817623138,
-0.2570172846317291,
-0.08692428469657898,
0.014528491534292698,
0.01517229899764061,
0.037482183426618576,
-0.1728874295949936,
0.1099139153957367,
-0.011178846471011639,
-0.00784754566848278,
-0.19224177300930023,
-0.14866282045841217,
-0.14572420716285706,
0.16556885838508606,
0.0112213846296072,
0.2782208323478699,
-0.06807383894920349,
-0.0325452983379364,
-0.08634750545024872,
-0.08013612776994705,
0.06703097373247147,
-0.16426247358322144,
0.0854288786649704,
-0.08806274086236954,
0.14698612689971924,
-0.03845091164112091,
0.01774783805012703,
0.09459660947322845,
-0.02114497683942318,
0.056922223418951035,
-0.12150360643863678,
-0.12433715909719467,
0.2139880359172821,
0.016591552644968033,
0.10630936920642853,
-0.04074132815003395,
0.06581617891788483,
-0.3049073815345764,
-0.020096685737371445,
-0.11075080186128616,
0.012803401798009872,
-0.03435208648443222,
-0.047942593693733215,
-0.027988366782665253,
0.09884597361087799,
-0.0046123056672513485,
-0.043417856097221375,
-0.036021165549755096,
-0.09518735110759735,
0.03187638893723488,
-0.006388376466929913,
0.22377146780490875,
-0.027914633974432945,
0.060124900192022324,
-0.05536071956157684,
-0.039790451526641846,
0.015551958233118057,
-0.18522624671459198,
-0.06629055738449097,
0.0382721871137619,
0.027415096759796143,
0.10076671838760376,
0.03779520466923714,
-0.08663301169872284,
0.07378203421831131,
0.1500789225101471,
-0.08952224254608154,
-0.07482387125492096,
-0.07644832134246826,
0.08140306919813156,
0.053749483078718185,
-0.004769572522491217,
0.09601348638534546,
-0.0688510611653328,
-0.050090670585632324,
0.04843541234731674,
-0.005820911843329668,
-0.07592561095952988,
0.06351616978645325,
0.09336385875940323,
0.05847853422164917,
-0.09485846757888794,
0.0638670027256012,
0.0547713041305542,
0.06379180401563644,
-0.007284571882337332,
0.20296238362789154,
-0.21456462144851685,
-0.10430608689785004,
-0.02878447435796261,
0.26068148016929626,
0.0013396242866292596,
-0.04205544292926788,
-0.03704608976840973,
-0.07146747410297394,
0.03249751776456833,
0.28640326857566833,
0.05349790304899216,
0.005754292942583561,
-0.11306197196245193,
-0.04569351300597191,
0.010661848820745945,
0.1460285633802414,
0.07829124480485916,
-0.03535447642207146,
-0.0976579412817955,
-0.02501813881099224,
0.0482025071978569,
0.12742282450199127,
-0.11339081823825836,
-0.10932618379592896,
-0.041031304746866226,
0.022405577823519707,
-0.1715216040611267,
0.00807047076523304,
-0.05215195193886757,
-0.024395333603024483,
-0.052625663578510284,
-0.03577117621898651,
-0.04510129615664482,
-0.028761276975274086,
-0.052031729370355606,
0.04422106221318245,
-0.009099844843149185,
0.08429330587387085,
-0.1117825135588646,
-0.04443678259849548,
0.0677647739648819,
-0.005578940734267235,
0.10156477987766266,
0.03284761309623718,
-0.006600739900022745,
0.11669736355543137,
-0.0035702527966350317,
-0.03330286592245102,
0.010671243071556091,
-0.031641535460948944,
0.07699636369943619,
-0.09110403060913086,
-0.0042475503869354725,
-0.022589463740587234,
0.08290299773216248,
0.10373494029045105,
0.16063255071640015,
-0.05688850209116936,
0.032263074070215225,
-0.003796905744820833,
0.034811146557331085,
-0.06557256728410721,
0.04560059309005737,
0.022055594250559807,
0.1231379359960556,
0.2302858531475067,
-0.09253084659576416,
0.03175992891192436,
-0.07302159816026688,
-0.00383277446962893,
-0.04694955796003342,
-0.13822633028030396,
-0.07732216268777847,
-0.10046271234750748,
0.015092078596353531,
-0.050718966871500015,
0.20539270341396332,
0.113290935754776,
-0.06845692545175552,
0.06665794551372528,
0.06275579333305359,
-0.005104538518935442,
-0.026365794241428375,
0.1772526651620865,
0.053995367139577866,
0.05729550123214722,
-0.034527845680713654,
0.026978764683008194,
-0.10800491273403168,
-0.039190568029880524,
0.09591411799192429,
0.03490934520959854,
0.06588249653577805,
-0.013666504994034767,
0.12035855650901794,
0.0587628148496151,
-0.04273802787065506,
-0.19668269157409668,
-0.1008288636803627,
0.0577714629471302,
-0.07363773882389069,
0.08302810043096542,
0.19529035687446594,
-0.11764732748270035,
0.03081914409995079,
-0.05178924649953842,
-0.11169786006212234,
-0.08846587687730789,
-0.12364363670349121,
-0.06474372744560242,
-0.10760825127363205,
0.03817867487668991,
-0.0893210768699646,
-0.06066376715898514,
0.12496570497751236,
0.07726755738258362,
-0.04665759950876236,
0.22699202597141266,
-0.09707750380039215,
-0.03242941573262215,
0.11329003423452377,
-0.042099785059690475,
-0.058099403977394104,
-0.12408498674631119,
0.1553635597229004,
-0.10398659110069275,
0.02485707774758339,
-0.05758470296859741,
-0.006059370934963226,
-0.11648432165384293,
-0.022763438522815704,
-0.062448348850011826,
-0.08693323284387589,
0.01299089565873146,
0.05602357164025307,
0.023672539740800858,
0.12170897424221039,
0.009964833967387676,
0.03278140723705292,
0.015719667077064514,
0.23328502476215363,
0.0453466922044754,
-0.19719573855400085,
-0.0981014296412468,
0.19385415315628052,
0.03160174563527107,
0.022843191400170326,
-0.0031962792854756117,
-0.042018283158540726,
0.010360482148826122,
0.21849645674228668,
0.2794381380081177,
-0.054377615451812744,
0.03091403841972351,
-0.05958943068981171,
0.03873156011104584,
-0.004694556817412376,
0.16050605475902557,
0.030301421880722046,
0.13036733865737915,
-0.1278757005929947,
0.010223261080682278,
-0.08113320916891098,
-0.08241000771522522,
-0.0406472273170948,
0.012456288561224937,
0.14214655756950378,
-0.07770591229200363,
-0.06746313720941544,
0.19766949117183685,
-0.20915621519088745,
-0.12834890186786652,
-0.11964273452758789,
-0.0593067929148674,
-0.1253032386302948,
-0.09105736017227173,
-0.09740415960550308,
0.026728661730885506,
0.0520271472632885,
0.02869606763124466,
0.06197727471590042,
0.062317948788404465,
0.0612124539911747,
-0.09853597730398178,
-0.0984400138258934,
0.20871417224407196,
-0.033555760979652405,
0.006787971593439579,
-0.06960907578468323,
0.11003165692090988,
0.05862651392817497,
-0.013893716968595982,
-0.02643444947898388,
0.10396412760019302,
0.01862364076077938,
0.1190347969532013,
-0.04182180389761925,
0.10305341333150864,
0.04281139373779297,
-0.12496595829725266,
0.037613172084093094,
-0.12849290668964386,
0.09588833153247833,
-0.16925020515918732,
-0.06081182509660721,
-0.07707038521766663,
0.11733271181583405,
-0.002700298558920622,
0.06465326249599457,
0.15494604408740997,
-0.06027694046497345,
0.013915922492742538,
-0.057763516902923584,
0.0015269313007593155,
0.0017053615301847458,
-0.14544761180877686,
-0.04433424025774002,
-0.10760273784399033,
-0.024149354547262192,
-0.029184788465499878,
-0.005333393812179565,
-0.26992931962013245,
0.01674925908446312,
-0.0826367512345314,
0.01162927970290184,
0.014796038158237934,
0.16988277435302734,
0.06147749722003937,
0.0401214100420475,
-0.016750440001487732,
-0.12083283811807632,
-0.04362202808260918,
0.047572892159223557,
-0.20668701827526093,
-0.10912665724754333
] |
null | null |
transformers
|
# Wav2vec 2.0 large VoxRex Swedish (C)
Experiment with LM model.
**Disclaimer:** This is a work in progress. See [VoxRex](https://huggingface.co/KBLab/wav2vec2-large-voxrex) for more details.
**Update 2022-01-10:** Updated to VoxRex-C version.
Fine-tuned version of KB's [VoxRex large](https://huggingface.co/KBLab/wav2vec2-large-voxrex) model using Swedish radio broadcasts, NST and Common Voice data. Evaluation gives the following results: WER for the NST + Common Voice test set (2% of total sentences) is **2.5%**; WER for the Common Voice test set is **8.49%** without a language model and **7.37%** with a 4-gram language model.
When using this model, make sure that your speech input is sampled at 16kHz.
# Performance\*

<center><del>*<i>Chart shows performance without the additional 20k steps of Common Voice fine-tuning</i></del></center>
## Training
This model has been fine-tuned for 120000 updates on NST + CommonVoice<del> and then for an additional 20000 updates on CommonVoice only. The additional fine-tuning on CommonVoice hurts performance on the NST+CommonVoice test set somewhat and, unsurprisingly, improves it on the CommonVoice test set. It seems to perform generally better though [citation needed]</del>.

## Usage
The model can be used directly (without a language model) as follows:
```python
import torch
import torchaudio
from datasets import load_dataset
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor

test_dataset = load_dataset("common_voice", "sv-SE", split="test[:2%]")
processor = Wav2Vec2Processor.from_pretrained("KBLab/wav2vec2-large-voxrex-swedish")
model = Wav2Vec2ForCTC.from_pretrained("KBLab/wav2vec2-large-voxrex-swedish")
resampler = torchaudio.transforms.Resample(48_000, 16_000)

# Preprocessing the dataset:
# read the audio files as arrays and resample them to 16 kHz.
def speech_file_to_array_fn(batch):
    speech_array, sampling_rate = torchaudio.load(batch["path"])
    batch["speech"] = resampler(speech_array).squeeze().numpy()
    return batch

test_dataset = test_dataset.map(speech_file_to_array_fn)
inputs = processor(test_dataset["speech"][:2], sampling_rate=16_000, return_tensors="pt", padding=True)

with torch.no_grad():
    logits = model(inputs.input_values, attention_mask=inputs.attention_mask).logits

predicted_ids = torch.argmax(logits, dim=-1)
print("Prediction:", processor.batch_decode(predicted_ids))
print("Reference:", test_dataset["sentence"][:2])
```
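The WER figures above mention a 4-gram language model, but the snippet only covers plain CTC decoding. Below is a hedged sketch of what LM-boosted decoding could look like with `Wav2Vec2ProcessorWithLM` (requires `pyctcdecode` and `kenlm`); it assumes an LM-enabled processor is published under `birgermoell/lm-swedish`, which this card does not confirm, and it reuses `test_dataset` from the snippet above:

```python
# Hypothetical sketch: beam-search decoding with a 4-gram LM.
# Assumes `birgermoell/lm-swedish` ships a Wav2Vec2ProcessorWithLM (unverified).
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2ProcessorWithLM

lm_processor = Wav2Vec2ProcessorWithLM.from_pretrained("birgermoell/lm-swedish")
model = Wav2Vec2ForCTC.from_pretrained("KBLab/wav2vec2-large-voxrex-swedish")

inputs = lm_processor(test_dataset["speech"][:2], sampling_rate=16_000, return_tensors="pt", padding=True)
with torch.no_grad():
    logits = model(inputs.input_values, attention_mask=inputs.attention_mask).logits

# batch_decode runs pyctcdecode beam search with the bundled n-gram model
print("LM prediction:", lm_processor.batch_decode(logits.numpy()).text)
```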
|
{"language": "sv", "license": "cc0-1.0", "tags": ["audio", "automatic-speech-recognition", "speech"], "datasets": ["common_voice", "NST Swedish ASR Database", "P4"], "metrics": ["wer"], "model-index": [{"name": "Wav2vec 2.0 large VoxRex Swedish", "results": [{"task": {"type": "automatic-speech-recognition", "name": "Speech Recognition"}, "dataset": {"name": "Common Voice", "type": "common_voice", "args": "sv-SE"}, "metrics": [{"type": "wer", "value": 9.914, "name": "Test WER"}]}]}]}
|
automatic-speech-recognition
|
birgermoell/lm-swedish
|
[
"transformers",
"pytorch",
"wav2vec2",
"automatic-speech-recognition",
"audio",
"speech",
"sv",
"license:cc0-1.0",
"model-index",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"sv"
] |
TAGS
#transformers #pytorch #wav2vec2 #automatic-speech-recognition #audio #speech #sv #license-cc0-1.0 #model-index #endpoints_compatible #region-us
|
# Wav2vec 2.0 large VoxRex Swedish (C)
Experiment with LM model.
Disclaimer: This is a work in progress. See VoxRex for more details.
Update 2022-01-10: Updated to VoxRex-C version.
Finetuned version of KBs VoxRex large model using Swedish radio broadcasts, NST and Common Voice data. Evalutation without a language model gives the following: WER for NST + Common Voice test set (2% of total sentences) is 2.5%. WER for Common Voice test set is 8.49% directly and 7.37% with a 4-gram language model.
When using this model, make sure that your speech input is sampled at 16kHz.
# Performance\*
!Comparison
<center><del>*<i>Chart shows performance without the additional 20k steps of Common Voice fine-tuning</i></del></center>
## Training
This model has been fine-tuned for 120000 updates on NST + CommonVoice<del> and then for an additional 20000 updates on CommonVoice only. The additional fine-tuning on CommonVoice hurts performance on the NST+CommonVoice test set somewhat and, unsurprisingly, improves it on the CommonVoice test set. It seems to perform generally better though [citation needed]</del>.
!WER during training
## Usage
The model can be used directly (without a language model) as follows:
|
[
"# Wav2vec 2.0 large VoxRex Swedish (C)\n\nExperiment with LM model. \n\nDisclaimer: This is a work in progress. See VoxRex for more details.\n\nUpdate 2022-01-10: Updated to VoxRex-C version.\n\nFinetuned version of KBs VoxRex large model using Swedish radio broadcasts, NST and Common Voice data. Evalutation without a language model gives the following: WER for NST + Common Voice test set (2% of total sentences) is 2.5%. WER for Common Voice test set is 8.49% directly and 7.37% with a 4-gram language model.\n\nWhen using this model, make sure that your speech input is sampled at 16kHz.",
"# Performance\\*\n\n!Comparison\n<center><del>*<i>Chart shows performance without the additional 20k steps of Common Voice fine-tuning</i></del></center>",
"## Training\nThis model has been fine-tuned for 120000 updates on NST + CommonVoice<del> and then for an additional 20000 updates on CommonVoice only. The additional fine-tuning on CommonVoice hurts performance on the NST+CommonVoice test set somewhat and, unsurprisingly, improves it on the CommonVoice test set. It seems to perform generally better though [citation needed]</del>.\n\n!WER during training",
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
"TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #audio #speech #sv #license-cc0-1.0 #model-index #endpoints_compatible #region-us \n",
"# Wav2vec 2.0 large VoxRex Swedish (C)\n\nExperiment with LM model. \n\nDisclaimer: This is a work in progress. See VoxRex for more details.\n\nUpdate 2022-01-10: Updated to VoxRex-C version.\n\nFinetuned version of KBs VoxRex large model using Swedish radio broadcasts, NST and Common Voice data. Evalutation without a language model gives the following: WER for NST + Common Voice test set (2% of total sentences) is 2.5%. WER for Common Voice test set is 8.49% directly and 7.37% with a 4-gram language model.\n\nWhen using this model, make sure that your speech input is sampled at 16kHz.",
"# Performance\\*\n\n!Comparison\n<center><del>*<i>Chart shows performance without the additional 20k steps of Common Voice fine-tuning</i></del></center>",
"## Training\nThis model has been fine-tuned for 120000 updates on NST + CommonVoice<del> and then for an additional 20000 updates on CommonVoice only. The additional fine-tuning on CommonVoice hurts performance on the NST+CommonVoice test set somewhat and, unsurprisingly, improves it on the CommonVoice test set. It seems to perform generally better though [citation needed]</del>.\n\n!WER during training",
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
57,
155,
41,
101,
20
] |
[
"passage: TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #audio #speech #sv #license-cc0-1.0 #model-index #endpoints_compatible #region-us \n# Wav2vec 2.0 large VoxRex Swedish (C)\n\nExperiment with LM model. \n\nDisclaimer: This is a work in progress. See VoxRex for more details.\n\nUpdate 2022-01-10: Updated to VoxRex-C version.\n\nFinetuned version of KBs VoxRex large model using Swedish radio broadcasts, NST and Common Voice data. Evalutation without a language model gives the following: WER for NST + Common Voice test set (2% of total sentences) is 2.5%. WER for Common Voice test set is 8.49% directly and 7.37% with a 4-gram language model.\n\nWhen using this model, make sure that your speech input is sampled at 16kHz.# Performance\\*\n\n!Comparison\n<center><del>*<i>Chart shows performance without the additional 20k steps of Common Voice fine-tuning</i></del></center>## Training\nThis model has been fine-tuned for 120000 updates on NST + CommonVoice<del> and then for an additional 20000 updates on CommonVoice only. The additional fine-tuning on CommonVoice hurts performance on the NST+CommonVoice test set somewhat and, unsurprisingly, improves it on the CommonVoice test set. It seems to perform generally better though [citation needed]</del>.\n\n!WER during training## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
-0.07926260679960251,
0.10282541811466217,
-0.004340698942542076,
-0.023779572919011116,
0.04732692241668701,
0.011325010098516941,
-0.013057422824203968,
0.09180358052253723,
-0.031074464321136475,
0.12967608869075775,
0.04790427163243294,
0.00806579552590847,
0.05850596725940704,
0.016163595020771027,
0.04171126335859299,
-0.17682231962680817,
0.052579186856746674,
-0.07421708106994629,
0.17967508733272552,
0.04167947918176651,
0.09923677891492844,
-0.03466051444411278,
0.00418016966432333,
0.04941942170262337,
-0.0013603550614789128,
0.021894171833992004,
0.03956383839249611,
-0.02761927619576454,
0.14170630276203156,
0.02440270595252514,
0.0030508937779814005,
0.020604083314538002,
0.03132929652929306,
-0.21685121953487396,
0.020015699788928032,
0.09263169020414352,
0.04688859358429909,
0.029067877680063248,
0.08351031690835953,
0.046815212815999985,
0.1999102085828781,
-0.034031786024570465,
-0.038396935909986496,
0.061249762773513794,
-0.028677916154265404,
-0.056818872690200806,
-0.14410696923732758,
0.0246712788939476,
0.08409050852060318,
0.10189443081617355,
-0.05701446533203125,
0.12888765335083008,
-0.041729193180799484,
0.08758501708507538,
0.09669375419616699,
-0.25588297843933105,
0.0052905818447470665,
0.059252046048641205,
0.056266363710165024,
0.022651048377156258,
-0.06789417564868927,
0.06781134009361267,
0.0620897002518177,
0.0028353966772556305,
-0.09185465425252914,
0.028661556541919708,
-0.13009561598300934,
-0.049738623201847076,
-0.08782468736171722,
-0.05831914022564888,
0.15523600578308105,
0.0464591383934021,
-0.1141534298658371,
-0.11005280166864395,
-0.018375486135482788,
-0.13379797339439392,
0.015874959528446198,
-0.07169565558433533,
0.0016252407804131508,
0.02353256568312645,
0.037402719259262085,
-0.09568538516759872,
-0.1079210564494133,
-0.0398356094956398,
-0.004120672587305307,
-0.005334950052201748,
0.03628145158290863,
0.009062069468200207,
0.06206584721803665,
0.10470884293317795,
-0.24539975821971893,
-0.013761813752353191,
-0.04306074604392052,
-0.017413372173905373,
-0.15282629430294037,
-0.05183941870927811,
-0.05280369147658348,
-0.2554801106452942,
0.006460496690124273,
0.09585834294557571,
-0.01868283934891224,
0.03987805172801018,
-0.129079207777977,
-0.004831101279705763,
-0.005141633562743664,
0.11313329637050629,
-0.1252109855413437,
0.015123527497053146,
0.05708400160074234,
-0.03662073239684105,
0.07925809919834137,
-0.01590728387236595,
-0.018812270835042,
-0.023834632709622383,
0.10889653861522675,
0.07340274006128311,
-0.01039075292646885,
-0.02682514861226082,
-0.07757239043712616,
0.01927514746785164,
0.06929227709770203,
-0.10130524635314941,
0.03421499952673912,
-0.0021809227764606476,
0.04524371400475502,
0.11962816119194031,
0.07153317332267761,
0.05596539378166199,
-0.0953507348895073,
0.052913449704647064,
-0.007381103001534939,
-0.04235647991299629,
-0.015308153815567493,
-0.05191843956708908,
0.09267691522836685,
-0.06974063813686371,
-0.04091913253068924,
-0.05049533024430275,
-0.08773107081651688,
-0.05663531273603439,
-0.028446460142731667,
-0.021988432854413986,
-0.08740256726741791,
-0.01667695865035057,
-0.062175218015909195,
-0.001598301692865789,
-0.022598015144467354,
0.061154481023550034,
-0.02843133546411991,
0.02546871080994606,
-0.03820664435625076,
0.030238507315516472,
0.019526885822415352,
-0.0014591687358915806,
-0.04636858031153679,
0.02137329988181591,
-0.023463841527700424,
0.0857827439904213,
-0.09201852977275848,
-0.17040316760540009,
-0.06857423484325409,
0.003477643011137843,
-0.1269199103116989,
0.00947660394012928,
0.09452128410339355,
0.11049941182136536,
-0.3034838140010834,
-0.013600202277302742,
0.12827089428901672,
-0.11137626320123672,
0.030110342428088188,
0.19387665390968323,
0.03795963525772095,
-0.012326948344707489,
0.10106562823057175,
0.14104224741458893,
0.06846266239881516,
-0.21697182953357697,
-0.14703601598739624,
-0.011779229156672955,
-0.06852618604898453,
0.10053759068250656,
0.004978837911039591,
-0.10113025456666946,
0.09320547431707382,
0.042765356600284576,
0.02922658622264862,
-0.04295605048537254,
-0.01117260567843914,
-0.04987628385424614,
-0.05098864063620567,
-0.035710375756025314,
0.0690620094537735,
0.006863162387162447,
-0.043155770748853683,
-0.08478282392024994,
-0.15306177735328674,
-0.07639249414205551,
0.1095789447426796,
-0.0459752231836319,
0.0940924808382988,
-0.06522496789693832,
0.02248533070087433,
-0.07546684145927429,
0.00808407086879015,
-0.13769248127937317,
0.009534439072012901,
0.04116297885775566,
0.021755842491984367,
0.06408888846635818,
0.06592723727226257,
0.041862186044454575,
0.015046718530356884,
-0.060807257890701294,
0.023333100602030754,
-0.05326155945658684,
-0.008384176529943943,
-0.04848163574934006,
-0.10801343619823456,
-0.06635486334562302,
-0.029469218105077744,
0.15892699360847473,
-0.16643205285072327,
-0.06599590927362442,
0.10330072790384293,
0.17115019261837006,
-0.011539118364453316,
-0.11501959711313248,
0.039700768887996674,
0.015097799710929394,
-0.014073343947529793,
-0.02973915822803974,
-0.04301276057958603,
0.0001417048624716699,
-0.05076207220554352,
0.1281304657459259,
-0.12199653685092926,
-0.202925905585289,
0.042030639946460724,
0.10460368543863297,
-0.0432211235165596,
0.14869904518127441,
-0.040989603847265244,
-0.054298385977745056,
-0.07978139072656631,
-0.11533235013484955,
0.17197886109352112,
0.07886622846126556,
0.03642112761735916,
-0.05923504754900932,
0.016247855499386787,
0.02488594874739647,
-0.0562361478805542,
-0.046300191432237625,
0.04150304198265076,
0.004434641450643539,
0.013745106756687164,
-0.04758339375257492,
-0.09658825397491455,
-0.0945219174027443,
0.15855436027050018,
-0.020547758787870407,
-0.10503800958395004,
0.0032527800649404526,
-0.03787653148174286,
0.0323270820081234,
0.07408344000577927,
-0.032380931079387665,
0.06660003960132599,
0.039172831922769547,
0.009962891228497028,
0.05922942981123924,
-0.11401274055242538,
0.06041879579424858,
0.016855891793966293,
-0.08631327748298645,
0.0023643565364181995,
0.07964043319225311,
-0.034524936228990555,
0.05172779783606529,
-0.0663829818367958,
0.0011105731828138232,
-0.023508785292506218,
-0.011438538320362568,
-0.1400277018547058,
0.041449934244155884,
-0.09653233736753464,
-0.19449558854103088,
-0.19291171431541443,
0.08437252789735794,
-0.06351467221975327,
0.0016389021184295416,
0.04008202254772186,
-0.09375488758087158,
-0.0698084756731987,
-0.10400144755840302,
-0.033372268080711365,
-0.01039096713066101,
-0.028318772092461586,
-0.022410990670323372,
-0.02690897509455681,
0.05138532817363739,
-0.11724472045898438,
0.03196360543370247,
-0.04705760255455971,
-0.08374150097370148,
-0.034205641597509384,
0.050863511860370636,
0.09114867448806763,
0.16336709260940552,
0.006384409498423338,
-0.020361725240945816,
0.005482573062181473,
0.0923406109213829,
-0.10403887182474136,
0.0705164223909378,
0.14878720045089722,
-0.006827694363892078,
0.011068652383983135,
0.058649756014347076,
0.00833930354565382,
-0.03592008724808693,
-0.01507811713963747,
0.07493963837623596,
-0.06210993602871895,
-0.2755958139896393,
-0.10820811241865158,
-0.04407109320163727,
-0.05766628682613373,
-0.034611668437719345,
0.03949495404958725,
0.046566370874643326,
-0.03152605518698692,
-0.0661388412117958,
-0.008308961056172848,
0.037776555866003036,
0.0014200512086972594,
0.14480265974998474,
-0.010787926614284515,
0.03794245794415474,
-0.05176730826497078,
0.029499631375074387,
0.07194685190916061,
0.03172232583165169,
0.07315512001514435,
0.02052471786737442,
0.20114949345588684,
0.049718063324689865,
0.09760954976081848,
-0.0044318921864032745,
0.032614897936582565,
0.02321062795817852,
0.0049071707762777805,
0.024849966168403625,
-0.08757524937391281,
-0.056114278733730316,
0.002551720943301916,
0.17183396220207214,
-0.03931566700339317,
-0.02676311321556568,
0.0787249282002449,
0.015065113082528114,
0.19102618098258972,
0.07912730425596237,
-0.09002232551574707,
-0.07730818539857864,
-0.028940929099917412,
-0.08120877295732498,
-0.025414129719138145,
-0.0101148197427392,
0.10068323463201523,
-0.14371681213378906,
0.07370641827583313,
-0.013335321098566055,
0.06192625313997269,
-0.06466900557279587,
0.025368589907884598,
-0.08515745401382446,
0.10659372061491013,
0.03093300200998783,
0.08346930146217346,
-0.12718746066093445,
0.08897243440151215,
0.03491845354437828,
0.1648448407649994,
-0.0658411756157875,
0.032008636742830276,
-0.021990658715367317,
-0.04259010776877403,
0.1358959674835205,
-0.007838013581931591,
-0.03559710457921028,
0.013552234508097172,
-0.10525087267160416,
-0.009684490971267223,
0.05470316857099533,
0.07585165649652481,
0.05730750784277916,
0.017492108047008514,
-0.016836341470479965,
0.02016577683389187,
0.01864994503557682,
-0.21227751672267914,
-0.1407974511384964,
0.07701890915632248,
0.1174587532877922,
0.028500571846961975,
-0.04291732236742973,
-0.08483757078647614,
-0.12708404660224915,
0.18994785845279694,
-0.0794563964009285,
-0.007465033791959286,
-0.08964821696281433,
-0.006001138594001532,
0.23583823442459106,
-0.0077385674230754375,
-0.003731014207005501,
0.03067777119576931,
0.1867339313030243,
-0.0942305326461792,
-0.005740320775657892,
-0.05250506475567818,
-0.06266886740922928,
-0.14353498816490173,
-0.0044885971583426,
0.2576979100704193,
-0.032771702855825424,
0.05868275836110115,
0.03751567378640175,
0.024638982489705086,
0.04591437429189682,
-0.07695160806179047,
0.05305911973118782,
0.12357936799526215,
-0.1499233990907669,
0.07056962698698044,
0.04477187246084213,
-0.03923134505748749,
-0.07740100473165512,
-0.0683293417096138,
0.1076815277338028,
0.1733619123697281,
-0.06757976114749908,
0.1604584902524948,
0.03850105032324791,
-0.1245829164981842,
-0.20290398597717285,
-0.06477683037519455,
0.1036778911948204,
0.07062402367591858,
0.04452212527394295,
-0.030771905556321144,
0.03701742738485336,
0.08030102401971817,
-0.02731570228934288,
-0.023520642891526222,
-0.1782340407371521,
-0.15183024108409882,
0.04052345082163811,
-0.07449119538068771,
0.004044902976602316,
0.004948068410158157,
-0.05588531866669655,
-0.04108138754963875,
-0.03160689398646355,
0.0007331493543460965,
-0.0658978670835495,
0.09886657446622849,
0.06510613113641739,
0.03685329481959343,
0.08103815466165543,
-0.05656178295612335,
0.12915635108947754,
-0.005692469421774149,
-0.04479910060763359,
0.039818789809942245,
0.10055822879076004,
0.02440408430993557,
-0.00760544091463089,
0.15740543603897095,
0.07279264181852341,
0.016425443813204765,
-0.025514032691717148,
-0.02805745229125023,
-0.060123734176158905,
0.08948271721601486,
-0.033229582011699677,
0.009206651709973812,
-0.023396918550133705,
-0.017180217429995537,
0.03522292152047157,
0.016314750537276268,
0.05346781760454178,
-0.0745958611369133,
0.01307239942252636,
0.20183740556240082,
0.16843923926353455,
-0.006842266768217087,
-0.10749475657939911,
0.029215771704912186,
-0.053610168397426605,
0.015709616243839264,
-0.005575063172727823,
0.05409311130642891,
0.07133756577968597,
0.021069305017590523,
0.04943213239312172,
-0.03796966001391411,
-0.22509384155273438,
0.029356898739933968,
0.030971135944128036,
-0.07210713624954224,
-0.1533319503068924,
-0.02028370276093483,
-0.01951749250292778,
-0.0591743066906929,
0.00018004272715188563,
0.17863982915878296,
-0.10962049663066864,
-0.0011495179496705532,
0.011236675083637238,
0.05577497556805611,
-0.06724148988723755,
0.17461399734020233,
-0.005136309191584587,
0.04783455282449722,
-0.07165475934743881,
0.13945332169532776,
0.07293044030666351,
-0.09095464646816254,
0.1036725714802742,
-0.04067371413111687,
-0.03738696873188019,
-0.036374568939208984,
-0.05298951640725136,
-0.00030414649518206716,
0.0619841068983078,
-0.08832229673862457,
-0.003950211219489574,
-0.025546526536345482,
-0.0003958278684876859,
-0.02781202644109726,
-0.01827644370496273,
0.08828960359096527,
-0.07682148367166519,
-0.034486643970012665,
-0.1537688970565796,
0.06246471032500267,
0.11714339256286621,
0.054759591817855835,
-0.12229600548744202,
0.11689218133687973,
-0.020758818835020065,
0.03347280994057655,
-0.012294652871787548,
-0.01812625303864479,
0.025861117988824844,
0.015188694931566715,
-0.03756316006183624,
-0.005730807315558195,
-0.022198501974344254,
-0.04389338195323944,
0.03543801233172417,
-0.003621256910264492,
-0.021046841517090797,
0.08184531331062317,
-0.040917620062828064,
-0.04357901215553284,
-0.04794464260339737,
0.0448465459048748,
-0.09438043087720871,
0.05477077513933182,
0.03827408328652382,
-0.06416569650173187,
0.053768694400787354,
0.04989873990416527,
-0.010162975639104843,
0.11407898366451263,
-0.17579707503318787,
-0.022387875244021416,
0.03664577752351761,
0.0271525289863348,
-0.046611860394477844,
-0.08074113726615906,
-0.009388902224600315,
0.07274628430604935,
0.027961136773228645,
-0.02535180374979973,
-0.051803216338157654,
-0.06242911145091057,
-0.06354879587888718,
-0.015519705601036549,
0.021312830969691277,
-0.03899558633565903,
0.05003718286752701,
0.06760527193546295,
0.05694907158613205,
0.13060912489891052,
-0.07642509788274765,
0.02024790085852146,
-0.11959535628557205,
0.025470569729804993,
-0.07153230160474777,
0.02782456763088703,
-0.05032363533973694,
0.020694104954600334,
0.07655609399080276,
-0.03048926219344139,
0.05216923728585243,
-0.03700235113501549,
0.06479427963495255,
0.004188441205769777,
-0.009272310882806778,
-0.11633595079183578,
0.01649673841893673,
0.12149685621261597,
0.03996039554476738,
0.020612560212612152,
-0.06101880222558975,
-0.0866975486278534,
-0.018853450194001198,
0.04522784426808357,
0.028676895424723625,
0.04194777458906174,
0.07139024883508682,
0.11501360684633255,
0.05262753739953041,
-0.0792933851480484,
-0.025000132620334625,
0.05914650857448578,
-0.10072067379951477,
0.07020936906337738,
-0.002197487512603402,
0.016972316429018974,
0.11217391490936279,
-0.14095617830753326,
0.12010718882083893,
0.016609717160463333,
-0.08238399028778076,
-0.11003101617097855,
-0.18054448068141937,
-0.05871473625302315,
-0.0533631332218647,
0.0511062927544117,
-0.11130693554878235,
0.12231985479593277,
0.027378156781196594,
0.005382852628827095,
-0.027811627835035324,
0.14704754948616028,
-0.1375020295381546,
-0.09403173625469208,
0.08229659497737885,
-0.03896462172269821,
-0.010608806274831295,
0.03281058371067047,
0.012619650922715664,
0.10032085329294205,
0.009392890147864819,
0.08865998685359955,
0.03288017213344574,
0.07406746596097946,
0.06126022711396217,
-0.020871547982096672,
-0.07612640410661697,
0.05165012180805206,
-0.030986815690994263,
0.08287547528743744,
0.13082215189933777,
0.08019789308309555,
-0.04465357959270477,
-0.025021804496645927,
0.15808872878551483,
-0.042049139738082886,
-0.055627863854169846,
-0.1485816389322281,
0.1382284313440323,
0.0359150692820549,
0.03278467059135437,
-0.02787281759083271,
-0.11658206582069397,
0.04878133535385132,
0.12831631302833557,
0.055698443204164505,
0.04010087624192238,
-0.022093651816248894,
-0.036122821271419525,
-0.015598227269947529,
-0.05322053283452988,
0.07797738909721375,
0.020075347274541855,
0.1399867832660675,
-0.018136251717805862,
0.13252583146095276,
-0.007468729745596647,
-0.02613290213048458,
-0.08565225452184677,
0.11782204359769821,
-0.09075456857681274,
-0.06574421375989914,
0.030562886968255043,
0.1214851513504982,
-0.040828924626111984,
-0.2053334265947342,
-0.13375402987003326,
-0.07586359977722168,
-0.09903771430253983,
0.017129385843873024,
0.07069894671440125,
0.02395038865506649,
0.03674826771020889,
-0.006460010539740324,
0.003271419322118163,
0.16232189536094666,
-0.020796088501811028,
-0.035907771438360214,
-0.006076782010495663,
-0.05720853805541992,
-0.010563434101641178,
0.13270162045955658,
0.05203012749552727,
0.13902385532855988,
0.030766600742936134,
0.02409413456916809,
-0.06853189319372177,
0.09666828066110611,
0.006235858425498009,
-0.16747836768627167,
0.07184037566184998,
0.22742104530334473,
-0.003773468779399991,
0.07953424751758575,
0.015913689509034157,
-0.13042375445365906,
0.023728596046566963,
0.08754095435142517,
0.04417130723595619,
-0.017231205478310585,
0.10711725801229477,
-0.09491579234600067,
0.11582423001527786,
0.07406572252511978,
-0.04031766206026077,
0.03293628990650177,
-0.0747736245393753,
0.02152421325445175,
-0.007037017028778791,
0.1155300959944725,
0.03182582929730415,
-0.1980227380990982,
0.02740044891834259,
-0.04078369960188866,
-0.01639978028833866,
-0.08216582983732224,
-0.06885523349046707,
0.03545520827174187,
-0.061670199036598206,
-0.021017009392380714,
0.08648699522018433,
0.08989530801773071,
-0.03587362915277481,
0.012236139737069607,
0.030276428908109665,
0.05014501512050629,
0.07887109369039536,
-0.12141724675893784,
-0.08037211745977402
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# ner-swedish-wikiann
This model is a fine-tuned version of [nordic-roberta-wiki](https://huggingface.co/flax-community/nordic-roberta-wiki) trained for NER on the wikiann dataset.
eval F1-Score: **83,78**
test F1-Score: **83,76**
## Model Usage
```python
from transformers import AutoTokenizer, AutoModelForTokenClassification
from transformers import pipeline
tokenizer = AutoTokenizer.from_pretrained("birgermoell/ner-swedish-wikiann")
model = AutoModelForTokenClassification.from_pretrained("birgermoell/ner-swedish-wikiann")
nlp = pipeline("ner", model=model, tokenizer=tokenizer)
example = "Jag heter Per och jag jobbar på KTH"
nlp(example)
```
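The pipeline returns one entry per predicted entity token, with WikiANN-style tags (B-/I- prefixed PER, ORG and LOC) and a confidence score; in the example above, "Per" would be expected to come back as a person and "KTH" as an organisation, though the exact spans and scores depend on the checkpoint.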
<!--
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 4.9086903597787154e-05
- train_batch_size: 32
- eval_batch_size: 16
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 5.0
- mixed_precision_training: Native AMP
### Training results
It achieves the following results on the evaluation set:
- Loss: 0.3156
- Precision: 0.8332
- Recall: 0.8424
- F1: 0.8378
- Accuracy: 0.9193
It achieves the following results on the test set:
- Loss: 0.3023
- Precision: 0.8301
- Recall: 0.8452
- F1: 0.8376
- Accuracy: 0.92
### Framework versions
- Transformers 4.6.1
- Pytorch 1.8.1+cu101
- Datasets 1.6.2
- Tokenizers 0.10.2
-->
|
{"license": "apache-2.0", "tags": ["token-classification"], "datasets": ["wikiann"], "metrics": ["precision", "recall", "f1", "accuracy"], "model-index": [{"name": "ner-swedish-wikiann", "results": [{"task": {"type": "token-classification", "name": "Token Classification"}, "dataset": {"name": "wikiann", "type": "wikiann"}, "metrics": [{"type": "precision", "value": 0.8331921416757433, "name": "Precision"}, {"type": "recall", "value": 0.84243586083126, "name": "Recall"}, {"type": "f1", "value": 0.8377885044416501, "name": "F1"}, {"type": "accuracy", "value": 0.91930707459758, "name": "Accuracy"}]}]}]}
|
token-classification
|
birgermoell/ner-swedish-wikiann
|
[
"transformers",
"pytorch",
"roberta",
"token-classification",
"dataset:wikiann",
"license:apache-2.0",
"model-index",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #roberta #token-classification #dataset-wikiann #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #region-us
|
# ner-swedish-wikiann
This model is a fine-tuned version of nordic-roberta-wiki trained for NER on the wikiann dataset.
eval F1-Score: 83,78
test F1-Score: 83,76
## Model Usage
|
[
"# ner-swedish-wikiann\n\nThis model is a fine-tuned version of nordic-roberta-wiki trained for NER on the wikiann dataset.\n\neval F1-Score: 83,78 \n\ntest F1-Score: 83,76",
"## Model Usage"
] |
[
"TAGS\n#transformers #pytorch #roberta #token-classification #dataset-wikiann #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #region-us \n",
"# ner-swedish-wikiann\n\nThis model is a fine-tuned version of nordic-roberta-wiki trained for NER on the wikiann dataset.\n\neval F1-Score: 83,78 \n\ntest F1-Score: 83,76",
"## Model Usage"
] |
[
56,
57,
4
] |
[
"passage: TAGS\n#transformers #pytorch #roberta #token-classification #dataset-wikiann #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #region-us \n# ner-swedish-wikiann\n\nThis model is a fine-tuned version of nordic-roberta-wiki trained for NER on the wikiann dataset.\n\neval F1-Score: 83,78 \n\ntest F1-Score: 83,76## Model Usage"
] |
[
-0.08875424414873123,
0.10241816192865372,
-0.0016468525864183903,
0.13502702116966248,
0.02997990883886814,
-0.024501964449882507,
0.09463243186473846,
0.047997768968343735,
-0.0034048452507704496,
-0.032320212572813034,
0.16091500222682953,
0.11068986356258392,
0.02163880690932274,
0.11345931887626648,
-0.02370690554380417,
-0.2380894124507904,
0.09097328037023544,
0.013481753878295422,
0.0264528039842844,
0.10820447653532028,
0.11183716356754303,
-0.021666238084435463,
0.052326224744319916,
0.09726647287607193,
-0.11994175612926483,
0.04046512767672539,
0.030242912471294403,
-0.1005106270313263,
0.13007742166519165,
0.043309684842824936,
0.12153594195842743,
0.031190359964966774,
0.08763176202774048,
-0.11871291697025299,
0.01786603219807148,
-0.03660283237695694,
-0.016802947968244553,
0.06197568029165268,
0.024943016469478607,
-0.001562207005918026,
0.15771515667438507,
0.08471120148897171,
0.02385891042649746,
-0.00504156481474638,
-0.048167526721954346,
-0.12718170881271362,
-0.060438647866249084,
0.10862497985363007,
0.03406691923737526,
0.07273507863283157,
0.01492809783667326,
0.1873665153980255,
-0.19252410531044006,
0.08889027684926987,
0.1252046525478363,
-0.2727934718132019,
-0.06498971581459045,
0.11749623715877533,
-0.034962497651576996,
-0.0714246928691864,
-0.05033889040350914,
0.0558522492647171,
0.07471057027578354,
0.03464524447917938,
0.08103644847869873,
-0.02294171415269375,
-0.16193917393684387,
0.02239805832505226,
-0.1101715937256813,
0.025385385379195213,
0.19336648285388947,
0.07248518615961075,
-0.03184707090258598,
0.0217791385948658,
-0.00758980680257082,
0.04751428961753845,
-0.013532576151192188,
-0.06856818497180939,
-0.058258235454559326,
-0.05634371191263199,
0.020076796412467957,
-0.030912071466445923,
-0.07775836437940598,
-0.04723966494202614,
-0.16840529441833496,
0.19409893453121185,
0.03756635636091232,
0.06659922748804092,
-0.09004288911819458,
0.031966760754585266,
-0.1312028467655182,
-0.10534722357988358,
-0.04055226966738701,
-0.09101861715316772,
-0.03935576602816582,
-0.037397049367427826,
-0.029949482530355453,
0.0035454670432955027,
0.09557082504034042,
0.22899118065834045,
0.023059453815221786,
-0.018192989751696587,
0.06275870651006699,
0.03586618974804878,
-0.005610177759081125,
0.07154596596956253,
-0.057990286499261856,
-0.12767058610916138,
0.08400975167751312,
-0.10582572221755981,
-0.008563432842493057,
-0.014275742694735527,
-0.09810249507427216,
-0.08256310969591141,
0.05955260619521141,
-0.03166177123785019,
-0.011915363371372223,
0.03792738914489746,
-0.0019372403621673584,
-0.026048099622130394,
0.14150232076644897,
-0.05948876962065697,
-0.014526932500302792,
-0.011265244334936142,
-0.07787959277629852,
0.10930297523736954,
0.02350667305290699,
0.04018966853618622,
-0.03883815556764603,
0.11673232167959213,
-0.03197994455695152,
-0.06111392006278038,
0.0006579969194717705,
-0.0724167749285698,
0.07499615103006363,
-0.13265873491764069,
0.058090128004550934,
-0.1587027907371521,
-0.16128721833229065,
-0.006116315256804228,
0.09945832192897797,
-0.00818305928260088,
-0.09815015643835068,
-0.05187399312853813,
-0.026023736223578453,
0.04932849481701851,
-0.050849758088588715,
-0.04402826726436615,
-0.03628075495362282,
-0.0418967679142952,
-0.060112323611974716,
0.07936210930347443,
-0.1554407924413681,
0.0079036895185709,
-0.12349693477153778,
-0.008255811408162117,
-0.10360431671142578,
-0.04076066613197327,
-0.15018248558044434,
0.0350150540471077,
-0.10723171383142471,
-0.0430876761674881,
-0.02566973678767681,
-0.014222261495888233,
0.05081553012132645,
0.14186447858810425,
-0.08753913640975952,
-0.055462758988142014,
0.10473734140396118,
-0.12871339917182922,
-0.10582174360752106,
0.120902881026268,
-0.025243129581212997,
0.0520586334168911,
0.09177357703447342,
0.17065727710723877,
0.1439923495054245,
-0.08030106127262115,
0.022245343774557114,
0.04631197080016136,
-0.05383020266890526,
-0.18924081325531006,
0.04504641890525818,
0.05939401686191559,
-0.12826089560985565,
0.08385954052209854,
-0.020170951262116432,
0.08169770985841751,
-0.03033226914703846,
-0.07886531949043274,
-0.010622642934322357,
-0.08967775106430054,
0.05084173381328583,
0.02785574272274971,
0.09992743283510208,
-0.06631424278020859,
-0.026501672342419624,
0.0096870893612504,
0.08136609941720963,
-0.01699889823794365,
0.021962111815810204,
-0.06328386068344116,
0.11089532822370529,
-0.09658429771661758,
-0.0195389986038208,
-0.07666872441768646,
-0.12046582251787186,
-0.04964989051222801,
-0.06460953503847122,
0.03541693836450577,
0.06717993319034576,
0.05868925154209137,
-0.015810739248991013,
-0.018066929653286934,
0.054501693695783615,
0.030382594093680382,
0.056920185685157776,
-0.00200493261218071,
-0.19203777611255646,
0.03354381397366524,
-0.023449314758181572,
0.022415677085518837,
-0.14446988701820374,
-0.04233762249350548,
-0.03150862082839012,
0.14481040835380554,
-0.039938606321811676,
0.05629873648285866,
-0.0259868074208498,
0.025426290929317474,
-0.06947042793035507,
0.03447231277823448,
0.07087276875972748,
-0.02742139995098114,
-0.06423433870077133,
0.13411647081375122,
0.11252842098474503,
0.13283029198646545,
0.12011579424142838,
-0.0354558601975441,
-0.0349089652299881,
0.0505296029150486,
-0.009985287673771381,
-0.03499066084623337,
-0.026473158970475197,
0.11931797862052917,
0.0312394630163908,
0.011593017727136612,
0.0752202644944191,
-0.061338625848293304,
-0.02423284761607647,
0.08101435005664825,
-0.08806218951940536,
-0.0008414057665504515,
0.13272449374198914,
0.174934983253479,
-0.26693201065063477,
0.0953478217124939,
0.09615521132946014,
-0.15540888905525208,
0.15033167600631714,
0.013445001095533371,
-0.039072103798389435,
0.014754604548215866,
-0.08655882626771927,
-0.008863100782036781,
0.11686443537473679,
-0.045815806835889816,
0.004836418200284243,
0.05473858118057251,
0.033042531460523605,
0.04345773160457611,
-0.06445130705833435,
-0.04977333918213844,
0.007570903282612562,
-0.020133694633841515,
-0.017987648025155067,
0.08949697762727737,
-0.05754100903868675,
0.06983260065317154,
-0.011400899849832058,
-0.22219595313072205,
0.06077847257256508,
0.03157106786966324,
-0.08325754106044769,
0.17279207706451416,
-0.029871448874473572,
-0.11282740533351898,
-0.14002296328544617,
-0.05503604933619499,
-0.13670256733894348,
-0.04712771996855736,
0.06133495643734932,
-0.04942081496119499,
-0.05773075297474861,
-0.016799937933683395,
-0.04339740425348282,
0.015025192871689796,
0.011794356629252434,
-0.053872767835855484,
-0.05409250035881996,
-0.016700811684131622,
-0.08903291821479797,
-0.016291750594973564,
-0.03949568793177605,
0.049311935901641846,
0.059195004403591156,
-0.12026375532150269,
0.13563847541809082,
0.05783673748373985,
-0.03202943876385689,
0.03432353958487511,
0.033843640238046646,
0.25729066133499146,
-0.05057868734002113,
0.08418245613574982,
0.058294929563999176,
-0.06386887282133102,
0.030527330935001373,
0.11277783662080765,
0.037696827203035355,
-0.013492606580257416,
-0.030263982713222504,
0.02926125004887581,
-0.0841006487607956,
-0.25824350118637085,
-0.08869615942239761,
-0.02299649268388748,
0.0068982127122581005,
0.0689702257514,
0.029982222244143486,
0.03887690603733063,
0.14306196570396423,
0.06434763967990875,
-0.06458167731761932,
-0.07996772229671478,
0.055963847786188126,
0.1826956570148468,
0.060675960034132004,
0.12512899935245514,
-0.07816749066114426,
-0.047190383076667786,
0.07574968785047531,
0.046482816338539124,
0.1357540488243103,
0.05458053573966026,
0.029436947777867317,
0.04886727035045624,
0.22051475942134857,
0.09024171531200409,
0.13448408246040344,
0.019503427669405937,
-0.052196063101291656,
0.03676168620586395,
-0.013635142706334591,
0.012839876115322113,
0.02164025418460369,
-0.08042921870946884,
-0.07419370114803314,
0.0047762528993189335,
-0.025316452607512474,
0.01084650307893753,
0.1363818198442459,
0.09250222146511078,
-0.24463744461536407,
-0.0756409615278244,
-0.03439103066921234,
-0.060750991106033325,
-0.020107155665755272,
0.06003623455762863,
-0.02448420226573944,
-0.1362372785806656,
0.12598571181297302,
-0.014813316985964775,
0.11494767665863037,
-0.005559442099183798,
-0.0050550056621432304,
-0.045987240970134735,
0.0303952656686306,
-0.0008329881238751113,
0.11885499954223633,
-0.1444641798734665,
0.23313330113887787,
-0.026572080329060555,
0.033379584550857544,
-0.07284039258956909,
-0.06933324784040451,
0.03192184120416641,
0.17986193299293518,
0.17104585468769073,
0.03646213188767433,
-0.11419235914945602,
-0.005310835316777229,
-0.08227382600307465,
0.0605132095515728,
-0.05359592288732529,
-0.012055942788720131,
0.043568503111600876,
0.012306352145969868,
0.0026782879140228033,
-0.011642269790172577,
0.0996161699295044,
-0.10722365230321884,
-0.06468075513839722,
-0.03070382960140705,
0.11482606083154678,
-0.10679959505796432,
0.01219788659363985,
-0.12391404807567596,
-0.2547813057899475,
0.21469931304454803,
0.031202608719468117,
-0.03635873273015022,
-0.12943637371063232,
0.005871220957487822,
0.08597353845834732,
-0.11392953991889954,
0.024210087954998016,
-0.05309842526912689,
0.022656675428152084,
-0.07747826725244522,
-0.12230661511421204,
0.046246014535427094,
-0.10407784581184387,
-0.017833830788731575,
0.027663394808769226,
0.02745905891060829,
0.1070822924375534,
-0.007604080252349377,
0.09065786004066467,
0.027868617326021194,
-0.03633856773376465,
-0.12389488518238068,
-0.031126489862799644,
0.020970141515135765,
-0.07440779358148575,
-0.08157411962747574,
0.05714627355337143,
0.001008038641884923,
-0.04151320084929466,
-0.022860771045088768,
0.17745551466941833,
0.09745373576879501,
-0.11102692037820816,
0.07997623831033707,
0.2023310512304306,
-0.05385580286383629,
-0.27844879031181335,
-0.05115174502134323,
0.010621115565299988,
0.03458539769053459,
-0.010065584443509579,
-0.07721544057130814,
0.27603599429130554,
0.09478887915611267,
-0.07296078652143478,
0.0209796279668808,
-0.007112766616046429,
-0.10358753055334091,
0.18653525412082672,
0.06492402404546738,
0.3959839344024658,
0.011991540901362896,
0.0006265214178711176,
-0.01959409937262535,
-0.1876789927482605,
0.13763435184955597,
-0.1221243217587471,
0.04955802112817764,
-0.06151903048157692,
0.10480695217847824,
0.007568270433694124,
-0.04011226072907448,
0.10039936751127243,
0.046066831797361374,
0.0006505773635581136,
-0.06891979277133942,
0.08437125384807587,
0.025160303339362144,
-0.04987599700689316,
0.14338158071041107,
-0.04002606123685837,
0.05067979916930199,
-0.06026014685630798,
-0.08240347355604172,
-0.09923138469457626,
0.15359079837799072,
-0.022896992042660713,
-0.04836248606443405,
-0.05194497108459473,
0.06760675460100174,
0.03483501449227333,
-0.009817861020565033,
0.1840711086988449,
-0.013659004122018814,
0.05886077880859375,
0.06950418651103973,
0.08195420354604721,
-0.015825429931282997,
0.02162565477192402,
-0.01317517552524805,
-0.102537140250206,
0.09059217572212219,
-0.18221035599708557,
0.022090798243880272,
0.15043222904205322,
-0.00907636247575283,
-0.008317430503666401,
-0.005328664090484381,
-0.11811627447605133,
-0.0751296654343605,
0.10640958696603775,
-0.15211427211761475,
0.06349173933267593,
-0.026028092950582504,
-0.2016521692276001,
0.04567689821124077,
0.08016553521156311,
0.14575880765914917,
-0.13909593224525452,
0.00753240380436182,
-0.003766134614124894,
-0.038220807909965515,
-0.06620048731565475,
0.11329959332942963,
0.1346961259841919,
0.02352057956159115,
-0.11276066303253174,
0.05262620747089386,
0.003862440586090088,
0.006078021600842476,
0.03565351665019989,
0.058757781982421875,
-0.13367874920368195,
-0.09060593694448471,
0.019493279978632927,
0.18007118999958038,
-0.2571689188480377,
-0.15093190968036652,
-0.05624610558152199,
-0.09044710546731949,
0.04336797073483467,
0.13815534114837646,
0.1215863823890686,
0.007111371960490942,
0.0054082381539046764,
-0.10763411968946457,
-0.07215895503759384,
0.06601868569850922,
0.019630860537290573,
0.05242728814482689,
-0.13802936673164368,
-0.11600515246391296,
-0.02569623850286007,
0.10080128163099289,
-0.05446363613009453,
0.014440485276281834,
-0.08537372946739197,
0.02768157422542572,
-0.1787651926279068,
-0.0040468741208314896,
-0.09846577793359756,
0.07348936796188354,
-0.017407124862074852,
-0.09272806346416473,
-0.055440038442611694,
0.03628994897007942,
-0.10688064247369766,
0.04263216629624367,
-0.002993607195094228,
0.007718033157289028,
-0.04647120088338852,
0.013029531575739384,
0.03511229529976845,
-0.006231086794286966,
0.04570239782333374,
0.036344096064567566,
-0.015472440980374813,
0.16381245851516724,
-0.1255611926317215,
0.03286024183034897,
-0.011984904296696186,
0.0354352705180645,
0.10344482213258743,
-0.026731783524155617,
-0.0012411379721015692,
0.09675902873277664,
0.06063363701105118,
0.03937101364135742,
-0.0629345029592514,
-0.08616290241479874,
-0.09310851246118546,
0.01607969030737877,
-0.1243724673986435,
-0.02227708324790001,
-0.00862790085375309,
0.1058986485004425,
0.031401291489601135,
0.20371568202972412,
0.000029119006285327487,
0.03833090513944626,
-0.046375855803489685,
0.017221644520759583,
-0.0015793185448274016,
-0.1119818165898323,
-0.019321775063872337,
-0.03520704060792923,
-0.024241818115115166,
-0.055835749953985214,
0.1615975797176361,
0.001287809805944562,
-0.027023768052458763,
0.0014636492123827338,
-0.08487271517515182,
-0.10550019890069962,
-0.01331888698041439,
0.18084335327148438,
0.06746869534254074,
0.004235400818288326,
-0.07320377975702286,
0.019900349900126457,
-0.037791457027196884,
0.10413111746311188,
0.10589127987623215,
0.053389061242341995,
-0.028558669611811638,
0.08536690473556519,
0.07530096918344498,
0.039656464010477066,
-0.0388205386698246,
-0.003496984951198101,
-0.09032823890447617,
0.0678362175822258,
0.09337209165096283,
-0.07273771613836288,
0.23883388936519623,
-0.05636284127831459,
-0.013140602968633175,
0.02569114789366722,
-0.05928165093064308,
-0.2329762578010559,
-0.29891282320022583,
-0.16019059717655182,
-0.07130380719900131,
0.08601478487253189,
-0.07553329318761826,
-0.01810499280691147,
0.0702497661113739,
0.12549366056919098,
-0.055771347135305405,
0.02897687442600727,
0.05301964655518532,
-0.03126799315214157,
0.0734962746500969,
-0.04751726984977722,
-0.09571012109518051,
0.05585337430238724,
0.0019894212018698454,
-0.06262150406837463,
0.0455867163836956,
-0.041451696306467056,
-0.03619886189699173,
-0.006269651465117931,
0.03657190129160881,
-0.040728144347667694,
-0.0467853806912899,
-0.046439386904239655,
0.01712907664477825,
0.06179823353886604,
0.03193062171339989,
0.03831677511334419,
0.014295789413154125,
0.01083415374159813,
0.11550502479076385,
0.0170577485114336,
-0.14761760830879211,
-0.16220052540302277,
0.16447506844997406,
0.009239940904080868,
0.053683869540691376,
-0.015431317500770092,
0.021950947120785713,
0.031011313199996948,
0.2849789559841156,
0.24204020202159882,
0.06275835633277893,
0.009316104464232922,
-0.03609183058142662,
-0.01398405246436596,
0.013090535998344421,
0.05855448171496391,
0.010507859289646149,
0.10624440759420395,
-0.048747267574071884,
-0.06768947094678879,
-0.12771715223789215,
-0.02215544693171978,
-0.03846298158168793,
0.05668044090270996,
0.06665648519992828,
-0.08212342113256454,
-0.12081333994865417,
0.1092260330915451,
-0.16239318251609802,
0.01114549022167921,
0.04438970610499382,
-0.0018099688459187746,
-0.1734466254711151,
-0.04549441486597061,
0.03456399217247963,
0.04081084206700325,
0.023011531680822372,
-0.07039660215377808,
-0.0015834332443773746,
-0.04289955645799637,
-0.005723912734538317,
-0.12649032473564148,
-0.05454952269792557,
0.067266084253788,
0.10370814800262451,
0.09840483963489532,
0.051400840282440186,
0.13284477591514587,
0.09507318586111069,
0.02526482753455639,
-0.11241009086370468,
0.0726693794131279,
0.006827347446233034,
-0.0045451014302670956,
0.03167464956641197,
0.024356653913855553,
0.012423471547663212,
-0.001387467491440475,
0.03257431462407112,
-0.1012636199593544,
-0.010577000677585602,
0.06877575069665909,
-0.023121396079659462,
-0.05497074872255325,
0.12309855222702026,
-0.07181049138307571,
0.138697549700737,
0.14221510291099548,
-0.028601257130503654,
-0.03491764888167381,
-0.08841021358966827,
0.07597391307353973,
-0.009350782260298729,
-0.14079120755195618,
-0.03825050964951515,
-0.08763228356838226,
-0.02865300327539444,
-0.033869244158267975,
-0.007581750862300396,
-0.07835372537374496,
0.019446197897195816,
-0.09354343265295029,
-0.026723425835371017,
-0.02162264473736286,
0.05440446361899376,
0.13409224152565002,
0.016263490542769432,
-0.006759243551641703,
0.018773600459098816,
-0.01566118746995926,
-0.0011775154853239655,
-0.0896206945180893,
-0.13879786431789398
] |
null | null |
transformers
|
# Svensk Roberta
## Description
Swedish Roberta model trained on the MC4 dataset. The model's performance still needs to be assessed.
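## Usage
A minimal usage sketch, assuming the checkpoint id birgermoell/roberta-swedish-scandi and the feature-extraction pipeline tag listed in this record's metadata; the example sentence is illustrative:
```python
from transformers import pipeline

# Sketch: extract contextual token embeddings with the pretrained Swedish Roberta.
extractor = pipeline("feature-extraction", model="birgermoell/roberta-swedish-scandi")

features = extractor("Meningen med livet är fin")  # illustrative Swedish sentence
# features is a nested list shaped [batch][tokens][hidden_size]
print(len(features[0]), len(features[0][0]))
```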
## Model series
This model is part of a series of models trained on TPU with Flax/Jax during the Huggingface Flax/Jax challenge.
## Gpt models
## Swedish Gpt
https://huggingface.co/birgermoell/swedish-gpt/
## Swedish gpt wiki
https://huggingface.co/flax-community/swe-gpt-wiki
## Nordic gpt wiki
https://huggingface.co/flax-community/nordic-gpt-wiki
## Dansk gpt wiki
https://huggingface.co/flax-community/dansk-gpt-wiki
## Norsk gpt wiki
https://huggingface.co/flax-community/norsk-gpt-wiki
## Roberta models
## Nordic Roberta Wiki
https://huggingface.co/flax-community/nordic-roberta-wiki
## Swe Roberta Wiki Oscar
https://huggingface.co/flax-community/swe-roberta-wiki-oscar
## Roberta Swedish Scandi
https://huggingface.co/birgermoell/roberta-swedish-scandi
## Roberta Swedish
https://huggingface.co/birgermoell/roberta-swedish
## Swedish T5 model
https://huggingface.co/birgermoell/t5-base-swedish
|
{"language": "sv", "license": "cc-by-4.0", "tags": ["translate"], "datasets": ["mc4"], "widget": [{"text": "Meningen med livet \u00e4r <mask>"}]}
|
feature-extraction
|
birgermoell/roberta-swedish-scandi
|
[
"transformers",
"pytorch",
"jax",
"tensorboard",
"roberta",
"feature-extraction",
"translate",
"sv",
"dataset:mc4",
"license:cc-by-4.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"sv"
] |
TAGS
#transformers #pytorch #jax #tensorboard #roberta #feature-extraction #translate #sv #dataset-mc4 #license-cc-by-4.0 #endpoints_compatible #region-us
|
# Svensk Roberta
## Description
Swedish Roberta model trained on the MC4 dataset. The model performance needs to be assessed
## Model series
This model is part of a series of models training on TPU with Flax Jax during Huggingface Flax/Jax challenge.
## Gpt models
## Swedish Gpt
URL
## Swedish gpt wiki
URL
# Nordic gpt wiki
URL
## Dansk gpt wiki
URL
## Norsk gpt wiki
URL
## Roberta models
## Nordic Roberta Wiki
URL
## Swe Roberta Wiki Oscar
URL
## Roberta Swedish Scandi
URL
## Roberta Swedish
URL
## Swedish T5 model
URL
|
[
"# Svensk Roberta",
"## Description\nSwedish Roberta model trained on the MC4 dataset. The model performance needs to be assessed",
"## Model series\nThis model is part of a series of models training on TPU with Flax Jax during Huggingface Flax/Jax challenge.",
"## Gpt models",
"## Swedish Gpt\nURL",
"## Swedish gpt wiki\nURL",
"# Nordic gpt wiki\nURL",
"## Dansk gpt wiki\nURL",
"## Norsk gpt wiki\nURL",
"## Roberta models",
"## Nordic Roberta Wiki\nURL",
"## Swe Roberta Wiki Oscar\nURL",
"## Roberta Swedish Scandi\nURL",
"## Roberta Swedish\nURL",
"## Swedish T5 model\nURL"
] |
[
"TAGS\n#transformers #pytorch #jax #tensorboard #roberta #feature-extraction #translate #sv #dataset-mc4 #license-cc-by-4.0 #endpoints_compatible #region-us \n",
"# Svensk Roberta",
"## Description\nSwedish Roberta model trained on the MC4 dataset. The model performance needs to be assessed",
"## Model series\nThis model is part of a series of models training on TPU with Flax Jax during Huggingface Flax/Jax challenge.",
"## Gpt models",
"## Swedish Gpt\nURL",
"## Swedish gpt wiki\nURL",
"# Nordic gpt wiki\nURL",
"## Dansk gpt wiki\nURL",
"## Norsk gpt wiki\nURL",
"## Roberta models",
"## Nordic Roberta Wiki\nURL",
"## Swe Roberta Wiki Oscar\nURL",
"## Roberta Swedish Scandi\nURL",
"## Roberta Swedish\nURL",
"## Swedish T5 model\nURL"
] |
[
58,
4,
23,
32,
4,
5,
6,
6,
6,
6,
4,
6,
7,
7,
5,
6
] |
[
"passage: TAGS\n#transformers #pytorch #jax #tensorboard #roberta #feature-extraction #translate #sv #dataset-mc4 #license-cc-by-4.0 #endpoints_compatible #region-us \n# Svensk Roberta## Description\nSwedish Roberta model trained on the MC4 dataset. The model performance needs to be assessed## Model series\nThis model is part of a series of models training on TPU with Flax Jax during Huggingface Flax/Jax challenge.## Gpt models## Swedish Gpt\nURL## Swedish gpt wiki\nURL# Nordic gpt wiki\nURL## Dansk gpt wiki\nURL## Norsk gpt wiki\nURL## Roberta models## Nordic Roberta Wiki\nURL## Swe Roberta Wiki Oscar\nURL## Roberta Swedish Scandi\nURL## Roberta Swedish\nURL## Swedish T5 model\nURL"
] |
[
-0.051757436245679855,
0.19655023515224457,
0.0004742823075503111,
0.15953779220581055,
0.05316965654492378,
0.020095277577638626,
0.04008375480771065,
0.11742103844881058,
0.023735197260975838,
0.016371000558137894,
0.17234203219413757,
0.06206796318292618,
0.11410794407129288,
0.04082202911376953,
0.07992029935121536,
-0.35344749689102173,
0.06827946752309799,
-0.028216388076543808,
-0.07766173779964447,
0.09284885972738266,
0.0829644501209259,
0.003706239629536867,
0.06911377608776093,
0.010212995111942291,
-0.027820490300655365,
-0.0006989568355493248,
-0.005680729169398546,
-0.02701476588845253,
0.11119626462459564,
0.0649879202246666,
-0.0034429419320076704,
0.09071068465709686,
0.12251510471105576,
-0.12750950455665588,
0.013914945535361767,
-0.004392547067254782,
-0.02455211617052555,
0.019537681713700294,
0.018704622983932495,
0.07011306285858154,
0.2872470021247864,
0.024049272760748863,
0.016109464690089226,
0.007573237642645836,
-0.031372785568237305,
-0.19696001708507538,
-0.07820296287536621,
0.07134140282869339,
-0.01598917320370674,
0.05977874994277954,
-0.049020543694496155,
0.15598349273204803,
-0.18513473868370056,
0.09271235764026642,
0.12948091328144073,
-0.278827428817749,
-0.06472732871770859,
0.22510398924350739,
0.11055034399032593,
0.08128253370523453,
-0.08187604695558548,
0.14161363244056702,
0.036566171795129776,
0.05780639871954918,
0.051202885806560516,
-0.053465936332941055,
-0.03522995114326477,
0.05415414273738861,
-0.1411832720041275,
0.07553499937057495,
0.21517331898212433,
0.08066640794277191,
-0.025997253134846687,
-0.06816918402910233,
0.026772864162921906,
0.043834153562784195,
-0.014069332741200924,
-0.09123199433088303,
0.023399529978632927,
-0.03810781240463257,
-0.02410268783569336,
-0.061402253806591034,
-0.0660749152302742,
-0.07302363961935043,
0.015325041487812996,
0.02023635059595108,
0.01518466416746378,
0.03063761629164219,
0.029249267652630806,
0.06596066802740097,
-0.1840512901544571,
-0.1029304563999176,
-0.07640068978071213,
-0.08772145956754684,
-0.05160176008939743,
-0.018527336418628693,
0.042887285351753235,
-0.10926186293363571,
0.08642898499965668,
0.0912390872836113,
0.034512486308813095,
0.02471679076552391,
0.10283204913139343,
0.04200340062379837,
0.015015768818557262,
0.138035848736763,
-0.19608506560325623,
-0.17203155159950256,
-0.004955464508384466,
-0.06325951963663101,
-0.03834763541817665,
-0.033114418387413025,
-0.04174141213297844,
-0.06535708159208298,
0.03223308548331261,
0.020676488056778908,
0.014646852388978004,
0.07134157419204712,
-0.0357021801173687,
0.036865293979644775,
0.025498881936073303,
-0.043489083647727966,
0.01072064507752657,
-0.05358337610960007,
-0.0725182518362999,
0.0857718214392662,
0.07521378993988037,
0.01585792750120163,
-0.06524516642093658,
0.10716695338487625,
-0.0864500105381012,
-0.04197731241583824,
0.04538843408226967,
-0.07358666509389877,
0.08070960640907288,
-0.08529082685709,
0.035812973976135254,
-0.09913429617881775,
-0.13362076878547668,
-0.06349699199199677,
0.07796484231948853,
-0.0978512167930603,
-0.04353415593504906,
-0.08155565708875656,
-0.1044558435678482,
0.05330442637205124,
-0.022564789280295372,
0.06751013547182083,
-0.06282951682806015,
0.043357495218515396,
-0.14460960030555725,
0.08389012515544891,
-0.016651786863803864,
-0.03801766410470009,
-0.1161089539527893,
-0.023142684251070023,
-0.18892882764339447,
0.05292405188083649,
-0.21479415893554688,
0.02390042133629322,
-0.13668376207351685,
-0.09332709014415741,
-0.0016870507970452309,
0.05355842411518097,
0.0398227795958519,
0.24856242537498474,
-0.14207389950752258,
0.004472773987799883,
0.2269923835992813,
-0.08884496986865997,
-0.04697153717279434,
0.15515021979808807,
0.02036959119141102,
0.08345997333526611,
0.11009471863508224,
0.22532683610916138,
0.06961232423782349,
-0.11302170157432556,
0.030346591025590897,
0.07889475673437119,
-0.06729667633771896,
-0.014962702989578247,
0.07433655112981796,
-0.019297011196613312,
0.009456053376197815,
0.054955411702394485,
-0.1332664042711258,
0.03873224928975105,
-0.019370611757040024,
-0.05712810903787613,
0.0029504168778657913,
-0.061811599880456924,
0.022224340587854385,
0.07835502922534943,
0.006663661915808916,
-0.08693963289260864,
-0.178270623087883,
-0.13864250481128693,
0.026606539264321327,
-0.07919928431510925,
0.019840195775032043,
-0.05013393610715866,
0.09272794425487518,
-0.06301367282867432,
0.007308392319828272,
-0.008756261318922043,
-0.1352090984582901,
-0.058988407254219055,
0.024849142879247665,
0.03606375679373741,
0.1354321837425232,
0.11611707508563995,
0.03078398108482361,
-0.04565678536891937,
0.023334801197052002,
0.01067293994128704,
0.007737101521342993,
-0.013578517362475395,
-0.23585334420204163,
0.039309777319431305,
-0.037990666925907135,
0.01137573178857565,
-0.17519772052764893,
-0.06028499826788902,
0.1394682079553604,
0.12898100912570953,
-0.008370807394385338,
-0.0647667646408081,
0.03669717535376549,
-0.02377697452902794,
-0.0047407145611941814,
-0.06306818127632141,
0.016786450520157814,
-0.05756976455450058,
-0.12525039911270142,
0.13509106636047363,
0.12566319108009338,
0.002806244883686304,
0.07063539326190948,
0.06793524324893951,
-0.13126598298549652,
0.10727091133594513,
-0.005935408174991608,
0.0034187017008662224,
0.0019949532579630613,
-0.0012118463637307286,
-0.0007084584212861955,
0.022882381454110146,
0.045940835028886795,
-0.07806973904371262,
-0.013159267604351044,
0.05152313411235809,
-0.058269061148166656,
-0.025165369734168053,
0.2090197205543518,
0.1535395234823227,
-0.1720861941576004,
0.05832938849925995,
-0.09209930151700974,
-0.027871131896972656,
0.280236154794693,
0.02602553181350231,
-0.051039401441812515,
0.003325041616335511,
-0.03790862113237381,
-0.0009704968542791903,
0.2212757021188736,
-0.0471251979470253,
0.036893099546432495,
0.04113243520259857,
0.03139306604862213,
0.005881150718778372,
-0.07560527324676514,
-0.10726836323738098,
-0.00811983086168766,
-0.08427807688713074,
-0.021386045962572098,
0.10084699839353561,
-0.10402727872133255,
0.06114125996828079,
0.0022827174980193377,
-0.12678860127925873,
-0.0054295966401696205,
0.0006187607068568468,
-0.09878744930028915,
0.22624599933624268,
-0.039125584065914154,
-0.1149422898888588,
-0.11988939344882965,
0.01776043511927128,
-0.03760897368192673,
-0.024717427790164948,
0.09263956546783447,
-0.13693764805793762,
-0.1094755306839943,
-0.08083748817443848,
0.11312714964151382,
-0.00092656584456563,
-0.03469080477952957,
-0.14916689693927765,
-0.035708025097846985,
-0.0691113993525505,
-0.11773011833429337,
-0.0025629610754549503,
-0.007783989887684584,
0.02038787119090557,
0.02779083512723446,
-0.09607163816690445,
0.15077225863933563,
0.07382010668516159,
0.017603792250156403,
0.026255430653691292,
0.03734232112765312,
0.24868570268154144,
-0.12510468065738678,
0.12896843254566193,
-0.005304347723722458,
0.024913283064961433,
0.0449674166738987,
0.11647138744592667,
0.05013371631503105,
-0.03736629709601402,
-0.07008865475654602,
0.023235341534018517,
-0.07639823853969574,
-0.2010379433631897,
-0.07519318163394928,
0.0018482634332031012,
0.05086183547973633,
0.021512921899557114,
0.05967817083001137,
-0.12448249757289886,
0.14541663229465485,
0.03291603922843933,
-0.101466104388237,
0.016056116670370102,
0.02235954813659191,
-0.05457938089966774,
-0.02218051254749298,
0.05729357898235321,
-0.08055105060338974,
-0.004665191285312176,
0.09031327068805695,
0.030555207282304764,
0.09060001373291016,
-0.04763147234916687,
0.023086201399564743,
0.08937796950340271,
0.15353569388389587,
0.011108304373919964,
0.059814926236867905,
0.051337432116270065,
-0.06094653904438019,
0.027907704934477806,
-0.06782252341508865,
-0.0002848324947990477,
0.05418295040726662,
0.0012831956846639514,
-0.09060341119766235,
-0.03381370007991791,
-0.006175012793391943,
0.01760888658463955,
0.08415940403938293,
0.06958997994661331,
-0.21891210973262787,
-0.06576886773109436,
0.04132745787501335,
-0.030339688062667847,
-0.034615591168403625,
0.02533832937479019,
0.051328469067811966,
-0.18428514897823334,
0.0803542360663414,
-0.05318167060613632,
0.09381397813558578,
-0.019969839602708817,
-0.04278314858675003,
0.03203611075878143,
0.03981751576066017,
-0.0522821769118309,
0.1303664743900299,
-0.059929583221673965,
0.21741043031215668,
-0.009871098212897778,
0.09379711747169495,
-0.09336799383163452,
-0.01744665764272213,
0.018419520929455757,
0.17694884538650513,
0.32827097177505493,
0.05110041797161102,
-0.03437241539359093,
-0.051078569144010544,
-0.05891605466604233,
0.06456424295902252,
-0.015848582610487938,
-0.03342590853571892,
0.07368968427181244,
0.02483474090695381,
0.016198797151446342,
-0.07462868094444275,
-0.04524218291044235,
-0.10500361025333405,
-0.0497979074716568,
0.0033968440257012844,
-0.06601735204458237,
-0.015781091526150703,
-0.01435966044664383,
-0.08108341693878174,
-0.28248631954193115,
0.19630064070224762,
-0.031078992411494255,
-0.12320758402347565,
-0.14323864877223969,
0.06818926334381104,
0.12264156341552734,
-0.07987462729215622,
-0.04116249457001686,
0.007265973836183548,
0.01866621896624565,
-0.027752354741096497,
-0.021721230819821358,
0.023781899362802505,
-0.05391928553581238,
-0.07726311683654785,
0.007122778799384832,
0.08084731549024582,
0.10542596876621246,
0.025242136791348457,
0.07741434872150421,
0.04673558473587036,
-0.034238554537296295,
-0.1839427500963211,
0.040840160101652145,
-0.019418615847826004,
-0.051855720579624176,
0.008629018440842628,
0.02093360759317875,
0.08610832691192627,
-0.01596939004957676,
-0.03717536851763725,
0.09659568965435028,
0.20761588215827942,
-0.12941040098667145,
0.10958222299814224,
0.13014647364616394,
-0.04566981643438339,
-0.30493399500846863,
-0.028879376128315926,
-0.013168771751224995,
0.03961488977074623,
0.087813600897789,
-0.04306798800826073,
0.16352976858615875,
0.05220462381839752,
-0.016498947516083717,
0.027315352112054825,
-0.21240977942943573,
-0.08910751342773438,
0.03516523912549019,
0.1425691395998001,
0.0724349319934845,
-0.06729274988174438,
-0.039588019251823425,
0.00928720086812973,
-0.23198911547660828,
0.025370147079229355,
-0.07040693610906601,
0.08821597695350647,
0.0012407591566443443,
0.11015670001506805,
0.03472944721579552,
-0.050450559705495834,
0.16891855001449585,
-0.0259051825851202,
-0.04687125235795975,
-0.1268005222082138,
0.1353341042995453,
0.0899161547422409,
-0.0072939288802444935,
0.21637539565563202,
-0.07209429144859314,
0.008371400646865368,
-0.1186026930809021,
-0.05812382325530052,
-0.1104016900062561,
0.11658924072980881,
-0.054214175790548325,
-0.10277310013771057,
-0.10124460607767105,
0.12779265642166138,
0.10009262710809708,
-0.020928043872117996,
0.0564633272588253,
-0.12350156158208847,
0.036166634410619736,
-0.04547334089875221,
0.17490167915821075,
-0.004938769154250622,
-0.09446699172258377,
-0.000748088292311877,
-0.060260649770498276,
0.04947785288095474,
-0.2321004718542099,
0.003922533243894577,
0.1333058476448059,
0.013104534707963467,
0.027385829016566277,
-0.014354000799357891,
-0.14986620843410492,
-0.03279343619942665,
0.12134720385074615,
-0.18145616352558136,
-0.12243524193763733,
-0.0538705512881279,
-0.2879336178302765,
0.05888596922159195,
0.02349632792174816,
0.15664999186992645,
-0.1277252584695816,
0.02972121350467205,
0.0053683193400502205,
0.03887499123811722,
-0.06817957758903503,
0.17157404124736786,
0.10800425708293915,
0.05654469132423401,
-0.1246163621544838,
0.07660971581935883,
-0.045599281787872314,
-0.014013533480465412,
0.04359439015388489,
0.14899230003356934,
-0.10821064561605453,
-0.0831025019288063,
0.04981400817632675,
0.14606179296970367,
-0.16500268876552582,
-0.05123366788029671,
-0.0936698243021965,
-0.05093538761138916,
-0.01039155013859272,
0.009751363657414913,
0.04745294526219368,
0.003248719498515129,
0.020117757841944695,
-0.0658113956451416,
-0.07826697826385498,
0.06826332956552505,
0.06435670703649521,
-0.019845228642225266,
-0.16022300720214844,
0.030004454776644707,
-0.04682577773928642,
0.08141300827264786,
-0.0757426768541336,
0.09666130691766739,
-0.08451355993747711,
-0.00932029727846384,
-0.020206177607178688,
-0.025881128385663033,
-0.05726656690239906,
0.04537646099925041,
-0.042248159646987915,
-0.07253333926200867,
-0.0414796881377697,
0.02488526701927185,
-0.0986604392528534,
-0.0020268044900149107,
-0.01429292093962431,
-0.010533925145864487,
-0.06712900847196579,
-0.01617712341248989,
0.01772233471274376,
-0.007518493104726076,
0.04455443099141121,
-0.07715270668268204,
-0.018193259835243225,
0.155076265335083,
-0.1680014580488205,
0.09706147015094757,
-0.0282489825040102,
-0.055245526134967804,
0.03177446126937866,
0.04840420186519623,
-0.031208256259560585,
-0.025330400094389915,
0.051682259887456894,
0.06115535646677017,
-0.06778016686439514,
-0.07077484577894211,
-0.035949062556028366,
-0.027959303930401802,
-0.051453009247779846,
-0.06602554768323898,
0.12724359333515167,
0.08295896649360657,
0.04071640223264694,
0.10577213764190674,
-0.04848961904644966,
0.050333745777606964,
-0.10366779565811157,
-0.0042828903533518314,
0.039661988615989685,
-0.055406443774700165,
0.03515326976776123,
0.02816290594637394,
0.05211137235164642,
-0.0891314223408699,
0.1391730010509491,
0.09586573392152786,
-0.014393618330359459,
0.016835367307066917,
-0.004413895774632692,
0.07633677870035172,
-0.02306271530687809,
0.14193952083587646,
-0.0031240833923220634,
0.002632174640893936,
-0.08506868034601212,
0.032231107354164124,
-0.044746752828359604,
-0.0426962785422802,
0.10124355554580688,
0.004973371513187885,
0.05677532032132149,
0.12141220271587372,
0.015203913673758507,
0.037576574832201004,
-0.004511862061917782,
0.058442309498786926,
0.04911068081855774,
0.05535966902971268,
0.02829296700656414,
0.018877821043133736,
0.18740314245224,
-0.12187454849481583,
0.02409917674958706,
0.026093553751707077,
-0.10443561524152756,
-0.17496399581432343,
-0.31665557622909546,
-0.1214064359664917,
-0.06977548450231552,
0.09656555950641632,
-0.13663049042224884,
0.014382665976881981,
0.039720118045806885,
0.10387741774320602,
-0.056094713509082794,
0.07900521159172058,
-0.042663201689720154,
-0.07348765432834625,
0.08429240435361862,
0.018723830580711365,
-0.04584987461566925,
0.06382612138986588,
0.026704391464591026,
0.0022599452640861273,
0.031137390062212944,
-0.0649835392832756,
-0.024900611490011215,
-0.07344066351652145,
-0.021540824323892593,
-0.026360776275396347,
-0.06569474935531616,
-0.031029507517814636,
0.061226584017276764,
0.06092631444334984,
0.016291502863168716,
0.05804670602083206,
-0.04456770792603493,
-0.04314816743135452,
0.20203717052936554,
0.005631604231894016,
-0.006403197068721056,
-0.15721635520458221,
0.10913685709238052,
-0.08028294891119003,
0.08106474578380585,
-0.00635457644239068,
-0.1004493460059166,
0.09951795637607574,
0.25344136357307434,
0.2009163796901703,
-0.009229266084730625,
-0.012506700120866299,
-0.03576863557100296,
-0.02281149849295616,
0.01712561771273613,
0.07362152636051178,
-0.04181280732154846,
0.1548311859369278,
-0.10743498057126999,
0.04282884672284126,
-0.06189483776688576,
-0.02266419120132923,
-0.06911271065473557,
0.027210483327507973,
0.09648207575082779,
-0.023333469405770302,
-0.13757365942001343,
0.19999046623706818,
-0.13818593323230743,
-0.12353190779685974,
0.06346575915813446,
-0.04891437664628029,
-0.15992289781570435,
-0.07947571575641632,
0.008629041723906994,
0.04821273684501648,
0.03661657124757767,
-0.022673267871141434,
0.01430271752178669,
-0.06295191496610641,
0.05775725841522217,
-0.15359193086624146,
-0.05025981366634369,
-0.010215706191956997,
0.058154862374067307,
0.21676558256149292,
0.030217614024877548,
0.04955923929810524,
0.11769293993711472,
-0.04075371101498604,
-0.07566004246473312,
0.050007205456495285,
0.05154469981789589,
-0.08708591014146805,
0.025545869022607803,
0.14586986601352692,
0.01283678412437439,
0.0002652979164849967,
0.02648366615176201,
-0.047881484031677246,
0.005014041438698769,
0.10796307027339935,
-0.027309074997901917,
-0.045618936419487,
0.1362164318561554,
-0.11860843002796173,
0.09921041876077652,
0.18596303462982178,
-0.014095744118094444,
-0.01212093885987997,
-0.11923959106206894,
0.11877583712339401,
0.018123572692275047,
-0.06992512941360474,
0.05566268786787987,
-0.10084456205368042,
-0.06886649131774902,
-0.035330332815647125,
-0.026458432897925377,
-0.09670501202344894,
-0.012323003262281418,
-0.1154233068227768,
0.027184559032320976,
-0.006118519697338343,
0.12299584597349167,
0.12225335091352463,
0.019482852891087532,
0.01460269931703806,
-0.10831508785486221,
0.039778079837560654,
0.06850502640008926,
-0.12364806979894638,
-0.0789976492524147
] |
null | null |
transformers
|
Swedish RoBERTa
## Model series
This model is part of a series of models trained on TPU with Flax/Jax during the Hugging Face Flax/Jax challenge.
## Gpt models
## Swedish Gpt
https://huggingface.co/birgermoell/swedish-gpt/
## Swedish gpt wiki
https://huggingface.co/flax-community/swe-gpt-wiki
## Nordic gpt wiki
https://huggingface.co/flax-community/nordic-gpt-wiki
## Dansk gpt wiki
https://huggingface.co/flax-community/dansk-gpt-wiki
## Norsk gpt wiki
https://huggingface.co/flax-community/norsk-gpt-wiki
## Roberta models
## Nordic Roberta Wiki
https://huggingface.co/flax-community/nordic-roberta-wiki
## Swe Roberta Wiki Oscar
https://huggingface.co/flax-community/swe-roberta-wiki-oscar
## Roberta Swedish Scandi
https://huggingface.co/birgermoell/roberta-swedish-scandi
## Roberta Swedish
https://huggingface.co/birgermoell/roberta-swedish
## Swedish T5 model
https://huggingface.co/birgermoell/t5-base-swedish
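## Example usage
This card describes the Roberta Swedish masked-language model. A minimal fill-mask sketch, assuming the standard `transformers` pipeline API and the `<mask>` token used in the card's widget example, could look like this:

```python
from transformers import pipeline

# Minimal fill-mask sketch; the sentence is the widget example from this model card.
unmasker = pipeline("fill-mask", model="birgermoell/roberta-swedish")
print(unmasker("Var kan jag hitta någon <mask> talar engelska?"))
```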
|
{"widget": [{"text": "Var kan jag hitta n\u00e5gon <mask> talar engelska?"}]}
|
fill-mask
|
birgermoell/roberta-swedish
|
[
"transformers",
"pytorch",
"jax",
"tensorboard",
"roberta",
"fill-mask",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #jax #tensorboard #roberta #fill-mask #autotrain_compatible #endpoints_compatible #region-us
|
Swedish RoBERTa
## Model series
This model is part of a series of models training on TPU with Flax Jax during Huggingface Flax/Jax challenge.
## Gpt models
## Swedish Gpt
URL
## Swedish gpt wiki
URL
# Nordic gpt wiki
URL
## Dansk gpt wiki
URL
## Norsk gpt wiki
URL
## Roberta models
## Nordic Roberta Wiki
URL
## Swe Roberta Wiki Oscar
URL
## Roberta Swedish Scandi
URL
## Roberta Swedish
URL
## Swedish T5 model
URL
|
[
"## Model series\nThis model is part of a series of models training on TPU with Flax Jax during Huggingface Flax/Jax challenge.",
"## Gpt models",
"## Swedish Gpt\nURL",
"## Swedish gpt wiki\nURL",
"# Nordic gpt wiki\nURL",
"## Dansk gpt wiki\nURL",
"## Norsk gpt wiki\nURL",
"## Roberta models",
"## Nordic Roberta Wiki\nURL",
"## Swe Roberta Wiki Oscar\nURL",
"## Roberta Swedish Scandi\nURL",
"## Roberta Swedish\nURL",
"## Swedish T5 model\nURL"
] |
[
"TAGS\n#transformers #pytorch #jax #tensorboard #roberta #fill-mask #autotrain_compatible #endpoints_compatible #region-us \n",
"## Model series\nThis model is part of a series of models training on TPU with Flax Jax during Huggingface Flax/Jax challenge.",
"## Gpt models",
"## Swedish Gpt\nURL",
"## Swedish gpt wiki\nURL",
"# Nordic gpt wiki\nURL",
"## Dansk gpt wiki\nURL",
"## Norsk gpt wiki\nURL",
"## Roberta models",
"## Nordic Roberta Wiki\nURL",
"## Swe Roberta Wiki Oscar\nURL",
"## Roberta Swedish Scandi\nURL",
"## Roberta Swedish\nURL",
"## Swedish T5 model\nURL"
] |
[
44,
32,
4,
5,
6,
6,
6,
6,
4,
6,
7,
7,
5,
6
] |
[
"passage: TAGS\n#transformers #pytorch #jax #tensorboard #roberta #fill-mask #autotrain_compatible #endpoints_compatible #region-us \n## Model series\nThis model is part of a series of models training on TPU with Flax Jax during Huggingface Flax/Jax challenge.## Gpt models## Swedish Gpt\nURL## Swedish gpt wiki\nURL# Nordic gpt wiki\nURL## Dansk gpt wiki\nURL## Norsk gpt wiki\nURL## Roberta models## Nordic Roberta Wiki\nURL## Swe Roberta Wiki Oscar\nURL## Roberta Swedish Scandi\nURL## Roberta Swedish\nURL## Swedish T5 model\nURL"
] |
[
-0.03859005495905876,
0.12813186645507812,
0.0003573751309886575,
0.16056302189826965,
0.10411304235458374,
0.07355877012014389,
0.11797530949115753,
0.13116776943206787,
0.03267701342701912,
-0.015240282751619816,
0.16085170209407806,
0.07587215304374695,
0.1028071716427803,
0.09465815871953964,
0.09360230714082718,
-0.44559940695762634,
0.022824112325906754,
-0.00396506255492568,
-0.08046659082174301,
0.08557072281837463,
0.0954967737197876,
0.0007923492812551558,
0.08004525303840637,
0.02631070464849472,
-0.0693405345082283,
0.0016671051271259785,
0.02561711147427559,
-0.062037114053964615,
0.15623290836811066,
0.053153812885284424,
0.040672771632671356,
0.05642719566822052,
0.1444716602563858,
-0.08679339289665222,
0.04239388555288315,
0.003692427184432745,
-0.07051244378089905,
0.041434165090322495,
-0.0008813785971142352,
-0.008022602647542953,
0.22761648893356323,
0.0920478031039238,
0.045338090509176254,
0.024708060547709465,
-0.09460049122571945,
-0.2368440181016922,
-0.02633199840784073,
0.12081700563430786,
-0.04943405091762543,
0.06951846927404404,
-0.02325589768588543,
0.22984561324119568,
-0.12437038123607635,
0.09196913987398148,
0.18496054410934448,
-0.3084075152873993,
-0.08939006924629211,
0.16997665166854858,
0.1417602151632309,
0.03921780362725258,
-0.0750785544514656,
0.08618777990341187,
0.020839553326368332,
0.07198052108287811,
0.06887215375900269,
-0.0584585927426815,
-0.0104737663641572,
-0.015624798834323883,
-0.11633037030696869,
0.09341394156217575,
0.18810857832431793,
0.05732448026537895,
-0.016793962568044662,
-0.03429674357175827,
0.01280747726559639,
0.06400177627801895,
-0.036009758710861206,
-0.06846986711025238,
-0.006656437646597624,
-0.021314838901162148,
-0.06703741103410721,
-0.04263685643672943,
-0.08240809291601181,
-0.07036611437797546,
0.004981856793165207,
0.08604682236909866,
0.004595910664647818,
0.0447111651301384,
-0.04230532422661781,
0.036777760833501816,
-0.1386597901582718,
-0.10496509075164795,
-0.017484499141573906,
-0.0800853744149208,
-0.04338467866182327,
-0.032141491770744324,
0.013671047054231167,
-0.10111896693706512,
0.07066716998815536,
0.1636427938938141,
0.09144230931997299,
0.031146930530667305,
0.12485045939683914,
0.039173778146505356,
-0.013691931962966919,
0.10377343744039536,
-0.16249725222587585,
-0.11040261387825012,
-0.03370228037238121,
-0.04026404768228531,
-0.03339619189500809,
-0.03974873945116997,
-0.06401832401752472,
-0.06316792219877243,
0.012933681719005108,
-0.014479819685220718,
-0.008429386653006077,
0.08825884759426117,
-0.03059331513941288,
0.0019036338198930025,
0.006928944028913975,
-0.024977857246994972,
-0.00641363300383091,
-0.04384191334247589,
-0.00965722743421793,
0.047117963433265686,
0.07607581466436386,
0.032861385494470596,
-0.03825035318732262,
0.10897514224052429,
-0.1265045702457428,
-0.04200773686170578,
0.020090196281671524,
-0.06541813164949417,
0.03918073698878288,
-0.11709911376237869,
0.03174382448196411,
-0.16751213371753693,
-0.0962105467915535,
-0.038998063653707504,
0.10302815586328506,
-0.08839017897844315,
-0.06827260553836823,
-0.015362769365310669,
-0.06964940577745438,
0.02226482704281807,
0.02469443716108799,
0.057360902428627014,
-0.047627147287130356,
0.05722749978303909,
-0.1139351949095726,
0.14535246789455414,
0.012217344716191292,
-0.012977764941751957,
-0.09145712852478027,
-0.03564590960741043,
-0.19949312508106232,
0.0016361583257094026,
-0.1312732994556427,
0.0781228169798851,
-0.08799717575311661,
-0.09592396020889282,
-0.015109281055629253,
0.05841255933046341,
0.05852261930704117,
0.1988804191350937,
-0.1495513916015625,
-0.02492341957986355,
0.29523566365242004,
-0.0914793461561203,
-0.07487893104553223,
0.1184849888086319,
0.011689318343997002,
0.1607232242822647,
0.03600272163748741,
0.16040436923503876,
0.05584750324487686,
-0.08483726531267166,
0.10282841324806213,
0.042356938123703,
-0.07317642122507095,
-0.034623969346284866,
0.05452535301446915,
0.006042741239070892,
-0.09112696349620819,
0.023272356018424034,
-0.08475418388843536,
0.05982465669512749,
-0.042964790016412735,
-0.04829195514321327,
0.04037591069936752,
-0.09092673659324646,
0.1065719798207283,
0.10633215308189392,
0.03166335076093674,
-0.08639346063137054,
-0.15925976634025574,
-0.12275312840938568,
0.05168168619275093,
-0.03714729845523834,
-0.01671765372157097,
-0.05541040375828743,
0.0845918282866478,
0.002384844934567809,
-0.021223219111561775,
-0.035467907786369324,
-0.12212979793548584,
-0.06494708359241486,
0.0651356652379036,
0.03653861582279205,
0.13319191336631775,
0.14781315624713898,
0.024025218561291695,
-0.044962186366319656,
0.03860509395599365,
-0.005744251422584057,
0.005494589917361736,
0.010341353714466095,
-0.2236405313014984,
0.007974195294082165,
-0.06373037397861481,
0.06476318836212158,
-0.1405794322490692,
-0.021825339645147324,
-0.021028220653533936,
0.14952142536640167,
0.029160641133785248,
-0.05225774273276329,
-0.006476543378084898,
-0.0015243198722600937,
0.009195425547659397,
-0.051469963043928146,
0.07297780364751816,
-0.0247395820915699,
-0.09107022732496262,
0.11694549769163132,
0.0518876276910305,
0.08763789385557175,
0.08043452352285385,
-0.05751219391822815,
-0.19746436178684235,
0.10343648493289948,
-0.03160326927900314,
0.023150641471147537,
0.006823590490967035,
0.01618250645697117,
-0.019508227705955505,
-0.008415231481194496,
0.07415162771940231,
-0.04524493217468262,
-0.007884006947278976,
0.0721953734755516,
-0.07830771803855896,
-0.02820122241973877,
0.14177171885967255,
0.1898636668920517,
-0.16817903518676758,
0.06388050317764282,
-0.040095217525959015,
-0.06686621159315109,
0.2950122058391571,
0.053718313574790955,
-0.016646001487970352,
0.0070950924418866634,
-0.0874427855014801,
0.025197286158800125,
0.20545798540115356,
-0.06846235692501068,
0.00503476383164525,
0.02814772166311741,
-0.030856436118483543,
-0.029865022748708725,
-0.07927216589450836,
-0.14299197494983673,
-0.0007742841844446957,
-0.01954270713031292,
-0.01150632556527853,
0.12081381678581238,
-0.10750868916511536,
0.09678423404693604,
0.030448181554675102,
-0.1638210266828537,
0.008923286572098732,
0.025180000811815262,
-0.10965573787689209,
0.19950732588768005,
-0.008259834721684456,
-0.14142106473445892,
-0.11825620383024216,
-0.00010610442404868081,
0.018259400501847267,
-0.015157186426222324,
0.07102381438016891,
-0.11622975766658783,
-0.08768542110919952,
-0.026771200820803642,
0.0512237474322319,
0.03299811854958534,
0.01841573975980282,
-0.11550453305244446,
-0.007672717794775963,
-0.030030367895960808,
-0.097312331199646,
-0.005087616387754679,
-0.030639436095952988,
-0.010928613133728504,
0.052943143993616104,
-0.11895311623811722,
0.10663249343633652,
0.0542735680937767,
-0.02013607695698738,
0.015551377087831497,
0.030315639451146126,
0.24044394493103027,
-0.10743365436792374,
0.13998650014400482,
0.06527058035135269,
-0.03389713913202286,
0.050865888595581055,
0.1127837523818016,
0.062181875109672546,
-0.013612538576126099,
-0.023759089410305023,
0.012822465971112251,
-0.09734830260276794,
-0.151811882853508,
-0.04106989875435829,
-0.005979984998703003,
0.06987616419792175,
0.07570097595453262,
0.04391615837812424,
0.020354649052023888,
0.18514229357242584,
0.060744758695364,
-0.07061304897069931,
-0.030475551262497902,
0.0405261293053627,
-0.09341474622488022,
-0.033905964344739914,
0.09255672246217728,
-0.08543572574853897,
-0.07597749680280685,
0.05314101651310921,
0.0044189016334712505,
0.025449182838201523,
-0.004227912984788418,
-0.028540626168251038,
0.06265955418348312,
0.14366312325000763,
0.05472506582736969,
0.08451279997825623,
0.02968812733888626,
-0.07921776920557022,
0.012052024714648724,
-0.04721548408269882,
0.025778623297810555,
0.08706442266702652,
-0.006318965926766396,
-0.0759073868393898,
-0.048211719840765,
-0.05810888856649399,
0.002721073105931282,
0.09309184551239014,
0.0969875231385231,
-0.23986917734146118,
-0.07148832827806473,
0.017645301297307014,
-0.049559708684682846,
-0.04068543389439583,
-0.0059553650207817554,
0.03173404559493065,
-0.1706743836402893,
0.0874001681804657,
-0.06255411356687546,
0.08018173277378082,
0.034305982291698456,
-0.005518288817256689,
0.08906061202287674,
0.07895200699567795,
-0.05575304850935936,
0.09773962944746017,
-0.11109721660614014,
0.25478431582450867,
-0.037657056003808975,
0.03646586090326309,
-0.11529721319675446,
-0.03247570991516113,
0.05330021679401398,
0.1277153193950653,
0.27375271916389465,
0.023013044148683548,
-0.006955169141292572,
-0.022533347830176353,
-0.005249406676739454,
0.033836401998996735,
-0.020517569035291672,
-0.07363659888505936,
0.051554013043642044,
-0.009085659869015217,
-0.018721535801887512,
-0.042158547788858414,
0.039494022727012634,
-0.07447779178619385,
-0.01757192239165306,
0.030310645699501038,
-0.09839314967393875,
-0.041153669357299805,
-0.008467789739370346,
-0.12231529504060745,
-0.244882732629776,
0.14066484570503235,
0.052340518683195114,
-0.0652422234416008,
-0.14866025745868683,
0.015695616602897644,
0.08059605211019516,
-0.07062286138534546,
0.017636891454458237,
0.024043239653110504,
0.03067074529826641,
-0.02459288202226162,
-0.045946154743433,
0.07560856640338898,
-0.08255920559167862,
-0.09157293289899826,
-0.04694044217467308,
0.04449320584535599,
0.06106497719883919,
0.04081153869628906,
0.048142291605472565,
0.0385536290705204,
-0.030216841027140617,
-0.11519411951303482,
0.09832555800676346,
-0.06845389306545258,
-0.057091280817985535,
-0.0751919224858284,
-0.031505879014730453,
0.023334860801696777,
-0.02361542545258999,
-0.012133545242249966,
0.1035388633608818,
0.2265346497297287,
-0.15180574357509613,
0.07860824465751648,
0.1286536455154419,
0.012013762257993221,
-0.2919645607471466,
-0.02812854014337063,
-0.0275055393576622,
0.024896597489714622,
0.13457387685775757,
-0.09862061589956284,
0.14197932183742523,
0.0015542992623522878,
-0.014644741080701351,
0.036470212042331696,
-0.16911745071411133,
-0.10760079324245453,
0.09115248918533325,
0.16000309586524963,
0.1944912075996399,
-0.06658351421356201,
0.0008137012482620776,
0.002411527093499899,
-0.17785821855068207,
0.04095388948917389,
-0.01584639400243759,
0.09378354251384735,
-0.01683039590716362,
0.06837191432714462,
0.03254089131951332,
-0.03714185208082199,
0.09451184421777725,
-0.06748663634061813,
-0.0528891459107399,
-0.17115601897239685,
0.02702973037958145,
0.05035215616226196,
0.004998203366994858,
0.12406525760889053,
-0.07970833778381348,
-0.002464386634528637,
-0.09439439326524734,
-0.039210978895425797,
-0.09001395106315613,
0.10459567606449127,
-0.027961544692516327,
-0.13060972094535828,
-0.018501847982406616,
0.11310649663209915,
0.04081312194466591,
-0.030350621789693832,
0.009926383383572102,
-0.08945616334676743,
0.18761268258094788,
-0.029932444915175438,
0.17204992473125458,
0.031208284199237823,
-0.051110707223415375,
-0.022242875769734383,
-0.09769995510578156,
0.06657661497592926,
-0.1270560622215271,
-0.020391734316945076,
0.08956789970397949,
0.03809630870819092,
0.04178602620959282,
0.03495414927601814,
-0.1273012012243271,
-0.05283179506659508,
0.1662893295288086,
-0.2193371206521988,
-0.09165854752063751,
-0.09436340630054474,
-0.22206419706344604,
0.022519245743751526,
0.0009137451997958124,
0.1491863876581192,
-0.10057428479194641,
-0.021331805735826492,
-0.006330172065645456,
0.015465689823031425,
-0.08440467715263367,
0.06526299566030502,
0.15819740295410156,
0.045727066695690155,
-0.06987915188074112,
0.018150467425584793,
-0.019823944196105003,
-0.006214876659214497,
0.0333699956536293,
0.2467273771762848,
-0.08952634781599045,
-0.11162484437227249,
0.07890493422746658,
0.17701761424541473,
-0.20639821887016296,
0.0037782308645546436,
-0.11076881736516953,
-0.0556059256196022,
-0.005488730035722256,
0.16015775501728058,
0.05068422108888626,
-0.04527348652482033,
0.02340294048190117,
-0.02997695654630661,
-0.08098506182432175,
0.056841444224119186,
0.054940614849328995,
0.015083249658346176,
-0.15947110950946808,
0.10197370499372482,
-0.045907847583293915,
0.09259408712387085,
-0.08934144675731659,
0.06434768438339233,
-0.15569032728672028,
-0.030545460060238838,
-0.019138216972351074,
-0.06944523751735687,
-0.06415668874979019,
0.016817711293697357,
-0.03581162169575691,
-0.07168782502412796,
-0.03401749208569527,
-0.009217752143740654,
-0.10126493126153946,
-0.016591837629675865,
0.018416019156575203,
-0.03991822525858879,
-0.04369506612420082,
-0.009857786819338799,
0.04400135204195976,
-0.02543136291205883,
0.08276556432247162,
-0.07396511733531952,
-0.0009216255857609212,
0.10939005762338638,
-0.1274840533733368,
0.08893466740846634,
-0.04637616127729416,
-0.05574719235301018,
0.02823304757475853,
0.0031776134856045246,
-0.01387892384082079,
-0.07470154762268066,
0.04488702863454819,
0.06062476336956024,
0.002445084508508444,
-0.07433179020881653,
0.01921141892671585,
0.03070615604519844,
-0.1169263944029808,
-0.06340723484754562,
0.052746083587408066,
0.09110301733016968,
-0.00414407579228282,
0.031931594014167786,
-0.055331144481897354,
0.08152683824300766,
-0.11975269019603729,
0.011671221815049648,
0.007581536658108234,
-0.06636220961809158,
0.053781118243932724,
0.017779812216758728,
0.03226352110505104,
-0.07346579432487488,
0.04751076176762581,
0.13618342578411102,
-0.03486085683107376,
0.015551460906863213,
0.009783647023141384,
0.10345661640167236,
0.002431632485240698,
0.16352717578411102,
0.008433074690401554,
-0.037896595895290375,
-0.14857341349124908,
0.07289450615644455,
-0.07245419919490814,
-0.0012157484889030457,
0.11324811726808548,
-0.008431059308350086,
0.016207564622163773,
0.050288960337638855,
0.057663533836603165,
0.03535446897149086,
0.006629746872931719,
-0.027898890897631645,
0.06672871857881546,
0.028472034260630608,
-0.012923373840749264,
0.011698015034198761,
0.19649071991443634,
-0.08605928719043732,
0.013202291913330555,
-0.008810852654278278,
-0.06554964929819107,
-0.18703758716583252,
-0.2790617346763611,
-0.09696008265018463,
-0.05349592864513397,
0.0663595050573349,
-0.07356537133455276,
-0.02717762440443039,
0.02980879694223404,
0.05342138186097145,
-0.047175802290439606,
0.07667385041713715,
-0.04826948046684265,
-0.020153973251581192,
0.06784847378730774,
0.04728534817695618,
-0.0320388488471508,
0.0060717761516571045,
0.032620325684547424,
-0.060740333050489426,
0.022658100351691246,
-0.11346036940813065,
-0.07636981457471848,
-0.08560901135206223,
-0.010734817944467068,
0.012717998586595058,
-0.08260035514831543,
-0.001056720968335867,
0.010646325536072254,
0.03828096017241478,
0.021856654435396194,
0.030061835423111916,
0.006289226934313774,
-0.03649692237377167,
0.17209209501743317,
0.006004802882671356,
-0.0005635405541397631,
-0.19010400772094727,
0.13270512223243713,
-0.11201038956642151,
0.049776431173086166,
-0.011606087908148766,
-0.06372194737195969,
0.08385423570871353,
0.3016749918460846,
0.2703028619289398,
-0.04968893155455589,
0.028596889227628708,
0.04072556644678116,
-0.015679268166422844,
0.0024417468812316656,
0.11311886459589005,
0.003884541802108288,
0.16026170551776886,
-0.11312000453472137,
0.016456952318549156,
-0.048273149877786636,
-0.012757595628499985,
-0.014696476981043816,
0.014175980351865292,
0.1152324229478836,
-0.00304784975014627,
-0.11134515702724457,
0.14045467972755432,
-0.1542629450559616,
-0.05030282214283943,
0.08834846317768097,
-0.10064700990915298,
-0.1476714313030243,
-0.06777181476354599,
-0.013529369607567787,
0.06365765631198883,
0.07396925985813141,
-0.004222323186695576,
0.006687936373054981,
-0.02479853294789791,
0.06302854418754578,
-0.1427316665649414,
-0.075743168592453,
0.02380012720823288,
-0.001896267756819725,
0.19155941903591156,
-0.04330193251371384,
0.00031904905335977674,
0.13409575819969177,
-0.0017914731288328767,
-0.04720843955874443,
0.011807205155491829,
0.056411389261484146,
-0.04136135056614876,
-0.004908952862024307,
0.17739494144916534,
-0.003873322857543826,
-0.04320402443408966,
0.034953970462083817,
-0.1297094225883484,
0.0324663482606411,
0.013768395408987999,
-0.04708513990044594,
-0.0018549973610788584,
0.18040011823177338,
-0.10376957803964615,
0.123368039727211,
0.21703237295150757,
-0.013440309092402458,
-0.02937263436615467,
-0.08773340284824371,
0.06525062024593353,
0.04398297891020775,
-0.13418371975421906,
-0.018973655998706818,
-0.07474160194396973,
-0.0772438496351242,
-0.10870709270238876,
-0.046903129667043686,
-0.16233225166797638,
-0.03162291646003723,
-0.15658356249332428,
-0.025283733382821083,
-0.05171818286180496,
0.06397856026887894,
0.14569087326526642,
0.04206547886133194,
0.005234277341514826,
-0.02245759591460228,
0.01924917846918106,
0.07901892066001892,
-0.15033970773220062,
-0.08812118321657181
] |
null | null |
transformers
|
# common-voice-vox-populi-swedish
Fine-tuned [facebook/wav2vec2-large-sv-voxpopuli](https://huggingface.co/facebook/wav2vec2-large-xlsr-53) on Swedish using the [Common Voice](https://huggingface.co/datasets/common_voice) dataset.
When using this model, make sure that your speech input is sampled at 16kHz.
## Usage
The model can be used directly (without a language model) as follows:
```python
import torch
import torchaudio
from datasets import load_dataset
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
test_dataset = load_dataset("common_voice", "sv-SE", split="test[:2%]").
processor = Wav2Vec2Processor.from_pretrained("birgermoell/birgermoell/common-voice-vox-populi-swedish")
model = Wav2Vec2ForCTC.from_pretrained("birgermoell/common-voice-vox-populi-swedish")
resampler = torchaudio.transforms.Resample(48_000, 16_000)
# Preprocessing the datasets.
# We need to read the aduio files as arrays
def speech_file_to_array_fn(batch):
\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\tspeech_array, sampling_rate = torchaudio.load(batch["path"])
\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\tbatch["speech"] = resampler(speech_array).squeeze().numpy()
\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\treturn batch
test_dataset = test_dataset.map(speech_file_to_array_fn)
inputs = processor(test_dataset["speech"][:2], sampling_rate=16_000, return_tensors="pt", padding=True)
with torch.no_grad():
    logits = model(inputs.input_values, attention_mask=inputs.attention_mask).logits
predicted_ids = torch.argmax(logits, dim=-1)
print("Prediction:", processor.batch_decode(predicted_ids))
print("Reference:", test_dataset["sentence"][:2])
```
## Evaluation
The model can be evaluated as follows on the Swedish test data of Common Voice.
```python
import torch
import torchaudio
from datasets import load_dataset, load_metric
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
import re
test_dataset = load_dataset("common_voice", "sv-SE", split="test")
wer = load_metric("wer")
processor = Wav2Vec2Processor.from_pretrained("birgermoell/common-voice-vox-populi-swedish")
model = Wav2Vec2ForCTC.from_pretrained("birgermoell/common-voice-vox-populi-swedish")
model.to("cuda")
chars_to_ignore_regex = '[\,\?\.\!\-\;\:\"\“]'
resampler = torchaudio.transforms.Resample(48_000, 16_000)
# Preprocessing the datasets.
# We need to read the audio files as arrays
def speech_file_to_array_fn(batch):
    batch["sentence"] = re.sub(chars_to_ignore_regex, '', batch["sentence"]).lower()
    speech_array, sampling_rate = torchaudio.load(batch["path"])
    batch["speech"] = resampler(speech_array).squeeze().numpy()
    return batch
test_dataset = test_dataset.map(speech_file_to_array_fn)
# Preprocessing the datasets.
# We need to read the audio files as arrays
def evaluate(batch):
    inputs = processor(batch["speech"], sampling_rate=16_000, return_tensors="pt", padding=True)
    with torch.no_grad():
        logits = model(inputs.input_values.to("cuda"), attention_mask=inputs.attention_mask.to("cuda")).logits
    pred_ids = torch.argmax(logits, dim=-1)
    batch["pred_strings"] = processor.batch_decode(pred_ids)
    return batch

# The original snippet is truncated here; the standard ending of this evaluation template is:
result = test_dataset.map(evaluate, batched=True, batch_size=8)
print("WER: {:2f}".format(100 * wer.compute(predictions=result["pred_strings"], references=result["sentence"])))
```
**Test Result**:
WER: 22.684600
|
{"language": "et", "license": "apache-2.0", "tags": ["audio", "automatic-speech-recognition", "speech", "xlsr-fine-tuning-week"], "datasets": ["common_voice"], "model-index": [{"name": "common-voice-vox-populi-swedish by Birger Moell", "results": [{"task": {"type": "automatic-speech-recognition", "name": "Speech Recognition"}, "dataset": {"name": "Common Voice Vox Populi Swedish", "type": "common_voice", "args": "et"}, "metrics": [{"type": "wer", "value": 36.951816, "name": "Test WER"}]}]}]}
|
automatic-speech-recognition
|
birgermoell/swedish-common-voice-vox-voxpopuli
|
[
"transformers",
"pytorch",
"jax",
"wav2vec2",
"automatic-speech-recognition",
"audio",
"speech",
"xlsr-fine-tuning-week",
"et",
"dataset:common_voice",
"license:apache-2.0",
"model-index",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"et"
] |
TAGS
#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #xlsr-fine-tuning-week #et #dataset-common_voice #license-apache-2.0 #model-index #endpoints_compatible #region-us
|
# common-voice-vox-populi-swedish
Fine-tuned facebook/wav2vec2-large-sv-voxpopuli in Swedish using the Common Voice
When using this model, make sure that your speech input is sampled at 16kHz.
## Usage
The model can be used directly (without a language model) as follows:
## Evaluation
The model can be evaluated as follows on the Swedish test data of Common Voice.
Test Result:
WER: 22.684600
|
[
"# common-voice-vox-populi-swedish\n\nFine-tuned facebook/wav2vec2-large-sv-voxpopuli in Swedish using the Common Voice\nWhen using this model, make sure that your speech input is sampled at 16kHz.",
"## Usage\n\nThe model can be used directly (without a language model) as follows:",
"## Evaluation\n\nThe model can be evaluated as follows on the Swedish test data of Common Voice.\n\n\n\nTest Result:\nWER: 22.684600"
] |
[
"TAGS\n#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #xlsr-fine-tuning-week #et #dataset-common_voice #license-apache-2.0 #model-index #endpoints_compatible #region-us \n",
"# common-voice-vox-populi-swedish\n\nFine-tuned facebook/wav2vec2-large-sv-voxpopuli in Swedish using the Common Voice\nWhen using this model, make sure that your speech input is sampled at 16kHz.",
"## Usage\n\nThe model can be used directly (without a language model) as follows:",
"## Evaluation\n\nThe model can be evaluated as follows on the Swedish test data of Common Voice.\n\n\n\nTest Result:\nWER: 22.684600"
] |
[
80,
60,
20,
30
] |
[
"passage: TAGS\n#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #xlsr-fine-tuning-week #et #dataset-common_voice #license-apache-2.0 #model-index #endpoints_compatible #region-us \n# common-voice-vox-populi-swedish\n\nFine-tuned facebook/wav2vec2-large-sv-voxpopuli in Swedish using the Common Voice\nWhen using this model, make sure that your speech input is sampled at 16kHz.## Usage\n\nThe model can be used directly (without a language model) as follows:## Evaluation\n\nThe model can be evaluated as follows on the Swedish test data of Common Voice.\n\n\n\nTest Result:\nWER: 22.684600"
] |
[
-0.14323340356349945,
0.023167211562395096,
-0.0027422276325523853,
-0.053139667958021164,
0.05910209193825722,
-0.10184963047504425,
0.11743062734603882,
0.05266086757183075,
0.05072161927819252,
-0.0059853410348296165,
0.02828156389296055,
0.04434414952993393,
0.07132109254598618,
0.08041947335004807,
-0.006748247426003218,
-0.24420325458049774,
0.07070187479257584,
-0.01950695738196373,
0.10088792443275452,
0.10836713016033173,
0.11283590644598007,
-0.03849755600094795,
-0.026369668543338776,
0.12367022037506104,
-0.03371678292751312,
0.027304548770189285,
0.059158071875572205,
-0.1036435142159462,
0.14630740880966187,
0.08131717890501022,
-0.006396460346877575,
0.0734378844499588,
0.06287454813718796,
-0.15854182839393616,
0.014645561575889587,
-0.03528844937682152,
0.04716172814369202,
-0.016744745895266533,
0.04768066853284836,
0.028767498210072517,
0.1324012130498886,
0.10280714184045792,
-0.05528184771537781,
0.049493759870529175,
0.020150231197476387,
-0.2501426935195923,
0.0015074003022164106,
-0.0028676632791757584,
0.0689634382724762,
0.0862998366355896,
-0.0960373654961586,
0.10249964892864227,
-0.11357831209897995,
0.10773815214633942,
0.1077573224902153,
-0.21255478262901306,
-0.00820792093873024,
0.08406239002943039,
0.08343109488487244,
0.08793900907039642,
-0.008940419182181358,
0.08043446391820908,
0.016393491998314857,
0.045033421367406845,
-0.0010538179194554687,
-0.0689135193824768,
-0.160853311419487,
-0.04520045965909958,
-0.16240882873535156,
0.055134791880846024,
0.2640373706817627,
-0.006510019302368164,
-0.049638859927654266,
-0.14376607537269592,
0.019183192402124405,
0.09206702560186386,
-0.01697252318263054,
-0.11864407360553741,
0.011403475888073444,
0.002006412949413061,
0.0427490659058094,
-0.054466232657432556,
-0.10527174174785614,
-0.11100855469703674,
-0.0007283746963366866,
0.09117082506418228,
0.0069696418941020966,
0.01808219589293003,
-0.0538964718580246,
-0.017461854964494705,
-0.2024068385362625,
-0.036231473088264465,
-0.04039502143859863,
-0.020840246230363846,
-0.07409993559122086,
0.0196854155510664,
-0.10100336372852325,
-0.27810779213905334,
0.10139795392751694,
-0.05152905359864235,
0.04834238812327385,
0.007171782199293375,
0.003103365655988455,
0.05332231521606445,
0.08062153309583664,
0.15214607119560242,
-0.10235976427793503,
-0.10011574625968933,
0.003828127169981599,
-0.03732862323522568,
0.010365748777985573,
-0.020695021376013756,
-0.08619790524244308,
-0.0810774490237236,
0.05719355493783951,
0.013549167662858963,
-0.02811625599861145,
0.014675737358629704,
-0.030625663697719574,
-0.05104494094848633,
-0.04089369624853134,
-0.10989683866500854,
-0.04583001509308815,
0.04004082456231117,
0.02652466855943203,
0.15508927404880524,
0.03672510385513306,
0.046619441360235214,
-0.10931973904371262,
0.002097951713949442,
0.03332611918449402,
0.03379914537072182,
0.04676293954253197,
-0.10246028006076813,
0.020282190293073654,
-0.0644940584897995,
-0.010629281401634216,
-0.07932911068201065,
-0.03161713108420372,
-0.11586437374353409,
-0.013781190849840641,
0.009940740652382374,
-0.09840389341115952,
-0.11589451879262924,
-0.0003880027506966144,
-0.03264010697603226,
-0.11086707562208176,
-0.003675337415188551,
-0.06034782901406288,
0.03695247694849968,
-0.004632243420928717,
0.07993467152118683,
-0.0255939569324255,
0.08100753277540207,
-0.10386762768030167,
-0.06675498932600021,
-0.09416723996400833,
0.13946866989135742,
-0.12180545181035995,
-0.07735170423984528,
-0.09305325150489807,
-0.07531507313251495,
-0.009327529929578304,
0.1038937196135521,
0.02603917196393013,
0.0900457352399826,
-0.22517570853233337,
-0.13461565971374512,
0.1973448544740677,
-0.16082005202770233,
-0.016682131215929985,
0.19130776822566986,
0.06048782914876938,
0.06118268519639969,
0.18292200565338135,
0.315835565328598,
0.08623351901769638,
-0.16664519906044006,
0.03714868798851967,
0.0868297666311264,
-0.06363576650619507,
-0.08238554000854492,
0.06534703075885773,
-0.08793579787015915,
-0.02180074155330658,
0.05101833865046501,
-0.05677707865834236,
0.033713486045598984,
0.002340947277843952,
-0.043671686202287674,
0.01763550564646721,
-0.0920606330037117,
0.05225592106580734,
0.03290143609046936,
-0.010384195484220982,
-0.04065488651394844,
-0.031707268208265305,
0.10269973427057266,
0.0613304078578949,
-0.12319869548082352,
0.06523558497428894,
-0.08947639167308807,
0.10919596999883652,
-0.11200052499771118,
0.01600174978375435,
-0.13066907227039337,
0.13873673975467682,
-0.06438937038183212,
0.04458627104759216,
0.1005893424153328,
0.22208921611309052,
0.023692017421126366,
-0.030097855255007744,
-0.024183955043554306,
0.022846024483442307,
0.07439704239368439,
-0.002292647724971175,
-0.014566339552402496,
-0.1161484643816948,
-0.008632035925984383,
-0.063357874751091,
0.052518196403980255,
-0.055886056274175644,
-0.06675635278224945,
0.028085265308618546,
-0.006100416649132967,
-0.025797231122851372,
-0.006257503759115934,
0.09076528996229172,
0.08776366710662842,
0.06446484476327896,
0.06585881859064102,
0.058272961527109146,
-0.01858353801071644,
-0.11905018240213394,
0.219826802611351,
-0.06195630878210068,
0.02046256698668003,
0.10069450736045837,
-0.06615249067544937,
0.020879670977592468,
0.06915152072906494,
0.0071901180781424046,
-0.010738635435700417,
-0.026163872331380844,
-0.05033743008971214,
0.25161275267601013,
0.05058368295431137,
0.07600521296262741,
-0.10232561826705933,
0.03444186598062515,
0.036595989018678665,
-0.13374756276607513,
0.02514980547130108,
0.09189052134752274,
0.002357294550165534,
0.0010555194457992911,
-0.0021502692252397537,
-0.07946587353944778,
-0.08880466222763062,
0.2796817421913147,
-0.02356250025331974,
-0.08674021810293198,
0.055422961711883545,
-0.0706363394856453,
-0.058155108243227005,
0.05754294991493225,
-0.2248072326183319,
-0.04778999835252762,
0.05576307326555252,
0.028902003541588783,
0.07773315906524658,
-0.07958793640136719,
0.0268770270049572,
-0.000986717757768929,
-0.13519272208213806,
-0.1169075220823288,
0.08657211810350418,
-0.04440009221434593,
0.007844546809792519,
-0.09900112450122833,
-0.13746318221092224,
0.0010224877623841166,
-0.04145216569304466,
-0.17140604555606842,
0.0942390188574791,
-0.0691392794251442,
-0.276704877614975,
-0.14892055094242096,
-0.005903644021600485,
-0.06287741661071777,
0.02857358194887638,
0.12089066952466965,
-0.10877665877342224,
-0.05382894352078438,
-0.018224237486720085,
0.13552062213420868,
0.0690595880150795,
-0.053667373955249786,
-0.06663273274898529,
-0.05555219575762749,
0.07410020381212234,
-0.1470710039138794,
-0.020224230363965034,
-0.0841333344578743,
-0.023254061117768288,
0.004011747892946005,
-0.03086353838443756,
0.015959633514285088,
0.18784192204475403,
0.05429323390126228,
0.0190828088670969,
0.011680432595312595,
0.2095033824443817,
-0.05593634024262428,
-0.027986103668808937,
0.18829073011875153,
-0.00009338268864667043,
-0.005182567983865738,
0.12244812399148941,
0.03003562241792679,
-0.029749583452939987,
-0.05474497377872467,
0.021448889747262,
-0.06413081288337708,
-0.21836180984973907,
-0.17149507999420166,
-0.03919941186904907,
-0.049219921231269836,
-0.08998280763626099,
-0.0027632606215775013,
-0.0002081935526803136,
0.02455243095755577,
0.006266438402235508,
-0.16895976662635803,
0.05078365281224251,
-0.020741106942296028,
0.20776091516017914,
-0.056840550154447556,
0.12402705103158951,
-0.030382949858903885,
-0.026712261140346527,
0.04143262282013893,
-0.034403663128614426,
0.04522893950343132,
0.09650231152772903,
0.03776061162352562,
0.07867258787155151,
0.08503261208534241,
0.09531492739915848,
0.07903586328029633,
-0.03966105356812477,
-0.01620248705148697,
0.016573777422308922,
-0.07185832411050797,
-0.0874389111995697,
0.03653063625097275,
0.1827574372291565,
-0.08814956992864609,
-0.003919417038559914,
0.005410713143646717,
0.021949153393507004,
0.20246946811676025,
0.09197841584682465,
-0.1092018336057663,
-0.07020105421543121,
-0.03593217208981514,
-0.11218901723623276,
0.015856625512242317,
0.05377587676048279,
0.09423182904720306,
-0.14326997101306915,
0.10524534434080124,
0.005083004478365183,
0.07912267744541168,
0.014004180207848549,
0.07178229838609695,
-0.1255900114774704,
0.04314056783914566,
0.02895205281674862,
0.08613846451044083,
-0.12166334688663483,
0.1863071471452713,
-0.002268719021230936,
0.09447095543146133,
-0.06241341680288315,
0.0016488621477037668,
-0.012926355004310608,
0.11274591833353043,
0.15085630118846893,
0.050958458334207535,
0.0458223782479763,
-0.027246978133916855,
-0.06883130967617035,
0.0654972642660141,
-0.04023555666208267,
0.04228818044066429,
0.01855877786874771,
0.010998675599694252,
-0.011420396156609058,
-0.04338962212204933,
-0.02965565398335457,
-0.07277414947748184,
-0.013707879930734634,
0.015093025751411915,
0.17642077803611755,
0.133416086435318,
0.003181667299941182,
-0.09946445375680923,
-0.2373763471841812,
0.0801699236035347,
-0.03675805404782295,
-0.13048872351646423,
-0.043734751641750336,
-0.08413806557655334,
0.10007095336914062,
-0.05072195827960968,
-0.032338883727788925,
0.08295014500617981,
0.0761069729924202,
-0.07799877226352692,
-0.0023164169397205114,
0.05359315872192383,
-0.08463450521230698,
-0.06288713961839676,
0.054387617856264114,
0.22295545041561127,
0.08762660622596741,
0.06332938373088837,
0.08689739555120468,
0.02314108982682228,
0.009616219438612461,
-0.05968393757939339,
0.0076095666736364365,
0.09251009672880173,
-0.15647156536579132,
0.0034122734796255827,
0.09111432731151581,
-0.18272462487220764,
-0.10634306818246841,
-0.011011047288775444,
0.1668824404478073,
0.04536684602499008,
-0.03838860243558884,
0.22097286581993103,
0.3254353702068329,
-0.05740131810307503,
-0.24039693176746368,
-0.12279404699802399,
0.10827989876270294,
0.09883210062980652,
0.00999967660754919,
-0.12498960644006729,
0.16958530247211456,
0.012060137465596199,
-0.046280235052108765,
-0.14270614087581635,
-0.19084098935127258,
-0.14083212614059448,
0.18444302678108215,
-0.06512735784053802,
0.19077280163764954,
0.0533757321536541,
-0.05864565074443817,
-0.025214197114109993,
-0.020919131115078926,
-0.046282462775707245,
-0.08313899487257004,
0.11228734254837036,
0.0453684963285923,
0.13046857714653015,
0.07245784997940063,
0.0024782679975032806,
0.09623415768146515,
0.07905959337949753,
-0.06628984212875366,
-0.019777711480855942,
0.10136976838111877,
-0.0015679833013564348,
0.047896191477775574,
0.21197247505187988,
-0.09678675979375839,
-0.00435931421816349,
-0.04037108272314072,
-0.11995868384838104,
-0.11868937313556671,
0.11110597103834152,
0.05079809948801994,
-0.04113928601145744,
0.051890961825847626,
-0.03734523057937622,
0.012948627583682537,
0.010140052065253258,
0.030477602034807205,
-0.24040080606937408,
0.01911134272813797,
0.19827783107757568,
0.22940248250961304,
-0.150637686252594,
-0.14666657149791718,
-0.024470791220664978,
-0.0569596141576767,
0.11988508701324463,
0.01834085024893284,
0.05159543454647064,
0.07223077118396759,
0.03237685188651085,
0.07760992646217346,
-0.0443548783659935,
-0.12274250388145447,
0.049200139939785004,
0.039419446140527725,
-0.07886585593223572,
-0.15920230746269226,
-0.03629232197999954,
-0.11065895855426788,
0.007035772316157818,
0.08070546388626099,
0.1527547985315323,
-0.07992078363895416,
-0.02205965667963028,
-0.037786491215229034,
0.020029272884130478,
-0.14261165261268616,
0.2230408787727356,
0.02758939005434513,
0.07934015989303589,
-0.15841050446033478,
0.042082782834768295,
-0.05872233957052231,
-0.014595608226954937,
0.058287207037210464,
-0.028118152171373367,
-0.05892867594957352,
-0.05975255370140076,
-0.03965609148144722,
0.07666580379009247,
0.009455178864300251,
-0.1601991355419159,
-0.023241236805915833,
-0.13198724389076233,
-0.005736464634537697,
0.09110055863857269,
0.07028116285800934,
0.012742879800498486,
-0.10330275446176529,
-0.0938367247581482,
-0.03612808138132095,
0.03408103808760643,
0.08716253191232681,
-0.05609264597296715,
-0.11070670187473297,
0.09707505255937576,
0.013594554737210274,
0.06647324562072754,
-0.07787498831748962,
-0.06859678775072098,
0.007162106689065695,
0.042879801243543625,
-0.13428886234760284,
-0.01490830723196268,
-0.06772322952747345,
0.03081449866294861,
0.015878841280937195,
-0.050996020436286926,
0.004512698855251074,
0.07895068824291229,
-0.10853195190429688,
0.07798022776842117,
-0.013036925345659256,
0.043434131890535355,
-0.10001077502965927,
0.050221916288137436,
0.007187545765191317,
0.012923339381814003,
0.07622002810239792,
0.10912380367517471,
-0.1275368332862854,
0.15495043992996216,
-0.1570833921432495,
-0.048718977719545364,
0.08012866228818893,
0.0638333261013031,
0.00426918501034379,
-0.045357923954725266,
-0.016300741583108902,
0.13698187470436096,
0.038705263286828995,
-0.010920489206910133,
0.0352335199713707,
-0.07344169914722443,
0.0377592071890831,
-0.04639854654669762,
-0.03790256008505821,
-0.019435714930295944,
0.014473344199359417,
0.07876819372177124,
0.15212053060531616,
0.18462751805782318,
-0.11114823818206787,
0.05978303402662277,
-0.0750294104218483,
0.04296569898724556,
-0.04591621831059456,
-0.030880138278007507,
-0.09711399674415588,
-0.07580149173736572,
0.040364284068346024,
-0.058093227446079254,
0.13563986122608185,
0.06850949674844742,
0.07263628393411636,
-0.03311339020729065,
-0.012360361404716969,
0.010552501305937767,
-0.0244459081441164,
0.1933668553829193,
0.03274987265467644,
0.05677986145019531,
-0.02089868299663067,
0.007009090390056372,
0.00007807615475030616,
0.09736824035644531,
-0.023727674037218094,
0.06663393974304199,
0.06276770681142807,
0.11796578764915466,
0.13113325834274292,
0.026192864403128624,
-0.056390099227428436,
-0.009738493710756302,
-0.05213460698723793,
0.0554807148873806,
-0.020537283271551132,
0.13576005399227142,
0.11163431406021118,
-0.049423947930336,
0.09624549001455307,
0.03154149651527405,
-0.06433789432048798,
-0.20418839156627655,
-0.21238650381565094,
-0.11520728468894958,
-0.14221711456775665,
0.021527661010622978,
-0.10741990804672241,
0.01629558950662613,
0.017400402575731277,
0.057698898017406464,
-0.03854362666606903,
0.1295609176158905,
-0.09387470781803131,
-0.13454774022102356,
0.09302858263254166,
-0.09500887989997864,
-0.0070022111758589745,
-0.04557012394070625,
0.039775438606739044,
0.165980726480484,
0.10444609820842743,
0.025923941284418106,
0.017946090549230576,
-0.06717532128095627,
-0.02829773724079132,
-0.08457113802433014,
-0.08321730047464371,
-0.024381550028920174,
-0.02657265029847622,
0.09562896192073822,
0.07337769865989685,
0.10916128754615784,
-0.09733980894088745,
-0.01674548350274563,
0.1163044348359108,
-0.04979169741272926,
-0.1609715223312378,
-0.12040194869041443,
0.1741219311952591,
-0.01768515631556511,
0.09994164109230042,
-0.017560115084052086,
-0.04719512164592743,
0.008799671195447445,
0.1889093965291977,
0.22784042358398438,
0.08826474100351334,
0.03622306510806084,
-0.07660165429115295,
-0.007533106487244368,
-0.03665533661842346,
0.004706172738224268,
0.017841029912233353,
0.21070018410682678,
0.022570626810193062,
0.10381972044706345,
-0.09556203335523605,
-0.06866762787103653,
-0.02163221314549446,
-0.011221562512218952,
-0.0011221847962588072,
-0.1161901131272316,
-0.0007824180065654218,
0.18280260264873505,
-0.11352250725030899,
-0.08802754431962967,
-0.13995777070522308,
-0.010041717439889908,
-0.12321799248456955,
-0.03595639020204544,
0.048082608729600906,
0.1552506387233734,
0.03799533098936081,
-0.03773970529437065,
0.029657164588570595,
0.09366331249475479,
-0.0072893863543868065,
-0.09494847804307938,
-0.0510406456887722,
0.049525946378707886,
-0.07229146361351013,
-0.02668522484600544,
0.037105780094861984,
0.12796872854232788,
-0.005364745389670134,
0.0933745950460434,
0.03251887485384941,
0.18897128105163574,
-0.03186247870326042,
-0.14516478776931763,
0.06545454263687134,
0.12903647124767303,
0.00012087363575119525,
0.1387387365102768,
0.0002873703488148749,
-0.1777946650981903,
-0.007813642732799053,
-0.07705322653055191,
-0.03566133230924606,
-0.04539599269628525,
0.10241207480430603,
-0.07938552647829056,
0.0408068485558033,
0.07937921583652496,
-0.02546956017613411,
-0.07108242809772491,
-0.07018348574638367,
0.08812671154737473,
0.0008873917395249009,
-0.10626866668462753,
-0.04377102851867676,
-0.2085421234369278,
-0.03664860501885414,
-0.0888594314455986,
-0.05225967988371849,
-0.0739859789609909,
-0.005193936172872782,
-0.03194333240389824,
-0.01805197447538376,
0.03724074736237526,
0.00634528324007988,
0.06907127797603607,
-0.002070426242426038,
0.037975627928972244,
0.034375838935375214,
0.08603266626596451,
0.1130659356713295,
-0.17971085011959076,
-0.09890349209308624
] |
null | null |
transformers
|
## Model series
This model is part of a series of models trained on TPU with Flax/Jax during the Hugging Face Flax/Jax challenge.
## Gpt models
## Swedish Gpt
https://huggingface.co/birgermoell/swedish-gpt/
## Swedish gpt wiki
https://huggingface.co/flax-community/swe-gpt-wiki
## Nordic gpt wiki
https://huggingface.co/flax-community/nordic-gpt-wiki
## Dansk gpt wiki
https://huggingface.co/flax-community/dansk-gpt-wiki
## Norsk gpt wiki
https://huggingface.co/flax-community/norsk-gpt-wiki
## Roberta models
## Nordic Roberta Wiki
https://huggingface.co/flax-community/nordic-roberta-wiki
## Swe Roberta Wiki Oscar
https://huggingface.co/flax-community/swe-roberta-wiki-oscar
## Roberta Swedish Scandi
https://huggingface.co/birgermoell/roberta-swedish-scandi
## Roberta Swedish
https://huggingface.co/birgermoell/roberta-swedish
## Swedish T5 model
https://huggingface.co/birgermoell/t5-base-swedish
# GPT-svenska-wikipedia
A Swedish GPT-2 style model trained using the Flax CLM pipeline on the Swedish
part of the wiki40b dataset and the Oscar dataset.
https://huggingface.co/datasets/wiki40b
The model was trained for around 22600 steps (42 hours) as part of the Hugging Face Jax/Flax challenge, with the following loss and learning rate:
Loss: 3.1715331077575684, Learning Rate: 0.0024816440418362617
The model could likely be trained for longer.
## Data cleaning and preprocessing
The data was cleaned and preprocessed using the following script. Make sure to install the dependencies for `beam_runner` to make the dataset work.
```python
from datasets import load_dataset

def load_and_clean_wiki():
    dataset = load_dataset('wiki40b', 'sv', beam_runner='DirectRunner', split="train")
    # dataset = load_dataset('wiki40b', 'sv', beam_runner='DirectRunner')
    dataset = dataset.remove_columns(['wikidata_id', 'version_id'])
    filtered_dataset = dataset.map(filter_wikipedia)
    # filtered_dataset[:3]
    # print(filtered_dataset[:3])
    return filtered_dataset

def filter_wikipedia(batch):
    batch["text"] = " ".join(batch["text"].split("\n_START_SECTION_\n"))
    batch["text"] = " ".join(batch["text"].split("\n_START_ARTICLE_\n"))
    batch["text"] = " ".join(batch["text"].split("\n_START_ARTICLE_\n"))
    batch["text"] = " ".join(batch["text"].split("\n_START_PARAGRAPH_\n"))
    batch["text"] = " ".join(batch["text"].split("_NEWLINE_"))
    batch["text"] = " ".join(batch["text"].split("\xa0"))
    return batch
```
## Training script
The following training script was used to train the model.
```bash
./run_clm_flax.py --output_dir="${MODEL_DIR}" --model_type="gpt2" --config_name="${MODEL_DIR}" --tokenizer_name="${MODEL_DIR}" --dataset_name="wiki40b" --dataset_config_name="sv" --do_train --do_eval --block_size="512" --per_device_train_batch_size="64" --per_device_eval_batch_size="64" --learning_rate="5e-3" --warmup_steps="1000" --adam_beta1="0.9" --adam_beta2="0.98" --weight_decay="0.01" --overwrite_output_dir --num_train_epochs="20" --logging_steps="500" --save_steps="1000" --eval_steps="2500" --push_to_hub
```
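## Usage
The trained checkpoint is a standard GPT-2 model for Swedish text generation. A minimal sketch, assuming the usual `transformers` text-generation pipeline and using the widget prompt from the model card, could look like this:

```python
from transformers import pipeline

# Minimal generation sketch; the prompt is the widget example from this model card.
generator = pipeline("text-generation", model="birgermoell/swedish-gpt")
print(generator("Jag är en svensk språkmodell.", max_length=50, num_return_sequences=1))
```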
|
{"language": "sv", "widget": [{"text": "Jag \u00e4r en svensk spr\u00e5kmodell."}]}
|
text-generation
|
birgermoell/swedish-gpt
|
[
"transformers",
"pytorch",
"jax",
"tensorboard",
"gpt2",
"text-generation",
"sv",
"autotrain_compatible",
"endpoints_compatible",
"has_space",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"sv"
] |
TAGS
#transformers #pytorch #jax #tensorboard #gpt2 #text-generation #sv #autotrain_compatible #endpoints_compatible #has_space #text-generation-inference #region-us
|
## Model series
This model is part of a series of models training on TPU with Flax Jax during Huggingface Flax/Jax challenge.
## Gpt models
## Swedish Gpt
URL
## Swedish gpt wiki
URL
# Nordic gpt wiki
URL
## Dansk gpt wiki
URL
## Norsk gpt wiki
URL
## Roberta models
## Nordic Roberta Wiki
URL
## Swe Roberta Wiki Oscar
URL
## Roberta Swedish Scandi
URL
## Roberta Swedish
URL
## Swedish T5 model
URL
# GPT-svenska-wikipedia
A swedish GPT2 style model trained using Flax CLM pipeline on the Swedish
part of the wiki40b dataset and the Oscar dataset.
URL
The model was trained for around 22600 steps (42 hours) as part of the Huggingface Jax/Flax challenge with the following loss and learning rate
Loss: 3.1715331077575684, Learning Rate: 0.0024816440418362617)
The model could likely be trained for longer.
## Data cleaning and preprocessing
The data was cleaned and preprocessed using the following script. Make sure to install depencies for beam_runner to make the dataset work.
## Training script
The following training script was used to train the model.
|
[
"## Model series\nThis model is part of a series of models training on TPU with Flax Jax during Huggingface Flax/Jax challenge.",
"## Gpt models",
"## Swedish Gpt\nURL",
"## Swedish gpt wiki\nURL",
"# Nordic gpt wiki\nURL",
"## Dansk gpt wiki\nURL",
"## Norsk gpt wiki\nURL",
"## Roberta models",
"## Nordic Roberta Wiki\nURL",
"## Swe Roberta Wiki Oscar\nURL",
"## Roberta Swedish Scandi\nURL",
"## Roberta Swedish\nURL",
"## Swedish T5 model\nURL",
"# GPT-svenska-wikipedia\nA swedish GPT2 style model trained using Flax CLM pipeline on the Swedish\npart of the wiki40b dataset and the Oscar dataset. \nURL\n\nThe model was trained for around 22600 steps (42 hours) as part of the Huggingface Jax/Flax challenge with the following loss and learning rate\nLoss: 3.1715331077575684, Learning Rate: 0.0024816440418362617) \n\nThe model could likely be trained for longer.",
"## Data cleaning and preprocessing\nThe data was cleaned and preprocessed using the following script. Make sure to install depencies for beam_runner to make the dataset work.",
"## Training script\nThe following training script was used to train the model."
] |
[
"TAGS\n#transformers #pytorch #jax #tensorboard #gpt2 #text-generation #sv #autotrain_compatible #endpoints_compatible #has_space #text-generation-inference #region-us \n",
"## Model series\nThis model is part of a series of models training on TPU with Flax Jax during Huggingface Flax/Jax challenge.",
"## Gpt models",
"## Swedish Gpt\nURL",
"## Swedish gpt wiki\nURL",
"# Nordic gpt wiki\nURL",
"## Dansk gpt wiki\nURL",
"## Norsk gpt wiki\nURL",
"## Roberta models",
"## Nordic Roberta Wiki\nURL",
"## Swe Roberta Wiki Oscar\nURL",
"## Roberta Swedish Scandi\nURL",
"## Roberta Swedish\nURL",
"## Swedish T5 model\nURL",
"# GPT-svenska-wikipedia\nA swedish GPT2 style model trained using Flax CLM pipeline on the Swedish\npart of the wiki40b dataset and the Oscar dataset. \nURL\n\nThe model was trained for around 22600 steps (42 hours) as part of the Huggingface Jax/Flax challenge with the following loss and learning rate\nLoss: 3.1715331077575684, Learning Rate: 0.0024816440418362617) \n\nThe model could likely be trained for longer.",
"## Data cleaning and preprocessing\nThe data was cleaned and preprocessed using the following script. Make sure to install depencies for beam_runner to make the dataset work.",
"## Training script\nThe following training script was used to train the model."
] |
[
60,
32,
4,
5,
6,
6,
6,
6,
4,
6,
7,
7,
5,
6,
111,
40,
14
] |
[
"passage: TAGS\n#transformers #pytorch #jax #tensorboard #gpt2 #text-generation #sv #autotrain_compatible #endpoints_compatible #has_space #text-generation-inference #region-us \n## Model series\nThis model is part of a series of models training on TPU with Flax Jax during Huggingface Flax/Jax challenge.## Gpt models## Swedish Gpt\nURL## Swedish gpt wiki\nURL# Nordic gpt wiki\nURL## Dansk gpt wiki\nURL## Norsk gpt wiki\nURL## Roberta models## Nordic Roberta Wiki\nURL## Swe Roberta Wiki Oscar\nURL## Roberta Swedish Scandi\nURL## Roberta Swedish\nURL## Swedish T5 model\nURL# GPT-svenska-wikipedia\nA swedish GPT2 style model trained using Flax CLM pipeline on the Swedish\npart of the wiki40b dataset and the Oscar dataset. \nURL\n\nThe model was trained for around 22600 steps (42 hours) as part of the Huggingface Jax/Flax challenge with the following loss and learning rate\nLoss: 3.1715331077575684, Learning Rate: 0.0024816440418362617) \n\nThe model could likely be trained for longer.## Data cleaning and preprocessing\nThe data was cleaned and preprocessed using the following script. Make sure to install depencies for beam_runner to make the dataset work.## Training script\nThe following training script was used to train the model."
] |
[
-0.0932951271533966,
0.2307949811220169,
-0.0004250535857863724,
0.06637486070394516,
0.04010304436087608,
0.008655490353703499,
0.05876021459698677,
0.12583233416080475,
-0.005285907071083784,
0.0844700038433075,
0.12320355325937271,
-0.05287715420126915,
0.09859446436166763,
0.13337598741054535,
0.040750693529844284,
-0.37224462628364563,
0.05403013899922371,
-0.06478361040353775,
-0.04086761176586151,
0.09894471615552902,
0.09420417994260788,
-0.06624697893857956,
0.04374855384230614,
-0.022294169291853905,
-0.04106530547142029,
-0.001742845051921904,
-0.032889094203710556,
-0.051482681185007095,
0.09634032100439072,
0.07412797212600708,
0.0796755775809288,
0.07819409668445587,
0.15902723371982574,
-0.14370281994342804,
0.004445462487637997,
0.06535778194665909,
0.011415239423513412,
0.0324656218290329,
0.06397945433855057,
0.051360905170440674,
0.16857236623764038,
0.013421732001006603,
0.07428135722875595,
0.026775404810905457,
-0.07206366956233978,
-0.23673418164253235,
-0.07805898040533066,
0.04987124353647232,
0.04588591307401657,
0.10111168771982193,
-0.07283665984869003,
0.03809001296758652,
-0.13744673132896423,
0.05450586602091789,
0.13573569059371948,
-0.21749472618103027,
-0.0816463902592659,
0.12206317484378815,
0.07886075973510742,
0.06536456942558289,
-0.09197515994310379,
0.05807904899120331,
0.05495969206094742,
0.03796064481139183,
0.11728332191705704,
0.008862710557878017,
0.0635305717587471,
0.009687716141343117,
-0.150421142578125,
-0.03429627791047096,
0.045182038098573685,
0.07434937357902527,
-0.045039307326078415,
-0.18142169713974,
-0.04183446243405342,
-0.07434182614088058,
-0.01923729106783867,
-0.04572438821196556,
0.045638248324394226,
-0.010666540823876858,
-0.020386258140206337,
-0.03292930871248245,
-0.06632223725318909,
-0.061365049332380295,
0.05779871344566345,
0.11083712428808212,
0.05145222321152687,
0.02510058879852295,
0.022202860563993454,
0.09453675150871277,
-0.037417102605104446,
-0.10734812170267105,
-0.06732729822397232,
-0.020553043112158775,
-0.11284397542476654,
0.0029195414390414953,
0.014230340719223022,
-0.0054290322586894035,
-0.06659918278455734,
0.19657421112060547,
0.01710611768066883,
0.010682692751288414,
0.10426252335309982,
-0.010265830904245377,
-0.006946562323719263,
0.12700128555297852,
-0.1543135792016983,
-0.1570841670036316,
-0.06579577922821045,
0.0516289621591568,
-0.02955430932343006,
-0.02096278965473175,
-0.018605919554829597,
-0.07206331938505173,
0.043831754475831985,
0.05373688042163849,
-0.015472397208213806,
0.011182405985891819,
-0.0065300241112709045,
-0.009316817857325077,
0.11893884837627411,
-0.1093350425362587,
-0.009987090714275837,
-0.016814855858683586,
-0.12381697446107864,
0.019635019823908806,
0.03587695583701134,
-0.0069799525663256645,
-0.03145664557814598,
0.13107125461101532,
-0.05687054246664047,
-0.061793986707925797,
-0.06741288304328918,
-0.14052073657512665,
0.015518461354076862,
-0.09420192241668701,
-0.02345399744808674,
-0.043153658509254456,
-0.16091057658195496,
-0.0958109200000763,
0.08179895579814911,
-0.07288341969251633,
-0.03865016624331474,
-0.07702916860580444,
-0.07761745899915695,
0.020568210631608963,
-0.040850963443517685,
0.1448940932750702,
-0.030861325562000275,
0.045221101492643356,
-0.07079961150884628,
0.12640540301799774,
0.08589985966682434,
0.022893253713846207,
-0.09278783947229385,
0.02292732335627079,
-0.19019652903079987,
0.07970599085092545,
-0.127833753824234,
0.013566731475293636,
-0.1170828640460968,
-0.10104691237211227,
-0.04289709031581879,
0.08789888024330139,
0.029105141758918762,
0.1352093517780304,
-0.18288885056972504,
-0.026539914309978485,
0.2700602114200592,
-0.10834386944770813,
0.007574109826236963,
0.1102038249373436,
0.006122126244008541,
0.08450615406036377,
0.07950541377067566,
0.1588783860206604,
0.042933668941259384,
-0.13846446573734283,
-0.03050798363983631,
-0.0002960587153211236,
-0.01199385430663824,
0.06467122584581375,
0.06600433588027954,
-0.01802857592701912,
0.09131816029548645,
0.03479999303817749,
-0.09028718620538712,
0.00449999189004302,
-0.03370104730129242,
-0.054615333676338196,
0.024357374757528305,
-0.0745597705245018,
-0.014700072817504406,
0.06382045149803162,
-0.01039907243102789,
-0.05079084262251854,
-0.16840888559818268,
-0.006339057814329863,
0.12271611392498016,
-0.08330461382865906,
0.0072565833106637,
-0.08015614002943039,
-0.09719377011060715,
0.04717444255948067,
-0.023575671017169952,
-0.11023622751235962,
-0.08043670654296875,
-0.029031910002231598,
-0.050890326499938965,
-0.043674685060977936,
0.11472932994365692,
0.08953889459371567,
0.06992996484041214,
-0.06449293345212936,
-0.02529328130185604,
-0.02840336598455906,
-0.04085135459899902,
-0.0666818842291832,
-0.11529000848531723,
-0.07674811035394669,
-0.049111004918813705,
0.1245836392045021,
-0.2011229693889618,
0.02502262592315674,
0.025859171524643898,
0.12737242877483368,
0.016773082315921783,
-0.06732787936925888,
0.03632689267396927,
-0.008997004479169846,
0.008631371892988682,
-0.11652327328920364,
0.018001126125454903,
-0.05770128220319748,
-0.030295636504888535,
0.07456431537866592,
-0.062176413834095,
-0.1032155454158783,
0.0756986066699028,
0.2533321678638458,
-0.12518560886383057,
0.06438451260328293,
-0.05819722265005112,
-0.017348846420645714,
-0.09278695285320282,
-0.0056463442742824554,
0.0701906830072403,
0.06874266266822815,
0.10357724875211716,
-0.09916912019252777,
-0.012631295248866081,
0.015087621286511421,
-0.003979239147156477,
-0.048763565719127655,
0.15584991872310638,
0.09558848291635513,
-0.13706524670124054,
0.06098273769021034,
-0.0964970588684082,
0.017102133482694626,
0.2368413209915161,
0.057488810271024704,
-0.11892062425613403,
0.0056354389525949955,
-0.007411458529531956,
0.01333890575915575,
0.1571921706199646,
0.052365146577358246,
0.03473604843020439,
0.028754740953445435,
0.0011214889818802476,
-0.001807314227335155,
-0.11650902032852173,
-0.07397592812776566,
0.008250362239778042,
-0.07270147651433945,
0.02810382843017578,
0.03634013235569,
-0.07131996750831604,
0.06310541927814484,
0.03694711625576019,
-0.11084343492984772,
-0.04927593842148781,
-0.013071882538497448,
-0.07809159904718399,
0.1928565502166748,
-0.011045168153941631,
-0.23696786165237427,
-0.08969567716121674,
0.06859540939331055,
0.04502403363585472,
-0.022985273972153664,
0.04642517864704132,
-0.12653765082359314,
-0.12081911414861679,
-0.10976073145866394,
0.07922251522541046,
-0.04705803841352463,
-0.03347651660442352,
-0.05282343924045563,
0.03272329270839691,
-0.02007376216351986,
-0.09516198188066483,
0.021442973986268044,
-0.03224696218967438,
-0.026298489421606064,
0.019711071625351906,
-0.07120981812477112,
0.06393861025571823,
0.09751460701227188,
0.014326689764857292,
0.030683740973472595,
0.029993213713169098,
0.20575246214866638,
-0.11061466485261917,
0.12127499282360077,
0.12062875926494598,
0.0706135705113411,
0.06981728971004486,
0.14802759885787964,
0.029274893924593925,
-0.1112564280629158,
0.03648129850625992,
0.07886385172605515,
-0.0606660470366478,
-0.16571395099163055,
-0.06657825410366058,
0.0040257456712424755,
0.04456251114606857,
0.15053795278072357,
0.04379749670624733,
-0.07138731330633163,
0.05475880950689316,
-0.10694270581007004,
-0.06068585440516472,
0.05265798047184944,
0.04518682882189751,
-0.11502563953399658,
-0.01512796152383089,
0.09224117547273636,
-0.03762432187795639,
0.012417002581059933,
0.09070033580064774,
-0.03223245218396187,
0.1829659789800644,
-0.035317737609148026,
0.0710175484418869,
0.055165406316518784,
0.10197734087705612,
0.06268052011728287,
-0.0038173478096723557,
0.04824110120534897,
-0.050926487892866135,
-0.006605727132409811,
-0.044866565614938736,
0.0067089274525642395,
0.10446304082870483,
-0.026828590780496597,
-0.0976233258843422,
-0.01346093975007534,
0.02212853543460369,
-0.06977201998233795,
0.285227507352829,
0.08587179332971573,
-0.2472876012325287,
-0.11434664577245712,
0.04375678673386574,
-0.08404260128736496,
-0.07178676128387451,
-0.022732535377144814,
0.11880273371934891,
-0.15670374035835266,
0.0644921287894249,
-0.07165228575468063,
0.08281037211418152,
-0.023966310545802116,
-0.028491396456956863,
0.04296809434890747,
0.10183684527873993,
-0.029929161071777344,
0.09670773893594742,
-0.15242211520671844,
0.1466372311115265,
-0.01786324754357338,
0.12793055176734924,
-0.05447715148329735,
-0.0004993377369828522,
0.006873629055917263,
0.05940434709191322,
0.23192204535007477,
0.01876796782016754,
-0.10305065661668777,
-0.09787647426128387,
-0.12961886823177338,
-0.013012335635721684,
0.03227456659078598,
-0.10404705256223679,
0.11174207180738449,
0.008914027363061905,
-0.008105362765491009,
-0.03861581161618233,
-0.06668521463871002,
-0.09324802458286285,
-0.09412756562232971,
-0.006621426437050104,
-0.08436346054077148,
0.05360164865851402,
-0.07422050088644028,
-0.0672825276851654,
-0.0926334410905838,
0.21042336523532867,
-0.03250272572040558,
-0.10491151362657547,
-0.14472468197345734,
0.10443821549415588,
0.15951953828334808,
-0.07858328521251678,
0.012488278560340405,
0.047031573951244354,
0.15279370546340942,
-0.06954367458820343,
-0.015216759406030178,
0.06157354637980461,
-0.05238264799118042,
-0.1554073840379715,
-0.018658744171261787,
0.06233016774058342,
0.1166163757443428,
0.08038956671953201,
0.002520015463232994,
0.050301335752010345,
-0.02651536650955677,
-0.08576184511184692,
0.04389562830328941,
0.14757952094078064,
0.007904918864369392,
-0.07633942365646362,
-0.04326355829834938,
0.04265240207314491,
-0.04200245440006256,
-0.04888230189681053,
0.028800485655665398,
0.25721263885498047,
-0.11974691599607468,
0.12440214306116104,
0.13753899931907654,
-0.0014198716962710023,
-0.2768232226371765,
-0.016674578189849854,
0.06570520251989365,
0.12069962918758392,
0.017015282064676285,
-0.23211964964866638,
0.054302576929330826,
0.1208399310708046,
-0.03228309005498886,
0.003942095208913088,
-0.3039643168449402,
-0.12190708518028259,
0.01601271890103817,
0.08114495873451233,
-0.019412348046898842,
-0.0023098939564079046,
0.001259742071852088,
-0.006048998795449734,
-0.06074386090040207,
0.01928095333278179,
-0.008140145801007748,
0.09639501571655273,
-0.013288231566548347,
0.024704230949282646,
0.03877079859375954,
-0.05165204778313637,
0.12659674882888794,
-0.002266888739541173,
0.04112454876303673,
-0.06812027841806412,
0.10546715557575226,
0.05661054328083992,
-0.056649379432201385,
0.19047731161117554,
-0.0344604067504406,
0.011324761435389519,
-0.12948361039161682,
-0.09914795309305191,
-0.08940059691667557,
0.05277477949857712,
-0.0715508684515953,
-0.07822153717279434,
-0.06993574649095535,
0.11960742622613907,
0.07660700380802155,
-0.008098765276372433,
0.012129491195082664,
-0.081160768866539,
-0.022787095978856087,
0.04823552072048187,
0.10899539291858673,
0.05417107790708542,
-0.09061970561742783,
-0.006027792580425739,
0.003441296052187681,
0.06824632734060287,
-0.2031644582748413,
-0.03783411160111427,
0.08675818890333176,
0.04960396885871887,
0.07507782429456711,
-0.025479385629296303,
-0.13296793401241302,
-0.033778224140405655,
0.04251604154706001,
-0.13343870639801025,
-0.17787793278694153,
-0.02731541357934475,
-0.12211693823337555,
-0.09538096934556961,
-0.124297134578228,
0.0844353437423706,
-0.11812301725149155,
0.0017938060918822885,
-0.024619556963443756,
0.04695316404104233,
0.014992953278124332,
0.1779308021068573,
0.05031602829694748,
0.07007792592048645,
-0.10477714240550995,
0.11027805507183075,
0.03853751718997955,
-0.07054568827152252,
0.0486333966255188,
0.16551631689071655,
-0.13748963177204132,
-0.04923803731799126,
-0.004934838507324457,
0.04253239184617996,
-0.0772271677851677,
-0.01383458636701107,
-0.07936285436153412,
-0.04463477432727814,
0.045103561133146286,
-0.0019318476552143693,
0.033627476543188095,
0.015439044684171677,
0.018060820177197456,
-0.028960352763533592,
-0.10633942484855652,
0.05535975098609924,
0.09000343829393387,
-0.004754436202347279,
-0.05607966333627701,
0.17565995454788208,
0.015830595046281815,
-0.03045896254479885,
-0.030685827136039734,
0.04012179374694824,
-0.04742398113012314,
-0.010400524362921715,
-0.03004094399511814,
-0.04180142655968666,
-0.046270646154880524,
-0.004285680130124092,
-0.061822790652513504,
-0.046033766120672226,
-0.015760114416480064,
0.00451585091650486,
-0.09875227510929108,
-0.05215165764093399,
-0.0571134127676487,
-0.025937849655747414,
-0.07092712074518204,
-0.01275537721812725,
0.013876241631805897,
-0.07721914350986481,
0.027409790083765984,
-0.025860967114567757,
0.03513209894299507,
0.055204927921295166,
0.065701924264431,
-0.017082707956433296,
-0.007847082801163197,
-0.0020985074806958437,
0.04232977703213692,
0.020474642515182495,
-0.044480100274086,
-0.030752601101994514,
0.003803752362728119,
0.023754127323627472,
-0.049458447843790054,
-0.10329528898000717,
0.000174402492120862,
-0.0028346586041152477,
0.0022682417184114456,
-0.08110932260751724,
0.11945789307355881,
0.0988491103053093,
0.0478731133043766,
0.09509702026844025,
-0.0449921078979969,
0.062321458011865616,
-0.20350517332553864,
-0.0064044250175356865,
-0.012493417598307133,
-0.016537662595510483,
0.03729483485221863,
0.023732129484415054,
0.06952151656150818,
-0.04284738004207611,
0.04930396378040314,
0.06472151726484299,
0.007763580419123173,
0.06536182761192322,
-0.08905842155218124,
0.022605374455451965,
0.018325524404644966,
0.12998943030834198,
0.07078371196985245,
0.017721962183713913,
0.05787200108170509,
0.032266195863485336,
-0.02335932105779648,
0.06150186434388161,
0.12467791140079498,
0.14899100363254547,
0.1183839961886406,
0.09819760173559189,
-0.06329919397830963,
-0.09321768581867218,
-0.14700950682163239,
0.16182780265808105,
-0.06757025420665741,
0.03624687343835831,
-0.014050825498998165,
0.09880871325731277,
0.1780264526605606,
-0.1848587840795517,
0.08028030395507812,
0.06354811042547226,
-0.08391410112380981,
-0.15253430604934692,
-0.2011057585477829,
-0.0969572439789772,
0.00904049165546894,
0.0739966630935669,
-0.08655651658773422,
0.07101349532604218,
0.09499238431453705,
0.05699196085333824,
0.02856902964413166,
0.08654434978961945,
0.00434166518971324,
-0.0948081910610199,
0.09440076351165771,
0.025278057903051376,
0.017841292545199394,
0.022765561938285828,
-0.006397056393325329,
0.08019361644983292,
0.0055239200592041016,
0.00973918940871954,
-0.0167025625705719,
-0.01863238401710987,
-0.001732640084810555,
0.0546702966094017,
-0.06968437880277634,
-0.0296518262475729,
0.03612586483359337,
0.07661955803632736,
0.04708930477499962,
0.08703111112117767,
-0.0226130373775959,
-0.043523047119379044,
0.23254480957984924,
-0.04955728352069855,
-0.00745480228215456,
-0.13821688294410706,
0.19884036481380463,
0.0001226053573191166,
0.05852339044213295,
0.0007878750911913812,
-0.07080917805433273,
0.007960086688399315,
0.19498814642429352,
0.06831201910972595,
-0.011585299856960773,
-0.011003054678440094,
-0.02371874824166298,
-0.02301837131381035,
-0.023068025708198547,
0.06876372545957565,
0.004057042300701141,
0.19138973951339722,
-0.1015569195151329,
0.027963286265730858,
-0.032557230442762375,
-0.10274256020784378,
-0.09376530349254608,
0.10142465680837631,
0.005324804224073887,
0.005184660200029612,
-0.12573295831680298,
0.09766450524330139,
0.010346765629947186,
-0.17866384983062744,
0.08651819825172424,
-0.04984758794307709,
-0.15597090125083923,
-0.02883322723209858,
-0.009672509506344795,
0.045634448528289795,
0.10711409896612167,
0.013760698027908802,
0.040925897657871246,
0.09326405078172684,
0.010453766211867332,
-0.09900595992803574,
-0.10709723830223083,
0.048141300678253174,
-0.0012171814450994134,
0.19190630316734314,
-0.016822446137666702,
0.018926963210105896,
0.08425368368625641,
-0.0472545400261879,
-0.10445939749479294,
0.03370153531432152,
0.026196306571364403,
-0.04831172898411751,
0.024952124804258347,
0.2033379077911377,
-0.05401971563696861,
0.0992753878235817,
0.049423664808273315,
-0.12400277704000473,
0.013671720400452614,
0.0036379906814545393,
-0.08500748127698898,
-0.04800863564014435,
0.10084056109189987,
-0.027446981519460678,
0.10814469307661057,
0.22574852406978607,
-0.0389774851500988,
-0.012357461266219616,
-0.0769672840833664,
0.07854259014129639,
-0.01725427247583866,
0.050851576030254364,
0.05646180734038353,
-0.15168531239032745,
-0.06823179125785828,
-0.06436138600111008,
-0.00020882861281279474,
-0.20073436200618744,
-0.03614664450287819,
-0.04747651144862175,
-0.04029860347509384,
0.01565810665488243,
0.09392537921667099,
0.06020251661539078,
0.045579470694065094,
-0.022046612575650215,
-0.03483535349369049,
0.023623650893568993,
0.09362675994634628,
-0.1411105990409851,
-0.058834657073020935
] |
null | null |
transformers
|
[Google's T5](https://ai.googleblog.com/2020/02/exploring-transfer-learning-with-t5.html)
Pretraining Dataset: [C4](https://huggingface.co/datasets/oscar)
Paper: [Exploring the Limits of Transfer Learning with a Unified Text-to-Text Transformer](https://arxiv.org/pdf/1910.10683.pdf)
Authors: *Colin Raffel, Noam Shazeer, Adam Roberts, Katherine Lee, Sharan Narang, Michael Matena, Yanqi Zhou, Wei Li, Peter J. Liu*
## Abstract
Transfer learning, where a model is first pre-trained on a data-rich task before being fine-tuned on a downstream task, has emerged as a powerful technique in natural language processing (NLP). The effectiveness of transfer learning has given rise to a diversity of approaches, methodology, and practice. In this paper, we explore the landscape of transfer learning techniques for NLP by introducing a unified framework that converts every language problem into a text-to-text format. Our systematic study compares pre-training objectives, architectures, unlabeled datasets, transfer approaches, and other factors on dozens of language understanding tasks. By combining the insights from our exploration with scale and our new “Colossal Clean Crawled Corpus”, we achieve state-of-the-art results on many benchmarks covering summarization, question answering, text classification, and more. To facilitate future work on transfer learning for NLP, we release our dataset, pre-trained models, and code.

## Model series
This model is part of a series of models training on TPU with Flax Jax during Huggingface Flax/Jax challenge.
## Gpt models
## Swedish Gpt
https://huggingface.co/birgermoell/swedish-gpt/
## Swedish gpt wiki
https://huggingface.co/flax-community/swe-gpt-wiki
# Nordic gpt wiki
https://huggingface.co/flax-community/nordic-gpt-wiki
## Dansk gpt wiki
https://huggingface.co/flax-community/dansk-gpt-wiki
## Norsk gpt wiki
https://huggingface.co/flax-community/norsk-gpt-wiki
## Roberta models
## Nordic Roberta Wiki
https://huggingface.co/flax-community/nordic-roberta-wiki
## Swe Roberta Wiki Oscar
https://huggingface.co/flax-community/swe-roberta-wiki-oscar
## Roberta Swedish Scandi
https://huggingface.co/birgermoell/roberta-swedish-scandi
## Roberta Swedish
https://huggingface.co/birgermoell/roberta-swedish
## Swedish T5 model
https://huggingface.co/birgermoell/t5-base-swedish
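## Example usage

This usage sketch is not part of the original model card; it assumes the checkpoint loads with the standard T5 classes in Hugging Face Transformers and that the task-prefix convention from the T5 paper ("summarize:", "translate ...") carries over to this Swedish checkpoint.

```python
from transformers import AutoTokenizer, T5ForConditionalGeneration

model_name = "birgermoell/t5-base-swedish"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = T5ForConditionalGeneration.from_pretrained(model_name)

# Task prefix as in the T5 paper; whether this checkpoint was tuned on it is an assumption.
text = "summarize: Stockholm är Sveriges huvudstad och största stad."
inputs = tokenizer(text, return_tensors="pt", truncation=True)
output_ids = model.generate(**inputs, max_length=48, num_beams=4)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```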
|
{"language": ["sv"], "license": "apache-2.0", "tags": ["summarization", "translation"], "datasets": ["oscar"]}
|
translation
|
birgermoell/t5-base-swedish
|
[
"transformers",
"pytorch",
"jax",
"tensorboard",
"t5",
"feature-extraction",
"summarization",
"translation",
"sv",
"dataset:oscar",
"arxiv:1910.10683",
"license:apache-2.0",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"1910.10683"
] |
[
"sv"
] |
TAGS
#transformers #pytorch #jax #tensorboard #t5 #feature-extraction #summarization #translation #sv #dataset-oscar #arxiv-1910.10683 #license-apache-2.0 #endpoints_compatible #text-generation-inference #region-us
|
Google's T5
Pretraining Dataset: C4
Paper: Exploring the Limits of Transfer Learning with a Unified Text-to-Text Transformer
Authors: *Colin Raffel, Noam Shazeer, Adam Roberts, Katherine Lee, Sharan Narang, Michael Matena, Yanqi Zhou, Wei Li, Peter J. Liu*
## Abstract
Transfer learning, where a model is first pre-trained on a data-rich task before being fine-tuned on a downstream task, has emerged as a powerful technique in natural language processing (NLP). The effectiveness of transfer learning has given rise to a diversity of approaches, methodology, and practice. In this paper, we explore the landscape of transfer learning techniques for NLP by introducing a unified framework that converts every language problem into a text-to-text format. Our systematic study compares pre-training objectives, architectures, unlabeled datasets, transfer approaches, and other factors on dozens of language understanding tasks. By combining the insights from our exploration with scale and our new “Colossal Clean Crawled Corpus”, we achieve state-of-the-art results on many benchmarks covering summarization, question answering, text classification, and more. To facilitate future work on transfer learning for NLP, we release our dataset, pre-trained models, and code.
!model image
## Model series
This model is part of a series of models training on TPU with Flax Jax during Huggingface Flax/Jax challenge.
## Gpt models
## Swedish Gpt
URL
## Swedish gpt wiki
URL
# Nordic gpt wiki
URL
## Dansk gpt wiki
URL
## Norsk gpt wiki
URL
## Roberta models
## Nordic Roberta Wiki
URL
## Swe Roberta Wiki Oscar
URL
## Roberta Swedish Scandi
URL
## Roberta Swedish
URL
## Swedish T5 model
URL
|
[
"## Abstract\nTransfer learning, where a model is first pre-trained on a data-rich task before being fine-tuned on a downstream task, has emerged as a powerful technique in natural language processing (NLP). The effectiveness of transfer learning has given rise to a diversity of approaches, methodology, and practice. In this paper, we explore the landscape of transfer learning techniques for NLP by introducing a unified framework that converts every language problem into a text-to-text format. Our systematic study compares pre-training objectives, architectures, unlabeled datasets, transfer approaches, and other factors on dozens of language understanding tasks. By combining the insights from our exploration with scale and our new “Colossal Clean Crawled Corpus”, we achieve state-of-the-art results on many benchmarks covering summarization, question answering, text classification, and more. To facilitate future work on transfer learning for NLP, we release our dataset, pre-trained models, and code.\n!model image",
"## Model series\nThis model is part of a series of models training on TPU with Flax Jax during Huggingface Flax/Jax challenge.",
"## Gpt models",
"## Swedish Gpt\nURL",
"## Swedish gpt wiki\nURL",
"# Nordic gpt wiki\nURL",
"## Dansk gpt wiki\nURL",
"## Norsk gpt wiki\nURL",
"## Roberta models",
"## Nordic Roberta Wiki\nURL",
"## Swe Roberta Wiki Oscar\nURL",
"## Roberta Swedish Scandi\nURL",
"## Roberta Swedish\nURL",
"## Swedish T5 model\nURL"
] |
[
"TAGS\n#transformers #pytorch #jax #tensorboard #t5 #feature-extraction #summarization #translation #sv #dataset-oscar #arxiv-1910.10683 #license-apache-2.0 #endpoints_compatible #text-generation-inference #region-us \n",
"## Abstract\nTransfer learning, where a model is first pre-trained on a data-rich task before being fine-tuned on a downstream task, has emerged as a powerful technique in natural language processing (NLP). The effectiveness of transfer learning has given rise to a diversity of approaches, methodology, and practice. In this paper, we explore the landscape of transfer learning techniques for NLP by introducing a unified framework that converts every language problem into a text-to-text format. Our systematic study compares pre-training objectives, architectures, unlabeled datasets, transfer approaches, and other factors on dozens of language understanding tasks. By combining the insights from our exploration with scale and our new “Colossal Clean Crawled Corpus”, we achieve state-of-the-art results on many benchmarks covering summarization, question answering, text classification, and more. To facilitate future work on transfer learning for NLP, we release our dataset, pre-trained models, and code.\n!model image",
"## Model series\nThis model is part of a series of models training on TPU with Flax Jax during Huggingface Flax/Jax challenge.",
"## Gpt models",
"## Swedish Gpt\nURL",
"## Swedish gpt wiki\nURL",
"# Nordic gpt wiki\nURL",
"## Dansk gpt wiki\nURL",
"## Norsk gpt wiki\nURL",
"## Roberta models",
"## Nordic Roberta Wiki\nURL",
"## Swe Roberta Wiki Oscar\nURL",
"## Roberta Swedish Scandi\nURL",
"## Roberta Swedish\nURL",
"## Swedish T5 model\nURL"
] |
[
78,
234,
32,
4,
5,
6,
6,
6,
6,
4,
6,
7,
7,
5,
6
] |
[
"passage: TAGS\n#transformers #pytorch #jax #tensorboard #t5 #feature-extraction #summarization #translation #sv #dataset-oscar #arxiv-1910.10683 #license-apache-2.0 #endpoints_compatible #text-generation-inference #region-us \n## Abstract\nTransfer learning, where a model is first pre-trained on a data-rich task before being fine-tuned on a downstream task, has emerged as a powerful technique in natural language processing (NLP). The effectiveness of transfer learning has given rise to a diversity of approaches, methodology, and practice. In this paper, we explore the landscape of transfer learning techniques for NLP by introducing a unified framework that converts every language problem into a text-to-text format. Our systematic study compares pre-training objectives, architectures, unlabeled datasets, transfer approaches, and other factors on dozens of language understanding tasks. By combining the insights from our exploration with scale and our new “Colossal Clean Crawled Corpus”, we achieve state-of-the-art results on many benchmarks covering summarization, question answering, text classification, and more. To facilitate future work on transfer learning for NLP, we release our dataset, pre-trained models, and code.\n!model image## Model series\nThis model is part of a series of models training on TPU with Flax Jax during Huggingface Flax/Jax challenge.## Gpt models## Swedish Gpt\nURL## Swedish gpt wiki\nURL# Nordic gpt wiki\nURL## Dansk gpt wiki\nURL## Norsk gpt wiki\nURL## Roberta models## Nordic Roberta Wiki\nURL## Swe Roberta Wiki Oscar\nURL## Roberta Swedish Scandi\nURL## Roberta Swedish\nURL## Swedish T5 model\nURL"
] |
[
-0.0028484114445745945,
0.1365913301706314,
-0.001975875347852707,
0.06489604711532593,
0.061288606375455856,
-0.010331422090530396,
0.0511230006814003,
0.10922469943761826,
-0.00032157261739484966,
0.07363301515579224,
0.020113136619329453,
0.04329824075102806,
0.11120481044054031,
0.03289664909243584,
0.06304808706045151,
-0.3057731091976166,
0.03904234245419502,
-0.1300407499074936,
-0.08957231789827347,
0.08438331633806229,
0.09659608453512192,
-0.05514676123857498,
0.03704145923256874,
-0.023110680282115936,
0.03249740228056908,
0.016554880887269974,
-0.09562929719686508,
-0.07192165404558182,
0.11608492583036423,
0.09773009270429611,
0.02817339077591896,
0.037411417812108994,
0.0961102619767189,
-0.23729804158210754,
0.01937505602836609,
0.03484805300831795,
0.028794284909963608,
0.03275858983397484,
0.05143265798687935,
-0.00038769200909882784,
0.20353560149669647,
-0.0667608454823494,
0.0668385699391365,
0.04552548751235008,
-0.040776729583740234,
-0.16757729649543762,
-0.1014007031917572,
0.04714101180434227,
0.03792591765522957,
0.034405313432216644,
-0.03463723510503769,
0.08612405508756638,
-0.09397194534540176,
0.06786205619573593,
0.10801780223846436,
-0.2631416618824005,
-0.04362538084387779,
0.10273032635450363,
0.057796549052000046,
0.15694615244865417,
-0.09131494164466858,
0.08072053641080856,
0.04129989072680473,
0.016137130558490753,
0.03601348400115967,
-0.015014084987342358,
0.016966255381703377,
-0.006287273485213518,
-0.14090919494628906,
-0.0181562639772892,
0.1890331208705902,
0.027390895411372185,
-0.0585566908121109,
-0.19518786668777466,
0.015417125076055527,
0.10124260932207108,
0.007536229677498341,
-0.08314630389213562,
0.01462857611477375,
-0.03064301796257496,
0.0817747712135315,
-0.12266439944505692,
-0.09836309403181076,
0.014627931639552116,
0.0019431601976975799,
0.08999983221292496,
0.03444710746407509,
0.032824281603097916,
0.07644914835691452,
0.064115010201931,
-0.0027302957605570555,
-0.04829974099993706,
-0.05905017629265785,
-0.08739989995956421,
-0.12341364473104477,
-0.013779043219983578,
0.04404473677277565,
-0.15498043596744537,
0.04434432089328766,
0.14720402657985687,
0.028197694569826126,
0.023657754063606262,
-0.038787487894296646,
0.04881114885210991,
0.11461013555526733,
0.1527753621339798,
-0.0790630504488945,
-0.18877743184566498,
-0.05619286000728607,
-0.05685257166624069,
-0.016459787264466286,
-0.04183045029640198,
-0.019634904339909554,
-0.030534619465470314,
0.005689694546163082,
0.0859084278345108,
0.029133887961506844,
0.03330110013484955,
0.02861202508211136,
-0.015892991796135902,
0.034957405179739,
-0.14870113134384155,
-0.013396020047366619,
-0.009909764863550663,
-0.03717304766178131,
0.095201775431633,
0.037647783756256104,
0.023447968065738678,
-0.07046405971050262,
0.03938455134630203,
-0.03607631474733353,
-0.05959412828087807,
-0.021739980205893517,
-0.13029207289218903,
0.0655684694647789,
-0.02948692813515663,
-0.04524993896484375,
-0.061092231422662735,
-0.08739373087882996,
-0.06802525371313095,
0.03845922648906708,
-0.05235006660223007,
0.018396243453025818,
-0.07798982411623001,
-0.05299355089664459,
-0.017020447179675102,
-0.017445005476474762,
0.017957700416445732,
-0.03144284337759018,
0.02909485623240471,
-0.16252315044403076,
0.08862705528736115,
0.021213822066783905,
-0.00019046566740144044,
-0.08241511881351471,
0.011404872871935368,
-0.20595207810401917,
0.149448961019516,
-0.22313159704208374,
-0.029384367167949677,
-0.1492045372724533,
-0.07123684883117676,
0.03084559366106987,
0.04322221875190735,
-0.017505347728729248,
0.11201000213623047,
-0.14404472708702087,
-0.03520424664020538,
0.16117635369300842,
-0.14600950479507446,
-0.013595124706625938,
0.14198505878448486,
0.0008267622324638069,
0.0958358570933342,
0.10648338496685028,
0.22653864324092865,
0.09394922852516174,
-0.03639187663793564,
-0.05444692075252533,
0.052035462111234665,
-0.06341289728879929,
0.08776100724935532,
0.05601216107606888,
-0.02693793550133705,
0.07455427199602127,
0.045606471598148346,
-0.01626601256430149,
0.007159492000937462,
-0.014869275502860546,
-0.0791163221001625,
0.027923379093408585,
-0.023153800517320633,
-0.002073734300211072,
0.033112719655036926,
-0.0466289147734642,
-0.04470131918787956,
-0.10436572134494781,
-0.026876097545027733,
0.03618411347270012,
-0.10986753553152084,
0.03497977554798126,
-0.06234099343419075,
0.006913033779710531,
-0.011104111559689045,
0.018365459516644478,
-0.10753355175256729,
-0.14435043931007385,
0.006124674808233976,
-0.051289916038513184,
0.09876358509063721,
0.13334603607654572,
0.05073654651641846,
0.05288121476769447,
-0.024194953963160515,
0.03445090353488922,
-0.026188641786575317,
-0.004273736849427223,
-0.032038602977991104,
-0.1715860813856125,
-0.006993298884481192,
-0.054632458835840225,
0.04636472091078758,
-0.11213918775320053,
-0.011437422595918179,
0.05228491872549057,
0.08928598463535309,
0.030002014711499214,
-0.030114801600575447,
0.008420247584581375,
0.03869928419589996,
-0.00738335819914937,
-0.051247574388980865,
0.022191578522324562,
-0.08670131117105484,
-0.11531282216310501,
0.10413298010826111,
-0.03840501233935356,
-0.11928068101406097,
0.04040348902344704,
0.08493857830762863,
-0.1011071428656578,
-0.0017132555367425084,
-0.0735960602760315,
-0.030020028352737427,
-0.04857903718948364,
-0.021713433787226677,
0.11717141419649124,
0.0596800334751606,
0.04809092730283737,
-0.09750176221132278,
-0.007477985229343176,
0.0032658460550010204,
-0.03918604180216789,
-0.014424043707549572,
0.15249677002429962,
0.0064804269932210445,
-0.21760942041873932,
0.07400576025247574,
0.0029507814906537533,
0.05590534955263138,
0.22939398884773254,
-0.00813643354922533,
-0.09016650170087814,
-0.0004891410353593528,
0.029627233743667603,
-0.01404224056750536,
0.11513073742389679,
0.05046806111931801,
0.03417595475912094,
0.054745644330978394,
0.035276807844638824,
0.05593616142868996,
-0.0754353329539299,
-0.003588666208088398,
-0.009843770414590836,
-0.05956384912133217,
0.11067476868629456,
0.045075125992298126,
-0.011500661261379719,
0.08258122950792313,
0.01629120111465454,
-0.008827668614685535,
-0.04680917039513588,
-0.04945436865091324,
-0.06686445325613022,
0.17029374837875366,
-0.10570633411407471,
-0.2823243737220764,
-0.16128012537956238,
0.14270439743995667,
-0.06882691383361816,
-0.05351760983467102,
0.03806101903319359,
-0.072544626891613,
-0.13374564051628113,
-0.11610983312129974,
0.08780024945735931,
-0.015611755661666393,
-0.11248636990785599,
-0.0793987512588501,
0.038375142961740494,
-0.07019288837909698,
-0.14651894569396973,
0.010700172744691372,
-0.0016568793216720223,
-0.10075781494379044,
-0.0032150365877896547,
-0.023125002160668373,
0.0697593167424202,
0.07318542897701263,
-0.008593125268816948,
-0.016509810462594032,
-0.019399361684918404,
0.15647830069065094,
-0.1235601156949997,
0.12632542848587036,
0.018549686297774315,
-0.08001641184091568,
0.05672148987650871,
0.09402342885732651,
0.02531031146645546,
-0.03749234974384308,
0.028549328446388245,
0.07663626968860626,
-0.07271178066730499,
-0.2937993109226227,
-0.09970581531524658,
-0.02988370694220066,
0.08661573380231857,
-0.01034743431955576,
0.07025254517793655,
0.02943377010524273,
0.01612807810306549,
-0.059854719787836075,
-0.1097174659371376,
0.09121764451265335,
0.04381629824638367,
0.10520277172327042,
-0.06675461679697037,
0.050781916826963425,
-0.08056540787220001,
-0.020185064524412155,
0.0770186111330986,
0.037247076630592346,
0.20121383666992188,
0.018009399995207787,
0.08072350174188614,
0.0668145939707756,
0.07616053521633148,
0.0011193744139745831,
0.05659592151641846,
-0.0026863613165915012,
0.03208460286259651,
-0.03015347756445408,
-0.07489494979381561,
-0.030104205012321472,
0.08544384688138962,
0.10477405041456223,
-0.06637374311685562,
0.0407700315117836,
0.01748756133019924,
0.042772747576236725,
0.20046833157539368,
0.013496057130396366,
-0.02838164195418358,
-0.04639457166194916,
0.07476173341274261,
-0.091530442237854,
-0.04356590658426285,
0.044861312955617905,
0.14306244254112244,
-0.1737971156835556,
0.044987015426158905,
-0.03974183648824692,
0.08705021440982819,
-0.10332979261875153,
-0.04644973203539848,
-0.08769461512565613,
0.06007125601172447,
-0.029906146228313446,
0.1435830146074295,
-0.06244617700576782,
0.12452198565006256,
0.01718958653509617,
0.062157317996025085,
-0.07095377147197723,
0.0339789018034935,
-0.023929564282298088,
0.019748596474528313,
0.20882931351661682,
0.025516094639897346,
-0.09764523059129715,
-0.004508216865360737,
-0.08364333212375641,
0.045597754418849945,
0.07000510394573212,
-0.07246513664722443,
0.062183067202568054,
-0.022602127864956856,
0.015869544818997383,
-0.051055844873189926,
0.011550158262252808,
-0.10679949074983597,
-0.17722012102603912,
0.020907530561089516,
-0.10002758353948593,
-0.00869298167526722,
-0.04569779708981514,
-0.07239851355552673,
-0.17929591238498688,
0.23928150534629822,
-0.15799395740032196,
-0.13448990881443024,
-0.12908326089382172,
0.014250379055738449,
0.1448415070772171,
-0.023909777402877808,
-0.014390477910637856,
0.012673130258917809,
0.06536758691072464,
-0.0965695008635521,
-0.04813207685947418,
0.02304016798734665,
-0.01434700284153223,
-0.146805077791214,
0.026386689394712448,
0.1312524378299713,
0.11823008209466934,
0.040093958377838135,
0.01854817196726799,
0.01154917012900114,
0.06516165286302567,
-0.14786621928215027,
-0.02506372146308422,
0.21129997074604034,
-0.025458866730332375,
0.09769634902477264,
-0.048609409481287,
-0.03994414210319519,
-0.08611802011728287,
-0.04081542044878006,
0.17243258655071259,
0.1439475268125534,
-0.09097899496555328,
0.22553206980228424,
0.15558524429798126,
-0.11514294892549515,
-0.2886958122253418,
-0.032035063952207565,
0.0414450541138649,
0.023882536217570305,
0.10809578746557236,
-0.1273491084575653,
0.06923219561576843,
0.08360176533460617,
-0.010394356213510036,
0.0025374970864504576,
-0.24866020679473877,
-0.09599032998085022,
0.006084021646529436,
0.03781062737107277,
0.019452324137091637,
-0.037487663328647614,
-0.013283242471516132,
0.02572389878332615,
-0.08390238881111145,
0.10190863907337189,
0.020103393122553825,
0.08950299769639969,
0.010919005610048771,
0.05384049564599991,
0.06382473558187485,
-0.03956835716962814,
0.12380099296569824,
-0.022416219115257263,
-0.0003102992777712643,
-0.0936126559972763,
0.09855461120605469,
0.09917788952589035,
-0.0034914412535727024,
0.13445395231246948,
-0.04456726461648941,
-0.0011795277241617441,
-0.08486244827508926,
-0.06784535199403763,
-0.06484982371330261,
0.09512375295162201,
-0.06519099324941635,
-0.08489196747541428,
-0.14609456062316895,
0.12934578955173492,
0.10240576416254044,
-0.00252185738645494,
0.13428163528442383,
-0.09824908524751663,
0.04745224863290787,
0.07366540282964706,
0.19196943938732147,
0.05605445057153702,
-0.08852413296699524,
-0.030770765617489815,
-0.009820966050028801,
0.04648416116833687,
-0.18113362789154053,
0.04884827509522438,
0.16662825644016266,
0.0017286953516304493,
0.11798296868801117,
-0.02514703944325447,
-0.17247365415096283,
-0.005418877117335796,
0.038324806839227676,
-0.12109646201133728,
-0.28269481658935547,
-0.004768505226820707,
-0.11620271950960159,
-0.01228624489158392,
-0.017673829570412636,
0.15915942192077637,
-0.10633435100317001,
0.011443065479397774,
0.01798718050122261,
0.07695810496807098,
-0.015573736280202866,
0.0788101851940155,
0.050385598093271255,
0.044495608657598495,
-0.06441047787666321,
0.1419675350189209,
0.050821419805288315,
-0.11603442579507828,
0.05255766957998276,
0.1185808777809143,
-0.07311898469924927,
-0.04264012724161148,
0.0016308990307152271,
0.0831793025135994,
-0.05687727779150009,
-0.05654088035225868,
0.061182111501693726,
-0.085613913834095,
-0.021876288577914238,
0.06155649572610855,
-0.012464921921491623,
0.02509290538728237,
0.0020544608123600483,
0.013229441829025745,
-0.03986131399869919,
0.09690773487091064,
-0.031196273863315582,
-0.05363670364022255,
-0.028928622603416443,
0.012095066718757153,
0.0033690505661070347,
0.030584435909986496,
-0.0332394503057003,
-0.04587526246905327,
-0.06027621403336525,
-0.005291030742228031,
-0.06090373545885086,
-0.005997606553137302,
-0.09571538865566254,
0.015865040943026543,
0.001214862335473299,
-0.030832745134830475,
0.0070899901911616325,
0.011460288427770138,
-0.07096312940120697,
-0.005116359796375036,
-0.037027452141046524,
0.07855155318975449,
-0.1260862499475479,
-0.025656599551439285,
0.01755829155445099,
-0.038509368896484375,
0.06404298543930054,
-0.06772272288799286,
-0.02520398050546646,
0.0896044597029686,
-0.07252092659473419,
0.06741148233413696,
0.001202505431137979,
0.037484314292669296,
0.021110879257321358,
0.01567390374839306,
-0.05705137178301811,
-0.03468790277838707,
-0.017361190170049667,
0.024068230763077736,
-0.053112439811229706,
-0.07890830188989639,
0.0419265478849411,
-0.017314299941062927,
-0.06808783859014511,
-0.05866708606481552,
0.07479149103164673,
0.053120218217372894,
0.055435363203287125,
0.07803856581449509,
-0.055459264665842056,
0.08181817829608917,
-0.09393295645713806,
-0.020960602909326553,
0.05607589706778526,
-0.00323608354665339,
0.11002036184072495,
-0.04953804612159729,
0.019076332449913025,
-0.06656736135482788,
0.1352992057800293,
0.0526740700006485,
0.009024818427860737,
0.03806314244866371,
-0.11322695016860962,
-0.045011114329099655,
0.03479180857539177,
0.0737384706735611,
0.022653134539723396,
0.023533493280410767,
-0.021625282242894173,
-0.04429935663938522,
-0.07156199216842651,
-0.08269090950489044,
0.09663049131631851,
0.08428722620010376,
0.06426771730184555,
0.05780861899256706,
0.04734358564019203,
-0.04070761799812317,
-0.05216933786869049,
0.08189890533685684,
0.05287846550345421,
0.0715777650475502,
-0.024529630318284035,
0.042362846434116364,
0.14450111985206604,
-0.13876190781593323,
0.12095589190721512,
0.02800261601805687,
-0.09281995892524719,
-0.12468046694993973,
-0.1803704798221588,
-0.06136145070195198,
0.00804048590362072,
0.03479067608714104,
-0.14704348146915436,
0.05893675237894058,
0.003918128553777933,
0.10222367197275162,
-0.03624687343835831,
0.0342746265232563,
-0.09181258827447891,
-0.1142793670296669,
0.06274035573005676,
0.03795100748538971,
0.06535881012678146,
-0.028808897361159325,
0.04326650872826576,
0.0034359386190772057,
0.07312309741973877,
-0.0004672908107750118,
0.07265514880418777,
0.03497243672609329,
-0.009524272754788399,
-0.06615503132343292,
-0.05280033126473427,
-0.01618574745953083,
0.010755529627203941,
0.03416510298848152,
0.06319194287061691,
0.08041133731603622,
-0.0766894742846489,
-0.034970324486494064,
0.16855040192604065,
-0.016877885907888412,
-0.05255860462784767,
-0.14605429768562317,
0.1807943433523178,
-0.037553682923316956,
0.08431883901357651,
0.023011600598692894,
-0.0910905972123146,
0.0004125196428503841,
0.15809398889541626,
0.19357356429100037,
0.032777123153209686,
-0.018560253083705902,
-0.029169980436563492,
-0.003786495653912425,
-0.000546654628124088,
0.09260306507349014,
-0.04329180344939232,
0.225400909781456,
-0.035646915435791016,
0.09378349781036377,
-0.06432238966226578,
-0.04601964354515076,
-0.05966854840517044,
0.11895690113306046,
-0.008338750340044498,
-0.012303994968533516,
-0.07544754445552826,
0.1670631468296051,
-0.14229953289031982,
-0.31090012192726135,
0.11213061213493347,
-0.07538545876741409,
-0.14734958112239838,
-0.018939640372991562,
0.08022116124629974,
0.012353462167084217,
0.026941606774926186,
0.028902975842356682,
-0.006102238781750202,
0.08170249313116074,
0.04632636159658432,
-0.06371116638183594,
0.012007168494164944,
0.01968437246978283,
-0.09333246946334839,
0.13938061892986298,
0.04130074754357338,
0.04638383910059929,
0.08410229533910751,
-0.018531842157244682,
-0.0641755759716034,
-0.00617204187437892,
0.05980014055967331,
-0.06431398540735245,
-0.002511106664314866,
0.1688995510339737,
0.024432092905044556,
0.12642039358615875,
0.07718124240636826,
-0.06506269425153732,
0.009299526922404766,
0.046031128615140915,
-0.055337075144052505,
-0.014752735383808613,
0.10456272959709167,
-0.08392076939344406,
0.1273416131734848,
0.14409691095352173,
0.010156250558793545,
0.014971590600907803,
-0.08677656948566437,
0.004292767029255629,
-0.019906627014279366,
0.05984511598944664,
0.010250834748148918,
-0.10795384645462036,
-0.053134720772504807,
-0.11223149299621582,
0.025989113375544548,
-0.12141481041908264,
-0.01992720738053322,
-0.0067070540972054005,
0.010741211473941803,
-0.009460948407649994,
0.08994375914335251,
0.08501245081424713,
-0.031494349241256714,
0.0077088819816708565,
-0.20077566802501678,
0.04738854989409447,
0.08559105545282364,
-0.10381606966257095,
0.009011361747980118
] |
null | null |
transformers
|
# wav2vec2-common_voice-tr-demo
This model is a fine-tuned version of [facebook/wav2vec2-large-xlsr-53](https://huggingface.co/facebook/wav2vec2-large-xlsr-53) on the COMMON_VOICE - SV-SE dataset.
It achieves the following results on the evaluation set:
- Loss: 0.5528
- Wer: 0.3811
## Model description
More information needed
## Intended uses & limitations
More information needed
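As a hedged illustration (not from the original card), transcription with this checkpoint can be run roughly as follows; loading with the standard `Wav2Vec2Processor`/`Wav2Vec2ForCTC` classes and 16 kHz mono audio are assumptions.

```python
import torch
import torchaudio
from transformers import Wav2Vec2Processor, Wav2Vec2ForCTC

model_name = "birgermoell/wav2vec2-common_voice-tr-demo"
processor = Wav2Vec2Processor.from_pretrained(model_name)
model = Wav2Vec2ForCTC.from_pretrained(model_name)

# "sample.wav" is a placeholder path; resample to the 16 kHz rate wav2vec2 expects.
waveform, sample_rate = torchaudio.load("sample.wav")
waveform = torchaudio.functional.resample(waveform, sample_rate, 16_000).squeeze(0)

inputs = processor(waveform.numpy(), sampling_rate=16_000, return_tensors="pt")
with torch.no_grad():
    logits = model(inputs.input_values).logits
predicted_ids = torch.argmax(logits, dim=-1)
print(processor.batch_decode(predicted_ids)[0])
```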
## Training and evaluation data
More information needed
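The data pipeline is not documented here; a plausible sketch, assuming the "common_voice" dataset on the Hub with its "sv-SE" configuration was used and that the audio column is resampled to 16 kHz:

```python
from datasets import load_dataset, Audio

# Assumed source data: Common Voice, Swedish ("sv-SE") configuration.
common_voice = load_dataset("common_voice", "sv-SE", split="train+validation")
common_voice = common_voice.cast_column("audio", Audio(sampling_rate=16_000))
print(common_voice[0]["sentence"])
```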
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 0.0003
- train_batch_size: 16
- eval_batch_size: 8
- seed: 42
- gradient_accumulation_steps: 2
- total_train_batch_size: 32
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 500
- num_epochs: 15.0
- mixed_precision_training: Native AMP
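These values can be restated as a Trainer configuration; this is a sketch only, not the original training script, and `output_dir` plus the `fp16` flag (for "Native AMP") are assumptions. The Adam betas and epsilon above are the Transformers defaults.

```python
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="wav2vec2-common_voice-tr-demo",  # assumed
    learning_rate=3e-4,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=8,
    seed=42,
    gradient_accumulation_steps=2,   # 16 * 2 = 32 total train batch size
    num_train_epochs=15.0,
    lr_scheduler_type="linear",
    warmup_steps=500,
    fp16=True,                       # "Native AMP" mixed precision
)
```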
### Training results
| Training Loss | Epoch | Step | Validation Loss | Wer |
|:-------------:|:-----:|:----:|:---------------:|:------:|
| No log | 0.74 | 100 | 3.4444 | 1.0 |
| No log | 1.47 | 200 | 2.9421 | 1.0 |
| No log | 2.21 | 300 | 2.2802 | 1.0137 |
| No log | 2.94 | 400 | 0.9683 | 0.7611 |
| 3.7264 | 3.68 | 500 | 0.7941 | 0.6594 |
| 3.7264 | 4.41 | 600 | 0.6695 | 0.5751 |
| 3.7264 | 5.15 | 700 | 0.6507 | 0.5314 |
| 3.7264 | 5.88 | 800 | 0.5731 | 0.4927 |
| 3.7264 | 6.62 | 900 | 0.5723 | 0.4580 |
| 0.4592 | 7.35 | 1000 | 0.5913 | 0.4479 |
| 0.4592 | 8.09 | 1100 | 0.5562 | 0.4423 |
| 0.4592 | 8.82 | 1200 | 0.5566 | 0.4292 |
| 0.4592 | 9.56 | 1300 | 0.5492 | 0.4303 |
| 0.4592 | 10.29 | 1400 | 0.5665 | 0.4331 |
| 0.2121 | 11.03 | 1500 | 0.5610 | 0.4084 |
| 0.2121 | 11.76 | 1600 | 0.5703 | 0.4014 |
| 0.2121 | 12.5 | 1700 | 0.5669 | 0.3898 |
| 0.2121 | 13.24 | 1800 | 0.5586 | 0.3962 |
| 0.2121 | 13.97 | 1900 | 0.5656 | 0.3897 |
| 0.1326 | 14.71 | 2000 | 0.5565 | 0.3813 |
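The Wer column is the word error rate on the evaluation set; a minimal sketch of how it can be computed with the metric loader in the Datasets release listed below (the example strings are made up):

```python
from datasets import load_metric

wer_metric = load_metric("wer")
predictions = ["det här är ett test"]
references = ["det här är ett prov"]
print(wer_metric.compute(predictions=predictions, references=references))  # 0.2
```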
### Framework versions
- Transformers 4.16.0.dev0
- Pytorch 1.10.1+cu113
- Datasets 1.18.0
- Tokenizers 0.10.3
|
{"language": ["sv-SE"], "license": "apache-2.0", "tags": ["automatic-speech-recognition", "common_voice", "generated_from_trainer"], "datasets": ["common_voice"], "model-index": [{"name": "wav2vec2-common_voice-tr-demo", "results": []}]}
|
automatic-speech-recognition
|
birgermoell/wav2vec2-common_voice-tr-demo
|
[
"transformers",
"pytorch",
"wav2vec2",
"automatic-speech-recognition",
"common_voice",
"generated_from_trainer",
"dataset:common_voice",
"license:apache-2.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"sv-SE"
] |
TAGS
#transformers #pytorch #wav2vec2 #automatic-speech-recognition #common_voice #generated_from_trainer #dataset-common_voice #license-apache-2.0 #endpoints_compatible #region-us
|
wav2vec2-common\_voice-tr-demo
==============================
This model is a fine-tuned version of facebook/wav2vec2-large-xlsr-53 on the COMMON\_VOICE - SV-SE dataset.
It achieves the following results on the evaluation set:
* Loss: 0.5528
* Wer: 0.3811
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 0.0003
* train\_batch\_size: 16
* eval\_batch\_size: 8
* seed: 42
* gradient\_accumulation\_steps: 2
* total\_train\_batch\_size: 32
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* lr\_scheduler\_warmup\_steps: 500
* num\_epochs: 15.0
* mixed\_precision\_training: Native AMP
### Training results
### Framework versions
* Transformers 4.16.0.dev0
* Pytorch 1.10.1+cu113
* Datasets 1.18.0
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0003\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 8\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 32\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 500\n* num\\_epochs: 15.0\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.16.0.dev0\n* Pytorch 1.10.1+cu113\n* Datasets 1.18.0\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #common_voice #generated_from_trainer #dataset-common_voice #license-apache-2.0 #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0003\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 8\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 32\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 500\n* num\\_epochs: 15.0\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.16.0.dev0\n* Pytorch 1.10.1+cu113\n* Datasets 1.18.0\n* Tokenizers 0.10.3"
] |
[
67,
159,
4,
40
] |
[
"passage: TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #common_voice #generated_from_trainer #dataset-common_voice #license-apache-2.0 #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0003\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 8\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 32\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 500\n* num\\_epochs: 15.0\n* mixed\\_precision\\_training: Native AMP### Training results### Framework versions\n\n\n* Transformers 4.16.0.dev0\n* Pytorch 1.10.1+cu113\n* Datasets 1.18.0\n* Tokenizers 0.10.3"
] |
[
-0.11221516877412796,
0.07982339709997177,
-0.0037102941423654556,
0.033470284193754196,
0.12529268860816956,
0.005515833850950003,
0.09059354662895203,
0.14515577256679535,
-0.10154422372579575,
0.08516019582748413,
0.08222422748804092,
0.06913313269615173,
0.06400781124830246,
0.10395049303770065,
-0.015411758795380592,
-0.30983567237854004,
0.015789855271577835,
0.0006841248250566423,
-0.0776417925953865,
0.11088529229164124,
0.10258830338716507,
-0.1059601753950119,
0.007212532684206963,
0.027757087722420692,
-0.11553706973791122,
0.008132614195346832,
-0.02026238664984703,
-0.05999856814742088,
0.12362109869718552,
0.04961719363927841,
0.08552385121583939,
0.022055549547076225,
0.08976016938686371,
-0.28411126136779785,
0.014724109321832657,
0.06194252893328667,
0.04209987446665764,
0.06785918027162552,
0.10759888589382172,
-0.0174671933054924,
0.13164642453193665,
-0.06600376963615417,
0.06847414374351501,
0.05667690560221672,
-0.10264497995376587,
-0.31108972430229187,
-0.08542992174625397,
0.018909718841314316,
0.134964719414711,
0.10230018198490143,
-0.03967832028865814,
0.05450069531798363,
-0.089177705347538,
0.09460312128067017,
0.23210150003433228,
-0.24739080667495728,
-0.06965076923370361,
-0.04831781983375549,
0.05117202550172806,
0.042158931493759155,
-0.10996644198894501,
-0.023909246549010277,
0.024133455008268356,
0.04088933393359184,
0.09950067102909088,
0.010418473742902279,
-0.04821246862411499,
0.004101014230400324,
-0.13477849960327148,
-0.04494091868400574,
0.10789883881807327,
0.08064454793930054,
-0.028862496837973595,
-0.09027943015098572,
-0.007878492586314678,
-0.21899878978729248,
-0.04755597934126854,
0.008980651386082172,
0.03212464973330498,
-0.029097426682710648,
-0.09275585412979126,
0.02748967707157135,
-0.07804509997367859,
-0.08881999552249908,
0.01174084097146988,
0.11937457323074341,
0.04243012145161629,
-0.04458780214190483,
0.01430723536759615,
0.09066110849380493,
0.020372385159134865,
-0.127430260181427,
0.0010586377466097474,
0.05823376402258873,
-0.10644294321537018,
-0.018912578001618385,
-0.039356738328933716,
-0.06358671933412552,
0.009205945767462254,
0.10863008350133896,
-0.012439343146979809,
0.08776430040597916,
-0.010857019573450089,
0.025998959317803383,
-0.07473180443048477,
0.16614720225334167,
-0.054880257695913315,
-0.05463112145662308,
-0.044833745807409286,
0.07931150496006012,
-0.014150344766676426,
-0.014393532648682594,
-0.06551191955804825,
0.016238044947385788,
0.11395597457885742,
0.04930592328310013,
-0.020917920395731926,
0.0060809701681137085,
-0.06993085145950317,
-0.011680146679282188,
-0.02401505969464779,
-0.10774710029363632,
0.044764671474695206,
0.03899267315864563,
-0.05380581319332123,
0.015029039233922958,
0.0073450650088489056,
0.0325111486017704,
-0.03460386022925377,
0.13139359652996063,
-0.04900011420249939,
0.012079441919922829,
-0.06844046711921692,
-0.11079024523496628,
0.034804102033376694,
-0.030659779906272888,
-0.0003861312579829246,
-0.06695754081010818,
-0.07509440928697586,
-0.06451674550771713,
0.05307537689805031,
-0.052608635276556015,
-0.07638488709926605,
-0.0795745849609375,
-0.06429305672645569,
0.05496208742260933,
-0.037015318870544434,
0.17881284654140472,
-0.06475837528705597,
0.10823359340429306,
0.01221659779548645,
0.030872976407408714,
0.04929644241929054,
0.08902054280042648,
-0.03236572816967964,
0.03348059952259064,
-0.13812194764614105,
0.08350813388824463,
-0.0828942134976387,
0.047197286039590836,
-0.15011945366859436,
-0.12857766449451447,
-0.00943317823112011,
0.0027494162786751986,
0.1098954975605011,
0.08356976509094238,
-0.2011038362979889,
-0.09459856152534485,
0.17245930433273315,
-0.06587249040603638,
-0.07747161388397217,
0.153578981757164,
-0.023810505867004395,
-0.0223548486828804,
0.0500151664018631,
0.1884615123271942,
0.08776157349348068,
-0.10010530799627304,
0.027956310659646988,
-0.06792125850915909,
0.12917031347751617,
0.02882843278348446,
0.09131898730993271,
-0.05753018707036972,
0.02900034561753273,
-0.009569650515913963,
-0.00904929731041193,
0.07129917293787003,
-0.08962913602590561,
-0.07672697305679321,
-0.010395892895758152,
-0.07527811080217361,
0.008627571165561676,
0.054233163595199585,
0.014977733604609966,
-0.105507031083107,
-0.13153360784053802,
0.03229402005672455,
0.10123246908187866,
-0.10525882989168167,
0.04471191018819809,
-0.07628310471773148,
0.042247939854860306,
-0.011276161298155785,
-0.019255787134170532,
-0.16126748919487,
0.02899022586643696,
0.026663456112146378,
-0.04560062661767006,
0.03668529540300369,
-0.00985416118055582,
0.06744275987148285,
0.040647175163030624,
-0.053457364439964294,
-0.06264203786849976,
-0.0541444830596447,
0.0075690858066082,
-0.07908659428358078,
-0.2334805577993393,
-0.0636613592505455,
-0.02564016357064247,
0.14936333894729614,
-0.20925603806972504,
-0.004128406289964914,
0.02709232270717621,
0.10183138400316238,
0.029906943440437317,
-0.0503903329372406,
-0.006362652871757746,
0.09621035307645798,
-0.011474513448774815,
-0.05494192615151405,
0.03435799106955528,
0.00890851579606533,
-0.11944033205509186,
0.029763730242848396,
-0.13574998080730438,
0.06798844039440155,
0.10079330205917358,
-0.03125372529029846,
-0.08904880285263062,
-0.06043989583849907,
-0.05369548127055168,
-0.061576224863529205,
-0.03178391978144646,
-0.0009484617621637881,
0.2247742861509323,
0.035693105310201645,
0.11686018109321594,
-0.06483034044504166,
-0.0369190014898777,
0.03091784380376339,
0.008550304919481277,
-0.003825431689620018,
0.1321186125278473,
0.0636187419295311,
-0.038798749446868896,
0.08736038208007812,
0.06870201975107193,
-0.0792996808886528,
0.13743005692958832,
-0.0685860738158226,
-0.11637987941503525,
-0.02788403071463108,
0.011186717078089714,
0.026471512392163277,
0.09562361985445023,
-0.16783498227596283,
-0.009528850205242634,
0.01866108924150467,
0.03411611542105675,
0.020753318443894386,
-0.20694264769554138,
-0.003913046792149544,
0.0509728379547596,
-0.07425583899021149,
-0.05122673511505127,
-0.020250149071216583,
-0.003917294554412365,
0.08581743389368057,
0.009984306991100311,
-0.06127502769231796,
-0.019524618983268738,
-0.03643076494336128,
-0.09170006960630417,
0.1694878190755844,
-0.11508700996637344,
-0.13535742461681366,
-0.12188822776079178,
-0.05673268437385559,
0.019540250301361084,
-0.014763237908482552,
0.06778411567211151,
-0.1167382225394249,
-0.04201086610555649,
-0.06402234733104706,
0.03663851320743561,
-0.07114619016647339,
0.02769097127020359,
-0.01550863403826952,
0.00769296009093523,
0.07676523923873901,
-0.10626313090324402,
0.018293600529432297,
-0.011235471814870834,
-0.02892550826072693,
0.02114032581448555,
0.04187728092074394,
0.08271221071481705,
0.17553235590457916,
0.041146308183670044,
0.011354935355484486,
-0.04626409709453583,
0.15684716403484344,
-0.11487464606761932,
-0.03227734938263893,
0.09910076856613159,
-0.0021431727800518274,
0.033807918429374695,
0.14572003483772278,
0.053697068244218826,
-0.08156317472457886,
0.02062535099685192,
0.04159534350037575,
-0.013384441845119,
-0.2551257610321045,
-0.04267681762576103,
-0.07147198170423508,
-0.034848857671022415,
0.09644518047571182,
0.029109077528119087,
-0.004390187095850706,
0.006697566714137793,
-0.014241939410567284,
-0.0010796342976391315,
0.018367739394307137,
0.05895495414733887,
0.10029197484254837,
0.03322315216064453,
0.11556291580200195,
-0.012695475481450558,
-0.02219398319721222,
0.026582499966025352,
-0.014025919139385223,
0.24021854996681213,
0.017056239768862724,
0.1741015762090683,
0.05536332353949547,
0.15082065761089325,
0.015207024291157722,
0.05177310109138489,
0.018327362835407257,
-0.016241492703557014,
0.01816563494503498,
-0.05209549888968468,
-0.037018582224845886,
0.02847413904964924,
0.11845344305038452,
0.021257823333144188,
-0.11961277574300766,
-0.037037648260593414,
0.008472288027405739,
0.3669363856315613,
0.07494024932384491,
-0.2720969617366791,
-0.08602435141801834,
0.002679790137335658,
-0.10318516194820404,
-0.04109272360801697,
0.033477623015642166,
0.10736478865146637,
-0.09587065875530243,
0.06184084340929985,
-0.05097753182053566,
0.10296761989593506,
-0.05792714282870293,
0.011819655075669289,
0.07039035111665726,
0.06931061297655106,
-0.006073499098420143,
0.0620465986430645,
-0.2745465934276581,
0.3048536479473114,
-0.014300503768026829,
0.07893231511116028,
-0.029961688444018364,
0.03226953372359276,
0.02270505018532276,
-0.04622148722410202,
0.060382358729839325,
-0.01054326631128788,
-0.1080980896949768,
-0.17879824340343475,
-0.07257169485092163,
0.017757441848516464,
0.12037099152803421,
-0.03479420766234398,
0.11432987451553345,
-0.032899465411901474,
-0.017784545198082924,
0.055737100541591644,
-0.06359824538230896,
-0.10270409286022186,
-0.10217662900686264,
0.011376961134374142,
0.04338952898979187,
0.11176702380180359,
-0.09075646847486496,
-0.1033208817243576,
-0.08188094943761826,
0.15591274201869965,
-0.08529634773731232,
-0.0032104726415127516,
-0.11877358704805374,
0.08190998435020447,
0.16093716025352478,
-0.06367002427577972,
0.057636916637420654,
0.036999598145484924,
0.1109461560845375,
0.028388842940330505,
0.004862667061388493,
0.11894456297159195,
-0.07445800304412842,
-0.18293355405330658,
-0.06656596809625626,
0.1812663972377777,
0.045000702142715454,
0.08826813101768494,
-0.02602355368435383,
0.0336526557803154,
-0.013319467194378376,
-0.05851021781563759,
0.06095258519053459,
0.036302994936704636,
-0.000024092712919809856,
0.06715583056211472,
-0.03221258148550987,
-0.02801583521068096,
-0.08623132854700089,
-0.08979519456624985,
0.17365914583206177,
0.2868083119392395,
-0.08120306581258774,
0.06155563145875931,
0.03921244665980339,
-0.05489126220345497,
-0.1293407678604126,
0.015339179895818233,
0.1407119184732437,
0.04730500280857086,
0.021373571828007698,
-0.22073154151439667,
0.034625791013240814,
0.08966165781021118,
-0.01795293390750885,
0.055723246186971664,
-0.3260408341884613,
-0.1338607668876648,
0.10965710878372192,
0.08447291702032089,
-0.032166581600904465,
-0.1442445069551468,
-0.05640806257724762,
-0.02792169898748398,
-0.10232776403427124,
0.04212551191449165,
-0.012174486182630062,
0.1273973137140274,
0.012940917164087296,
0.06661555171012878,
0.027463361620903015,
-0.041355013847351074,
0.13648684322834015,
-0.014725897461175919,
0.040074530988931656,
-0.01432754285633564,
0.05657913535833359,
-0.030577674508094788,
-0.03764966502785683,
-0.003089969279244542,
-0.08305652439594269,
0.004367826972156763,
-0.12296223640441895,
-0.03362053260207176,
-0.07972323149442673,
0.005726702976971865,
-0.037769608199596405,
-0.03882226347923279,
-0.012531326152384281,
0.03109012357890606,
0.08111340552568436,
0.0016811755485832691,
0.09981494396924973,
-0.07591556012630463,
0.1552094668149948,
0.07514706254005432,
0.10798531770706177,
-0.006478250492364168,
-0.10530810058116913,
-0.009998333640396595,
-0.016535114496946335,
0.04495595395565033,
-0.10343621671199799,
0.041055548936128616,
0.14244095981121063,
0.04601641371846199,
0.1591581404209137,
0.04907597228884697,
-0.08099940419197083,
0.02711491845548153,
0.0574130043387413,
-0.059681687504053116,
-0.1383366882801056,
-0.004246054217219353,
0.0655222088098526,
-0.12768875062465668,
-0.021426435559988022,
0.10649304836988449,
-0.04939320683479309,
-0.018504159525036812,
0.018842581659555435,
0.03127022087574005,
-0.06484729796648026,
0.23141948878765106,
0.007285472005605698,
0.07480460405349731,
-0.09370627999305725,
0.06258892267942429,
0.0718880221247673,
-0.1739666759967804,
0.04051487520337105,
0.0830429419875145,
-0.03049691952764988,
-0.02204263210296631,
0.04124131798744202,
0.06965136528015137,
0.029960215091705322,
-0.05601698160171509,
-0.09333157539367676,
-0.15689344704151154,
0.09170708060264587,
0.08995944261550903,
0.02152673900127411,
0.021323828026652336,
-0.04444628208875656,
0.03820641711354256,
-0.10701800882816315,
0.08695799112319946,
0.10034379363059998,
0.061796434223651886,
-0.1192588359117508,
0.16252920031547546,
0.015031453222036362,
-0.00765675213187933,
0.009662768803536892,
-0.010850547812879086,
-0.0786331295967102,
0.041632067412137985,
-0.13025277853012085,
-0.021703526377677917,
-0.04795753210783005,
0.0017920678947120905,
0.013208838179707527,
-0.05692953243851662,
-0.050600070506334305,
0.019970551133155823,
-0.12164578586816788,
-0.040298353880643845,
-0.018816746771335602,
0.07343953847885132,
-0.09749315679073334,
-0.022799139842391014,
0.045524321496486664,
-0.10210458934307098,
0.0905313789844513,
0.05976715683937073,
0.012578250840306282,
0.03811148181557655,
-0.13860253989696503,
-0.006413716822862625,
0.04505612701177597,
0.00011083092249464244,
0.0204288512468338,
-0.18237575888633728,
-0.020776860415935516,
-0.006729952059686184,
0.03041836805641651,
-0.00525807635858655,
0.02908247336745262,
-0.1302478313446045,
-0.04704158008098602,
-0.03593536093831062,
-0.07067080587148666,
-0.05679476261138916,
0.04167165234684944,
0.06295507401227951,
0.04625312238931656,
0.16379483044147491,
-0.09996039420366287,
0.0718284323811531,
-0.20829962193965912,
0.0150145273655653,
-0.045655861496925354,
-0.055697157979011536,
-0.07906324416399002,
-0.03437504544854164,
0.08762281388044357,
-0.060589998960494995,
0.07156877219676971,
-0.043230317533016205,
0.06168805807828903,
0.03462042659521103,
-0.12385524064302444,
0.007358406204730272,
0.04039706289768219,
0.22435534000396729,
0.052137341350317,
-0.03588956966996193,
0.06329968571662903,
-0.005870217457413673,
0.055633511394262314,
0.18850010633468628,
0.13103610277175903,
0.17512068152427673,
0.06758607178926468,
0.0801566019654274,
0.06485677510499954,
-0.12600870430469513,
-0.12115667760372162,
0.12054109573364258,
-0.03725256398320198,
0.13715901970863342,
-0.016882799565792084,
0.2700650095939636,
0.0933828055858612,
-0.19796137511730194,
0.07229072600603104,
-0.04209519550204277,
-0.08609896898269653,
-0.09663411229848862,
-0.0570523738861084,
-0.07440299540758133,
-0.1822274625301361,
0.007558770943433046,
-0.10448308289051056,
0.0753805860877037,
0.0476275272667408,
0.04175839200615883,
0.026464631780982018,
0.11103640496730804,
0.03423751890659332,
-0.01680188998579979,
0.11497409641742706,
0.010994788259267807,
-0.0113908676430583,
-0.06601601839065552,
-0.0830220878124237,
0.06572752445936203,
-0.0337030328810215,
0.053840748965740204,
-0.029221422970294952,
-0.10572097450494766,
0.062182992696762085,
0.002131653716787696,
-0.11149761080741882,
0.026780081912875175,
-0.01586761698126793,
0.07456057518720627,
0.11728709191083908,
0.044535309076309204,
-0.005086587741971016,
-0.012875507585704327,
0.2447095811367035,
-0.10144829750061035,
-0.0649251937866211,
-0.12586095929145813,
0.24384431540966034,
0.02312229573726654,
-0.017358705401420593,
0.014625740237534046,
-0.06732112169265747,
0.0007108390564098954,
0.14293073117733002,
0.14778858423233032,
-0.007349308114498854,
-0.01740865781903267,
0.009621458128094673,
-0.011815779842436314,
-0.0414174385368824,
0.07367927581071854,
0.13758890330791473,
0.05400171875953674,
-0.057286154478788376,
-0.009943763725459576,
-0.05749940127134323,
-0.04782465100288391,
-0.019323483109474182,
0.06037425994873047,
0.023705722764134407,
-0.02315448224544525,
-0.012833650223910809,
0.13058780133724213,
-0.059076134115457535,
-0.14312857389450073,
0.006141113582998514,
-0.1822117120027542,
-0.18444903194904327,
-0.028820769861340523,
0.07536429166793823,
0.058326683938503265,
0.04341493546962738,
-0.01317988708615303,
-0.014755075797438622,
0.12314049154520035,
0.0012473338283598423,
-0.03652239963412285,
-0.116225466132164,
0.08431103080511093,
-0.0974983349442482,
0.166346937417984,
-0.03350924327969551,
0.03944208100438118,
0.10955603420734406,
0.09626156091690063,
-0.058773454278707504,
0.06398764997720718,
0.0724099650979042,
-0.14289870858192444,
0.06135474145412445,
0.20295724272727966,
-0.04677719622850418,
0.14724090695381165,
0.048319682478904724,
-0.11092737317085266,
0.03465336188673973,
-0.11687833815813065,
-0.058865539729595184,
-0.04922086000442505,
-0.005184571258723736,
-0.046923477202653885,
0.13154573738574982,
0.20589807629585266,
-0.06990250200033188,
-0.02324928715825081,
-0.05843798443675041,
-0.008889265358448029,
0.03658048063516617,
0.13550665974617004,
-0.047933515161275864,
-0.27205196022987366,
0.02462572418153286,
0.002321816748008132,
0.022194311022758484,
-0.24082864820957184,
-0.09252375364303589,
0.02982310764491558,
-0.0618770532310009,
-0.06848342716693878,
0.11624164879322052,
0.0627722293138504,
0.03621579334139824,
-0.05961071699857712,
-0.11192747205495834,
-0.018267827108502388,
0.19703058898448944,
-0.169640451669693,
-0.056978970766067505
] |
null | null |
transformers
|
# Wav2Vec2-Large-XLSR-53-Estonian
Fine-tuned [facebook/wav2vec2-large-xlsr-53](https://huggingface.co/facebook/wav2vec2-large-xlsr-53) in Estonian using the [Common Voice](https://huggingface.co/datasets/common_voice)
When using this model, make sure that your speech input is sampled at 16kHz.
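If your recordings are stored at a different sample rate, resample them before running the model. The snippet below is a minimal sketch, assuming torchaudio is installed and using `audio.wav` as a placeholder path:
```python
import torchaudio

# Load the waveform at its native sample rate (placeholder path).
speech_array, sampling_rate = torchaudio.load("audio.wav")

# Resample to the 16 kHz rate the model expects, if necessary.
if sampling_rate != 16_000:
    resampler = torchaudio.transforms.Resample(sampling_rate, 16_000)
    speech_array = resampler(speech_array)

speech = speech_array.squeeze().numpy()
```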
## Usage
The model can be used directly (without a language model) as follows:
```python
import torch
import torchaudio
from datasets import load_dataset
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
test_dataset = load_dataset("common_voice", "et", split="test[:2%]")
processor = Wav2Vec2Processor.from_pretrained("birgermoell/wav2vec2-large-xlrs-estonian")
model = Wav2Vec2ForCTC.from_pretrained("birgermoell/wav2vec2-large-xlrs-estonian")
resampler = torchaudio.transforms.Resample(48_000, 16_000)
# Preprocessing the datasets.
# We need to read the audio files as arrays
def speech_file_to_array_fn(batch):
    speech_array, sampling_rate = torchaudio.load(batch["path"])
    batch["speech"] = resampler(speech_array).squeeze().numpy()
    return batch
test_dataset = test_dataset.map(speech_file_to_array_fn)
inputs = processor(test_dataset["speech"][:2], sampling_rate=16_000, return_tensors="pt", padding=True)
with torch.no_grad():
    logits = model(inputs.input_values, attention_mask=inputs.attention_mask).logits
predicted_ids = torch.argmax(logits, dim=-1)
print("Prediction:", processor.batch_decode(predicted_ids))
print("Reference:", test_dataset["sentence"][:2])
```
## Evaluation
The model can be evaluated as follows on the Estonian test data of Common Voice.
```python
import torch
import torchaudio
from datasets import load_dataset, load_metric
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
import re
test_dataset = load_dataset("common_voice", "et", split="test")
wer = load_metric("wer")
processor = Wav2Vec2Processor.from_pretrained("birgermoell/wav2vec2-large-xlrs-estonian")
model = Wav2Vec2ForCTC.from_pretrained("birgermoell/wav2vec2-large-xlrs-estonian")
model.to("cuda")
chars_to_ignore_regex = '[\,\?\.\!\-\;\:\"\“]'
resampler = torchaudio.transforms.Resample(48_000, 16_000)
# Preprocessing the datasets.
# We need to read the audio files as arrays
def speech_file_to_array_fn(batch):
    batch["sentence"] = re.sub(chars_to_ignore_regex, '', batch["sentence"]).lower()
    speech_array, sampling_rate = torchaudio.load(batch["path"])
    batch["speech"] = resampler(speech_array).squeeze().numpy()
    return batch
test_dataset = test_dataset.map(speech_file_to_array_fn)
# Run inference on the test set and collect the predicted strings
def evaluate(batch):
    inputs = processor(batch["speech"], sampling_rate=16_000, return_tensors="pt", padding=True)
    with torch.no_grad():
        logits = model(inputs.input_values.to("cuda"), attention_mask=inputs.attention_mask.to("cuda")).logits
    pred_ids = torch.argmax(logits, dim=-1)
    batch["pred_strings"] = processor.batch_decode(pred_ids)
    return batch
result = test_dataset.map(evaluate, batched=True, batch_size=8)
print("WER: {:2f}".format(100 * wer.compute(predictions=result["pred_strings"], references=result["sentence"])))
```
**Test Result**:
WER: 36.951816
## Training
The Common Voice `train` and `validation` datasets were used for training.
The script used for training can be found here
https://colab.research.google.com/drive/1VcWT92vBCwVn-5d-mkYxhgILPr11OHfR?usp=sharing
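As a rough sketch of how the data can be assembled (the full preprocessing and training loop live in the notebook linked above), the Common Voice splits can be loaded with the `datasets` library:
```python
from datasets import load_dataset

# Combine the Estonian train and validation splits for fine-tuning; keep test for evaluation.
train_dataset = load_dataset("common_voice", "et", split="train+validation")
test_dataset = load_dataset("common_voice", "et", split="test")

print(train_dataset)
print(test_dataset)
```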
|
{"language": "et", "license": "apache-2.0", "tags": ["audio", "automatic-speech-recognition", "speech", "xlsr-fine-tuning-week"], "datasets": ["common_voice"], "model-index": [{"name": "XLSR Wav2Vec2 Estonian by Birger Moell", "results": [{"task": {"type": "automatic-speech-recognition", "name": "Speech Recognition"}, "dataset": {"name": "Common Voice Estonian", "type": "common_voice", "args": "et"}, "metrics": [{"type": "wer", "value": 36.951816, "name": "Test WER"}]}]}]}
|
automatic-speech-recognition
|
birgermoell/wav2vec2-large-xlrs-estonian
|
[
"transformers",
"pytorch",
"jax",
"wav2vec2",
"automatic-speech-recognition",
"audio",
"speech",
"xlsr-fine-tuning-week",
"et",
"dataset:common_voice",
"license:apache-2.0",
"model-index",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"et"
] |
TAGS
#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #xlsr-fine-tuning-week #et #dataset-common_voice #license-apache-2.0 #model-index #endpoints_compatible #region-us
|
# Wav2Vec2-Large-XLSR-53-Estonian
Fine-tuned facebook/wav2vec2-large-xlsr-53 in Estonian using the Common Voice
When using this model, make sure that your speech input is sampled at 16kHz.
## Usage
The model can be used directly (without a language model) as follows:
## Evaluation
The model can be evaluated as follows on the Estonian test data of Common Voice.
Test Result:
WER: 36.951816
## Training
The Common Voice 'train' and 'validation' datasets were used for training.
The script used for training can be found here
URL
|
[
"# Wav2Vec2-Large-XLSR-53-Estonian\n\nFine-tuned facebook/wav2vec2-large-xlsr-53 in Estonian using the Common Voice\nWhen using this model, make sure that your speech input is sampled at 16kHz.",
"## Usage\n\nThe model can be used directly (without a language model) as follows:",
"## Evaluation\n\nThe model can be evaluated as follows on the Estonian test data of Common Voice.\n\n\n\n\nTest Result:\nWER: 36.951816",
"## Training\n\nThe Common Voice 'train' and 'validation' datasets were used for training.\nThe script used for training can be found here\nURL"
] |
[
"TAGS\n#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #xlsr-fine-tuning-week #et #dataset-common_voice #license-apache-2.0 #model-index #endpoints_compatible #region-us \n",
"# Wav2Vec2-Large-XLSR-53-Estonian\n\nFine-tuned facebook/wav2vec2-large-xlsr-53 in Estonian using the Common Voice\nWhen using this model, make sure that your speech input is sampled at 16kHz.",
"## Usage\n\nThe model can be used directly (without a language model) as follows:",
"## Evaluation\n\nThe model can be evaluated as follows on the Estonian test data of Common Voice.\n\n\n\n\nTest Result:\nWER: 36.951816",
"## Training\n\nThe Common Voice 'train' and 'validation' datasets were used for training.\nThe script used for training can be found here\nURL"
] |
[
80,
62,
20,
32,
33
] |
[
"passage: TAGS\n#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #xlsr-fine-tuning-week #et #dataset-common_voice #license-apache-2.0 #model-index #endpoints_compatible #region-us \n# Wav2Vec2-Large-XLSR-53-Estonian\n\nFine-tuned facebook/wav2vec2-large-xlsr-53 in Estonian using the Common Voice\nWhen using this model, make sure that your speech input is sampled at 16kHz.## Usage\n\nThe model can be used directly (without a language model) as follows:## Evaluation\n\nThe model can be evaluated as follows on the Estonian test data of Common Voice.\n\n\n\n\nTest Result:\nWER: 36.951816## Training\n\nThe Common Voice 'train' and 'validation' datasets were used for training.\nThe script used for training can be found here\nURL"
] |
[
-0.1428551822900772,
0.0014933116035535932,
-0.0018434664234519005,
0.0016972885932773352,
0.12373056262731552,
-0.03574923053383827,
0.1813642382621765,
0.11513392627239227,
0.019650401547551155,
-0.026717666536569595,
0.021612288430333138,
0.004151956178247929,
0.04619215428829193,
0.08532112836837769,
0.06267356872558594,
-0.23971672356128693,
-0.004027812276035547,
0.015610608272254467,
0.031241299584507942,
0.11373768001794815,
0.10912664234638214,
-0.05344957858324051,
-0.008571757934987545,
0.08406016230583191,
-0.1671687811613083,
0.06295723468065262,
0.008593004196882248,
-0.09381361305713654,
0.14572682976722717,
0.05156271159648895,
0.09465770423412323,
0.017565451562404633,
0.11235877871513367,
-0.2153828740119934,
0.02619362249970436,
0.033796802163124084,
0.023246675729751587,
0.017783688381314278,
0.01790158636868,
-0.04737646132707596,
0.10254240781068802,
0.08405586332082748,
-0.015183745883405209,
0.07564212381839752,
-0.05881129950284958,
-0.17841127514839172,
0.013074823655188084,
-0.011539224535226822,
0.07719100266695023,
0.17278675734996796,
-0.05410849675536156,
0.05128259211778641,
-0.1627318263053894,
0.08681295812129974,
0.12323708087205887,
-0.14173969626426697,
0.0038861031644046307,
0.11481921374797821,
0.0642881914973259,
0.062347959727048874,
-0.0803200826048851,
0.015413781628012657,
0.025027960538864136,
0.018242938444018364,
0.04507173225283623,
-0.022351987659931183,
-0.2192506343126297,
-0.03771857172250748,
-0.127670019865036,
-0.00549439899623394,
0.2285473644733429,
-0.02075183019042015,
-0.06465297937393188,
-0.11119580268859863,
0.0169485192745924,
0.005713496822863817,
-0.01740705594420433,
-0.02124549262225628,
-0.0004339748702477664,
0.04300680384039879,
0.0032168817706406116,
-0.07287461310625076,
-0.11905238777399063,
-0.15868249535560608,
0.04935261234641075,
0.09708142280578613,
0.02982013300061226,
-0.005195484030991793,
-0.13387782871723175,
0.1067761480808258,
-0.08341275900602341,
-0.06476718187332153,
-0.02876978926360607,
0.02140960656106472,
-0.03663454204797745,
0.016224564984440804,
-0.06843441724777222,
-0.1557014137506485,
0.027374373748898506,
0.04985050857067108,
0.129537433385849,
0.031577885150909424,
-0.018179314211010933,
0.07569773495197296,
-0.01983858458697796,
0.15073521435260773,
-0.00825571920722723,
-0.03425068035721779,
0.04520372301340103,
0.06131616234779358,
-0.04840036854147911,
-0.01480924990028143,
-0.10796673595905304,
-0.07980115711688995,
0.007097965572029352,
0.0859808698296547,
-0.03017878159880638,
0.03679151460528374,
-0.045121341943740845,
-0.025058967992663383,
0.014522591605782509,
-0.11612117290496826,
-0.03884946554899216,
0.07973391562700272,
-0.032556451857089996,
0.08952026814222336,
0.1110619306564331,
0.038413599133491516,
-0.0983598604798317,
-0.05627889558672905,
0.01786728948354721,
0.08941508084535599,
-0.05606212094426155,
-0.10460920631885529,
0.024856911972165108,
-0.006660825107246637,
0.0008932884666137397,
-0.13102179765701294,
-0.12078087031841278,
-0.07547216862440109,
-0.004522003699094057,
0.05121507868170738,
0.027778945863246918,
-0.11966998130083084,
-0.015349159017205238,
-0.043783146888017654,
-0.04609152302145958,
0.0422099344432354,
-0.03350881487131119,
0.053927455097436905,
-0.00047106947749853134,
0.04380076006054878,
0.0543452687561512,
0.07980173081159592,
-0.10028448700904846,
-0.08622201532125473,
-0.007848113775253296,
0.1164577379822731,
-0.049286823719739914,
-0.007939646020531654,
-0.08795642107725143,
-0.07645745575428009,
-0.09966921806335449,
0.08545488864183426,
0.05447792634367943,
0.1323663741350174,
-0.2362874150276184,
-0.09310740232467651,
0.2477731555700302,
-0.1195203959941864,
-0.01841985620558262,
0.17512743175029755,
-0.019740810617804527,
0.15313847362995148,
0.1316210776567459,
0.23441562056541443,
0.1833195835351944,
-0.20530061423778534,
0.05963516980409622,
0.017070550471544266,
-0.011314400471746922,
-0.07934519648551941,
0.0807279497385025,
-0.05402342602610588,
0.013609836809337139,
0.03878970071673393,
-0.11215073615312576,
0.10044152289628983,
-0.013838929124176502,
-0.06309616565704346,
-0.006543261464685202,
-0.06335590034723282,
0.02470082975924015,
0.036012668162584305,
0.017537426203489304,
-0.030758652836084366,
-0.10164237767457962,
0.07399293035268784,
0.14339551329612732,
-0.16002947092056274,
0.06776314973831177,
-0.108971506357193,
0.026300696656107903,
-0.020721232518553734,
0.010904583148658276,
-0.1302194446325302,
0.12917597591876984,
-0.026762448251247406,
0.01648721657693386,
0.0643736720085144,
0.10941935330629349,
0.014992987737059593,
0.012067015282809734,
-0.03850991278886795,
-0.013092448934912682,
-0.03953290358185768,
-0.03217994421720505,
-0.02316759154200554,
-0.08216825127601624,
-0.043768975883722305,
-0.06511566787958145,
0.07854541391134262,
-0.1884908527135849,
0.034160662442445755,
0.02147538959980011,
-0.013044117018580437,
-0.003907681442797184,
-0.023181963711977005,
0.07876883447170258,
0.10355902463197708,
-0.012624102644622326,
-0.014867168851196766,
0.06234147772192955,
0.009692823514342308,
-0.06109410151839256,
0.09779289364814758,
-0.13726578652858734,
-0.010031873360276222,
0.10584112256765366,
-0.06730516999959946,
-0.011026773601770401,
0.03509645536541939,
-0.012643367983400822,
-0.007591294124722481,
-0.06700415164232254,
-0.05117243155837059,
0.2630438804626465,
-0.013628697022795677,
0.12179220467805862,
-0.07957854866981506,
0.016798464581370354,
0.003916577436029911,
-0.10126953572034836,
0.06842327117919922,
0.05441880598664284,
0.021813932806253433,
0.06141764670610428,
0.021978700533509254,
-0.06325095891952515,
-0.10103555768728256,
0.294516921043396,
-0.029473252594470978,
-0.0806504487991333,
0.028050050139427185,
-0.008767690509557724,
-0.01976757124066353,
0.04583665728569031,
-0.2070673704147339,
-0.0827479287981987,
0.0049964552745223045,
0.057121243327856064,
0.08491717278957367,
-0.17316821217536926,
-0.007585865445435047,
0.021992530673742294,
-0.12676680088043213,
-0.16044114530086517,
0.072110116481781,
-0.06798103451728821,
0.03730754926800728,
-0.10056212544441223,
-0.03575310483574867,
-0.004896101076155901,
-0.042347609996795654,
-0.16806189715862274,
0.15816590189933777,
-0.08750756829977036,
-0.2141713798046112,
-0.17038817703723907,
0.05769915506243706,
0.07447167485952377,
0.002793053863570094,
0.09088478982448578,
-0.14621399343013763,
0.0006627226248383522,
-0.03221290558576584,
0.1326148957014084,
0.03221270814538002,
-0.06062034144997597,
-0.026335583999753,
0.04641157388687134,
0.056602321565151215,
-0.1755136400461197,
0.002361358841881156,
-0.02639463171362877,
-0.07340922206640244,
-0.009674540720880032,
-0.053298525512218475,
0.021832147613167763,
0.17189942300319672,
0.04266023263335228,
0.012213087640702724,
-0.01901426538825035,
0.14125128090381622,
-0.09269285947084427,
0.007258294615894556,
0.22016942501068115,
-0.012936125509440899,
-0.010727057233452797,
0.060079533606767654,
0.035304948687553406,
-0.06433034688234329,
0.010505366139113903,
-0.02317442186176777,
-0.10039477795362473,
-0.2487916648387909,
-0.09511065483093262,
-0.06534413993358612,
-0.04757201671600342,
-0.008941377513110638,
0.0025988894049078226,
0.039247531443834305,
0.018641354516148567,
-0.002573584206402302,
-0.09777841717004776,
0.0894683226943016,
0.021726131439208984,
0.0892760306596756,
-0.002760052215307951,
0.08793076127767563,
-0.06393260508775711,
-0.002433952409774065,
-0.004446569364517927,
0.02870228886604309,
0.16854016482830048,
0.04931710660457611,
0.10202457010746002,
0.0929187461733818,
0.0873560756444931,
0.08971843868494034,
0.06876881420612335,
-0.033627238124608994,
-0.008390438742935658,
0.022206656634807587,
-0.04864030331373215,
-0.03301917016506195,
0.030483363196253777,
0.07175841927528381,
-0.0715014711022377,
-0.04999464750289917,
-0.029274793341755867,
0.03003440424799919,
0.12471538037061691,
0.05610423907637596,
-0.20047476887702942,
-0.09564689546823502,
-0.03174891695380211,
-0.056934721767902374,
0.010658820159733295,
0.05298817902803421,
0.14480486512184143,
-0.14147813618183136,
0.0010995633201673627,
0.0060114432126283646,
0.09413067251443863,
0.010946526192128658,
0.018579158931970596,
-0.04715431481599808,
0.07412347942590714,
-0.0035588021855801344,
0.09857210516929626,
-0.26057082414627075,
0.20184390246868134,
-0.004848303273320198,
0.15674874186515808,
-0.04189290106296539,
0.002711391309276223,
0.045775916427373886,
0.07209187746047974,
0.11485681682825089,
0.01990496926009655,
0.03948195278644562,
-0.13314670324325562,
-0.07467477023601532,
0.05195849388837814,
0.0064637791365385056,
-0.016185197979211807,
0.051912128925323486,
-0.006292157806456089,
0.003786959685385227,
0.022636039182543755,
-0.08906258642673492,
-0.14664024114608765,
-0.0582219623029232,
-0.003919542301446199,
0.13758240640163422,
0.11643965542316437,
-0.028063232079148293,
-0.09297876060009003,
-0.050279099494218826,
0.039214134216308594,
-0.093618243932724,
-0.05609884113073349,
-0.053809165954589844,
-0.011528723873198032,
0.07799506932497025,
-0.08335758000612259,
0.01254985574632883,
0.10000433772802353,
0.10476618260145187,
-0.03411358594894409,
-0.052245862782001495,
0.026145633310079575,
-0.12348264455795288,
-0.0822892114520073,
0.004123309627175331,
0.19210082292556763,
0.10326378792524338,
0.06466647237539291,
0.04579887166619301,
-0.010291188955307007,
0.00902672577649355,
-0.05163909122347832,
-0.008295772597193718,
0.13917940855026245,
-0.1021389290690422,
0.010971992276608944,
-0.05491073429584503,
-0.1575397551059723,
-0.12250911444425583,
-0.06715178489685059,
0.1750989705324173,
0.06978023052215576,
-0.059238508343696594,
0.11221519857645035,
0.19331218302249908,
-0.12331599742174149,
-0.23384185135364532,
0.016350483521819115,
0.0985003113746643,
0.11878660321235657,
-0.012240210548043251,
-0.2592008411884308,
0.05221007019281387,
-0.004606931004673243,
-0.011750838719308376,
-0.03301720693707466,
-0.33902493119239807,
-0.14920580387115479,
0.1486220508813858,
-0.011009964160621166,
0.12424185127019882,
-0.0035669368226081133,
0.007236601784825325,
0.008254789747297764,
-0.03809225931763649,
0.024067476391792297,
-0.08425324410200119,
0.1280772089958191,
0.0348769836127758,
0.09134945273399353,
0.04223950207233429,
-0.035928498953580856,
0.06448829174041748,
0.08396559208631516,
-0.0017466886201873422,
-0.007648357655853033,
0.03517024591565132,
0.0314629003405571,
-0.0028813527897000313,
0.16804970800876617,
-0.10801119357347488,
0.011011375114321709,
-0.07554415613412857,
-0.10290887951850891,
-0.085752934217453,
0.07847533375024796,
0.02643558382987976,
-0.0444694422185421,
0.009493771940469742,
-0.013255164958536625,
0.005103100556880236,
0.013892799615859985,
-0.04472311586141586,
-0.1355602741241455,
0.01889641582965851,
0.08780574053525925,
0.18588344752788544,
0.016195394098758698,
-0.1129511222243309,
0.0016295784153044224,
-0.021750817075371742,
0.12717494368553162,
-0.14051152765750885,
0.010122368112206459,
0.06437736749649048,
0.04938775673508644,
0.13749052584171295,
0.017712166532874107,
-0.1199827492237091,
0.076703742146492,
0.054703790694475174,
-0.022704651579260826,
-0.09998011589050293,
-0.03913281112909317,
-0.06579244881868362,
-0.03160976618528366,
0.026960136368870735,
0.09448307752609253,
-0.11400230973958969,
-0.013183392584323883,
-0.02064785547554493,
0.01359112374484539,
-0.1347147673368454,
0.18625912070274353,
0.060188889503479004,
0.06328803300857544,
-0.07656372338533401,
0.049415770918130875,
-0.0282281544059515,
-0.017068926244974136,
0.0598188191652298,
0.009925213642418385,
-0.08677800744771957,
-0.08178537338972092,
-0.05233961343765259,
0.10539283603429794,
0.02683483436703682,
-0.11690061539411545,
-0.0748010128736496,
-0.06383895874023438,
-0.015410570427775383,
0.07688010483980179,
0.059932608157396317,
0.013061241246759892,
-0.11870571970939636,
-0.017879027873277664,
-0.10898447781801224,
0.052726373076438904,
0.09618138521909714,
-0.04602030664682388,
-0.09587643295526505,
0.19046631455421448,
0.10200313478708267,
0.0012633601436391473,
-0.028652265667915344,
-0.083571657538414,
-0.027494141831994057,
0.0913926512002945,
-0.053988195955753326,
-0.024156013503670692,
-0.04937110096216202,
0.016201812773942947,
-0.02385726012289524,
-0.06609688699245453,
-0.0029638786800205708,
0.09790656715631485,
-0.09239212423563004,
0.026821335777640343,
-0.020927857607603073,
0.07164999097585678,
-0.06571284681558609,
0.00709883589297533,
0.03957289084792137,
-0.06199273467063904,
0.05946172773838043,
0.12136048078536987,
-0.09223563969135284,
0.14521358907222748,
-0.18926364183425903,
-0.016181614249944687,
0.08284518122673035,
0.0557609386742115,
-0.04478734731674194,
-0.06561674922704697,
0.031297020614147186,
0.04882845655083656,
0.08082184195518494,
-0.018026430159807205,
0.09757903218269348,
-0.07066500186920166,
0.013925273902714252,
-0.032602500170469284,
0.0025832944083958864,
-0.03775358200073242,
0.06930502504110336,
0.059468500316143036,
0.1449907273054123,
0.15595628321170807,
-0.11011409014463425,
0.1229945719242096,
-0.12996114790439606,
0.012819784693419933,
-0.03696340322494507,
-0.02493204176425934,
-0.10124210268259048,
-0.07047086209058762,
0.05374293029308319,
-0.06815815716981888,
0.09242814034223557,
0.024105601012706757,
0.03599328175187111,
-0.02981804497539997,
-0.07723583281040192,
0.008653752505779266,
-0.012321610935032368,
0.20434455573558807,
0.034496452659368515,
0.021642837673425674,
-0.03608427569270134,
0.007543935440480709,
0.037373315542936325,
0.07211383432149887,
0.02069932036101818,
0.1689741462469101,
0.023619351908564568,
0.09784983098506927,
0.08671119064092636,
-0.05116794630885124,
-0.11423524469137192,
-0.07352474331855774,
-0.08500812947750092,
0.03241388499736786,
-0.08391467481851578,
0.18485461175441742,
0.15932098031044006,
-0.06591185927391052,
0.08425401151180267,
0.04304153099656105,
-0.10158298164606094,
-0.15943491458892822,
-0.16037748754024506,
-0.03326704353094101,
-0.13655169308185577,
0.026147805154323578,
-0.07619859278202057,
0.015230518765747547,
0.036950334906578064,
0.043628890067338943,
-0.04059255123138428,
0.23508943617343903,
0.0394180528819561,
-0.12047306448221207,
0.07680939137935638,
-0.0918586403131485,
-0.007073696237057447,
-0.08478249609470367,
0.037759989500045776,
0.1584213227033615,
0.007832192815840244,
0.06949169933795929,
0.008800597861409187,
-0.06732576340436935,
0.03292590752243996,
-0.09645534306764603,
-0.062293458729982376,
-0.035194769501686096,
0.006854787934571505,
0.08709654211997986,
0.11263151466846466,
0.11968079954385757,
-0.08444957435131073,
0.01591387763619423,
0.14427030086517334,
-0.01601206697523594,
-0.16522741317749023,
-0.12922504544258118,
0.12552811205387115,
0.055346500128507614,
0.0007570160669274628,
-0.05198555439710617,
-0.03124954178929329,
-0.007359158247709274,
0.2670334279537201,
0.229649156332016,
0.0843023732304573,
0.03108307160437107,
-0.022693565115332603,
-0.0050981431268155575,
-0.02758292481303215,
0.09595136344432831,
0.07004935294389725,
0.15568315982818604,
0.0005420060479082167,
0.023335697129368782,
-0.0640856921672821,
-0.073430635035038,
-0.0059076896868646145,
0.03955746814608574,
-0.06697450578212738,
-0.0743473619222641,
-0.02804785594344139,
0.11862168461084366,
-0.04681887477636337,
-0.09869778156280518,
-0.08688077330589294,
-0.0833294466137886,
-0.07708307355642319,
-0.027643568813800812,
0.0419161394238472,
0.09445007145404816,
0.03307761996984482,
-0.06601490080356598,
0.007095772307366133,
0.11194855719804764,
-0.010265476070344448,
-0.04536120966076851,
-0.09600795060396194,
0.027559759095311165,
-0.07287851721048355,
0.04207538068294525,
-0.022665303200483322,
0.1473436802625656,
0.014858579263091087,
0.09330365061759949,
-0.022767065092921257,
0.14967291057109833,
-0.026877181604504585,
0.005244359839707613,
0.011710927821695805,
0.09309226274490356,
-0.06277786195278168,
0.11365018784999847,
0.001993120415136218,
-0.1379283219575882,
0.05347831919789314,
-0.10845519602298737,
-0.056874051690101624,
-0.09104086458683014,
0.03597188740968704,
-0.04960662126541138,
0.07530014961957932,
0.10926231741905212,
-0.07011613994836807,
-0.0768454298377037,
-0.04682198911905289,
0.05748966708779335,
0.05364523082971573,
-0.04114273190498352,
-0.055491529405117035,
-0.21776404976844788,
-0.027307050302624702,
-0.08358486741781235,
-0.012211720459163189,
-0.160160630941391,
-0.035557836294174194,
-0.007935373112559319,
-0.07363857328891754,
-0.006835511885583401,
0.048622265458106995,
0.0998915284872055,
0.044259510934352875,
0.01371209230273962,
-0.03992363065481186,
0.057325296103954315,
0.13539980351924896,
-0.18907280266284943,
-0.12213610112667084
] |
null | null |
transformers
|
# Wav2Vec2-Large-XLSR-53-Finnish
Fine-tuned [facebook/wav2vec2-large-xlsr-53](https://huggingface.co/facebook/wav2vec2-large-xlsr-53) in Finnish using the [Common Voice](https://huggingface.co/datasets/common_voice)
When using this model, make sure that your speech input is sampled at 16kHz.
## Usage
The model can be used directly (without a language model) as follows:
```python
import torch
import torchaudio
from datasets import load_dataset
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
test_dataset = load_dataset("common_voice", "fi", split="test[:2%]")
processor = Wav2Vec2Processor.from_pretrained("birgermoell/wav2vec2-large-xlsr-finnish")
model = Wav2Vec2ForCTC.from_pretrained("birgermoell/wav2vec2-large-xlsr-finnish")
resampler = torchaudio.transforms.Resample(48_000, 16_000)
# Preprocessing the datasets.
# We need to read the audio files as arrays
def speech_file_to_array_fn(batch):
    speech_array, sampling_rate = torchaudio.load(batch["path"])
    batch["speech"] = resampler(speech_array).squeeze().numpy()
    return batch
test_dataset = test_dataset.map(speech_file_to_array_fn)
inputs = processor(test_dataset["speech"][:2], sampling_rate=16_000, return_tensors="pt", padding=True)
with torch.no_grad():
    logits = model(inputs.input_values, attention_mask=inputs.attention_mask).logits
predicted_ids = torch.argmax(logits, dim=-1)
print("Prediction:", processor.batch_decode(predicted_ids))
print("Reference:", test_dataset["sentence"][:2])
```
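Alternatively, recent versions of transformers expose a high-level speech recognition pipeline. The following is a minimal sketch, assuming ffmpeg is available for decoding and using `audio.wav` as a placeholder path:
```python
from transformers import pipeline

# The pipeline wraps the processor and model shown above.
asr = pipeline("automatic-speech-recognition", model="birgermoell/wav2vec2-large-xlsr-finnish")

# Transcribe a local audio file (placeholder path).
print(asr("audio.wav"))
```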
## Evaluation
The model can be evaluated as follows on the Finnish test data of Common Voice.
```python
import torch
import torchaudio
from datasets import load_dataset, load_metric
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
import re
test_dataset = load_dataset("common_voice", "fi", split="test")
wer = load_metric("wer")
processor = Wav2Vec2Processor.from_pretrained("birgermoell/wav2vec2-large-xlsr-finnish")
model = Wav2Vec2ForCTC.from_pretrained("birgermoell/wav2vec2-large-xlsr-finnish")
model.to("cuda")
chars_to_ignore_regex = '[\,\?\.\!\-\;\:\"\“]'
resampler = torchaudio.transforms.Resample(48_000, 16_000)
# Preprocessing the datasets.
# We need to read the audio files as arrays
def speech_file_to_array_fn(batch):
    batch["sentence"] = re.sub(chars_to_ignore_regex, '', batch["sentence"]).lower()
    speech_array, sampling_rate = torchaudio.load(batch["path"])
    batch["speech"] = resampler(speech_array).squeeze().numpy()
    return batch
test_dataset = test_dataset.map(speech_file_to_array_fn)
# Run inference on the test set and collect the predicted strings
def evaluate(batch):
    inputs = processor(batch["speech"], sampling_rate=16_000, return_tensors="pt", padding=True)
    with torch.no_grad():
        logits = model(inputs.input_values.to("cuda"), attention_mask=inputs.attention_mask.to("cuda")).logits
    pred_ids = torch.argmax(logits, dim=-1)
    batch["pred_strings"] = processor.batch_decode(pred_ids)
    return batch
result = test_dataset.map(evaluate, batched=True, batch_size=8)
print("WER: {:2f}".format(100 * wer.compute(predictions=result["pred_strings"], references=result["sentence"])))
```
**Test Result**:
The WER is 55.097365
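For context, the word error rate is the number of substitutions, deletions and insertions divided by the number of words in the reference. The `wer` metric used above builds on the jiwer package, so individual sentence pairs can be scored the same way; the strings below are made-up examples, not data from this evaluation:
```python
from jiwer import wer

reference = "minä puhun suomea"
hypothesis = "minä puhun suomia"

# One substituted word out of three reference words gives a WER of roughly 0.33.
print(wer(reference, hypothesis))
```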
## Training
The Common Voice `train` and `validation` datasets were used for training.
The script used for training can be found here
https://colab.research.google.com/drive/16AyzqMWU_aWNe3IA-NxrhskB1WLPHG-Q?usp=sharing
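After fine-tuning in that notebook, the resulting checkpoint can be saved and shared. As a minimal sketch, assuming `model` and `processor` are the fine-tuned objects from the training run and the output directory is a placeholder:
```python
# Save the fine-tuned model and processor side by side (placeholder directory).
model.save_pretrained("./wav2vec2-large-xlsr-finnish")
processor.save_pretrained("./wav2vec2-large-xlsr-finnish")
```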
|
{"language": "fi", "license": "apache-2.0", "tags": ["audio", "automatic-speech-recognition", "speech", "xlsr-fine-tuning-week"], "datasets": ["common_voice"], "model-index": [{"name": "XLSR Wav2Vec2 Finnish by Birger Moell", "results": [{"task": {"type": "automatic-speech-recognition", "name": "Speech Recognition"}, "dataset": {"name": "Common Voice fi", "type": "common_voice", "args": "fi"}, "metrics": [{"type": "wer", "value": 55.097365, "name": "Test WER"}]}]}]}
|
automatic-speech-recognition
|
birgermoell/wav2vec2-large-xlsr-finnish
|
[
"transformers",
"pytorch",
"jax",
"wav2vec2",
"automatic-speech-recognition",
"audio",
"speech",
"xlsr-fine-tuning-week",
"fi",
"dataset:common_voice",
"license:apache-2.0",
"model-index",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"fi"
] |
TAGS
#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #xlsr-fine-tuning-week #fi #dataset-common_voice #license-apache-2.0 #model-index #endpoints_compatible #region-us
|
# Wav2Vec2-Large-XLSR-53-Finnish
Fine-tuned facebook/wav2vec2-large-xlsr-53 in Finnish using the Common Voice
When using this model, make sure that your speech input is sampled at 16kHz.
## Usage
The model can be used directly (without a language model) as follows:
## Evaluation
The model can be evaluated as follows on the Finnish test data of Common Voice.
Test Result:
The WER is 55.097365
## Training
The Common Voice 'train' and 'validation' datasets were used for training.
The script used for training can be found here
URL
|
[
"# Wav2Vec2-Large-XLSR-53-Finnish\n\nFine-tuned facebook/wav2vec2-large-xlsr-53 in Finnish using the Common Voice\nWhen using this model, make sure that your speech input is sampled at 16kHz.",
"## Usage\n\nThe model can be used directly (without a language model) as follows:",
"## Evaluation\n\nThe model can be evaluated as follows on the Finnish test data of Common Voice.\n\n\n\n\nTest Result:\nThe WER is 55.097365",
"## Training\n\nThe Common Voice 'train' and 'validation' datasets were used for training.\nThe script used for training can be found here\nURL"
] |
[
"TAGS\n#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #xlsr-fine-tuning-week #fi #dataset-common_voice #license-apache-2.0 #model-index #endpoints_compatible #region-us \n",
"# Wav2Vec2-Large-XLSR-53-Finnish\n\nFine-tuned facebook/wav2vec2-large-xlsr-53 in Finnish using the Common Voice\nWhen using this model, make sure that your speech input is sampled at 16kHz.",
"## Usage\n\nThe model can be used directly (without a language model) as follows:",
"## Evaluation\n\nThe model can be evaluated as follows on the Finnish test data of Common Voice.\n\n\n\n\nTest Result:\nThe WER is 55.097365",
"## Training\n\nThe Common Voice 'train' and 'validation' datasets were used for training.\nThe script used for training can be found here\nURL"
] |
[
80,
63,
20,
33,
33
] |
[
"passage: TAGS\n#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #xlsr-fine-tuning-week #fi #dataset-common_voice #license-apache-2.0 #model-index #endpoints_compatible #region-us \n# Wav2Vec2-Large-XLSR-53-Finnish\n\nFine-tuned facebook/wav2vec2-large-xlsr-53 in Finnish using the Common Voice\nWhen using this model, make sure that your speech input is sampled at 16kHz.## Usage\n\nThe model can be used directly (without a language model) as follows:## Evaluation\n\nThe model can be evaluated as follows on the Finnish test data of Common Voice.\n\n\n\n\nTest Result:\nThe WER is 55.097365## Training\n\nThe Common Voice 'train' and 'validation' datasets were used for training.\nThe script used for training can be found here\nURL"
] |
[
-0.1619464010000229,
0.033176224678754807,
-0.001114157377742231,
0.0024973072577267885,
0.1142803281545639,
-0.04667608067393303,
0.19643761217594147,
0.10798394680023193,
0.016078343614935875,
-0.02430972084403038,
0.03466575965285301,
-0.013394963927567005,
0.06625331193208694,
0.12712225317955017,
0.026934290304780006,
-0.23305746912956238,
-0.002603396773338318,
0.026401488110423088,
0.01045054942369461,
0.13269968330860138,
0.11041975766420364,
-0.052385132759809494,
-0.029502782970666885,
0.0821441039443016,
-0.19767087697982788,
0.04017201066017151,
0.04063383489847183,
-0.11719358712434769,
0.14449676871299744,
0.047665659338235855,
0.10456826537847519,
0.026924466714262962,
0.10427971929311752,
-0.18073295056819916,
0.027916017919778824,
0.04031066969037056,
0.047128431499004364,
0.035788245499134064,
0.04012531414628029,
0.016322139650583267,
0.0935460776090622,
0.10491081327199936,
-0.015524550341069698,
0.06836360692977905,
-0.04528026655316353,
-0.1613960713148117,
0.00948983896523714,
-0.013563526794314384,
0.08040407299995422,
0.168142169713974,
-0.05916527286171913,
0.0800129622220993,
-0.1473979651927948,
0.11021986603736877,
0.1195765808224678,
-0.13606420159339905,
-0.0052069383673369884,
0.071861132979393,
0.0754772201180458,
0.04137986898422241,
-0.07219766825437546,
0.03067321702837944,
0.025353508070111275,
0.017988672479987144,
0.024245744571089745,
-0.006772696506232023,
-0.22647587954998016,
-0.05142730847001076,
-0.11601558327674866,
0.0011313613504171371,
0.24512580037117004,
-0.013110082596540451,
-0.06126786768436432,
-0.10446202754974365,
0.006380047183483839,
0.009736473672091961,
0.011237410828471184,
-0.05907103046774864,
-0.025464029982686043,
0.018402690067887306,
-0.008591734804213047,
-0.06793384999036789,
-0.1292850226163864,
-0.15240317583084106,
0.04800501838326454,
0.09542585164308548,
0.005999399349093437,
0.02585042454302311,
-0.14736893773078918,
0.0866168662905693,
-0.05261729657649994,
-0.06900595873594284,
-0.0017500645481050014,
0.01922631822526455,
-0.038401782512664795,
0.005172311794012785,
-0.07302180677652359,
-0.20405419170856476,
0.012248269282281399,
-0.021325580775737762,
0.06986606866121292,
0.005287387408316135,
-0.017199711874127388,
0.07247551530599594,
-0.004475241526961327,
0.13044972717761993,
-0.037391118705272675,
-0.03324069082736969,
0.035293009132146835,
0.04534446820616722,
-0.07329431921243668,
-0.03417990729212761,
-0.09255664050579071,
-0.05800756812095642,
0.038967315107584,
0.06901466846466064,
-0.02895507775247097,
0.01303345151245594,
-0.036584772169589996,
-0.05630097538232803,
0.020726623013615608,
-0.13338004052639008,
-0.028058694675564766,
0.07918895781040192,
-0.03801349177956581,
0.10859877616167068,
0.07999098300933838,
0.03252623602747917,
-0.12042846530675888,
-0.03984178975224495,
0.025724615901708603,
0.09092199802398682,
-0.047195058315992355,
-0.10842011868953705,
0.021954508498311043,
-0.01190074160695076,
-0.021469028666615486,
-0.10585709661245346,
-0.09095588326454163,
-0.07775520533323288,
0.008395149372518063,
0.056449536234140396,
-0.0009289399022236466,
-0.11320609599351883,
-0.010188205167651176,
-0.03309004381299019,
-0.060192424803972244,
0.03660918027162552,
-0.033829208463430405,
0.051348768174648285,
-0.022332392632961273,
0.045205578207969666,
0.061659324914216995,
0.07710897922515869,
-0.08665895462036133,
-0.09798458963632584,
-0.016964666545391083,
0.10744310915470123,
-0.032214485108852386,
-0.04981047287583351,
-0.07929395884275436,
-0.09634435921907425,
-0.05674073100090027,
0.07288843393325806,
0.054633524268865585,
0.1300506293773651,
-0.2596794068813324,
-0.09570170938968658,
0.23199878633022308,
-0.13327373564243317,
-0.005617584101855755,
0.18154843151569366,
-0.0038759729359298944,
0.12696868181228638,
0.13885562121868134,
0.25013354420661926,
0.13446742296218872,
-0.20476087927818298,
0.050631094723939896,
0.007730323355644941,
-0.017832521349191666,
-0.07675991952419281,
0.07002890855073929,
-0.05061006173491478,
-0.005052152555435896,
0.0407445915043354,
-0.09824458509683609,
0.08659259229898453,
-0.012638969346880913,
-0.0719330906867981,
-0.0003987040545325726,
-0.06297638267278671,
0.004154225345700979,
0.05974999815225601,
0.03429426625370979,
-0.03714878857135773,
-0.07984036952257156,
0.05359215661883354,
0.11639333516359329,
-0.15801574289798737,
0.07006790488958359,
-0.09973695874214172,
0.0617668442428112,
-0.05969615653157234,
-0.005808977875858545,
-0.1372290998697281,
0.1293870508670807,
-0.02516310289502144,
0.04366866126656532,
0.04269064590334892,
0.13978993892669678,
0.02132895030081272,
0.03312027081847191,
-0.029573973268270493,
-0.01058719027787447,
-0.044085972011089325,
-0.02565602771937847,
-0.029666684567928314,
-0.09632457047700882,
-0.03896113485097885,
-0.0675114318728447,
0.07971073687076569,
-0.18316619098186493,
0.012018098495900631,
0.02840479463338852,
-0.004627245012670755,
0.00854980107396841,
-0.0372091569006443,
0.06945542246103287,
0.09184210002422333,
-0.0010315296240150928,
-0.007005185820162296,
0.04913884401321411,
0.005081159994006157,
-0.04060173034667969,
0.10340812802314758,
-0.1353316456079483,
0.010610775090754032,
0.09217526018619537,
-0.05523425713181496,
-0.03091888315975666,
0.03887563571333885,
-0.02798210270702839,
-0.011264609172940254,
-0.07928218692541122,
-0.009405028074979782,
0.25710994005203247,
0.0025649683084338903,
0.10712062567472458,
-0.08256158977746964,
0.024529285728931427,
0.023650307208299637,
-0.1004553884267807,
0.05907227471470833,
0.047268785536289215,
0.011882705613970757,
-0.004114584531635046,
0.022056277841329575,
-0.07034836709499359,
-0.08286305516958237,
0.3305114507675171,
-0.015203865244984627,
-0.10354137420654297,
0.026217298582196236,
-0.02995123527944088,
-0.015116269700229168,
0.10419543832540512,
-0.1666596680879593,
-0.05948915332555771,
0.009431477636098862,
0.05342938005924225,
0.07288170605897903,
-0.1631854623556137,
-0.010683617554605007,
0.01177737582474947,
-0.13267435133457184,
-0.16200906038284302,
0.03683387488126755,
-0.0564773753285408,
0.051040079444646835,
-0.09596776217222214,
-0.06902290880680084,
-0.014866129495203495,
-0.048325199633836746,
-0.16976754367351532,
0.16300265491008759,
-0.08340755105018616,
-0.25302451848983765,
-0.161944180727005,
0.10088366270065308,
0.05087803304195404,
0.00013798955478705466,
0.08899907767772675,
-0.13665950298309326,
-0.007231082767248154,
-0.03917795419692993,
0.12608148157596588,
0.04215922951698303,
-0.060591645538806915,
-0.033878426998853683,
0.0324270986020565,
0.0642472431063652,
-0.16637609899044037,
0.015961548313498497,
-0.03931655362248421,
-0.049761444330215454,
-0.006506070960313082,
-0.03554100915789604,
0.015116473659873009,
0.18267570436000824,
0.0037871128879487514,
0.028759263455867767,
-0.015433508902788162,
0.1406739056110382,
-0.11419697105884552,
0.0166920218616724,
0.16754524409770966,
-0.005583055783063173,
-0.02208562195301056,
0.05486222356557846,
0.025574754923582077,
-0.05198151245713234,
-0.0049966927617788315,
-0.023843437433242798,
-0.08527461439371109,
-0.2388295829296112,
-0.10546299815177917,
-0.042396269738674164,
-0.032281968742609024,
-0.009556572884321213,
-0.00630649458616972,
0.05162186548113823,
0.006428706459701061,
-0.010149240493774414,
-0.1302349865436554,
0.07580821216106415,
-0.004675711505115032,
0.0444062240421772,
-0.0002590203075669706,
0.0963582694530487,
-0.04794243723154068,
0.007838083431124687,
-0.003668832592666149,
0.03603246435523033,
0.16427624225616455,
0.024244720116257668,
0.08960190415382385,
0.07223843783140182,
0.08845534920692444,
0.09456699341535568,
0.11260468512773514,
-0.01764107681810856,
-0.005344912875443697,
0.04047195240855217,
-0.047812651842832565,
-0.03369918838143349,
0.016301125288009644,
0.08142969012260437,
-0.06653067469596863,
-0.0537337101995945,
-0.0025750095956027508,
0.008657463826239109,
0.18248610198497772,
0.06448300927877426,
-0.18820466101169586,
-0.11841514706611633,
-0.0540330745279789,
-0.05741691589355469,
-0.0032660020515322685,
0.04480191692709923,
0.18407289683818817,
-0.14712895452976227,
-0.008330965414643288,
-0.035301242023706436,
0.09406200051307678,
0.023827942088246346,
0.01831625960767269,
-0.02190314792096615,
0.05104983597993851,
0.004487595520913601,
0.10182136297225952,
-0.23873351514339447,
0.21087034046649933,
-0.00039312223088927567,
0.1690073162317276,
-0.05434129759669304,
-0.006954316049814224,
0.015959009528160095,
0.06971488893032074,
0.12276267260313034,
0.03410232439637184,
0.009208821691572666,
-0.12649884819984436,
-0.09788839519023895,
0.0511588528752327,
-0.0008452345500700176,
-0.0006703163380734622,
0.037992581725120544,
0.00846044160425663,
0.014687503688037395,
0.01812896691262722,
-0.11487872153520584,
-0.14370878040790558,
-0.07285190373659134,
0.01756427250802517,
0.12052584439516068,
0.07104451954364777,
-0.019579879939556122,
-0.11848380416631699,
-0.04763886332511902,
0.08437330275774002,
-0.08924256265163422,
-0.05917659029364586,
-0.05573057010769844,
-0.019605327397584915,
0.08003966510295868,
-0.07231917977333069,
-0.008509841747581959,
0.09654343128204346,
0.10218869149684906,
-0.04035080224275589,
-0.04466211423277855,
0.04128986969590187,
-0.11106053739786148,
-0.08675508201122284,
0.006863413378596306,
0.17114952206611633,
0.1081622913479805,
0.05741632357239723,
0.06507640331983566,
-0.0013827086659148335,
0.006136613432317972,
-0.034912288188934326,
-0.026470739394426346,
0.11116465926170349,
-0.08846299350261688,
-0.0044877356849610806,
-0.019079869613051414,
-0.11699837446212769,
-0.1376122236251831,
-0.04893224686384201,
0.1731879860162735,
0.03259386494755745,
-0.06646230816841125,
0.14598114788532257,
0.19573479890823364,
-0.11686354130506516,
-0.2179456353187561,
0.036767710000276566,
0.09925948083400726,
0.14022962749004364,
-0.02965405024588108,
-0.23441655933856964,
0.07535180449485779,
0.0030182660557329655,
-0.013514835387468338,
0.009378735907375813,
-0.2756274938583374,
-0.1442568004131317,
0.12999121844768524,
-0.0021938830614089966,
0.12783832848072052,
0.011823139153420925,
0.021324729546904564,
0.01677819900214672,
-0.008073577657341957,
0.04626811295747757,
-0.09024172276258469,
0.13153515756130219,
0.028184963390231133,
0.08925296366214752,
0.044811975210905075,
-0.0461290143430233,
0.053577523678541183,
0.06635422259569168,
-0.021651487797498703,
-0.008526366204023361,
0.06725464761257172,
0.04969451203942299,
0.005169996060431004,
0.1765998750925064,
-0.10772349685430527,
0.006083047948777676,
-0.07468356937170029,
-0.10618913173675537,
-0.09308596700429916,
0.0992598608136177,
0.015194625593721867,
-0.039745766669511795,
0.029303627088665962,
-0.014728773385286331,
0.015313344076275826,
0.009423831477761269,
-0.05203733220696449,
-0.12224110215902328,
0.06773242354393005,
0.11197454482316971,
0.19596779346466064,
-0.02613568678498268,
-0.11386539787054062,
0.018469758331775665,
-0.02290067821741104,
0.14794017374515533,
-0.08302410691976547,
0.014576285146176815,
0.07975365221500397,
0.04228364676237106,
0.12614719569683075,
0.008459486998617649,
-0.11665774136781693,
0.060801003128290176,
0.03970007970929146,
-0.07870538532733917,
-0.10330504924058914,
-0.039995089173316956,
-0.05733560398221016,
-0.01998249813914299,
0.03654259443283081,
0.10959330946207047,
-0.1112283319234848,
-0.018739011138677597,
-0.033390067517757416,
0.03612644225358963,
-0.1266097128391266,
0.2162569910287857,
0.04970068857073784,
0.08317405730485916,
-0.08559493720531464,
0.05676186457276344,
-0.029914896935224533,
-0.06966543197631836,
0.08310556411743164,
-0.0006597579922527075,
-0.07879245281219482,
-0.07469110190868378,
-0.0030858481768518686,
0.11783643811941147,
0.05318141356110573,
-0.13191698491573334,
-0.09727734327316284,
-0.07069551944732666,
-0.020362749695777893,
0.080326147377491,
0.050369102507829666,
0.021354099735617638,
-0.11796663701534271,
-0.04377501457929611,
-0.14116914570331573,
0.0635845810174942,
0.09504016488790512,
-0.05482643470168114,
-0.08364658802747726,
0.2067280262708664,
0.08288285881280899,
0.012875784188508987,
-0.03341630473732948,
-0.06911775469779968,
-0.005580557975918055,
0.07190144062042236,
-0.0042100162245333195,
-0.024069862440228462,
-0.03431446850299835,
0.02200962044298649,
-0.027140768244862556,
-0.04337244853377342,
0.021751323714852333,
0.09717432409524918,
-0.08610466867685318,
0.040910523384809494,
-0.024737272411584854,
0.02174784243106842,
-0.0875692144036293,
0.02131330408155918,
0.004044931847602129,
-0.050665851682424545,
0.056834787130355835,
0.12179917097091675,
-0.10454043000936508,
0.12627138197422028,
-0.19138725101947784,
-0.014035673812031746,
0.055572960525751114,
0.053450822830200195,
-0.03207097947597504,
-0.07866407185792923,
0.0295375045388937,
0.07616792619228363,
0.10367602854967117,
-0.0033594409469515085,
0.09958488494157791,
-0.05535942688584328,
0.004323447123169899,
-0.025245727971196175,
-0.006167821120470762,
-0.05230113863945007,
0.08225613087415695,
0.04940367862582207,
0.13816548883914948,
0.16481344401836395,
-0.10859107971191406,
0.13779181241989136,
-0.1241828128695488,
0.012859213165938854,
-0.019396668300032616,
-0.027628127485513687,
-0.09841755032539368,
-0.0432397797703743,
0.06594912707805634,
-0.06832318753004074,
0.06487011164426804,
0.05075624957680702,
0.06178709492087364,
-0.024277716875076294,
-0.13016459345817566,
0.021441364660859108,
-0.007673672866076231,
0.19066935777664185,
0.00873104389756918,
0.015236112289130688,
-0.02627219632267952,
0.002005442278459668,
0.013345520943403244,
0.12009764462709427,
0.038811445236206055,
0.16921591758728027,
-0.003244754858314991,
0.09842747449874878,
0.06481185555458069,
-0.04759036749601364,
-0.10184173285961151,
-0.08136346936225891,
-0.10448777675628662,
0.0548287071287632,
-0.05693032965064049,
0.1514863818883896,
0.14042600989341736,
-0.07796017080545425,
0.08482479304075241,
0.026047999039292336,
-0.12587106227874756,
-0.16964516043663025,
-0.15896327793598175,
-0.044352855533361435,
-0.10296634584665298,
0.030043791979551315,
-0.09473453462123871,
0.03126169368624687,
0.03732956945896149,
0.051004692912101746,
-0.048401083797216415,
0.22486765682697296,
0.05247434601187706,
-0.08802826702594757,
0.10289764404296875,
-0.07142489403486252,
-0.01875181682407856,
-0.0765039250254631,
0.05934706702828407,
0.15726761519908905,
0.0023774898145347834,
0.08094025403261185,
0.0012697933707386255,
-0.0676782876253128,
0.025863749906420708,
-0.07054796814918518,
-0.07678878307342529,
-0.022324098274111748,
-0.007608018349856138,
0.08861882239580154,
0.12796719372272491,
0.12195020169019699,
-0.07640842348337173,
0.0030182944610714912,
0.12597687542438507,
-0.022876223549246788,
-0.14408551156520844,
-0.15526029467582703,
0.13517090678215027,
0.027351783588528633,
0.008772017434239388,
-0.04393200948834419,
-0.024642309173941612,
-0.002494561718776822,
0.22899888455867767,
0.24774853885173798,
0.07766353338956833,
0.022175803780555725,
-0.044440608471632004,
-0.014469452202320099,
-0.04688899219036102,
0.09991203248500824,
0.06179739162325859,
0.14441846311092377,
-0.0047979820519685745,
0.015519769862294197,
-0.07516097277402878,
-0.08913504332304001,
-0.01743433251976967,
0.008267173543572426,
-0.048811085522174835,
-0.0689128190279007,
-0.043481290340423584,
0.12259384244680405,
-0.03458670154213905,
-0.09280425310134888,
-0.09482991695404053,
-0.09441779553890228,
-0.09294149279594421,
-0.02234467677772045,
0.07286322861909866,
0.06114929914474487,
0.04302757605910301,
-0.056827764958143234,
0.03164898231625557,
0.1319490224123001,
-0.0004803739720955491,
-0.05368303135037422,
-0.07281776517629623,
0.021888332441449165,
-0.13611313700675964,
0.0279519222676754,
-0.005029053892940283,
0.1569148302078247,
0.006880437955260277,
0.08201506733894348,
-0.01857023686170578,
0.12925715744495392,
-0.03364691138267517,
-0.006968865171074867,
0.008505763486027718,
0.1470368504524231,
-0.04994935169816017,
0.10551021993160248,
-0.0028952723369002342,
-0.15018367767333984,
0.03448181226849556,
-0.1355435699224472,
-0.03841722756624222,
-0.06434968113899231,
0.04173995926976204,
-0.030045101419091225,
0.08503099530935287,
0.13552306592464447,
-0.06267666071653366,
-0.05860878527164459,
-0.06803199648857117,
0.05900098755955696,
0.036442164331674576,
-0.06786108762025833,
-0.05226675420999527,
-0.22313664853572845,
-0.046745121479034424,
-0.11721303313970566,
-0.017273934558033943,
-0.1659161001443863,
-0.026889963075518608,
-0.00024845689767971635,
-0.10013742744922638,
0.001507396693341434,
0.03731527924537659,
0.08852435648441315,
0.044100429862737656,
0.007337616756558418,
0.022068724036216736,
0.07370758056640625,
0.1283085197210312,
-0.1813850700855255,
-0.11908683180809021
] |
null | null |
transformers
|
# Wav2Vec2-Large-XLSR-53-Hungarian
Fine-tuned [facebook/wav2vec2-large-xlsr-53](https://huggingface.co/facebook/wav2vec2-large-xlsr-53) on Hungarian using the [Common Voice](https://huggingface.co/datasets/common_voice) dataset.
When using this model, make sure that your speech input is sampled at 16kHz.
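If your own recordings are not already at 16 kHz, a minimal resampling sketch (assuming `torchaudio` is available; the file name `my_recording.wav` is only a placeholder) could look like this:
```python
import torchaudio

# Load the recording together with its native sampling rate.
speech_array, sampling_rate = torchaudio.load("my_recording.wav")

# Resample to the 16 kHz the model expects, if necessary.
if sampling_rate != 16_000:
    resampler = torchaudio.transforms.Resample(orig_freq=sampling_rate, new_freq=16_000)
    speech_array = resampler(speech_array)

speech = speech_array.squeeze().numpy()
```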
## Usage
The model can be used directly (without a language model) as follows:
```python
import torch
import torchaudio
from datasets import load_dataset
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
test_dataset = load_dataset("common_voice", "hu", split="test[:2%]")
processor = Wav2Vec2Processor.from_pretrained("birgermoell/wav2vec2-large-xlsr-hungarian")
model = Wav2Vec2ForCTC.from_pretrained("birgermoell/wav2vec2-large-xlsr-hungarian")
resampler = torchaudio.transforms.Resample(48_000, 16_000)
# Preprocessing the datasets.
# We need to read the audio files as arrays
def speech_file_to_array_fn(batch):
    speech_array, sampling_rate = torchaudio.load(batch["path"])
    batch["speech"] = resampler(speech_array).squeeze().numpy()
    return batch
test_dataset = test_dataset.map(speech_file_to_array_fn)
inputs = processor(test_dataset["speech"][:2], sampling_rate=16_000, return_tensors="pt", padding=True)
with torch.no_grad():
    logits = model(inputs.input_values, attention_mask=inputs.attention_mask).logits
predicted_ids = torch.argmax(logits, dim=-1)
print("Prediction:", processor.batch_decode(predicted_ids))
print("Reference:", test_dataset["sentence"][:2])
```
## Evaluation
The model can be evaluated as follows on the Hungarian test data of Common Voice.
```python
import torch
import torchaudio
from datasets import load_dataset, load_metric
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
import re
test_dataset = load_dataset("common_voice", "hu", split="test")
wer = load_metric("wer")
processor = Wav2Vec2Processor.from_pretrained("birgermoell/wav2vec2-large-xlsr-hungarian")
model = Wav2Vec2ForCTC.from_pretrained("birgermoell/wav2vec2-large-xlsr-hungarian")
model.to("cuda")
chars_to_ignore_regex = '[\,\?\.\!\-\;\:\"\“]'
resampler = torchaudio.transforms.Resample(48_000, 16_000)
# Preprocessing the datasets.
# We need to read the audio files as arrays
def speech_file_to_array_fn(batch):
    batch["sentence"] = re.sub(chars_to_ignore_regex, '', batch["sentence"]).lower()
    speech_array, sampling_rate = torchaudio.load(batch["path"])
    batch["speech"] = resampler(speech_array).squeeze().numpy()
    return batch
test_dataset = test_dataset.map(speech_file_to_array_fn)
# Run batched inference on the preprocessed dataset.
def evaluate(batch):
    inputs = processor(batch["speech"], sampling_rate=16_000, return_tensors="pt", padding=True)
    with torch.no_grad():
        logits = model(inputs.input_values.to("cuda"), attention_mask=inputs.attention_mask.to("cuda")).logits
    pred_ids = torch.argmax(logits, dim=-1)
    batch["pred_strings"] = processor.batch_decode(pred_ids)
    return batch
result = test_dataset.map(evaluate, batched=True, batch_size=8)
print("WER: {:2f}".format(100 * wer.compute(predictions=result["pred_strings"], references=result["sentence"])))
```
**Test Result**: 46.97 %
## Training
The Common Voice `train` and `validation` datasets were used for training.
The script used for training can be found [here](https://colab.research.google.com/drive/1c8LS-RP-RMukvXkpqJ9kLXRWmRKFjevs?usp=sharing)
|
{"language": "hu", "license": "apache-2.0", "tags": ["audio", "automatic-speech-recognition", "speech", "xlsr-fine-tuning-week"], "datasets": ["common_voice"], "model-index": [{"name": "XLSR Wav2Vec2 Hugarian by Birger Moell", "results": [{"task": {"type": "automatic-speech-recognition", "name": "Speech Recognition"}, "dataset": {"name": "Common Voice hu", "type": "common_voice", "args": "hu"}, "metrics": [{"type": "wer", "value": 46.97, "name": "Test WER"}]}]}]}
|
automatic-speech-recognition
|
birgermoell/wav2vec2-large-xlsr-hungarian
|
[
"transformers",
"pytorch",
"jax",
"wav2vec2",
"automatic-speech-recognition",
"audio",
"speech",
"xlsr-fine-tuning-week",
"hu",
"dataset:common_voice",
"license:apache-2.0",
"model-index",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"hu"
] |
TAGS
#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #xlsr-fine-tuning-week #hu #dataset-common_voice #license-apache-2.0 #model-index #endpoints_compatible #region-us
|
# Wav2Vec2-Large-XLSR-53-Hungarian
Fine-tuned facebook/wav2vec2-large-xlsr-53 in Hungarian using the Common Voice
When using this model, make sure that your speech input is sampled at 16kHz.
## Usage
The model can be used directly (without a language model) as follows:
## Evaluation
The model can be evaluated as follows on the Hungarian test data of Common Voice.
Test Result: 46.97 %
## Training
The Common Voice 'train' and 'validation' datasets were used for training.
The script used for training can be found here
|
[
"# Wav2Vec2-Large-XLSR-53-Hungarian\n\nFine-tuned facebook/wav2vec2-large-xlsr-53 in Hungarian using the Common Voice\nWhen using this model, make sure that your speech input is sampled at 16kHz.",
"## Usage\n\nThe model can be used directly (without a language model) as follows:",
"## Evaluation\n\nThe model can be evaluated as follows on the Hungarian test data of Common Voice.\n\n\n\n\nTest Result: 46.97 %",
"## Training\n\nThe Common Voice 'train' and 'validation' datasets were used for training.\nThe script used for training can be found here"
] |
[
"TAGS\n#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #xlsr-fine-tuning-week #hu #dataset-common_voice #license-apache-2.0 #model-index #endpoints_compatible #region-us \n",
"# Wav2Vec2-Large-XLSR-53-Hungarian\n\nFine-tuned facebook/wav2vec2-large-xlsr-53 in Hungarian using the Common Voice\nWhen using this model, make sure that your speech input is sampled at 16kHz.",
"## Usage\n\nThe model can be used directly (without a language model) as follows:",
"## Evaluation\n\nThe model can be evaluated as follows on the Hungarian test data of Common Voice.\n\n\n\n\nTest Result: 46.97 %",
"## Training\n\nThe Common Voice 'train' and 'validation' datasets were used for training.\nThe script used for training can be found here"
] |
[
80,
63,
20,
29,
32
] |
[
"passage: TAGS\n#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #xlsr-fine-tuning-week #hu #dataset-common_voice #license-apache-2.0 #model-index #endpoints_compatible #region-us \n# Wav2Vec2-Large-XLSR-53-Hungarian\n\nFine-tuned facebook/wav2vec2-large-xlsr-53 in Hungarian using the Common Voice\nWhen using this model, make sure that your speech input is sampled at 16kHz.## Usage\n\nThe model can be used directly (without a language model) as follows:## Evaluation\n\nThe model can be evaluated as follows on the Hungarian test data of Common Voice.\n\n\n\n\nTest Result: 46.97 %## Training\n\nThe Common Voice 'train' and 'validation' datasets were used for training.\nThe script used for training can be found here"
] |
[
-0.1614154577255249,
-0.018646225333213806,
-0.0010123762767761946,
-0.03800954297184944,
0.1372675746679306,
-0.05554584041237831,
0.2337213009595871,
0.09671829640865326,
0.06111660972237587,
-0.013911914080381393,
0.0015202222857624292,
0.021803591400384903,
0.04198415204882622,
0.04640946537256241,
0.049940161406993866,
-0.24097858369350433,
0.021104484796524048,
0.004698524251580238,
0.0597374364733696,
0.09879928827285767,
0.11402636766433716,
-0.05121142789721489,
-0.0229254849255085,
0.07192038744688034,
-0.14282865822315216,
0.05589553713798523,
0.010806416161358356,
-0.11384180933237076,
0.16019754111766815,
0.03027072176337242,
0.06487465649843216,
0.04061471298336983,
0.08295349031686783,
-0.15804392099380493,
0.02366340532898903,
0.030613644048571587,
0.035829052329063416,
0.02394513227045536,
0.053666435182094574,
-0.04556700587272644,
0.1533055305480957,
0.08320070058107376,
-0.025506317615509033,
0.07148709148168564,
-0.03258313611149788,
-0.2201577126979828,
0.02424212172627449,
0.055695049464702606,
0.08523322641849518,
0.15372662246227264,
-0.060957252979278564,
0.09103672951459885,
-0.14696958661079407,
0.08797044306993484,
0.11517293751239777,
-0.16875667870044708,
0.021498247981071472,
0.08146610856056213,
0.05897049233317375,
0.09254831075668335,
-0.05649450048804283,
0.021228021010756493,
0.006824202369898558,
0.01708984561264515,
0.05309723690152168,
-0.02964707277715206,
-0.2526552975177765,
-0.06747297197580338,
-0.145746648311615,
-0.014884806238114834,
0.20434273779392242,
-0.016883179545402527,
-0.061625558882951736,
-0.1350376456975937,
0.008852613158524036,
0.0489167720079422,
-0.01998925767838955,
-0.010089876130223274,
-0.02339494228363037,
0.032732948660850525,
0.003207910107448697,
-0.03684741258621216,
-0.09140078723430634,
-0.1370660811662674,
0.06395046412944794,
0.0902501717209816,
0.04038514569401741,
0.004188288934528828,
-0.09412192553281784,
0.08935242146253586,
-0.06388697773218155,
-0.08418673276901245,
0.012323280796408653,
0.06598890572786331,
-0.05902538821101189,
-0.011156540364027023,
-0.06683851778507233,
-0.12469517439603806,
0.00694817490875721,
-0.015475466847419739,
0.08013670146465302,
0.024248573929071426,
-0.023862337693572044,
0.10286390781402588,
-0.011501259170472622,
0.12624318897724152,
-0.06974203884601593,
-0.02322538197040558,
0.0193626806139946,
0.042992401868104935,
-0.04919944703578949,
-0.03326801210641861,
-0.11604677140712738,
-0.07580069452524185,
0.0486874058842659,
0.07121714949607849,
-0.053530190140008926,
0.031091157346963882,
0.010851713828742504,
-0.054468005895614624,
0.0490046925842762,
-0.10146044939756393,
-0.05371975898742676,
0.09192666411399841,
-0.0317806713283062,
0.11866996437311172,
0.027666866779327393,
0.059297073632478714,
-0.11945153772830963,
0.00015879812417551875,
0.01999756321310997,
0.08959906548261642,
-0.04157549515366554,
-0.11028105765581131,
0.019424045458436012,
0.018812566995620728,
0.0028761366847902536,
-0.11090079694986343,
-0.0738176628947258,
-0.05310650169849396,
-0.02116341143846512,
0.049412865191698074,
0.029154008254408836,
-0.11814573407173157,
-0.030461909249424934,
-0.042414918541908264,
-0.05787017568945885,
0.07040809094905853,
-0.04655255749821663,
0.07862188667058945,
-0.04694962501525879,
0.021357612684369087,
0.029712112620472908,
0.07183511555194855,
-0.12316934019327164,
-0.07347521930932999,
-0.046465229243040085,
0.10615681856870651,
-0.044870443642139435,
0.015873070806264877,
-0.07062094658613205,
-0.07197265326976776,
-0.014828621409833431,
0.06979506462812424,
0.045564353466033936,
0.08675043284893036,
-0.24031022191047668,
-0.10457148402929306,
0.2062719315290451,
-0.13867945969104767,
-0.03980298712849617,
0.20446017384529114,
-0.013600141741335392,
0.13078045845031738,
0.1365833580493927,
0.289144903421402,
0.19699783623218536,
-0.17765900492668152,
0.013376861810684204,
-0.001499770674854517,
-0.0025760559365153313,
-0.09771495312452316,
0.09659655392169952,
-0.03191119059920311,
-0.021206272765994072,
0.026229508221149445,
-0.1438623070716858,
0.0995526984333992,
-0.012311931699514389,
-0.035170216113328934,
0.0018487273482605815,
-0.08531232923269272,
0.05440846085548401,
0.044933389872312546,
0.05278562754392624,
-0.04743680730462074,
-0.07497622072696686,
0.05490993708372116,
0.1561899483203888,
-0.14144748449325562,
0.04764139652252197,
-0.08684927970170975,
0.10965617746114731,
-0.04384196177124977,
0.004450731910765171,
-0.13113568723201752,
0.21112962067127228,
0.005855188705027103,
0.05026858299970627,
0.06328398734331131,
0.10002510249614716,
0.008332845754921436,
0.0589262917637825,
-0.029377447441220284,
-0.005761921871453524,
-0.014904794283211231,
-0.023546254262328148,
-0.03492465987801552,
-0.09285301715135574,
-0.04387986287474632,
-0.051495809108018875,
0.10068618506193161,
-0.13924473524093628,
0.019014019519090652,
-0.006269327364861965,
0.018299594521522522,
-0.027833545580506325,
-0.018288200721144676,
0.03668661788105965,
0.10558416694402695,
0.0013545521069318056,
-0.010398699901998043,
0.07233358919620514,
0.029804743826389313,
-0.044445618987083435,
0.11657069623470306,
-0.18221299350261688,
-0.03391014039516449,
0.09207563102245331,
-0.11208542436361313,
-0.022491244599223137,
0.017610138282179832,
-0.023373505100607872,
0.00921778567135334,
-0.07379185408353806,
-0.04601294547319412,
0.33448049426078796,
-0.026762541383504868,
0.14150936901569366,
-0.09393128752708435,
0.0070971474051475525,
-0.0019215528154745698,
-0.08073344081640244,
0.0841442197561264,
0.05992794409394264,
0.031246373429894447,
0.0298093780875206,
0.028535399585962296,
-0.03846867382526398,
-0.08909540623426437,
0.3018130958080292,
-0.020262375473976135,
-0.11029884964227676,
0.0531516969203949,
0.002176336944103241,
-0.01804416999220848,
0.0415232852101326,
-0.18472059071063995,
-0.05088438466191292,
0.013756638392806053,
0.060079384595155716,
0.074350506067276,
-0.13518522679805756,
0.014569470658898354,
0.005234948359429836,
-0.12898199260234833,
-0.1851169764995575,
0.06895141303539276,
-0.052162136882543564,
0.06082690879702568,
-0.08239974081516266,
-0.06312121450901031,
-0.030433613806962967,
-0.039366573095321655,
-0.18064862489700317,
0.12193207442760468,
-0.06049872562289238,
-0.15867429971694946,
-0.20540088415145874,
0.07877148687839508,
0.0700666680932045,
0.01843757927417755,
0.08633993566036224,
-0.1130543053150177,
0.022132569923996925,
-0.019859667867422104,
0.13181695342063904,
0.0078102522529661655,
-0.06789223849773407,
-0.015187530778348446,
0.051052555441856384,
0.042982831597328186,
-0.12694065272808075,
-0.006128143984824419,
-0.03735477104783058,
-0.07452943921089172,
-0.05569196119904518,
-0.024107184261083603,
-0.021621378138661385,
0.18192343413829803,
0.02959328517317772,
0.019348012283444405,
-0.036188382655382156,
0.13786041736602783,
-0.10966824740171432,
-0.004253595136106014,
0.2041379064321518,
-0.04076897352933884,
-0.02038988284766674,
0.07689247280359268,
0.04260893166065216,
-0.02374371327459812,
-0.005123468581587076,
-0.03997474163770676,
-0.10366447269916534,
-0.1912723332643509,
-0.13391442596912384,
-0.05154569819569588,
-0.05639506131410599,
-0.027813022956252098,
-0.007114816922694445,
0.08876350522041321,
0.04074782505631447,
-0.05553627759218216,
-0.10462852567434311,
0.055389970541000366,
0.020674213767051697,
0.04817318543791771,
0.011187420226633549,
0.08307237178087234,
-0.05993291363120079,
-0.018177906051278114,
-0.02264445461332798,
0.011111549101769924,
0.17922239005565643,
0.03598253056406975,
0.08212175965309143,
0.08814182877540588,
0.07473520189523697,
0.10228472948074341,
0.07572835683822632,
-0.046334732323884964,
-0.011823784559965134,
0.01799437776207924,
-0.07327080518007278,
-0.001999154919758439,
0.011268150992691517,
0.06369663774967194,
-0.024882519617676735,
-0.11187921464443207,
-0.06349864602088928,
0.033073484897613525,
0.1848294734954834,
0.04390167072415352,
-0.19144636392593384,
-0.1100277528166771,
-0.02267071045935154,
-0.0495520681142807,
0.04102782905101776,
0.03234035149216652,
0.1601928174495697,
-0.14642170071601868,
-0.012433014810085297,
0.03297313302755356,
0.0936669409275055,
-0.039004553109407425,
0.03919685259461403,
-0.09024491161108017,
0.038105349987745285,
-0.00434102863073349,
0.12508730590343475,
-0.2287922352552414,
0.21545720100402832,
0.0026416643522679806,
0.1590157449245453,
-0.07910671830177307,
-0.014550447463989258,
0.048761263489723206,
0.055226583033800125,
0.09308977425098419,
0.02532665990293026,
0.029570013284683228,
-0.1613030731678009,
-0.08262728154659271,
0.026104573160409927,
-0.003117020009085536,
-0.034241411834955215,
0.027677953243255615,
-0.022785315290093422,
-0.00015289938892237842,
-0.007147687952965498,
-0.09382561594247818,
-0.09726561605930328,
-0.028173338621854782,
0.004588152747601271,
0.19841064512729645,
0.08307202160358429,
-0.028561001643538475,
-0.09030196070671082,
-0.09033182263374329,
0.02725732885301113,
-0.11694224178791046,
-0.06645192950963974,
-0.03397269546985626,
-0.06294991075992584,
0.08117610216140747,
-0.09283509850502014,
-0.03786212578415871,
0.10984472185373306,
0.09793997555971146,
-0.03898687660694122,
-0.07594156265258789,
0.020699912682175636,
-0.1295139640569687,
-0.08842223882675171,
0.008132070302963257,
0.23011568188667297,
0.12153016775846481,
0.08378700911998749,
0.048440780490636826,
0.0041428497061133385,
0.02132468670606613,
-0.05735009163618088,
-0.034184277057647705,
0.08165828138589859,
-0.095872662961483,
0.01673191227018833,
-0.06425660103559494,
-0.13219919800758362,
-0.14763380587100983,
-0.050984833389520645,
0.1825372874736786,
0.08190758526325226,
-0.07029438763856888,
0.13171784579753876,
0.2449503093957901,
-0.07924173772335052,
-0.19516664743423462,
-0.001828639768064022,
0.12001460045576096,
0.10988052934408188,
-0.013720464892685413,
-0.2052600085735321,
0.05602123588323593,
-0.0044461814686656,
-0.023767627775669098,
-0.04620871692895889,
-0.36278340220451355,
-0.13114354014396667,
0.16305124759674072,
-0.001973761711269617,
0.12421422451734543,
0.012667224742472172,
-0.00362230371683836,
0.026053234934806824,
0.007776434067636728,
-0.013358294032514095,
-0.1342366635799408,
0.11636900156736374,
0.049164775758981705,
0.07426205277442932,
0.060357894748449326,
-0.04062849283218384,
0.06374102085828781,
0.11539220809936523,
-0.020793670788407326,
-0.027936862781643867,
0.049885861575603485,
0.061333175748586655,
0.006705780979245901,
0.1340656280517578,
-0.11599362641572952,
0.020264815539121628,
-0.08830221742391586,
-0.08271914720535278,
-0.08529946208000183,
0.058052804321050644,
0.019802112132310867,
-0.0409102626144886,
0.020065777003765106,
-0.019763505086302757,
0.006117681972682476,
0.012837913818657398,
-0.06369300186634064,
-0.13827459514141083,
0.08078008145093918,
0.0815512016415596,
0.17164471745491028,
-0.06140677258372307,
-0.07233614474534988,
0.0021164806094020605,
-0.022665848955512047,
0.1321175992488861,
-0.04953652620315552,
0.01626734994351864,
0.077354796230793,
0.031780678778886795,
0.12080377340316772,
0.028285762295126915,
-0.10646912455558777,
0.07168927788734436,
0.03752285614609718,
-0.04620750620961189,
-0.07709497213363647,
-0.05850716307759285,
0.004535178188234568,
-0.010984702967107296,
0.023610521107912064,
0.09222205728292465,
-0.11585266143083572,
-0.012506191618740559,
-0.030048001557588577,
-0.026816127821803093,
-0.1312960535287857,
0.18719691038131714,
0.004643927328288555,
0.08451081812381744,
-0.09354330599308014,
0.02583366632461548,
0.009560815989971161,
0.004385009873658419,
0.05487842485308647,
0.014551020227372646,
-0.10981399565935135,
-0.07093025743961334,
-0.05485819652676582,
0.12712369859218597,
0.027720415964722633,
-0.13143610954284668,
-0.07778002321720123,
-0.08164311200380325,
-0.014496820978820324,
0.06065867841243744,
0.06375192850828171,
-0.01407010667026043,
-0.15317225456237793,
-0.04577730968594551,
-0.13355141878128052,
0.03601574897766113,
0.07045546919107437,
-0.029627330601215363,
-0.08012482523918152,
0.2407137006521225,
0.09303268045186996,
0.007788753602653742,
-0.04475713148713112,
-0.09702380746603012,
-0.007395830936729908,
0.08900610357522964,
-0.04243067651987076,
-0.05916668474674225,
-0.06245966628193855,
0.0004526520788203925,
-0.02171502821147442,
-0.08375855535268784,
0.010135386139154434,
0.08203734457492828,
-0.08556795865297318,
0.043329764157533646,
-0.021648220717906952,
0.0665685385465622,
-0.06513480842113495,
0.020972440019249916,
0.03802113234996796,
-0.05888073518872261,
0.07361983507871628,
0.12819761037826538,
-0.09317580610513687,
0.14743998646736145,
-0.18727903068065643,
-0.05535927787423134,
0.036010824143886566,
0.05128127336502075,
-0.023478152230381966,
-0.04354141280055046,
0.04764914512634277,
0.05446552857756615,
0.08458735048770905,
-0.0013960502110421658,
0.07832808047533035,
-0.04833459109067917,
0.0018819330725818872,
-0.008056942373514175,
0.014147496782243252,
-0.03785698488354683,
0.08538196980953217,
0.05385353043675423,
0.1332932710647583,
0.12091923505067825,
-0.11294916272163391,
0.12057798355817795,
-0.09807420521974564,
0.019395092502236366,
-0.05404523015022278,
-0.02441154606640339,
-0.08566441386938095,
-0.09259428083896637,
0.044710658490657806,
-0.06744066625833511,
0.07966741919517517,
-0.00606648251414299,
0.10644301027059555,
-0.015747297555208206,
-0.03163069486618042,
0.03819408640265465,
-0.024394523352384567,
0.25182515382766724,
0.049601923674345016,
0.01395145058631897,
-0.043725401163101196,
0.02844560518860817,
0.03169015422463417,
0.05462855473160744,
-0.0038835552986711264,
0.17099657654762268,
-0.03177058696746826,
0.056055162101984024,
0.09716098010540009,
-0.05915733799338341,
-0.1148550882935524,
-0.0925753265619278,
-0.053148601204156876,
0.012287096120417118,
-0.09134561568498611,
0.24250781536102295,
0.18073628842830658,
-0.07077475637197495,
0.09911942481994629,
0.07592469453811646,
-0.07826875150203705,
-0.12859869003295898,
-0.11384207010269165,
-0.009111007675528526,
-0.14156676828861237,
0.010744555853307247,
-0.04942204803228378,
-0.010511018335819244,
0.06403844803571701,
0.061718590557575226,
-0.04202713817358017,
0.2163618505001068,
0.029112854972481728,
-0.09039968252182007,
0.08885609358549118,
-0.10807289928197861,
0.022452155128121376,
-0.1036943569779396,
0.008246881887316704,
0.16284984350204468,
0.005425330251455307,
0.08519414067268372,
-0.002313329605385661,
-0.07839930802583694,
-0.006132942624390125,
-0.09941449761390686,
-0.05900692567229271,
-0.010575491935014725,
-0.028360284864902496,
0.07023248076438904,
0.12933243811130524,
0.11296863108873367,
-0.07979295402765274,
-0.012241506949067116,
0.17486688494682312,
-0.03402005508542061,
-0.15964092314243317,
-0.12410132586956024,
0.16412976384162903,
0.07325026392936707,
-0.01066506002098322,
-0.0311506949365139,
-0.025771673768758774,
-0.04134853556752205,
0.2577834725379944,
0.24399715662002563,
0.07318387925624847,
0.026206230744719505,
-0.05061536282300949,
-0.010840714909136295,
-0.011521118693053722,
0.10827895253896713,
0.0657554343342781,
0.19874678552150726,
-0.004027301911264658,
0.04784950613975525,
-0.08507252484560013,
-0.05526123195886612,
-0.03725496679544449,
0.06532315909862518,
-0.07717615365982056,
-0.08468560874462128,
-0.003788683796301484,
0.12813737988471985,
-0.05039367824792862,
-0.08241824060678482,
-0.1634492725133896,
-0.06876087188720703,
-0.08660894632339478,
-0.012160689570009708,
0.007746710907667875,
0.10057196021080017,
0.020857105031609535,
-0.04913215711712837,
0.021416619420051575,
0.0808660015463829,
-0.006570081692188978,
-0.017057882621884346,
-0.10320864617824554,
0.02292984165251255,
-0.14613863825798035,
0.05991502106189728,
-0.04316256567835808,
0.13537076115608215,
0.03066573664546013,
0.11213662475347519,
0.008910776115953922,
0.15698283910751343,
-0.023102501407265663,
-0.06073659658432007,
0.04143957793712616,
0.13778217136859894,
-0.059232838451862335,
0.10453497618436813,
-0.0007959118811413646,
-0.1991928517818451,
0.05296901986002922,
-0.17679612338542938,
-0.020736699923872948,
-0.07056811451911926,
0.0447843037545681,
-0.015734735876321793,
0.07256705313920975,
0.09535127133131027,
-0.07106409221887589,
-0.05646788701415062,
-0.0704539567232132,
0.015008416958153248,
0.06214729696512222,
-0.08932045102119446,
-0.06355436146259308,
-0.2572512626647949,
-0.03419232368469238,
-0.12733697891235352,
-0.016986465081572533,
-0.22690992057323456,
-0.006037491373717785,
-0.014140107668936253,
-0.08269286155700684,
0.0017701330361887813,
0.013676914386451244,
0.08642598241567612,
0.028834406286478043,
0.005212326999753714,
-0.04280957207083702,
0.03359542414546013,
0.1411229819059372,
-0.21098677814006805,
-0.11326265335083008
] |
null | null |
transformers
|
# Wav2Vec2-Large-XLSR-53-Luganda
Fine-tuned [facebook/wav2vec2-large-xlsr-53](https://huggingface.co/facebook/wav2vec2-large-xlsr-53) on Luganda using the [Common Voice](https://huggingface.co/datasets/common_voice) dataset.
When using this model, make sure that your speech input is sampled at 16kHz.
## Usage
The model can be used directly (without a language model) as follows:
```python
import torch
import torchaudio
from datasets import load_dataset
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
test_dataset = load_dataset("common_voice", "lg", split="test[:2%]")
processor = Wav2Vec2Processor.from_pretrained("birgermoell/wav2vec2-luganda")
model = Wav2Vec2ForCTC.from_pretrained("birgermoell/wav2vec2-luganda")
resampler = torchaudio.transforms.Resample(48_000, 16_000)
# Preprocessing the datasets.
# We need to read the audio files as arrays
def speech_file_to_array_fn(batch):
    speech_array, sampling_rate = torchaudio.load(batch["path"])
    batch["speech"] = resampler(speech_array).squeeze().numpy()
    return batch
test_dataset = test_dataset.map(speech_file_to_array_fn)
inputs = processor(test_dataset["speech"][:2], sampling_rate=16_000, return_tensors="pt", padding=True)
with torch.no_grad():
    logits = model(inputs.input_values, attention_mask=inputs.attention_mask).logits
predicted_ids = torch.argmax(logits, dim=-1)
print("Prediction:", processor.batch_decode(predicted_ids))
print("Reference:", test_dataset["sentence"][:2])
```
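Beyond the Common Voice example above, a minimal sketch for transcribing a single local recording (the path `luganda_sample.wav` is only a placeholder) could look like this:
```python
import torch
import torchaudio
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor

processor = Wav2Vec2Processor.from_pretrained("birgermoell/wav2vec2-luganda")
model = Wav2Vec2ForCTC.from_pretrained("birgermoell/wav2vec2-luganda")

# Load the recording and resample it to the 16 kHz the model expects.
speech_array, sampling_rate = torchaudio.load("luganda_sample.wav")
speech = torchaudio.transforms.Resample(sampling_rate, 16_000)(speech_array).squeeze().numpy()

inputs = processor(speech, sampling_rate=16_000, return_tensors="pt", padding=True)
with torch.no_grad():
    logits = model(inputs.input_values, attention_mask=inputs.attention_mask).logits
predicted_ids = torch.argmax(logits, dim=-1)
print(processor.batch_decode(predicted_ids))
```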
## Evaluation
The model can be evaluated as follows on the Luganda test data of Common Voice.
```python
import torch
import torchaudio
from datasets import load_dataset, load_metric
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
import re
test_dataset = load_dataset("common_voice", "lg", split="test")
wer = load_metric("wer")
processor = Wav2Vec2Processor.from_pretrained("birgermoell/wav2vec2-luganda")
model = Wav2Vec2ForCTC.from_pretrained("birgermoell/wav2vec2-luganda")
model.to("cuda")
chars_to_ignore_regex = '[\,\?\.\!\-\;\:\"\“]'
resampler = torchaudio.transforms.Resample(48_000, 16_000)
# Preprocessing the datasets.
# We need to read the audio files as arrays
def speech_file_to_array_fn(batch):
    batch["sentence"] = re.sub(chars_to_ignore_regex, '', batch["sentence"]).lower()
    speech_array, sampling_rate = torchaudio.load(batch["path"])
    batch["speech"] = resampler(speech_array).squeeze().numpy()
    return batch
test_dataset = test_dataset.map(speech_file_to_array_fn)
# Run batched inference on the preprocessed dataset.
def evaluate(batch):
    inputs = processor(batch["speech"], sampling_rate=16_000, return_tensors="pt", padding=True)
    with torch.no_grad():
        logits = model(inputs.input_values.to("cuda"), attention_mask=inputs.attention_mask.to("cuda")).logits
    pred_ids = torch.argmax(logits, dim=-1)
    batch["pred_strings"] = processor.batch_decode(pred_ids)
    return batch
result = test_dataset.map(evaluate, batched=True, batch_size=8)
print("WER: {:2f}".format(100 * wer.compute(predictions=result["pred_strings"], references=result["sentence"])))
```
**Test Result**:
WER: 48.314356
## Training
The Common Voice `train` and `validation` datasets were used for training.
The script used for training can be found here
https://colab.research.google.com/drive/1ZeII36LZ5IpBrTV7kBaTVfhDqygznlmC?usp=sharing
|
{"language": "lg", "license": "apache-2.0", "tags": ["audio", "automatic-speech-recognition", "speech", "xlsr-fine-tuning-week"], "datasets": ["common_voice"], "model-index": [{"name": "XLSR Wav2Vec2 Luganda by Birger Moell", "results": [{"task": {"type": "automatic-speech-recognition", "name": "Speech Recognition"}, "dataset": {"name": "Common Voice Luganda", "type": "common_voice", "args": "lg"}, "metrics": [{"type": "wer", "value": 48.31, "name": "Test WER"}]}]}]}
|
automatic-speech-recognition
|
birgermoell/wav2vec2-luganda
|
[
"transformers",
"pytorch",
"jax",
"wav2vec2",
"automatic-speech-recognition",
"audio",
"speech",
"xlsr-fine-tuning-week",
"lg",
"dataset:common_voice",
"license:apache-2.0",
"model-index",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"lg"
] |
TAGS
#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #xlsr-fine-tuning-week #lg #dataset-common_voice #license-apache-2.0 #model-index #endpoints_compatible #region-us
|
# Wav2Vec2-Large-XLSR-53-Luganda
Fine-tuned facebook/wav2vec2-large-xlsr-53 in Luganda using the Common Voice
When using this model, make sure that your speech input is sampled at 16kHz.
## Usage
The model can be used directly (without a language model) as follows:
## Evaluation
The model can be evaluated as follows on the Luganda test data of Common Voice.
Test Result:
WER: 48.314356
## Training
The Common Voice 'train' and 'validation' datasets were used for training.
The script used for training can be found here
URL
|
[
"# Wav2Vec2-Large-XLSR-53-Luganda\n\nFine-tuned facebook/wav2vec2-large-xlsr-53 in Luganda using the Common Voice\nWhen using this model, make sure that your speech input is sampled at 16kHz.",
"## Usage\n\nThe model can be used directly (without a language model) as follows:",
"## Evaluation\n\nThe model can be evaluated as follows on the Luganda test data of Common Voice.\n\n\n\n\nTest Result:\nWER: 48.314356",
"## Training\n\nThe Common Voice 'train' and 'validation' datasets were used for training.\nThe script used for training can be found here\nURL"
] |
[
"TAGS\n#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #xlsr-fine-tuning-week #lg #dataset-common_voice #license-apache-2.0 #model-index #endpoints_compatible #region-us \n",
"# Wav2Vec2-Large-XLSR-53-Luganda\n\nFine-tuned facebook/wav2vec2-large-xlsr-53 in Luganda using the Common Voice\nWhen using this model, make sure that your speech input is sampled at 16kHz.",
"## Usage\n\nThe model can be used directly (without a language model) as follows:",
"## Evaluation\n\nThe model can be evaluated as follows on the Luganda test data of Common Voice.\n\n\n\n\nTest Result:\nWER: 48.314356",
"## Training\n\nThe Common Voice 'train' and 'validation' datasets were used for training.\nThe script used for training can be found here\nURL"
] |
[
80,
62,
20,
33,
33
] |
[
"passage: TAGS\n#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #xlsr-fine-tuning-week #lg #dataset-common_voice #license-apache-2.0 #model-index #endpoints_compatible #region-us \n# Wav2Vec2-Large-XLSR-53-Luganda\n\nFine-tuned facebook/wav2vec2-large-xlsr-53 in Luganda using the Common Voice\nWhen using this model, make sure that your speech input is sampled at 16kHz.## Usage\n\nThe model can be used directly (without a language model) as follows:## Evaluation\n\nThe model can be evaluated as follows on the Luganda test data of Common Voice.\n\n\n\n\nTest Result:\nWER: 48.314356## Training\n\nThe Common Voice 'train' and 'validation' datasets were used for training.\nThe script used for training can be found here\nURL"
] |
[
-0.14543816447257996,
0.03393751010298729,
-0.0019601560197770596,
0.009657609276473522,
0.12493585795164108,
-0.026905419304966927,
0.1814686357975006,
0.11572858691215515,
-0.006572915706783533,
-0.03327083960175514,
0.020736951380968094,
0.015841376036405563,
0.04143492132425308,
0.05378127098083496,
0.05567163974046707,
-0.2411934733390808,
-0.02066388726234436,
0.02194250002503395,
0.008440305478870869,
0.11947847902774811,
0.08931917697191238,
-0.04792900010943413,
-0.0035916853230446577,
0.07609695196151733,
-0.16400691866874695,
0.049558091908693314,
0.006609977688640356,
-0.08785438537597656,
0.14541131258010864,
0.06657334417104721,
0.0960044339299202,
0.027991613373160362,
0.11968967318534851,
-0.21880242228507996,
0.02970919758081436,
0.033192139118909836,
0.030627930536866188,
0.025999777019023895,
0.003973201848566532,
-0.03889738768339157,
0.0997733399271965,
0.0970887839794159,
-0.009160264395177364,
0.08201010525226593,
-0.07503712922334671,
-0.16449864208698273,
0.005261903163045645,
-0.019440947100520134,
0.047101378440856934,
0.1661137491464615,
-0.04441429302096367,
0.038848258554935455,
-0.15316593647003174,
0.08279525488615036,
0.12698793411254883,
-0.1555827111005783,
0.0043456158600747585,
0.13654614984989166,
0.07927398383617401,
0.044277019798755646,
-0.08904755115509033,
0.017498522996902466,
0.034387391060590744,
0.014241350814700127,
0.047446176409721375,
-0.026825232431292534,
-0.20415079593658447,
-0.02796034887433052,
-0.1194746121764183,
-0.011258658953011036,
0.22263075411319733,
-0.02238599583506584,
-0.0780261680483818,
-0.10785701870918274,
0.015267996117472649,
0.001494544092565775,
-0.01347311120480299,
-0.01818629540503025,
0.0052071246318519115,
0.05850125104188919,
-0.019739046692848206,
-0.06416798382997513,
-0.10403001308441162,
-0.16753129661083221,
0.04428890347480774,
0.06949776411056519,
0.02440456673502922,
-0.00042280947673134506,
-0.15536612272262573,
0.110564224421978,
-0.0631420686841011,
-0.07389824092388153,
-0.03710652515292168,
0.026890546083450317,
-0.027676379308104515,
0.01403330359607935,
-0.06096276268362999,
-0.12079742550849915,
0.028401194140315056,
0.05012566223740578,
0.11572080850601196,
0.03509718179702759,
-0.030945884063839912,
0.06317760795354843,
-0.01528883446007967,
0.1336064487695694,
-0.013567514717578888,
-0.025078680366277695,
0.06496521830558777,
0.07882160693407059,
-0.04719293490052223,
-0.025181923061609268,
-0.10357236862182617,
-0.0676213800907135,
0.0016652606427669525,
0.09055617451667786,
-0.028528058901429176,
0.03664499893784523,
-0.05403909832239151,
-0.02181069739162922,
0.011812233366072178,
-0.11206980049610138,
-0.037480805069208145,
0.07506143301725388,
-0.03008262813091278,
0.09197280555963516,
0.1309950202703476,
0.03721807524561882,
-0.07233670353889465,
-0.054399456828832626,
0.004851228091865778,
0.07537315785884857,
-0.06641685217618942,
-0.08112551271915436,
0.02360256388783455,
0.01856701448559761,
-0.0006117295706644654,
-0.12096812576055527,
-0.15907038748264313,
-0.0744677409529686,
-0.004492111504077911,
0.04907546937465668,
0.010451260022819042,
-0.11611241102218628,
-0.014341706410050392,
-0.043061353266239166,
-0.045035261660814285,
0.06536795943975449,
-0.039639826864004135,
0.06806409358978271,
0.0348268561065197,
0.040971655398607254,
0.04255190119147301,
0.07338061928749084,
-0.0870673805475235,
-0.0594111792743206,
0.01786446012556553,
0.13167548179626465,
-0.024437127634882927,
-0.0034645574633032084,
-0.07816742360591888,
-0.07692417502403259,
-0.11523421108722687,
0.08790595829486847,
0.04890301451086998,
0.13388553261756897,
-0.25059109926223755,
-0.08555557578802109,
0.2597735822200775,
-0.11221257597208023,
-0.02295745350420475,
0.1583230048418045,
-0.034112755209207535,
0.15379782021045685,
0.13405439257621765,
0.20545721054077148,
0.18603818118572235,
-0.19963161647319794,
0.06894471496343613,
0.008238192647695541,
0.001566018327139318,
-0.06395208835601807,
0.07768692821264267,
-0.052429523319005966,
-0.015568772330880165,
0.0343099907040596,
-0.09439275413751602,
0.10321082174777985,
-0.025523127987980843,
-0.06119120866060257,
-0.012451397255063057,
-0.0784592255949974,
0.030426759272813797,
0.031769853085279465,
0.012286275625228882,
-0.012121472507715225,
-0.09864699840545654,
0.07050720602273941,
0.14874760806560516,
-0.15744680166244507,
0.057925984263420105,
-0.11944567412137985,
0.015215503983199596,
-0.024423955008387566,
0.012723153457045555,
-0.14176668226718903,
0.116425521671772,
-0.027812831103801727,
0.04204985871911049,
0.06407153606414795,
0.09714235365390778,
0.01030708383768797,
0.024330731481313705,
-0.038744498044252396,
-0.006183360703289509,
-0.03929063677787781,
-0.04161020740866661,
-0.02445886842906475,
-0.08159239590167999,
-0.059987086802721024,
-0.06676984578371048,
0.09070397168397903,
-0.20933648943901062,
0.0362468957901001,
0.0017772030550986528,
-0.03486529365181923,
-0.006791438441723585,
-0.031640756875276566,
0.09835531562566757,
0.10127988457679749,
-0.015669574961066246,
-0.016591845080256462,
0.05818389728665352,
0.016525007784366608,
-0.07168468832969666,
0.07152340561151505,
-0.14204026758670807,
0.007815014570951462,
0.10018318146467209,
-0.05822928994894028,
-0.005021307617425919,
0.02329978719353676,
-0.007360138930380344,
-0.009159433655440807,
-0.08927001804113388,
-0.039817746728658676,
0.2780842185020447,
-0.02236567996442318,
0.12469292432069778,
-0.08541359007358551,
0.017122210934758186,
-0.008897235617041588,
-0.09315691143274307,
0.08131511509418488,
0.040054965764284134,
0.03331040218472481,
0.080661840736866,
0.01855284906923771,
-0.06549083441495895,
-0.09546969830989838,
0.28136110305786133,
-0.027899637818336487,
-0.06363785266876221,
0.018391400575637817,
-0.010057251900434494,
-0.015483789145946503,
0.03737087920308113,
-0.1953706443309784,
-0.07298030704259872,
0.001067657838575542,
0.048818573355674744,
0.07594072073698044,
-0.180773064494133,
-0.011642465367913246,
0.034599632024765015,
-0.12993201613426208,
-0.15900878608226776,
0.05554521083831787,
-0.07071618735790253,
0.0393705740571022,
-0.096281997859478,
-0.023785632103681564,
0.0031451876275241375,
-0.0414428748190403,
-0.1697407066822052,
0.1557704657316208,
-0.07263093441724777,
-0.19150608777999878,
-0.15079297125339508,
0.053696367889642715,
0.08040633052587509,
0.01424114778637886,
0.07575467228889465,
-0.13244763016700745,
0.011249100789427757,
-0.028987746685743332,
0.1072915717959404,
0.02657570131123066,
-0.04778970032930374,
-0.006376976612955332,
0.03931521996855736,
0.058978479355573654,
-0.17780373990535736,
0.004865548107773066,
-0.018781866878271103,
-0.08662685006856918,
-0.02264544554054737,
-0.03959881514310837,
0.0275882575660944,
0.17844125628471375,
0.062769815325737,
0.0113450912758708,
-0.02121814526617527,
0.1411682814359665,
-0.07652515918016434,
-0.013787828385829926,
0.23982317745685577,
-0.01788238435983658,
-0.015268165618181229,
0.04479164257645607,
0.03820841759443283,
-0.08331498503684998,
0.014347042888402939,
-0.017218448221683502,
-0.11698970943689346,
-0.22972539067268372,
-0.07589476555585861,
-0.06301999092102051,
-0.04644511267542839,
-0.010665948502719402,
0.004571849014610052,
0.049930717796087265,
0.0215569119900465,
0.005882267374545336,
-0.07042773813009262,
0.08466461300849915,
0.01403058785945177,
0.06226377561688423,
-0.0028339733835309744,
0.09555855393409729,
-0.0596415139734745,
-0.008651621639728546,
-0.010479645803570747,
0.03312927857041359,
0.1609610915184021,
0.04893098399043083,
0.09049513190984726,
0.09748956561088562,
0.10258401185274124,
0.10633929073810577,
0.06802694499492645,
-0.040085822343826294,
-0.009756479412317276,
0.018545998260378838,
-0.053562864661216736,
-0.05986678600311279,
0.025003749877214432,
0.06865659356117249,
-0.046645089983940125,
-0.058866824954748154,
-0.020545363426208496,
0.0104269003495574,
0.12238342314958572,
0.052328113466501236,
-0.22181200981140137,
-0.08379504084587097,
-0.025334496051073074,
-0.03773494437336922,
0.01608716882765293,
0.06801868230104446,
0.13626842200756073,
-0.13483496010303497,
0.003558332333341241,
0.006912562530487776,
0.09737957268953323,
-0.02840975672006607,
0.0223128292709589,
-0.04377727583050728,
0.07028605788946152,
-0.01082932110875845,
0.09207344055175781,
-0.29478147625923157,
0.17464768886566162,
-0.002801374066621065,
0.15168379247188568,
-0.04294341802597046,
0.0026804779190570116,
0.04987819120287895,
0.052718181163072586,
0.10655032098293304,
0.005971258040517569,
0.060988351702690125,
-0.1278432309627533,
-0.07299786806106567,
0.052391018718481064,
0.0022305010352283716,
-0.013693138025701046,
0.06694957613945007,
-0.008470271714031696,
0.004229253623634577,
0.028241688385605812,
-0.06439648568630219,
-0.1639646589756012,
-0.0691850408911705,
-0.0023708543740212917,
0.13812366127967834,
0.13448761403560638,
-0.040117066353559494,
-0.0830422192811966,
-0.021377524361014366,
0.036596011370420456,
-0.07827408611774445,
-0.055199239403009415,
-0.0525309294462204,
0.00884360820055008,
0.06941665709018707,
-0.0709294006228447,
0.006789593957364559,
0.1021188348531723,
0.09190906584262848,
-0.02829333022236824,
-0.0616830438375473,
0.024040011689066887,
-0.11543383449316025,
-0.09891171008348465,
-0.012019171379506588,
0.19298043847084045,
0.10046140849590302,
0.06490879505872726,
0.05079454556107521,
-0.025160834193229675,
0.023233981803059578,
-0.053602077066898346,
-0.007943691685795784,
0.1508551687002182,
-0.09845235198736191,
0.011706561781466007,
-0.059930503368377686,
-0.11939909309148788,
-0.10146208107471466,
-0.06806261837482452,
0.16690577566623688,
0.046978872269392014,
-0.048384882509708405,
0.10816957801580429,
0.18442979454994202,
-0.12905368208885193,
-0.21524344384670258,
0.004361344035714865,
0.09229651838541031,
0.10901765525341034,
-0.006776814814656973,
-0.2723047435283661,
0.05091019719839096,
-0.0005488950992003083,
-0.010164281353354454,
-0.00598490983247757,
-0.37076500058174133,
-0.1431736797094345,
0.13125169277191162,
-0.005527906119823456,
0.13534577190876007,
-0.03637713938951492,
-0.010959203355014324,
-0.005515408236533403,
-0.05870848894119263,
0.047306641936302185,
-0.09015676379203796,
0.13230864703655243,
0.02797403186559677,
0.09198758751153946,
0.042085833847522736,
-0.033477094024419785,
0.06254646927118301,
0.08435951173305511,
0.007886221632361412,
-0.009038921445608139,
0.026072904467582703,
0.024874592199921608,
-0.005635000765323639,
0.1487146019935608,
-0.12303690612316132,
0.015144039876759052,
-0.0810035765171051,
-0.10773678869009018,
-0.07705910503864288,
0.058290302753448486,
0.01902509108185768,
-0.03782769292593002,
0.015601337887346745,
-0.02427624724805355,
0.0015666125109419227,
0.0182157214730978,
-0.05134987458586693,
-0.13136589527130127,
0.0334024578332901,
0.08403073251247406,
0.19358304142951965,
0.02181309461593628,
-0.11223335564136505,
-0.005341436713933945,
-0.01569780893623829,
0.13187314569950104,
-0.15930062532424927,
0.018394291400909424,
0.06106136366724968,
0.05993158742785454,
0.1434021145105362,
0.036755215376615524,
-0.10387680679559708,
0.08102577924728394,
0.05153844878077507,
-0.026420142501592636,
-0.09996160864830017,
-0.03819015994668007,
-0.05126972869038582,
-0.04178312420845032,
0.01957830786705017,
0.09822347015142441,
-0.09902100265026093,
-0.017374536022543907,
-0.015360536053776741,
0.0166871789842844,
-0.14029589295387268,
0.20130029320716858,
0.04959657043218613,
0.06368403881788254,
-0.0789036676287651,
0.046286359429359436,
-0.022701237350702286,
-0.008047948591411114,
0.05107679218053818,
0.011706945486366749,
-0.08056151866912842,
-0.0761677548289299,
-0.0693967193365097,
0.1299920529127121,
0.01571272313594818,
-0.10640428215265274,
-0.0653173178434372,
-0.0717867836356163,
-0.011072245426476002,
0.07825861126184464,
0.04564643278717995,
0.030466729775071144,
-0.11943814158439636,
-0.014181734062731266,
-0.11263082921504974,
0.04331168904900551,
0.09147875010967255,
-0.0349411703646183,
-0.10536389797925949,
0.184820294380188,
0.10242482274770737,
0.0062577868811786175,
-0.02531108446419239,
-0.08517150580883026,
-0.04645269364118576,
0.09998490661382675,
-0.07465814054012299,
-0.014632940292358398,
-0.04855550825595856,
0.01635430008172989,
-0.022600235417485237,
-0.05345425009727478,
0.009746192954480648,
0.09502740949392319,
-0.0848073735833168,
0.01705598272383213,
-0.013363574631512165,
0.06703919172286987,
-0.06433757394552231,
0.004235055297613144,
0.03814255818724632,
-0.06380204856395721,
0.05600791424512863,
0.1137721985578537,
-0.08991871029138565,
0.129233255982399,
-0.18602585792541504,
-0.01400022767484188,
0.085756316781044,
0.050540272146463394,
-0.06105726212263107,
-0.07240885496139526,
0.03553646430373192,
0.043151188641786575,
0.07176299393177032,
-0.018777262419462204,
0.11340038478374481,
-0.07761406898498535,
0.009926144964993,
-0.023306120187044144,
0.01731567457318306,
-0.028137682005763054,
0.0512952022254467,
0.06676310300827026,
0.14296796917915344,
0.14551913738250732,
-0.10377800464630127,
0.1162700280547142,
-0.1501043736934662,
0.007654134649783373,
-0.03448944538831711,
-0.015792066231369972,
-0.11112567782402039,
-0.08142544329166412,
0.06092995032668114,
-0.06473473459482193,
0.08427730947732925,
0.010308107361197472,
0.0347546748816967,
-0.04725867509841919,
-0.05728737264871597,
0.038666415959596634,
-0.018220609053969383,
0.22518402338027954,
0.043508000671863556,
0.02129053696990013,
0.0013637844240292907,
0.02242279052734375,
0.042677510529756546,
0.08110802620649338,
0.02992555871605873,
0.15558253228664398,
0.021812694147229195,
0.09064754843711853,
0.08370474725961685,
-0.05692329257726669,
-0.09058123081922531,
-0.07094317674636841,
-0.07722228765487671,
0.025253858417272568,
-0.08921335637569427,
0.15329350531101227,
0.14518408477306366,
-0.05786531791090965,
0.09231182187795639,
0.04907466843724251,
-0.09804192185401917,
-0.16144013404846191,
-0.16281792521476746,
-0.02417529560625553,
-0.12436980754137039,
0.02553330361843109,
-0.08445876091718674,
0.0077808331698179245,
0.05042240396142006,
0.02750137634575367,
-0.02990550734102726,
0.21617518365383148,
0.038174547255039215,
-0.11580050736665726,
0.06436437368392944,
-0.08915538340806961,
-0.005532517563551664,
-0.08738311380147934,
0.04211597144603729,
0.1587107628583908,
0.0056147146970033646,
0.06516019254922867,
0.010988453403115273,
-0.06050323322415352,
0.044393762946128845,
-0.08259249478578568,
-0.05661414936184883,
-0.032482728362083435,
-0.00038906687404960394,
0.09333911538124084,
0.12018154561519623,
0.11516543477773666,
-0.08634742349386215,
0.024451354518532753,
0.14785313606262207,
-0.026490522548556328,
-0.1568073183298111,
-0.1296430081129074,
0.12683720886707306,
0.04253014922142029,
-0.009861956350505352,
-0.048806678503751755,
-0.02585953287780285,
0.011713405139744282,
0.28472766280174255,
0.22546961903572083,
0.05140545591711998,
0.03305419906973839,
-0.025679973885416985,
-0.004779302980750799,
-0.017701406031847,
0.0911264643073082,
0.092903733253479,
0.16838762164115906,
-0.0003778883838094771,
0.008157843723893166,
-0.05424997955560684,
-0.0733800008893013,
0.002591116586700082,
0.044049955904483795,
-0.07563239336013794,
-0.05367664247751236,
-0.018687983974814415,
0.10612146556377411,
-0.05881130322813988,
-0.09804745018482208,
-0.06957335025072098,
-0.08331037312746048,
-0.06802049279212952,
-0.03930194303393364,
0.011973291635513306,
0.0937562957406044,
0.021181568503379822,
-0.06919357180595398,
0.01118406094610691,
0.12526988983154297,
-0.011186656542122364,
-0.06656188517808914,
-0.0885704979300499,
0.02540765516459942,
-0.050441283732652664,
0.03278077393770218,
-0.02916841395199299,
0.14022168517112732,
0.021182000637054443,
0.09873711317777634,
-0.03707355633378029,
0.14562617242336273,
-0.034477509558200836,
-0.014794871211051941,
-0.0019065187079831958,
0.08450212329626083,
-0.051124271005392075,
0.12078279256820679,
-0.004355703014880419,
-0.12337657809257507,
0.04617695510387421,
-0.09258902817964554,
-0.05006333440542221,
-0.09988070279359818,
0.019587276503443718,
-0.04815234988927841,
0.081544429063797,
0.09835558384656906,
-0.07477430999279022,
-0.0658564418554306,
-0.0407617911696434,
0.06306765973567963,
0.04669966176152229,
-0.040742263197898865,
-0.06994383037090302,
-0.21686477959156036,
-0.02831128053367138,
-0.09532028436660767,
-0.023513557389378548,
-0.18224193155765533,
-0.03850835561752319,
0.002759198658168316,
-0.08171502500772476,
-0.003071107203140855,
0.053022194653749466,
0.1166432574391365,
0.05498802289366722,
0.009309959597885609,
-0.061354950070381165,
0.05098341032862663,
0.14687886834144592,
-0.20189756155014038,
-0.11805764585733414
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# wav2vec2-speechdat
This model is a fine-tuned version of [facebook/wav2vec2-large-xlsr-53](https://huggingface.co/facebook/wav2vec2-large-xlsr-53) on the COMMON_VOICE - SV-SE dataset.
It achieves the following results on the evaluation set:
- Loss: 0.4578
- Wer: 0.2927
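The reported Wer is the word error rate, i.e. the fraction of reference words that must be substituted, deleted, or inserted to match the prediction, so a value of 0.2927 corresponds to roughly 29% of reference words being wrong. As a toy illustration of how such a score is computed (the Swedish sentences below are made up for illustration only), a sketch could look like this:
```python
from datasets import load_metric

wer = load_metric("wer")

# One substituted word out of seven reference words gives a WER of about 0.14.
predictions = ["jag heter anna och bor i stockholm"]
references = ["jag heter anna och bor i göteborg"]
print(wer.compute(predictions=predictions, references=references))
```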
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training (a `TrainingArguments` sketch follows the list):
- learning_rate: 0.0003
- train_batch_size: 16
- eval_batch_size: 8
- seed: 42
- gradient_accumulation_steps: 2
- total_train_batch_size: 32
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 500
- num_epochs: 15.0
- mixed_precision_training: Native AMP
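These settings roughly correspond to the `transformers.TrainingArguments` sketch below; the output directory and any option not listed above are assumptions, not taken from this card:
```python
from transformers import TrainingArguments

# Sketch only: mirrors the hyperparameters listed above.
training_args = TrainingArguments(
    output_dir="wav2vec2-speechdat",   # assumption, not from the card
    learning_rate=3e-4,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=8,
    gradient_accumulation_steps=2,     # 16 * 2 = 32 effective train batch size
    seed=42,
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    lr_scheduler_type="linear",
    warmup_steps=500,
    num_train_epochs=15.0,
    fp16=True,                         # native AMP mixed precision
)
```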
### Training results
| Training Loss | Epoch | Step | Validation Loss | Wer |
|:-------------:|:-----:|:------:|:---------------:|:------:|
| No log | 0.01 | 100 | 3.6252 | 1.0 |
| No log | 0.02 | 200 | 3.1906 | 1.0 |
| No log | 0.03 | 300 | 3.1090 | 1.0 |
| No log | 0.04 | 400 | 1.8796 | 0.9955 |
| 6.2575 | 0.05 | 500 | 1.3515 | 0.9058 |
| 6.2575 | 0.06 | 600 | 1.1209 | 0.8328 |
| 6.2575 | 0.07 | 700 | 1.1404 | 0.8309 |
| 6.2575 | 0.09 | 800 | 1.0599 | 0.8021 |
| 6.2575 | 0.1 | 900 | 0.9901 | 0.8335 |
| 0.7737 | 0.11 | 1000 | 0.8846 | 0.7400 |
| 0.7737 | 0.12 | 1100 | 0.9971 | 0.7820 |
| 0.7737 | 0.13 | 1200 | 0.8665 | 0.7123 |
| 0.7737 | 0.14 | 1300 | 0.8490 | 0.7366 |
| 0.7737 | 0.15 | 1400 | 0.8250 | 0.6765 |
| 0.6183 | 0.16 | 1500 | 0.8291 | 0.6965 |
| 0.6183 | 0.17 | 1600 | 0.7946 | 0.6823 |
| 0.6183 | 0.18 | 1700 | 0.8239 | 0.6894 |
| 0.6183 | 0.19 | 1800 | 0.8282 | 0.6796 |
| 0.6183 | 0.2 | 1900 | 0.7645 | 0.6518 |
| 0.561 | 0.21 | 2000 | 0.7530 | 0.6367 |
| 0.561 | 0.22 | 2100 | 0.7296 | 0.6177 |
| 0.561 | 0.24 | 2200 | 0.7527 | 0.6498 |
| 0.561 | 0.25 | 2300 | 0.7210 | 0.6316 |
| 0.561 | 0.26 | 2400 | 0.7938 | 0.6757 |
| 0.5402 | 0.27 | 2500 | 0.7485 | 0.6372 |
| 0.5402 | 0.28 | 2600 | 0.7146 | 0.6133 |
| 0.5402 | 0.29 | 2700 | 0.7308 | 0.6626 |
| 0.5402 | 0.3 | 2800 | 0.7078 | 0.5949 |
| 0.5402 | 0.31 | 2900 | 0.7679 | 0.6373 |
| 0.5303 | 0.32 | 3000 | 0.7263 | 0.6502 |
| 0.5303 | 0.33 | 3100 | 0.6613 | 0.5846 |
| 0.5303 | 0.34 | 3200 | 0.6784 | 0.5783 |
| 0.5303 | 0.35 | 3300 | 0.6908 | 0.5833 |
| 0.5303 | 0.36 | 3400 | 0.6595 | 0.5826 |
| 0.503 | 0.37 | 3500 | 0.6717 | 0.5938 |
| 0.503 | 0.39 | 3600 | 0.6938 | 0.5791 |
| 0.503 | 0.4 | 3700 | 0.6677 | 0.6052 |
| 0.503 | 0.41 | 3800 | 0.6544 | 0.5554 |
| 0.503 | 0.42 | 3900 | 0.6514 | 0.5728 |
| 0.4959 | 0.43 | 4000 | 0.6847 | 0.6188 |
| 0.4959 | 0.44 | 4100 | 0.6626 | 0.5869 |
| 0.4959 | 0.45 | 4200 | 0.6670 | 0.5700 |
| 0.4959 | 0.46 | 4300 | 0.6596 | 0.5846 |
| 0.4959 | 0.47 | 4400 | 0.6523 | 0.5468 |
| 0.4824 | 0.48 | 4500 | 0.6392 | 0.5688 |
| 0.4824 | 0.49 | 4600 | 0.6561 | 0.5687 |
| 0.4824 | 0.5 | 4700 | 0.6697 | 0.5817 |
| 0.4824 | 0.51 | 4800 | 0.6348 | 0.5608 |
| 0.4824 | 0.52 | 4900 | 0.6561 | 0.5600 |
| 0.4714 | 0.54 | 5000 | 0.6522 | 0.6181 |
| 0.4714 | 0.55 | 5100 | 0.6858 | 0.5921 |
| 0.4714 | 0.56 | 5200 | 0.6706 | 0.5497 |
| 0.4714 | 0.57 | 5300 | 0.7123 | 0.5768 |
| 0.4714 | 0.58 | 5400 | 0.6599 | 0.6100 |
| 0.471 | 0.59 | 5500 | 0.6421 | 0.5626 |
| 0.471 | 0.6 | 5600 | 0.6395 | 0.5753 |
| 0.471 | 0.61 | 5700 | 0.6788 | 0.5481 |
| 0.471 | 0.62 | 5800 | 0.6386 | 0.5516 |
| 0.471 | 0.63 | 5900 | 0.6694 | 0.5913 |
| 0.4707 | 0.64 | 6000 | 0.6251 | 0.5699 |
| 0.4707 | 0.65 | 6100 | 0.6243 | 0.5567 |
| 0.4707 | 0.66 | 6200 | 0.6645 | 0.5629 |
| 0.4707 | 0.67 | 6300 | 0.6296 | 0.5895 |
| 0.4707 | 0.69 | 6400 | 0.6078 | 0.5183 |
| 0.4632 | 0.7 | 6500 | 0.6270 | 0.5619 |
| 0.4632 | 0.71 | 6600 | 0.6050 | 0.5336 |
| 0.4632 | 0.72 | 6700 | 0.6185 | 0.5449 |
| 0.4632 | 0.73 | 6800 | 0.6281 | 0.5645 |
| 0.4632 | 0.74 | 6900 | 0.5877 | 0.5084 |
| 0.4514 | 0.75 | 7000 | 0.6199 | 0.5403 |
| 0.4514 | 0.76 | 7100 | 0.6293 | 0.5275 |
| 0.4514 | 0.77 | 7200 | 0.6290 | 0.5447 |
| 0.4514 | 0.78 | 7300 | 0.6130 | 0.5373 |
| 0.4514 | 0.79 | 7400 | 0.6138 | 0.5285 |
| 0.4457 | 0.8 | 7500 | 0.6040 | 0.5259 |
| 0.4457 | 0.81 | 7600 | 0.6220 | 0.5686 |
| 0.4457 | 0.82 | 7700 | 0.5915 | 0.5164 |
| 0.4457 | 0.84 | 7800 | 0.6270 | 0.5289 |
| 0.4457 | 0.85 | 7900 | 0.6224 | 0.5515 |
| 0.4458 | 0.86 | 8000 | 0.6161 | 0.5323 |
| 0.4458 | 0.87 | 8100 | 0.5827 | 0.5122 |
| 0.4458 | 0.88 | 8200 | 0.6067 | 0.5202 |
| 0.4458 | 0.89 | 8300 | 0.6087 | 0.5192 |
| 0.4458 | 0.9 | 8400 | 0.6859 | 0.5796 |
| 0.4409 | 0.91 | 8500 | 0.6180 | 0.5131 |
| 0.4409 | 0.92 | 8600 | 0.5945 | 0.4948 |
| 0.4409 | 0.93 | 8700 | 0.5967 | 0.5532 |
| 0.4409 | 0.94 | 8800 | 0.5770 | 0.4961 |
| 0.4409 | 0.95 | 8900 | 0.5809 | 0.5203 |
| 0.4305 | 0.96 | 9000 | 0.5805 | 0.5039 |
| 0.4305 | 0.97 | 9100 | 0.5873 | 0.5188 |
| 0.4305 | 0.98 | 9200 | 0.6277 | 0.5516 |
| 0.4305 | 1.0 | 9300 | 0.5727 | 0.5052 |
| 0.4305 | 1.01 | 9400 | 0.5858 | 0.5123 |
| 0.4264 | 1.02 | 9500 | 0.5692 | 0.4968 |
| 0.4264 | 1.03 | 9600 | 0.5954 | 0.5117 |
| 0.4264 | 1.04 | 9700 | 0.5904 | 0.5076 |
| 0.4264 | 1.05 | 9800 | 0.6046 | 0.5101 |
| 0.4264 | 1.06 | 9900 | 0.5616 | 0.4926 |
| 0.4176 | 1.07 | 10000 | 0.5971 | 0.5368 |
| 0.4176 | 1.08 | 10100 | 0.5706 | 0.4940 |
| 0.4176 | 1.09 | 10200 | 0.5612 | 0.5032 |
| 0.4176 | 1.1 | 10300 | 0.5672 | 0.4944 |
| 0.4176 | 1.11 | 10400 | 0.5915 | 0.5218 |
| 0.4033 | 1.12 | 10500 | 0.5706 | 0.5051 |
| 0.4033 | 1.13 | 10600 | 0.5661 | 0.4934 |
| 0.4033 | 1.15 | 10700 | 0.5724 | 0.4903 |
| 0.4033 | 1.16 | 10800 | 0.5792 | 0.4940 |
| 0.4033 | 1.17 | 10900 | 0.5744 | 0.4911 |
| 0.392 | 1.18 | 11000 | 0.5767 | 0.5162 |
| 0.392 | 1.19 | 11100 | 0.5588 | 0.4835 |
| 0.392 | 1.2 | 11200 | 0.5609 | 0.4922 |
| 0.392 | 1.21 | 11300 | 0.5890 | 0.4914 |
| 0.392 | 1.22 | 11400 | 0.5525 | 0.4897 |
| 0.387 | 1.23 | 11500 | 0.5704 | 0.5051 |
| 0.387 | 1.24 | 11600 | 0.5539 | 0.5014 |
| 0.387 | 1.25 | 11700 | 0.5473 | 0.4882 |
| 0.387 | 1.26 | 11800 | 0.5662 | 0.5004 |
| 0.387 | 1.27 | 11900 | 0.5785 | 0.5220 |
| 0.3956 | 1.28 | 12000 | 0.5990 | 0.5114 |
| 0.3956 | 1.3 | 12100 | 0.5497 | 0.4895 |
| 0.3956 | 1.31 | 12200 | 0.5538 | 0.4895 |
| 0.3956 | 1.32 | 12300 | 0.5652 | 0.4913 |
| 0.3956 | 1.33 | 12400 | 0.5682 | 0.5128 |
| 0.4043 | 1.34 | 12500 | 0.5830 | 0.4999 |
| 0.4043 | 1.35 | 12600 | 0.5686 | 0.4865 |
| 0.4043 | 1.36 | 12700 | 0.5688 | 0.4937 |
| 0.4043 | 1.37 | 12800 | 0.5753 | 0.5034 |
| 0.4043 | 1.38 | 12900 | 0.5898 | 0.4865 |
| 0.3997 | 1.39 | 13000 | 0.5723 | 0.4963 |
| 0.3997 | 1.4 | 13100 | 0.5767 | 0.4986 |
| 0.3997 | 1.41 | 13200 | 0.5960 | 0.5084 |
| 0.3997 | 1.42 | 13300 | 0.5859 | 0.5096 |
| 0.3997 | 1.43 | 13400 | 0.5491 | 0.4784 |
| 0.3997 | 1.45 | 13500 | 0.5636 | 0.5049 |
| 0.3997 | 1.46 | 13600 | 0.5667 | 0.4708 |
| 0.3997 | 1.47 | 13700 | 0.5757 | 0.4862 |
| 0.3997 | 1.48 | 13800 | 0.5444 | 0.4816 |
| 0.3997 | 1.49 | 13900 | 0.5557 | 0.4792 |
| 0.3954 | 1.5 | 14000 | 0.5437 | 0.4810 |
| 0.3954 | 1.51 | 14100 | 0.5489 | 0.4674 |
| 0.3954 | 1.52 | 14200 | 0.5415 | 0.4674 |
| 0.3954 | 1.53 | 14300 | 0.5481 | 0.4902 |
| 0.3954 | 1.54 | 14400 | 0.5474 | 0.4763 |
| 0.3814 | 1.55 | 14500 | 0.5588 | 0.4731 |
| 0.3814 | 1.56 | 14600 | 0.5746 | 0.4820 |
| 0.3814 | 1.57 | 14700 | 0.5676 | 0.4884 |
| 0.3814 | 1.58 | 14800 | 0.5495 | 0.4711 |
| 0.3814 | 1.6 | 14900 | 0.5565 | 0.4782 |
| 0.3877 | 1.61 | 15000 | 0.5671 | 0.5135 |
| 0.3877 | 1.62 | 15100 | 0.5512 | 0.4868 |
| 0.3877 | 1.63 | 15200 | 0.5683 | 0.4650 |
| 0.3877 | 1.64 | 15300 | 0.5427 | 0.4717 |
| 0.3877 | 1.65 | 15400 | 0.5519 | 0.4651 |
| 0.387 | 1.66 | 15500 | 0.5327 | 0.4456 |
| 0.387 | 1.67 | 15600 | 0.5371 | 0.4673 |
| 0.387 | 1.68 | 15700 | 0.5337 | 0.4705 |
| 0.387 | 1.69 | 15800 | 0.5606 | 0.4992 |
| 0.387 | 1.7 | 15900 | 0.5254 | 0.4613 |
| 0.3877 | 1.71 | 16000 | 0.5619 | 0.4882 |
| 0.3877 | 1.72 | 16100 | 0.5212 | 0.4560 |
| 0.3877 | 1.73 | 16200 | 0.5369 | 0.4696 |
| 0.3877 | 1.75 | 16300 | 0.5392 | 0.4677 |
| 0.3877 | 1.76 | 16400 | 0.5353 | 0.4768 |
| 0.3739 | 1.77 | 16500 | 0.5435 | 0.4777 |
| 0.3739 | 1.78 | 16600 | 0.5343 | 0.4884 |
| 0.3739 | 1.79 | 16700 | 0.5309 | 0.4942 |
| 0.3739 | 1.8 | 16800 | 0.5373 | 0.4727 |
| 0.3739 | 1.81 | 16900 | 0.5550 | 0.4686 |
| 0.3884 | 1.82 | 17000 | 0.5486 | 0.4826 |
| 0.3884 | 1.83 | 17100 | 0.5508 | 0.4862 |
| 0.3884 | 1.84 | 17200 | 0.5423 | 0.4855 |
| 0.3884 | 1.85 | 17300 | 0.5478 | 0.4730 |
| 0.3884 | 1.86 | 17400 | 0.5438 | 0.4938 |
| 0.3842 | 1.87 | 17500 | 0.5571 | 0.4818 |
| 0.3842 | 1.88 | 17600 | 0.5402 | 0.4753 |
| 0.3842 | 1.9 | 17700 | 0.5679 | 0.4827 |
| 0.3842 | 1.91 | 17800 | 0.5385 | 0.4642 |
| 0.3842 | 1.92 | 17900 | 0.5519 | 0.4942 |
| 0.3953 | 1.93 | 18000 | 0.5559 | 0.4745 |
| 0.3953 | 1.94 | 18100 | 0.5657 | 0.4963 |
| 0.3953 | 1.95 | 18200 | 0.5296 | 0.4642 |
| 0.3953 | 1.96 | 18300 | 0.5529 | 0.4907 |
| 0.3953 | 1.97 | 18400 | 0.5380 | 0.4536 |
| 0.3745 | 1.98 | 18500 | 0.5276 | 0.4678 |
| 0.3745 | 1.99 | 18600 | 0.5544 | 0.4854 |
| 0.3745 | 2.0 | 18700 | 0.5195 | 0.4535 |
| 0.3745 | 2.01 | 18800 | 0.5165 | 0.4635 |
| 0.3745 | 2.02 | 18900 | 0.5062 | 0.4431 |
| 0.3538 | 2.03 | 19000 | 0.5255 | 0.4509 |
| 0.3538 | 2.04 | 19100 | 0.5125 | 0.4512 |
| 0.3538 | 2.06 | 19200 | 0.5105 | 0.4504 |
| 0.3538 | 2.07 | 19300 | 0.5000 | 0.4490 |
| 0.3538 | 2.08 | 19400 | 0.5150 | 0.4520 |
| 0.356 | 2.09 | 19500 | 0.5053 | 0.4383 |
| 0.356 | 2.1 | 19600 | 0.5085 | 0.4417 |
| 0.356 | 2.11 | 19700 | 0.5229 | 0.4490 |
| 0.356 | 2.12 | 19800 | 0.5326 | 0.4492 |
| 0.356 | 2.13 | 19900 | 0.5139 | 0.4491 |
| 0.3474 | 2.14 | 20000 | 0.5134 | 0.4384 |
| 0.3474 | 2.15 | 20100 | 0.5498 | 0.4606 |
| 0.3474 | 2.16 | 20200 | 0.5324 | 0.4540 |
| 0.3474 | 2.17 | 20300 | 0.5338 | 0.4548 |
| 0.3474 | 2.18 | 20400 | 0.5076 | 0.4425 |
| 0.345 | 2.19 | 20500 | 0.5253 | 0.4550 |
| 0.345 | 2.21 | 20600 | 0.5125 | 0.4618 |
| 0.345 | 2.22 | 20700 | 0.5171 | 0.4487 |
| 0.345 | 2.23 | 20800 | 0.5232 | 0.4464 |
| 0.345 | 2.24 | 20900 | 0.5298 | 0.4588 |
| 0.341 | 2.25 | 21000 | 0.5342 | 0.4576 |
| 0.341 | 2.26 | 21100 | 0.5515 | 0.4678 |
| 0.341 | 2.27 | 21200 | 0.5041 | 0.4495 |
| 0.341 | 2.28 | 21300 | 0.5169 | 0.4473 |
| 0.341 | 2.29 | 21400 | 0.5227 | 0.4494 |
| 0.354 | 2.3 | 21500 | 0.5214 | 0.4458 |
| 0.354 | 2.31 | 21600 | 0.5303 | 0.4587 |
| 0.354 | 2.32 | 21700 | 0.5237 | 0.4597 |
| 0.354 | 2.33 | 21800 | 0.5067 | 0.4460 |
| 0.354 | 2.34 | 21900 | 0.5117 | 0.4560 |
| 0.3333 | 2.36 | 22000 | 0.5104 | 0.4359 |
| 0.3333 | 2.37 | 22100 | 0.5326 | 0.4679 |
| 0.3333 | 2.38 | 22200 | 0.5098 | 0.4510 |
| 0.3333 | 2.39 | 22300 | 0.5044 | 0.4445 |
| 0.3333 | 2.4 | 22400 | 0.5219 | 0.4489 |
| 0.3514 | 2.41 | 22500 | 0.4987 | 0.4433 |
| 0.3514 | 2.42 | 22600 | 0.5009 | 0.4338 |
| 0.3514 | 2.43 | 22700 | 0.5252 | 0.4444 |
| 0.3514 | 2.44 | 22800 | 0.4861 | 0.4269 |
| 0.3514 | 2.45 | 22900 | 0.5157 | 0.4421 |
| 0.3444 | 2.46 | 23000 | 0.5277 | 0.4426 |
| 0.3444 | 2.47 | 23100 | 0.5213 | 0.4378 |
| 0.3444 | 2.48 | 23200 | 0.5172 | 0.4482 |
| 0.3444 | 2.49 | 23300 | 0.5142 | 0.4376 |
| 0.3444 | 2.51 | 23400 | 0.5044 | 0.4231 |
| 0.3536 | 2.52 | 23500 | 0.5268 | 0.4496 |
| 0.3536 | 2.53 | 23600 | 0.5176 | 0.4326 |
| 0.3536 | 2.54 | 23700 | 0.5032 | 0.4296 |
| 0.3536 | 2.55 | 23800 | 0.5211 | 0.4460 |
| 0.3536 | 2.56 | 23900 | 0.5093 | 0.4379 |
| 0.337 | 2.57 | 24000 | 0.4990 | 0.4311 |
| 0.337 | 2.58 | 24100 | 0.4962 | 0.4329 |
| 0.337 | 2.59 | 24200 | 0.5033 | 0.4289 |
| 0.337 | 2.6 | 24300 | 0.5260 | 0.4534 |
| 0.337 | 2.61 | 24400 | 0.5309 | 0.4441 |
| 0.3393 | 2.62 | 24500 | 0.5132 | 0.4346 |
| 0.3393 | 2.63 | 24600 | 0.5189 | 0.4233 |
| 0.3393 | 2.64 | 24700 | 0.5074 | 0.4326 |
| 0.3393 | 2.66 | 24800 | 0.5111 | 0.4254 |
| 0.3393 | 2.67 | 24900 | 0.4933 | 0.4254 |
| 0.3334 | 2.68 | 25000 | 0.5046 | 0.4407 |
| 0.3334 | 2.69 | 25100 | 0.5010 | 0.4404 |
| 0.3334 | 2.7 | 25200 | 0.5045 | 0.4236 |
| 0.3334 | 2.71 | 25300 | 0.4938 | 0.4305 |
| 0.3334 | 2.72 | 25400 | 0.5021 | 0.4383 |
| 0.3366 | 2.73 | 25500 | 0.4953 | 0.4202 |
| 0.3366 | 2.74 | 25600 | 0.4985 | 0.4338 |
| 0.3366 | 2.75 | 25700 | 0.4765 | 0.4161 |
| 0.3366 | 2.76 | 25800 | 0.4873 | 0.4292 |
| 0.3366 | 2.77 | 25900 | 0.4998 | 0.4189 |
| 0.3359 | 2.78 | 26000 | 0.4991 | 0.4248 |
| 0.3359 | 2.79 | 26100 | 0.5012 | 0.4307 |
| 0.3359 | 2.81 | 26200 | 0.5081 | 0.4151 |
| 0.3359 | 2.82 | 26300 | 0.4997 | 0.4305 |
| 0.3359 | 2.83 | 26400 | 0.4969 | 0.4302 |
| 0.3396 | 2.84 | 26500 | 0.4784 | 0.4271 |
| 0.3396 | 2.85 | 26600 | 0.4804 | 0.4149 |
| 0.3396 | 2.86 | 26700 | 0.4900 | 0.4192 |
| 0.3396 | 2.87 | 26800 | 0.5044 | 0.4325 |
| 0.3396 | 2.88 | 26900 | 0.4935 | 0.4376 |
| 0.3356 | 2.89 | 27000 | 0.5007 | 0.4269 |
| 0.3356 | 2.9 | 27100 | 0.4887 | 0.4178 |
| 0.3356 | 2.91 | 27200 | 0.4770 | 0.4170 |
| 0.3356 | 2.92 | 27300 | 0.4847 | 0.4167 |
| 0.3356 | 2.93 | 27400 | 0.4861 | 0.4139 |
| 0.3395 | 2.94 | 27500 | 0.4975 | 0.4291 |
| 0.3395 | 2.95 | 27600 | 0.5056 | 0.4471 |
| 0.3395 | 2.97 | 27700 | 0.5111 | 0.4375 |
| 0.3395 | 2.98 | 27800 | 0.5327 | 0.4577 |
| 0.3395 | 2.99 | 27900 | 0.5067 | 0.4393 |
| 0.3332 | 3.0 | 28000 | 0.4898 | 0.4188 |
| 0.3332 | 3.01 | 28100 | 0.4790 | 0.4093 |
| 0.3332 | 3.02 | 28200 | 0.4828 | 0.4202 |
| 0.3332 | 3.03 | 28300 | 0.4836 | 0.4146 |
| 0.3332 | 3.04 | 28400 | 0.4901 | 0.4242 |
| 0.2984 | 3.05 | 28500 | 0.4772 | 0.4118 |
| 0.2984 | 3.06 | 28600 | 0.5055 | 0.4213 |
| 0.2984 | 3.07 | 28700 | 0.4911 | 0.4100 |
| 0.2984 | 3.08 | 28800 | 0.4737 | 0.4087 |
| 0.2984 | 3.09 | 28900 | 0.4930 | 0.4216 |
| 0.3056 | 3.1 | 29000 | 0.4736 | 0.4109 |
| 0.3056 | 3.12 | 29100 | 0.4863 | 0.4058 |
| 0.3056 | 3.13 | 29200 | 0.4784 | 0.4184 |
| 0.3056 | 3.14 | 29300 | 0.4923 | 0.4240 |
| 0.3056 | 3.15 | 29400 | 0.4846 | 0.4226 |
| 0.2995 | 3.16 | 29500 | 0.4829 | 0.4086 |
| 0.2995 | 3.17 | 29600 | 0.4934 | 0.4240 |
| 0.2995 | 3.18 | 29700 | 0.4893 | 0.4152 |
| 0.2995 | 3.19 | 29800 | 0.4730 | 0.4227 |
| 0.2995 | 3.2 | 29900 | 0.5027 | 0.4330 |
| 0.2926 | 3.21 | 30000 | 0.4903 | 0.4112 |
| 0.2926 | 3.22 | 30100 | 0.4961 | 0.4157 |
| 0.2926 | 3.23 | 30200 | 0.4980 | 0.4269 |
| 0.2926 | 3.24 | 30300 | 0.4896 | 0.4126 |
| 0.2926 | 3.25 | 30400 | 0.4726 | 0.4062 |
| 0.301 | 3.27 | 30500 | 0.4733 | 0.3985 |
| 0.301 | 3.28 | 30600 | 0.4772 | 0.4047 |
| 0.301 | 3.29 | 30700 | 0.4806 | 0.4082 |
| 0.301 | 3.3 | 30800 | 0.4683 | 0.4011 |
| 0.301 | 3.31 | 30900 | 0.4775 | 0.4079 |
| 0.2933 | 3.32 | 31000 | 0.4729 | 0.4083 |
| 0.2933 | 3.33 | 31100 | 0.4628 | 0.4016 |
| 0.2933 | 3.34 | 31200 | 0.4753 | 0.4192 |
| 0.2933 | 3.35 | 31300 | 0.4687 | 0.4185 |
| 0.2933 | 3.36 | 31400 | 0.4806 | 0.4106 |
| 0.2957 | 3.37 | 31500 | 0.4889 | 0.4240 |
| 0.2957 | 3.38 | 31600 | 0.4882 | 0.4182 |
| 0.2957 | 3.39 | 31700 | 0.4798 | 0.4162 |
| 0.2957 | 3.4 | 31800 | 0.4718 | 0.4108 |
| 0.2957 | 3.42 | 31900 | 0.4685 | 0.4101 |
| 0.3039 | 3.43 | 32000 | 0.4816 | 0.4188 |
| 0.3039 | 3.44 | 32100 | 0.4874 | 0.4139 |
| 0.3039 | 3.45 | 32200 | 0.4899 | 0.4115 |
| 0.3039 | 3.46 | 32300 | 0.4852 | 0.4180 |
| 0.3039 | 3.47 | 32400 | 0.5074 | 0.4129 |
| 0.3006 | 3.48 | 32500 | 0.4837 | 0.4076 |
| 0.3006 | 3.49 | 32600 | 0.4927 | 0.4098 |
| 0.3006 | 3.5 | 32700 | 0.4999 | 0.4172 |
| 0.3006 | 3.51 | 32800 | 0.4773 | 0.4194 |
| 0.3006 | 3.52 | 32900 | 0.4859 | 0.4058 |
| 0.3089 | 3.53 | 33000 | 0.4783 | 0.4104 |
| 0.3089 | 3.54 | 33100 | 0.4622 | 0.4020 |
| 0.3089 | 3.55 | 33200 | 0.4840 | 0.4065 |
| 0.3089 | 3.57 | 33300 | 0.4756 | 0.4241 |
| 0.3089 | 3.58 | 33400 | 0.4831 | 0.4170 |
| 0.3061 | 3.59 | 33500 | 0.4794 | 0.4068 |
| 0.3061 | 3.6 | 33600 | 0.4730 | 0.4037 |
| 0.3061 | 3.61 | 33700 | 0.4808 | 0.4138 |
| 0.3061 | 3.62 | 33800 | 0.4924 | 0.4248 |
| 0.3061 | 3.63 | 33900 | 0.4749 | 0.4112 |
| 0.3047 | 3.64 | 34000 | 0.4924 | 0.4326 |
| 0.3047 | 3.65 | 34100 | 0.4745 | 0.4104 |
| 0.3047 | 3.66 | 34200 | 0.4760 | 0.4123 |
| 0.3047 | 3.67 | 34300 | 0.4788 | 0.4066 |
| 0.3047 | 3.68 | 34400 | 0.4627 | 0.4158 |
| 0.3042 | 3.69 | 34500 | 0.4974 | 0.4131 |
| 0.3042 | 3.7 | 34600 | 0.4593 | 0.4063 |
| 0.3042 | 3.72 | 34700 | 0.4549 | 0.3928 |
| 0.3042 | 3.73 | 34800 | 0.4690 | 0.3898 |
| 0.3042 | 3.74 | 34900 | 0.4560 | 0.4007 |
| 0.2963 | 3.75 | 35000 | 0.4606 | 0.3959 |
| 0.2963 | 3.76 | 35100 | 0.4762 | 0.4057 |
| 0.2963 | 3.77 | 35200 | 0.4750 | 0.4034 |
| 0.2963 | 3.78 | 35300 | 0.4772 | 0.4114 |
| 0.2963 | 3.79 | 35400 | 0.4669 | 0.3995 |
| 0.3012 | 3.8 | 35500 | 0.4709 | 0.4090 |
| 0.3012 | 3.81 | 35600 | 0.4722 | 0.4123 |
| 0.3012 | 3.82 | 35700 | 0.4913 | 0.4165 |
| 0.3012 | 3.83 | 35800 | 0.4814 | 0.4063 |
| 0.3012 | 3.84 | 35900 | 0.4869 | 0.4171 |
| 0.3015 | 3.85 | 36000 | 0.4791 | 0.4059 |
| 0.3015 | 3.87 | 36100 | 0.4535 | 0.3976 |
| 0.3015 | 3.88 | 36200 | 0.4706 | 0.4009 |
| 0.3015 | 3.89 | 36300 | 0.4679 | 0.4012 |
| 0.3015 | 3.9 | 36400 | 0.4736 | 0.4096 |
| 0.2965 | 3.91 | 36500 | 0.4756 | 0.4106 |
| 0.2965 | 3.92 | 36600 | 0.4669 | 0.4085 |
| 0.2965 | 3.93 | 36700 | 0.4796 | 0.4054 |
| 0.2965 | 3.94 | 36800 | 0.4583 | 0.3932 |
| 0.2965 | 3.95 | 36900 | 0.4430 | 0.3969 |
| 0.2993 | 3.96 | 37000 | 0.4560 | 0.3914 |
| 0.2993 | 3.97 | 37100 | 0.4739 | 0.4002 |
| 0.2993 | 3.98 | 37200 | 0.4598 | 0.3912 |
| 0.2993 | 3.99 | 37300 | 0.4607 | 0.3907 |
| 0.2993 | 4.0 | 37400 | 0.4709 | 0.3986 |
| 0.2886 | 4.01 | 37500 | 0.4642 | 0.4067 |
| 0.2886 | 4.03 | 37600 | 0.4684 | 0.3984 |
| 0.2886 | 4.04 | 37700 | 0.4690 | 0.3979 |
| 0.2886 | 4.05 | 37800 | 0.4722 | 0.3980 |
| 0.2886 | 4.06 | 37900 | 0.4734 | 0.3927 |
| 0.2534 | 4.07 | 38000 | 0.4724 | 0.3988 |
| 0.2534 | 4.08 | 38100 | 0.4665 | 0.3986 |
| 0.2534 | 4.09 | 38200 | 0.4659 | 0.4036 |
| 0.2534 | 4.1 | 38300 | 0.4694 | 0.3952 |
| 0.2534 | 4.11 | 38400 | 0.4719 | 0.3891 |
| 0.2596 | 4.12 | 38500 | 0.4687 | 0.3994 |
| 0.2596 | 4.13 | 38600 | 0.4705 | 0.3903 |
| 0.2596 | 4.14 | 38700 | 0.4601 | 0.3975 |
| 0.2596 | 4.15 | 38800 | 0.4666 | 0.3971 |
| 0.2596 | 4.16 | 38900 | 0.4772 | 0.3892 |
| 0.2643 | 4.18 | 39000 | 0.4810 | 0.4071 |
| 0.2643 | 4.19 | 39100 | 0.4980 | 0.4167 |
| 0.2643 | 4.2 | 39200 | 0.4657 | 0.3996 |
| 0.2643 | 4.21 | 39300 | 0.4869 | 0.4002 |
| 0.2643 | 4.22 | 39400 | 0.4656 | 0.3913 |
| 0.265 | 4.23 | 39500 | 0.4720 | 0.3947 |
| 0.265 | 4.24 | 39600 | 0.4711 | 0.3970 |
| 0.265 | 4.25 | 39700 | 0.4689 | 0.3933 |
| 0.265 | 4.26 | 39800 | 0.4728 | 0.4017 |
| 0.265 | 4.27 | 39900 | 0.4673 | 0.3847 |
| 0.2644 | 4.28 | 40000 | 0.4636 | 0.3960 |
| 0.2644 | 4.29 | 40100 | 0.4699 | 0.3864 |
| 0.2644 | 4.3 | 40200 | 0.4580 | 0.3874 |
| 0.2644 | 4.31 | 40300 | 0.4763 | 0.3951 |
| 0.2644 | 4.33 | 40400 | 0.4752 | 0.4141 |
| 0.2633 | 4.34 | 40500 | 0.4918 | 0.3994 |
| 0.2633 | 4.35 | 40600 | 0.4783 | 0.4026 |
| 0.2633 | 4.36 | 40700 | 0.4739 | 0.4034 |
| 0.2633 | 4.37 | 40800 | 0.4750 | 0.4000 |
| 0.2633 | 4.38 | 40900 | 0.4608 | 0.3943 |
| 0.2679 | 4.39 | 41000 | 0.4615 | 0.3891 |
| 0.2679 | 4.4 | 41100 | 0.4730 | 0.3984 |
| 0.2679 | 4.41 | 41200 | 0.4728 | 0.4011 |
| 0.2679 | 4.42 | 41300 | 0.4675 | 0.3932 |
| 0.2679 | 4.43 | 41400 | 0.4662 | 0.3929 |
| 0.2682 | 4.44 | 41500 | 0.4490 | 0.3837 |
| 0.2682 | 4.45 | 41600 | 0.4611 | 0.3838 |
| 0.2682 | 4.46 | 41700 | 0.4605 | 0.3945 |
| 0.2682 | 4.48 | 41800 | 0.4730 | 0.3938 |
| 0.2682 | 4.49 | 41900 | 0.4567 | 0.3874 |
| 0.2658 | 4.5 | 42000 | 0.4715 | 0.3869 |
| 0.2658 | 4.51 | 42100 | 0.4514 | 0.3833 |
| 0.2658 | 4.52 | 42200 | 0.4602 | 0.3898 |
| 0.2658 | 4.53 | 42300 | 0.4846 | 0.4022 |
| 0.2658 | 4.54 | 42400 | 0.4474 | 0.3810 |
| 0.2676 | 4.55 | 42500 | 0.4513 | 0.3820 |
| 0.2676 | 4.56 | 42600 | 0.4588 | 0.3928 |
| 0.2676 | 4.57 | 42700 | 0.4601 | 0.3894 |
| 0.2676 | 4.58 | 42800 | 0.4516 | 0.3792 |
| 0.2676 | 4.59 | 42900 | 0.4482 | 0.3848 |
| 0.2693 | 4.6 | 43000 | 0.4695 | 0.4008 |
| 0.2693 | 4.61 | 43100 | 0.4580 | 0.3871 |
| 0.2693 | 4.63 | 43200 | 0.4419 | 0.3857 |
| 0.2693 | 4.64 | 43300 | 0.4534 | 0.3796 |
| 0.2693 | 4.65 | 43400 | 0.4532 | 0.3856 |
| 0.2641 | 4.66 | 43500 | 0.4421 | 0.3809 |
| 0.2641 | 4.67 | 43600 | 0.4400 | 0.3844 |
| 0.2641 | 4.68 | 43700 | 0.4515 | 0.3833 |
| 0.2641 | 4.69 | 43800 | 0.4462 | 0.3808 |
| 0.2641 | 4.7 | 43900 | 0.4741 | 0.3926 |
| 0.2626 | 4.71 | 44000 | 0.4542 | 0.3931 |
| 0.2626 | 4.72 | 44100 | 0.4555 | 0.3885 |
| 0.2626 | 4.73 | 44200 | 0.4505 | 0.3845 |
| 0.2626 | 4.74 | 44300 | 0.4593 | 0.3871 |
| 0.2626 | 4.75 | 44400 | 0.4359 | 0.3830 |
| 0.2648 | 4.76 | 44500 | 0.4387 | 0.3736 |
| 0.2648 | 4.78 | 44600 | 0.4529 | 0.3807 |
| 0.2648 | 4.79 | 44700 | 0.4566 | 0.3837 |
| 0.2648 | 4.8 | 44800 | 0.4557 | 0.4067 |
| 0.2648 | 4.81 | 44900 | 0.4609 | 0.3852 |
| 0.2603 | 4.82 | 45000 | 0.4667 | 0.4005 |
| 0.2603 | 4.83 | 45100 | 0.4666 | 0.3836 |
| 0.2603 | 4.84 | 45200 | 0.4775 | 0.3946 |
| 0.2603 | 4.85 | 45300 | 0.4701 | 0.3925 |
| 0.2603 | 4.86 | 45400 | 0.4579 | 0.3889 |
| 0.2626 | 4.87 | 45500 | 0.4516 | 0.3884 |
| 0.2626 | 4.88 | 45600 | 0.4605 | 0.3878 |
| 0.2626 | 4.89 | 45700 | 0.4576 | 0.3802 |
| 0.2626 | 4.9 | 45800 | 0.4553 | 0.3780 |
| 0.2626 | 4.91 | 45900 | 0.4336 | 0.3752 |
| 0.2602 | 4.93 | 46000 | 0.4419 | 0.3881 |
| 0.2602 | 4.94 | 46100 | 0.4601 | 0.3843 |
| 0.2602 | 4.95 | 46200 | 0.4437 | 0.3956 |
| 0.2602 | 4.96 | 46300 | 0.4524 | 0.3844 |
| 0.2602 | 4.97 | 46400 | 0.4709 | 0.4031 |
| 0.2609 | 4.98 | 46500 | 0.4500 | 0.3872 |
| 0.2609 | 4.99 | 46600 | 0.4366 | 0.3846 |
| 0.2609 | 5.0 | 46700 | 0.4653 | 0.3884 |
| 0.2609 | 5.01 | 46800 | 0.4602 | 0.3932 |
| 0.2609 | 5.02 | 46900 | 0.4668 | 0.3854 |
| 0.2472 | 5.03 | 47000 | 0.4616 | 0.3891 |
| 0.2472 | 5.04 | 47100 | 0.4543 | 0.3836 |
| 0.2472 | 5.05 | 47200 | 0.4526 | 0.3822 |
| 0.2472 | 5.06 | 47300 | 0.4539 | 0.3741 |
| 0.2472 | 5.07 | 47400 | 0.4776 | 0.3818 |
| 0.2278 | 5.09 | 47500 | 0.4771 | 0.3794 |
| 0.2278 | 5.1 | 47600 | 0.4662 | 0.3831 |
| 0.2278 | 5.11 | 47700 | 0.4558 | 0.4032 |
| 0.2278 | 5.12 | 47800 | 0.4904 | 0.3918 |
| 0.2278 | 5.13 | 47900 | 0.4765 | 0.3890 |
| 0.2311 | 5.14 | 48000 | 0.4674 | 0.3882 |
| 0.2311 | 5.15 | 48100 | 0.4609 | 0.3947 |
| 0.2311 | 5.16 | 48200 | 0.4588 | 0.3837 |
| 0.2311 | 5.17 | 48300 | 0.4827 | 0.3845 |
| 0.2311 | 5.18 | 48400 | 0.4711 | 0.3839 |
| 0.229 | 5.19 | 48500 | 0.4583 | 0.3873 |
| 0.229 | 5.2 | 48600 | 0.4800 | 0.3858 |
| 0.229 | 5.21 | 48700 | 0.4611 | 0.3800 |
| 0.229 | 5.22 | 48800 | 0.4504 | 0.3889 |
| 0.229 | 5.24 | 48900 | 0.4569 | 0.3761 |
| 0.2313 | 5.25 | 49000 | 0.4732 | 0.3915 |
| 0.2313 | 5.26 | 49100 | 0.4728 | 0.3832 |
| 0.2313 | 5.27 | 49200 | 0.4667 | 0.3815 |
| 0.2313 | 5.28 | 49300 | 0.4912 | 0.3856 |
| 0.2313 | 5.29 | 49400 | 0.4790 | 0.3946 |
| 0.2266 | 5.3 | 49500 | 0.4597 | 0.3763 |
| 0.2266 | 5.31 | 49600 | 0.4580 | 0.3778 |
| 0.2266 | 5.32 | 49700 | 0.4439 | 0.3721 |
| 0.2266 | 5.33 | 49800 | 0.4611 | 0.3704 |
| 0.2266 | 5.34 | 49900 | 0.4599 | 0.3769 |
| 0.235 | 5.35 | 50000 | 0.4543 | 0.3808 |
| 0.235 | 5.36 | 50100 | 0.4555 | 0.3773 |
| 0.235 | 5.37 | 50200 | 0.4525 | 0.3815 |
| 0.235 | 5.39 | 50300 | 0.4557 | 0.3814 |
| 0.235 | 5.4 | 50400 | 0.4604 | 0.3754 |
| 0.2299 | 5.41 | 50500 | 0.4658 | 0.3770 |
| 0.2299 | 5.42 | 50600 | 0.4658 | 0.3884 |
| 0.2299 | 5.43 | 50700 | 0.4701 | 0.3919 |
| 0.2299 | 5.44 | 50800 | 0.4495 | 0.3818 |
| 0.2299 | 5.45 | 50900 | 0.4703 | 0.3886 |
| 0.2307 | 5.46 | 51000 | 0.4395 | 0.3743 |
| 0.2307 | 5.47 | 51100 | 0.4487 | 0.3751 |
| 0.2307 | 5.48 | 51200 | 0.4355 | 0.3733 |
| 0.2307 | 5.49 | 51300 | 0.4622 | 0.3811 |
| 0.2307 | 5.5 | 51400 | 0.4443 | 0.3801 |
| 0.2383 | 5.51 | 51500 | 0.4411 | 0.3743 |
| 0.2383 | 5.52 | 51600 | 0.4438 | 0.3778 |
| 0.2383 | 5.54 | 51700 | 0.4559 | 0.3784 |
| 0.2383 | 5.55 | 51800 | 0.4309 | 0.3656 |
| 0.2383 | 5.56 | 51900 | 0.4455 | 0.3660 |
| 0.23 | 5.57 | 52000 | 0.4436 | 0.3598 |
| 0.23 | 5.58 | 52100 | 0.4344 | 0.3685 |
| 0.23 | 5.59 | 52200 | 0.4282 | 0.3690 |
| 0.23 | 5.6 | 52300 | 0.4464 | 0.3800 |
| 0.23 | 5.61 | 52400 | 0.4458 | 0.3909 |
| 0.2305 | 5.62 | 52500 | 0.4483 | 0.3756 |
| 0.2305 | 5.63 | 52600 | 0.4547 | 0.3785 |
| 0.2305 | 5.64 | 52700 | 0.4671 | 0.3820 |
| 0.2305 | 5.65 | 52800 | 0.4449 | 0.3658 |
| 0.2305 | 5.66 | 52900 | 0.4596 | 0.3716 |
| 0.2237 | 5.67 | 53000 | 0.4399 | 0.3669 |
| 0.2237 | 5.69 | 53100 | 0.4410 | 0.3719 |
| 0.2237 | 5.7 | 53200 | 0.4574 | 0.3619 |
| 0.2237 | 5.71 | 53300 | 0.4443 | 0.3690 |
| 0.2237 | 5.72 | 53400 | 0.4381 | 0.3678 |
| 0.2337 | 5.73 | 53500 | 0.4490 | 0.3687 |
| 0.2337 | 5.74 | 53600 | 0.4427 | 0.3752 |
| 0.2337 | 5.75 | 53700 | 0.4423 | 0.3858 |
| 0.2337 | 5.76 | 53800 | 0.4702 | 0.3825 |
| 0.2337 | 5.77 | 53900 | 0.4724 | 0.3800 |
| 0.23 | 5.78 | 54000 | 0.4476 | 0.3827 |
| 0.23 | 5.79 | 54100 | 0.4508 | 0.3919 |
| 0.23 | 5.8 | 54200 | 0.4564 | 0.3788 |
| 0.23 | 5.81 | 54300 | 0.4602 | 0.3888 |
| 0.23 | 5.82 | 54400 | 0.4538 | 0.3732 |
| 0.2334 | 5.84 | 54500 | 0.4500 | 0.3808 |
| 0.2334 | 5.85 | 54600 | 0.4475 | 0.3705 |
| 0.2334 | 5.86 | 54700 | 0.4415 | 0.3772 |
| 0.2334 | 5.87 | 54800 | 0.4515 | 0.3771 |
| 0.2334 | 5.88 | 54900 | 0.4410 | 0.3677 |
| 0.2259 | 5.89 | 55000 | 0.4555 | 0.3702 |
| 0.2259 | 5.9 | 55100 | 0.4509 | 0.3894 |
| 0.2259 | 5.91 | 55200 | 0.4472 | 0.3692 |
| 0.2259 | 5.92 | 55300 | 0.4438 | 0.3754 |
| 0.2259 | 5.93 | 55400 | 0.4399 | 0.3698 |
| 0.2289 | 5.94 | 55500 | 0.4496 | 0.3753 |
| 0.2289 | 5.95 | 55600 | 0.4506 | 0.3752 |
| 0.2289 | 5.96 | 55700 | 0.4482 | 0.3766 |
| 0.2289 | 5.97 | 55800 | 0.4415 | 0.3772 |
| 0.2289 | 5.98 | 55900 | 0.4447 | 0.3750 |
| 0.2281 | 6.0 | 56000 | 0.4566 | 0.3842 |
| 0.2281 | 6.01 | 56100 | 0.4694 | 0.3774 |
| 0.2281 | 6.02 | 56200 | 0.4454 | 0.3788 |
| 0.2281 | 6.03 | 56300 | 0.4676 | 0.3718 |
| 0.2281 | 6.04 | 56400 | 0.4650 | 0.3751 |
| 0.1979 | 6.05 | 56500 | 0.4601 | 0.3765 |
| 0.1979 | 6.06 | 56600 | 0.4647 | 0.3840 |
| 0.1979 | 6.07 | 56700 | 0.4782 | 0.3756 |
| 0.1979 | 6.08 | 56800 | 0.4709 | 0.3736 |
| 0.1979 | 6.09 | 56900 | 0.4707 | 0.3734 |
| 0.1923 | 6.1 | 57000 | 0.4704 | 0.3751 |
| 0.1923 | 6.11 | 57100 | 0.4542 | 0.3721 |
| 0.1923 | 6.12 | 57200 | 0.4542 | 0.3735 |
| 0.1923 | 6.13 | 57300 | 0.4587 | 0.3804 |
| 0.1923 | 6.15 | 57400 | 0.4428 | 0.3687 |
| 0.2012 | 6.16 | 57500 | 0.4456 | 0.3748 |
| 0.2012 | 6.17 | 57600 | 0.4578 | 0.3762 |
| 0.2012 | 6.18 | 57700 | 0.4699 | 0.3722 |
| 0.2012 | 6.19 | 57800 | 0.4499 | 0.3756 |
| 0.2012 | 6.2 | 57900 | 0.4633 | 0.3680 |
| 0.1951 | 6.21 | 58000 | 0.4548 | 0.3712 |
| 0.1951 | 6.22 | 58100 | 0.4520 | 0.3759 |
| 0.1951 | 6.23 | 58200 | 0.4458 | 0.3616 |
| 0.1951 | 6.24 | 58300 | 0.4307 | 0.3637 |
| 0.1951 | 6.25 | 58400 | 0.4546 | 0.3621 |
| 0.1967 | 6.26 | 58500 | 0.4459 | 0.3623 |
| 0.1967 | 6.27 | 58600 | 0.4535 | 0.3690 |
| 0.1967 | 6.28 | 58700 | 0.4574 | 0.3771 |
| 0.1967 | 6.3 | 58800 | 0.4493 | 0.3744 |
| 0.1967 | 6.31 | 58900 | 0.4494 | 0.3769 |
| 0.1998 | 6.32 | 59000 | 0.4529 | 0.3644 |
| 0.1998 | 6.33 | 59100 | 0.4416 | 0.3662 |
| 0.1998 | 6.34 | 59200 | 0.4468 | 0.3785 |
| 0.1998 | 6.35 | 59300 | 0.4377 | 0.3664 |
| 0.1998 | 6.36 | 59400 | 0.4647 | 0.3755 |
| 0.2009 | 6.37 | 59500 | 0.4700 | 0.3824 |
| 0.2009 | 6.38 | 59600 | 0.4488 | 0.3685 |
| 0.2009 | 6.39 | 59700 | 0.4649 | 0.3804 |
| 0.2009 | 6.4 | 59800 | 0.4389 | 0.3689 |
| 0.2009 | 6.41 | 59900 | 0.4456 | 0.3531 |
| 0.2007 | 6.42 | 60000 | 0.4572 | 0.3658 |
| 0.2007 | 6.43 | 60100 | 0.4464 | 0.3669 |
| 0.2007 | 6.45 | 60200 | 0.4666 | 0.3711 |
| 0.2007 | 6.46 | 60300 | 0.4399 | 0.3660 |
| 0.2007 | 6.47 | 60400 | 0.4445 | 0.3631 |
| 0.2005 | 6.48 | 60500 | 0.4450 | 0.3621 |
| 0.2005 | 6.49 | 60600 | 0.4346 | 0.3571 |
| 0.2005 | 6.5 | 60700 | 0.4358 | 0.3581 |
| 0.2005 | 6.51 | 60800 | 0.4344 | 0.3646 |
| 0.2005 | 6.52 | 60900 | 0.4377 | 0.3621 |
| 0.2038 | 6.53 | 61000 | 0.4262 | 0.3570 |
| 0.2038 | 6.54 | 61100 | 0.4269 | 0.3614 |
| 0.2038 | 6.55 | 61200 | 0.4297 | 0.3592 |
| 0.2038 | 6.56 | 61300 | 0.4433 | 0.3682 |
| 0.2038 | 6.57 | 61400 | 0.4474 | 0.3644 |
| 0.199 | 6.58 | 61500 | 0.4464 | 0.3678 |
| 0.199 | 6.6 | 61600 | 0.4397 | 0.3562 |
| 0.199 | 6.61 | 61700 | 0.4415 | 0.3612 |
| 0.199 | 6.62 | 61800 | 0.4362 | 0.3601 |
| 0.199 | 6.63 | 61900 | 0.4442 | 0.3623 |
| 0.1995 | 6.64 | 62000 | 0.4558 | 0.3662 |
| 0.1995 | 6.65 | 62100 | 0.4477 | 0.3647 |
| 0.1995 | 6.66 | 62200 | 0.4542 | 0.3699 |
| 0.1995 | 6.67 | 62300 | 0.4411 | 0.3632 |
| 0.1995 | 6.68 | 62400 | 0.4408 | 0.3658 |
| 0.2014 | 6.69 | 62500 | 0.4426 | 0.3691 |
| 0.2014 | 6.7 | 62600 | 0.4246 | 0.3645 |
| 0.2014 | 6.71 | 62700 | 0.4466 | 0.3676 |
| 0.2014 | 6.72 | 62800 | 0.4493 | 0.3566 |
| 0.2014 | 6.73 | 62900 | 0.4336 | 0.3621 |
| 0.2015 | 6.75 | 63000 | 0.4367 | 0.3604 |
| 0.2015 | 6.76 | 63100 | 0.4424 | 0.3754 |
| 0.2015 | 6.77 | 63200 | 0.4679 | 0.3733 |
| 0.2015 | 6.78 | 63300 | 0.4483 | 0.3752 |
| 0.2015 | 6.79 | 63400 | 0.4746 | 0.3822 |
| 0.2048 | 6.8 | 63500 | 0.4340 | 0.3731 |
| 0.2048 | 6.81 | 63600 | 0.4346 | 0.3631 |
| 0.2048 | 6.82 | 63700 | 0.4525 | 0.3680 |
| 0.2048 | 6.83 | 63800 | 0.4360 | 0.3641 |
| 0.2048 | 6.84 | 63900 | 0.4299 | 0.3558 |
| 0.2017 | 6.85 | 64000 | 0.4370 | 0.3533 |
| 0.2017 | 6.86 | 64100 | 0.4293 | 0.3617 |
| 0.2017 | 6.87 | 64200 | 0.4431 | 0.3660 |
| 0.2017 | 6.88 | 64300 | 0.4362 | 0.3688 |
| 0.2017 | 6.9 | 64400 | 0.4507 | 0.3648 |
| 0.2045 | 6.91 | 64500 | 0.4439 | 0.3613 |
| 0.2045 | 6.92 | 64600 | 0.4249 | 0.3493 |
| 0.2045 | 6.93 | 64700 | 0.4362 | 0.3612 |
| 0.2045 | 6.94 | 64800 | 0.4336 | 0.3585 |
| 0.2045 | 6.95 | 64900 | 0.4387 | 0.3568 |
| 0.1977 | 6.96 | 65000 | 0.4313 | 0.3542 |
| 0.1977 | 6.97 | 65100 | 0.4287 | 0.3552 |
| 0.1977 | 6.98 | 65200 | 0.4372 | 0.3586 |
| 0.1977 | 6.99 | 65300 | 0.4378 | 0.3629 |
| 0.1977 | 7.0 | 65400 | 0.4518 | 0.3640 |
| 0.1971 | 7.01 | 65500 | 0.4480 | 0.3557 |
| 0.1971 | 7.02 | 65600 | 0.4530 | 0.3560 |
| 0.1971 | 7.03 | 65700 | 0.4581 | 0.3582 |
| 0.1971 | 7.04 | 65800 | 0.4492 | 0.3543 |
| 0.1971 | 7.06 | 65900 | 0.4448 | 0.3608 |
| 0.1672 | 7.07 | 66000 | 0.4469 | 0.3543 |
| 0.1672 | 7.08 | 66100 | 0.4262 | 0.3488 |
| 0.1672 | 7.09 | 66200 | 0.4289 | 0.3570 |
| 0.1672 | 7.1 | 66300 | 0.4455 | 0.3545 |
| 0.1672 | 7.11 | 66400 | 0.4449 | 0.3563 |
| 0.169 | 7.12 | 66500 | 0.4555 | 0.3565 |
| 0.169 | 7.13 | 66600 | 0.4432 | 0.3656 |
| 0.169 | 7.14 | 66700 | 0.4399 | 0.3610 |
| 0.169 | 7.15 | 66800 | 0.4383 | 0.3554 |
| 0.169 | 7.16 | 66900 | 0.4376 | 0.3536 |
| 0.1724 | 7.17 | 67000 | 0.4383 | 0.3572 |
| 0.1724 | 7.18 | 67100 | 0.4452 | 0.3535 |
| 0.1724 | 7.19 | 67200 | 0.4610 | 0.3668 |
| 0.1724 | 7.21 | 67300 | 0.4534 | 0.3546 |
| 0.1724 | 7.22 | 67400 | 0.4506 | 0.3604 |
| 0.1729 | 7.23 | 67500 | 0.4463 | 0.3507 |
| 0.1729 | 7.24 | 67600 | 0.4440 | 0.3630 |
| 0.1729 | 7.25 | 67700 | 0.4361 | 0.3550 |
| 0.1729 | 7.26 | 67800 | 0.4397 | 0.3643 |
| 0.1729 | 7.27 | 67900 | 0.4328 | 0.3548 |
| 0.1736 | 7.28 | 68000 | 0.4546 | 0.3614 |
| 0.1736 | 7.29 | 68100 | 0.4506 | 0.3558 |
| 0.1736 | 7.3 | 68200 | 0.4361 | 0.3513 |
| 0.1736 | 7.31 | 68300 | 0.4223 | 0.3500 |
| 0.1736 | 7.32 | 68400 | 0.4474 | 0.3497 |
| 0.1733 | 7.33 | 68500 | 0.4303 | 0.3549 |
| 0.1733 | 7.34 | 68600 | 0.4265 | 0.3483 |
| 0.1733 | 7.36 | 68700 | 0.4339 | 0.3558 |
| 0.1733 | 7.37 | 68800 | 0.4266 | 0.3491 |
| 0.1733 | 7.38 | 68900 | 0.4423 | 0.3565 |
| 0.1764 | 7.39 | 69000 | 0.4410 | 0.3554 |
| 0.1764 | 7.4 | 69100 | 0.4482 | 0.3703 |
| 0.1764 | 7.41 | 69200 | 0.4480 | 0.3641 |
| 0.1764 | 7.42 | 69300 | 0.4361 | 0.3500 |
| 0.1764 | 7.43 | 69400 | 0.4399 | 0.3632 |
| 0.1711 | 7.44 | 69500 | 0.4383 | 0.3591 |
| 0.1711 | 7.45 | 69600 | 0.4523 | 0.3636 |
| 0.1711 | 7.46 | 69700 | 0.4388 | 0.3502 |
| 0.1711 | 7.47 | 69800 | 0.4305 | 0.3565 |
| 0.1711 | 7.48 | 69900 | 0.4290 | 0.3538 |
| 0.1748 | 7.49 | 70000 | 0.4359 | 0.3511 |
| 0.1748 | 7.51 | 70100 | 0.4315 | 0.3460 |
| 0.1748 | 7.52 | 70200 | 0.4268 | 0.3555 |
| 0.1748 | 7.53 | 70300 | 0.4267 | 0.3455 |
| 0.1748 | 7.54 | 70400 | 0.4359 | 0.3517 |
| 0.1739 | 7.55 | 70500 | 0.4299 | 0.3491 |
| 0.1739 | 7.56 | 70600 | 0.4423 | 0.3409 |
| 0.1739 | 7.57 | 70700 | 0.4251 | 0.3420 |
| 0.1739 | 7.58 | 70800 | 0.4300 | 0.3414 |
| 0.1739 | 7.59 | 70900 | 0.4349 | 0.3422 |
| 0.1763 | 7.6 | 71000 | 0.4328 | 0.3418 |
| 0.1763 | 7.61 | 71100 | 0.4313 | 0.3452 |
| 0.1763 | 7.62 | 71200 | 0.4240 | 0.3534 |
| 0.1763 | 7.63 | 71300 | 0.4274 | 0.3474 |
| 0.1763 | 7.64 | 71400 | 0.4304 | 0.3467 |
| 0.171 | 7.66 | 71500 | 0.4331 | 0.3510 |
| 0.171 | 7.67 | 71600 | 0.4263 | 0.3478 |
| 0.171 | 7.68 | 71700 | 0.4301 | 0.3447 |
| 0.171 | 7.69 | 71800 | 0.4046 | 0.3452 |
| 0.171 | 7.7 | 71900 | 0.4300 | 0.3528 |
| 0.1792 | 7.71 | 72000 | 0.4253 | 0.3492 |
| 0.1792 | 7.72 | 72100 | 0.4296 | 0.3491 |
| 0.1792 | 7.73 | 72200 | 0.4118 | 0.3451 |
| 0.1792 | 7.74 | 72300 | 0.4348 | 0.3345 |
| 0.1792 | 7.75 | 72400 | 0.4283 | 0.3447 |
| 0.1801 | 7.76 | 72500 | 0.4232 | 0.3449 |
| 0.1801 | 7.77 | 72600 | 0.4491 | 0.3486 |
| 0.1801 | 7.78 | 72700 | 0.4261 | 0.3343 |
| 0.1801 | 7.79 | 72800 | 0.4382 | 0.3455 |
| 0.1801 | 7.81 | 72900 | 0.4301 | 0.3415 |
| 0.1731 | 7.82 | 73000 | 0.4236 | 0.3438 |
| 0.1731 | 7.83 | 73100 | 0.4257 | 0.3419 |
| 0.1731 | 7.84 | 73200 | 0.4368 | 0.3410 |
| 0.1731 | 7.85 | 73300 | 0.4207 | 0.3398 |
| 0.1731 | 7.86 | 73400 | 0.4118 | 0.3418 |
| 0.1748 | 7.87 | 73500 | 0.4357 | 0.3429 |
| 0.1748 | 7.88 | 73600 | 0.4277 | 0.3452 |
| 0.1748 | 7.89 | 73700 | 0.4173 | 0.3476 |
| 0.1748 | 7.9 | 73800 | 0.4191 | 0.3478 |
| 0.1748 | 7.91 | 73900 | 0.4197 | 0.3457 |
| 0.1745 | 7.92 | 74000 | 0.4197 | 0.3436 |
| 0.1745 | 7.93 | 74100 | 0.4253 | 0.3512 |
| 0.1745 | 7.94 | 74200 | 0.4217 | 0.3463 |
| 0.1745 | 7.95 | 74300 | 0.4305 | 0.3473 |
| 0.1745 | 7.97 | 74400 | 0.4215 | 0.3507 |
| 0.1743 | 7.98 | 74500 | 0.4127 | 0.3408 |
| 0.1743 | 7.99 | 74600 | 0.4191 | 0.3468 |
| 0.1743 | 8.0 | 74700 | 0.4381 | 0.3491 |
| 0.1743 | 8.01 | 74800 | 0.4510 | 0.3477 |
| 0.1743 | 8.02 | 74900 | 0.4482 | 0.3471 |
| 0.1588 | 8.03 | 75000 | 0.4471 | 0.3430 |
| 0.1588 | 8.04 | 75100 | 0.4296 | 0.3393 |
| 0.1588 | 8.05 | 75200 | 0.4480 | 0.3398 |
| 0.1588 | 8.06 | 75300 | 0.4302 | 0.3452 |
| 0.1588 | 8.07 | 75400 | 0.4410 | 0.3431 |
| 0.144 | 8.08 | 75500 | 0.4263 | 0.3455 |
| 0.144 | 8.09 | 75600 | 0.4523 | 0.3495 |
| 0.144 | 8.1 | 75700 | 0.4455 | 0.3511 |
| 0.144 | 8.12 | 75800 | 0.4379 | 0.3445 |
| 0.144 | 8.13 | 75900 | 0.4418 | 0.3411 |
| 0.1483 | 8.14 | 76000 | 0.4491 | 0.3463 |
| 0.1483 | 8.15 | 76100 | 0.4386 | 0.3467 |
| 0.1483 | 8.16 | 76200 | 0.4327 | 0.3524 |
| 0.1483 | 8.17 | 76300 | 0.4360 | 0.3613 |
| 0.1483 | 8.18 | 76400 | 0.4352 | 0.3498 |
| 0.1541 | 8.19 | 76500 | 0.4376 | 0.3414 |
| 0.1541 | 8.2 | 76600 | 0.4408 | 0.3464 |
| 0.1541 | 8.21 | 76700 | 0.4415 | 0.3445 |
| 0.1541 | 8.22 | 76800 | 0.4455 | 0.3482 |
| 0.1541 | 8.23 | 76900 | 0.4542 | 0.3415 |
| 0.1479 | 8.24 | 77000 | 0.4462 | 0.3426 |
| 0.1479 | 8.25 | 77100 | 0.4460 | 0.3413 |
| 0.1479 | 8.27 | 77200 | 0.4434 | 0.3375 |
| 0.1479 | 8.28 | 77300 | 0.4397 | 0.3473 |
| 0.1479 | 8.29 | 77400 | 0.4379 | 0.3484 |
| 0.1479 | 8.3 | 77500 | 0.4441 | 0.3494 |
| 0.1479 | 8.31 | 77600 | 0.4301 | 0.3466 |
| 0.1479 | 8.32 | 77700 | 0.4420 | 0.3474 |
| 0.1479 | 8.33 | 77800 | 0.4520 | 0.3589 |
| 0.1479 | 8.34 | 77900 | 0.4283 | 0.3482 |
| 0.1531 | 8.35 | 78000 | 0.4325 | 0.3446 |
| 0.1531 | 8.36 | 78100 | 0.4380 | 0.3469 |
| 0.1531 | 8.37 | 78200 | 0.4463 | 0.3503 |
| 0.1531 | 8.38 | 78300 | 0.4479 | 0.3499 |
| 0.1531 | 8.39 | 78400 | 0.4477 | 0.3529 |
| 0.1507 | 8.4 | 78500 | 0.4709 | 0.3551 |
| 0.1507 | 8.42 | 78600 | 0.4533 | 0.3531 |
| 0.1507 | 8.43 | 78700 | 0.4507 | 0.3522 |
| 0.1507 | 8.44 | 78800 | 0.4562 | 0.3583 |
| 0.1507 | 8.45 | 78900 | 0.4421 | 0.3577 |
| 0.1545 | 8.46 | 79000 | 0.4485 | 0.3547 |
| 0.1545 | 8.47 | 79100 | 0.4389 | 0.3465 |
| 0.1545 | 8.48 | 79200 | 0.4397 | 0.3502 |
| 0.1545 | 8.49 | 79300 | 0.4403 | 0.3471 |
| 0.1545 | 8.5 | 79400 | 0.4394 | 0.3482 |
| 0.153 | 8.51 | 79500 | 0.4393 | 0.3474 |
| 0.153 | 8.52 | 79600 | 0.4343 | 0.3495 |
| 0.153 | 8.53 | 79700 | 0.4395 | 0.3539 |
| 0.153 | 8.54 | 79800 | 0.4497 | 0.3535 |
| 0.153 | 8.55 | 79900 | 0.4443 | 0.3540 |
| 0.1558 | 8.57 | 80000 | 0.4495 | 0.3554 |
| 0.1558 | 8.58 | 80100 | 0.4387 | 0.3460 |
| 0.1558 | 8.59 | 80200 | 0.4378 | 0.3520 |
| 0.1558 | 8.6 | 80300 | 0.4446 | 0.3527 |
| 0.1558 | 8.61 | 80400 | 0.4513 | 0.3508 |
| 0.1527 | 8.62 | 80500 | 0.4396 | 0.3537 |
| 0.1527 | 8.63 | 80600 | 0.4405 | 0.3507 |
| 0.1527 | 8.64 | 80700 | 0.4398 | 0.3450 |
| 0.1527 | 8.65 | 80800 | 0.4458 | 0.3508 |
| 0.1527 | 8.66 | 80900 | 0.4380 | 0.3465 |
| 0.1522 | 8.67 | 81000 | 0.4373 | 0.3482 |
| 0.1522 | 8.68 | 81100 | 0.4363 | 0.3410 |
| 0.1522 | 8.69 | 81200 | 0.4290 | 0.3447 |
| 0.1522 | 8.7 | 81300 | 0.4409 | 0.3515 |
| 0.1522 | 8.72 | 81400 | 0.4363 | 0.3433 |
| 0.1502 | 8.73 | 81500 | 0.4313 | 0.3429 |
| 0.1502 | 8.74 | 81600 | 0.4263 | 0.3451 |
| 0.1502 | 8.75 | 81700 | 0.4297 | 0.3452 |
| 0.1502 | 8.76 | 81800 | 0.4449 | 0.3411 |
| 0.1502 | 8.77 | 81900 | 0.4465 | 0.3455 |
| 0.151 | 8.78 | 82000 | 0.4274 | 0.3425 |
| 0.151 | 8.79 | 82100 | 0.4525 | 0.3532 |
| 0.151 | 8.8 | 82200 | 0.4282 | 0.3502 |
| 0.151 | 8.81 | 82300 | 0.4189 | 0.3507 |
| 0.151 | 8.82 | 82400 | 0.4379 | 0.3451 |
| 0.1529 | 8.83 | 82500 | 0.4378 | 0.3419 |
| 0.1529 | 8.84 | 82600 | 0.4283 | 0.3392 |
| 0.1529 | 8.85 | 82700 | 0.4359 | 0.3399 |
| 0.1529 | 8.87 | 82800 | 0.4308 | 0.3358 |
| 0.1529 | 8.88 | 82900 | 0.4296 | 0.3335 |
| 0.151 | 8.89 | 83000 | 0.4387 | 0.3372 |
| 0.151 | 8.9 | 83100 | 0.4335 | 0.3420 |
| 0.151 | 8.91 | 83200 | 0.4329 | 0.3374 |
| 0.151 | 8.92 | 83300 | 0.4353 | 0.3404 |
| 0.151 | 8.93 | 83400 | 0.4384 | 0.3447 |
| 0.1522 | 8.94 | 83500 | 0.4444 | 0.3353 |
| 0.1522 | 8.95 | 83600 | 0.4413 | 0.3481 |
| 0.1522 | 8.96 | 83700 | 0.4247 | 0.3474 |
| 0.1522 | 8.97 | 83800 | 0.4197 | 0.3386 |
| 0.1522 | 8.98 | 83900 | 0.4216 | 0.3384 |
| 0.1511 | 8.99 | 84000 | 0.4159 | 0.3396 |
| 0.1511 | 9.0 | 84100 | 0.4213 | 0.3416 |
| 0.1511 | 9.01 | 84200 | 0.4399 | 0.3379 |
| 0.1511 | 9.03 | 84300 | 0.4318 | 0.3437 |
| 0.1511 | 9.04 | 84400 | 0.4356 | 0.3371 |
| 0.1336 | 9.05 | 84500 | 0.4403 | 0.3373 |
| 0.1336 | 9.06 | 84600 | 0.4545 | 0.3381 |
| 0.1336 | 9.07 | 84700 | 0.4313 | 0.3331 |
| 0.1336 | 9.08 | 84800 | 0.4257 | 0.3360 |
| 0.1336 | 9.09 | 84900 | 0.4285 | 0.3372 |
| 0.1315 | 9.1 | 85000 | 0.4378 | 0.3332 |
| 0.1315 | 9.11 | 85100 | 0.4352 | 0.3282 |
| 0.1315 | 9.12 | 85200 | 0.4360 | 0.3339 |
| 0.1315 | 9.13 | 85300 | 0.4404 | 0.3365 |
| 0.1315 | 9.14 | 85400 | 0.4345 | 0.3356 |
| 0.1272 | 9.15 | 85500 | 0.4468 | 0.3375 |
| 0.1272 | 9.16 | 85600 | 0.4331 | 0.3363 |
| 0.1272 | 9.18 | 85700 | 0.4330 | 0.3309 |
| 0.1272 | 9.19 | 85800 | 0.4424 | 0.3301 |
| 0.1272 | 9.2 | 85900 | 0.4520 | 0.3326 |
| 0.1289 | 9.21 | 86000 | 0.4421 | 0.3326 |
| 0.1289 | 9.22 | 86100 | 0.4480 | 0.3335 |
| 0.1289 | 9.23 | 86200 | 0.4351 | 0.3380 |
| 0.1289 | 9.24 | 86300 | 0.4350 | 0.3427 |
| 0.1289 | 9.25 | 86400 | 0.4362 | 0.3320 |
| 0.1333 | 9.26 | 86500 | 0.4260 | 0.3342 |
| 0.1333 | 9.27 | 86600 | 0.4357 | 0.3360 |
| 0.1333 | 9.28 | 86700 | 0.4505 | 0.3372 |
| 0.1333 | 9.29 | 86800 | 0.4342 | 0.3359 |
| 0.1333 | 9.3 | 86900 | 0.4295 | 0.3367 |
| 0.1318 | 9.31 | 87000 | 0.4320 | 0.3335 |
| 0.1318 | 9.33 | 87100 | 0.4332 | 0.3344 |
| 0.1318 | 9.34 | 87200 | 0.4373 | 0.3330 |
| 0.1318 | 9.35 | 87300 | 0.4490 | 0.3316 |
| 0.1318 | 9.36 | 87400 | 0.4188 | 0.3429 |
| 0.1275 | 9.37 | 87500 | 0.4502 | 0.3383 |
| 0.1275 | 9.38 | 87600 | 0.4463 | 0.3387 |
| 0.1275 | 9.39 | 87700 | 0.4385 | 0.3308 |
| 0.1275 | 9.4 | 87800 | 0.4464 | 0.3414 |
| 0.1275 | 9.41 | 87900 | 0.4563 | 0.3405 |
| 0.1331 | 9.42 | 88000 | 0.4286 | 0.3374 |
| 0.1331 | 9.43 | 88100 | 0.4389 | 0.3352 |
| 0.1331 | 9.44 | 88200 | 0.4301 | 0.3340 |
| 0.1331 | 9.45 | 88300 | 0.4417 | 0.3373 |
| 0.1331 | 9.46 | 88400 | 0.4450 | 0.3425 |
| 0.1266 | 9.48 | 88500 | 0.4456 | 0.3451 |
| 0.1266 | 9.49 | 88600 | 0.4517 | 0.3403 |
| 0.1266 | 9.5 | 88700 | 0.4447 | 0.3419 |
| 0.1266 | 9.51 | 88800 | 0.4486 | 0.3428 |
| 0.1266 | 9.52 | 88900 | 0.4591 | 0.3411 |
| 0.1316 | 9.53 | 89000 | 0.4481 | 0.3387 |
| 0.1316 | 9.54 | 89100 | 0.4308 | 0.3349 |
| 0.1316 | 9.55 | 89200 | 0.4411 | 0.3405 |
| 0.1316 | 9.56 | 89300 | 0.4378 | 0.3390 |
| 0.1316 | 9.57 | 89400 | 0.4448 | 0.3365 |
| 0.1325 | 9.58 | 89500 | 0.4575 | 0.3416 |
| 0.1325 | 9.59 | 89600 | 0.4608 | 0.3422 |
| 0.1325 | 9.6 | 89700 | 0.4396 | 0.3350 |
| 0.1325 | 9.61 | 89800 | 0.4380 | 0.3398 |
| 0.1325 | 9.63 | 89900 | 0.4337 | 0.3388 |
| 0.1324 | 9.64 | 90000 | 0.4376 | 0.3388 |
| 0.1324 | 9.65 | 90100 | 0.4185 | 0.3380 |
| 0.1324 | 9.66 | 90200 | 0.4394 | 0.3384 |
| 0.1324 | 9.67 | 90300 | 0.4472 | 0.3400 |
| 0.1324 | 9.68 | 90400 | 0.4523 | 0.3390 |
| 0.1361 | 9.69 | 90500 | 0.4466 | 0.3389 |
| 0.1361 | 9.7 | 90600 | 0.4414 | 0.3383 |
| 0.1361 | 9.71 | 90700 | 0.4288 | 0.3348 |
| 0.1361 | 9.72 | 90800 | 0.4445 | 0.3374 |
| 0.1361 | 9.73 | 90900 | 0.4252 | 0.3322 |
| 0.1353 | 9.74 | 91000 | 0.4312 | 0.3338 |
| 0.1353 | 9.75 | 91100 | 0.4326 | 0.3319 |
| 0.1353 | 9.76 | 91200 | 0.4212 | 0.3400 |
| 0.1353 | 9.78 | 91300 | 0.4191 | 0.3374 |
| 0.1353 | 9.79 | 91400 | 0.4399 | 0.3332 |
| 0.1308 | 9.8 | 91500 | 0.4340 | 0.3349 |
| 0.1308 | 9.81 | 91600 | 0.4280 | 0.3379 |
| 0.1308 | 9.82 | 91700 | 0.4419 | 0.3376 |
| 0.1308 | 9.83 | 91800 | 0.4309 | 0.3333 |
| 0.1308 | 9.84 | 91900 | 0.4274 | 0.3352 |
| 0.1321 | 9.85 | 92000 | 0.4147 | 0.3337 |
| 0.1321 | 9.86 | 92100 | 0.4252 | 0.3316 |
| 0.1321 | 9.87 | 92200 | 0.4378 | 0.3381 |
| 0.1321 | 9.88 | 92300 | 0.4265 | 0.3355 |
| 0.1321 | 9.89 | 92400 | 0.4247 | 0.3331 |
| 0.1358 | 9.9 | 92500 | 0.4099 | 0.3379 |
| 0.1358 | 9.91 | 92600 | 0.4142 | 0.3356 |
| 0.1358 | 9.93 | 92700 | 0.4220 | 0.3332 |
| 0.1358 | 9.94 | 92800 | 0.4219 | 0.3369 |
| 0.1358 | 9.95 | 92900 | 0.4178 | 0.3332 |
| 0.1331 | 9.96 | 93000 | 0.4305 | 0.3353 |
| 0.1331 | 9.97 | 93100 | 0.4324 | 0.3307 |
| 0.1331 | 9.98 | 93200 | 0.4315 | 0.3344 |
| 0.1331 | 9.99 | 93300 | 0.4212 | 0.3314 |
| 0.1331 | 10.0 | 93400 | 0.4203 | 0.3332 |
| 0.1304 | 10.01 | 93500 | 0.4424 | 0.3351 |
| 0.1304 | 10.02 | 93600 | 0.4474 | 0.3341 |
| 0.1304 | 10.03 | 93700 | 0.4466 | 0.3378 |
| 0.1304 | 10.04 | 93800 | 0.4388 | 0.3327 |
| 0.1304 | 10.05 | 93900 | 0.4312 | 0.3360 |
| 0.1152 | 10.06 | 94000 | 0.4471 | 0.3307 |
| 0.1152 | 10.07 | 94100 | 0.4472 | 0.3316 |
| 0.1152 | 10.09 | 94200 | 0.4462 | 0.3324 |
| 0.1152 | 10.1 | 94300 | 0.4383 | 0.3344 |
| 0.1152 | 10.11 | 94400 | 0.4671 | 0.3365 |
| 0.1097 | 10.12 | 94500 | 0.4596 | 0.3307 |
| 0.1097 | 10.13 | 94600 | 0.4517 | 0.3382 |
| 0.1097 | 10.14 | 94700 | 0.4285 | 0.3380 |
| 0.1097 | 10.15 | 94800 | 0.4628 | 0.3363 |
| 0.1097 | 10.16 | 94900 | 0.4478 | 0.3365 |
| 0.1153 | 10.17 | 95000 | 0.4464 | 0.3346 |
| 0.1153 | 10.18 | 95100 | 0.4432 | 0.3392 |
| 0.1153 | 10.19 | 95200 | 0.4326 | 0.3330 |
| 0.1153 | 10.2 | 95300 | 0.4480 | 0.3327 |
| 0.1153 | 10.21 | 95400 | 0.4436 | 0.3260 |
| 0.1149 | 10.22 | 95500 | 0.4549 | 0.3311 |
| 0.1149 | 10.24 | 95600 | 0.4573 | 0.3353 |
| 0.1149 | 10.25 | 95700 | 0.4373 | 0.3369 |
| 0.1149 | 10.26 | 95800 | 0.4459 | 0.3358 |
| 0.1149 | 10.27 | 95900 | 0.4288 | 0.3270 |
| 0.1169 | 10.28 | 96000 | 0.4474 | 0.3330 |
| 0.1169 | 10.29 | 96100 | 0.4524 | 0.3298 |
| 0.1169 | 10.3 | 96200 | 0.4517 | 0.3258 |
| 0.1169 | 10.31 | 96300 | 0.4366 | 0.3288 |
| 0.1169 | 10.32 | 96400 | 0.4574 | 0.3324 |
| 0.1137 | 10.33 | 96500 | 0.4507 | 0.3343 |
| 0.1137 | 10.34 | 96600 | 0.4414 | 0.3301 |
| 0.1137 | 10.35 | 96700 | 0.4524 | 0.3366 |
| 0.1137 | 10.36 | 96800 | 0.4563 | 0.3435 |
| 0.1137 | 10.37 | 96900 | 0.4315 | 0.3375 |
| 0.1162 | 10.39 | 97000 | 0.4429 | 0.3365 |
| 0.1162 | 10.4 | 97100 | 0.4489 | 0.3380 |
| 0.1162 | 10.41 | 97200 | 0.4352 | 0.3357 |
| 0.1162 | 10.42 | 97300 | 0.4390 | 0.3319 |
| 0.1162 | 10.43 | 97400 | 0.4570 | 0.3303 |
| 0.1151 | 10.44 | 97500 | 0.4692 | 0.3344 |
| 0.1151 | 10.45 | 97600 | 0.4605 | 0.3332 |
| 0.1151 | 10.46 | 97700 | 0.4457 | 0.3238 |
| 0.1151 | 10.47 | 97800 | 0.4298 | 0.3304 |
| 0.1151 | 10.48 | 97900 | 0.4619 | 0.3274 |
| 0.1105 | 10.49 | 98000 | 0.4362 | 0.3244 |
| 0.1105 | 10.5 | 98100 | 0.4568 | 0.3289 |
| 0.1105 | 10.51 | 98200 | 0.4522 | 0.3336 |
| 0.1105 | 10.52 | 98300 | 0.4302 | 0.3257 |
| 0.1105 | 10.54 | 98400 | 0.4505 | 0.3238 |
| 0.1164 | 10.55 | 98500 | 0.4430 | 0.3301 |
| 0.1164 | 10.56 | 98600 | 0.4575 | 0.3283 |
| 0.1164 | 10.57 | 98700 | 0.4447 | 0.3277 |
| 0.1164 | 10.58 | 98800 | 0.4400 | 0.3301 |
| 0.1164 | 10.59 | 98900 | 0.4427 | 0.3288 |
| 0.1113 | 10.6 | 99000 | 0.4538 | 0.3248 |
| 0.1113 | 10.61 | 99100 | 0.4519 | 0.3298 |
| 0.1113 | 10.62 | 99200 | 0.4290 | 0.3249 |
| 0.1113 | 10.63 | 99300 | 0.4501 | 0.3220 |
| 0.1113 | 10.64 | 99400 | 0.4410 | 0.3218 |
| 0.1159 | 10.65 | 99500 | 0.4478 | 0.3211 |
| 0.1159 | 10.66 | 99600 | 0.4462 | 0.3250 |
| 0.1159 | 10.67 | 99700 | 0.4543 | 0.3302 |
| 0.1159 | 10.69 | 99800 | 0.4462 | 0.3301 |
| 0.1159 | 10.7 | 99900 | 0.4468 | 0.3229 |
| 0.1161 | 10.71 | 100000 | 0.4515 | 0.3241 |
| 0.1161 | 10.72 | 100100 | 0.4404 | 0.3276 |
| 0.1161 | 10.73 | 100200 | 0.4439 | 0.3222 |
| 0.1161 | 10.74 | 100300 | 0.4392 | 0.3257 |
| 0.1161 | 10.75 | 100400 | 0.4476 | 0.3314 |
| 0.1199 | 10.76 | 100500 | 0.4493 | 0.3270 |
| 0.1199 | 10.77 | 100600 | 0.4462 | 0.3224 |
| 0.1199 | 10.78 | 100700 | 0.4467 | 0.3311 |
| 0.1199 | 10.79 | 100800 | 0.4198 | 0.3228 |
| 0.1199 | 10.8 | 100900 | 0.4349 | 0.3225 |
| 0.1146 | 10.81 | 101000 | 0.4371 | 0.3272 |
| 0.1146 | 10.82 | 101100 | 0.4525 | 0.3210 |
| 0.1146 | 10.84 | 101200 | 0.4293 | 0.3219 |
| 0.1146 | 10.85 | 101300 | 0.4238 | 0.3216 |
| 0.1146 | 10.86 | 101400 | 0.4377 | 0.3252 |
| 0.118 | 10.87 | 101500 | 0.4371 | 0.3208 |
| 0.118 | 10.88 | 101600 | 0.4216 | 0.3174 |
| 0.118 | 10.89 | 101700 | 0.4312 | 0.3189 |
| 0.118 | 10.9 | 101800 | 0.4317 | 0.3204 |
| 0.118 | 10.91 | 101900 | 0.4303 | 0.3235 |
| 0.114 | 10.92 | 102000 | 0.4416 | 0.3158 |
| 0.114 | 10.93 | 102100 | 0.4240 | 0.3195 |
| 0.114 | 10.94 | 102200 | 0.4340 | 0.3149 |
| 0.114 | 10.95 | 102300 | 0.4311 | 0.3215 |
| 0.114 | 10.96 | 102400 | 0.4261 | 0.3238 |
| 0.1152 | 10.97 | 102500 | 0.4263 | 0.3206 |
| 0.1152 | 10.98 | 102600 | 0.4325 | 0.3294 |
| 0.1152 | 11.0 | 102700 | 0.4327 | 0.3187 |
| 0.1152 | 11.01 | 102800 | 0.4423 | 0.3195 |
| 0.1152 | 11.02 | 102900 | 0.4341 | 0.3277 |
| 0.1084 | 11.03 | 103000 | 0.4232 | 0.3243 |
| 0.1084 | 11.04 | 103100 | 0.4355 | 0.3184 |
| 0.1084 | 11.05 | 103200 | 0.4374 | 0.3274 |
| 0.1084 | 11.06 | 103300 | 0.4484 | 0.3305 |
| 0.1084 | 11.07 | 103400 | 0.4423 | 0.3226 |
| 0.1003 | 11.08 | 103500 | 0.4518 | 0.3224 |
| 0.1003 | 11.09 | 103600 | 0.4518 | 0.3243 |
| 0.1003 | 11.1 | 103700 | 0.4282 | 0.3207 |
| 0.1003 | 11.11 | 103800 | 0.4418 | 0.3220 |
| 0.1003 | 11.12 | 103900 | 0.4411 | 0.3216 |
| 0.1009 | 11.13 | 104000 | 0.4474 | 0.3238 |
| 0.1009 | 11.15 | 104100 | 0.4406 | 0.3245 |
| 0.1009 | 11.16 | 104200 | 0.4384 | 0.3242 |
| 0.1009 | 11.17 | 104300 | 0.4702 | 0.3265 |
| 0.1009 | 11.18 | 104400 | 0.4611 | 0.3266 |
| 0.0992 | 11.19 | 104500 | 0.4425 | 0.3211 |
| 0.0992 | 11.2 | 104600 | 0.4575 | 0.3222 |
| 0.0992 | 11.21 | 104700 | 0.4449 | 0.3208 |
| 0.0992 | 11.22 | 104800 | 0.4715 | 0.3208 |
| 0.0992 | 11.23 | 104900 | 0.4469 | 0.3223 |
| 0.1021 | 11.24 | 105000 | 0.4536 | 0.3225 |
| 0.1021 | 11.25 | 105100 | 0.4629 | 0.3234 |
| 0.1021 | 11.26 | 105200 | 0.4550 | 0.3205 |
| 0.1021 | 11.27 | 105300 | 0.4598 | 0.3213 |
| 0.1021 | 11.28 | 105400 | 0.4522 | 0.3179 |
| 0.1021 | 11.3 | 105500 | 0.4658 | 0.3211 |
| 0.1021 | 11.31 | 105600 | 0.4664 | 0.3196 |
| 0.1021 | 11.32 | 105700 | 0.4736 | 0.3177 |
| 0.1021 | 11.33 | 105800 | 0.4587 | 0.3158 |
| 0.1021 | 11.34 | 105900 | 0.4589 | 0.3194 |
| 0.1025 | 11.35 | 106000 | 0.4692 | 0.3214 |
| 0.1025 | 11.36 | 106100 | 0.4382 | 0.3181 |
| 0.1025 | 11.37 | 106200 | 0.4556 | 0.3185 |
| 0.1025 | 11.38 | 106300 | 0.4445 | 0.3191 |
| 0.1025 | 11.39 | 106400 | 0.4379 | 0.3163 |
| 0.104 | 11.4 | 106500 | 0.4454 | 0.3220 |
| 0.104 | 11.41 | 106600 | 0.4463 | 0.3201 |
| 0.104 | 11.42 | 106700 | 0.4550 | 0.3173 |
| 0.104 | 11.43 | 106800 | 0.4404 | 0.3168 |
| 0.104 | 11.45 | 106900 | 0.4569 | 0.3170 |
| 0.1016 | 11.46 | 107000 | 0.4529 | 0.3168 |
| 0.1016 | 11.47 | 107100 | 0.4587 | 0.3173 |
| 0.1016 | 11.48 | 107200 | 0.4505 | 0.3172 |
| 0.1016 | 11.49 | 107300 | 0.4489 | 0.3159 |
| 0.1016 | 11.5 | 107400 | 0.4528 | 0.3130 |
| 0.1001 | 11.51 | 107500 | 0.4473 | 0.3181 |
| 0.1001 | 11.52 | 107600 | 0.4434 | 0.3176 |
| 0.1001 | 11.53 | 107700 | 0.4597 | 0.3186 |
| 0.1001 | 11.54 | 107800 | 0.4351 | 0.3159 |
| 0.1001 | 11.55 | 107900 | 0.4471 | 0.3185 |
| 0.1005 | 11.56 | 108000 | 0.4457 | 0.3191 |
| 0.1005 | 11.57 | 108100 | 0.4544 | 0.3293 |
| 0.1005 | 11.58 | 108200 | 0.4436 | 0.3221 |
| 0.1005 | 11.6 | 108300 | 0.4642 | 0.3270 |
| 0.1005 | 11.61 | 108400 | 0.4474 | 0.3270 |
| 0.1031 | 11.62 | 108500 | 0.4458 | 0.3196 |
| 0.1031 | 11.63 | 108600 | 0.4723 | 0.3205 |
| 0.1031 | 11.64 | 108700 | 0.4507 | 0.3226 |
| 0.1031 | 11.65 | 108800 | 0.4424 | 0.3213 |
| 0.1031 | 11.66 | 108900 | 0.4511 | 0.3213 |
| 0.1014 | 11.67 | 109000 | 0.4422 | 0.3205 |
| 0.1014 | 11.68 | 109100 | 0.4498 | 0.3180 |
| 0.1014 | 11.69 | 109200 | 0.4303 | 0.3167 |
| 0.1014 | 11.7 | 109300 | 0.4483 | 0.3108 |
| 0.1014 | 11.71 | 109400 | 0.4548 | 0.3169 |
| 0.0981 | 11.72 | 109500 | 0.4406 | 0.3122 |
| 0.0981 | 11.73 | 109600 | 0.4293 | 0.3114 |
| 0.0981 | 11.75 | 109700 | 0.4369 | 0.3159 |
| 0.0981 | 11.76 | 109800 | 0.4364 | 0.3164 |
| 0.0981 | 11.77 | 109900 | 0.4358 | 0.3189 |
| 0.1023 | 11.78 | 110000 | 0.4281 | 0.3183 |
| 0.1023 | 11.79 | 110100 | 0.4404 | 0.3159 |
| 0.1023 | 11.8 | 110200 | 0.4471 | 0.3135 |
| 0.1023 | 11.81 | 110300 | 0.4498 | 0.3201 |
| 0.1023 | 11.82 | 110400 | 0.4527 | 0.3161 |
| 0.0988 | 11.83 | 110500 | 0.4440 | 0.3173 |
| 0.0988 | 11.84 | 110600 | 0.4356 | 0.3136 |
| 0.0988 | 11.85 | 110700 | 0.4308 | 0.3135 |
| 0.0988 | 11.86 | 110800 | 0.4294 | 0.3192 |
| 0.0988 | 11.87 | 110900 | 0.4241 | 0.3168 |
| 0.1022 | 11.88 | 111000 | 0.4420 | 0.3157 |
| 0.1022 | 11.9 | 111100 | 0.4313 | 0.3125 |
| 0.1022 | 11.91 | 111200 | 0.4213 | 0.3168 |
| 0.1022 | 11.92 | 111300 | 0.4352 | 0.3135 |
| 0.1022 | 11.93 | 111400 | 0.4297 | 0.3116 |
| 0.1032 | 11.94 | 111500 | 0.4218 | 0.3137 |
| 0.1032 | 11.95 | 111600 | 0.4334 | 0.3123 |
| 0.1032 | 11.96 | 111700 | 0.4373 | 0.3175 |
| 0.1032 | 11.97 | 111800 | 0.4299 | 0.3160 |
| 0.1032 | 11.98 | 111900 | 0.4326 | 0.3189 |
| 0.0969 | 11.99 | 112000 | 0.4208 | 0.3186 |
| 0.0969 | 12.0 | 112100 | 0.4385 | 0.3169 |
| 0.0969 | 12.01 | 112200 | 0.4453 | 0.3156 |
| 0.0969 | 12.02 | 112300 | 0.4596 | 0.3133 |
| 0.0969 | 12.03 | 112400 | 0.4509 | 0.3093 |
| 0.0901 | 12.04 | 112500 | 0.4535 | 0.3138 |
| 0.0901 | 12.06 | 112600 | 0.4371 | 0.3144 |
| 0.0901 | 12.07 | 112700 | 0.4499 | 0.3154 |
| 0.0901 | 12.08 | 112800 | 0.4615 | 0.3198 |
| 0.0901 | 12.09 | 112900 | 0.4523 | 0.3177 |
| 0.0889 | 12.1 | 113000 | 0.4412 | 0.3130 |
| 0.0889 | 12.11 | 113100 | 0.4471 | 0.3181 |
| 0.0889 | 12.12 | 113200 | 0.4530 | 0.3169 |
| 0.0889 | 12.13 | 113300 | 0.4670 | 0.3149 |
| 0.0889 | 12.14 | 113400 | 0.4594 | 0.3141 |
| 0.0917 | 12.15 | 113500 | 0.4623 | 0.3127 |
| 0.0917 | 12.16 | 113600 | 0.4460 | 0.3133 |
| 0.0917 | 12.17 | 113700 | 0.4512 | 0.3191 |
| 0.0917 | 12.18 | 113800 | 0.4681 | 0.3136 |
| 0.0917 | 12.19 | 113900 | 0.4564 | 0.3129 |
| 0.0906 | 12.21 | 114000 | 0.4482 | 0.3107 |
| 0.0906 | 12.22 | 114100 | 0.4595 | 0.3133 |
| 0.0906 | 12.23 | 114200 | 0.4510 | 0.3118 |
| 0.0906 | 12.24 | 114300 | 0.4472 | 0.3131 |
| 0.0906 | 12.25 | 114400 | 0.4499 | 0.3130 |
| 0.0918 | 12.26 | 114500 | 0.4503 | 0.3138 |
| 0.0918 | 12.27 | 114600 | 0.4518 | 0.3135 |
| 0.0918 | 12.28 | 114700 | 0.4493 | 0.3114 |
| 0.0918 | 12.29 | 114800 | 0.4574 | 0.3133 |
| 0.0918 | 12.3 | 114900 | 0.4683 | 0.3200 |
| 0.0869 | 12.31 | 115000 | 0.4608 | 0.3165 |
| 0.0869 | 12.32 | 115100 | 0.4618 | 0.3183 |
| 0.0869 | 12.33 | 115200 | 0.4689 | 0.3173 |
| 0.0869 | 12.34 | 115300 | 0.4681 | 0.3224 |
| 0.0869 | 12.36 | 115400 | 0.4576 | 0.3231 |
| 0.0885 | 12.37 | 115500 | 0.4831 | 0.3176 |
| 0.0885 | 12.38 | 115600 | 0.4602 | 0.3181 |
| 0.0885 | 12.39 | 115700 | 0.4493 | 0.3168 |
| 0.0885 | 12.4 | 115800 | 0.4564 | 0.3149 |
| 0.0885 | 12.41 | 115900 | 0.4585 | 0.3158 |
| 0.091 | 12.42 | 116000 | 0.4713 | 0.3193 |
| 0.091 | 12.43 | 116100 | 0.4581 | 0.3139 |
| 0.091 | 12.44 | 116200 | 0.4637 | 0.3131 |
| 0.091 | 12.45 | 116300 | 0.4572 | 0.3124 |
| 0.091 | 12.46 | 116400 | 0.4489 | 0.3163 |
| 0.0886 | 12.47 | 116500 | 0.4679 | 0.3159 |
| 0.0886 | 12.48 | 116600 | 0.4712 | 0.3151 |
| 0.0886 | 12.49 | 116700 | 0.4750 | 0.3186 |
| 0.0886 | 12.51 | 116800 | 0.4673 | 0.3176 |
| 0.0886 | 12.52 | 116900 | 0.4601 | 0.3113 |
| 0.0917 | 12.53 | 117000 | 0.4341 | 0.3125 |
| 0.0917 | 12.54 | 117100 | 0.4462 | 0.3077 |
| 0.0917 | 12.55 | 117200 | 0.4502 | 0.3099 |
| 0.0917 | 12.56 | 117300 | 0.4482 | 0.3116 |
| 0.0917 | 12.57 | 117400 | 0.4459 | 0.3131 |
| 0.0881 | 12.58 | 117500 | 0.4464 | 0.3122 |
| 0.0881 | 12.59 | 117600 | 0.4471 | 0.3125 |
| 0.0881 | 12.6 | 117700 | 0.4319 | 0.3122 |
| 0.0881 | 12.61 | 117800 | 0.4421 | 0.3103 |
| 0.0881 | 12.62 | 117900 | 0.4326 | 0.3108 |
| 0.0913 | 12.63 | 118000 | 0.4414 | 0.3068 |
| 0.0913 | 12.64 | 118100 | 0.4421 | 0.3083 |
| 0.0913 | 12.66 | 118200 | 0.4449 | 0.3103 |
| 0.0913 | 12.67 | 118300 | 0.4380 | 0.3128 |
| 0.0913 | 12.68 | 118400 | 0.4390 | 0.3136 |
| 0.0921 | 12.69 | 118500 | 0.4452 | 0.3104 |
| 0.0921 | 12.7 | 118600 | 0.4378 | 0.3122 |
| 0.0921 | 12.71 | 118700 | 0.4459 | 0.3080 |
| 0.0921 | 12.72 | 118800 | 0.4369 | 0.3051 |
| 0.0921 | 12.73 | 118900 | 0.4474 | 0.3076 |
| 0.0886 | 12.74 | 119000 | 0.4508 | 0.3066 |
| 0.0886 | 12.75 | 119100 | 0.4456 | 0.3097 |
| 0.0886 | 12.76 | 119200 | 0.4503 | 0.3078 |
| 0.0886 | 12.77 | 119300 | 0.4460 | 0.3081 |
| 0.0886 | 12.78 | 119400 | 0.4404 | 0.3080 |
| 0.0897 | 12.79 | 119500 | 0.4351 | 0.3100 |
| 0.0897 | 12.81 | 119600 | 0.4446 | 0.3120 |
| 0.0897 | 12.82 | 119700 | 0.4407 | 0.3098 |
| 0.0897 | 12.83 | 119800 | 0.4406 | 0.3084 |
| 0.0897 | 12.84 | 119900 | 0.4492 | 0.3067 |
| 0.09 | 12.85 | 120000 | 0.4546 | 0.3098 |
| 0.09 | 12.86 | 120100 | 0.4547 | 0.3074 |
| 0.09 | 12.87 | 120200 | 0.4517 | 0.3111 |
| 0.09 | 12.88 | 120300 | 0.4320 | 0.3064 |
| 0.09 | 12.89 | 120400 | 0.4294 | 0.3072 |
| 0.0898 | 12.9 | 120500 | 0.4412 | 0.3050 |
| 0.0898 | 12.91 | 120600 | 0.4254 | 0.3074 |
| 0.0898 | 12.92 | 120700 | 0.4409 | 0.3071 |
| 0.0898 | 12.93 | 120800 | 0.4362 | 0.3071 |
| 0.0898 | 12.94 | 120900 | 0.4579 | 0.3090 |
| 0.0892 | 12.95 | 121000 | 0.4492 | 0.3059 |
| 0.0892 | 12.97 | 121100 | 0.4404 | 0.3105 |
| 0.0892 | 12.98 | 121200 | 0.4365 | 0.3066 |
| 0.0892 | 12.99 | 121300 | 0.4368 | 0.3048 |
| 0.0892 | 13.0 | 121400 | 0.4410 | 0.3033 |
| 0.085 | 13.01 | 121500 | 0.4450 | 0.3047 |
| 0.085 | 13.02 | 121600 | 0.4633 | 0.3013 |
| 0.085 | 13.03 | 121700 | 0.4600 | 0.3054 |
| 0.085 | 13.04 | 121800 | 0.4541 | 0.3047 |
| 0.085 | 13.05 | 121900 | 0.4546 | 0.3058 |
| 0.0791 | 13.06 | 122000 | 0.4536 | 0.3045 |
| 0.0791 | 13.07 | 122100 | 0.4589 | 0.3066 |
| 0.0791 | 13.08 | 122200 | 0.4581 | 0.3057 |
| 0.0791 | 13.09 | 122300 | 0.4546 | 0.3048 |
| 0.0791 | 13.1 | 122400 | 0.4673 | 0.3006 |
| 0.0789 | 13.12 | 122500 | 0.4551 | 0.3019 |
| 0.0789 | 13.13 | 122600 | 0.4467 | 0.3025 |
| 0.0789 | 13.14 | 122700 | 0.4593 | 0.3015 |
| 0.0789 | 13.15 | 122800 | 0.4598 | 0.3037 |
| 0.0789 | 13.16 | 122900 | 0.4532 | 0.3038 |
| 0.077 | 13.17 | 123000 | 0.4607 | 0.3015 |
| 0.077 | 13.18 | 123100 | 0.4385 | 0.3005 |
| 0.077 | 13.19 | 123200 | 0.4590 | 0.3041 |
| 0.077 | 13.2 | 123300 | 0.4359 | 0.3047 |
| 0.077 | 13.21 | 123400 | 0.4458 | 0.3039 |
| 0.0771 | 13.22 | 123500 | 0.4506 | 0.3075 |
| 0.0771 | 13.23 | 123600 | 0.4457 | 0.3079 |
| 0.0771 | 13.24 | 123700 | 0.4448 | 0.3048 |
| 0.0771 | 13.25 | 123800 | 0.4398 | 0.3036 |
| 0.0771 | 13.27 | 123900 | 0.4510 | 0.3055 |
| 0.0804 | 13.28 | 124000 | 0.4507 | 0.3059 |
| 0.0804 | 13.29 | 124100 | 0.4544 | 0.3076 |
| 0.0804 | 13.3 | 124200 | 0.4534 | 0.3073 |
| 0.0804 | 13.31 | 124300 | 0.4441 | 0.3061 |
| 0.0804 | 13.32 | 124400 | 0.4391 | 0.3075 |
| 0.0774 | 13.33 | 124500 | 0.4527 | 0.3063 |
| 0.0774 | 13.34 | 124600 | 0.4638 | 0.3057 |
| 0.0774 | 13.35 | 124700 | 0.4541 | 0.3064 |
| 0.0774 | 13.36 | 124800 | 0.4617 | 0.3078 |
| 0.0774 | 13.37 | 124900 | 0.4584 | 0.3041 |
| 0.0795 | 13.38 | 125000 | 0.4663 | 0.3032 |
| 0.0795 | 13.39 | 125100 | 0.4546 | 0.3025 |
| 0.0795 | 13.4 | 125200 | 0.4616 | 0.3021 |
| 0.0795 | 13.42 | 125300 | 0.4603 | 0.3016 |
| 0.0795 | 13.43 | 125400 | 0.4616 | 0.3040 |
| 0.0791 | 13.44 | 125500 | 0.4548 | 0.3021 |
| 0.0791 | 13.45 | 125600 | 0.4560 | 0.3025 |
| 0.0791 | 13.46 | 125700 | 0.4516 | 0.3037 |
| 0.0791 | 13.47 | 125800 | 0.4500 | 0.3013 |
| 0.0791 | 13.48 | 125900 | 0.4540 | 0.3009 |
| 0.0776 | 13.49 | 126000 | 0.4581 | 0.3026 |
| 0.0776 | 13.5 | 126100 | 0.4598 | 0.3028 |
| 0.0776 | 13.51 | 126200 | 0.4587 | 0.3038 |
| 0.0776 | 13.52 | 126300 | 0.4514 | 0.3024 |
| 0.0776 | 13.53 | 126400 | 0.4495 | 0.3036 |
| 0.0793 | 13.54 | 126500 | 0.4556 | 0.3016 |
| 0.0793 | 13.55 | 126600 | 0.4603 | 0.3025 |
| 0.0793 | 13.57 | 126700 | 0.4496 | 0.2995 |
| 0.0793 | 13.58 | 126800 | 0.4483 | 0.2969 |
| 0.0793 | 13.59 | 126900 | 0.4462 | 0.2980 |
| 0.0816 | 13.6 | 127000 | 0.4521 | 0.2982 |
| 0.0816 | 13.61 | 127100 | 0.4580 | 0.3019 |
| 0.0816 | 13.62 | 127200 | 0.4669 | 0.3009 |
| 0.0816 | 13.63 | 127300 | 0.4513 | 0.3017 |
| 0.0816 | 13.64 | 127400 | 0.4602 | 0.3015 |
| 0.0779 | 13.65 | 127500 | 0.4592 | 0.2998 |
| 0.0779 | 13.66 | 127600 | 0.4700 | 0.2981 |
| 0.0779 | 13.67 | 127700 | 0.4727 | 0.2978 |
| 0.0779 | 13.68 | 127800 | 0.4600 | 0.2983 |
| 0.0779 | 13.69 | 127900 | 0.4472 | 0.2978 |
| 0.0779 | 13.7 | 128000 | 0.4483 | 0.2984 |
| 0.0779 | 13.72 | 128100 | 0.4512 | 0.2968 |
| 0.0779 | 13.73 | 128200 | 0.4549 | 0.2988 |
| 0.0779 | 13.74 | 128300 | 0.4576 | 0.2992 |
| 0.0779 | 13.75 | 128400 | 0.4400 | 0.2974 |
| 0.0793 | 13.76 | 128500 | 0.4433 | 0.3009 |
| 0.0793 | 13.77 | 128600 | 0.4456 | 0.2982 |
| 0.0793 | 13.78 | 128700 | 0.4560 | 0.3019 |
| 0.0793 | 13.79 | 128800 | 0.4551 | 0.3008 |
| 0.0793 | 13.8 | 128900 | 0.4513 | 0.3007 |
| 0.0769 | 13.81 | 129000 | 0.4518 | 0.3008 |
| 0.0769 | 13.82 | 129100 | 0.4567 | 0.2981 |
| 0.0769 | 13.83 | 129200 | 0.4437 | 0.2985 |
| 0.0769 | 13.84 | 129300 | 0.4424 | 0.2970 |
| 0.0769 | 13.85 | 129400 | 0.4423 | 0.3010 |
| 0.0785 | 13.87 | 129500 | 0.4495 | 0.2999 |
| 0.0785 | 13.88 | 129600 | 0.4483 | 0.2975 |
| 0.0785 | 13.89 | 129700 | 0.4485 | 0.2982 |
| 0.0785 | 13.9 | 129800 | 0.4429 | 0.2972 |
| 0.0785 | 13.91 | 129900 | 0.4430 | 0.2958 |
| 0.0792 | 13.92 | 130000 | 0.4495 | 0.2954 |
| 0.0792 | 13.93 | 130100 | 0.4485 | 0.2947 |
| 0.0792 | 13.94 | 130200 | 0.4395 | 0.2972 |
| 0.0792 | 13.95 | 130300 | 0.4379 | 0.2973 |
| 0.0792 | 13.96 | 130400 | 0.4428 | 0.2989 |
| 0.0795 | 13.97 | 130500 | 0.4385 | 0.3000 |
| 0.0795 | 13.98 | 130600 | 0.4490 | 0.2983 |
| 0.0795 | 13.99 | 130700 | 0.4568 | 0.2970 |
| 0.0795 | 14.0 | 130800 | 0.4482 | 0.2963 |
| 0.0795 | 14.01 | 130900 | 0.4479 | 0.2962 |
| 0.075 | 14.03 | 131000 | 0.4565 | 0.2968 |
| 0.075 | 14.04 | 131100 | 0.4623 | 0.2962 |
| 0.075 | 14.05 | 131200 | 0.4617 | 0.2965 |
| 0.075 | 14.06 | 131300 | 0.4687 | 0.2949 |
| 0.075 | 14.07 | 131400 | 0.4718 | 0.2929 |
| 0.0709 | 14.08 | 131500 | 0.4720 | 0.2945 |
| 0.0709 | 14.09 | 131600 | 0.4604 | 0.2953 |
| 0.0709 | 14.1 | 131700 | 0.4655 | 0.2955 |
| 0.0709 | 14.11 | 131800 | 0.4695 | 0.2958 |
| 0.0709 | 14.12 | 131900 | 0.4666 | 0.2945 |
| 0.0705 | 14.13 | 132000 | 0.4605 | 0.2959 |
| 0.0705 | 14.14 | 132100 | 0.4581 | 0.2947 |
| 0.0705 | 14.15 | 132200 | 0.4597 | 0.2948 |
| 0.0705 | 14.16 | 132300 | 0.4612 | 0.2943 |
| 0.0705 | 14.18 | 132400 | 0.4611 | 0.2959 |
| 0.0727 | 14.19 | 132500 | 0.4569 | 0.2958 |
| 0.0727 | 14.2 | 132600 | 0.4556 | 0.2951 |
| 0.0727 | 14.21 | 132700 | 0.4597 | 0.2955 |
| 0.0727 | 14.22 | 132800 | 0.4472 | 0.2935 |
| 0.0727 | 14.23 | 132900 | 0.4573 | 0.2943 |
| 0.0723 | 14.24 | 133000 | 0.4572 | 0.2943 |
| 0.0723 | 14.25 | 133100 | 0.4582 | 0.2956 |
| 0.0723 | 14.26 | 133200 | 0.4599 | 0.2968 |
| 0.0723 | 14.27 | 133300 | 0.4633 | 0.2962 |
| 0.0723 | 14.28 | 133400 | 0.4604 | 0.2972 |
| 0.071 | 14.29 | 133500 | 0.4587 | 0.2971 |
| 0.071 | 14.3 | 133600 | 0.4598 | 0.2973 |
| 0.071 | 14.31 | 133700 | 0.4579 | 0.2976 |
| 0.071 | 14.33 | 133800 | 0.4539 | 0.2969 |
| 0.071 | 14.34 | 133900 | 0.4628 | 0.2961 |
| 0.0703 | 14.35 | 134000 | 0.4627 | 0.2974 |
| 0.0703 | 14.36 | 134100 | 0.4611 | 0.2974 |
| 0.0703 | 14.37 | 134200 | 0.4607 | 0.2977 |
| 0.0703 | 14.38 | 134300 | 0.4638 | 0.2983 |
| 0.0703 | 14.39 | 134400 | 0.4628 | 0.2969 |
| 0.0736 | 14.4 | 134500 | 0.4543 | 0.2965 |
| 0.0736 | 14.41 | 134600 | 0.4585 | 0.2963 |
| 0.0736 | 14.42 | 134700 | 0.4636 | 0.2950 |
| 0.0736 | 14.43 | 134800 | 0.4636 | 0.2964 |
| 0.0736 | 14.44 | 134900 | 0.4630 | 0.2958 |
| 0.0715 | 14.45 | 135000 | 0.4611 | 0.2968 |
| 0.0715 | 14.46 | 135100 | 0.4633 | 0.2966 |
| 0.0715 | 14.48 | 135200 | 0.4664 | 0.2954 |
| 0.0715 | 14.49 | 135300 | 0.4670 | 0.2945 |
| 0.0715 | 14.5 | 135400 | 0.4638 | 0.2961 |
| 0.073 | 14.51 | 135500 | 0.4635 | 0.2965 |
| 0.073 | 14.52 | 135600 | 0.4639 | 0.2956 |
| 0.073 | 14.53 | 135700 | 0.4617 | 0.2948 |
| 0.073 | 14.54 | 135800 | 0.4609 | 0.2933 |
| 0.073 | 14.55 | 135900 | 0.4614 | 0.2947 |
| 0.0717 | 14.56 | 136000 | 0.4567 | 0.2958 |
| 0.0717 | 14.57 | 136100 | 0.4615 | 0.2934 |
| 0.0717 | 14.58 | 136200 | 0.4606 | 0.2929 |
| 0.0717 | 14.59 | 136300 | 0.4652 | 0.2934 |
| 0.0717 | 14.6 | 136400 | 0.4664 | 0.2934 |
| 0.0717 | 14.61 | 136500 | 0.4657 | 0.2923 |
| 0.0717 | 14.63 | 136600 | 0.4633 | 0.2931 |
| 0.0717 | 14.64 | 136700 | 0.4624 | 0.2943 |
| 0.0717 | 14.65 | 136800 | 0.4615 | 0.2949 |
| 0.0717 | 14.66 | 136900 | 0.4619 | 0.2930 |
| 0.0707 | 14.67 | 137000 | 0.4608 | 0.2936 |
| 0.0707 | 14.68 | 137100 | 0.4615 | 0.2945 |
| 0.0707 | 14.69 | 137200 | 0.4605 | 0.2941 |
| 0.0707 | 14.7 | 137300 | 0.4598 | 0.2931 |
| 0.0707 | 14.71 | 137400 | 0.4596 | 0.2943 |
| 0.0694 | 14.72 | 137500 | 0.4624 | 0.2927 |
| 0.0694 | 14.73 | 137600 | 0.4614 | 0.2931 |
| 0.0694 | 14.74 | 137700 | 0.4621 | 0.2924 |
| 0.0694 | 14.75 | 137800 | 0.4589 | 0.2920 |
| 0.0694 | 14.76 | 137900 | 0.4590 | 0.2926 |
| 0.0706 | 14.78 | 138000 | 0.4588 | 0.2931 |
| 0.0706 | 14.79 | 138100 | 0.4583 | 0.2928 |
| 0.0706 | 14.8 | 138200 | 0.4552 | 0.2934 |
| 0.0706 | 14.81 | 138300 | 0.4551 | 0.2923 |
| 0.0706 | 14.82 | 138400 | 0.4555 | 0.2927 |
| 0.0717 | 14.83 | 138500 | 0.4547 | 0.2930 |
| 0.0717 | 14.84 | 138600 | 0.4546 | 0.2930 |
| 0.0717 | 14.85 | 138700 | 0.4553 | 0.2934 |
| 0.0717 | 14.86 | 138800 | 0.4554 | 0.2924 |
| 0.0717 | 14.87 | 138900 | 0.4573 | 0.2924 |
| 0.0722 | 14.88 | 139000 | 0.4582 | 0.2927 |
| 0.0722 | 14.89 | 139100 | 0.4586 | 0.2926 |
| 0.0722 | 14.9 | 139200 | 0.4570 | 0.2926 |
| 0.0722 | 14.91 | 139300 | 0.4571 | 0.2923 |
| 0.0722 | 14.93 | 139400 | 0.4564 | 0.2925 |
| 0.0698 | 14.94 | 139500 | 0.4573 | 0.2927 |
| 0.0698 | 14.95 | 139600 | 0.4574 | 0.2927 |
| 0.0698 | 14.96 | 139700 | 0.4573 | 0.2927 |
| 0.0698 | 14.97 | 139800 | 0.4576 | 0.2921 |
| 0.0698 | 14.98 | 139900 | 0.4578 | 0.2923 |
| 0.0705 | 14.99 | 140000 | 0.4579 | 0.2928 |
| 0.0705 | 15.0 | 140100 | 0.4578 | 0.2927 |
### Framework versions
- Transformers 4.16.0.dev0
- Pytorch 1.10.1+cu113
- Datasets 1.18.3
- Tokenizers 0.10.3
|
{"language": ["sv-SE"], "license": "apache-2.0", "tags": ["automatic-speech-recognition", "common_voice", "generated_from_trainer"], "model-index": [{"name": "wav2vec2-speechdat", "results": []}]}
|
automatic-speech-recognition
|
birgermoell/wav2vec2-speechdat
|
[
"transformers",
"pytorch",
"wav2vec2",
"automatic-speech-recognition",
"common_voice",
"generated_from_trainer",
"license:apache-2.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"sv-SE"
] |
TAGS
#transformers #pytorch #wav2vec2 #automatic-speech-recognition #common_voice #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us
|
wav2vec2-speechdat
==================
This model is a fine-tuned version of facebook/wav2vec2-large-xlsr-53 on the COMMON\_VOICE - SV-SE dataset.
It achieves the following results on the evaluation set:
* Loss: 0.4578
* Wer: 0.2927
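The model id and pipeline tag in this record are enough to try the checkpoint with the `transformers` ASR pipeline. The sketch below is illustrative and not part of the original card; the audio filename is a placeholder, and the input is assumed to be 16 kHz mono Swedish speech (resample first if it is not).

```python
# Minimal inference sketch (assumes ffmpeg is available to decode the audio file).
from transformers import pipeline

asr = pipeline(
    "automatic-speech-recognition",
    model="birgermoell/wav2vec2-speechdat",
)

# "speech_sv.wav" is a placeholder path to a 16 kHz Swedish speech recording.
result = asr("speech_sv.wav")
print(result["text"])
```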
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 0.0003
* train\_batch\_size: 16
* eval\_batch\_size: 8
* seed: 42
* gradient\_accumulation\_steps: 2
* total\_train\_batch\_size: 32
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* lr\_scheduler\_warmup\_steps: 500
* num\_epochs: 15.0
* mixed\_precision\_training: Native AMP
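As a hedged reconstruction (not the original training script), these values map onto `transformers.TrainingArguments` roughly as follows; the output directory name is a placeholder, and Adam's betas/epsilon are the library defaults already listed above.

```python
# Illustrative TrainingArguments mirroring the listed hyperparameters.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="wav2vec2-speechdat",   # placeholder output path
    learning_rate=3e-4,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=8,
    seed=42,
    gradient_accumulation_steps=2,     # effective train batch size: 32
    lr_scheduler_type="linear",
    warmup_steps=500,
    num_train_epochs=15.0,
    fp16=True,                         # mixed-precision training (native AMP)
)
```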
### Training results
### Framework versions
* Transformers 4.16.0.dev0
* Pytorch 1.10.1+cu113
* Datasets 1.18.3
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0003\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 8\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 32\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 500\n* num\\_epochs: 15.0\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.16.0.dev0\n* Pytorch 1.10.1+cu113\n* Datasets 1.18.3\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #common_voice #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0003\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 8\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 32\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 500\n* num\\_epochs: 15.0\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.16.0.dev0\n* Pytorch 1.10.1+cu113\n* Datasets 1.18.3\n* Tokenizers 0.10.3"
] |
[
58,
159,
4,
40
] |
[
"passage: TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #common_voice #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0003\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 8\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 32\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 500\n* num\\_epochs: 15.0\n* mixed\\_precision\\_training: Native AMP### Training results### Framework versions\n\n\n* Transformers 4.16.0.dev0\n* Pytorch 1.10.1+cu113\n* Datasets 1.18.3\n* Tokenizers 0.10.3"
] |
[
-0.13250194489955902,
0.0668853223323822,
-0.002209613099694252,
0.039187654852867126,
0.1353345513343811,
-0.0048112450167536736,
0.10550779849290848,
0.13766154646873474,
-0.08541721105575562,
0.08948235958814621,
0.09455639868974686,
0.07506321370601654,
0.05965765193104744,
0.10457193851470947,
-0.024024801328778267,
-0.33244478702545166,
0.021367039531469345,
0.025098169222474098,
-0.13681842386722565,
0.11407382786273956,
0.11490131169557571,
-0.09650151431560516,
0.021762430667877197,
0.049194857478141785,
-0.13007251918315887,
0.007737272884696722,
-0.004181026015430689,
-0.07373028248548508,
0.11395125836133957,
0.05230306461453438,
0.08914756774902344,
0.01539565809071064,
0.07725261896848679,
-0.2549056112766266,
0.012488015927374363,
0.05604174733161926,
0.050987061113119125,
0.07216919958591461,
0.09066146612167358,
-0.008539678528904915,
0.13371720910072327,
-0.06422951817512512,
0.06938406080007553,
0.07522910088300705,
-0.11405466496944427,
-0.31705933809280396,
-0.0871492549777031,
0.02981797233223915,
0.11996332556009293,
0.0988200232386589,
-0.032115597277879715,
0.06985675543546677,
-0.06491482257843018,
0.08718698471784592,
0.23015470802783966,
-0.2549274265766144,
-0.08140875399112701,
-0.03652876615524292,
0.05433502048254013,
0.033284541219472885,
-0.12747371196746826,
-0.023804007098078728,
0.04540909826755524,
0.03588256984949112,
0.08829847723245621,
0.013560697436332703,
-0.01330589223653078,
0.001534164883196354,
-0.13438762724399567,
-0.05407880246639252,
0.15699517726898193,
0.09070857614278793,
-0.048932816833257675,
-0.0869792103767395,
-0.0073048630729317665,
-0.2068263739347458,
-0.046758562326431274,
0.006675561890006065,
0.028614191338419914,
-0.04182315245270729,
-0.11866098642349243,
0.014778180047869682,
-0.08693231642246246,
-0.10082969814538956,
0.017422160133719444,
0.19897954165935516,
0.03538740053772926,
-0.029047520831227303,
0.00044392564450390637,
0.09315399825572968,
0.02884094975888729,
-0.14530177414417267,
-0.0358235128223896,
0.046951230615377426,
-0.08447780460119247,
-0.018032550811767578,
-0.06423085182905197,
-0.03169168159365654,
-0.0041994014754891396,
0.15046609938144684,
-0.023433251306414604,
0.08168478310108185,
0.009294764138758183,
0.022199105471372604,
-0.10184106975793839,
0.2013516128063202,
-0.050832852721214294,
-0.017916252836585045,
-0.03872644528746605,
0.07998092472553253,
-0.014748298563063145,
-0.015966303646564484,
-0.05103122442960739,
0.018470430746674538,
0.10519763082265854,
0.043245211243629456,
-0.03714151680469513,
0.015852823853492737,
-0.053672246634960175,
-0.024231422692537308,
-0.017860818654298782,
-0.10338548570871353,
0.02191365696489811,
0.01836106926202774,
-0.07892902940511703,
0.02502310462296009,
0.015289287082850933,
0.02506484091281891,
-0.009991593658924103,
0.10511142015457153,
-0.08169135451316833,
-0.003129886230453849,
-0.07396045327186584,
-0.10024693608283997,
0.035622112452983856,
-0.010117716155946255,
0.008857741951942444,
-0.07600564509630203,
-0.10475078225135803,
-0.05091866850852966,
0.04264916852116585,
-0.033032774925231934,
-0.07818102836608887,
-0.05943650007247925,
-0.06841384619474411,
0.05758778005838394,
-0.035039693117141724,
0.13145656883716583,
-0.054850127547979355,
0.10923197865486145,
0.062461040914058685,
0.04420339688658714,
0.027018798515200615,
0.07030853629112244,
-0.05538402125239372,
0.03394271060824394,
-0.1476275771856308,
0.07430399954319,
-0.09363461285829544,
0.0684540644288063,
-0.13346603512763977,
-0.13463306427001953,
-0.025408221408724785,
-0.0014087961753830314,
0.09840293973684311,
0.09521584212779999,
-0.15229441225528717,
-0.10765617340803146,
0.17133715748786926,
-0.0858222171664238,
-0.12854710221290588,
0.1347983479499817,
-0.011863607913255692,
0.008538078516721725,
0.06000635027885437,
0.17249222099781036,
0.1062663123011589,
-0.10102783888578415,
-0.007556063588708639,
-0.06669662147760391,
0.13483217358589172,
0.009257352910935879,
0.1101059690117836,
-0.04571643844246864,
-0.0006304886192083359,
0.007709015626460314,
-0.046336837112903595,
0.06869446486234665,
-0.10239150375127792,
-0.08401450514793396,
-0.031766846776008606,
-0.09022440761327744,
0.006684800144284964,
0.06140388920903206,
0.04639853164553642,
-0.10404416173696518,
-0.1254485547542572,
0.05758703872561455,
0.11016902327537537,
-0.10680752247571945,
0.03767632320523262,
-0.08449813723564148,
0.02855793759226799,
-0.02541581727564335,
-0.021140968427062035,
-0.16596205532550812,
-0.02459007315337658,
0.02464054897427559,
-0.0747087374329567,
0.03959779441356659,
-0.04337408021092415,
0.079306460916996,
0.040218159556388855,
-0.05398968607187271,
-0.059555042535066605,
-0.08553000539541245,
-0.009736956097185612,
-0.06947138905525208,
-0.1939040720462799,
-0.08347980678081512,
-0.017874600365757942,
0.1624775230884552,
-0.21479880809783936,
0.014628131873905659,
0.02012620121240616,
0.11266638338565826,
0.028971606865525246,
-0.055387526750564575,
-0.01275172084569931,
0.0764075219631195,
-0.021062396466732025,
-0.05727045610547066,
0.022786060348153114,
0.017410604283213615,
-0.13055260479450226,
0.01671355962753296,
-0.12153813987970352,
0.11137893050909042,
0.10285785049200058,
-0.02324771136045456,
-0.06540428847074509,
-0.03948400169610977,
-0.0706339180469513,
-0.05945202708244324,
-0.0240461602807045,
-0.024993939325213432,
0.16460980474948883,
0.024874815717339516,
0.121311716735363,
-0.07914771884679794,
-0.040614377707242966,
0.037794943898916245,
-0.0017561536515131593,
-0.002772382227703929,
0.10872729122638702,
0.04728715121746063,
-0.030512236058712006,
0.09199324250221252,
0.06206390634179115,
-0.09349747747182846,
0.15204210579395294,
-0.07646306604146957,
-0.12023632973432541,
-0.024130217730998993,
0.015411538071930408,
0.04498543217778206,
0.10285166651010513,
-0.15402869880199432,
-0.0008266653749160469,
0.021350394934415817,
0.033185966312885284,
0.026302315294742584,
-0.2066614180803299,
-0.01141156442463398,
0.04809874668717384,
-0.0719810277223587,
-0.06304917484521866,
0.004664205014705658,
-0.014766020700335503,
0.07728435844182968,
0.012599486857652664,
-0.04385443404316902,
-0.013304227963089943,
-0.025384705513715744,
-0.09122230112552643,
0.19519194960594177,
-0.0851224884390831,
-0.15013255178928375,
-0.16171404719352722,
-0.045818667858839035,
-0.02366764470934868,
-0.0038629728369414806,
0.06076864153146744,
-0.11166933178901672,
-0.04163889214396477,
-0.04809047654271126,
0.056335873901844025,
-0.05557176098227501,
0.04315084591507912,
0.016748378053307533,
0.008385512977838516,
0.09828425943851471,
-0.11691930890083313,
0.01848180964589119,
-0.013277360238134861,
-0.04654602333903313,
0.003570080269128084,
0.01754065975546837,
0.10879185050725937,
0.17119891941547394,
0.03700268268585205,
0.025763442739844322,
-0.038721028715372086,
0.18903851509094238,
-0.11370182782411575,
-0.04753221943974495,
0.1219673901796341,
0.020117269828915596,
0.03415118530392647,
0.10272222757339478,
0.06215117126703262,
-0.09169083088636398,
0.03433660417795181,
0.05079195648431778,
-0.026815252378582954,
-0.24519066512584686,
-0.03355332091450691,
-0.07854495197534561,
-0.019299695268273354,
0.11551311612129211,
0.03153637796640396,
0.039956167340278625,
0.02901376225054264,
-0.02017555572092533,
0.009974395856261253,
0.007384869735687971,
0.0830732062458992,
0.11791469156742096,
0.055283546447753906,
0.12643268704414368,
-0.02347397617995739,
-0.03289756923913956,
0.023262154310941696,
-0.009535637684166431,
0.2469460666179657,
0.033026907593011856,
0.17842954397201538,
0.052690453827381134,
0.13894023001194,
0.014153112657368183,
0.09491374343633652,
0.022553037852048874,
-0.032482367008924484,
0.0315699428319931,
-0.060097020119428635,
-0.033749260008335114,
0.039913564920425415,
0.07485790550708771,
0.046378593891859055,
-0.14198049902915955,
-0.04249076917767525,
0.007249187678098679,
0.35699543356895447,
0.08297497779130936,
-0.3117411732673645,
-0.12609054148197174,
-0.0022141935769468546,
-0.09374178946018219,
-0.04618295654654503,
0.03672757372260094,
0.06957169622182846,
-0.09413834661245346,
0.07340872287750244,
-0.06988906115293503,
0.107953280210495,
-0.02835831418633461,
0.0014093738282099366,
0.0756266638636589,
0.06032392755150795,
-0.010854551568627357,
0.05742020905017853,
-0.2568429112434387,
0.2943805158138275,
-0.020738529041409492,
0.0944678783416748,
-0.022457826882600784,
0.027457362040877342,
0.038009438663721085,
-0.03355742245912552,
0.04885221645236015,
-0.016352113336324692,
-0.08964117616415024,
-0.19916442036628723,
-0.06395503133535385,
0.03426572307944298,
0.13061431050300598,
-0.05409819260239601,
0.13373327255249023,
-0.03536820784211159,
-0.0035659384448081255,
0.06986553221940994,
-0.07050897926092148,
-0.11837465316057205,
-0.0929614007472992,
0.0259659755975008,
0.04824702814221382,
0.12027539312839508,
-0.10184574127197266,
-0.11453685164451599,
-0.05336468666791916,
0.13682310283184052,
-0.08334597945213318,
-0.012236854061484337,
-0.1261838674545288,
0.08253505080938339,
0.1776382178068161,
-0.06953875720500946,
0.06359177082777023,
0.020230278372764587,
0.1408964842557907,
0.038058798760175705,
-0.00834337156265974,
0.09793519973754883,
-0.07787788659334183,
-0.19348080456256866,
-0.03784453868865967,
0.17828086018562317,
0.023007551208138466,
0.06990553438663483,
-0.020192256197333336,
0.031593263149261475,
-0.022410091012716293,
-0.08657650649547577,
0.04320818558335304,
-0.017347773537039757,
-0.029463106766343117,
0.06107235327363014,
-0.019762450829148293,
0.03135620057582855,
-0.08491834998130798,
-0.05693129450082779,
0.14830607175827026,
0.28867608308792114,
-0.06927461922168732,
0.0014161925064399838,
0.04008377715945244,
-0.03554639592766762,
-0.12216073274612427,
0.01995725929737091,
0.14200100302696228,
0.03577795624732971,
0.0014523169957101345,
-0.21877415478229523,
0.0755755677819252,
0.09643758833408356,
-0.028703976422548294,
0.09579737484455109,
-0.28599193692207336,
-0.14240238070487976,
0.110760398209095,
0.0841493234038353,
-0.0027206402737647295,
-0.15925417840480804,
-0.07057826966047287,
-0.03052222728729248,
-0.13071727752685547,
0.08380389958620071,
-0.005698198452591896,
0.1325247436761856,
-0.0034777314867824316,
0.07678452879190445,
0.021784745156764984,
-0.042011942714452744,
0.15756405889987946,
-0.0033837787341326475,
0.0376315638422966,
0.0019481105264276266,
0.05925630033016205,
0.019567767158150673,
-0.05275748670101166,
0.026662204414606094,
-0.07043351978063583,
0.01569337770342827,
-0.13710908591747284,
-0.04417050629854202,
-0.09398029744625092,
0.02527429908514023,
-0.03001214563846588,
-0.033410362899303436,
-0.0225395355373621,
0.03206080570816994,
0.0549197793006897,
0.011217300780117512,
0.15669459104537964,
-0.05279717966914177,
0.17971408367156982,
0.08833809942007065,
0.09571078419685364,
-0.022809041664004326,
-0.10397709161043167,
-0.007665228098630905,
-0.018314091488718987,
0.05840623006224632,
-0.13794274628162384,
0.03704153001308441,
0.14305806159973145,
0.06361176073551178,
0.1532175987958908,
0.06838435679674149,
-0.06895998120307922,
0.028903905302286148,
0.07602794468402863,
-0.059311725199222565,
-0.13582497835159302,
-0.027247203513979912,
0.05237109959125519,
-0.1593814194202423,
0.030303509905934334,
0.1052277609705925,
-0.06945886462926865,
-0.0019402424804866314,
0.012175059877336025,
0.00595500972121954,
-0.07219234108924866,
0.23383130133152008,
0.042398735880851746,
0.08352702111005783,
-0.09509003162384033,
0.06883374601602554,
0.04826192185282707,
-0.1533094048500061,
0.007416239473968744,
0.07193465530872345,
-0.035012684762477875,
-0.004910257179290056,
-0.013515894301235676,
0.038534604012966156,
-0.04277585819363594,
-0.06279813498258591,
-0.13049881160259247,
-0.15296974778175354,
0.09102733433246613,
0.11157193034887314,
0.03682345896959305,
0.03590014949440956,
-0.04869022220373154,
0.06298480927944183,
-0.11775247752666473,
0.08067413419485092,
0.0919884517788887,
0.07857383042573929,
-0.13968519866466522,
0.14887817203998566,
0.014460377395153046,
0.010968534275889397,
0.007889356464147568,
-0.022969288751482964,
-0.08546479046344757,
0.032205790281295776,
-0.11913362145423889,
-0.03892243281006813,
-0.050200577825307846,
-0.004712202586233616,
0.011664288118481636,
-0.07332389801740646,
-0.0854790136218071,
0.03359675034880638,
-0.12228208035230637,
-0.04945136979222298,
-0.005791433155536652,
0.0684647485613823,
-0.10598361492156982,
-0.0082022063434124,
0.06531314551830292,
-0.11861112713813782,
0.07862108200788498,
0.06987041980028152,
0.019840260967612267,
0.057941339910030365,
-0.09295038878917694,
0.01591518707573414,
0.053494058549404144,
-0.007564228493720293,
0.017808660864830017,
-0.16749125719070435,
-0.009245221503078938,
-0.0066556320525705814,
0.061394885182380676,
0.0003277557552792132,
0.013601860962808132,
-0.13327457010746002,
-0.05208481103181839,
-0.027465417981147766,
-0.04837173596024513,
-0.053892333060503006,
0.03133011236786842,
0.0634661465883255,
0.060687385499477386,
0.16585640609264374,
-0.08231581747531891,
0.03915480151772499,
-0.233186274766922,
0.016380684450268745,
-0.04289096221327782,
-0.07627616077661514,
-0.06861869990825653,
-0.02588389627635479,
0.07746395468711853,
-0.05588173121213913,
0.08119595050811768,
-0.07262147217988968,
0.075106680393219,
0.046345360577106476,
-0.08622237294912338,
0.025458157062530518,
0.030195558443665504,
0.2774602472782135,
0.0730249360203743,
-0.024079184979200363,
0.08792401850223541,
0.0023742488119751215,
0.058279868215322495,
0.14857861399650574,
0.15339145064353943,
0.15661588311195374,
0.00499439612030983,
0.10250025242567062,
0.0671669989824295,
-0.08313239365816116,
-0.13645638525485992,
0.08026453852653503,
-0.012426715344190598,
0.12464886158704758,
0.007977939210832119,
0.2355078160762787,
0.1251325011253357,
-0.19255538284778595,
0.061153121292591095,
-0.030797353014349937,
-0.0842016413807869,
-0.10373345762491226,
-0.03837282955646515,
-0.07404538244009018,
-0.20227381587028503,
0.0171409510076046,
-0.1245688945055008,
0.06741881370544434,
0.06024123728275299,
0.033240314573049545,
0.02085401862859726,
0.15251560509204865,
0.029856223613023758,
-0.012060618959367275,
0.11522406339645386,
-0.011989543214440346,
-0.016363071277737617,
-0.05087558552622795,
-0.09730128198862076,
0.04727957397699356,
-0.027950048446655273,
0.054688647389411926,
-0.047669969499111176,
-0.11408255994319916,
0.06195446103811264,
0.007749210111796856,
-0.11323914676904678,
0.021124020218849182,
0.006507029756903648,
0.07704897224903107,
0.07784511893987656,
0.025181569159030914,
-0.002052042866125703,
-0.014340190216898918,
0.25288787484169006,
-0.11084531247615814,
-0.05633923038840294,
-0.13747534155845642,
0.24064387381076813,
0.03428688645362854,
-0.017571846023201942,
0.010188461281359196,
-0.07536600530147552,
-0.017612913623452187,
0.1615016907453537,
0.10240724682807922,
0.009853215888142586,
-0.02605278417468071,
-0.002241380512714386,
-0.015414186753332615,
-0.04126515984535217,
0.07999547570943832,
0.11890467256307602,
0.07579698413610458,
-0.0520157553255558,
-0.028533997014164925,
-0.05079684033989906,
-0.055908460170030594,
-0.012436742894351482,
0.0720594972372055,
0.014842130243778229,
-0.02765788324177265,
-0.02731212228536606,
0.129679337143898,
-0.07054952532052994,
-0.10202670842409134,
0.01611930876970291,
-0.17580349743366241,
-0.178365558385849,
-0.040057916194200516,
0.03824243322014809,
0.035991448909044266,
0.05514710396528244,
-0.014034518972039223,
-0.02913382090628147,
0.1043119803071022,
0.0042246803641319275,
-0.0460100993514061,
-0.13851693272590637,
0.09879609942436218,
-0.06369531899690628,
0.18656738102436066,
-0.03841014578938484,
0.03291134536266327,
0.11332253366708755,
0.08269841223955154,
-0.07592230290174484,
0.05138220265507698,
0.06969599425792694,
-0.1436411589384079,
0.04596259072422981,
0.19841907918453217,
-0.041332535445690155,
0.14041060209274292,
0.025888657197356224,
-0.1559188961982727,
0.01175992377102375,
-0.06529903411865234,
-0.045927390456199646,
-0.07550480216741562,
-0.02792610041797161,
-0.05131090432405472,
0.12166745215654373,
0.21608398854732513,
-0.08298676460981369,
-0.030144203454256058,
-0.0534842275083065,
0.01870790310204029,
0.06253895908594131,
0.12334469705820084,
-0.05018370598554611,
-0.296786367893219,
0.01466682180762291,
0.014600170776247978,
-0.006112460047006607,
-0.2637186646461487,
-0.09304900467395782,
0.03895982354879379,
-0.06825409829616547,
-0.025470590218901634,
0.11768881976604462,
0.06657704710960388,
0.034422796219587326,
-0.04938029125332832,
-0.11782252788543701,
-0.03772309422492981,
0.20398901402950287,
-0.1810554563999176,
-0.07560206949710846
] |
null | null |
transformers
|
# Wav2Vec2-Large-XLSR-53-Swedish
Fine-tuned [facebook/wav2vec2-large-xlsr-53](https://huggingface.co/facebook/wav2vec2-large-xlsr-53) on Swedish using the [Common Voice](https://huggingface.co/datasets/common_voice) dataset. The training data amounts to 402 MB.
When using this model, make sure that your speech input is sampled at 16kHz.
## Usage
The model can be used directly (without a language model) as follows:
```python
import torch
import torchaudio
from datasets import load_dataset
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
test_dataset = load_dataset("common_voice", "sv-SE", split="test[:2%]")
processor = Wav2Vec2Processor.from_pretrained("birgermoell/wav2vec2-swedish-common-voice")
model = Wav2Vec2ForCTC.from_pretrained("birgermoell/wav2vec2-swedish-common-voice")
resampler = torchaudio.transforms.Resample(48_000, 16_000)
# Preprocessing the datasets.
# We need to read the audio files as arrays
def speech_file_to_array_fn(batch):
speech_array, sampling_rate = torchaudio.load(batch["path"])
batch["speech"] = resampler(speech_array).squeeze().numpy()
return batch
test_dataset = test_dataset.map(speech_file_to_array_fn)
inputs = processor(test_dataset["speech"][:2], sampling_rate=16_000, return_tensors="pt", padding=True)
with torch.no_grad():
logits = model(inputs.input_values, attention_mask=inputs.attention_mask).logits
predicted_ids = torch.argmax(logits, dim=-1)
print("Prediction:", processor.batch_decode(predicted_ids))
print("Reference:", test_dataset["sentence"][:2])
```
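The same pipeline also works on a single local recording. A hedged sketch (the file name is a placeholder, and the resampling step assumes torchaudio can read the file):

```python
import torch
import torchaudio
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor

processor = Wav2Vec2Processor.from_pretrained("birgermoell/wav2vec2-swedish-common-voice")
model = Wav2Vec2ForCTC.from_pretrained("birgermoell/wav2vec2-swedish-common-voice")

# Placeholder file; resample whatever rate it has to the 16 kHz the model expects.
speech_array, sampling_rate = torchaudio.load("my_recording.wav")
speech_array = torchaudio.transforms.Resample(sampling_rate, 16_000)(speech_array)

inputs = processor(speech_array.squeeze().numpy(), sampling_rate=16_000, return_tensors="pt")
with torch.no_grad():
    logits = model(inputs.input_values).logits

print(processor.batch_decode(torch.argmax(logits, dim=-1))[0])
```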
## Evaluation
The model can be evaluated as follows on the Swedish test data of Common Voice.
```python
import torch
import torchaudio
from datasets import load_dataset, load_metric
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
import re
test_dataset = load_dataset("common_voice", "sv-SE", split="test")
wer = load_metric("wer")
processor = Wav2Vec2Processor.from_pretrained("birgermoell/wav2vec2-swedish-common-voice")
model = Wav2Vec2ForCTC.from_pretrained("birgermoell/wav2vec2-swedish-common-voice")
model.to("cuda")
chars_to_ignore_regex = '[\,\?\.\!\-\;\:\"\“]'
resampler = torchaudio.transforms.Resample(48_000, 16_000)
# Preprocessing the datasets.
# We need to read the audio files as arrays
def speech_file_to_array_fn(batch):
batch["sentence"] = re.sub(chars_to_ignore_regex, '', batch["sentence"]).lower()
speech_array, sampling_rate = torchaudio.load(batch["path"])
batch["speech"] = resampler(speech_array).squeeze().numpy()
return batch
test_dataset = test_dataset.map(speech_file_to_array_fn)
# Run inference over the test set and decode the predictions
def evaluate(batch):
inputs = processor(batch["speech"], sampling_rate=16_000, return_tensors="pt", padding=True)
with torch.no_grad():
logits = model(inputs.input_values.to("cuda"), attention_mask=inputs.attention_mask.to("cuda")).logits
pred_ids = torch.argmax(logits, dim=-1)
batch["pred_strings"] = processor.batch_decode(pred_ids)
return batch
result = test_dataset.map(evaluate, batched=True, batch_size=8)
print("WER: {:2f}".format(100 * wer.compute(predictions=result["pred_strings"], references=result["sentence"])))
```
**Test Result**: 36.91 %
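For intuition, the 36.91 % is word error rate: (substitutions + deletions + insertions) divided by the number of words in the reference. A toy check with the same `wer` metric loaded above (both Swedish sentences are made up for illustration):

```python
from datasets import load_metric

wer = load_metric("wer")
# One substituted word against a four-word reference -> 1 / 4 = 0.25
print(wer.compute(predictions=["det är ett prov"], references=["det är ett test"]))
```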
## Training
The Common Voice `train` and `validation` datasets were used for training.
The script used for training can be found [here](https://colab.research.google.com/drive/1KkD4PeZwnIwxxxOP1bUE7XTZMK7-SzRj?usp=sharing).
|
{"language": "sv", "license": "apache-2.0", "tags": ["audio", "automatic-speech-recognition", "speech", "xlsr-fine-tuning-week"], "datasets": ["common_voice"], "model-index": [{"name": "XLSR Wav2Vec2 Swedish by Birger Moell", "results": [{"task": {"type": "automatic-speech-recognition", "name": "Speech Recognition"}, "dataset": {"name": "Common Voice sv-SE", "type": "common_voice", "args": "sv-SE"}, "metrics": [{"type": "wer", "value": 36.91, "name": "Test WER"}]}]}]}
|
automatic-speech-recognition
|
birgermoell/wav2vec2-swedish-common-voice
|
[
"transformers",
"pytorch",
"jax",
"wav2vec2",
"automatic-speech-recognition",
"audio",
"speech",
"xlsr-fine-tuning-week",
"sv",
"dataset:common_voice",
"license:apache-2.0",
"model-index",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"sv"
] |
TAGS
#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #xlsr-fine-tuning-week #sv #dataset-common_voice #license-apache-2.0 #model-index #endpoints_compatible #region-us
|
# Wav2Vec2-Large-XLSR-53-Swedish
Fine-tuned facebook/wav2vec2-large-xlsr-53 in Swedish using the Common Voice. The training data amounts to 402 MB.
When using this model, make sure that your speech input is sampled at 16kHz.
## Usage
The model can be used directly (without a language model) as follows:
## Evaluation
The model can be evaluated as follows on the Swedish test data of Common Voice.
Test Result: 36.91 %
## Training
The Common Voice 'train', 'validation' datasets were used for training.
The script used for training can be found here
|
[
"# Wav2Vec2-Large-XLSR-53-Swedish\n\nFine-tuned facebook/wav2vec2-large-xlsr-53 in Swedish using the Common Voice. The training data amounts to 402 MB.\nWhen using this model, make sure that your speech input is sampled at 16kHz.",
"## Usage\n\nThe model can be used directly (without a language model) as follows:",
"## Evaluation\n\nThe model can be evaluated as follows on the {language} test data of Common Voice.\n\n\n\n\nTest Result: 36.91 %",
"## Training\n\nThe Common Voice 'train', 'validation' datasets were used for training.\n\nThe script used for training can be found here"
] |
[
"TAGS\n#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #xlsr-fine-tuning-week #sv #dataset-common_voice #license-apache-2.0 #model-index #endpoints_compatible #region-us \n",
"# Wav2Vec2-Large-XLSR-53-Swedish\n\nFine-tuned facebook/wav2vec2-large-xlsr-53 in Swedish using the Common Voice. The training data amounts to 402 MB.\nWhen using this model, make sure that your speech input is sampled at 16kHz.",
"## Usage\n\nThe model can be used directly (without a language model) as follows:",
"## Evaluation\n\nThe model can be evaluated as follows on the {language} test data of Common Voice.\n\n\n\n\nTest Result: 36.91 %",
"## Training\n\nThe Common Voice 'train', 'validation' datasets were used for training.\n\nThe script used for training can be found here"
] |
[
80,
73,
20,
29,
32
] |
[
"passage: TAGS\n#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #xlsr-fine-tuning-week #sv #dataset-common_voice #license-apache-2.0 #model-index #endpoints_compatible #region-us \n# Wav2Vec2-Large-XLSR-53-Swedish\n\nFine-tuned facebook/wav2vec2-large-xlsr-53 in Swedish using the Common Voice. The training data amounts to 402 MB.\nWhen using this model, make sure that your speech input is sampled at 16kHz.## Usage\n\nThe model can be used directly (without a language model) as follows:## Evaluation\n\nThe model can be evaluated as follows on the {language} test data of Common Voice.\n\n\n\n\nTest Result: 36.91 %## Training\n\nThe Common Voice 'train', 'validation' datasets were used for training.\n\nThe script used for training can be found here"
] |
[
-0.17753846943378448,
0.023915262892842293,
-0.0012993579730391502,
-0.014094054698944092,
0.07853042334318161,
-0.0756961852312088,
0.12540803849697113,
0.09385199844837189,
-0.06869460642337799,
0.004628640599548817,
0.056321196258068085,
-0.02703750506043434,
0.0829947292804718,
0.11619453132152557,
-0.008712762035429478,
-0.198263481259346,
0.015637101605534554,
-0.0044322446919977665,
0.06482444703578949,
0.10767514258623123,
0.12098342925310135,
-0.06143202632665634,
-0.024422455579042435,
0.061924878507852554,
-0.14241935312747955,
0.03270883858203888,
0.0538003034889698,
-0.09652552753686905,
0.13763907551765442,
0.06295956671237946,
0.0534687303006649,
0.06787263602018356,
0.10802165418863297,
-0.1810411959886551,
0.021707890555262566,
0.013279602862894535,
0.044819753617048264,
0.016949106007814407,
0.055605698376894,
0.03743252158164978,
0.08390767872333527,
0.0730152279138565,
-0.049464285373687744,
0.08396010845899582,
-0.021958524361252785,
-0.22534538805484772,
-0.017724130302667618,
-0.00037608237471431494,
0.1053897812962532,
0.12415305525064468,
-0.0749364048242569,
0.08133701235055923,
-0.15089137852191925,
0.10351680219173431,
0.07778321951627731,
-0.17513388395309448,
0.0002987574553117156,
0.12664340436458588,
0.07972270995378494,
0.08494547009468079,
-0.06647839397192001,
0.05984517186880112,
0.06198578327894211,
0.03215201944112778,
0.03887559846043587,
-0.023677708581089973,
-0.23660285770893097,
-0.03498721122741699,
-0.13429738581180573,
-0.014059922657907009,
0.2436891496181488,
0.00015121142496354878,
-0.08942227065563202,
-0.1298273503780365,
0.014975432306528091,
-0.00020246466738171875,
0.021892234683036804,
-0.051704827696084976,
-0.0029014453757554293,
0.02139449678361416,
-0.03815620020031929,
-0.04884568601846695,
-0.1327352225780487,
-0.1535017043352127,
0.03422683849930763,
0.01089046336710453,
0.005497020669281483,
0.015212480910122395,
-0.08907396346330643,
0.07430663704872131,
-0.12428802996873856,
-0.06882423907518387,
-0.030136795714497566,
0.01838362216949463,
-0.08427857607603073,
-0.007642708253115416,
-0.09299181401729584,
-0.2214541882276535,
0.030898919329047203,
-0.023057883605360985,
0.06771102547645569,
0.0038048254791647196,
-0.0225748959928751,
0.057535018771886826,
0.029082948341965675,
0.13012701272964478,
-0.06283817440271378,
-0.057170320302248,
0.02892620675265789,
-0.0038295213598757982,
-0.04039799049496651,
-0.018644297495484352,
-0.08119833469390869,
-0.07641937583684921,
0.07558884471654892,
0.056332703679800034,
-0.0489506796002388,
-0.00436897948384285,
-0.022990891709923744,
-0.006603884510695934,
0.0065240575931966305,
-0.10830055177211761,
-0.030308887362480164,
0.06440495699644089,
-0.03354693204164505,
0.07945244014263153,
0.04370688274502754,
0.06091633439064026,
-0.10186272859573364,
-0.012593557126820087,
0.03030536323785782,
0.05341706424951553,
-0.006673954427242279,
-0.1232110857963562,
0.037929002195596695,
-0.05801219120621681,
-0.0070081125013530254,
-0.08086585253477097,
-0.11512526869773865,
-0.10854069888591766,
-0.009705676697194576,
0.03413429856300354,
-0.011851243674755096,
-0.1270621418952942,
-0.02787960134446621,
-0.00829029455780983,
-0.07781261205673218,
0.13424214720726013,
-0.04837651923298836,
0.05258765444159508,
0.0026035141199827194,
0.050345856696367264,
0.022978896275162697,
0.06452027708292007,
-0.08457713574171066,
-0.07169239968061447,
0.022389104589819908,
0.14702485501766205,
-0.053644850850105286,
-0.059194911271333694,
-0.08662287890911102,
-0.0854843333363533,
-0.03581400215625763,
0.07700931280851364,
0.06264917552471161,
0.11752767860889435,
-0.3010178208351135,
-0.08714856952428818,
0.22017119824886322,
-0.13073667883872986,
-0.01568840816617012,
0.19884292781352997,
0.005492347292602062,
0.09321650117635727,
0.17054906487464905,
0.24195486307144165,
0.0998506024479866,
-0.20589329302310944,
0.014457190409302711,
0.031249502673745155,
-0.02494298294186592,
-0.1076839342713356,
0.09158263355493546,
-0.05949220433831215,
0.014359730295836926,
0.0376008078455925,
-0.06912162154912949,
0.05227464810013771,
-0.01903357170522213,
-0.050572168081998825,
-0.00828381348401308,
-0.08204104006290436,
-0.009710812009871006,
0.04754098877310753,
0.012762721627950668,
-0.06432714313268661,
-0.05486690625548363,
0.055389419198036194,
0.09952566027641296,
-0.13662917912006378,
0.062300924211740494,
-0.05730399116873741,
0.0525377131998539,
-0.08497017621994019,
-0.0178002268075943,
-0.12676599621772766,
0.14912091195583344,
-0.024456607177853584,
0.0863463431596756,
0.03800150007009506,
0.1949397474527359,
0.029731595888733864,
0.03132361173629761,
-0.047300711274147034,
-0.003516237484291196,
0.008066799491643906,
-0.036214619874954224,
-0.04279814660549164,
-0.07997148483991623,
-0.03544911369681358,
-0.07201871275901794,
0.10592658817768097,
-0.18950214982032776,
-0.008605469018220901,
0.03988471254706383,
0.0017422330565750599,
0.007174855563789606,
-0.034744735807180405,
0.09201537817716599,
0.07352686673402786,
0.006944369524717331,
0.0033066016621887684,
0.02828548476099968,
0.0151869161054492,
-0.08289919048547745,
0.14939218759536743,
-0.09011361747980118,
-0.02832406386733055,
0.10238314419984818,
-0.01077154092490673,
-0.006064180284738541,
0.002449149964377284,
-0.0175591092556715,
-0.01995416358113289,
-0.10780991613864899,
-0.02615460939705372,
0.22595608234405518,
-0.0005977234686724842,
0.10693617910146713,
-0.11575858294963837,
-0.0012412688229233027,
0.021151773631572723,
-0.10008092224597931,
0.03646198287606239,
0.0648898258805275,
-0.031389378011226654,
-0.00534725422039628,
0.006483543664216995,
-0.09934299439191818,
-0.09776651114225388,
0.2826293706893921,
-0.023815618827939034,
-0.10853984206914902,
0.046490378677845,
-0.024439338594675064,
-0.022491147741675377,
0.06823180615901947,
-0.1082427054643631,
-0.04217151924967766,
0.03818054869771004,
0.06181525066494942,
0.06907178461551666,
-0.15354137122631073,
0.012201881036162376,
0.004369107075035572,
-0.14943544566631317,
-0.1418587565422058,
0.05618144944310188,
-0.0690431147813797,
0.04342389479279518,
-0.10306531935930252,
-0.050157397985458374,
-0.01461345236748457,
-0.04405396059155464,
-0.18321837484836578,
0.14086611568927765,
-0.08192607015371323,
-0.21038301289081573,
-0.1722436100244522,
0.06991389393806458,
0.02624218352138996,
0.03849980607628822,
0.09491438418626785,
-0.11531121283769608,
-0.017021048814058304,
-0.05487179756164551,
0.09300082921981812,
0.02690064162015915,
-0.02101641707122326,
-0.05667531490325928,
0.04005968198180199,
0.07023242115974426,
-0.14611926674842834,
0.019982364028692245,
-0.07502161711454391,
-0.031190384179353714,
-0.002306002890691161,
-0.02219812572002411,
0.02355358563363552,
0.1624949723482132,
0.019669372588396072,
0.012465848587453365,
-0.028684986755251884,
0.15964561700820923,
-0.0629907175898552,
-0.0018868447514250875,
0.19712254405021667,
0.0035544477868825197,
-0.02921304479241371,
0.09980642050504684,
0.0102390106767416,
-0.05367463082075119,
-0.0064402176067233086,
-0.017145279794931412,
-0.09075827151536942,
-0.2557574510574341,
-0.09210103005170822,
-0.05078568309545517,
-0.06120435148477554,
-0.023059073835611343,
0.014879160560667515,
0.010875103995203972,
0.02423783391714096,
-0.034663908183574677,
-0.1137700080871582,
0.08288611471652985,
-0.010507120750844479,
0.037007201462984085,
-0.0018880084389820695,
0.10405687242746353,
-0.038749851286411285,
0.01428925059735775,
0.02691400982439518,
0.026880431920289993,
0.1122017651796341,
0.009792005643248558,
0.058569930493831635,
0.0657917782664299,
0.07702776044607162,
0.08182892948389053,
0.07764802128076553,
-0.03190828859806061,
-0.02777000144124031,
0.025885893031954765,
-0.06795515865087509,
-0.0742163211107254,
0.04439171031117439,
0.1481284201145172,
-0.04168160632252693,
-0.058832403272390366,
0.0054725054651498795,
0.011264648288488388,
0.17023766040802002,
0.130621537566185,
-0.21375654637813568,
-0.11124033480882645,
-0.01558460108935833,
-0.062189362943172455,
-0.002816982101649046,
0.032170090824365616,
0.2076949030160904,
-0.12697799503803253,
0.033567819744348526,
0.0060704415664076805,
0.10501094907522202,
0.014599247835576534,
0.02489832602441311,
-0.08503834903240204,
0.03514431044459343,
-0.0008673053234815598,
0.09537075459957123,
-0.2511741816997528,
0.1893116533756256,
0.00028045690851286054,
0.16453804075717926,
-0.05303904414176941,
-0.000161831863806583,
-0.01767887733876705,
0.05944313481450081,
0.1417938619852066,
0.021160544827580452,
-0.024939801543951035,
-0.07349932938814163,
-0.07703616470098495,
0.04407425969839096,
-0.02455214224755764,
0.03849489986896515,
0.03655650466680527,
0.024561652913689613,
0.015674810856580734,
-0.0077606323175132275,
-0.07850810885429382,
-0.1406906247138977,
-0.01733209565281868,
-0.014193147420883179,
0.16219784319400787,
0.10448842495679855,
-0.029082445427775383,
-0.08770101517438889,
-0.11878052353858948,
0.10525866597890854,
-0.13043706119060516,
-0.08373961597681046,
-0.07476098090410233,
0.015434300526976585,
0.07687364518642426,
-0.06079544126987457,
-0.01770588755607605,
0.10405932366847992,
0.1373867690563202,
-0.060348376631736755,
-0.005049673840403557,
0.051889918744564056,
-0.10702476650476456,
-0.10313782095909119,
0.00021613930584862828,
0.21175388991832733,
0.09214448928833008,
0.0666717141866684,
0.06928703933954239,
0.013161585666239262,
0.0014818753115832806,
-0.04462774097919464,
0.03361550718545914,
0.08723202347755432,
-0.055224381387233734,
0.0024207530077546835,
0.02849196456372738,
-0.15838922560214996,
-0.0870160311460495,
-0.057061538100242615,
0.17855553328990936,
0.10234842449426651,
-0.05935794860124588,
0.1664905846118927,
0.22814764082431793,
-0.08111841976642609,
-0.24118672311306,
0.00040606819675303996,
0.12877368927001953,
0.14349375665187836,
-0.010762709192931652,
-0.15626434981822968,
0.0619218535721302,
0.014852190390229225,
-0.032571449875831604,
-0.08291296660900116,
-0.26947471499443054,
-0.15533235669136047,
0.10358002036809921,
-0.02685326151549816,
0.1383783519268036,
0.023735444992780685,
-0.02967502921819687,
-0.028271472081542015,
-0.012112187221646309,
-0.007917330600321293,
-0.13097812235355377,
0.12411026656627655,
0.03363877162337303,
0.06630843877792358,
0.04723498597741127,
-0.049596838653087616,
0.0849185660481453,
0.09128597378730774,
-0.02088462933897972,
-0.016405386850237846,
0.1161547303199768,
0.03280258551239967,
-0.001508615561760962,
0.18088044226169586,
-0.1247950941324234,
0.0247438233345747,
-0.10661023110151291,
-0.1014569103717804,
-0.08258771151304245,
0.09803879261016846,
0.014988973736763,
-0.046641308814287186,
0.028393689543008804,
-0.02372717298567295,
0.02300698310136795,
-0.0007016687304712832,
-0.03286005184054375,
-0.16734541952610016,
0.04144057258963585,
0.17901693284511566,
0.190363347530365,
-0.03556256368756294,
-0.0573396198451519,
0.0036006225273013115,
-0.0319780558347702,
0.12077432870864868,
-0.08054126799106598,
0.03099617175757885,
0.06795200705528259,
0.03523116558790207,
0.11774668097496033,
-0.009687148965895176,
-0.10024510324001312,
0.0742429867386818,
0.0399361327290535,
-0.05884106457233429,
-0.08038561791181564,
-0.03814777731895447,
-0.09707777947187424,
-0.03503871336579323,
0.03436117619276047,
0.12035038322210312,
-0.10568877309560776,
-0.00006984526407904923,
-0.03620113804936409,
0.04567255452275276,
-0.12833230197429657,
0.2542356252670288,
0.043682098388671875,
0.09111680835485458,
-0.11321405321359634,
0.058539435267448425,
-0.024514874443411827,
-0.017414573580026627,
0.06955955922603607,
-0.021155590191483498,
-0.08794432133436203,
-0.044990174472332,
0.018206872045993805,
0.07777151465415955,
0.04184233024716377,
-0.1433960497379303,
-0.06500369310379028,
-0.10705069452524185,
0.013098285533487797,
0.031954750418663025,
0.05285979062318802,
0.0194440595805645,
-0.08845096826553345,
-0.07341773808002472,
-0.10730771720409393,
0.05812970921397209,
0.08534044772386551,
-0.02864304929971695,
-0.09307163953781128,
0.21785730123519897,
0.04209364950656891,
0.020992686972022057,
-0.04553419351577759,
-0.04085250571370125,
0.0039014483336359262,
0.07934629917144775,
-0.07850778847932816,
0.004147831816226244,
-0.03032604046165943,
0.017731718719005585,
-0.02787019871175289,
-0.0693286582827568,
-0.0074441032484173775,
0.10212291777133942,
-0.07993638515472412,
0.061926644295454025,
-0.044488418847322464,
0.0430697537958622,
-0.0921834260225296,
0.006846628617495298,
-0.007048608269542456,
-0.03503984957933426,
0.06626062095165253,
0.10367055237293243,
-0.09471995383501053,
0.14389361441135406,
-0.20613409578800201,
-0.040071628987789154,
0.08711137622594833,
0.04120178148150444,
-0.0326358824968338,
-0.054769497364759445,
0.02157813124358654,
0.09772810339927673,
0.04778283089399338,
-0.010976041667163372,
0.05575378239154816,
-0.04356446489691734,
-0.03450462594628334,
-0.05700410157442093,
0.005933245178312063,
-0.03942008689045906,
0.07932069897651672,
0.06559731811285019,
0.15058238804340363,
0.16328322887420654,
-0.11494658142328262,
0.0797492042183876,
-0.1385994553565979,
0.01223425380885601,
-0.03539666905999184,
-0.032608941197395325,
-0.164026141166687,
-0.050073541700839996,
0.075843445956707,
-0.05894704908132553,
0.09613033384084702,
0.030553733929991722,
0.03298871964216232,
-0.019583838060498238,
-0.0660831555724144,
0.03260781988501549,
-0.004839912988245487,
0.24803154170513153,
0.03164658695459366,
0.03051009774208069,
-0.017874332144856453,
-0.0013288763584569097,
0.03737788647413254,
0.10747220367193222,
0.05758362635970116,
0.1204335168004036,
0.01589120738208294,
0.11429739743471146,
0.07490045577287674,
-0.03833192586898804,
-0.07217026501893997,
0.007348765153437853,
-0.09923727810382843,
0.031331803649663925,
-0.052789248526096344,
0.11894416809082031,
0.15671753883361816,
-0.12201704829931259,
0.050531696528196335,
0.03578022122383118,
-0.10586301982402802,
-0.17441043257713318,
-0.17119015753269196,
-0.08547879010438919,
-0.0920054242014885,
0.033525869250297546,
-0.10251474380493164,
0.02681058831512928,
0.016409175470471382,
0.05530982092022896,
-0.028251999989151955,
0.148219034075737,
-0.005239277612417936,
-0.10614795237779617,
0.08710632473230362,
-0.09584592282772064,
0.012936181388795376,
-0.04399929195642471,
0.025165949016809464,
0.19888727366924286,
0.023615308105945587,
0.07177908718585968,
0.006562486290931702,
-0.023877689614892006,
0.016092870384454727,
-0.06536052376031876,
-0.05738462507724762,
-0.016071747988462448,
0.004897445440292358,
0.10259320586919785,
0.10592544078826904,
0.1381714940071106,
-0.08739568293094635,
0.014926805160939693,
0.13275323808193207,
-0.040723856538534164,
-0.14407290518283844,
-0.16094428300857544,
0.1264200359582901,
0.03759106621146202,
0.052954353392124176,
-0.007459146436303854,
-0.04726275056600571,
0.017222454771399498,
0.207274928689003,
0.21943673491477966,
0.07102585583925247,
0.021670054644346237,
-0.05415090173482895,
-0.016978606581687927,
-0.02976962924003601,
0.07776752859354019,
0.047689225524663925,
0.14690761268138885,
-0.00799337588250637,
0.059671591967344284,
-0.0627199113368988,
-0.08377228677272797,
-0.0068408045917749405,
0.04444966837763786,
-0.0853743627667427,
-0.0885901004076004,
-0.006685018539428711,
0.1521579474210739,
-0.026476750150322914,
-0.11800572276115417,
-0.10755894333124161,
-0.028123049065470695,
-0.09994541853666306,
-0.000797871733084321,
0.04049893468618393,
0.11606824398040771,
0.0113188112154603,
-0.0559375025331974,
0.06547371298074722,
0.09903373569250107,
-0.013662684708833694,
-0.046184297651052475,
-0.05407540500164032,
0.02787916548550129,
-0.060765590518713,
0.014204154722392559,
0.007171113044023514,
0.16014821827411652,
0.014715561643242836,
0.08331809192895889,
-0.023951413109898567,
0.1683340221643448,
-0.02162310667335987,
-0.058780230581760406,
0.04899658262729645,
0.16985730826854706,
-0.04632841423153877,
0.13029037415981293,
0.008092780597507954,
-0.1345101296901703,
0.026028793305158615,
-0.12358077615499496,
-0.02221859060227871,
-0.07970690727233887,
0.07705877721309662,
-0.037628889083862305,
0.1017235741019249,
0.09407737106084824,
-0.06301025301218033,
-0.04287301003932953,
-0.07802947610616684,
0.06785678118467331,
0.011417032219469547,
-0.05106659606099129,
-0.021016554906964302,
-0.23897822201251984,
-0.010893693193793297,
-0.06078362092375755,
-0.0084082568064332,
-0.166608065366745,
0.0006273089675232768,
-0.03304430469870567,
-0.08413341641426086,
0.01480211690068245,
0.05926201120018959,
0.09124504029750824,
0.004645723849534988,
-0.002414221875369549,
0.003237250493839383,
0.07472924143075943,
0.11228267103433609,
-0.17917516827583313,
-0.1257753074169159
] |
null | null |
transformers
|
# Model Trained Using AutoNLP
- Problem type: Multi-class Classification
- Model ID: 530615016
- CO2 Emissions (in grams): 2.2247356264808964
## Validation Metrics
- Loss: 0.7859578132629395
- Accuracy: 0.676854818831649
- Macro F1: 0.3297126297995653
- Micro F1: 0.676854818831649
- Weighted F1: 0.6429522696884535
- Macro Precision: 0.33152557743856437
- Micro Precision: 0.676854818831649
- Weighted Precision: 0.6276125515413322
- Macro Recall: 0.33784302289888885
- Micro Recall: 0.676854818831649
- Weighted Recall: 0.676854818831649
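For context on the three averaging schemes: macro averages the per-class F1 scores equally, micro pools true/false positives across all classes (which is why micro F1 equals accuracy here), and weighted averages per-class F1 by class support. A hedged scikit-learn sketch with made-up labels, not this model's data:

```python
from sklearn.metrics import f1_score

y_true = [0, 0, 1, 2, 2, 2]   # hypothetical labels, purely for illustration
y_pred = [0, 1, 1, 2, 2, 0]

print(f1_score(y_true, y_pred, average="macro"))     # unweighted mean of per-class F1
print(f1_score(y_true, y_pred, average="micro"))     # global counts; equals accuracy
print(f1_score(y_true, y_pred, average="weighted"))  # per-class F1 weighted by support
```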
## Usage
You can use cURL to access this model:
```bash
$ curl -X POST -H "Authorization: Bearer YOUR_API_KEY" -H "Content-Type: application/json" -d '{"inputs": "I love AutoNLP"}' https://api-inference.huggingface.co/models/bitmorse/autonlp-ks-530615016
```
Or Python API:
```python
from transformers import AutoModelForSequenceClassification, AutoTokenizer
model = AutoModelForSequenceClassification.from_pretrained("bitmorse/autonlp-ks-530615016", use_auth_token=True)
tokenizer = AutoTokenizer.from_pretrained("bitmorse/autonlp-ks-530615016", use_auth_token=True)
inputs = tokenizer("I love AutoNLP", return_tensors="pt")
outputs = model(**inputs)
```
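The `outputs` above contain raw logits. A minimal sketch of turning them into a predicted label, assuming the model config's `id2label` mapping is populated (it may only hold generic `LABEL_i` names):

```python
import torch

probs = torch.softmax(outputs.logits, dim=-1)
pred_id = int(torch.argmax(probs, dim=-1))
print(model.config.id2label[pred_id], float(probs[0, pred_id]))
```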
|
{"language": "en", "tags": "autonlp", "datasets": ["bitmorse/autonlp-data-ks"], "widget": [{"text": "I love AutoNLP \ud83e\udd17"}], "co2_eq_emissions": 2.2247356264808964}
|
text-classification
|
bitmorse/autonlp-ks-530615016
|
[
"transformers",
"pytorch",
"distilbert",
"text-classification",
"autonlp",
"en",
"dataset:bitmorse/autonlp-data-ks",
"co2_eq_emissions",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #distilbert #text-classification #autonlp #en #dataset-bitmorse/autonlp-data-ks #co2_eq_emissions #autotrain_compatible #endpoints_compatible #region-us
|
# Model Trained Using AutoNLP
- Problem type: Multi-class Classification
- Model ID: 530615016
- CO2 Emissions (in grams): 2.2247356264808964
## Validation Metrics
- Loss: 0.7859578132629395
- Accuracy: 0.676854818831649
- Macro F1: 0.3297126297995653
- Micro F1: 0.676854818831649
- Weighted F1: 0.6429522696884535
- Macro Precision: 0.33152557743856437
- Micro Precision: 0.676854818831649
- Weighted Precision: 0.6276125515413322
- Macro Recall: 0.33784302289888885
- Micro Recall: 0.676854818831649
- Weighted Recall: 0.676854818831649
## Usage
You can use cURL to access this model:
Or Python API:
|
[
"# Model Trained Using AutoNLP\n\n- Problem type: Multi-class Classification\n- Model ID: 530615016\n- CO2 Emissions (in grams): 2.2247356264808964",
"## Validation Metrics\n\n- Loss: 0.7859578132629395\n- Accuracy: 0.676854818831649\n- Macro F1: 0.3297126297995653\n- Micro F1: 0.676854818831649\n- Weighted F1: 0.6429522696884535\n- Macro Precision: 0.33152557743856437\n- Micro Precision: 0.676854818831649\n- Weighted Precision: 0.6276125515413322\n- Macro Recall: 0.33784302289888885\n- Micro Recall: 0.676854818831649\n- Weighted Recall: 0.676854818831649",
"## Usage\n\nYou can use cURL to access this model:\n\n\n\nOr Python API:"
] |
[
"TAGS\n#transformers #pytorch #distilbert #text-classification #autonlp #en #dataset-bitmorse/autonlp-data-ks #co2_eq_emissions #autotrain_compatible #endpoints_compatible #region-us \n",
"# Model Trained Using AutoNLP\n\n- Problem type: Multi-class Classification\n- Model ID: 530615016\n- CO2 Emissions (in grams): 2.2247356264808964",
"## Validation Metrics\n\n- Loss: 0.7859578132629395\n- Accuracy: 0.676854818831649\n- Macro F1: 0.3297126297995653\n- Micro F1: 0.676854818831649\n- Weighted F1: 0.6429522696884535\n- Macro Precision: 0.33152557743856437\n- Micro Precision: 0.676854818831649\n- Weighted Precision: 0.6276125515413322\n- Macro Recall: 0.33784302289888885\n- Micro Recall: 0.676854818831649\n- Weighted Recall: 0.676854818831649",
"## Usage\n\nYou can use cURL to access this model:\n\n\n\nOr Python API:"
] |
[
68,
43,
145,
17
] |
[
"passage: TAGS\n#transformers #pytorch #distilbert #text-classification #autonlp #en #dataset-bitmorse/autonlp-data-ks #co2_eq_emissions #autotrain_compatible #endpoints_compatible #region-us \n# Model Trained Using AutoNLP\n\n- Problem type: Multi-class Classification\n- Model ID: 530615016\n- CO2 Emissions (in grams): 2.2247356264808964## Validation Metrics\n\n- Loss: 0.7859578132629395\n- Accuracy: 0.676854818831649\n- Macro F1: 0.3297126297995653\n- Micro F1: 0.676854818831649\n- Weighted F1: 0.6429522696884535\n- Macro Precision: 0.33152557743856437\n- Micro Precision: 0.676854818831649\n- Weighted Precision: 0.6276125515413322\n- Macro Recall: 0.33784302289888885\n- Micro Recall: 0.676854818831649\n- Weighted Recall: 0.676854818831649## Usage\n\nYou can use cURL to access this model:\n\n\n\nOr Python API:"
] |
[
-0.08854322135448456,
0.17336268723011017,
-0.0041908081620931625,
0.07902895659208298,
0.08892913907766342,
0.05166678503155708,
0.07989572733640671,
0.137925922870636,
0.03323274105787277,
0.1475956290960312,
0.11911827325820923,
0.17107145488262177,
0.06359871476888657,
0.18564507365226746,
-0.0861060842871666,
-0.13038060069084167,
0.03530771657824516,
-0.009172946214675903,
0.055341269820928574,
0.0713689997792244,
0.0847700759768486,
-0.06186734884977341,
0.11733318120241165,
-0.012930815108120441,
-0.08922033756971359,
0.021625494584441185,
0.074391670525074,
-0.08203987777233124,
0.06318380683660507,
0.10400770604610443,
0.11143515259027481,
-0.014622840099036694,
0.0743485540151596,
-0.150966614484787,
-0.02398049458861351,
0.0555831603705883,
-0.03263416513800621,
0.08307236433029175,
0.15363164246082306,
0.002519124886021018,
0.046004053205251694,
-0.09493617713451385,
0.08856143057346344,
0.08230426162481308,
-0.0895056277513504,
-0.08911720663309097,
-0.11729804426431656,
0.05021843686699867,
0.10863515734672546,
0.06938358396291733,
-0.005406711250543594,
0.17278286814689636,
-0.04837700352072716,
0.08320876210927963,
0.014997171238064766,
-0.23187124729156494,
-0.02890072390437126,
0.1542617529630661,
-0.01716676354408264,
-0.03344763070344925,
-0.012050851248204708,
-0.005105938296765089,
0.05480936914682388,
0.011071907356381416,
0.0030706548132002354,
-0.057919032871723175,
-0.051984354853630066,
-0.026909632608294487,
-0.12276475131511688,
-0.057712193578481674,
0.1820918321609497,
0.031885772943496704,
-0.07745273411273956,
-0.08785136044025421,
-0.06820923089981079,
-0.11497233808040619,
-0.04266982525587082,
-0.037266798317432404,
0.005200835410505533,
-0.02732609212398529,
-0.005732336547225714,
0.0828685462474823,
-0.04071516543626785,
-0.03404252976179123,
-0.1286628544330597,
-0.023588988929986954,
0.013158123008906841,
0.05697736144065857,
0.01585306227207184,
0.009553076699376106,
-0.07826124876737595,
-0.040127068758010864,
-0.01030639186501503,
0.005806010216474533,
-0.10329199582338333,
-0.06369897723197937,
0.013050289824604988,
0.09875456988811493,
0.052925433963537216,
0.2112841159105301,
-0.008876381441950798,
0.10884304344654083,
0.05752826854586601,
-0.019812684506177902,
-0.02944747731089592,
0.08193929493427277,
-0.0971616581082344,
-0.13321928679943085,
0.04371187463402748,
-0.037978533655405045,
0.018606586381793022,
-0.04603816196322441,
-0.06350143253803253,
-0.04997309297323227,
0.040101855993270874,
0.05440879985690117,
0.03766777366399765,
0.008485076949000359,
-0.07864238321781158,
-0.04784751310944557,
0.06940079480409622,
-0.09008615463972092,
0.06080339103937149,
0.004687800072133541,
-0.10632660984992981,
0.08867400884628296,
0.05533834174275398,
0.011311275884509087,
-0.10601120442152023,
0.029759133234620094,
-0.1252005249261856,
-0.028315572068095207,
-0.07486999779939651,
-0.12449862062931061,
0.06401404738426208,
0.01426562201231718,
-0.004809251055121422,
-0.12366051971912384,
-0.14840994775295258,
-0.051337990909814835,
0.0031485871877521276,
-0.09801509976387024,
-0.06828361749649048,
-0.00036809788434766233,
-0.031240295618772507,
0.06164761260151863,
0.009690533392131329,
0.03660212457180023,
-0.03484252840280533,
-0.00004904693196294829,
0.059840403497219086,
0.06826388090848923,
-0.055095262825489044,
0.011121009476482868,
-0.03268927335739136,
0.034452639520168304,
-0.10833526402711868,
0.056289173662662506,
-0.09452996402978897,
0.005021862220019102,
-0.17310816049575806,
-0.05107051879167557,
0.10111313313245773,
-0.036653585731983185,
0.06219318509101868,
0.09436500072479248,
-0.10340653359889984,
0.012954761274158955,
0.1078760176897049,
-0.046495210379362106,
-0.09078603237867355,
0.08421874791383743,
0.008909264579415321,
0.011493127793073654,
0.008846580050885677,
0.0834510549902916,
0.13134166598320007,
-0.12252230942249298,
-0.08875171095132828,
0.02275008149445057,
0.03286992758512497,
-0.045635323971509933,
0.0848253071308136,
-0.05034342035651207,
-0.11202102899551392,
-0.005092437379062176,
0.09954417496919632,
-0.019281242042779922,
-0.04597456008195877,
-0.06838082522153854,
-0.030735060572624207,
-0.024372516199946404,
0.013260595500469208,
-0.041581396013498306,
0.005594297312200069,
-0.042566634714603424,
-0.07253491133451462,
0.0236112829297781,
0.16546015441417694,
-0.01886957511305809,
-0.02989398129284382,
-0.17303575575351715,
0.07269052416086197,
-0.09240755438804626,
-0.04188414663076401,
-0.1879577338695526,
-0.06702343374490738,
0.025014683604240417,
-0.11597182601690292,
0.0067872111685574055,
-0.02104213275015354,
0.068449966609478,
0.051839668303728104,
0.045917585492134094,
0.029971757903695107,
0.08782393485307693,
-0.01966002956032753,
-0.1051144227385521,
-0.061902280896902084,
-0.0368618480861187,
0.0067241801880300045,
0.26294228434562683,
-0.19300930202007294,
-0.0004549211880657822,
0.043326493352651596,
0.05993211269378662,
-0.01689242199063301,
-0.04251956194639206,
-0.045897454023361206,
0.06352328509092331,
-0.007794066797941923,
-0.04570934921503067,
0.044486455619335175,
-0.033559996634721756,
-0.0397019125521183,
-0.03076785057783127,
-0.25815168023109436,
0.1391270011663437,
0.12767848372459412,
0.01859300583600998,
-0.08310720324516296,
-0.046882495284080505,
0.03847349435091019,
-0.0519825704395771,
-0.001174065051600337,
-0.00385801843367517,
0.10151515156030655,
0.02793378382921219,
0.09685744345188141,
-0.04859571158885956,
-0.02525511011481285,
0.042400676757097244,
-0.048017408698797226,
-0.023519713431596756,
0.17953534424304962,
0.08709553629159927,
-0.10324137657880783,
0.07418304681777954,
0.0023147251922637224,
-0.0769481360912323,
0.011801320128142834,
0.025147471576929092,
-0.046546194702386856,
-0.07984893023967743,
-0.01699819602072239,
0.05983821675181389,
0.036855895072221756,
-0.0027211569249629974,
0.09192592650651932,
0.08009270578622818,
-0.01281592808663845,
0.021320611238479614,
-0.09672300517559052,
0.02311624400317669,
0.019923251122236252,
-0.03217076137661934,
-0.04011233150959015,
0.016846291720867157,
0.03552569821476936,
0.10444984585046768,
-0.007305064704269171,
-0.028914369642734528,
0.011390131898224354,
-0.0025372595991939306,
-0.11862161755561829,
0.23904864490032196,
-0.14356324076652527,
-0.1763627976179123,
-0.16703200340270996,
-0.18466328084468842,
-0.056526731699705124,
-0.05280783772468567,
-0.0009414631640538573,
-0.050038114190101624,
-0.12724465131759644,
-0.06673070788383484,
-0.04607519507408142,
-0.03680175170302391,
-0.050606608390808105,
0.006518761161714792,
-0.014378776773810387,
0.07791359722614288,
-0.12950173020362854,
-0.0245687086135149,
0.033101391047239304,
-0.0984853059053421,
0.07497348636388779,
0.0022603042889386415,
0.07571947574615479,
0.17342540621757507,
-0.027134159579873085,
0.007239676546305418,
0.009811779484152794,
0.2638256847858429,
0.017072543501853943,
0.010751425288617611,
0.2008659541606903,
0.06921300292015076,
0.0737440437078476,
0.10338830202817917,
0.04183271527290344,
-0.07876020669937134,
-0.012311452068388462,
0.06171010434627533,
-0.011960387229919434,
-0.22047513723373413,
-0.19380225241184235,
0.0018476025434210896,
0.04188935458660126,
0.13891828060150146,
0.023623652756214142,
0.09677553176879883,
0.10183125734329224,
0.020482005551457405,
0.08688582479953766,
-0.06597067415714264,
0.07152803987264633,
0.15162967145442963,
0.03175293281674385,
0.12907515466213226,
-0.05642963945865631,
0.023408522829413414,
0.11837869882583618,
0.008970086462795734,
0.08284007757902145,
0.09301401674747467,
0.10956011712551117,
-0.011467182077467442,
0.11220023781061172,
0.0382782518863678,
0.09880851209163666,
0.05139632895588875,
-0.010121094062924385,
0.030448365956544876,
-0.0768207460641861,
-0.08865699172019958,
0.02243857830762863,
0.04399383068084717,
0.015214215964078903,
-0.09602164477109909,
0.04369647055864334,
0.00007651527266716585,
0.06472452729940414,
0.0985611230134964,
-0.43047910928726196,
-0.03925762325525284,
0.025662288069725037,
-0.035325393080711365,
-0.11544963717460632,
-0.024401778355240822,
-0.011855144053697586,
-0.1447831094264984,
0.03626803308725357,
-0.00550079345703125,
0.11615367233753204,
-0.06964348256587982,
-0.0364004410803318,
-0.05140304192900658,
0.07117288559675217,
-0.004941773600876331,
0.06412912905216217,
-0.14654146134853363,
0.1471327394247055,
0.047747716307640076,
0.03752422705292702,
-0.08269969373941422,
0.03513406589627266,
0.009195452556014061,
-0.016872350126504898,
0.13765905797481537,
0.023764370009303093,
-0.16385455429553986,
-0.30009743571281433,
-0.1493786871433258,
0.009082909673452377,
0.0062546515837311745,
0.016211766749620438,
0.08991458266973495,
-0.036836665123701096,
-0.00986788421869278,
-0.01850605010986328,
-0.02495124563574791,
-0.09988634288311005,
-0.09657029062509537,
0.04283823445439339,
0.10030679404735565,
-0.04252421483397484,
-0.034444309771060944,
-0.000505874864757061,
0.011891412548720837,
0.11922696977853775,
-0.13827797770500183,
-0.048313792794942856,
-0.1434168964624405,
-0.02051861211657524,
0.1540115475654602,
-0.112047478556633,
0.0833854004740715,
-0.020492451265454292,
0.06821095943450928,
-0.0120167788118124,
-0.11279388517141342,
0.08368003368377686,
-0.07510102540254593,
-0.04882930591702461,
-0.011030253022909164,
0.044678088277578354,
-0.0021035003010183573,
0.0667494535446167,
0.059311844408512115,
0.01294319611042738,
-0.04572124779224396,
-0.12725304067134857,
-0.013000497594475746,
0.02150788903236389,
0.13605298101902008,
0.040182434022426605,
-0.015113040804862976,
-0.04799897223711014,
-0.044551484286785126,
0.0599549263715744,
0.1329585760831833,
0.31237325072288513,
-0.0630999431014061,
-0.00396654661744833,
0.08663421869277954,
-0.04001954570412636,
-0.20642682909965515,
-0.05022464692592621,
0.028693271800875664,
0.006607819814234972,
-0.06356967240571976,
-0.11480791121721268,
0.1442638635635376,
0.19606897234916687,
-0.030734984204173088,
0.009248785674571991,
-0.26755067706108093,
-0.12283330410718918,
0.16365478932857513,
0.0643841028213501,
0.027691619470715523,
-0.1646466702222824,
-0.05250733345746994,
-0.12244097888469696,
-0.13450324535369873,
0.16715699434280396,
-0.04660825803875923,
0.05755072459578514,
-0.026490559801459312,
0.11213260143995285,
0.030877064913511276,
-0.053047243505716324,
0.2093043178319931,
-0.009395783767104149,
0.004182000644505024,
-0.04367019981145859,
-0.03069307841360569,
-0.0075494893826544285,
-0.07508264482021332,
0.09781228750944138,
0.026549294590950012,
0.05666559934616089,
-0.24757902324199677,
0.0018769428133964539,
0.0051599363796412945,
0.07091491669416428,
-0.0535103939473629,
-0.03747835382819176,
-0.021488839760422707,
0.03601395711302757,
-0.010495686903595924,
-0.0249205119907856,
-0.02495361864566803,
-0.024658244103193283,
0.05156771466135979,
0.216690331697464,
0.09194312989711761,
-0.010809758678078651,
-0.08895692229270935,
0.05769072473049164,
-0.06188983842730522,
0.03635145351290703,
-0.11351517587900162,
0.053862717002630234,
0.11239112913608551,
0.01802573725581169,
0.08254552632570267,
0.029688246548175812,
-0.05666022375226021,
-0.015374881215393543,
0.059276606887578964,
-0.11484906077384949,
0.04111890122294426,
0.030929500237107277,
0.09011659026145935,
-0.09817370772361755,
-0.07489142566919327,
0.13782227039337158,
0.02597212791442871,
-0.03531764820218086,
0.021033747121691704,
0.013780622743070126,
-0.02598324790596962,
0.2548375129699707,
0.015782633796334267,
0.09909259527921677,
-0.10505110770463943,
0.06541629880666733,
0.10761243849992752,
-0.1099362000823021,
0.01979312114417553,
0.0948026105761528,
-0.0760728195309639,
-0.06490691751241684,
-0.01898963376879692,
0.06714998930692673,
-0.14594033360481262,
-0.0668145939707756,
0.03987240418791771,
-0.0804699957370758,
0.061218276619911194,
0.1693212240934372,
0.07378365844488144,
-0.005597999785095453,
0.008650963194668293,
-0.09205634146928787,
-0.13483594357967377,
0.021032467484474182,
0.07258178293704987,
0.013302384875714779,
-0.0882989689707756,
0.14544303715229034,
-0.024503039196133614,
0.00562164606526494,
-0.007164841517806053,
0.012878569774329662,
-0.20526476204395294,
-0.041676294058561325,
-0.09938274323940277,
0.07110783457756042,
-0.06031062453985214,
0.029745446518063545,
0.009163198061287403,
0.031046928837895393,
-0.0687728300690651,
0.009618593379855156,
-0.059393905103206635,
-0.07308593392372131,
0.009117073379456997,
0.0557669959962368,
-0.08316273987293243,
-0.03258376568555832,
0.08018453419208527,
-0.04138477146625519,
0.04657108336687088,
0.08178159594535828,
0.06662242859601974,
0.004459821153432131,
-0.025937698781490326,
-0.005706758704036474,
0.06511161476373672,
0.04490470886230469,
0.09038253128528595,
-0.19280603528022766,
0.050388894975185394,
-0.011618712916970253,
0.014380641281604767,
0.04206104576587677,
0.09826869517564774,
-0.10939908772706985,
0.014801845885813236,
-0.11539418250322342,
-0.0775478407740593,
-0.10251038521528244,
0.022512508556246758,
0.13512636721134186,
0.01693839766085148,
0.058270882815122604,
-0.05800960958003998,
0.045893680304288864,
-0.16435359418392181,
-0.006835356820374727,
-0.04776933416724205,
-0.01773720607161522,
0.039040543138980865,
-0.009155606850981712,
0.08967909961938858,
-0.024029158055782318,
0.11859031021595001,
-0.04177168011665344,
0.034356847405433655,
0.02211696468293667,
0.05060602352023125,
-0.026824306696653366,
-0.023706795647740364,
0.18454128503799438,
0.1092309057712555,
0.018250875174999237,
0.08301400393247604,
0.08478227257728577,
0.05083496496081352,
0.013349671848118305,
0.03200126439332962,
0.03676740452647209,
-0.09121061861515045,
0.08647274971008301,
0.009313534013926983,
-0.14980299770832062,
-0.02586442232131958,
0.11030455678701401,
-0.08302639424800873,
0.03330886736512184,
-0.03885846212506294,
0.035047344863414764,
0.11338836699724197,
-0.11390617489814758,
0.014057948254048824,
-0.004060190636664629,
-0.07312098890542984,
-0.215865820646286,
-0.09579692035913467,
-0.12580646574497223,
-0.02312954142689705,
-0.03159729391336441,
-0.12132386118173599,
0.016073040664196014,
0.13910791277885437,
0.019948072731494904,
0.023922085762023926,
0.051312271505594254,
-0.21468383073806763,
-0.009441263973712921,
-0.06435690820217133,
0.007081642746925354,
-0.004942819010466337,
-0.02929195947945118,
-0.040966980159282684,
0.013448123820126057,
0.015600443817675114,
0.08959175646305084,
0.021855516359210014,
0.020357470959424973,
0.1112392470240593,
-0.007558448240160942,
-0.07700403779745102,
-0.045878008008003235,
0.022021014243364334,
0.014286783523857594,
0.15439896285533905,
0.020733309909701347,
0.004955719690769911,
-0.028944294899702072,
0.15829989314079285,
-0.08241549879312515,
0.01106157898902893,
-0.11817245930433273,
0.24619700014591217,
-0.012373512610793114,
0.07607360929250717,
0.023363953456282616,
-0.007567306514829397,
-0.02090051956474781,
0.18711449205875397,
0.1149168387055397,
-0.016085777431726456,
-0.02696160599589348,
0.03945408761501312,
-0.00991724245250225,
-0.04682346433401108,
0.10001350194215775,
0.05078931525349617,
0.16919173300266266,
-0.07283642143011093,
0.04512396454811096,
0.018154198303818703,
-0.00933043658733368,
-0.11148484796285629,
0.057274892926216125,
0.00425219489261508,
-0.007049561943858862,
0.03662244230508804,
0.07818936556577682,
-0.07700201869010925,
0.06168588995933533,
0.08577041327953339,
-0.07719610631465912,
-0.14100989699363708,
0.03567628934979439,
-0.07991309463977814,
-0.043958477675914764,
0.10343890637159348,
-0.054815873503685,
-0.036392632871866226,
0.06370456516742706,
-0.00805311743170023,
-0.19797328114509583,
-0.08153028786182404,
0.003169434145092964,
0.12592846155166626,
0.28420665860176086,
0.03445953503251076,
0.1374373435974121,
0.18037688732147217,
-0.015505893155932426,
-0.15373121201992035,
0.07246329635381699,
0.03921830281615257,
-0.12592701613903046,
0.11058223247528076,
0.047156933695077896,
-0.048937879502773285,
0.158254012465477,
0.04962216690182686,
-0.15421095490455627,
-0.017638063058257103,
0.040062855929136276,
0.07549376785755157,
-0.05084872618317604,
-0.011019784025847912,
-0.11026693135499954,
0.11929358541965485,
0.130706325173378,
-0.05613669753074646,
-0.0116577772423625,
-0.029681622982025146,
0.0832442119717598,
-0.004567569121718407,
-0.007768084295094013,
-0.029868675395846367,
-0.11454213410615921,
0.0764339491724968,
-0.18779870867729187,
0.025503462180495262,
-0.25898095965385437,
-0.021182091906666756,
-0.020247632637619972,
-0.03999646380543709,
-0.07293902337551117,
0.1074526458978653,
0.02367597073316574,
-0.010480367578566074,
-0.05752724036574364,
-0.15185360610485077,
-0.003096611239016056,
0.136738121509552,
-0.09490884095430374,
-0.13023731112480164
] |
null | null |
transformers
|
# kickstarter-distilbert-model
This model was trained from scratch on an unknown dataset.
It achieves the following results on the evaluation set:
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- optimizer: None
- training_precision: float32
### Training results
### Framework versions
- Transformers 4.16.2
- TensorFlow 2.7.0
- Datasets 1.18.2
- Tokenizers 0.11.0
|
{"tags": ["generated_from_keras_callback"], "model-index": [{"name": "kickstarter-distilbert-model", "results": []}]}
|
feature-extraction
|
bitmorse/kickstarter-distilbert-model
|
[
"transformers",
"pytorch",
"tf",
"distilbert",
"feature-extraction",
"generated_from_keras_callback",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tf #distilbert #feature-extraction #generated_from_keras_callback #endpoints_compatible #region-us
|
# kickstarter-distilbert-model
This model was trained from scratch on an unknown dataset.
It achieves the following results on the evaluation set:
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- optimizer: None
- training_precision: float32
### Training results
### Framework versions
- Transformers 4.16.2
- TensorFlow 2.7.0
- Datasets 1.18.2
- Tokenizers 0.11.0
|
[
"# kickstarter-distilbert-model\n\nThis model was trained from scratch on an unknown dataset.\nIt achieves the following results on the evaluation set:",
"## Model description\n\nMore information needed",
"## Intended uses & limitations\n\nMore information needed",
"## Training and evaluation data\n\nMore information needed",
"## Training procedure",
"### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- optimizer: None\n- training_precision: float32",
"### Training results",
"### Framework versions\n\n- Transformers 4.16.2\n- TensorFlow 2.7.0\n- Datasets 1.18.2\n- Tokenizers 0.11.0"
] |
[
"TAGS\n#transformers #pytorch #tf #distilbert #feature-extraction #generated_from_keras_callback #endpoints_compatible #region-us \n",
"# kickstarter-distilbert-model\n\nThis model was trained from scratch on an unknown dataset.\nIt achieves the following results on the evaluation set:",
"## Model description\n\nMore information needed",
"## Intended uses & limitations\n\nMore information needed",
"## Training and evaluation data\n\nMore information needed",
"## Training procedure",
"### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- optimizer: None\n- training_precision: float32",
"### Training results",
"### Framework versions\n\n- Transformers 4.16.2\n- TensorFlow 2.7.0\n- Datasets 1.18.2\n- Tokenizers 0.11.0"
] |
[
45,
36,
6,
12,
8,
3,
33,
4,
31
] |
[
"passage: TAGS\n#transformers #pytorch #tf #distilbert #feature-extraction #generated_from_keras_callback #endpoints_compatible #region-us \n# kickstarter-distilbert-model\n\nThis model was trained from scratch on an unknown dataset.\nIt achieves the following results on the evaluation set:## Model description\n\nMore information needed## Intended uses & limitations\n\nMore information needed## Training and evaluation data\n\nMore information needed## Training procedure### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- optimizer: None\n- training_precision: float32### Training results### Framework versions\n\n- Transformers 4.16.2\n- TensorFlow 2.7.0\n- Datasets 1.18.2\n- Tokenizers 0.11.0"
] |
[
-0.10274045169353485,
0.07483311742544174,
-0.002235176507383585,
0.09052950888872147,
0.16903987526893616,
0.019787034019827843,
0.07376401126384735,
0.10059912502765656,
-0.1029803678393364,
0.018250709399580956,
0.08954600244760513,
0.12589426338672638,
0.019111499190330505,
0.1316813975572586,
-0.007751100230962038,
-0.274394154548645,
0.015298223122954369,
0.030044903978705406,
-0.07748471200466156,
0.11287403851747513,
0.09786611050367355,
-0.11944321542978287,
0.05592601001262665,
0.018308447673916817,
-0.21846123039722443,
0.005274755880236626,
0.0009483133326284587,
-0.05231967940926552,
0.1018642708659172,
-0.014900165610015392,
0.149375781416893,
0.026381729170680046,
0.0923016369342804,
-0.07907652854919434,
0.019135721027851105,
0.07866782695055008,
0.017105016857385635,
0.07658080011606216,
0.047212421894073486,
-0.0255295280367136,
0.15784883499145508,
-0.050667740404605865,
0.07279098778963089,
0.013770854100584984,
-0.11502760648727417,
-0.16894154250621796,
-0.06316470354795456,
0.060604292899370193,
0.07218177616596222,
0.10754676908254623,
0.006179082673043013,
0.17192894220352173,
-0.09591048210859299,
0.08889702707529068,
0.17328858375549316,
-0.26372697949409485,
-0.08242295682430267,
0.012413674965500832,
0.05409308895468712,
-0.00784625206142664,
-0.09289968013763428,
-0.002856275998055935,
0.018699701875448227,
0.056296344846487045,
0.08995886892080307,
-0.031163915991783142,
-0.1598123013973236,
-0.018493380397558212,
-0.13806800544261932,
0.0034070515539497137,
0.13182352483272552,
-0.006122232880443335,
-0.06138783320784569,
-0.059588849544525146,
-0.07247648388147354,
-0.07433158159255981,
-0.03552829474210739,
-0.08016109466552734,
0.02657957747578621,
-0.02037365734577179,
-0.06414076685905457,
-0.07693327963352203,
-0.1088908463716507,
-0.07545837759971619,
-0.054961591958999634,
0.16928906738758087,
0.00544962752610445,
0.054873403161764145,
-0.0803733691573143,
0.13568218052387238,
-0.011412277817726135,
-0.10865694284439087,
-0.01181785762310028,
-0.04852459207177162,
-0.06871617585420609,
-0.0629943385720253,
-0.05895199254155159,
-0.10600029677152634,
-0.0005987974000163376,
0.08144377171993256,
-0.019573109224438667,
0.052625104784965515,
0.02746783010661602,
0.045939717441797256,
-0.011569682508707047,
0.1259339600801468,
-0.04587132856249809,
-0.01127011887729168,
0.024117907509207726,
0.024371467530727386,
-0.03643352910876274,
-0.024807607755064964,
-0.10737202316522598,
0.007606409955769777,
0.04357944056391716,
0.0395575650036335,
-0.04327128827571869,
0.0652502179145813,
-0.057579003274440765,
-0.04232000187039375,
-0.06617920845746994,
-0.1013113260269165,
0.012474399991333485,
-0.03562474250793457,
-0.09283449500799179,
0.04238293692469597,
0.09263678640127182,
-0.029667170718312263,
-0.0456334725022316,
0.02661219611763954,
-0.10907476395368576,
0.037078436464071274,
-0.0987396165728569,
-0.08703597635030746,
-0.0011202949099242687,
-0.16023488342761993,
0.021995840594172478,
-0.07297440618276596,
-0.22564280033111572,
-0.0013361333403736353,
0.08786258101463318,
-0.06760893762111664,
0.002882766304537654,
-0.06172383949160576,
-0.06845936179161072,
-0.004115007817745209,
0.01327536255121231,
0.08550391346216202,
-0.0281047485768795,
0.06860587000846863,
0.01860954239964485,
0.06306909769773483,
-0.043241798877716064,
0.03323271870613098,
-0.13227349519729614,
0.04015281796455383,
-0.1563158482313156,
0.11064925789833069,
-0.06550626456737518,
0.05514664947986603,
-0.09123428910970688,
-0.10341623425483704,
-0.061103470623493195,
-0.004960827063769102,
0.10234513133764267,
0.18239547312259674,
-0.21286706626415253,
-0.03758571669459343,
0.14243744313716888,
-0.110360287129879,
-0.09210192412137985,
0.07008785009384155,
-0.07832136005163193,
0.1962418407201767,
0.044177908450365067,
0.13661037385463715,
0.08818602561950684,
-0.14754395186901093,
0.07393496483564377,
0.05624230206012726,
-0.04624465852975845,
-0.018985018134117126,
-0.0011275715660303831,
-0.016330750659108162,
0.0024402018170803785,
0.04814663529396057,
-0.04838939756155014,
0.050450194627046585,
-0.12617227435112,
-0.07022154331207275,
-0.04891299083828926,
-0.09591099619865417,
0.09007959067821503,
0.024417242035269737,
0.0880657434463501,
-0.009619009681046009,
-0.09673239290714264,
0.14004336297512054,
0.06181652843952179,
-0.04547141492366791,
0.006723284255713224,
-0.10001851618289948,
0.05021032691001892,
-0.07114014029502869,
-0.022459041327238083,
-0.23758359253406525,
-0.10004226118326187,
0.001545488485135138,
0.061784230172634125,
0.03614411503076553,
0.06483247876167297,
0.09842433780431747,
0.05513577535748482,
-0.01259671151638031,
-0.013391043059527874,
-0.0711832046508789,
0.02207336761057377,
-0.11021964251995087,
-0.13765572011470795,
-0.07040292024612427,
-0.052801329642534256,
0.08039548248052597,
-0.2018747329711914,
0.009635268710553646,
-0.007840815000236034,
0.13215556740760803,
0.02324453555047512,
-0.04710155725479126,
-0.01695605367422104,
0.04841884598135948,
-0.011721525341272354,
-0.10298938304185867,
0.06986022740602493,
0.033342789858579636,
-0.11112526804208755,
-0.06604810059070587,
-0.05600207671523094,
0.09774257242679596,
0.08555824309587479,
-0.0637473464012146,
-0.11362328380346298,
0.017640799283981323,
-0.07654161006212234,
-0.033923108130693436,
-0.013083016499876976,
0.042389318346977234,
0.1630897969007492,
0.004525333642959595,
0.1176929920911789,
-0.041032738983631134,
-0.0271501075476408,
0.041646264493465424,
-0.028835928067564964,
0.006639501545578241,
0.04852066561579704,
0.03139595314860344,
-0.11281539499759674,
0.07849486172199249,
0.07347521930932999,
-0.05557505413889885,
0.1606442779302597,
-0.046900685876607895,
-0.08441103249788284,
-0.06412298232316971,
-0.009100827388465405,
-0.008090820163488388,
0.13383840024471283,
-0.17591403424739838,
-0.022197218611836433,
0.0276385135948658,
0.008045045658946037,
0.05418189987540245,
-0.16673734784126282,
-0.020842505618929863,
0.02118384651839733,
0.01012720912694931,
-0.024969257414340973,
0.02720201015472412,
-0.007105587515980005,
0.08282017707824707,
0.010403220541775227,
-0.0072991689667105675,
0.07739055156707764,
0.00115871150046587,
-0.05956544354557991,
0.20016935467720032,
-0.09826184809207916,
-0.1391875147819519,
-0.0796588659286499,
0.021451102569699287,
-0.021098574623465538,
0.008710320107638836,
0.008363700471818447,
-0.04272276908159256,
-0.03362487256526947,
-0.04765281826257706,
0.012543659657239914,
-0.0762532502412796,
0.01786934956908226,
-0.0007679449627175927,
0.00023210349900182337,
0.08458451181650162,
-0.11584591120481491,
-0.0021100568119436502,
-0.048494916409254074,
-0.07442477345466614,
0.020059965550899506,
0.003701397217810154,
0.0914679691195488,
0.14516912400722504,
-0.03899257257580757,
0.04501324146986008,
-0.033688776195049286,
0.25919610261917114,
-0.06378524005413055,
-0.00308058550581336,
0.12153377383947372,
-0.008894688449800014,
0.045847710222005844,
0.03991704061627388,
0.03654933720827103,
-0.10248729586601257,
0.03534088656306267,
0.01590588502585888,
-0.06910386681556702,
-0.16983169317245483,
-0.04135045409202576,
-0.06933978199958801,
-0.062212709337472916,
0.04763805866241455,
0.02863326668739319,
0.06083148345351219,
0.08017837256193161,
0.07394633442163467,
0.061045728623867035,
-0.03772546723484993,
0.05839895084500313,
0.03707747906446457,
0.029302135109901428,
0.09319613873958588,
-0.029418494552373886,
-0.05052312836050987,
0.040089476853609085,
-0.06374835222959518,
0.28536316752433777,
-0.018307453021407127,
0.04889596626162529,
0.07876318693161011,
0.19439199566841125,
0.0026144334115087986,
0.13061857223510742,
0.03452932834625244,
-0.03256840631365776,
0.013182293623685837,
-0.05123921111226082,
-0.09314762055873871,
0.01193434838205576,
-0.029672276228666306,
0.04087928682565689,
-0.12096986919641495,
0.0407351553440094,
0.017333852127194405,
0.2540457546710968,
0.012698885053396225,
-0.3168356120586395,
-0.1299232840538025,
-0.028387561440467834,
0.007315444760024548,
-0.0734301507472992,
-0.008675478398799896,
0.0981670618057251,
-0.10871075093746185,
-0.009956669062376022,
-0.08686157315969467,
0.08478948473930359,
0.028330476954579353,
0.012083000503480434,
0.02981964498758316,
0.10373073816299438,
0.0034196549095213413,
0.08722226321697235,
-0.232299342751503,
0.2211420238018036,
0.005086514633148909,
0.15317721664905548,
-0.08395998179912567,
0.008490591309964657,
0.010728111490607262,
0.08147523552179337,
0.18037551641464233,
-0.00345010869204998,
-0.058472320437431335,
-0.17323045432567596,
-0.01369735598564148,
-0.005626859609037638,
0.11033397167921066,
-0.010381126776337624,
0.1066679060459137,
-0.02254459448158741,
0.011614581570029259,
0.061674926429986954,
0.028988290578126907,
-0.1403013914823532,
-0.10353267937898636,
0.009132020175457,
-0.047645650804042816,
-0.044984474778175354,
-0.07351119071245193,
-0.09871164709329605,
0.07753223180770874,
0.15632690489292145,
-0.024811357259750366,
-0.06637810915708542,
-0.16846635937690735,
0.06888189166784286,
0.12332725524902344,
-0.030402034521102905,
0.053237393498420715,
0.011685462668538094,
0.08725766092538834,
0.049605678766965866,
-0.1267484575510025,
0.08263630419969559,
-0.10326259583234787,
-0.1274820864200592,
-0.0435110405087471,
0.10988130420446396,
0.09971415251493454,
0.014962464570999146,
0.015589047223329544,
0.016683103516697884,
-0.006868004333227873,
-0.10286641865968704,
0.01975340209901333,
0.021257152780890465,
0.030439108610153198,
0.0353669635951519,
-0.07510198652744293,
0.06118875369429588,
0.0021151008550077677,
0.05901741981506348,
0.09242073446512222,
0.13599391281604767,
-0.0876271054148674,
0.10143379122018814,
0.040823038667440414,
-0.10605819523334503,
-0.23151245713233948,
0.07280626147985458,
0.09681244194507599,
0.03002915158867836,
0.07109205424785614,
-0.1858566850423813,
0.1185927465558052,
0.04352264478802681,
-0.005113695282489061,
0.060269393026828766,
-0.3308897614479065,
-0.10746408998966217,
0.12088008224964142,
0.10653279721736908,
0.14039653539657593,
-0.10516420006752014,
-0.03361242637038231,
-0.0012047067284584045,
-0.0855688527226448,
0.18074212968349457,
-0.14237819612026215,
0.093794085085392,
-0.00291530997492373,
0.08207503706216812,
0.038280680775642395,
-0.049562808126211166,
0.08946826308965683,
0.03270481899380684,
0.10725322365760803,
-0.06423458456993103,
-0.01385811809450388,
0.216631218791008,
-0.03608928620815277,
0.060735832899808884,
0.09883368015289307,
0.06660725176334381,
-0.12096799165010452,
-0.02297944761812687,
-0.11178317666053772,
0.12103218585252762,
-0.028674883767962456,
-0.08181888610124588,
-0.0065895747393369675,
0.05211485177278519,
0.05165667459368706,
-0.01645379699766636,
0.07831815630197525,
0.03627007454633713,
0.1293741762638092,
0.1509774625301361,
0.11106157302856445,
0.0014626211486756802,
-0.0940287634730339,
0.01607242040336132,
-0.013042584992945194,
0.05416359007358551,
-0.07400365173816681,
0.0062137627974152565,
0.12650980055332184,
0.055330790579319,
0.10538534820079803,
0.09749104082584381,
-0.052941493690013885,
0.017846014350652695,
0.04216470196843147,
-0.1612335443496704,
-0.1327139288187027,
-0.02619418501853943,
-0.10344382375478745,
-0.07611013203859329,
0.06644736975431442,
0.11196614056825638,
-0.08983586728572845,
0.01098279096186161,
-0.01890156976878643,
-0.025031907483935356,
-0.08718626946210861,
0.19207355380058289,
0.03701600804924965,
0.05015521124005318,
-0.08260348439216614,
0.1360194981098175,
0.027087131515145302,
-0.06696447730064392,
0.03678908944129944,
0.012474151328206062,
-0.11015819758176804,
-0.039121270179748535,
0.05153099074959755,
0.16714084148406982,
-0.04452953860163689,
-0.02536550723016262,
-0.09639190882444382,
-0.07484012097120285,
0.008179377764463425,
0.13002854585647583,
0.06772983074188232,
0.047522470355033875,
-0.08982165157794952,
0.05603957176208496,
-0.14387822151184082,
0.05456078052520752,
0.0894390344619751,
0.019285229966044426,
-0.14055053889751434,
0.18755657970905304,
-0.014932227320969105,
0.08027516305446625,
-0.08083786815404892,
-0.01788071170449257,
-0.10648738592863083,
0.00840840581804514,
-0.10550867021083832,
-0.024300511926412582,
-0.04037686064839363,
-0.03518654406070709,
0.004480551462620497,
-0.03597379848361015,
-0.04061725735664368,
0.05246284231543541,
-0.08885610103607178,
-0.0037693351041525602,
0.0409964993596077,
0.007907635532319546,
-0.08487024158239365,
-0.028554851189255714,
-0.00044567385339178145,
-0.07174196094274521,
0.06126440316438675,
0.1280214637517929,
0.002001093467697501,
0.06302006542682648,
-0.11647316068410873,
-0.001653508865274489,
0.037035174667835236,
-0.010330196470022202,
0.08336373418569565,
-0.0018068809295073152,
-0.018750431016087532,
-0.010257095098495483,
0.04163031652569771,
-0.01251749787479639,
0.023336997255682945,
-0.12165087461471558,
-0.07978177815675735,
0.005848352797329426,
-0.0232318677008152,
-0.0650813952088356,
0.0367128849029541,
0.1200336366891861,
0.06505514681339264,
0.15737847983837128,
-0.08478301018476486,
0.062361348420381546,
-0.16705608367919922,
-0.05064673349261284,
0.018947243690490723,
-0.024868370965123177,
-0.017439579591155052,
-0.07309558987617493,
0.08067194372415543,
-0.056085940450429916,
0.1330251842737198,
0.023396100848913193,
0.13170255720615387,
0.005815110635012388,
-0.024068107828497887,
0.013988681137561798,
0.03306715935468674,
0.21106082201004028,
0.0352516807615757,
-0.016394950449466705,
0.023048289120197296,
0.044557809829711914,
0.03225937485694885,
0.04854458570480347,
0.19926562905311584,
0.0693616271018982,
-0.042415715754032135,
0.08970893174409866,
0.0415610708296299,
-0.035452596843242645,
-0.1311250478029251,
0.037718672305345535,
0.0023579811677336693,
0.09881393611431122,
-0.07316417247056961,
0.10798697918653488,
0.047011490911245346,
-0.09873956441879272,
0.07338422536849976,
-0.031431470066308975,
-0.10530976206064224,
-0.09229225665330887,
-0.1386275291442871,
-0.03445005044341087,
-0.11355392634868622,
0.004310780670493841,
-0.1008877232670784,
0.005720778834074736,
0.05320490524172783,
-0.0012516817077994347,
-0.036446183919906616,
0.2354053258895874,
-0.030879786238074303,
-0.000709404528606683,
0.10580246150493622,
-0.01283228863030672,
-0.017921851947903633,
-0.06548549979925156,
-0.029200686141848564,
0.00278291292488575,
0.012734073214232922,
0.02889895811676979,
-0.06126268208026886,
-0.03761408105492592,
0.005837838631123304,
0.008750290609896183,
-0.09340491890907288,
0.01784428209066391,
0.041188374161720276,
0.0015772058395668864,
-0.038415007293224335,
0.03253386914730072,
-0.025768311694264412,
-0.05332692340016365,
0.24152375757694244,
-0.09000919759273529,
-0.04969865828752518,
-0.10393139719963074,
0.24256284534931183,
0.03744756802916527,
0.009707190096378326,
0.04656808823347092,
-0.08582551032304764,
0.00398117583245039,
0.2116241455078125,
0.14453376829624176,
-0.08181773126125336,
-0.009093920700252056,
0.013249550014734268,
-0.014748112298548222,
-0.058179158717393875,
0.13878172636032104,
0.05344394966959953,
0.08075264096260071,
-0.06428258121013641,
0.005279258359223604,
-0.036552008241415024,
-0.04966742545366287,
-0.06602688133716583,
0.07129577547311783,
0.041477933526039124,
0.02156856656074524,
-0.051409292966127396,
0.07666099816560745,
-0.15646028518676758,
-0.2244500070810318,
0.09077178686857224,
-0.12433305382728577,
-0.14345400035381317,
-0.06386931985616684,
0.015931706875562668,
0.01696898229420185,
0.10778091847896576,
-0.04602110758423805,
0.016471007838845253,
0.1730574071407318,
0.008130223490297794,
-0.06835399568080902,
-0.056739162653684616,
0.0948794037103653,
-0.08176641166210175,
0.16736890375614166,
-0.018886882811784744,
0.04803943634033203,
0.08253014832735062,
0.042076218873262405,
-0.09630982577800751,
0.03899592161178589,
0.021074557676911354,
-0.05370990186929703,
0.01328760851174593,
0.14932064712047577,
-0.015341157093644142,
-0.009682235307991505,
-0.006046175491064787,
-0.21515315771102905,
0.008377606980502605,
-0.025148971006274223,
-0.03173859417438507,
-0.07110012322664261,
-0.026545662432909012,
-0.05468979850411415,
0.1318449229001999,
0.18503503501415253,
-0.039195913821458817,
0.014471739530563354,
-0.08820484578609467,
0.06787265837192535,
0.06964023411273956,
0.06482240557670593,
-0.04717138037085533,
-0.18961776793003082,
-0.0003075109561905265,
0.05090642720460892,
-0.037445731461048126,
-0.18934495747089386,
-0.07436267286539078,
0.014190820045769215,
-0.06766920536756516,
-0.03934447839856148,
0.07372841238975525,
0.06651826202869415,
0.06070657819509506,
-0.03129027411341667,
-0.034871987998485565,
-0.05538449063897133,
0.12855225801467896,
-0.12156383693218231,
-0.04245288670063019
] |
null | null |
transformers
|
# AlephBERT
## Hebrew Language Model
State-of-the-art language model for Hebrew.
Based on Google's BERT architecture [(Devlin et al. 2018)](https://arxiv.org/abs/1810.04805).
#### How to use
```python
from transformers import BertModel, BertTokenizerFast
alephbert_tokenizer = BertTokenizerFast.from_pretrained('onlplab/alephbert-base')
alephbert = BertModel.from_pretrained('onlplab/alephbert-base')
# if not finetuning - disable dropout
alephbert.eval()
```
## Training data
1. OSCAR [(Ortiz, 2019)](https://oscar-corpus.com/) Hebrew section (10 GB text, 20 million sentences).
2. Hebrew dump of [Wikipedia](https://dumps.wikimedia.org/hewiki/latest/) (650 MB text, 3 million sentences).
3. Hebrew Tweets collected from the Twitter sample stream (7 GB text, 70 million sentences).
## Training procedure
Trained on a DGX machine (8 V100 GPUs) using the standard huggingface training procedure.
Since the larger part of our training data is based on tweets, we decided to start by optimizing using the Masked Language Model loss only.
To optimize training time we split the data into 4 sections based on max number of tokens:
1. num tokens < 32 (70M sentences)
2. 32 <= num tokens < 64 (12M sentences)
3. 64 <= num tokens < 128 (10M sentences)
4. 128 <= num tokens < 512 (1.5M sentences)
Each section was first trained for 5 epochs with an initial learning rate set to 1e-4. Then each section was trained for another 5 epochs with an initial learning rate set to 1e-5, for a total of 10 epochs.
Total training time was 8 days.
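For illustration only, a minimal sketch of how such a length-based split could be implemented; the `corpus` iterable and the exact tokenization settings are assumptions for the example, not the authors' actual preprocessing code.
```python
from transformers import BertTokenizerFast

tokenizer = BertTokenizerFast.from_pretrained('onlplab/alephbert-base')

# Upper bounds (in tokens) of the 4 training sections described above.
boundaries = [32, 64, 128, 512]

def section_index(sentence):
    """Return the index (0-3) of the length section a sentence falls into, or None if too long."""
    num_tokens = len(tokenizer.encode(sentence, add_special_tokens=True))
    for i, upper in enumerate(boundaries):
        if num_tokens < upper:
            return i
    return None

sections = {i: [] for i in range(len(boundaries))}
for sentence in corpus:  # `corpus` is a placeholder iterable of raw Hebrew sentences
    idx = section_index(sentence)
    if idx is not None:
        sections[idx].append(sentence)
```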
|
{"language": ["he"], "license": "apache-2.0", "tags": ["language model"], "datasets": ["oscar", "wikipedia", "twitter"]}
|
fill-mask
|
biu-nlp/alephbert-base
|
[
"transformers",
"pytorch",
"bert",
"fill-mask",
"language model",
"he",
"dataset:oscar",
"dataset:wikipedia",
"dataset:twitter",
"arxiv:1810.04805",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"1810.04805"
] |
[
"he"
] |
TAGS
#transformers #pytorch #bert #fill-mask #language model #he #dataset-oscar #dataset-wikipedia #dataset-twitter #arxiv-1810.04805 #license-apache-2.0 #autotrain_compatible #endpoints_compatible #region-us
|
# AlephBERT
## Hebrew Language Model
State-of-the-art language model for Hebrew.
Based on Google's BERT architecture (Devlin et al. 2018).
#### How to use
## Training data
1. OSCAR (Ortiz, 2019) Hebrew section (10 GB text, 20 million sentences).
2. Hebrew dump of Wikipedia (650 MB text, 3 million sentences).
3. Hebrew Tweets collected from the Twitter sample stream (7 GB text, 70 million sentences).
## Training procedure
Trained on a DGX machine (8 V100 GPUs) using the standard huggingface training procedure.
Since the larger part of our training data is based on tweets, we decided to start by optimizing using the Masked Language Model loss only.
To optimize training time we split the data into 4 sections based on max number of tokens:
1. num tokens < 32 (70M sentences)
2. 32 <= num tokens < 64 (12M sentences)
3. 64 <= num tokens < 128 (10M sentences)
4. 128 <= num tokens < 512 (1.5M sentences)
Each section was first trained for 5 epochs with an initial learning rate set to 1e-4. Then each section was trained for another 5 epochs with an initial learning rate set to 1e-5, for a total of 10 epochs.
Total training time was 8 days.
|
[
"# AlephBERT",
"## Hebrew Language Model\n\nState-of-the-art language model for Hebrew.\nBased on Google's BERT architecture (Devlin et al. 2018).",
"#### How to use",
"## Training data\n1. OSCAR (Ortiz, 2019) Hebrew section (10 GB text, 20 million sentences).\n2. Hebrew dump of Wikipedia (650 MB text, 3 million sentences).\n3. Hebrew Tweets collected from the Twitter sample stream (7 GB text, 70 million sentences).",
"## Training procedure\n\nTrained on a DGX machine (8 V100 GPUs) using the standard huggingface training procedure.\n\nSince the larger part of our training data is based on tweets we decided to start by optimizing using Masked Language Model loss only.\n\nTo optimize training time we split the data into 4 sections based on max number of tokens:\n\n1. num tokens < 32 (70M sentences)\n2. 32 <= num tokens < 64 (12M sentences)\n3. 64 <= num tokens < 128 (10M sentences)\n4. 128 <= num tokens < 512 (1.5M sentences)\n\nEach section was first trained for 5 epochs with an initial learning rate set to 1e-4. Then each section was trained for another 5 epochs with an initial learning rate set to 1e-5, for a total of 10 epochs.\n\nTotal training time was 8 days."
] |
[
"TAGS\n#transformers #pytorch #bert #fill-mask #language model #he #dataset-oscar #dataset-wikipedia #dataset-twitter #arxiv-1810.04805 #license-apache-2.0 #autotrain_compatible #endpoints_compatible #region-us \n",
"# AlephBERT",
"## Hebrew Language Model\n\nState-of-the-art language model for Hebrew.\nBased on Google's BERT architecture (Devlin et al. 2018).",
"#### How to use",
"## Training data\n1. OSCAR (Ortiz, 2019) Hebrew section (10 GB text, 20 million sentences).\n2. Hebrew dump of Wikipedia (650 MB text, 3 million sentences).\n3. Hebrew Tweets collected from the Twitter sample stream (7 GB text, 70 million sentences).",
"## Training procedure\n\nTrained on a DGX machine (8 V100 GPUs) using the standard huggingface training procedure.\n\nSince the larger part of our training data is based on tweets we decided to start by optimizing using Masked Language Model loss only.\n\nTo optimize training time we split the data into 4 sections based on max number of tokens:\n\n1. num tokens < 32 (70M sentences)\n2. 32 <= num tokens < 64 (12M sentences)\n3. 64 <= num tokens < 128 (10M sentences)\n4. 128 <= num tokens < 512 (1.5M sentences)\n\nEach section was first trained for 5 epochs with an initial learning rate set to 1e-4. Then each section was trained for another 5 epochs with an initial learning rate set to 1e-5, for a total of 10 epochs.\n\nTotal training time was 8 days."
] |
[
74,
5,
36,
5,
63,
194
] |
[
"passage: TAGS\n#transformers #pytorch #bert #fill-mask #language model #he #dataset-oscar #dataset-wikipedia #dataset-twitter #arxiv-1810.04805 #license-apache-2.0 #autotrain_compatible #endpoints_compatible #region-us \n# AlephBERT## Hebrew Language Model\n\nState-of-the-art language model for Hebrew.\nBased on Google's BERT architecture (Devlin et al. 2018).#### How to use## Training data\n1. OSCAR (Ortiz, 2019) Hebrew section (10 GB text, 20 million sentences).\n2. Hebrew dump of Wikipedia (650 MB text, 3 million sentences).\n3. Hebrew Tweets collected from the Twitter sample stream (7 GB text, 70 million sentences).## Training procedure\n\nTrained on a DGX machine (8 V100 GPUs) using the standard huggingface training procedure.\n\nSince the larger part of our training data is based on tweets we decided to start by optimizing using Masked Language Model loss only.\n\nTo optimize training time we split the data into 4 sections based on max number of tokens:\n\n1. num tokens < 32 (70M sentences)\n2. 32 <= num tokens < 64 (12M sentences)\n3. 64 <= num tokens < 128 (10M sentences)\n4. 128 <= num tokens < 512 (1.5M sentences)\n\nEach section was first trained for 5 epochs with an initial learning rate set to 1e-4. Then each section was trained for another 5 epochs with an initial learning rate set to 1e-5, for a total of 10 epochs.\n\nTotal training time was 8 days."
] |
[
-0.02087954245507717,
0.11003583669662476,
-0.002011623466387391,
0.07119014859199524,
0.0937439575791359,
0.03832339867949486,
0.16900216042995453,
0.09576477855443954,
-0.05309392139315605,
0.05629819631576538,
0.0954609140753746,
-0.07731560617685318,
0.04740629717707634,
0.10118826478719711,
0.06822758167982101,
-0.29741108417510986,
0.003562592202797532,
-0.10978308320045471,
-0.0786779597401619,
0.09579741954803467,
0.09672350436449051,
-0.043807584792375565,
0.06432981044054031,
-0.04128774628043175,
-0.10713603347539902,
0.06818202883005142,
0.002529872814193368,
-0.07036076486110687,
0.09131842851638794,
0.04826562479138374,
0.016729865223169327,
0.0057754553854465485,
0.061730023473501205,
-0.19123582541942596,
0.014685898087918758,
0.08346530050039291,
0.012790719978511333,
0.008707650005817413,
0.026293566450476646,
0.030879540368914604,
0.27415892481803894,
-0.15668131411075592,
0.02061467245221138,
0.05786538124084473,
-0.09796667098999023,
-0.08566147089004517,
-0.07427334040403366,
0.035398855805397034,
0.04743938520550728,
0.08942724019289017,
-0.05893431231379509,
0.07996150851249695,
-0.08223102241754532,
0.04480424150824547,
0.037968724966049194,
-0.3092059791088104,
-0.04331684112548828,
0.08248055726289749,
-0.09465330839157104,
0.10956678539514542,
-0.1052330732345581,
0.030102506279945374,
0.07996047288179398,
0.023931292816996574,
0.06738850474357605,
0.039832957088947296,
0.08789945393800735,
0.008633937686681747,
-0.1237119734287262,
-0.0992521345615387,
0.1061488687992096,
0.08271054923534393,
-0.027282848954200745,
-0.1818883866071701,
-0.04288247227668762,
-0.07188505679368973,
0.04122478514909744,
-0.008763712830841541,
-0.008045177906751633,
0.03254500404000282,
-0.0897362008690834,
-0.038128241896629333,
-0.10779976844787598,
0.0586785189807415,
-0.09862234443426132,
0.025912949815392494,
0.043305788189172745,
0.04136589542031288,
0.012094452045857906,
0.04580925777554512,
0.047788579016923904,
-0.1334281712770462,
-0.008354348130524158,
-0.048907894641160965,
-0.09949245303869247,
-0.02996804751455784,
-0.010414463467895985,
-0.028805876150727272,
-0.04730037972331047,
0.08849915117025375,
-0.00858127512037754,
0.03434735909104347,
0.01945822685956955,
-0.029832348227500916,
0.03140822798013687,
0.11191928386688232,
-0.06902924925088882,
-0.08786360174417496,
-0.02611462213099003,
0.03709938004612923,
0.014477214775979519,
0.020418928936123848,
-0.016479013487696648,
-0.020430458709597588,
0.03477393463253975,
0.05154737830162048,
-0.0644369050860405,
0.06503194570541382,
0.0689380019903183,
-0.04156845808029175,
0.03829525411128998,
-0.1542070060968399,
0.0032520529348403215,
-0.007415764033794403,
-0.07238265126943588,
0.05250169336795807,
-0.045319218188524246,
-0.044238656759262085,
-0.04897857829928398,
0.016490591689944267,
-0.0817650556564331,
-0.019566336646676064,
-0.03205724060535431,
-0.09427275508642197,
0.047064557671546936,
-0.013523105531930923,
-0.05364367365837097,
-0.13775348663330078,
-0.027586879208683968,
0.006618790328502655,
0.008776850067079067,
-0.01792440563440323,
-0.03341348096728325,
-0.07133808732032776,
-0.014029838144779205,
-0.004980168770998716,
-0.009561475366353989,
-0.004457560833543539,
-0.026988649740815163,
0.0658726915717125,
-0.03329860419034958,
0.0616612434387207,
0.06585106998682022,
0.021007606759667397,
-0.10019878298044205,
0.027970170602202415,
-0.08429906517267227,
0.18871979415416718,
-0.021620241925120354,
0.052064310759305954,
-0.11262112855911255,
-0.07526612281799316,
-0.06627704948186874,
-0.027232900261878967,
0.02810615301132202,
0.14608031511306763,
-0.1509026288986206,
-0.10027613490819931,
0.14990051090717316,
-0.06300700455904007,
-0.06005622819066048,
0.17251187562942505,
-0.05515168979763985,
-0.014457779936492443,
0.16016243398189545,
0.1858518123626709,
0.020108507946133614,
-0.06523037701845169,
-0.18413646519184113,
-0.07525456696748734,
0.011282959021627903,
0.0725589245557785,
0.0727636069059372,
0.06891917437314987,
0.027477048337459564,
0.021945588290691376,
0.05916176736354828,
0.1084793210029602,
-0.03482285887002945,
-0.10547557473182678,
0.0032996125519275665,
-0.05134594440460205,
-0.003986678551882505,
0.031217938289046288,
0.0035364534705877304,
-0.06540995091199875,
-0.12455141544342041,
0.029080508276820183,
0.11278041452169418,
-0.09332531690597534,
0.0181280467659235,
-0.09160725027322769,
-0.06721065193414688,
-0.04685064032673836,
-0.004556463565677404,
-0.09071991592645645,
-0.08337422460317612,
0.042747486382722855,
-0.017459658905863762,
0.113271564245224,
-0.02717244066298008,
0.10029792785644531,
0.052773360162973404,
-0.09910998493432999,
0.0457744300365448,
-0.054433420300483704,
-0.04439669847488403,
-0.04150569066405296,
-0.08713012933731079,
-0.0398089624941349,
-0.00784069113433361,
0.14911098778247833,
-0.14859460294246674,
0.0019333850359544158,
-0.004281423054635525,
0.10202252864837646,
0.0019930750131607056,
-0.047313984483480453,
0.003706512972712517,
-0.026051916182041168,
-0.015803949907422066,
-0.11820099502801895,
-0.03689281642436981,
-0.011410207487642765,
-0.07611314207315445,
0.045743197202682495,
-0.15617573261260986,
-0.06478328257799149,
0.1009138822555542,
0.16050030291080475,
-0.06787989288568497,
0.02397601120173931,
-0.13785749673843384,
-0.011166124604642391,
-0.05833709239959717,
-0.010855220258235931,
0.09908207505941391,
-0.007358937058597803,
0.06200030446052551,
-0.0799335464835167,
-0.04487576708197594,
0.017829155549407005,
0.028398096561431885,
-0.07868744432926178,
0.04589148983359337,
0.04229370132088661,
-0.22565631568431854,
0.0881117656826973,
0.006165535654872656,
0.03685754910111427,
0.2347584217786789,
-0.02501375414431095,
-0.10704266279935837,
-0.030577590689063072,
0.009280475787818432,
-0.001935711014084518,
0.06100383400917053,
-0.03495864197611809,
0.00426215585321188,
0.0006382216815836728,
0.02405807562172413,
0.03940192982554436,
-0.052330683916807175,
0.01758635602891445,
0.03496678173542023,
-0.07278060913085938,
-0.027771120890975,
0.017535431310534477,
-0.016427112743258476,
0.09513353556394577,
0.029282674193382263,
0.020930400118231773,
-0.03246935084462166,
0.007410356309264898,
-0.07754102349281311,
0.11713690310716629,
-0.06796780228614807,
-0.19841642677783966,
-0.10502535104751587,
0.012792013585567474,
-0.07612515240907669,
-0.002151686465367675,
0.011868658475577831,
-0.16474267840385437,
-0.06837806850671768,
-0.09686178714036942,
0.11666349321603775,
-0.08542176336050034,
0.054310064762830734,
0.023943355306982994,
0.026054879650473595,
-0.00928062666207552,
-0.10491166263818741,
-0.009891422465443611,
-0.016681648790836334,
-0.08895790576934814,
-0.020097609609365463,
-0.059780508279800415,
-0.003941098693758249,
0.11395177990198135,
-0.019088264554739,
0.030544593930244446,
-0.04867706820368767,
0.231684148311615,
-0.0980391576886177,
0.0689760148525238,
0.03648455813527107,
0.051459912210702896,
0.031104514375329018,
0.1414775252342224,
-0.0031219040974974632,
-0.06949035823345184,
0.08216671645641327,
0.08231671154499054,
-0.049808990210294724,
-0.25419774651527405,
-0.05292463302612305,
-0.08749791234731674,
0.02489408291876316,
0.1549621969461441,
0.06291506439447403,
0.013096892274916172,
0.043784063309431076,
-0.14456163346767426,
0.08310680836439133,
0.021832317113876343,
0.04577697440981865,
0.0788651630282402,
0.0668250098824501,
0.05236799642443657,
-0.07978028804063797,
-0.07744590193033218,
0.12867669761180878,
-0.05106588825583458,
0.20277827978134155,
-0.039973001927137375,
0.11463821679353714,
0.04808618500828743,
0.10635866969823837,
0.000853016332257539,
0.0076037500984966755,
0.007249435409903526,
0.030928784981369972,
0.0016469215042889118,
-0.08128983527421951,
-0.02188935875892639,
0.05998978018760681,
0.09188762307167053,
-0.04510490968823433,
-0.027972564101219177,
0.04248437285423279,
0.08496195077896118,
0.22054286301136017,
0.09653648734092712,
-0.2347734570503235,
-0.12743912637233734,
0.035758089274168015,
-0.11798811703920364,
-0.02191527746617794,
0.01458711177110672,
0.17149637639522552,
-0.06716559082269669,
0.1213560700416565,
-0.0102962926030159,
0.05605466291308403,
-0.018290476873517036,
-0.015150628052651882,
0.045932456851005554,
0.06887438893318176,
-0.03379395604133606,
0.10757318139076233,
-0.257821649312973,
0.14295822381973267,
0.010452226735651493,
0.10922626405954361,
-0.0880030170083046,
0.007716297637671232,
0.037011921405792236,
-0.07345003634691238,
0.0864693745970726,
0.038550104945898056,
-0.053030505776405334,
-0.020337486639618874,
-0.11508829146623611,
-0.008947458118200302,
0.0703882947564125,
-0.026916390284895897,
0.12147863954305649,
0.0009404011652804911,
0.0010377829894423485,
-0.0011497599771246314,
0.03339154273271561,
-0.09645932912826538,
-0.20979906618595123,
-0.020914429798722267,
0.012936867773532867,
-0.012730316258966923,
-0.01197342574596405,
-0.07780396938323975,
-0.01456004660576582,
0.1667906641960144,
0.017366629093885422,
-0.10109874606132507,
-0.10818520188331604,
0.10087606310844421,
0.15435492992401123,
-0.06330569833517075,
0.0005952190258540213,
0.04569147154688835,
0.10636880248785019,
-0.06681448966264725,
-0.11929396539926529,
-0.0074314698576927185,
-0.014558459632098675,
-0.06438475102186203,
0.02954617142677307,
0.20653145015239716,
0.04673123359680176,
0.07317996025085449,
0.002481698989868164,
-0.017027823254466057,
0.013822704553604126,
-0.08253498375415802,
0.009842250496149063,
0.026241222396492958,
0.026097454130649567,
0.02381272055208683,
0.003635150147601962,
-0.011799120344221592,
-0.1322619765996933,
-0.0027962271124124527,
0.05333803594112396,
0.21326084434986115,
-0.06138890981674194,
0.11121431738138199,
0.0826839879155159,
-0.023313378915190697,
-0.11769204586744308,
-0.046666860580444336,
0.07430124282836914,
0.0914129912853241,
0.006392067763954401,
-0.21083052456378937,
0.0025499220937490463,
0.01987987384200096,
0.03395436331629753,
-0.03708747401833534,
-0.3094369173049927,
-0.09640642255544662,
0.03545871376991272,
-0.003889498533681035,
0.12007365375757217,
-0.04384496808052063,
0.014714713208377361,
-0.04911106824874878,
0.07486627250909805,
0.18538682162761688,
-0.012468750588595867,
0.1365826278924942,
0.018406199291348457,
0.052506137639284134,
0.04044270142912865,
-0.03254035487771034,
0.10942215472459793,
0.07622624188661575,
0.06094704940915108,
-0.0785413458943367,
-0.01826103776693344,
0.19731302559375763,
-0.04839124158024788,
0.09862471371889114,
-0.054885584861040115,
0.019948408007621765,
-0.11896640807390213,
-0.11006102710962296,
-0.05641807243227959,
0.00222984841093421,
-0.016325870528817177,
-0.09027712792158127,
-0.09916502237319946,
0.0984884575009346,
0.13056786358356476,
-0.012808854691684246,
0.015385832637548447,
0.04455472528934479,
-0.00032194299274124205,
0.024109823629260063,
0.11375853419303894,
0.04790419340133667,
0.022250724956393242,
-0.03865757957100868,
0.016249341890215874,
0.05556316673755646,
-0.183821439743042,
-0.01194350142031908,
0.11138830333948135,
-0.023189252242445946,
0.11434811353683472,
-0.00017557416867930442,
-0.14421014487743378,
-0.01728987693786621,
0.0961739793419838,
-0.11065993458032608,
-0.2476016730070114,
-0.038677629083395004,
-0.10128142684698105,
-0.12009081989526749,
-0.07325700670480728,
0.09638985991477966,
-0.11944810301065445,
-0.007248260080814362,
0.03398611769080162,
0.040932588279247284,
-0.029341941699385643,
0.1729678362607956,
-0.01668217033147812,
0.014877763576805592,
-0.0725024864077568,
0.10930266231298447,
0.1146460771560669,
-0.09963927417993546,
0.038994599133729935,
0.13005448877811432,
-0.12353258579969406,
0.038146693259477615,
-0.05888320878148079,
-0.05065162479877472,
0.05910706892609596,
0.0152538837864995,
-0.002844369737431407,
-0.07963577657938004,
0.041945893317461014,
-0.03821109980344772,
-0.0053459894843399525,
0.11184638738632202,
-0.07747245579957962,
0.03850777447223663,
-0.10675569623708725,
0.06583353132009506,
0.08001033216714859,
-0.023456016555428505,
-0.07927920669317245,
0.16266517341136932,
-0.04052017256617546,
0.07366463541984558,
-0.022066732868552208,
-0.0058663394302129745,
-0.0486384741961956,
-0.016394933685660362,
-0.03619382157921791,
-0.024131977930665016,
-0.06901385635137558,
-0.00928511656820774,
-0.015088989399373531,
-0.014460843987762928,
-0.09730977565050125,
0.015494602732360363,
-0.03505716845393181,
-0.06541723757982254,
-0.03461506962776184,
0.05245941877365112,
-0.08605330437421799,
0.0059697129763662815,
0.03116150014102459,
-0.10859503597021103,
0.11420363187789917,
0.0781903937458992,
0.010179963894188404,
0.028964929282665253,
0.07673517614603043,
-0.06036420539021492,
0.052455976605415344,
0.05726001039147377,
-0.005138108041137457,
-0.07341106981039047,
0.023177525028586388,
0.01681811548769474,
-0.000353851675754413,
0.01917359046638012,
-0.05554599687457085,
-0.05971924960613251,
-0.023798584938049316,
-0.0020865683909505606,
0.006198191549628973,
-0.04237787052989006,
0.04246059060096741,
0.0035009069833904505,
0.02381252683699131,
0.12676149606704712,
-0.05624943599104881,
-0.02756182663142681,
-0.22169971466064453,
-0.018295230343937874,
-0.030697965994477272,
-0.029736964032053947,
0.03639728203415871,
-0.020049771293997765,
0.08458108454942703,
0.0005247776280157268,
0.11037558317184448,
0.03463646396994591,
-0.08747121691703796,
0.04007284343242645,
0.00196828949265182,
0.00375187280587852,
-0.023677697405219078,
0.20025551319122314,
0.058665353804826736,
-0.060188110917806625,
0.03867630288004875,
-0.11255661398172379,
0.031908709555864334,
0.1312771588563919,
0.1744806319475174,
0.1240333691239357,
0.0966154932975769,
0.02123837172985077,
0.027973322197794914,
-0.05197659507393837,
-0.08237861841917038,
0.09117108583450317,
-0.05933811888098717,
0.03543657436966896,
-0.04312262311577797,
0.034088149666786194,
0.18855524063110352,
-0.1509847790002823,
0.10496676713228226,
0.020192859694361687,
-0.07995020598173141,
-0.10143640637397766,
-0.16315442323684692,
-0.04257148131728172,
-0.1049339547753334,
0.04696185514330864,
-0.08609453588724136,
0.028501436114311218,
0.05271708965301514,
0.11392626166343689,
-0.03610784187912941,
0.1138019934296608,
-0.04885777458548546,
-0.09643614292144775,
0.10198143869638443,
-0.016066178679466248,
-0.024096019566059113,
0.07427907735109329,
-0.031534064561128616,
0.018455667421221733,
-0.03013235330581665,
0.09402672201395035,
-0.004198698792606592,
0.0870494544506073,
0.05550628527998924,
-0.010689880698919296,
-0.051703501492738724,
-0.025027446448802948,
-0.02734140306711197,
0.033317577093839645,
0.09132304042577744,
0.05668139457702637,
-0.016055237501859665,
-0.01196406502276659,
0.15773139894008636,
-0.009081118740141392,
-0.13187719881534576,
-0.1697164922952652,
0.04881167411804199,
0.03686622157692909,
-0.012958469800651073,
0.017077120020985603,
-0.09363359957933426,
-0.034757617861032486,
0.12138980627059937,
0.14735287427902222,
0.0233356524258852,
-0.041042838245630264,
-0.04339173063635826,
-0.005610408261418343,
-0.00007503820961574093,
0.15864931046962738,
-0.03682076558470726,
0.1413642317056656,
-0.03687434270977974,
-0.006454089656472206,
-0.013163330964744091,
-0.047859255224466324,
0.007729994598776102,
0.18489690124988556,
-0.021219724789261818,
0.0028468884993344545,
-0.04130825027823448,
0.07752380520105362,
-0.06350214034318924,
-0.1702899932861328,
-0.04029111936688423,
-0.08944341540336609,
-0.12160646170377731,
0.016335442662239075,
-0.023513048887252808,
0.05045022442936897,
0.08436726778745651,
0.01572544127702713,
0.029671646654605865,
0.11768916249275208,
0.03478328883647919,
-0.16810071468353271,
-0.044961098581552505,
0.12369228154420853,
0.02035239152610302,
0.11582446843385696,
0.0018904529279097915,
0.030819043517112732,
0.06171039864420891,
-0.004705412313342094,
-0.09616277366876602,
0.0066504399292171,
0.038930539041757584,
0.04194909334182739,
-0.006916103418916464,
0.18104727566242218,
-0.029123947024345398,
0.029817691072821617,
0.04067061468958855,
0.016503410413861275,
-0.02117631398141384,
0.02189255692064762,
0.03589870408177376,
-0.09902811795473099,
0.051053356379270554,
-0.025218641385436058,
0.11628899723291397,
0.18151479959487915,
-0.0477948896586895,
0.048261746764183044,
-0.06180436909198761,
-0.03374800458550453,
0.04971492663025856,
0.08487477153539658,
0.008223566226661205,
-0.1877930760383606,
-0.08654829114675522,
-0.10831119865179062,
0.022938335314393044,
-0.17886708676815033,
-0.06747656315565109,
0.021270863711833954,
-0.041072554886341095,
-0.08053522557020187,
0.17303581535816193,
0.04156317189335823,
0.021176716312766075,
-0.021840324625372887,
-0.06634924560785294,
0.005889029707759619,
0.06858205795288086,
-0.12756727635860443,
-0.041692104190588
] |
null | null |
transformers
|
# Cross-Document Language Modeling
CDLM: Cross-Document Language Modeling.
Avi Caciularu, Arman Cohan, Iz Beltagy, Matthew E Peters, Arie Cattan and Ido Dagan. In EMNLP Findings, 2021. [PDF](https://arxiv.org/pdf/2101.00406.pdf)
Please note that during our pretraining we used the document and sentence separators, which you might want to add to your data. The document and sentence separators are `<doc-s>`, `</doc-s>` (the last two tokens in the vocabulary), and `<s>`, `</s>`, respectively.
```python
from transformers import AutoTokenizer, AutoModel
# load model and tokenizer
tokenizer = AutoTokenizer.from_pretrained('biu-nlp/cdlm')
model = AutoModel.from_pretrained('biu-nlp/cdlm')
```
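For example, a minimal, hypothetical sketch of how the document separators mentioned above could be added when concatenating related documents into a single input (the example documents and the exact formatting are assumptions, not part of the official preprocessing code; per-sentence `<s>`/`</s>` markers are omitted for brevity):
```python
# Builds on the tokenizer and model loaded above.
docs = [
    "First related document.",
    "Second related document.",
]
# Wrap each document in <doc-s> ... </doc-s> and join into one sequence.
joined = " ".join(f"<doc-s> {doc} </doc-s>" for doc in docs)

inputs = tokenizer(joined, return_tensors="pt")
outputs = model(**inputs)
```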
The original repo is [here](https://github.com/aviclu/CDLM).
If you find our work useful, please cite the paper as:
```python
@article{caciularu2021cross,
title={Cross-Document Language Modeling},
author={Caciularu, Avi and Cohan, Arman and Beltagy, Iz and Peters, Matthew E and Cattan, Arie and Dagan, Ido},
journal={Findings of the Association for Computational Linguistics: EMNLP 2021},
year={2021}
}
```
|
{"language": "en", "license": "apache-2.0", "tags": ["longformer", "cdlm"], "inference": false}
|
fill-mask
|
biu-nlp/cdlm
|
[
"transformers",
"pytorch",
"longformer",
"fill-mask",
"cdlm",
"en",
"arxiv:2101.00406",
"license:apache-2.0",
"autotrain_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"2101.00406"
] |
[
"en"
] |
TAGS
#transformers #pytorch #longformer #fill-mask #cdlm #en #arxiv-2101.00406 #license-apache-2.0 #autotrain_compatible #region-us
|
# Cross-Document Language Modeling
CDLM: Cross-Document Language Modeling.
Avi Caciularu, Arman Cohan, Iz Beltagy, Matthew E Peters, Arie Cattan and Ido Dagan. In EMNLP Findings, 2021. PDF
Please note that during our pretraining we used the document and sentence separators, which you might want to add to your data. The document and sentence separators are '<doc-s>', '</doc-s>' (the last two tokens in the vocabulary), and '<s>', '</s>', respectively.
The original repo is here.
If you find our work useful, please cite the paper as:
|
[
"# Cross-Document Language Modeling\n\nCDLM: Cross-Document Language Modeling. \nAvi Caciularu, Arman Cohan, Iz Beltagy, Matthew E Peters, Arie Cattan and Ido Dagan. In EMNLP Findings, 2021. PDF\n\n\nPlease note that during our pretraining we used the document and sentence separators, which you might want to add to your data. The document and sentence separators are '<doc-s>', '</doc-s>' (the last two tokens in the vocabulary), and '<s>', '</s>', respectively.\n\n\n\n\nThe original repo is here.\n\nIf you find our work useful, please cite the paper as:"
] |
[
"TAGS\n#transformers #pytorch #longformer #fill-mask #cdlm #en #arxiv-2101.00406 #license-apache-2.0 #autotrain_compatible #region-us \n",
"# Cross-Document Language Modeling\n\nCDLM: Cross-Document Language Modeling. \nAvi Caciularu, Arman Cohan, Iz Beltagy, Matthew E Peters, Arie Cattan and Ido Dagan. In EMNLP Findings, 2021. PDF\n\n\nPlease note that during our pretraining we used the document and sentence separators, which you might want to add to your data. The document and sentence separators are '<doc-s>', '</doc-s>' (the last two tokens in the vocabulary), and '<s>', '</s>', respectively.\n\n\n\n\nThe original repo is here.\n\nIf you find our work useful, please cite the paper as:"
] |
[
50,
149
] |
[
"passage: TAGS\n#transformers #pytorch #longformer #fill-mask #cdlm #en #arxiv-2101.00406 #license-apache-2.0 #autotrain_compatible #region-us \n# Cross-Document Language Modeling\n\nCDLM: Cross-Document Language Modeling. \nAvi Caciularu, Arman Cohan, Iz Beltagy, Matthew E Peters, Arie Cattan and Ido Dagan. In EMNLP Findings, 2021. PDF\n\n\nPlease note that during our pretraining we used the document and sentence separators, which you might want to add to your data. The document and sentence separators are '<doc-s>', '</doc-s>' (the last two tokens in the vocabulary), and '<s>', '</s>', respectively.\n\n\n\n\nThe original repo is here.\n\nIf you find our work useful, please cite the paper as:"
] |
[
-0.08891650289297104,
-0.08009159564971924,
-0.002096025040373206,
0.032956190407276154,
0.03264201059937477,
0.024886421859264374,
0.15813851356506348,
0.024844788014888763,
0.01550864428281784,
-0.01701924577355385,
0.10864454507827759,
0.10843458026647568,
0.015382189303636551,
0.05316314846277237,
-0.0783781111240387,
-0.322050541639328,
0.04859088733792305,
0.019284863024950027,
0.024458549916744232,
0.07637849450111389,
0.13481485843658447,
-0.09170079231262207,
0.04219236224889755,
0.0334165096282959,
-0.13203813135623932,
-0.05904672294855118,
-0.06065405160188675,
-0.09322720766067505,
0.12884441018104553,
0.03537054359912872,
0.13229012489318848,
0.057338498532772064,
0.04517339915037155,
-0.04806015267968178,
0.039926111698150635,
-0.06650213152170181,
-0.008030313067138195,
0.05424255132675171,
-0.01870294101536274,
-0.010105421766638756,
0.1047169417142868,
-0.0023094406351447105,
0.006476376671344042,
-0.066063292324543,
-0.10329543799161911,
-0.19534382224082947,
-0.10737390071153641,
-0.03492789342999458,
0.12472038716077805,
0.08629650622606277,
0.011052008718252182,
0.17577999830245972,
-0.07297714799642563,
0.018251048400998116,
0.09216701984405518,
-0.2704421579837799,
-0.02556275762617588,
0.09189897775650024,
0.1167391985654831,
-0.011585717089474201,
0.0009515488054603338,
0.05684560909867287,
0.027544178068637848,
-0.0013718933332711458,
-0.04721641540527344,
-0.07262381166219711,
-0.018396079540252686,
-0.05768450349569321,
-0.13456059992313385,
-0.003917225636541843,
0.42457008361816406,
-0.041205741465091705,
-0.02997179701924324,
-0.0008115787641145289,
-0.03562593087553978,
0.09753680974245071,
-0.008040494285523891,
-0.14053645730018616,
0.0016415304271504283,
-0.0005706792580895126,
0.13853581249713898,
-0.05108194798231125,
-0.17167866230010986,
0.0024242184590548277,
-0.2701554000377655,
0.19839327037334442,
0.03592686727643013,
-0.007631723303347826,
-0.046435657888650894,
0.034798070788383484,
-0.08927828818559647,
-0.12025588750839233,
-0.01357361115515232,
-0.05160079523921013,
0.06523516029119492,
-0.016822462901473045,
-0.09028025716543198,
-0.14956218004226685,
0.03146786242723465,
0.17158477008342743,
0.09496266394853592,
-0.036931831389665604,
-0.11395498365163803,
0.11958927661180496,
0.023936741054058075,
0.2173936516046524,
-0.0006836168467998505,
0.029041333124041557,
0.08695828169584274,
-0.07386231422424316,
0.08482810854911804,
-0.04940813407301903,
-0.2039637267589569,
-0.007540945429354906,
-0.024554811418056488,
0.05183927342295647,
0.036302778869867325,
0.03940092772245407,
-0.029037509113550186,
-0.043555766344070435,
0.12570926547050476,
-0.09815426170825958,
-0.01203316543251276,
-0.0604206845164299,
0.007613216061145067,
0.07295767962932587,
0.007868623360991478,
0.11081984639167786,
-0.01062008272856474,
0.03238850086927414,
-0.00983075425028801,
0.01662127673625946,
-0.03428918495774269,
-0.13242079317569733,
0.07203208655118942,
-0.02610994130373001,
0.03411499410867691,
-0.1892991065979004,
-0.1371414214372635,
0.0036662870552390814,
0.051932305097579956,
-0.02027605101466179,
0.04373979941010475,
-0.04619525000452995,
0.0520596019923687,
-0.04888977110385895,
-0.009022751823067665,
-0.05462449789047241,
-0.03858927637338638,
0.0037692778278142214,
-0.029210330918431282,
0.06068753823637962,
-0.21573705971240997,
0.03446026146411896,
-0.09001016616821289,
0.01323016919195652,
-0.1331033706665039,
-0.040520403534173965,
0.03116115741431713,
0.014114652760326862,
-0.03192153573036194,
0.014507977291941643,
-0.1047624945640564,
0.034593015909194946,
-0.04170934483408928,
0.11064741015434265,
-0.20345675945281982,
-0.053861528635025024,
0.10273577272891998,
-0.13757552206516266,
-0.13263362646102905,
0.12766706943511963,
-0.03257713466882706,
0.1612347513437271,
0.05726504698395729,
0.1397881805896759,
0.05491827428340912,
-0.16678552329540253,
0.10132905095815659,
-0.0076528689824044704,
-0.03064229153096676,
0.007684048265218735,
0.14142686128616333,
-0.022688116878271103,
-0.034593649208545685,
0.03210948035120964,
-0.05986471474170685,
-0.02983398362994194,
-0.028718315064907074,
-0.04157836735248566,
0.03777322173118591,
0.0010317523265257478,
0.03863034397363663,
-0.010448979213833809,
0.07312092930078506,
-0.03771412745118141,
0.00701476912945509,
-0.09593232721090317,
0.04929862171411514,
0.003582393517717719,
0.040637802332639694,
-0.043518103659152985,
0.11398480832576752,
-0.06775875389575958,
0.024645434692502022,
-0.13869895040988922,
-0.049810852855443954,
0.035172238945961,
0.2077610045671463,
0.06602296233177185,
0.1248609870672226,
-0.014403946697711945,
0.0306315366178751,
0.006116109434515238,
0.040291644632816315,
0.03466971218585968,
0.01676819659769535,
-0.038602422922849655,
-0.0906572937965393,
0.08562903851270676,
-0.06775394827127457,
0.1222454309463501,
-0.025247791782021523,
0.011491415090858936,
-0.08218098431825638,
0.04715082794427872,
0.005564457271248102,
0.09429261833429337,
-0.06381821632385254,
0.06208232417702675,
-0.0761290192604065,
0.07207444310188293,
0.025416787713766098,
-0.009711327962577343,
-0.08572328835725784,
0.16806639730930328,
-0.10603810846805573,
0.13074439764022827,
0.16218429803848267,
-0.11250754445791245,
0.0484032928943634,
-0.13999994099140167,
0.0012160791084170341,
-0.01605847477912903,
0.01682288572192192,
-0.00953525397926569,
0.11133945733308792,
0.013355080038309097,
0.10714657604694366,
-0.07771384716033936,
0.04656120389699936,
-0.03305031731724739,
-0.09823262691497803,
-0.07045964896678925,
0.03557370603084564,
0.10946173220872879,
-0.1027430072426796,
0.09501486271619797,
0.2811431884765625,
-0.007495375815778971,
0.1010655090212822,
-0.013284653425216675,
-0.04691692441701889,
-0.08084819465875626,
-0.01601361855864525,
-0.00516317505389452,
0.0455392561852932,
-0.020507436245679855,
0.04656365513801575,
0.06952492892742157,
0.033665671944618225,
0.046984318643808365,
-0.12465764582157135,
-0.0314263217151165,
0.044817209243774414,
0.02807537280023098,
-0.02891133911907673,
0.10798116773366928,
-0.01700836420059204,
0.058869343250989914,
-0.0004549556761048734,
-0.09899914264678955,
0.00953678134828806,
0.02739005722105503,
-0.06382540613412857,
0.15882869064807892,
-0.16003423929214478,
-0.38487958908081055,
-0.11293613910675049,
-0.050263673067092896,
-0.038493797183036804,
0.05996614694595337,
0.06947753578424454,
-0.01494787447154522,
-0.04191162809729576,
-0.0037597136106342077,
0.05990012735128403,
-0.05830521509051323,
-0.040112171322107315,
0.05129504203796387,
-0.02009422518312931,
-0.13470996916294098,
-0.08209455758333206,
-0.048822641372680664,
-0.05425131693482399,
0.023822056129574776,
0.10297254472970963,
-0.1159500703215599,
0.13006864488124847,
0.11385393142700195,
0.04714517667889595,
0.0208134762942791,
-0.022217687219381332,
0.057302072644233704,
-0.0630105659365654,
0.013116667047142982,
0.22060982882976532,
-0.10431299358606339,
0.03691263496875763,
0.13709893822669983,
0.0013280194252729416,
-0.0286845825612545,
-0.01833498850464821,
-0.08709809184074402,
-0.06859505921602249,
-0.18156568706035614,
-0.1440778225660324,
-0.08461710810661316,
0.009091362357139587,
-0.03580809757113457,
-0.009568444453179836,
0.09793546795845032,
0.07792293280363083,
-0.008020555600523949,
0.029932266101241112,
0.023814605548977852,
0.08948004990816116,
0.20739588141441345,
-0.047996412962675095,
0.10925357043743134,
0.0053543271496891975,
-0.10154277086257935,
0.07362653315067291,
0.04631833732128143,
0.16217218339443207,
0.1613035649061203,
0.022285660728812218,
0.09348321706056595,
0.03833158686757088,
0.03483768552541733,
0.08060374855995178,
0.01075515616685152,
-0.012210691347718239,
-0.073672354221344,
-0.06655760109424591,
0.007181431632488966,
0.06654978543519974,
0.015699470415711403,
-0.04798882454633713,
-0.10161975026130676,
0.06569739431142807,
0.04793880879878998,
-0.006989589426666498,
0.024228621274232864,
-0.15203142166137695,
-0.029083870351314545,
0.004283708054572344,
0.040977660566568375,
-0.04895125329494476,
-0.01085488311946392,
-0.09617063403129578,
-0.08327271789312363,
0.024077201262116432,
0.006467064842581749,
0.07395578175783157,
-0.04095683991909027,
0.05143235623836517,
-0.19585801661014557,
-0.05198490992188454,
0.04992189630866051,
0.07668502628803253,
-0.22565804421901703,
0.28687918186187744,
0.031381912529468536,
-0.03919866308569908,
-0.11701471358537674,
0.0030611471738666296,
-0.005821079947054386,
0.1681985706090927,
0.0690850168466568,
-0.021027175709605217,
-0.059014879167079926,
-0.017522627487778664,
-0.09869252890348434,
0.038734376430511475,
0.12188298255205154,
-0.11454608291387558,
0.049874838441610336,
-0.004234556574374437,
0.012005924247205257,
0.03980822488665581,
0.10247398912906647,
-0.12554582953453064,
-0.14250126481056213,
0.14804328978061676,
0.006713741458952427,
0.018410416319966316,
-0.01212492398917675,
-0.07643894851207733,
-0.13376301527023315,
0.17147429287433624,
-0.047741033136844635,
-0.017942845821380615,
-0.05975086987018585,
0.008267061784863472,
0.09347688406705856,
-0.08781696110963821,
0.05627603456377983,
-0.05568833649158478,
0.04694891348481178,
-0.12715986371040344,
-0.061078596860170364,
0.06273204833269119,
-0.12559808790683746,
-0.03423970937728882,
-0.05435220152139664,
0.13796983659267426,
0.01913585141301155,
0.02941909246146679,
0.08803118020296097,
0.0426306277513504,
-0.08927565068006516,
-0.05564865842461586,
-0.014618674293160439,
0.07339853793382645,
0.17404791712760925,
0.015787336975336075,
-0.18403200805187225,
-0.06619718670845032,
-0.02014894410967827,
-0.12916484475135803,
0.2239435464143753,
0.21930952370166779,
-0.06136548891663551,
0.11591681838035583,
0.13328228890895844,
-0.06032919883728027,
-0.2359665483236313,
-0.0964680165052414,
-0.03718720003962517,
0.03173541650176048,
0.004631078336387873,
-0.11769906431436539,
0.062140949070453644,
0.11330417543649673,
-0.03604985028505325,
0.01299411989748478,
-0.2564440965652466,
-0.12043318152427673,
0.23696905374526978,
-0.08024495840072632,
0.3109271824359894,
-0.0759071484208107,
-0.003525950713083148,
-0.10895518958568573,
-0.09572674334049225,
0.0939861387014389,
-0.06786560267210007,
0.06928877532482147,
0.005159485153853893,
0.03517361730337143,
-0.01023157313466072,
-0.027135461568832397,
0.16153834760189056,
-0.03373557701706886,
0.044410329312086105,
-0.0848337858915329,
-0.10127683728933334,
0.033280834555625916,
-0.03709930554032326,
0.08512312173843384,
-0.009234524331986904,
-0.02084292098879814,
-0.014254697598516941,
-0.08282879739999771,
-0.0002274028811370954,
0.07823887467384338,
-0.002505469135940075,
-0.08083897083997726,
-0.09546074271202087,
0.01985335163772106,
-0.06023957580327988,
-0.01554245688021183,
0.14679554104804993,
0.0017246109200641513,
-0.016635533422231674,
0.021231582388281822,
0.11424917727708817,
-0.08818580210208893,
0.1547396183013916,
-0.05120239406824112,
-0.08583204448223114,
0.07017388194799423,
0.005912005435675383,
0.016418317332863808,
0.1269371211528778,
-0.09187540411949158,
0.10846332460641861,
0.041438840329647064,
-0.04952940344810486,
-0.017771074548363686,
0.08188473433256149,
-0.08627946674823761,
-0.07299555093050003,
-0.039236944168806076,
0.026069406419992447,
0.07347089052200317,
0.04995102062821388,
0.12100981920957565,
-0.02296307124197483,
-0.03443257510662079,
0.006321621127426624,
0.022274455055594444,
0.03319144248962402,
0.0577114075422287,
0.023366468027234077,
-0.03138688579201698,
-0.07055377215147018,
0.08680049329996109,
0.10417523235082626,
-0.0431075245141983,
0.012210718356072903,
-0.01606743223965168,
-0.08862096816301346,
-0.1350124180316925,
-0.05367941036820412,
0.10557324439287186,
-0.15604020655155182,
-0.08511312305927277,
-0.05554564297199249,
-0.10770808160305023,
0.008270434103906155,
0.18170979619026184,
0.1021672785282135,
-0.013089437037706375,
-0.02214263379573822,
-0.016289938241243362,
-0.02501394972205162,
0.09066224098205566,
0.08578123897314072,
0.016132337972521782,
-0.04497310891747475,
0.09304121136665344,
-0.01447093766182661,
0.06367399543523788,
-0.04362938180565834,
-0.045338619500398636,
-0.07178901135921478,
0.0016052178107202053,
-0.06891583651304245,
0.020445525646209717,
-0.0761960819363594,
-0.035141848027706146,
0.03272336721420288,
-0.036707308143377304,
-0.019818294793367386,
-0.01288587972521782,
-0.09082572162151337,
0.027676844969391823,
-0.05927463620901108,
0.04503019154071808,
-0.04932241886854172,
-0.09220339357852936,
0.0678633376955986,
0.010125560685992241,
0.06754688918590546,
0.060788318514823914,
0.03258161246776581,
0.078379787504673,
-0.2086355835199356,
0.01821708492934704,
0.116754449903965,
0.104976586997509,
0.021655390039086342,
-0.09948396682739258,
-0.006923930253833532,
0.09630030393600464,
-0.03386951610445976,
0.026934215798974037,
0.13413049280643463,
-0.09147805720567703,
-0.04628637805581093,
-0.05111568793654442,
-0.1490028202533722,
0.008152708411216736,
-0.034087084233760834,
0.0015327190048992634,
0.050775784999132156,
0.1376047134399414,
-0.012158194556832314,
0.07483604550361633,
-0.04160112887620926,
0.022786209359765053,
-0.04491136968135834,
-0.11181924492120743,
-0.019763300195336342,
-0.13204626739025116,
-0.015932928770780563,
-0.014422835782170296,
0.24029476940631866,
0.060524214059114456,
0.04618019983172417,
-0.031023899093270302,
0.10115578025579453,
-0.005345429293811321,
-0.008837653324007988,
0.10893744975328445,
0.13177146017551422,
0.0261048786342144,
-0.11836466193199158,
0.08165115863084793,
0.06293006241321564,
-0.01224554143846035,
0.13386569917201996,
0.11269015818834305,
0.05945717915892601,
0.1101488545536995,
0.055947862565517426,
-0.03078336827456951,
-0.012801241129636765,
-0.20984236896038055,
0.011751102283596992,
0.019449593499302864,
0.013564316555857658,
0.035437487065792084,
0.1328229457139969,
-0.0670282393693924,
-0.013360554352402687,
0.015580844134092331,
-0.01953485980629921,
-0.15705014765262604,
-0.12527507543563843,
-0.1113404706120491,
-0.024851562455296516,
-0.0417080782353878,
-0.07117071002721786,
-0.0121539905667305,
-0.05888381600379944,
0.038652993738651276,
-0.030661387369036674,
0.08670540153980255,
-0.10741154104471207,
-0.13733509182929993,
0.019143592566251755,
-0.026010647416114807,
0.042985882610082626,
-0.058715637773275375,
-0.02931010164320469,
-0.0655420646071434,
0.02108074352145195,
-0.012602985836565495,
-0.015571185387670994,
0.04856124520301819,
-0.024312829598784447,
-0.11409763246774673,
-0.06779734790325165,
-0.05277268588542938,
0.0336783304810524,
0.04112205654382706,
0.07353024184703827,
0.0265007633715868,
-0.014285827055573463,
0.05529879033565521,
0.06613177061080933,
0.03928770497441292,
-0.17166605591773987,
-0.08720064163208008,
0.10855494439601898,
0.03403725475072861,
0.02562098391354084,
-0.050473812967538834,
-0.031411707401275635,
-0.022697890177369118,
0.2531461715698242,
0.2979068458080292,
-0.007642737589776516,
-0.0010757908457890153,
0.04243919253349304,
0.021319372579455376,
0.017467226833105087,
0.05079899728298187,
0.08522637188434601,
0.25727376341819763,
-0.022476382553577423,
-0.08619590103626251,
-0.10585536807775497,
0.03905064985156059,
-0.1274702101945877,
0.020978830754756927,
0.02099406160414219,
-0.08356291800737381,
0.033925700932741165,
0.09172404557466507,
0.030618058517575264,
0.022237801924347878,
-0.01529186125844717,
-0.11469990760087967,
-0.08701949566602707,
0.0010514580644667149,
0.027711834758520126,
-0.009756478480994701,
0.041439078748226166,
-0.06468763947486877,
-0.08392813056707382,
0.10481956601142883,
0.01744149811565876,
-0.11614128947257996,
0.001314579276368022,
0.13658687472343445,
0.0895308256149292,
0.05021011829376221,
-0.018465716391801834,
0.17221355438232422,
0.04258168861269951,
0.09778286516666412,
0.028337979689240456,
0.1077764630317688,
0.05518632382154465,
0.0547352209687233,
0.12679600715637207,
-0.039376795291900635,
-0.03992471471428871,
-0.0007661636918783188,
0.13996315002441406,
-0.08783196657896042,
0.09184373915195465,
0.007276126183569431,
-0.1304779201745987,
-0.0024717270862311125,
0.11078579723834991,
-0.1557377278804779,
0.0969720259308815,
0.09515818953514099,
0.0038735775742679834,
-0.020608440041542053,
-0.034432388842105865,
0.0639009028673172,
0.053411051630973816,
-0.01889973133802414,
-0.07828675210475922,
-0.09656845778226852,
-0.01948019117116928,
0.045121874660253525,
0.005502653773874044,
-0.15560537576675415,
-0.07816466689109802,
-0.07834625989198685,
-0.033269740641117096,
-0.03692326322197914,
0.002671873662620783,
0.08100096136331558,
-0.013824462890625,
-0.051136791706085205,
-0.08252913504838943,
0.021112099289894104,
-0.000437002454418689,
-0.1221688911318779,
-0.029989352449774742
] |
null | null |
transformers
|
# SuperPAL model
Summary-Source Proposition-level Alignment: Task, Datasets and Supervised Baseline
Ori Ernst, Ori Shapira, Ramakanth Pasunuru, Michael Lepioshkin, Jacob Goldberger, Mohit Bansal, Ido Dagan, 2021. [PDF](https://arxiv.org/pdf/2009.00590)
**How to use?**
```python
from transformers import AutoTokenizer, AutoModelForSequenceClassification
tokenizer = AutoTokenizer.from_pretrained("biu-nlp/superpal")
model = AutoModelForSequenceClassification.from_pretrained("biu-nlp/superpal")
```
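Continuing from the loading snippet above, a possible way to score a single summary-source pair, following the `</s><s>`-separated input format used in the widget example on this page (which class index corresponds to an aligned pair is an assumption, not stated on the card):
```python
import torch

# Pair format: "summary proposition </s><s> source sentence"
text = (
    "Prime Minister Hun Sen insisted that talks take place in Cambodia. "
    "</s><s> Cambodian leader Hun Sen rejected opposition parties' demands "
    "for talks outside the country."
)

inputs = tokenizer(text, return_tensors="pt", truncation=True)
with torch.no_grad():
    logits = model(**inputs).logits

# Probabilities over the model's classes; treating index 1 as "aligned" is an assumption.
probs = torch.softmax(logits, dim=-1).squeeze()
print(probs.tolist())
```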
The original repo is [here](https://github.com/oriern/SuperPAL).
If you find our work useful, please cite the paper as:
```bibtex
@inproceedings{ernst-etal-2021-summary,
title = "Summary-Source Proposition-level Alignment: Task, Datasets and Supervised Baseline",
author = "Ernst, Ori and Shapira, Ori and Pasunuru, Ramakanth and Lepioshkin, Michael and Goldberger, Jacob and Bansal, Mohit and Dagan, Ido",
booktitle = "Proceedings of the 25th Conference on Computational Natural Language Learning",
month = nov,
year = "2021",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2021.conll-1.25",
pages = "310--322"
}
```
|
{"widget": [{"text": "Prime Minister Hun Sen insisted that talks take place in Cambodia. </s><s> Cambodian leader Hun Sen rejected opposition parties' demands for talks outside the country."}]}
|
text-classification
|
biu-nlp/superpal
|
[
"transformers",
"pytorch",
"roberta",
"text-classification",
"arxiv:2009.00590",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"2009.00590"
] |
[] |
TAGS
#transformers #pytorch #roberta #text-classification #arxiv-2009.00590 #autotrain_compatible #endpoints_compatible #region-us
|
# SuperPAL model
Summary-Source Proposition-level Alignment: Task, Datasets and Supervised Baseline
Ori Ernst, Ori Shapira, Ramakanth Pasunuru, Michael Lepioshkin, Jacob Goldberger, Mohit Bansal, Ido Dagan, 2021. PDF
How to use?
The original repo is here.
If you find our work useful, please cite the paper as:
|
[
"# SuperPAL model\n\nSummary-Source Proposition-level Alignment: Task, Datasets and Supervised Baseline\nOri Ernst, Ori Shapira, Ramakanth Pasunuru, Michael Lepioshkin, Jacob Goldberger, Mohit Bansal, Ido Dagan, 2021. PDF\n\nHow to use?\n\n\n\n\n\nThe original repo is here.\n\n\nIf you find our work useful, please cite the paper as:"
] |
[
"TAGS\n#transformers #pytorch #roberta #text-classification #arxiv-2009.00590 #autotrain_compatible #endpoints_compatible #region-us \n",
"# SuperPAL model\n\nSummary-Source Proposition-level Alignment: Task, Datasets and Supervised Baseline\nOri Ernst, Ori Shapira, Ramakanth Pasunuru, Michael Lepioshkin, Jacob Goldberger, Mohit Bansal, Ido Dagan, 2021. PDF\n\nHow to use?\n\n\n\n\n\nThe original repo is here.\n\n\nIf you find our work useful, please cite the paper as:"
] |
[
44,
89
] |
[
"passage: TAGS\n#transformers #pytorch #roberta #text-classification #arxiv-2009.00590 #autotrain_compatible #endpoints_compatible #region-us \n# SuperPAL model\n\nSummary-Source Proposition-level Alignment: Task, Datasets and Supervised Baseline\nOri Ernst, Ori Shapira, Ramakanth Pasunuru, Michael Lepioshkin, Jacob Goldberger, Mohit Bansal, Ido Dagan, 2021. PDF\n\nHow to use?\n\n\n\n\n\nThe original repo is here.\n\n\nIf you find our work useful, please cite the paper as:"
] |
[
-0.08062505722045898,
0.04790615662932396,
0.0002861368702724576,
0.01120904739946127,
0.12414659559726715,
-0.006813571322709322,
0.10178445279598236,
0.022186478599905968,
0.008181940764188766,
0.0017511161277070642,
0.11765868216753006,
0.1028825044631958,
0.08718381077051163,
0.13373297452926636,
-0.07030180841684341,
-0.2922707200050354,
0.014243374578654766,
0.0703393965959549,
-0.06060606613755226,
0.07875014841556549,
0.09492556750774384,
-0.09309916943311691,
0.04439302533864975,
0.009127075783908367,
-0.11332223564386368,
0.05845917761325836,
-0.05860394984483719,
-0.04656987264752388,
0.08319200575351715,
0.08874884992837906,
0.1320020705461502,
0.09218835830688477,
0.039236947894096375,
-0.004342909436672926,
0.03785242512822151,
-0.04319564625620842,
-0.020476697012782097,
0.06945788115262985,
0.05365915223956108,
0.0005697868764400482,
0.11949622631072998,
-0.038111597299575806,
-0.046386685222387314,
0.008459768258035183,
-0.1523299366235733,
0.027474932372570038,
-0.06445154547691345,
0.06457555294036865,
0.15178099274635315,
0.0025233174674212933,
-0.004945322405546904,
0.15700173377990723,
-0.03032609447836876,
0.047058068215847015,
-0.008553802967071533,
-0.2645077407360077,
-0.08299127221107483,
0.1544121503829956,
-0.02521630749106407,
-0.013651694171130657,
0.02647794596850872,
0.022222919389605522,
0.07882486283779144,
-0.0178334042429924,
-0.013324056752026081,
-0.11558922380208969,
-0.14155437052249908,
0.035747770220041275,
-0.1569315493106842,
0.07042594254016876,
0.293419748544693,
-0.002026469213888049,
-0.04387064650654793,
0.030040577054023743,
-0.09973153471946716,
0.0010354779660701752,
0.012881876900792122,
-0.13089486956596375,
0.027993682771921158,
-0.029528506100177765,
0.11802604794502258,
-0.03084980510175228,
-0.07158910483121872,
0.009083445183932781,
-0.16404591500759125,
0.1856250762939453,
-0.014014189131557941,
0.02291199564933777,
-0.04295535013079643,
0.07182275503873825,
-0.20359881222248077,
-0.10882701724767685,
0.04656974598765373,
-0.11710779368877411,
-0.041566986590623856,
-0.04909395053982735,
-0.029160507023334503,
-0.15377944707870483,
-0.008267818950116634,
0.10592049360275269,
0.1339797079563141,
0.039999641478061676,
0.037837423384189606,
0.05998111516237259,
0.07489140331745148,
0.16944925487041473,
-0.1374228447675705,
-0.13520380854606628,
0.0668589323759079,
0.003157515311613679,
0.09065031260251999,
-0.031307294964790344,
-0.12206284701824188,
-0.04895270988345146,
-0.12680870294570923,
0.049005176872015,
0.04179484024643898,
0.044672876596450806,
-0.039332639425992966,
-0.08066093921661377,
0.05923021212220192,
-0.08410162478685379,
0.004066861234605312,
-0.06854813545942307,
-0.054058369249105453,
0.13624843955039978,
0.0038348401430994272,
0.05135368928313255,
-0.08581791818141937,
0.1113630086183548,
-0.06509239226579666,
0.006456286180764437,
-0.07208888232707977,
-0.07586944848299026,
0.010417722165584564,
-0.10643627494573593,
0.048601049929857254,
-0.149287149310112,
-0.1133662536740303,
-0.027505580335855484,
0.05043493211269379,
-0.08084654808044434,
-0.05129041150212288,
-0.07725990563631058,
0.04964803531765938,
-0.027776993811130524,
-0.04712029919028282,
-0.008459408767521381,
-0.053881507366895676,
-0.02112055942416191,
0.044890303164720535,
0.07594191282987595,
-0.15235556662082672,
0.04819227755069733,
-0.13309212028980255,
0.0037325925659388304,
-0.12567360699176788,
-0.008276376873254776,
-0.017361124977469444,
0.08706088364124298,
-0.06107170879840851,
0.00752401165664196,
-0.08182361721992493,
0.003159456653520465,
0.0749819427728653,
0.22765934467315674,
-0.07485959678888321,
-0.05961773172020912,
0.05532931536436081,
-0.11394903808832169,
-0.15483494102954865,
0.057764481753110886,
0.016784021630883217,
0.10381356626749039,
0.04528568312525749,
0.11298511922359467,
0.04475024715065956,
0.036274660378694534,
0.002007481874898076,
-0.012000752612948418,
-0.00392477260902524,
-0.07718406617641449,
0.08803031593561172,
0.08380109071731567,
-0.16457995772361755,
0.020376065745949745,
0.005365628283470869,
0.012117807753384113,
-0.056340742856264114,
-0.06201284006237984,
-0.02379431016743183,
-0.035281483083963394,
0.016600925475358963,
-0.007716196123510599,
0.08195432275533676,
-0.03156155347824097,
-0.00045261444756761193,
-0.05595045164227486,
0.04321551322937012,
0.011405388824641705,
0.01359860971570015,
-0.04527555778622627,
0.12492707371711731,
-0.18188585340976715,
0.019367342814803123,
-0.15631495416164398,
0.028133206069469452,
-0.005430358927696943,
0.11309488117694855,
0.06836934387683868,
0.07165025919675827,
0.022717198356986046,
0.021223340183496475,
-0.028779638931155205,
-0.02165628783404827,
0.09046762436628342,
-0.00817160215228796,
-0.029191678389906883,
-0.11644887179136276,
-0.006233885418623686,
-0.03916109353303909,
-0.014784873463213444,
-0.10047699511051178,
-0.014651360921561718,
-0.060895487666130066,
0.12869103252887726,
-0.013901771046221256,
0.07734611630439758,
0.03844572603702545,
0.11708876490592957,
-0.06022633612155914,
0.02594219706952572,
0.0893888995051384,
0.006628596223890781,
-0.0996008887887001,
0.09112311899662018,
-0.03324965015053749,
0.20262496173381805,
0.1274436116218567,
-0.18117953836917877,
0.00034264620626345277,
-0.039934199303388596,
-0.012658733874559402,
-0.0069456021301448345,
-0.02325437031686306,
0.10533114522695541,
0.09049851447343826,
0.005391126964241266,
0.09588462859392166,
-0.038233157247304916,
0.05827063322067261,
0.003982985857874155,
-0.06931357830762863,
-0.006152943708002567,
0.10362270474433899,
0.23442324995994568,
-0.15927058458328247,
0.12758904695510864,
0.10569749772548676,
-0.020747844129800797,
0.17735619843006134,
0.0005740058259107172,
-0.03679196909070015,
-0.029264986515045166,
-0.055150095373392105,
-0.05218304321169853,
0.011183584108948708,
-0.14321371912956238,
0.008869036100804806,
0.056657224893569946,
-0.006018112413585186,
0.028943786397576332,
-0.11987647414207458,
-0.03143364563584328,
0.05828242376446724,
0.05814456194639206,
-0.06287870556116104,
0.03185923770070076,
-0.04749242588877678,
0.10198267549276352,
-0.008444080129265785,
-0.02671191655099392,
0.025651508942246437,
0.014245848171412945,
-0.08792119473218918,
0.11663049459457397,
-0.0006966512301005423,
-0.26072418689727783,
-0.1349717080593109,
-0.08781161159276962,
-0.12211652100086212,
0.01608860492706299,
0.043553676456213,
-0.008511783555150032,
-0.08810240775346756,
0.041118912398815155,
0.060696858912706375,
0.01757087931036949,
-0.003979402594268322,
0.005688228644430637,
-0.0024289540015161037,
-0.05458274856209755,
-0.061376579105854034,
-0.04486194998025894,
-0.009660706855356693,
0.01966782473027706,
0.07398536056280136,
-0.059082239866256714,
0.138131782412529,
0.0899023711681366,
-0.029382621869444847,
0.011083811521530151,
0.016018351539969444,
0.16349296271800995,
-0.08944448828697205,
0.0706949457526207,
0.21992819011211395,
-0.04454301297664642,
0.04504036158323288,
0.1613570749759674,
0.019101127982139587,
-0.019909005612134933,
-0.011321666650474072,
-0.0720481276512146,
-0.06264646351337433,
-0.20507942140102386,
-0.10756675899028778,
-0.12578031420707703,
0.03499310463666916,
0.0248761847615242,
-0.024080585688352585,
-0.020953701809048653,
0.15948596596717834,
0.01760207675397396,
-0.008678148500621319,
-0.12334597855806351,
0.10577423125505447,
0.17180956900119781,
0.030866609886288643,
0.16314907371997833,
-0.11042811721563339,
-0.1459088921546936,
0.06914302706718445,
-0.012913279235363007,
0.13667894899845123,
0.15306426584720612,
-0.0859379768371582,
0.0005528883193619549,
0.0070620751939713955,
0.07945823669433594,
0.14140689373016357,
0.012715764343738556,
-0.07539911568164825,
-0.08050257712602615,
-0.059731267392635345,
-0.06409823894500732,
0.10532335937023163,
-0.0334990993142128,
-0.021313736215233803,
-0.03951053321361542,
-0.10823162645101547,
0.06319322437047958,
0.15445071458816528,
0.11270065605640411,
-0.22636042535305023,
-0.04943826422095299,
0.036878619343042374,
0.0028228650335222483,
-0.03181947395205498,
0.002422880847007036,
-0.10440939664840698,
-0.05334611237049103,
0.14141161739826202,
0.007979677990078926,
0.13194742798805237,
-0.007297356612980366,
0.03477977216243744,
-0.19026176631450653,
-0.07157015055418015,
-0.023325610905885696,
0.08507617563009262,
-0.15418633818626404,
0.2707115411758423,
0.010459735058248043,
-0.028923198580741882,
-0.057847823947668076,
-0.003845958737656474,
0.04975901544094086,
0.25127437710762024,
0.06653948873281479,
0.007875155657529831,
-0.1271124929189682,
-0.04998266324400902,
-0.04795641452074051,
0.07018132507801056,
0.03604506701231003,
-0.011380348354578018,
0.07757117599248886,
0.0013210212346166372,
0.012467021122574806,
-0.020166758447885513,
0.060693059116601944,
-0.016964221373200417,
-0.09928728640079498,
0.07535367459058762,
-0.04328868165612221,
0.07972653210163116,
0.05611185356974602,
-0.09583038091659546,
0.002861034357920289,
0.17578163743019104,
-0.029428578913211823,
-0.06642122566699982,
-0.09011435508728027,
0.008075088262557983,
0.07252143323421478,
-0.06067856401205063,
0.012706481851637363,
-0.04286113381385803,
0.0007096268236637115,
-0.0008649759693071246,
-0.08824510127305984,
0.08018708229064941,
-0.09243345260620117,
0.0009692200110293925,
-0.019864363595843315,
0.09474388509988785,
-0.037951670587062836,
0.025951433926820755,
-0.02263377606868744,
0.05593240261077881,
-0.026913994923233986,
-0.10726987570524216,
0.048625826835632324,
-0.01176343485713005,
0.11059103906154633,
0.04123873636126518,
-0.013076318427920341,
-0.02043241262435913,
-0.011251218616962433,
-0.058614857494831085,
0.14182904362678528,
0.24245943129062653,
-0.02165663242340088,
0.05115223303437233,
0.15516948699951172,
-0.06792404502630234,
-0.2300960123538971,
-0.02255694381892681,
-0.08265146613121033,
0.08483274281024933,
-0.03292292729020119,
-0.15201832354068756,
0.11506997048854828,
0.045413363724946976,
-0.009602033533155918,
0.0919915959239006,
-0.13493189215660095,
-0.10041394084692001,
0.0944007933139801,
0.043458107858896255,
0.3414826989173889,
-0.1118905320763588,
-0.004496601410210133,
-0.060987554490566254,
-0.12078161537647247,
0.10626184195280075,
0.016035275533795357,
0.08277525752782822,
-0.04718475788831711,
0.012332353740930557,
0.018937181681394577,
-0.03562316298484802,
0.17952241003513336,
-0.12503038346767426,
0.010212608613073826,
-0.0715366080403328,
-0.052706655114889145,
0.045321159064769745,
0.0032028756104409695,
0.0777464509010315,
0.06050671637058258,
-0.004707622807472944,
-0.1166815385222435,
-0.08105842769145966,
0.013794033788144588,
0.0539654865860939,
0.01911051757633686,
-0.10316705703735352,
-0.02944306470453739,
0.07436170428991318,
-0.021625692024827003,
-0.002150963759049773,
0.14584428071975708,
-0.09301229566335678,
0.08456942439079285,
0.12535658478736877,
0.16417911648750305,
-0.16149866580963135,
0.05130286142230034,
-0.004731486551463604,
-0.07032781839370728,
0.06930942088365555,
-0.14245764911174774,
-0.005209183320403099,
0.19372272491455078,
-0.012604051269590855,
0.0226190984249115,
0.05889138579368591,
0.025208789855241776,
-0.009467199444770813,
0.13074228167533875,
-0.17561455070972443,
-0.03963736072182655,
-0.060699909925460815,
-0.046242646872997284,
0.06171717867255211,
0.08073488622903824,
0.13407662510871887,
-0.08452049642801285,
-0.039438601583242416,
0.027929682284593582,
0.009129276499152184,
-0.06769857555627823,
0.06488177180290222,
0.06663667410612106,
0.012275627814233303,
-0.07113754749298096,
0.08599439263343811,
0.0805596336722374,
-0.14031390845775604,
-0.011135198175907135,
0.0714007094502449,
-0.09988514333963394,
-0.06049639359116554,
-0.1368194967508316,
0.11566336452960968,
-0.17980709671974182,
-0.10996092855930328,
-0.11323051899671555,
-0.05602181702852249,
0.022649265825748444,
0.08836300671100616,
0.07745356112718582,
-0.04584459960460663,
-0.06238361820578575,
-0.05134708434343338,
-0.05073568597435951,
0.01530308835208416,
0.14069458842277527,
0.04671099781990051,
-0.1400103121995926,
-0.005502250045537949,
-0.016057537868618965,
0.12044061720371246,
-0.08633053302764893,
-0.02944515459239483,
-0.17688165605068207,
-0.0029604248702526093,
-0.13181209564208984,
-0.04970073699951172,
-0.0692506805062294,
-0.030038688331842422,
-0.03896761313080788,
-0.09721655398607254,
-0.14033403992652893,
-0.030641911551356316,
-0.08206609636545181,
0.053487829864025116,
-0.027288217097520828,
-0.0038110953755676746,
0.01051077526062727,
-0.0421462319791317,
0.10083741694688797,
0.004478713497519493,
-0.018866179510951042,
0.05054271221160889,
-0.03932909667491913,
0.06060190126299858,
-0.04847946763038635,
0.009519113227725029,
0.04037344083189964,
0.05494535341858864,
0.07172995805740356,
-0.14837203919887543,
0.034611593931913376,
0.09811350703239441,
0.014364858157932758,
0.02868010476231575,
0.08339251577854156,
-0.08491325378417969,
-0.006555532105267048,
-0.06702364981174469,
-0.16109636425971985,
0.004164862912148237,
-0.022807002067565918,
0.03951650112867355,
0.09589886665344238,
0.10809579491615295,
-0.02879241481423378,
0.04423130303621292,
-0.11318513751029968,
0.03291202336549759,
-0.04826968163251877,
-0.14741550385951996,
0.025552889332175255,
-0.14625880122184753,
0.023036928847432137,
-0.0145998140797019,
0.24746227264404297,
0.04597862809896469,
0.10028200596570969,
0.0384170338511467,
-0.05676959082484245,
0.04430763050913811,
0.012716591358184814,
0.15559692680835724,
0.10611238330602646,
-0.011272121220827103,
-0.07134693115949631,
0.16392143070697784,
0.013742141425609589,
0.10813193023204803,
0.1589846909046173,
0.008390967734158039,
-0.03851041570305824,
0.08501327782869339,
-0.05068247765302658,
-0.018283499404788017,
-0.008859545923769474,
-0.1557038277387619,
0.030361134558916092,
0.054811831563711166,
0.011050927452743053,
0.11181385815143585,
0.18061985075473785,
-0.03226571902632713,
0.05542868748307228,
-0.048323847353458405,
-0.023913966491818428,
-0.14987048506736755,
-0.14523448050022125,
-0.11490463465452194,
-0.14147774875164032,
-0.030002670362591743,
-0.08206479996442795,
-0.022249050438404083,
0.1137043759226799,
0.04013742133975029,
-0.05866430699825287,
0.12101835757493973,
0.0257432758808136,
-0.0194567684084177,
0.08156972378492355,
-0.0310229379683733,
0.03681909665465355,
-0.04031464830040932,
-0.016302891075611115,
-0.06295882165431976,
0.021144259721040726,
-0.05204616114497185,
0.028602372854948044,
-0.023942843079566956,
-0.03270987793803215,
0.009808878414332867,
-0.07248751819133759,
-0.06918302923440933,
0.06065952777862549,
0.030353037640452385,
0.0734221413731575,
-0.024775126948952675,
0.057853661477565765,
0.012237118557095528,
0.2105061113834381,
-0.05805915221571922,
-0.07800967991352081,
-0.11212457716464996,
0.18856249749660492,
0.00919035729020834,
0.07081668078899384,
0.010321138426661491,
-0.06130886822938919,
-0.010759752243757248,
0.1702289581298828,
0.2512736916542053,
-0.021651877090334892,
-0.017498815432190895,
-0.002617445308715105,
0.04097861796617508,
0.04591421037912369,
0.07096148282289505,
0.08205259591341019,
0.13159921765327454,
-0.11203642934560776,
-0.017545314505696297,
-0.0850902870297432,
0.025825712829828262,
-0.014674630016088486,
0.10920675098896027,
0.10892873257398605,
-0.03570234403014183,
-0.04152088984847069,
0.1067737340927124,
-0.0642055794596672,
0.0013240693369880319,
-0.04229041188955307,
-0.16131308674812317,
-0.14597612619400024,
-0.052424442023038864,
-0.03169086575508118,
-0.014961883425712585,
0.09456340968608856,
-0.0782066062092781,
-0.06397654116153717,
0.026755787432193756,
0.05335398018360138,
-0.0800388753414154,
-0.08740273863077164,
0.13105016946792603,
-0.0029057650826871395,
-0.0034666210412979126,
-0.0012052335077896714,
0.055518172681331635,
0.09914491325616837,
0.018468603491783142,
-0.05533097684383392,
0.05572234094142914,
0.027640653774142265,
-0.015198537148535252,
0.05476747453212738,
-0.00713600218296051,
-0.003370304126292467,
-0.03910309821367264,
0.024214347824454308,
-0.1192457526922226,
0.016072997823357582,
0.0875345915555954,
-0.00930252019315958,
-0.032577164471149445,
0.09608505666255951,
-0.11529911309480667,
0.11908730119466782,
0.1608676016330719,
-0.037104591727256775,
0.001470800838433206,
-0.07643906772136688,
0.12936119735240936,
0.05189052224159241,
0.01080713514238596,
-0.0092387106269598,
-0.13010559976100922,
-0.03319806605577469,
0.0793202668428421,
0.020756050944328308,
-0.19258882105350494,
-0.02182145044207573,
-0.12131112068891525,
0.011600092984735966,
-0.03366287425160408,
0.047877728939056396,
-0.00046261012903414667,
0.004149374086409807,
-0.043203793466091156,
-0.19958068430423737,
0.00589723838493228,
0.013290680013597012,
-0.061347946524620056,
-0.07333657145500183
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# layoutlxlm-finetuned-funsd-test
This model is a fine-tuned version of [microsoft/layoutxlm-base](https://huggingface.co/microsoft/layoutxlm-base) on an unknown dataset.
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 8
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- training_steps: 1000
- mixed_precision_training: Native AMP
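
For reference, a minimal `TrainingArguments` sketch that mirrors the configuration above (the output directory name is illustrative; the Adam betas and epsilon listed are the library defaults):

```python
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="layoutxlm-finetuned-funsd-test",  # illustrative name
    learning_rate=5e-5,
    per_device_train_batch_size=8,
    per_device_eval_batch_size=8,
    seed=42,
    lr_scheduler_type="linear",
    warmup_ratio=0.1,
    max_steps=1000,
    fp16=True,  # mixed-precision training (native AMP)
)
```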
### Training results
### Framework versions
- Transformers 4.13.0.dev0
- Pytorch 1.8.0+cu101
- Datasets 1.15.1
- Tokenizers 0.10.3
|
{"license": "cc-by-nc-sa-4.0", "tags": ["generated_from_trainer"], "model-index": [{"name": "layoutlxlm-finetuned-funsd-test", "results": []}]}
|
token-classification
|
bjorz/layoutxlm-finetuned-funsd-test
|
[
"transformers",
"pytorch",
"tensorboard",
"layoutlmv2",
"token-classification",
"generated_from_trainer",
"license:cc-by-nc-sa-4.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #layoutlmv2 #token-classification #generated_from_trainer #license-cc-by-nc-sa-4.0 #autotrain_compatible #endpoints_compatible #region-us
|
# layoutlxlm-finetuned-funsd-test
This model is a fine-tuned version of microsoft/layoutxlm-base on an unknown dataset.
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 8
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- training_steps: 1000
- mixed_precision_training: Native AMP
### Training results
### Framework versions
- Transformers 4.13.0.dev0
- Pytorch 1.8.0+cu101
- Datasets 1.15.1
- Tokenizers 0.10.3
|
[
"# layoutlxlm-finetuned-funsd-test\n\nThis model is a fine-tuned version of microsoft/layoutxlm-base on an unknown dataset.",
"## Model description\n\nMore information needed",
"## Intended uses & limitations\n\nMore information needed",
"## Training and evaluation data\n\nMore information needed",
"## Training procedure",
"### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- learning_rate: 5e-05\n- train_batch_size: 8\n- eval_batch_size: 8\n- seed: 42\n- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n- lr_scheduler_type: linear\n- lr_scheduler_warmup_ratio: 0.1\n- training_steps: 1000\n- mixed_precision_training: Native AMP",
"### Training results",
"### Framework versions\n\n- Transformers 4.13.0.dev0\n- Pytorch 1.8.0+cu101\n- Datasets 1.15.1\n- Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #layoutlmv2 #token-classification #generated_from_trainer #license-cc-by-nc-sa-4.0 #autotrain_compatible #endpoints_compatible #region-us \n",
"# layoutlxlm-finetuned-funsd-test\n\nThis model is a fine-tuned version of microsoft/layoutxlm-base on an unknown dataset.",
"## Model description\n\nMore information needed",
"## Intended uses & limitations\n\nMore information needed",
"## Training and evaluation data\n\nMore information needed",
"## Training procedure",
"### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- learning_rate: 5e-05\n- train_batch_size: 8\n- eval_batch_size: 8\n- seed: 42\n- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n- lr_scheduler_type: linear\n- lr_scheduler_warmup_ratio: 0.1\n- training_steps: 1000\n- mixed_precision_training: Native AMP",
"### Training results",
"### Framework versions\n\n- Transformers 4.13.0.dev0\n- Pytorch 1.8.0+cu101\n- Datasets 1.15.1\n- Tokenizers 0.10.3"
] |
[
65,
42,
6,
12,
8,
3,
117,
4,
37
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #layoutlmv2 #token-classification #generated_from_trainer #license-cc-by-nc-sa-4.0 #autotrain_compatible #endpoints_compatible #region-us \n# layoutlxlm-finetuned-funsd-test\n\nThis model is a fine-tuned version of microsoft/layoutxlm-base on an unknown dataset.## Model description\n\nMore information needed## Intended uses & limitations\n\nMore information needed## Training and evaluation data\n\nMore information needed## Training procedure### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- learning_rate: 5e-05\n- train_batch_size: 8\n- eval_batch_size: 8\n- seed: 42\n- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n- lr_scheduler_type: linear\n- lr_scheduler_warmup_ratio: 0.1\n- training_steps: 1000\n- mixed_precision_training: Native AMP### Training results### Framework versions\n\n- Transformers 4.13.0.dev0\n- Pytorch 1.8.0+cu101\n- Datasets 1.15.1\n- Tokenizers 0.10.3"
] |
[
-0.11350405961275101,
0.13143187761306763,
-0.0025706826709210873,
0.0884898453950882,
0.1298552006483078,
0.03712385520339012,
0.08364524692296982,
0.1604582965373993,
-0.07265586405992508,
0.06574681401252747,
0.0841170996427536,
0.05129355564713478,
0.054983146488666534,
0.13965065777301788,
-0.027820779010653496,
-0.23206786811351776,
0.012147538363933563,
-0.007684278767555952,
-0.03264794126152992,
0.10995525866746902,
0.09653837978839874,
-0.09403937309980392,
0.07007928937673569,
0.0025422440376132727,
-0.13737399876117706,
0.0024553975090384483,
-0.027813343331217766,
-0.055303607136011124,
0.09176438301801682,
-0.014314154163002968,
0.09522039443254471,
0.013749492354691029,
0.1434250921010971,
-0.19627492129802704,
-0.003222402185201645,
0.06557260453701019,
0.053246885538101196,
0.08222413808107376,
0.08259237557649612,
0.00026551177143119276,
0.07208012044429779,
-0.14500923454761505,
0.09061606973409653,
0.022753188386559486,
-0.07178466767072678,
-0.12480201572179794,
-0.09450789541006088,
0.06330011785030365,
0.07771764695644379,
0.09251338988542557,
0.01573377475142479,
0.14426888525485992,
-0.08694811165332794,
0.08945804089307785,
0.16801685094833374,
-0.24567081034183502,
-0.06584884226322174,
0.09313179552555084,
0.09334788471460342,
0.02872641198337078,
-0.10552140325307846,
-0.009254278615117073,
0.015758046880364418,
0.041654329746961594,
0.08767179399728775,
-0.028618091717362404,
-0.08804768323898315,
0.006017626728862524,
-0.1198224350810051,
-0.045385245233774185,
0.12527698278427124,
0.02342849038541317,
-0.04543434455990791,
-0.1142892837524414,
-0.036974042654037476,
-0.12580469250679016,
0.000799887755420059,
-0.034829068928956985,
0.028477579355239868,
-0.05175365135073662,
-0.043468330055475235,
-0.06810582429170609,
-0.07413741946220398,
-0.0788128450512886,
0.012208056636154652,
0.08800837397575378,
0.0256291925907135,
0.018908217549324036,
-0.02985023520886898,
0.14662547409534454,
0.004363358020782471,
-0.1202925369143486,
-0.028773624449968338,
-0.01532752811908722,
-0.11530955880880356,
-0.07094261050224304,
-0.02679937705397606,
-0.0482567735016346,
-0.006865006871521473,
0.12889744341373444,
-0.033626489341259,
0.0835791528224945,
0.02472039684653282,
-0.0050641861744225025,
-0.03297794610261917,
0.16868838667869568,
-0.030926775187253952,
-0.04311235994100571,
-0.005500268191099167,
0.10958414524793625,
-0.00775888143107295,
-0.0030057840049266815,
-0.07578901201486588,
-0.04887048155069351,
0.10212032496929169,
0.06933260709047318,
-0.027625111863017082,
0.03588098660111427,
-0.050227485597133636,
-0.026084614917635918,
0.021683668717741966,
-0.13741514086723328,
0.055827170610427856,
-0.006218260154128075,
-0.08887823671102524,
-0.044836677610874176,
0.04868202283978462,
-0.012688578106462955,
-0.04482145980000496,
0.0922071784734726,
-0.0643080621957779,
0.014993980526924133,
-0.09378533065319061,
-0.05991440638899803,
0.019534455612301826,
-0.11320099979639053,
-0.023286981508135796,
-0.043087709695100784,
-0.25329309701919556,
-0.0649145320057869,
0.04470004513859749,
-0.06338340789079666,
-0.02020927518606186,
-0.03971772640943527,
-0.06677291542291641,
0.015623433515429497,
-0.007531553041189909,
0.1436188817024231,
-0.04749404639005661,
0.08043217658996582,
0.006399156060069799,
0.023263586685061455,
0.042181648313999176,
0.045910246670246124,
-0.08344674110412598,
0.034803085029125214,
-0.13061785697937012,
0.092270627617836,
-0.08687761425971985,
0.013636365532875061,
-0.11139100044965744,
-0.10288932919502258,
0.010448288172483444,
-0.01943545788526535,
0.062326233834028244,
0.14415010809898376,
-0.18822045624256134,
0.008727140724658966,
0.13365685939788818,
-0.07211261242628098,
-0.04513115808367729,
0.06908290833234787,
-0.0487208217382431,
0.05104834958910942,
0.049382731318473816,
0.15175586938858032,
0.12001053243875504,
-0.1513267457485199,
-0.006675338838249445,
0.0027338960207998753,
0.02695658430457115,
0.016084210947155952,
0.048724595457315445,
-0.012117711827158928,
0.054649628698825836,
0.014543144032359123,
-0.09168518334627151,
-0.026649631559848785,
-0.07399965077638626,
-0.07563711702823639,
-0.07043100148439407,
-0.07334065437316895,
0.05994545295834541,
0.02813420258462429,
0.028882035985589027,
-0.05948062986135483,
-0.10913655906915665,
0.11253751069307327,
0.13471801578998566,
-0.05289792641997337,
0.001297331997193396,
-0.0908573642373085,
0.012626194395124912,
-0.02137875370681286,
-0.037528008222579956,
-0.21201615035533905,
-0.09920267015695572,
0.041876260191202164,
-0.06286867707967758,
0.034835588186979294,
0.01786419004201889,
0.07429979741573334,
0.05004945397377014,
-0.032074399292469025,
-0.040133554488420486,
-0.07879123836755753,
0.0019197346409782767,
-0.10302744060754776,
-0.1623535454273224,
-0.07543734461069107,
-0.03179360553622246,
0.12130960822105408,
-0.23294568061828613,
0.017225192859768867,
0.013955269008874893,
0.14827539026737213,
0.030288320034742355,
-0.061029210686683655,
0.0402393713593483,
0.04591214284300804,
0.004706635605543852,
-0.10419070720672607,
0.04050445556640625,
-0.0026081474497914314,
-0.08025367558002472,
-0.056474149227142334,
-0.11602877825498581,
-0.002273114863783121,
0.05484432354569435,
0.09535815566778183,
-0.11539878696203232,
-0.02417311631143093,
-0.047825127840042114,
-0.060563765466213226,
-0.08120838552713394,
-0.0006977012963034213,
0.1928449422121048,
0.02978575974702835,
0.10765744000673294,
-0.05231142044067383,
-0.0662238821387291,
-0.009477775543928146,
0.015130358748137951,
-0.0006451583467423916,
0.07867874950170517,
0.03531978651881218,
-0.09117332845926285,
0.07706256955862045,
0.059206511825323105,
-0.06210979074239731,
0.1466493457555771,
-0.04979357495903969,
-0.08666576445102692,
-0.03237086907029152,
0.026084719225764275,
-0.008218218572437763,
0.13923348486423492,
-0.07056856155395508,
-0.0037244881968945265,
0.031564902514219284,
0.024682359769940376,
0.02634109929203987,
-0.17906485497951508,
-0.0011879006633535028,
0.02034776844084263,
-0.061387479305267334,
-0.00522499717772007,
-0.010951577685773373,
0.04411213845014572,
0.06798600405454636,
0.010761176235973835,
-0.023314714431762695,
0.03433108329772949,
-0.015684986487030983,
-0.09202978014945984,
0.17103680968284607,
-0.10591547936201096,
-0.15728923678398132,
-0.13560576736927032,
0.08083809167146683,
-0.046148356050252914,
-0.032542724162340164,
-0.00004460949276108295,
-0.04686279594898224,
-0.04683418199419975,
-0.08740922808647156,
-0.0518149696290493,
-0.021070195361971855,
-0.018043655902147293,
0.000703074038028717,
0.01617877185344696,
0.07414963096380234,
-0.11919193714857101,
0.009697950445115566,
0.004988483153283596,
-0.0706753358244896,
0.026374520733952522,
0.0479988157749176,
0.09603326767683029,
0.10781840234994888,
-0.029022838920354843,
0.02528134174644947,
-0.030977504327893257,
0.18219025433063507,
-0.08737238496541977,
0.017846127972006798,
0.14724396169185638,
-0.006537057925015688,
0.052612677216529846,
0.09717927128076553,
0.013520706444978714,
-0.07977235317230225,
0.019865062087774277,
0.03219199925661087,
-0.014176104217767715,
-0.23583683371543884,
-0.0344393327832222,
-0.018041059374809265,
-0.04224833473563194,
0.09363552927970886,
0.04679788649082184,
-0.010195924900472164,
0.04141930490732193,
0.013963240198791027,
-0.003499975660815835,
-0.02278796210885048,
0.07505646347999573,
0.09069835394620895,
0.03335520252585411,
0.0926838219165802,
-0.02967565320432186,
-0.00707252835854888,
0.05551638826727867,
0.04656865820288658,
0.24586884677410126,
-0.03252450004220009,
0.11073978245258331,
0.00950130820274353,
0.14911620318889618,
-0.016678282991051674,
0.04715530201792717,
0.04171312600374222,
0.005421517416834831,
0.026753008365631104,
-0.0581793375313282,
-0.03663930669426918,
0.02610023505985737,
0.0034886286593973637,
0.0270936731249094,
-0.07729976624250412,
0.021573850885033607,
0.016794772818684578,
0.27738261222839355,
0.04401461035013199,
-0.30120497941970825,
-0.07759767770767212,
-0.003055316861718893,
-0.027621440589427948,
-0.0856143981218338,
-0.017835251986980438,
0.1292390525341034,
-0.17335496842861176,
0.06653165817260742,
-0.08021590858697891,
0.08744185417890549,
-0.05953040346503258,
-0.0037854427937418222,
0.07089081406593323,
0.10718649625778198,
-0.0000028603751616174122,
0.08503394573926926,
-0.22278186678886414,
0.1906556934118271,
0.02544369176030159,
0.12058278173208237,
-0.07452957332134247,
0.03363056108355522,
0.024742091074585915,
0.06455978751182556,
0.11662928760051727,
-0.014564726501703262,
-0.07700634002685547,
-0.1640595644712448,
-0.10757388919591904,
0.024569785222411156,
0.11000170558691025,
-0.013105212710797787,
0.08893832564353943,
-0.04057927802205086,
0.002237008884549141,
0.02467632107436657,
-0.12315715849399567,
-0.15111826360225677,
-0.07769165933132172,
0.03450342267751694,
0.006237689405679703,
0.003231164999306202,
-0.07753247767686844,
-0.09050370752811432,
0.00037147049442864954,
0.17504440248012543,
-0.04078264907002449,
-0.04614773765206337,
-0.1510559618473053,
0.04985913634300232,
0.15072493255138397,
-0.044945426285266876,
0.02101775072515011,
0.0144257303327322,
0.11105509847402573,
0.0406443290412426,
-0.07182086259126663,
0.041135285049676895,
-0.06508636474609375,
-0.17971111834049225,
-0.0567183718085289,
0.1210755705833435,
0.05357949063181877,
0.04986460134387016,
0.007431809324771166,
0.03289016708731651,
-0.001957767875865102,
-0.08733593672513962,
0.0166400708258152,
0.07299434393644333,
0.07660751044750214,
0.06731900572776794,
-0.08502800017595291,
0.012224430218338966,
-0.023926839232444763,
-0.026577219367027283,
0.11868062615394592,
0.14774803817272186,
-0.09355388581752777,
0.08440955728292465,
0.02377985045313835,
-0.08788148313760757,
-0.19203130900859833,
0.08048488199710846,
0.12467549741268158,
0.05187109485268593,
0.06955140829086304,
-0.18003912270069122,
0.08379792422056198,
0.10822206735610962,
-0.029260680079460144,
0.09020208567380905,
-0.3229207992553711,
-0.12988422811031342,
0.06454755365848541,
0.09188855439424515,
-0.041361644864082336,
-0.12817762792110443,
-0.0467674694955349,
0.005091697908937931,
-0.12285957485437393,
0.09330124408006668,
-0.05894351005554199,
0.11824814230203629,
-0.019619915634393692,
0.10693009942770004,
0.03548033535480499,
-0.05056699365377426,
0.14343109726905823,
0.029152870178222656,
0.08108539879322052,
-0.05184616521000862,
0.025582393631339073,
0.07330689579248428,
-0.08523853123188019,
0.0879359096288681,
-0.03152308240532875,
0.07146622985601425,
-0.19657287001609802,
-0.007233407814055681,
-0.08048984408378601,
0.0761571153998375,
-0.04859510809183121,
-0.04365827143192291,
-0.03136242553591728,
0.06456724554300308,
0.02962014451622963,
-0.038630787283182144,
0.03335990384221077,
-0.009023160673677921,
0.06969109922647476,
0.14817696809768677,
0.1082877442240715,
0.023147910833358765,
-0.12943662703037262,
0.004003389738500118,
-0.007909310981631279,
0.03844700753688812,
-0.09155133366584778,
0.01307641714811325,
0.1486147791147232,
0.04431823268532753,
0.1333943009376526,
0.01947430521249771,
-0.050045937299728394,
-0.0020720064640045166,
0.03648462891578674,
-0.11164863407611847,
-0.09455849230289459,
-0.0008016495849005878,
-0.05323353409767151,
-0.13474278151988983,
0.002275595674291253,
0.10746142268180847,
-0.05039014294743538,
-0.01323621068149805,
-0.009739939123392105,
0.024356383830308914,
-0.021067215129733086,
0.21081121265888214,
0.02348746918141842,
0.07426849752664566,
-0.07248544692993164,
0.12389039248228073,
0.06505227833986282,
-0.08062528818845749,
0.038163863122463226,
0.0880943089723587,
-0.09658981114625931,
-0.023838860914111137,
0.06962169706821442,
0.14405518770217896,
-0.04358150064945221,
-0.04630590230226517,
-0.0857270359992981,
-0.08597176522016525,
0.04002992808818817,
0.10909461230039597,
0.049237873405218124,
0.01322959829121828,
-0.02818453311920166,
0.005795480217784643,
-0.12277802079916,
0.09103918820619583,
0.07242211699485779,
0.06847832351922989,
-0.15034416317939758,
0.15105989575386047,
0.0007642647251486778,
0.04831425100564957,
-0.016657687723636627,
0.021853992715477943,
-0.07575491070747375,
-0.007144525181502104,
-0.11584965139627457,
0.007090133149176836,
-0.0257802065461874,
-0.0013442946365103126,
-0.0010868305107578635,
-0.041708722710609436,
-0.012527357786893845,
0.040120888501405716,
-0.06760968267917633,
-0.06222556531429291,
-0.0080699622631073,
0.05107836425304413,
-0.1204628199338913,
-0.017020665109157562,
0.017805038020014763,
-0.09038566797971725,
0.07056274265050888,
0.050584640353918076,
0.00839075818657875,
0.008123626001179218,
-0.07170110195875168,
-0.0051927524618804455,
0.020023658871650696,
0.018480105325579643,
0.0411887988448143,
-0.10533463954925537,
-0.008586371317505836,
-0.023456675931811333,
0.03635484352707863,
0.03076786734163761,
0.08255708962678909,
-0.12822958827018738,
-0.02075030654668808,
-0.054476723074913025,
-0.02226858027279377,
-0.06330820918083191,
0.06287319213151932,
0.12059260904788971,
0.044911619275808334,
0.162339985370636,
-0.0818130299448967,
0.05908238887786865,
-0.19179025292396545,
-0.030159439891576767,
-0.006588384974747896,
-0.02513086050748825,
-0.06581827998161316,
-0.02516781911253929,
0.09760849922895432,
-0.048919085413217545,
0.09057586640119553,
0.00024137090076692402,
0.09762401878833771,
0.03142358362674713,
-0.04265020042657852,
-0.04449499025940895,
0.014759007841348648,
0.12575756013393402,
0.042121388018131256,
-0.013614766299724579,
0.11538239568471909,
-0.005757695995271206,
0.015578786842525005,
0.027545258402824402,
0.19229112565517426,
0.14149151742458344,
-0.026509592309594154,
0.08664552867412567,
0.07031260430812836,
-0.10080157220363617,
-0.15665920078754425,
0.09799085557460785,
-0.025940967723727226,
0.12528525292873383,
-0.06562221795320511,
0.16045533120632172,
0.06045416742563248,
-0.18826597929000854,
0.06397449970245361,
-0.055009081959724426,
-0.12198495864868164,
-0.09358175843954086,
-0.0685918778181076,
-0.07377459853887558,
-0.08924783766269684,
0.014620010741055012,
-0.10152692347764969,
0.051566675305366516,
0.077069953083992,
0.011623351834714413,
-0.0065721143037080765,
0.1521219164133072,
-0.058861296623945236,
0.002506287768483162,
0.0755457803606987,
0.01920420303940773,
0.01740572601556778,
-0.05136517435312271,
-0.057280685752630234,
0.035621073096990585,
0.024221837520599365,
0.08806431293487549,
-0.04416282847523689,
0.006135168485343456,
0.012680787593126297,
-0.014054421335458755,
-0.06632359325885773,
0.025403032079339027,
0.03411924093961716,
0.04201166331768036,
0.05754878371953964,
0.060705166310071945,
-0.0022665997967123985,
-0.04701254889369011,
0.26863786578178406,
-0.07942333072423935,
-0.08627400547266006,
-0.13237936794757843,
0.2070419192314148,
0.009586377069354057,
-0.029915519058704376,
0.06531713902950287,
-0.11024369299411774,
0.009169882163405418,
0.1549781709909439,
0.1359536051750183,
-0.06688424944877625,
-0.02270604483783245,
-0.016115572303533554,
-0.014351869001984596,
-0.031689755618572235,
0.09368427097797394,
0.06591468304395676,
0.041684988886117935,
-0.0683898776769638,
0.0002545449242461473,
-0.00029815256129950285,
-0.055846843868494034,
-0.07901057600975037,
0.036288078874349594,
-0.004855520091950893,
0.009593538008630276,
-0.02521190606057644,
0.08116303384304047,
-0.006686981301754713,
-0.17354175448417664,
0.07316701114177704,
-0.15976712107658386,
-0.17873170971870422,
-0.01645059511065483,
0.061542581766843796,
-0.03238726407289505,
0.04269654303789139,
-0.016123361885547638,
0.0037964715156704187,
0.1093648225069046,
-0.031086871400475502,
-0.04398370906710625,
-0.09574922919273376,
0.07746699452400208,
-0.06896854937076569,
0.19427354633808136,
0.000798111199401319,
0.09059029817581177,
0.0918913409113884,
0.016169575974345207,
-0.15271581709384918,
0.03972193971276283,
0.06958111375570297,
-0.0630522146821022,
0.04347081482410431,
0.16316373646259308,
-0.03918800130486488,
0.08435114473104477,
0.022275883704423904,
-0.1061062291264534,
-0.011322001926600933,
-0.026832453906536102,
-0.0095533961430192,
-0.08628233522176743,
-0.01059541292488575,
-0.048962656408548355,
0.17390674352645874,
0.2180752009153366,
-0.029092339798808098,
0.014613226056098938,
-0.08026870340108871,
0.030976654961705208,
0.036490168422460556,
0.06408990919589996,
-0.03488877788186073,
-0.18357764184474945,
0.03345586359500885,
0.008046976290643215,
0.020432306453585625,
-0.1939341425895691,
-0.10346053540706635,
0.04088955000042915,
-0.056693896651268005,
-0.0400080680847168,
0.11980313062667847,
0.04112345725297928,
0.04261397942900658,
-0.020191052928566933,
-0.08368534594774246,
-0.028769757598638535,
0.14188094437122345,
-0.17229385673999786,
-0.05340715870261192
] |
null | null |
transformers
|
# simple_kitchen
Autogenerated by HuggingPics🤗🖼️
Create your own image classifier for **anything** by running [the demo on Google Colab](https://colab.research.google.com/github/nateraw/huggingpics/blob/main/HuggingPics.ipynb).
Report any issues with the demo at the [github repo](https://github.com/nateraw/huggingpics).
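Once the Colab run finishes, the exported checkpoint can be used like any other Hugging Face image-classification model. Below is a minimal inference sketch; the image path is only a placeholder:
```python
from transformers import pipeline

# Load the fine-tuned ViT classifier from the Hub.
classifier = pipeline("image-classification", model="black/simple_kitchen")

# "my_kitchen.jpg" stands in for any local image file.
for prediction in classifier("my_kitchen.jpg"):
    print(prediction["label"], round(prediction["score"], 3))
```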
## Example Images
#### best kitchen island

#### kitchen cabinet

#### kitchen countertop

|
{"tags": ["image-classification", "pytorch", "huggingpics"], "metrics": ["accuracy"]}
|
image-classification
|
black/simple_kitchen
|
[
"transformers",
"pytorch",
"tensorboard",
"vit",
"image-classification",
"huggingpics",
"model-index",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #vit #image-classification #huggingpics #model-index #autotrain_compatible #endpoints_compatible #region-us
|
# simple_kitchen
Autogenerated by HuggingPics️
Create your own image classifier for anything by running the demo on Google Colab.
Report any issues with the demo at the github repo.
## Example Images
#### best kitchen island
!best kitchen island
#### kitchen cabinet
!kitchen cabinet
#### kitchen countertop
!kitchen countertop
|
[
"# simple_kitchen\n\n\nAutogenerated by HuggingPics️\n\nCreate your own image classifier for anything by running the demo on Google Colab.\n\nReport any issues with the demo at the github repo.",
"## Example Images",
"#### best kitchen island\n\n!best kitchen island",
"#### kitchen cabinet\n\n!kitchen cabinet",
"#### kitchen countertop\n\n!kitchen countertop"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #vit #image-classification #huggingpics #model-index #autotrain_compatible #endpoints_compatible #region-us \n",
"# simple_kitchen\n\n\nAutogenerated by HuggingPics️\n\nCreate your own image classifier for anything by running the demo on Google Colab.\n\nReport any issues with the demo at the github repo.",
"## Example Images",
"#### best kitchen island\n\n!best kitchen island",
"#### kitchen cabinet\n\n!kitchen cabinet",
"#### kitchen countertop\n\n!kitchen countertop"
] |
[
49,
43,
4,
9,
8,
10
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #vit #image-classification #huggingpics #model-index #autotrain_compatible #endpoints_compatible #region-us \n# simple_kitchen\n\n\nAutogenerated by HuggingPics️\n\nCreate your own image classifier for anything by running the demo on Google Colab.\n\nReport any issues with the demo at the github repo.## Example Images#### best kitchen island\n\n!best kitchen island#### kitchen cabinet\n\n!kitchen cabinet#### kitchen countertop\n\n!kitchen countertop"
] |
[
-0.11179875582456589,
0.1615101844072342,
0.00033214804716408253,
0.03430011123418808,
0.10799963027238846,
0.057527005672454834,
0.0649498924612999,
0.17992615699768066,
0.18261964619159698,
0.02409958653151989,
0.0872439369559288,
0.17785964906215668,
-0.005746659357100725,
0.19882477819919586,
0.04556894302368164,
-0.2271580547094345,
0.0064462935552001,
0.11158040165901184,
0.06283387541770935,
0.11888512969017029,
0.04684993997216225,
-0.11092960834503174,
0.13660678267478943,
0.01604989916086197,
-0.19599686563014984,
-0.028308363631367683,
0.017377611249685287,
-0.08461914211511612,
0.12795871496200562,
-0.06260686367750168,
0.0940835028886795,
0.06299726665019989,
-0.024086281657218933,
-0.008828600868582726,
0.030892841517925262,
0.06621172279119492,
-0.04728124663233757,
0.07315962016582489,
0.027407487854361534,
0.023343253880739212,
0.07009851187467575,
0.04907257854938507,
-0.0317373052239418,
0.11584204435348511,
-0.11692246794700623,
-0.04043073579668999,
-0.043100714683532715,
-0.05918754264712334,
0.013271360658109188,
0.05925299972295761,
0.0224135909229517,
0.05472065880894661,
-0.12433048337697983,
0.08674702793359756,
0.19925162196159363,
-0.08715344220399857,
-0.11703694611787796,
0.09582296758890152,
0.059324126690626144,
-0.0788237452507019,
-0.017962748184800148,
0.09551993012428284,
0.04676264896988869,
0.05260342359542847,
-0.05232412740588188,
-0.023668210953474045,
-0.12657511234283447,
-0.05437050759792328,
-0.03428519889712334,
0.028624631464481354,
0.0690493956208229,
-0.04531875252723694,
-0.0431915819644928,
-0.08180738240480423,
-0.06854797154664993,
-0.06404095143079758,
-0.07232104986906052,
-0.04826455935835838,
-0.0051207710057497025,
-0.009564833715558052,
-0.14820596575737,
-0.08350478857755661,
-0.09291651844978333,
-0.08915884047746658,
-0.03459876775741577,
0.16237123310565948,
0.008910025469958782,
-0.03775814175605774,
-0.08630085736513138,
0.0960552766919136,
-0.05426037684082985,
-0.08707091957330704,
-0.02396230772137642,
-0.033508338034152985,
0.02317916601896286,
-0.0027268328703939915,
0.09036057442426682,
-0.06528536975383759,
0.11503365635871887,
0.06119801104068756,
0.02780621126294136,
0.0077736335806548595,
-0.03599241003394127,
-0.045364636927843094,
0.05463090538978577,
0.16816379129886627,
-0.03211922571063042,
-0.06064864620566368,
-0.014350608922541142,
0.007305615581572056,
-0.03371422737836838,
-0.0327410064637661,
-0.09553869813680649,
-0.04639209806919098,
0.05952677130699158,
0.026369629427790642,
0.1238420307636261,
-0.005502183921635151,
-0.08021575212478638,
-0.05248322710394859,
0.21652564406394958,
-0.01280650682747364,
0.045431267470121384,
-0.03149411082267761,
-0.0048367055132985115,
0.024939002469182014,
0.0471409447491169,
0.04397008568048477,
-0.03668918088078499,
0.021949097514152527,
-0.08051970601081848,
-0.03989781066775322,
-0.08909793943166733,
0.028670907020568848,
-0.014898605644702911,
-0.23861286044120789,
-0.036127783358097076,
-0.10051977634429932,
0.0300733745098114,
-0.0556320957839489,
0.07564190775156021,
-0.07337405532598495,
-0.08822053670883179,
-0.029411429539322853,
0.038750048726797104,
-0.054134637117385864,
0.04620961472392082,
0.042985010892152786,
-0.0031818151473999023,
0.054068345576524734,
0.02260776050388813,
0.0719246044754982,
-0.021806776523590088,
0.05538925528526306,
-0.12924441695213318,
0.06979382038116455,
-0.25017985701560974,
0.021806905046105385,
-0.029912512749433517,
0.09194990992546082,
-0.08765671402215958,
-0.0047910381108522415,
-0.04510382562875748,
0.00873464997857809,
0.002667985623702407,
0.1815280318260193,
-0.20035211741924286,
-0.03441738709807396,
0.03426636755466461,
-0.12220073491334915,
-0.06732161343097687,
0.10663528740406036,
0.0547059066593647,
0.11904997378587723,
0.08118794858455658,
0.08382881432771683,
0.029879620298743248,
-0.017711494117975235,
0.01641112007200718,
-0.019943762570619583,
-0.0770757719874382,
0.0013378553558140993,
-0.07201787829399109,
0.04750914126634598,
-0.12940238416194916,
0.041979819536209106,
0.0008255413267761469,
0.027744758874177933,
-0.02197366952896118,
-0.03830607980489731,
-0.04933174327015877,
-0.0470992773771286,
0.03459164500236511,
0.09797085076570511,
0.05324488878250122,
0.005000611767172813,
-0.04517366737127304,
-0.10954359173774719,
0.024721626192331314,
-0.03310682997107506,
-0.020024284720420837,
-0.060266438871622086,
0.21293634176254272,
0.03291740268468857,
-0.04708518460392952,
-0.07494941353797913,
-0.04743470251560211,
0.05138159170746803,
0.046033889055252075,
0.03978203237056732,
-0.029492374509572983,
0.07696229219436646,
0.014350520446896553,
0.06802178174257278,
-0.03344583883881569,
0.07995477318763733,
0.034433480352163315,
-0.07943815737962723,
-0.0530460961163044,
-0.002842965070158243,
-0.03979938477277756,
0.07825122028589249,
-0.10789555311203003,
-0.026550164446234703,
0.1279056966304779,
0.15889711678028107,
0.05254344269633293,
-0.05954737588763237,
0.0404481403529644,
-0.025903811678290367,
-0.03943444415926933,
-0.02480527013540268,
0.04673399031162262,
-0.05968673154711723,
-0.01083535049110651,
0.11644510179758072,
0.013959812931716442,
-0.09837272763252258,
0.0933537557721138,
-0.11127994954586029,
-0.0326627679169178,
-0.09458278119564056,
-0.04684107378125191,
0.03678040951490402,
-0.046199146658182144,
0.05158380791544914,
-0.0016419505700469017,
0.04068802669644356,
0.04362109303474426,
0.010510556399822235,
0.013549872674047947,
0.06835170835256577,
0.05331764742732048,
-0.12109668552875519,
0.08788798749446869,
0.13582266867160797,
-0.036008693277835846,
0.04227493330836296,
0.09790842980146408,
0.1287277638912201,
0.060187727212905884,
0.10264033824205399,
0.013191666454076767,
-0.005800723098218441,
-0.02969343774020672,
0.008524361997842789,
0.13938355445861816,
-0.23190999031066895,
-0.04586726799607277,
0.030133450403809547,
-0.11192106455564499,
0.03826113045215607,
-0.12418124079704285,
0.001322613563388586,
-0.02894744835793972,
-0.007544821128249168,
0.1804889291524887,
0.030609263107180595,
-0.02574245259165764,
0.005123965907841921,
0.014564815908670425,
-0.0307474322617054,
-0.012055106461048126,
0.03401387482881546,
0.04920805245637894,
0.14156663417816162,
-0.05637580528855324,
-0.23036888241767883,
-0.029874766245484352,
-0.12554888427257538,
0.0352984182536602,
0.018619095906615257,
0.05334184691309929,
-0.14597798883914948,
-0.10399410873651505,
-0.013563301414251328,
-0.05603313073515892,
0.07023151218891144,
-0.027956748381257057,
-0.17191565036773682,
0.03125380724668503,
-0.07993781566619873,
-0.09854327142238617,
-0.016841361299157143,
-0.0007377369329333305,
-0.08665402978658676,
0.16585329174995422,
-0.05967007204890251,
0.06123952940106392,
0.06450694799423218,
0.013056381605565548,
0.03947115316987038,
0.009259729646146297,
0.16955068707466125,
-0.15199464559555054,
0.08391503989696503,
0.1291448026895523,
0.08614493906497955,
0.026493754237890244,
0.11430694162845612,
0.029165921732783318,
-0.08891884982585907,
0.011138396337628365,
0.013002045452594757,
-0.05097378417849541,
-0.08193385601043701,
-0.07955791801214218,
-0.07514972984790802,
0.10829838365316391,
0.16435427963733673,
0.09472967684268951,
0.10939329117536545,
0.1668929010629654,
0.007989112287759781,
0.0512552484869957,
-0.032149478793144226,
0.006368245929479599,
0.0341656468808651,
-0.04949434474110603,
0.021722594276070595,
0.04591268301010132,
-0.07114432752132416,
0.1109510213136673,
0.09114298224449158,
0.14661912620067596,
0.005067887250334024,
0.12139123678207397,
0.04638770967721939,
0.10958610475063324,
0.07586398720741272,
0.00019692664500325918,
-0.0811842605471611,
0.01699141040444374,
-0.0008700403850525618,
-0.06420744210481644,
-0.03444874286651611,
0.011753726750612259,
0.019600166007876396,
-0.16270072758197784,
0.05576622858643532,
-0.04149680212140083,
0.03736454248428345,
0.16446562111377716,
0.07367086410522461,
-0.217386856675148,
0.030527612194418907,
0.0025759749114513397,
-0.02336808107793331,
-0.118141770362854,
-0.0025482731871306896,
-0.017809320241212845,
-0.06442955136299133,
-0.020017828792333603,
-0.0751296728849411,
0.08259982615709305,
-0.05016675218939781,
0.006754857487976551,
0.08371581882238388,
0.08289206027984619,
0.02084147185087204,
0.03486209362745285,
-0.1620243787765503,
0.1468961089849472,
-0.03283553943037987,
-0.037671856582164764,
-0.1696581244468689,
-0.034761056303977966,
0.012825598940253258,
0.096523217856884,
0.14333269000053406,
0.0420827679336071,
0.0014600199647247791,
-0.15588372945785522,
-0.09643137454986572,
0.028669539839029312,
-0.029829591512680054,
-0.0856151133775711,
-0.06970588862895966,
0.02422984689474106,
-0.08608008921146393,
-0.019719554111361504,
-0.06419450789690018,
-0.16435012221336365,
-0.08809405565261841,
0.01682281866669655,
0.025222469121217728,
0.05065267160534859,
-0.03042808547616005,
-0.07430711388587952,
0.03666524961590767,
0.12416823208332062,
0.12236584722995758,
-0.011965717189013958,
-0.1382475346326828,
0.03510454297065735,
0.09639719128608704,
-0.022292250767350197,
0.07963255047798157,
-0.057432256639003754,
0.1289711445569992,
-0.016188621520996094,
-0.06433971971273422,
0.12169711291790009,
-0.053345754742622375,
-0.10493021458387375,
-0.03761030361056328,
0.02470684051513672,
0.1172347366809845,
-0.018950853496789932,
0.05873846262693405,
0.10660919547080994,
-0.06797603517770767,
-0.026721734553575516,
-0.10291992127895355,
0.0030982033349573612,
0.05639415234327316,
0.0923200324177742,
-0.06334564089775085,
-0.03705684468150139,
-0.10147213935852051,
-0.00549244275316596,
0.09517326951026917,
0.059925004839897156,
-0.07790500670671463,
0.11109967529773712,
-0.025843795388936996,
0.040226344019174576,
-0.19306132197380066,
-0.07774736732244492,
-0.07041560858488083,
0.03689292073249817,
0.01553225889801979,
-0.1334586888551712,
0.25015968084335327,
0.07150796800851822,
-0.04745158553123474,
0.2097921073436737,
-0.09033142775297165,
-0.10774996131658554,
0.06483867764472961,
0.14114508032798767,
0.050871841609478,
-0.08920203149318695,
-0.0700984075665474,
0.010187827982008457,
-0.04287467524409294,
0.19494189321994781,
0.0067907837219536304,
0.05522353574633598,
-0.06997620314359665,
0.03308574855327606,
0.03528470918536186,
-0.03252464905381203,
0.06774985045194626,
0.011205559596419334,
-0.057684190571308136,
-0.07944051176309586,
-0.15723736584186554,
-0.11468495428562164,
-0.001293664681725204,
-0.016716234385967255,
0.03580429404973984,
0.003547967178747058,
-0.12358574569225311,
-0.014169665053486824,
-0.10023840516805649,
0.11611855030059814,
-0.033402301371097565,
-0.045545924454927444,
-0.043463677167892456,
0.1419491320848465,
-0.10225842893123627,
0.04037756472826004,
0.10079848766326904,
-0.03759719058871269,
0.13599415123462677,
0.08050420880317688,
0.08522161841392517,
-0.1641857773065567,
0.052645351737737656,
0.014056410640478134,
-0.0071350447833538055,
0.014120463281869888,
-0.015852175652980804,
0.048579297959804535,
0.0563618429005146,
0.052239447832107544,
0.055448297411203384,
-0.010876480489969254,
-0.0389181487262249,
-0.02989679016172886,
0.09256664663553238,
-0.08050771057605743,
-0.015829559415578842,
-0.03021826781332493,
-0.05095702409744263,
0.027336984872817993,
-0.029431838542222977,
0.14377261698246002,
0.012256812304258347,
-0.08908642828464508,
0.023191995918750763,
0.016157003119587898,
-0.028210222721099854,
0.09433457255363464,
0.11157030612230301,
0.008761005476117134,
-0.11536218971014023,
-0.012174960225820541,
0.09769821912050247,
-0.13569636642932892,
-0.05073762685060501,
0.1823270469903946,
-0.0748075395822525,
-0.12697547674179077,
0.06672544777393341,
0.11028420925140381,
-0.07920710742473602,
0.00731880497187376,
-0.07570721209049225,
-0.03180576115846634,
0.04006122797727585,
-0.07557752728462219,
0.10674044489860535,
0.06925398856401443,
0.020349133759737015,
0.024519074708223343,
-0.05859334394335747,
0.0243294145911932,
0.06824496388435364,
0.14430391788482666,
-0.15269774198532104,
-0.06258048117160797,
0.06502114981412888,
0.12040337920188904,
-0.0993257388472557,
-0.040048327296972275,
-0.10194218158721924,
-0.04801201447844505,
0.0453825369477272,
0.08873691409826279,
-0.15049684047698975,
-0.0022213240154087543,
-0.03642889857292175,
-0.07332903891801834,
-0.05391758307814598,
-0.02396383509039879,
-0.0935167670249939,
-0.010967588052153587,
-0.005901553202420473,
0.01574034057557583,
-0.04724617674946785,
0.011027522385120392,
0.08924335986375809,
-0.03838076815009117,
0.0889456570148468,
-0.0504378005862236,
-0.026203472167253494,
-0.032172273844480515,
-0.16618740558624268,
-0.07028274983167648,
0.03412097319960594,
-0.005668198224157095,
0.0980541780591011,
-0.04369886592030525,
0.03371628746390343,
0.012379402294754982,
0.10250186175107956,
-0.012132763862609863,
0.20379003882408142,
-0.13121536374092102,
-0.06508086621761322,
-0.022134903818368912,
-0.14023235440254211,
-0.046308428049087524,
0.04537699371576309,
0.08338693529367447,
-0.06416793167591095,
0.07402743399143219,
-0.054275594651699066,
0.060201894491910934,
-0.1477082371711731,
0.03209385648369789,
-0.056084852665662766,
-0.11066554486751556,
0.027027834206819534,
-0.028765439987182617,
0.036604635417461395,
0.059654608368873596,
0.08542050421237946,
0.13281655311584473,
0.04374858736991882,
0.04836147278547287,
0.013942865654826164,
-0.02766914665699005,
-0.011929474771022797,
0.11213251203298569,
0.021032258868217468,
0.08086046576499939,
-0.031279899179935455,
0.039261266589164734,
-0.01566922292113304,
0.04991646856069565,
0.04478800296783447,
0.07219953835010529,
-0.030341871082782745,
-0.02149985358119011,
0.11351751536130905,
0.03316769376397133,
-0.09858200699090958,
0.1001828983426094,
-0.1931234896183014,
0.08993169665336609,
-0.056625187397003174,
0.011168604716658592,
0.005800971761345863,
-0.11339915543794632,
0.047807760536670685,
0.01582653820514679,
-0.05289764329791069,
-0.044366415590047836,
-0.25105977058410645,
-0.09006651490926743,
-0.17513200640678406,
0.03993062302470207,
-0.02494761347770691,
-0.028950415551662445,
-0.025550689548254013,
-0.0020280838944017887,
0.006112885661423206,
0.23196573555469513,
0.008137776516377926,
0.013061422854661942,
0.10462360829114914,
0.06316493451595306,
-0.05530194565653801,
-0.00678618997335434,
-0.06329618394374847,
-0.08189719915390015,
0.15835444629192352,
-0.00009821564890444279,
-0.03147811070084572,
-0.02448452264070511,
0.02176053635776043,
0.02821456268429756,
-0.09064572304487228,
-0.05232404172420502,
-0.05844184756278992,
-0.009912656620144844,
-0.0009004529565572739,
-0.010419437661767006,
0.022048920392990112,
0.03396065533161163,
0.18619096279144287,
-0.00973424781113863,
-0.0004194783978164196,
-0.09589025378227234,
0.17916716635227203,
-0.058991529047489166,
-0.10042852908372879,
-0.01779811829328537,
-0.023926768451929092,
0.0016178613295778632,
0.2617410123348236,
0.23567770421504974,
-0.01155952550470829,
-0.034590642899274826,
0.02014675736427307,
0.012627038173377514,
0.024571724236011505,
0.08746325224637985,
0.03475930169224739,
0.11479172110557556,
-0.04606036841869354,
-0.04751048982143402,
0.03659871965646744,
-0.07630596309900284,
-0.1309855580329895,
-0.052131157368421555,
0.10977748781442642,
-0.03997967392206192,
-0.07733359187841415,
0.16957062482833862,
-0.029833154752850533,
-0.0009149191901087761,
0.063226118683815,
-0.13920624554157257,
-0.12973622977733612,
-0.07059921324253082,
0.14905820786952972,
-0.01471039094030857,
0.07893684506416321,
-0.02231157012283802,
0.04745577275753021,
0.02153298258781433,
-0.008046567440032959,
-0.209281325340271,
0.07016991078853607,
0.06174715608358383,
-0.13246910274028778,
0.26940032839775085,
-0.03989364579319954,
-0.09903395175933838,
0.11671552062034607,
-0.031058885157108307,
-0.11900383234024048,
-0.05335284769535065,
-0.050608985126018524,
-0.1289840042591095,
0.04507358744740486,
0.11532561480998993,
-0.0043075429275631905,
-0.20978327095508575,
0.020796682685613632,
-0.1868755966424942,
0.043607763946056366,
0.16465482115745544,
0.0656673014163971,
-0.061448343098163605,
0.11073210090398788,
-0.09860798716545105,
0.11858688294887543,
0.07238873839378357,
0.011402171105146408,
-0.04278935119509697,
-0.09176824241876602,
0.06686119735240936,
-0.02995637059211731,
-0.018549136817455292,
-0.09932404011487961,
-0.10980124771595001,
-0.08613283187150955,
-0.08085434883832932,
-0.11683620512485504,
-0.13499346375465393,
-0.0384126715362072,
-0.13153454661369324,
-0.03087235987186432,
-0.07703900337219238,
0.10889354348182678,
0.08865804970264435,
0.007035274989902973,
0.042222753167152405,
-0.0587262399494648,
0.002036887686699629,
0.07598036527633667,
-0.14423494040966034,
-0.03643603250384331
] |
null | null |
transformers
|
BERT-based model fine-tuned on MNLI with our custom training routine.
Yields 60% accuracy on the adversarial HANS dataset.
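A minimal usage sketch follows (it assumes the standard Hugging Face sequence-classification API; the premise/hypothesis pair is only an illustration, and the label order should be read from the model config rather than assumed):
```python
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

tokenizer = AutoTokenizer.from_pretrained("blackbird/bert-base-uncased-MNLI-v1")
model = AutoModelForSequenceClassification.from_pretrained("blackbird/bert-base-uncased-MNLI-v1")

# Illustrative NLI pair: premise first, hypothesis second.
inputs = tokenizer("The doctor visited the lawyer.", "The lawyer was visited.", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits

pred = logits.argmax(dim=-1).item()
print(model.config.id2label[pred])  # label names come from the model config
```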
|
{}
|
text-classification
|
blackbird/bert-base-uncased-MNLI-v1
|
[
"transformers",
"pytorch",
"jax",
"safetensors",
"bert",
"text-classification",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #jax #safetensors #bert #text-classification #autotrain_compatible #endpoints_compatible #region-us
|
BERT-based model fine-tuned on MNLI with our custom training routine.
Yields 60% accuracy on the adversarial HANS dataset.
|
[] |
[
"TAGS\n#transformers #pytorch #jax #safetensors #bert #text-classification #autotrain_compatible #endpoints_compatible #region-us \n"
] |
[
44
] |
[
"passage: TAGS\n#transformers #pytorch #jax #safetensors #bert #text-classification #autotrain_compatible #endpoints_compatible #region-us \n"
] |
[
-0.021713463589549065,
0.0654725506901741,
-0.00794708076864481,
0.022156789898872375,
0.16400907933712006,
0.015473652631044388,
0.10103922337293625,
0.09779391437768936,
0.05949626490473747,
-0.030349696055054665,
0.12249185889959335,
0.21417325735092163,
-0.03984736651182175,
0.13577724993228912,
-0.12860147655010223,
-0.2440502643585205,
0.0882980152964592,
0.02172994613647461,
0.028411343693733215,
0.11552661657333374,
0.08964807540178299,
-0.10224562883377075,
0.05607806146144867,
-0.05696086958050728,
-0.09846892952919006,
0.026674916967749596,
0.05891455337405205,
-0.1369890570640564,
0.0983535647392273,
0.01722935400903225,
0.17299683392047882,
0.046053510159254074,
-0.05138836428523064,
-0.1584518551826477,
0.04156217351555824,
0.0077022067271173,
-0.08001645654439926,
0.03997304290533066,
0.07976111769676208,
-0.11045137047767639,
-0.01044571865350008,
0.022499991580843925,
0.034013718366622925,
0.048617806285619736,
-0.13905729353427887,
-0.11005605012178421,
-0.010662647895514965,
0.04192438721656799,
0.0870012566447258,
0.06714481860399246,
-0.006997082848101854,
0.18823473155498505,
-0.12497025728225708,
0.13017280399799347,
0.10634823888540268,
-0.30007341504096985,
-0.017160119488835335,
0.07763838022947311,
0.05102614685893059,
0.06910742074251175,
-0.06440568715333939,
0.045330747961997986,
0.033393945544958115,
-0.00997313391417265,
0.03747172653675079,
-0.06577962636947632,
-0.12370630353689194,
0.01016975287348032,
-0.07940340042114258,
-0.03855573385953903,
0.19136355817317963,
-0.05098869279026985,
0.053452592343091965,
-0.05703575536608696,
-0.09246117621660233,
-0.02188172936439514,
-0.02346687763929367,
0.0019310942152515054,
-0.0351005420088768,
0.05075797438621521,
0.0361248143017292,
0.02690335363149643,
-0.11150021106004715,
0.018141092732548714,
-0.17581529915332794,
0.20734457671642303,
0.01990143023431301,
0.046327587217092514,
-0.17929427325725555,
0.04431639239192009,
0.048998575657606125,
-0.1149277314543724,
0.05817234516143799,
-0.11155778169631958,
0.0514126680791378,
-0.03316843509674072,
-0.0372835211455822,
-0.037159424275159836,
0.12362152338027954,
0.1250646859407425,
-0.00912782084196806,
0.06013563275337219,
-0.04675215482711792,
0.08676736801862717,
0.03288273140788078,
0.08866127580404282,
0.05280424281954765,
-0.014787320978939533,
0.08290963619947433,
-0.06756016612052917,
0.019660266116261482,
-0.05727663263678551,
-0.11807560920715332,
0.01039548497647047,
0.11793402582406998,
0.11675688624382019,
0.012611068785190582,
0.09543249011039734,
-0.06506446748971939,
0.006505368743091822,
0.09588105231523514,
-0.07895839214324951,
0.017125414684414864,
0.0350351482629776,
0.04829170182347298,
0.024707822129130363,
-0.026487937197089195,
0.0030121563468128443,
-0.061472248286008835,
0.12689241766929626,
-0.05422953888773918,
-0.007692599203437567,
-0.02952299267053604,
-0.07651115208864212,
0.048986878246068954,
-0.10287142544984818,
0.034805621951818466,
-0.19008123874664307,
-0.11772795766592026,
0.006102612242102623,
0.011861618608236313,
0.024503471329808235,
-0.02513660490512848,
-0.023777177557349205,
-0.008414730429649353,
0.032708507031202316,
-0.05869688093662262,
-0.09407905489206314,
-0.08068183809518814,
0.10415510088205338,
-0.037043895572423935,
0.06219480559229851,
-0.08236175030469894,
0.038519203662872314,
-0.11986368894577026,
-0.023322315886616707,
-0.12570463120937347,
-0.0007101881201379001,
-0.06390572339296341,
0.20689509809017181,
0.026964673772454262,
-0.0224609375,
-0.05051908269524574,
0.05645376443862915,
-0.07215681672096252,
0.19493497908115387,
-0.07625866681337357,
-0.08623489737510681,
0.24678544700145721,
-0.1279580146074295,
-0.17000986635684967,
0.10133079439401627,
-0.013959852047264576,
-0.004501427989453077,
0.12573395669460297,
0.2081976979970932,
0.07290318608283997,
-0.03132502734661102,
0.06526272743940353,
0.09816371649503708,
-0.09078552573919296,
-0.0883825421333313,
-0.016022957861423492,
0.00846122857183218,
-0.15588414669036865,
0.047241490334272385,
0.06577310711145401,
0.06889242678880692,
-0.05379139259457588,
-0.04198388382792473,
-0.020334823057055473,
-0.02342434972524643,
0.11055458337068558,
0.05180465057492256,
0.0923023521900177,
-0.11551622301340103,
-0.011464827693998814,
-0.04335340857505798,
-0.007100371643900871,
0.023038135841488838,
0.001145660295151174,
-0.08214972168207169,
0.101126529276371,
0.021960193291306496,
0.031735386699438095,
-0.1913764625787735,
-0.11281553655862808,
-0.0008762988145463169,
0.11225378513336182,
-0.043872132897377014,
0.061931852251291275,
0.07055530697107315,
-0.0332125760614872,
-0.014029931277036667,
-0.05974722281098366,
0.19266952574253082,
0.03074836917221546,
-0.05821244791150093,
-0.09517458081245422,
0.08039040118455887,
-0.08065038919448853,
0.01600276120007038,
-0.10583970695734024,
0.029083125293254852,
0.07110360264778137,
0.10849476605653763,
0.029637476429343224,
0.05734116956591606,
-0.013249640353024006,
0.04259081557393074,
-0.05547172948718071,
0.011545118875801563,
0.10758806020021439,
0.0014412952587008476,
-0.061990465968847275,
0.15194498002529144,
-0.17725147306919098,
0.3685913383960724,
0.20586258172988892,
-0.24432027339935303,
-0.002561305882409215,
-0.010347469709813595,
0.007789911702275276,
0.030541660264134407,
0.0162393469363451,
0.013952181674540043,
0.039644233882427216,
-0.006282297428697348,
0.1877308487892151,
-0.05219453200697899,
-0.057030003517866135,
0.004496603738516569,
-0.052575986832380295,
-0.03682097792625427,
0.09196595102548599,
0.011779937893152237,
-0.2177613526582718,
0.19338391721248627,
0.2537401616573334,
0.025199389085173607,
0.16060958802700043,
-0.02582998387515545,
0.05915607884526253,
0.0824630931019783,
-0.013157798908650875,
-0.022533336654305458,
-0.04554226994514465,
-0.1455221325159073,
-0.03673265129327774,
0.059724047780036926,
0.02907709777355194,
0.040551792830228806,
-0.11503995209932327,
-0.05756233632564545,
-0.006099446211010218,
0.02140898071229458,
-0.02359427511692047,
0.06876208633184433,
0.054972827434539795,
0.1263849288225174,
-0.03098524548113346,
-0.09141557663679123,
0.10585656017065048,
-0.01901656575500965,
-0.08530191332101822,
0.191598579287529,
-0.14447884261608124,
-0.35428786277770996,
-0.10313033312559128,
-0.14785133302211761,
-0.0037141970824450254,
0.04614908620715141,
0.10450293868780136,
-0.11793774366378784,
-0.0380684994161129,
-0.0004040142521262169,
-0.038726743310689926,
-0.030250607058405876,
0.04581953212618828,
-0.05772920325398445,
0.07456407696008682,
-0.02582315355539322,
-0.06380484253168106,
-0.07301127165555954,
-0.03579798713326454,
-0.040448445826768875,
0.16221703588962555,
-0.0892651304602623,
0.07203865051269531,
0.13615871965885162,
-0.015593440271914005,
0.041886527091264725,
-0.04898044839501381,
0.1555844396352768,
-0.07631782442331314,
-0.019584430381655693,
0.16826319694519043,
-0.08730562776327133,
0.07662565261125565,
0.16526925563812256,
0.02406305819749832,
-0.06910371780395508,
0.04291778802871704,
-0.04478449746966362,
-0.0724785104393959,
-0.22262144088745117,
-0.1271025836467743,
-0.07931119948625565,
0.07219051569700241,
0.052540916949510574,
0.07992782443761826,
0.11519292742013931,
0.06368740648031235,
-0.0023743873462080956,
-0.04634237289428711,
0.05454256013035774,
0.07711439579725266,
0.17705762386322021,
0.0043832785449922085,
0.1426936239004135,
-0.05474034324288368,
-0.14214468002319336,
0.06859677284955978,
-0.01417678315192461,
0.07450098544359207,
0.09917003661394119,
-0.022616079077124596,
0.013673663139343262,
0.10604599863290787,
0.16437357664108276,
0.13580524921417236,
0.01983647607266903,
-0.04336691275238991,
-0.014547656290233135,
-0.013477444648742676,
-0.08312629908323288,
-0.004832982551306486,
0.009819258004426956,
-0.08831845968961716,
-0.09007405489683151,
-0.1315731555223465,
0.11031142622232437,
0.08005206286907196,
0.03532475233078003,
-0.21246163547039032,
0.018303824588656425,
0.11229286342859268,
-0.018204273656010628,
-0.09043079614639282,
0.10070422291755676,
-0.01678953878581524,
-0.117817223072052,
0.0996197760105133,
-0.037970688194036484,
0.11916128545999527,
-0.06753453612327576,
0.08645343780517578,
-0.07420429587364197,
-0.1188579797744751,
0.011212152428925037,
0.09781660884618759,
-0.2638496458530426,
0.22001980245113373,
0.00972041953355074,
-0.024888113141059875,
-0.0646883174777031,
-0.028239667415618896,
0.04417185112833977,
0.22146032750606537,
0.10971783846616745,
0.00009361281991004944,
-0.09073538333177567,
-0.15357232093811035,
-0.03785524144768715,
0.025326425209641457,
0.10204120725393295,
-0.025252096354961395,
-0.008983354084193707,
-0.046831171959638596,
-0.027956394478678703,
-0.02389000542461872,
-0.07279519736766815,
-0.014091418124735355,
-0.13749128580093384,
0.026165178045630455,
0.055422406643629074,
0.0877295657992363,
-0.02378608100116253,
-0.04513174295425415,
-0.1197848990559578,
0.17492496967315674,
-0.08910252898931503,
-0.07073826342821121,
-0.10059648752212524,
-0.11181958764791489,
0.0007767801289446652,
-0.07184138149023056,
0.04481477662920952,
-0.06904283910989761,
0.03184283897280693,
-0.06817156821489334,
-0.18812686204910278,
0.13625912368297577,
-0.13195689022541046,
-0.05899064242839813,
-0.07384099811315536,
0.15552079677581787,
-0.06302634626626968,
-0.0035479702055454254,
0.03970842435956001,
0.027778903022408485,
-0.07482940703630447,
-0.07818155735731125,
0.006534373387694359,
0.009610188193619251,
0.05552814528346062,
0.05251498147845268,
-0.09461628645658493,
-0.14215251803398132,
-0.02355608530342579,
0.01197721529752016,
0.25463566184043884,
0.22666041553020477,
-0.05347258225083351,
0.1358383297920227,
0.18503350019454956,
-0.05803385749459267,
-0.3454492390155792,
-0.09773682802915573,
-0.1344824731349945,
-0.059851933270692825,
-0.01793389953672886,
-0.09627822786569595,
0.11547074466943741,
0.004937163088470697,
-0.04625356197357178,
0.08518151193857193,
-0.1461019665002823,
-0.083231620490551,
0.2013142704963684,
0.016737112775444984,
0.3915456235408783,
-0.15460002422332764,
-0.08379893749952316,
-0.05153810977935791,
-0.09382570534944534,
0.14099515974521637,
-0.06832609325647354,
0.04495837166905403,
0.0019382579484954476,
-0.0028918490279465914,
0.04694472625851631,
-0.05234109237790108,
0.08885977417230606,
-0.04875560104846954,
0.04963110014796257,
-0.12214414030313492,
-0.09103014320135117,
0.030814101919531822,
-0.028373291715979576,
-0.005930721759796143,
-0.056702833622694016,
0.020790567621588707,
-0.1209561824798584,
-0.03191009536385536,
-0.06636703759431839,
0.06905783712863922,
0.02149762213230133,
-0.030735423788428307,
0.030209749937057495,
-0.01809299923479557,
-0.0042241644114255905,
0.0009792455239221454,
0.2923944890499115,
-0.038492076098918915,
0.22044970095157623,
0.10613560676574707,
0.14744965732097626,
-0.14929358661174774,
0.05999607965350151,
-0.04910130798816681,
-0.0754656121134758,
0.07482101023197174,
-0.06515238434076309,
0.07774417847394943,
0.10547447204589844,
-0.05612943693995476,
0.06839057803153992,
0.10780969262123108,
0.0478271059691906,
-0.038744617253541946,
0.16603823006153107,
-0.25541171431541443,
-0.0012396922102198005,
-0.038202524185180664,
0.033761367201805115,
0.06086653470993042,
0.09347615391016006,
0.12888671457767487,
0.035416025668382645,
-0.04951908811926842,
-0.01688341051340103,
0.019208915531635284,
0.010039051994681358,
0.05638628080487251,
0.06072856858372688,
0.04548066854476929,
-0.1262623816728592,
0.06109809875488281,
0.035816729068756104,
-0.1820671111345291,
-0.00909417774528265,
0.1431208699941635,
-0.16468144953250885,
-0.1327553391456604,
0.01471090316772461,
0.156119704246521,
-0.024150794371962547,
-0.07418226450681686,
-0.06659402698278427,
-0.14670325815677643,
0.04257367551326752,
0.2263869047164917,
0.10529720038175583,
0.07898824661970139,
-0.0014055153587833047,
-0.038048405200242996,
-0.007766969036310911,
0.03615836799144745,
-0.011422540992498398,
0.021358588710427284,
-0.14689211547374725,
0.0037143740337342024,
-0.010536531917750835,
0.11306611448526382,
-0.09912221878767014,
-0.047185856848955154,
-0.1922134906053543,
0.03933587297797203,
-0.059326570481061935,
-0.009605652652680874,
-0.07529854029417038,
-0.012454226613044739,
-0.002915274351835251,
-0.05342821776866913,
-0.026465559378266335,
-0.05807827413082123,
-0.10688316822052002,
0.04150213301181793,
0.002345814136788249,
0.03729872405529022,
-0.08780533075332642,
-0.05785459652543068,
0.0853082537651062,
-0.03572157397866249,
0.11269453167915344,
0.1070907786488533,
-0.089089035987854,
0.10803984850645065,
-0.2005157470703125,
-0.09468311071395874,
0.13732028007507324,
0.003848859341815114,
0.050899725407361984,
0.06576873362064362,
0.03185916319489479,
0.08350106328725815,
0.0003468405921012163,
0.07290676236152649,
0.06128183379769325,
-0.1181051954627037,
0.071310855448246,
0.011074036359786987,
-0.16973160207271576,
-0.022691503167152405,
-0.0701931044459343,
0.10188555717468262,
-0.041144873946905136,
0.17418168485164642,
-0.076426662504673,
0.07797541469335556,
-0.060945093631744385,
0.016739221289753914,
-0.01372558157891035,
-0.22573141753673553,
-0.11859673261642456,
-0.053039368242025375,
0.028635278344154358,
-0.015315480530261993,
0.22630931437015533,
0.05916966497898102,
0.013154883868992329,
0.06369247287511826,
0.035830263048410416,
0.01047768909484148,
0.04045509174466133,
0.17195628583431244,
0.06200168654322624,
-0.06969088315963745,
-0.06826761364936829,
0.030157266184687614,
0.02877793274819851,
-0.06939956545829773,
0.11072975397109985,
0.10129404067993164,
-0.0251607745885849,
0.05968738719820976,
-0.006188264582306147,
0.05709199234843254,
-0.07589592784643173,
-0.18348610401153564,
-0.0562448650598526,
0.05603133514523506,
0.026982419192790985,
0.05744026601314545,
0.142180934548378,
0.004996979609131813,
0.010084230452775955,
-0.07741006463766098,
-0.03342839702963829,
-0.1943444460630417,
-0.07347012311220169,
-0.11249250173568726,
-0.08961030840873718,
0.009935879148542881,
-0.08145841211080551,
-0.029054639860987663,
0.054039642214775085,
0.051013242453336716,
-0.049097608774900436,
0.08035353571176529,
0.05342123284935951,
-0.028418174013495445,
0.08278832584619522,
-0.024090737104415894,
0.0135620953515172,
0.005216807126998901,
-0.035933662205934525,
-0.12749753892421722,
-0.03550497815012932,
-0.058167051523923874,
0.03714491054415703,
-0.06676458567380905,
0.03030305542051792,
-0.14020051062107086,
-0.12535710632801056,
-0.021322326734662056,
0.05690642073750496,
-0.0517280288040638,
0.1114698275923729,
0.015188813209533691,
0.0023455366026610136,
0.06108670309185982,
0.22716949880123138,
-0.04502088204026222,
-0.07763572782278061,
-0.023048361763358116,
0.2505435645580292,
0.054953381419181824,
0.12029256671667099,
-0.004578823689371347,
-0.0050336215645074844,
-0.04786117002367973,
0.2800162136554718,
0.3008327782154083,
-0.049834996461868286,
0.07139132171869278,
-0.028420327231287956,
0.029972707852721214,
0.10022050142288208,
0.1346438229084015,
0.09786063432693481,
0.24367362260818481,
-0.06024493649601936,
0.012106356211006641,
-0.014927470125257969,
-0.002041126834228635,
-0.13366930186748505,
0.04151143506169319,
0.03682940825819969,
-0.02120414562523365,
-0.07004236429929733,
0.1041441485285759,
-0.16367332637310028,
0.12311186641454697,
-0.01414033304899931,
-0.20375603437423706,
-0.061052095144987106,
-0.028266752138733864,
0.16914020478725433,
-0.014487233012914658,
0.06880565732717514,
-0.0027392974589020014,
-0.10483374446630478,
-0.008490480482578278,
0.004270398523658514,
-0.17219771444797516,
-0.04357116296887398,
0.0455457866191864,
-0.03780859336256981,
0.08406970649957657,
-0.011100434698164463,
0.03013930656015873,
0.07466962188482285,
0.015251037664711475,
-0.0406903512775898,
0.07738472521305084,
0.008484533987939358,
-0.04552280530333519,
0.008135504089295864,
0.0024812573101371527,
0.0032781909685581923,
-0.05371193215250969,
0.07237998396158218,
-0.14029689133167267,
0.04713515564799309,
-0.09903568029403687,
-0.08193216472864151,
-0.019585581496357918,
0.08061938732862473,
-0.0352899469435215,
0.04849293828010559,
0.06978024542331696,
-0.004088457208126783,
-0.014330332167446613,
-0.03933378681540489,
-0.013950365595519543,
0.0020331216510385275,
-0.11243174225091934,
-0.10394086688756943,
-0.11117445677518845,
-0.06611403822898865,
0.13685625791549683,
0.013783015310764313,
-0.1875249594449997,
0.0034856752026826143,
-0.12291533499956131,
0.0509054958820343,
-0.19321024417877197,
0.06684365123510361,
0.06787635385990143,
0.026447894051671028,
-0.011317764408886433,
-0.06444274634122849,
0.04915736988186836,
0.0877934917807579,
-0.09415892511606216,
-0.09623237699270248
] |
null | null | null |
# TEST
# huggingface model
|
{}
| null |
blackface/dummy
|
[
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#region-us
|
# TEST
# huggingface model
|
[
"# TEST",
"# huggingface model"
] |
[
"TAGS\n#region-us \n",
"# TEST",
"# huggingface model"
] |
[
6,
3,
5
] |
[
"passage: TAGS\n#region-us \n# TEST# huggingface model"
] |
[
-0.03881610929965973,
-0.04575558751821518,
-0.005185125861316919,
-0.039399199187755585,
0.020794788375496864,
0.09639494121074677,
0.09159161150455475,
0.07754091918468475,
0.3179450035095215,
0.03235868364572525,
0.16059279441833496,
-0.11284822970628738,
-0.012858353555202484,
0.19177913665771484,
-0.031146695837378502,
-0.16874130070209503,
0.04295007511973381,
0.021408705040812492,
0.01680559664964676,
0.0862375870347023,
0.0031402723398059607,
0.0028784913010895252,
0.09439387917518616,
-0.06741135567426682,
-0.17962618172168732,
0.047026172280311584,
-0.03684516251087189,
0.04276606813073158,
0.11763175576925278,
-0.048622772097587585,
0.13279274106025696,
-0.00040376291144639254,
-0.12049499154090881,
-0.22241337597370148,
0.04005115106701851,
-0.05183897912502289,
-0.03237752988934517,
0.00842062383890152,
0.07905599474906921,
-0.028310758993029594,
0.15088243782520294,
0.1749982088804245,
0.02740078791975975,
0.04657692089676857,
-0.17715667188167572,
-0.05952158197760582,
-0.06081460416316986,
-0.12073265016078949,
-0.023564085364341736,
0.02646002545952797,
-0.019159965217113495,
0.27644726634025574,
-0.22160598635673523,
0.02307240292429924,
0.106169193983078,
-0.12343719601631165,
0.03570420295000076,
0.15552282333374023,
0.10268984735012054,
-0.05112473666667938,
-0.0423908494412899,
0.06257641315460205,
0.12839505076408386,
0.019677523523569107,
-0.16829736530780792,
-0.05185049772262573,
0.018049931153655052,
0.10819678753614426,
-0.10379631072282791,
-0.06046205013990402,
0.2203141301870346,
0.05840418115258217,
-0.02929980494081974,
-0.01574263721704483,
-0.019586483016610146,
0.040938399732112885,
0.07452630251646042,
-0.041924599558115005,
0.025958232581615448,
0.04327790439128876,
-0.05421127378940582,
0.040793947875499725,
-0.0557672381401062,
-0.0703301802277565,
-0.196674644947052,
0.15667080879211426,
-0.03416590392589569,
0.08454664051532745,
-0.21747130155563354,
-0.029452748596668243,
-0.26230525970458984,
-0.035760704427957535,
0.01015566661953926,
-0.12397339940071106,
0.004746920429170132,
-0.01432409230619669,
-0.09641430526971817,
0.1358875334262848,
0.19157807528972626,
0.1684962958097458,
-0.0379435196518898,
0.06841867417097092,
-0.05116331949830055,
0.0551346018910408,
0.05219583213329315,
0.16203410923480988,
0.12580186128616333,
-0.10003997385501862,
-0.07693468034267426,
-0.03727932274341583,
-0.028311431407928467,
-0.029620518907904625,
-0.0009110089740715921,
-0.059477463364601135,
-0.04187643155455589,
0.05137373507022858,
-0.0030451796483248472,
-0.03960735350847244,
-0.11099843680858612,
0.04716134071350098,
-0.06112892925739288,
-0.03119722381234169,
0.007913454435765743,
0.011347685940563679,
-0.040301788598299026,
-0.10118833184242249,
-0.04487902671098709,
-0.04085833579301834,
0.11095944046974182,
-0.02888862043619156,
-0.07718868553638458,
-0.050925105810165405,
-0.039109740406274796,
-0.006660944782197475,
0.04976198449730873,
-0.15847444534301758,
0.040734440088272095,
-0.10933388024568558,
-0.050669774413108826,
-0.0399886816740036,
-0.04830571636557579,
-0.01944718137383461,
0.10594836622476578,
-0.0769505500793457,
0.024591611698269844,
-0.0784655287861824,
0.024868542328476906,
-0.1595831960439682,
-0.07653416693210602,
-0.0014410391449928284,
0.10540971904993057,
0.09698940068483353,
-0.0355013832449913,
0.016325818374753,
-0.15962664783000946,
0.09120428562164307,
-0.07501969486474991,
0.01819395273923874,
-0.08282377570867538,
0.17069125175476074,
0.07211296260356903,
0.04073064401745796,
-0.189019575715065,
0.06582196056842804,
0.05924443528056145,
0.29803261160850525,
-0.15980809926986694,
-0.14764493703842163,
0.3037959635257721,
-0.10542645305395126,
-0.21598516404628754,
0.13310125470161438,
0.019259076565504074,
0.037302739918231964,
0.006902189459651709,
0.43992698192596436,
-0.08623728156089783,
-0.14865335822105408,
0.0725100189447403,
0.11006930470466614,
-0.14921657741069794,
0.05710573494434357,
0.03323948383331299,
-0.10793270170688629,
-0.27840009331703186,
0.04083451256155968,
0.019539454951882362,
0.1484902799129486,
-0.10600534826517105,
0.00995983649045229,
-0.04144812002778053,
-0.059805043041706085,
0.13541088998317719,
0.07924669981002808,
-0.0014191472437232733,
-0.19029203057289124,
0.060819413512945175,
-0.16775351762771606,
0.08091248571872711,
0.0474870502948761,
-0.027061475440859795,
-0.09490641951560974,
0.17368772625923157,
-0.02324403077363968,
-0.03811869025230408,
-0.08464859426021576,
-0.10614560544490814,
0.06256809830665588,
0.13447067141532898,
0.043287962675094604,
0.11268309503793716,
0.13502620160579681,
-0.06288572400808334,
0.056422583758831024,
-0.038808420300483704,
0.06496076285839081,
-0.03221363574266434,
0.00117283605504781,
-0.017398670315742493,
0.10770652443170547,
-0.10750904679298401,
0.11902756243944168,
-0.05648075044155121,
0.004983989521861076,
-0.11331699788570404,
0.03626050055027008,
0.0031937805470079184,
0.00015898255514912307,
0.03062228299677372,
-0.017446178942918777,
0.006792626343667507,
-0.037442997097969055,
0.09885438531637192,
-0.029495596885681152,
-0.09731858968734741,
0.13822239637374878,
-0.09869281202554703,
0.04512404277920723,
0.12060117721557617,
-0.07264220714569092,
-0.06765994429588318,
-0.11888857930898666,
-0.07909825444221497,
0.05643542483448982,
0.008097256533801556,
0.019782468676567078,
-0.0051202839240431786,
-0.04102395102381706,
0.031255874782800674,
-0.054411955177783966,
-0.018539249897003174,
-0.00967401172965765,
-0.12162661552429199,
-0.0991506278514862,
0.1330210417509079,
-0.08941175043582916,
-0.023952940478920937,
0.08268806338310242,
0.2069912552833557,
0.052204474806785583,
0.1250392198562622,
-0.07398360222578049,
-0.03712989389896393,
-0.023787718266248703,
0.10601074993610382,
-0.024108918383717537,
0.08906444162130356,
-0.21462303400039673,
-0.010398264974355698,
0.05325160548090935,
0.042935390025377274,
0.07928070425987244,
-0.08201174437999725,
-0.10070516169071198,
-0.018296973779797554,
-0.06296425312757492,
-0.10993301868438721,
0.13611266016960144,
-0.04370907321572304,
0.04229842498898506,
0.021542664617300034,
-0.08934137225151062,
0.030793223530054092,
-0.004238024353981018,
-0.13645964860916138,
0.12088554352521896,
-0.09721633046865463,
-0.1257638782262802,
-0.10630549490451813,
-0.04570239782333374,
-0.06566581130027771,
0.033579014241695404,
0.03019847720861435,
-0.1612984985113144,
0.03483863174915314,
-0.05335959419608116,
0.07911098748445511,
0.021973393857479095,
0.023315755650401115,
-0.08606434613466263,
0.020912140607833862,
-0.03960581123828888,
-0.06788057833909988,
-0.0850149542093277,
-0.09525619447231293,
0.07456526905298233,
0.15168246626853943,
-0.2882903218269348,
0.08483687788248062,
0.13868440687656403,
-0.004720637574791908,
0.05489905923604965,
-0.039427053183317184,
0.2517756223678589,
-0.046874936670064926,
-0.0840538740158081,
0.0485084131360054,
-0.017693405970931053,
0.04381918907165527,
0.06320076435804367,
-0.057652395218610764,
-0.16345542669296265,
0.06535904854536057,
-0.04907413572072983,
-0.11170768737792969,
-0.11996136605739594,
-0.06779596954584122,
-0.03092551976442337,
0.24181167781352997,
-0.0560702420771122,
0.01821136102080345,
0.07637825608253479,
0.045682813972234726,
0.15462401509284973,
-0.22099970281124115,
-0.05710557475686073,
-0.005739493295550346,
-0.07528124749660492,
-0.07830090075731277,
-0.0003223824896849692,
-0.0773862898349762,
-0.09965645521879196,
0.12366984784603119,
0.0018578064627945423,
0.09004994481801987,
0.14240898191928864,
-0.11334286630153656,
0.0841665118932724,
0.1665983945131302,
0.09986458718776703,
0.09462511539459229,
0.005734043195843697,
-0.08609889447689056,
-0.0323578305542469,
-0.0015216958709061146,
-0.07118649035692215,
0.02821185812354088,
0.11881402134895325,
-0.11247924715280533,
-0.07511138170957565,
-0.20735198259353638,
0.07865733653306961,
-0.04840753227472305,
0.21682403981685638,
-0.1397833228111267,
0.07715360075235367,
0.03615659847855568,
0.05816496163606644,
-0.01227585319429636,
0.04013068601489067,
0.07975970953702927,
-0.09429503232240677,
0.09073600172996521,
0.07091358304023743,
0.10514134168624878,
0.19354194402694702,
0.06783804297447205,
-0.21097882091999054,
-0.03786323219537735,
-0.02805551514029503,
0.05998745188117027,
-0.23110505938529968,
0.21953055262565613,
-0.027213556692004204,
-0.07509487867355347,
0.019278259947896004,
-0.07362441718578339,
0.15261191129684448,
0.1641225814819336,
0.1130075454711914,
0.023616569116711617,
-0.12711209058761597,
-0.06577611714601517,
-0.003921695984899998,
0.0615648478269577,
0.1585720330476761,
0.011206259950995445,
-0.023147476837038994,
-0.04042007774114609,
0.02475021779537201,
-0.030395865440368652,
0.24745114147663116,
-0.002901757601648569,
0.08177171647548676,
-0.14083729684352875,
-0.0810939148068428,
-0.09924479573965073,
0.040765225887298584,
0.06323602050542831,
-0.0744846910238266,
-0.019289707764983177,
0.24282413721084595,
0.11223149299621582,
-0.08779171854257584,
-0.15423572063446045,
0.1351645141839981,
-0.02767191082239151,
-0.07473739981651306,
-0.07555410265922546,
-0.07091542333364487,
-0.06219633296132088,
-0.0912523940205574,
0.06263985484838486,
-0.04902677983045578,
0.011056075803935528,
-0.03682761639356613,
0.07503921538591385,
-0.05108889192342758,
0.050624702125787735,
0.036603763699531555,
0.024746056646108627,
-0.08084078878164291,
-0.0391610711812973,
0.18121053278446198,
0.008934355340898037,
-0.18005190789699554,
0.15414929389953613,
-0.027195625007152557,
0.10228072106838226,
-0.0772518515586853,
0.06337983161211014,
0.12696167826652527,
0.41704925894737244,
0.01514880545437336,
0.08013663440942764,
0.190740704536438,
-0.05921516939997673,
-0.27597156167030334,
0.010218784213066101,
-0.16150875389575958,
0.08792290091514587,
0.12754090130329132,
-0.005605635698884726,
0.05641955882310867,
0.04652673751115799,
-0.029631247743964195,
0.14943379163742065,
-0.034311480820178986,
-0.041388366371393204,
0.16819533705711365,
-0.02574741467833519,
0.462005615234375,
-0.07409186661243439,
-0.08878505229949951,
-0.018046630546450615,
-0.10133319348096848,
0.017525160685181618,
-0.020952053368091583,
0.07283633202314377,
-0.07796221226453781,
0.0047632865607738495,
0.06286567449569702,
-0.011372365057468414,
0.2417561262845993,
-0.034510985016822815,
0.0907866433262825,
-0.0957908108830452,
-0.04977308213710785,
-0.011416515335440636,
-0.06539842486381531,
0.08209414780139923,
-0.04067041724920273,
0.02833159826695919,
-0.2143724262714386,
0.0068518673069775105,
-0.11083044856786728,
0.10497239232063293,
0.05145905911922455,
0.008672076277434826,
-0.05900762602686882,
0.009587370790541172,
-0.07061129808425903,
0.017543617635965347,
0.06410685926675797,
-0.14682988822460175,
0.23294878005981445,
-0.010236755944788456,
0.12100879102945328,
-0.02556440606713295,
0.09672820568084717,
0.0646195337176323,
-0.05798641964793205,
0.016219593584537506,
-0.18224725127220154,
-0.029048502445220947,
0.10136936604976654,
-0.02636072412133217,
0.048051945865154266,
0.10765239596366882,
-0.11959435045719147,
0.006298988591879606,
0.1445663571357727,
-0.12517958879470825,
-0.07672669738531113,
-0.03629765287041664,
-0.2729160189628601,
0.01462980080395937,
0.007002962753176689,
-0.0016747727058827877,
0.13137401640415192,
0.010534839704632759,
-0.04659204185009003,
-0.024430029094219208,
-0.14776408672332764,
-0.0017618959536775947,
0.09531417489051819,
0.006869553122669458,
-0.08104166388511658,
0.14435066282749176,
-0.11970169097185135,
-0.12216976284980774,
0.009716643020510674,
0.13860827684402466,
-0.03708488494157791,
-0.048214141279459,
-0.020890917629003525,
0.4504169225692749,
-0.039464112371206284,
-0.05706073343753815,
-0.08010005205869675,
-0.06397988647222519,
-0.03453566133975983,
0.16810423135757446,
0.1144886314868927,
0.10666675120592117,
0.09651064872741699,
-0.003303974401205778,
-0.017059504985809326,
-0.04981517046689987,
0.04948827996850014,
-0.028193218633532524,
0.04184828698635101,
-0.09437122195959091,
-0.07165887951850891,
0.16773730516433716,
-0.12160564959049225,
-0.09969628602266312,
-0.20736604928970337,
0.036761574447155,
-0.16609035432338715,
-0.07284509390592575,
-0.00966836791485548,
-0.07941296696662903,
0.08275631070137024,
-0.04018037021160126,
-0.03628335893154144,
-0.07445189356803894,
-0.13032123446464539,
0.07728591561317444,
0.02000250108540058,
-0.0014329912373796105,
0.0387214832007885,
0.0038883392699062824,
0.18826542794704437,
-0.08595412224531174,
0.12297439575195312,
0.1326216757297516,
-0.03274571895599365,
0.09732939302921295,
-0.17525307834148407,
-0.1044192686676979,
0.06692932546138763,
-0.05972413346171379,
0.06621750444173813,
0.01972603052854538,
-0.013913687318563461,
-0.010547935031354427,
-0.02938798815011978,
0.09843669831752777,
0.10417227447032928,
0.007634134031832218,
0.0592317134141922,
0.010652575641870499,
-0.26736217737197876,
-0.06975407898426056,
-0.14287878572940826,
0.11544537544250488,
0.14135736227035522,
-0.001752000767737627,
0.006504676770418882,
0.08256946504116058,
-0.0629645437002182,
-0.030884500592947006,
0.005984285846352577,
-0.0865476131439209,
0.12100233137607574,
-0.005862763151526451,
0.05541400611400604,
0.03893941640853882,
0.2670377790927887,
0.0028658988885581493,
0.03769807144999504,
-0.013603693805634975,
0.12044291943311691,
0.13903456926345825,
-0.014688301831483841,
0.11621180176734924,
0.017972184345126152,
-0.05368044972419739,
-0.13516280055046082,
0.08491594344377518,
-0.05199576914310455,
-0.12130430340766907,
0.02587357722222805,
0.07311976701021194,
-0.11577422171831131,
0.16466857492923737,
0.008687187917530537,
0.07740069925785065,
0.15224283933639526,
-0.1861501783132553,
-0.008126388303935528,
-0.010278550907969475,
0.06939508765935898,
0.2203100025653839,
0.062308523803949356,
-0.11675422638654709,
0.13308459520339966,
-0.07526460289955139,
-0.03578348085284233,
-0.1769818365573883,
-0.004582136869430542,
-0.03294258192181587,
-0.19622142612934113,
0.10265814512968063,
-0.03245389834046364,
-0.09079045802354813,
0.19573721289634705,
0.01971898227930069,
-0.02158447541296482,
0.04927246645092964,
-0.12105584144592285,
-0.028042227029800415,
0.11085487902164459,
-0.05042828619480133,
-0.03987593948841095,
0.04445873200893402,
-0.0927257314324379,
-0.01079292967915535,
-0.056197624653577805,
-0.02639702521264553,
-0.023473218083381653,
-0.07439043372869492,
-0.033554092049598694,
-0.14443323016166687,
-0.0830419510602951,
-0.02869691140949726,
0.030515074729919434,
-0.08250395208597183,
0.0024360481183975935,
0.023264843970537186,
0.05156123638153076,
0.050278909504413605,
0.13079534471035004,
-0.015035205520689487,
0.02133285067975521,
-0.12286451458930969,
0.16025151312351227,
-0.14750342071056366,
0.1315315067768097,
-0.10732308775186539,
-0.006784635595977306,
-0.09450827538967133,
0.2523571252822876,
0.17568616569042206,
-0.06326206773519516,
-0.05310368165373802,
-0.01012677326798439,
0.041627973318099976,
0.027800368145108223,
0.14074954390525818,
0.03118666633963585,
0.1537274718284607,
-0.07007297873497009,
0.050045523792505264,
-0.020763251930475235,
-0.06800945103168488,
0.04098568856716156,
-0.009449551813304424,
0.09022736549377441,
-0.07253727316856384,
-0.0904010534286499,
0.09957961738109589,
-0.19742712378501892,
0.14164775609970093,
0.14707133173942566,
-0.21054643392562866,
-0.03274192661046982,
-0.08633103966712952,
0.06514880061149597,
-0.034393951296806335,
0.1778625249862671,
-0.05586196482181549,
-0.02206338942050934,
-0.08552470803260803,
-0.05654773488640785,
-0.24414017796516418,
-0.10810312628746033,
0.024715865030884743,
-0.1381450891494751,
-0.015787392854690552,
-0.0000574369405512698,
0.06915228068828583,
0.05669822916388512,
0.008501476608216763,
-0.016631383448839188,
0.10660748928785324,
0.02381611056625843,
0.0716777890920639,
-0.1412181854248047,
0.08016131073236465,
0.019909746944904327,
-0.1783415526151657,
0.08664030581712723,
-0.08778882026672363,
0.05779065564274788,
-0.0044563706032931805,
-0.0577428862452507,
-0.09341620653867722,
0.03669225051999092,
-0.10912027209997177,
0.10228332132101059,
0.11175772547721863,
0.023457245901226997,
0.04417974501848221,
-0.017002809792757034,
0.07528706640005112,
0.039510175585746765,
-0.12746967375278473,
-0.0460863895714283,
0.019977539777755737,
-0.05633847042918205,
0.2730658948421478,
-0.15788504481315613,
-0.26712995767593384,
-0.02935369871556759,
-0.13356761634349823,
0.059217408299446106,
0.05267488211393356,
0.1182747408747673,
0.08339616656303406,
0.10384069383144379,
0.03667563572525978,
-0.27562415599823,
0.15433789789676666,
0.10018137097358704,
0.03232602775096893,
-0.1106196790933609
] |
null | null |
transformers
|
# RuBERT for Sentiment Analysis of Medical Reviews
This is a [DeepPavlov/rubert-base-cased-conversational](https://huggingface.co/DeepPavlov/rubert-base-cased-conversational) model trained on a corpus of medical reviews.
## Labels
0: NEUTRAL
1: POSITIVE
2: NEGATIVE
## How to use
```python
import torch
from transformers import AutoModelForSequenceClassification
from transformers import BertTokenizerFast
tokenizer = BertTokenizerFast.from_pretrained('blanchefort/rubert-base-cased-sentiment-med')
model = AutoModelForSequenceClassification.from_pretrained('blanchefort/rubert-base-cased-sentiment-med', return_dict=True)
@torch.no_grad()
def predict(text):
    inputs = tokenizer(text, max_length=512, padding=True, truncation=True, return_tensors='pt')
    outputs = model(**inputs)
    predicted = torch.nn.functional.softmax(outputs.logits, dim=1)
    predicted = torch.argmax(predicted, dim=1).numpy()
    return predicted
```
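A quick call of the helper above might look like this (the review string is a made-up example, not taken from the training data):
```python
# Label ids map to 0: NEUTRAL, 1: POSITIVE, 2: NEGATIVE, as listed above.
print(predict("Отличный врач, всё подробно объяснил"))  # "Great doctor, explained everything in detail"
```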
## Dataset used for model training
**[Reviews of medical institutions (Отзывы о медучреждениях)](https://github.com/blanchefort/datasets/tree/master/medical_comments)**
> The dataset contains user reviews of medical institutions, collected in May 2019 from the prodoctorov.ru website.
|
{"language": ["ru"], "tags": ["sentiment", "text-classification"]}
|
text-classification
|
blanchefort/rubert-base-cased-sentiment-med
|
[
"transformers",
"pytorch",
"tf",
"jax",
"safetensors",
"bert",
"text-classification",
"sentiment",
"ru",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"ru"
] |
TAGS
#transformers #pytorch #tf #jax #safetensors #bert #text-classification #sentiment #ru #autotrain_compatible #endpoints_compatible #region-us
|
# RuBERT for Sentiment Analysis of Medical Reviews
This is a DeepPavlov/rubert-base-cased-conversational model trained on a corpus of medical reviews.
## Labels
0: NEUTRAL
1: POSITIVE
2: NEGATIVE
## How to use
## Dataset used for model training
Отзывы о медучреждениях
> The dataset contains user reviews of medical institutions. It was collected in May 2019 from URL.
|
[
"# RuBERT for Sentiment Analysis of Medical Reviews\n\nThis is a DeepPavlov/rubert-base-cased-conversational model trained on corpus of medical reviews.",
"## Labels\n 0: NEUTRAL\n 1: POSITIVE\n 2: NEGATIVE",
"## How to use",
"## Dataset used for model training\n\nОтзывы о медучреждениях\n\n> Датасет содержит пользовательские отзывы о медицинских учреждениях. Датасет собран в мае 2019 года с сайта URL"
] |
[
"TAGS\n#transformers #pytorch #tf #jax #safetensors #bert #text-classification #sentiment #ru #autotrain_compatible #endpoints_compatible #region-us \n",
"# RuBERT for Sentiment Analysis of Medical Reviews\n\nThis is a DeepPavlov/rubert-base-cased-conversational model trained on corpus of medical reviews.",
"## Labels\n 0: NEUTRAL\n 1: POSITIVE\n 2: NEGATIVE",
"## How to use",
"## Dataset used for model training\n\nОтзывы о медучреждениях\n\n> Датасет содержит пользовательские отзывы о медицинских учреждениях. Датасет собран в мае 2019 года с сайта URL"
] |
[
52,
41,
15,
4,
40
] |
[
"passage: TAGS\n#transformers #pytorch #tf #jax #safetensors #bert #text-classification #sentiment #ru #autotrain_compatible #endpoints_compatible #region-us \n# RuBERT for Sentiment Analysis of Medical Reviews\n\nThis is a DeepPavlov/rubert-base-cased-conversational model trained on corpus of medical reviews.## Labels\n 0: NEUTRAL\n 1: POSITIVE\n 2: NEGATIVE## How to use## Dataset used for model training\n\nОтзывы о медучреждениях\n\n> Датасет содержит пользовательские отзывы о медицинских учреждениях. Датасет собран в мае 2019 года с сайта URL"
] |
[
-0.011837666854262352,
0.03830361366271973,
-0.00632257666438818,
-0.059093356132507324,
0.10802332311868668,
0.049930546432733536,
0.03461373597383499,
0.15071484446525574,
0.01439262181520462,
0.07260885834693909,
0.06803380697965622,
0.08087430149316788,
0.011970395222306252,
0.0761507973074913,
-0.05160749703645706,
-0.23060151934623718,
-0.003960518166422844,
0.10264275223016739,
0.11918936669826508,
0.12064412981271744,
0.07903654128313065,
-0.07222006469964981,
0.06923495978116989,
0.036517929285764694,
0.0033670621924102306,
-0.02566666528582573,
0.0934070274233818,
-0.03296922147274017,
0.11711513996124268,
-0.05082666501402855,
0.028905751183629036,
0.05310586839914322,
0.022522298619151115,
-0.15121345221996307,
0.032046735286712646,
0.0030864186119288206,
-0.02430742420256138,
0.08303743600845337,
0.04295695200562477,
-0.1874110996723175,
0.18743225932121277,
-0.12222710996866226,
0.07523354887962341,
0.05036868155002594,
-0.11264064162969589,
-0.23124738037586212,
-0.09500221908092499,
0.13034497201442719,
0.00010062210640171543,
0.13611632585525513,
-0.020836148411035538,
0.24385209381580353,
-0.1315939873456955,
0.03422761708498001,
0.1622544825077057,
-0.19186754524707794,
-0.043893080204725266,
0.05028005689382553,
0.053346239030361176,
0.021164290606975555,
-0.08551615476608276,
0.03434118255972862,
0.0073730782605707645,
0.02385561540722847,
0.03273449465632439,
-0.042012374848127365,
0.02174091711640358,
0.042295876890420914,
-0.08272597938776016,
-0.015127751976251602,
0.19146493077278137,
-0.04357647895812988,
0.006651746574789286,
-0.0610521174967289,
0.03283635154366493,
-0.07032895088195801,
0.016414407640695572,
-0.12876735627651215,
-0.011207844130694866,
-0.06079498678445816,
-0.0698484480381012,
0.06485971063375473,
-0.07276201248168945,
-0.012249867431819439,
-0.09601074457168579,
0.004071198869496584,
-0.03347543627023697,
0.04327806085348129,
0.003294155700132251,
0.0967424064874649,
-0.019964316859841347,
-0.022562429308891296,
-0.0054658506996929646,
-0.03127420321106911,
-0.019681496545672417,
-0.08195702731609344,
-0.0662560984492302,
-0.004186671692878008,
0.08467860519886017,
0.042284443974494934,
0.009935669600963593,
-0.04007543995976448,
0.02408130280673504,
-0.03840003162622452,
0.10403219610452652,
-0.00800636038184166,
-0.11148753762245178,
-0.014955328777432442,
-0.05086595192551613,
0.07591081410646439,
0.031641777604818344,
0.06603015214204788,
0.03308766335248947,
0.004964863881468773,
0.08435565233230591,
0.06602340936660767,
-0.15467451512813568,
0.061182789504528046,
-0.11031478643417358,
-0.018434157595038414,
-0.011249726638197899,
0.005169225856661797,
-0.07350638508796692,
0.06020206958055496,
-0.0364563912153244,
0.07090071588754654,
0.05863117054104805,
0.012713953852653503,
-0.01808769255876541,
0.11992373317480087,
-0.03183308616280556,
-0.034303490072488785,
-0.01784461922943592,
-0.014545679092407227,
0.06014489755034447,
-0.009554561227560043,
0.048904724419116974,
-0.12138508260250092,
-0.08679452538490295,
-0.044613227248191833,
0.007811398711055517,
-0.07221554219722748,
0.04971718415617943,
-0.046164970844984055,
-0.007201435510069132,
-0.011127081699669361,
0.01981773041188717,
0.004585412330925465,
-0.06014756113290787,
0.048547763377428055,
-0.08420661836862564,
0.0849805548787117,
-0.03014560043811798,
0.00012277328642085195,
-0.14319424331188202,
-0.001191393588669598,
-0.04192880541086197,
-0.018753351643681526,
-0.07707148045301437,
0.048144351691007614,
-0.10564501583576202,
-0.06558717787265778,
-0.06270226836204529,
-0.04550069198012352,
0.02314336970448494,
0.10519628971815109,
-0.11600645631551743,
-0.0742252990603447,
0.11967993527650833,
-0.04957405850291252,
-0.20113280415534973,
0.14462251961231232,
-0.06791141629219055,
0.18711936473846436,
0.08004980534315109,
0.153676375746727,
0.03599395230412483,
-0.09721378237009048,
-0.06408221274614334,
-0.08836992084980011,
-0.09464262425899506,
0.05022275820374489,
0.0687737688422203,
0.06755396723747253,
-0.055643290281295776,
0.004062398336827755,
-0.05129420757293701,
-0.0015465457690879703,
-0.11631639301776886,
-0.043987322598695755,
0.005359623581171036,
-0.09592632949352264,
0.07551145553588867,
0.0426870733499527,
0.03989843651652336,
-0.13942208886146545,
-0.0014069329481571913,
-0.09356693923473358,
0.11570195108652115,
0.03129718452692032,
-0.057850226759910583,
-0.12479870766401291,
-0.01378235686570406,
0.0717787891626358,
-0.0006423978484235704,
-0.11158662289381027,
-0.002097578952088952,
-0.0549747534096241,
0.06260709464550018,
-0.011325879953801632,
0.1435781866312027,
0.002043419284746051,
-0.07438822090625763,
-0.04646900296211243,
0.013895570300519466,
0.07516474276781082,
0.0017359410412609577,
-0.02067205309867859,
-0.17637009918689728,
0.10304736346006393,
-0.08033307641744614,
0.1351006180047989,
-0.16022855043411255,
0.025629298761487007,
-0.003151870332658291,
0.0791383683681488,
0.05680592730641365,
0.0022847827058285475,
0.08356377482414246,
0.0003228369459975511,
0.027392735704779625,
0.058936700224876404,
0.09142369031906128,
-0.04461980238556862,
-0.05972253531217575,
0.11140988767147064,
-0.08969782292842865,
0.10713754594326019,
0.05940471589565277,
-0.15216581523418427,
-0.09062297642230988,
-0.04802454262971878,
-0.045444026589393616,
0.044914934784173965,
-0.10575170814990997,
0.005601728335022926,
0.15008407831192017,
-0.006975445430725813,
0.060161661356687546,
0.02790924906730652,
-0.007207581307739019,
-0.014603415504097939,
-0.09457266330718994,
-0.03461698442697525,
0.09153204411268234,
-0.021300021559000015,
-0.22621913254261017,
0.021149909123778343,
0.1583845317363739,
-0.022590795531868935,
0.12616053223609924,
0.015076352283358574,
-0.018120557069778442,
-0.05048290640115738,
-0.07701816409826279,
-0.002435771282762289,
0.027027640491724014,
-0.21717441082000732,
-0.025399930775165558,
0.07243388891220093,
-0.08624589443206787,
-0.0007451038109138608,
-0.06688728928565979,
-0.03175532817840576,
0.004052011761814356,
0.022587178274989128,
-0.06029791384935379,
0.14303815364837646,
0.02304953522980213,
0.15258090198040009,
0.02371765673160553,
0.05033104494214058,
0.03253365308046341,
-0.023602750152349472,
-0.15948998928070068,
0.08849753439426422,
-0.06275772303342819,
-0.18111415207386017,
-0.005382529925554991,
-0.03860185295343399,
0.016730986535549164,
-0.015489411540329456,
0.013745850883424282,
-0.13674704730510712,
-0.002168695442378521,
-0.03141532465815544,
0.004337787628173828,
0.1337987780570984,
-0.058057401329278946,
-0.017085380852222443,
0.0823470801115036,
0.062414515763521194,
-0.03523580729961395,
-0.035971369594335556,
-0.1400243043899536,
-0.08714986592531204,
0.12685348093509674,
-0.062113042920827866,
0.024712111800909042,
0.15686213970184326,
0.03665025904774666,
-0.0010336985578760505,
-0.04464193433523178,
0.09578965604305267,
-0.089784175157547,
-0.040086738765239716,
0.14793998003005981,
-0.010796268470585346,
-0.02913873828947544,
0.11438285559415817,
0.06233470141887665,
-0.04906727373600006,
0.09492066502571106,
0.06015195697546005,
0.009674153290688992,
-0.26320430636405945,
-0.09716304391622543,
-0.04595743492245674,
0.004183120094239712,
-0.07077888399362564,
0.0035879379138350487,
0.09945530444383621,
0.09147171676158905,
0.03639712557196617,
-0.1025482788681984,
-0.07348652929067612,
0.05187758430838585,
0.1460949331521988,
-0.03864140063524246,
0.11903563886880875,
-0.015493734739720821,
-0.057138893753290176,
0.0995047315955162,
-0.050966303795576096,
0.1159663274884224,
-0.002530045807361603,
0.024456074461340904,
0.10754109919071198,
0.10774767398834229,
0.04620850831270218,
0.006875508930534124,
0.048584260046482086,
-0.0504208579659462,
-0.02396269328892231,
-0.012432369403541088,
-0.18649329245090485,
0.05665752664208412,
-0.08376439660787582,
0.01487001683562994,
-0.09254560619592667,
-0.05458787456154823,
0.11692491918802261,
0.16248026490211487,
0.08393532037734985,
-0.1317828744649887,
-0.12244472652673721,
0.02288411185145378,
-0.06273619085550308,
0.012089998461306095,
0.011907079257071018,
0.0002482342242728919,
-0.11764656752347946,
0.06597817689180374,
-0.058523356914520264,
0.0823437049984932,
-0.09866859763860703,
0.11495190858840942,
-0.07647918164730072,
-0.09863239526748657,
-0.04911152645945549,
0.07619619369506836,
-0.18855182826519012,
0.23069369792938232,
-0.02825414389371872,
0.012461268343031406,
-0.12437745928764343,
-0.10451582819223404,
0.09810802340507507,
0.16514937579631805,
0.07456641644239426,
-0.011604922823607922,
0.07558999210596085,
-0.06441017985343933,
0.014783292077481747,
0.05585315823554993,
0.07436821609735489,
-0.06209916993975639,
0.02911200374364853,
-0.01898878626525402,
0.041063595563173294,
-0.015831172466278076,
0.012243492528796196,
-0.14005060493946075,
0.03549851104617119,
0.07965997606515884,
-0.027920624241232872,
0.05117252096533775,
-0.010185441002249718,
-0.10352343320846558,
-0.017800988629460335,
0.06065453961491585,
0.08412566035985947,
0.03237682208418846,
-0.12006044387817383,
0.1325039565563202,
-0.02603512816131115,
-0.0525125116109848,
-0.042937565594911575,
0.041925620287656784,
-0.0036990754306316376,
0.05811933055520058,
-0.11224646121263504,
0.05192643404006958,
-0.14670534431934357,
-0.17466312646865845,
-0.06054249033331871,
0.14924927055835724,
0.08734672516584396,
0.05094490572810173,
0.0010067732073366642,
0.008481469936668873,
-0.03601927310228348,
-0.006431227084249258,
0.2047223299741745,
0.07879003137350082,
-0.008376638405025005,
0.06602182239294052,
-0.050652697682380676,
-0.18023081123828888,
-0.09879719465970993,
-0.05007864162325859,
0.141423761844635,
0.12918955087661743,
-0.03747343271970749,
0.08936887979507446,
0.1053447499871254,
-0.011290863156318665,
-0.25654640793800354,
0.06024342030286789,
0.07414542883634567,
-0.021246423944830894,
0.09407684952020645,
-0.13455143570899963,
0.18219827115535736,
0.049412813037633896,
-0.007353087421506643,
-0.10978718101978302,
0.03662171587347984,
-0.034379784017801285,
0.13473230600357056,
0.07897788286209106,
0.3017483651638031,
-0.08480039238929749,
-0.04071586951613426,
0.07741161435842514,
-0.14114539325237274,
0.14329040050506592,
-0.17315082252025604,
0.04941737651824951,
-0.025672534480690956,
0.1346166729927063,
0.07776166498661041,
0.0037706666626036167,
0.06891024857759476,
-0.03542501851916313,
0.0670972391963005,
-0.11119506508111954,
-0.0884048268198967,
0.03949298337101936,
0.012893463484942913,
0.030599096789956093,
0.06243092194199562,
-0.00039898825343698263,
-0.09782462567090988,
-0.02223948948085308,
-0.14208044111728668,
-0.02004685066640377,
0.0352497324347496,
-0.10081581771373749,
-0.09475255757570267,
0.12917330861091614,
0.033909764140844345,
-0.042791400104761124,
0.021117087453603745,
-0.14455673098564148,
0.1389377862215042,
0.055210281163454056,
0.2736304998397827,
0.010110106319189072,
0.03941776603460312,
0.022922871634364128,
-0.13505277037620544,
0.01768237166106701,
-0.12284217774868011,
0.02100820280611515,
0.08874153345823288,
0.03151371330022812,
0.12932607531547546,
0.04448824003338814,
-0.09538616985082626,
0.02449476160109043,
0.10038244724273682,
-0.1816340684890747,
-0.12515957653522491,
-0.08071500808000565,
0.01818697340786457,
-0.11470355838537216,
0.055479180067777634,
0.13328418135643005,
-0.05650901794433594,
-0.009977811947464943,
-0.02474590204656124,
0.005408954340964556,
-0.06962466984987259,
0.12587371468544006,
0.035066138952970505,
-0.0028272701893001795,
-0.05026222765445709,
-0.0814378410577774,
0.030674653127789497,
-0.15548868477344513,
0.026833312585949898,
0.05247918516397476,
-0.1267848014831543,
-0.12514910101890564,
-0.07218029350042343,
0.2453937828540802,
-0.10196080058813095,
-0.06649962812662125,
-0.04224657267332077,
-0.12120276689529419,
0.04614489898085594,
0.23279976844787598,
0.04514177516102791,
-0.010384146124124527,
-0.1005815863609314,
-0.0024466263130307198,
-0.018045801669359207,
0.14240895211696625,
0.09322649240493774,
-0.016268528997898102,
0.02219908870756626,
0.07667866349220276,
-0.023752372711896896,
0.11291287839412689,
-0.08024612814188004,
0.016507819294929504,
-0.0760178342461586,
-0.029588954523205757,
-0.20111939311027527,
-0.060837116092443466,
-0.005203923676162958,
-0.02419571578502655,
0.027117155492305756,
-0.0403967909514904,
-0.030575063079595566,
-0.06743314862251282,
-0.053491219878196716,
0.006747412960976362,
0.10461819171905518,
0.09953249245882034,
-0.05863688886165619,
-0.02546742931008339,
0.07100903242826462,
0.01044748816639185,
0.17493489384651184,
0.05609019845724106,
-0.022408567368984222,
0.07553169131278992,
-0.2030872404575348,
-0.011737062595784664,
0.09786725044250488,
0.05116609111428261,
0.015725119039416313,
-0.1337434947490692,
-0.02989194728434086,
0.004070242866873741,
-0.055606529116630554,
0.11914835125207901,
0.12071233987808228,
-0.0062554143369197845,
0.0574636235833168,
0.06451176106929779,
-0.011222676374018192,
-0.08573930710554123,
-0.03132983297109604,
-0.034369178116321564,
0.04450104013085365,
0.11895845830440521,
-0.09163609147071838,
0.03246557340025902,
-0.11927303671836853,
-0.009221291169524193,
-0.022177571430802345,
-0.07569818943738937,
-0.20404523611068726,
-0.004976640455424786,
0.08049748837947845,
0.011230002157390118,
0.15190809965133667,
-0.0519731380045414,
-0.08785019814968109,
0.023586612194776535,
0.15020963549613953,
0.07854463905096054,
-0.0354904942214489,
0.04044051840901375,
0.09632375836372375,
-0.0759994238615036,
-0.13559557497501373,
0.01237679086625576,
-0.006572068203240633,
-0.08593110740184784,
0.20720747113227844,
0.08782058954238892,
0.1252748966217041,
-0.07174551486968994,
-0.05622939392924309,
0.027473827823996544,
0.04666941240429878,
-0.12231818586587906,
-0.07388433068990707,
-0.02114546298980713,
0.0030058943666517735,
0.07321605831384659,
0.19046075642108917,
-0.0351361483335495,
0.042760398238897324,
-0.11852747946977615,
-0.02710210531949997,
-0.07600923627614975,
-0.15978294610977173,
-0.030764736235141754,
-0.1273675560951233,
-0.005991878919303417,
-0.11130805313587189,
-0.01744968444108963,
0.01775861158967018,
0.08144392818212509,
-0.03516906872391701,
0.1029861643910408,
-0.13640819489955902,
0.02779398113489151,
0.1574753373861313,
-0.015696564689278603,
-0.053313106298446655,
-0.14731910824775696,
0.06229338422417641,
-0.0431983657181263,
0.03302690386772156,
0.022994183003902435,
-0.0012457316042855382,
-0.04097118228673935,
-0.062486521899700165,
-0.08172839134931564,
-0.11060702800750732,
0.07677178829908371,
-0.019873814657330513,
-0.013222298584878445,
0.12545625865459442,
0.06408241391181946,
0.07322543859481812,
0.05108824744820595,
0.24138343334197998,
-0.031026162207126617,
0.05221257731318474,
-0.15852726995944977,
0.11620409041643143,
-0.030891098082065582,
0.013363040052354336,
-0.007087253034114838,
-0.06665567308664322,
0.03607012704014778,
0.25431540608406067,
0.2455875426530838,
-0.06058342754840851,
0.020362604409456253,
-0.028395425528287888,
0.047351814806461334,
0.02719198912382126,
0.06581740081310272,
0.09710685908794403,
0.04546118155121803,
-0.09177440404891968,
0.02911790832877159,
-0.0635204166173935,
0.005501583218574524,
0.032635029405355453,
-0.04076603055000305,
0.1380932629108429,
-0.04971802234649658,
-0.041808176785707474,
0.1122504323720932,
-0.1190374568104744,
-0.055091820657253265,
-0.017793802544474602,
-0.1716492623090744,
-0.07136844843626022,
-0.10411061346530914,
0.013406678102910519,
0.07770911604166031,
0.02724139206111431,
0.022970596328377724,
0.03156103938817978,
0.1574474275112152,
0.036635544151067734,
-0.13495229184627533,
-0.06723485141992569,
0.16209368407726288,
0.04489202797412872,
0.03339649736881256,
-0.030863381922245026,
0.02908814325928688,
0.09371815621852875,
-0.03512221574783325,
-0.013047815300524235,
0.17271572351455688,
-0.03253047168254852,
-0.054429132491350174,
0.011197034269571304,
0.1241479441523552,
0.08346088975667953,
0.03023010492324829,
0.0700337365269661,
-0.24889783561229706,
-0.009076801128685474,
-0.05592511594295502,
-0.06918444484472275,
-0.1116006001830101,
0.25149354338645935,
-0.04096662998199463,
0.06951524317264557,
0.16933487355709076,
-0.014376632869243622,
0.0670274868607521,
-0.07249365001916885,
0.03241852670907974,
0.053416065871715546,
-0.021960170939564705,
0.037449900060892105,
-0.12794606387615204,
0.07079429924488068,
-0.042567089200019836,
-0.030124550685286522,
-0.2544778287410736,
-0.033437952399253845,
-0.05476582795381546,
0.018460538238286972,
-0.005300515331327915,
0.04058461636304855,
0.02705114521086216,
0.010441401973366737,
0.009034906513988972,
-0.08323255926370621,
0.035297200083732605,
0.12860381603240967,
-0.0551617331802845,
-0.036916960030794144
] |
null | null |
transformers
|
# RuBERT for Sentiment Analysis of Tweets
This is a [DeepPavlov/rubert-base-cased-conversational](https://huggingface.co/DeepPavlov/rubert-base-cased-conversational) model trained on [RuTweetCorp](https://study.mokoron.com/).
## Labels
0: POSITIVE
1: NEGATIVE
## How to use
```python
import torch
from transformers import AutoModelForSequenceClassification
from transformers import BertTokenizerFast
tokenizer = BertTokenizerFast.from_pretrained('blanchefort/rubert-base-cased-sentiment-mokoron')
model = AutoModelForSequenceClassification.from_pretrained('blanchefort/rubert-base-cased-sentiment-mokoron', return_dict=True)
@torch.no_grad()
def predict(text):
inputs = tokenizer(text, max_length=512, padding=True, truncation=True, return_tensors='pt')
outputs = model(**inputs)
predicted = torch.nn.functional.softmax(outputs.logits, dim=1)
predicted = torch.argmax(predicted, dim=1).numpy()
return predicted
```
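A minimal usage sketch (the tweets and the printed labels are illustrative, not drawn from RuTweetCorp; `label_names` mirrors the labels listed above):

```python
# Illustrative example: classify a small batch of made-up tweets.
label_names = {0: 'POSITIVE', 1: 'NEGATIVE'}

tweets = [
    'Какой чудесный день!',    # 'What a wonderful day!'
    'Опять всё сломалось...',  # 'Everything is broken again...'
]
for tweet, class_id in zip(tweets, predict(tweets)):
    print(f'{tweet} -> {label_names[int(class_id)]}')
```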
## Dataset used for model training
**[RuTweetCorp](https://study.mokoron.com/)**
> Рубцова Ю. Автоматическое построение и анализ корпуса коротких текстов (постов микроблогов) для задачи разработки и тренировки тонового классификатора // Инженерия знаний и технологии семантического веба. – 2012. – Т. 1. – С. 109-116.
|
{"language": ["ru"], "tags": ["sentiment", "text-classification"], "datasets": ["RuTweetCorp"]}
|
text-classification
|
blanchefort/rubert-base-cased-sentiment-mokoron
|
[
"transformers",
"pytorch",
"tf",
"jax",
"safetensors",
"bert",
"text-classification",
"sentiment",
"ru",
"dataset:RuTweetCorp",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"ru"
] |
TAGS
#transformers #pytorch #tf #jax #safetensors #bert #text-classification #sentiment #ru #dataset-RuTweetCorp #autotrain_compatible #endpoints_compatible #region-us
|
# RuBERT for Sentiment Analysis of Tweets
This is a DeepPavlov/rubert-base-cased-conversational model trained on RuTweetCorp.
## Labels
0: POSITIVE
1: NEGATIVE
## How to use
## Dataset used for model training
RuTweetCorp
> Рубцова Ю. Автоматическое построение и анализ корпуса коротких текстов (постов микроблогов) для задачи разработки и тренировки тонового классификатора // Инженерия знаний и технологии семантического веба. – 2012. – Т. 1. – С. 109-116.
|
[
"# RuBERT for Sentiment Analysis of Tweets\n\nThis is a DeepPavlov/rubert-base-cased-conversational model trained on RuTweetCorp.",
"## Labels\n 0: POSITIVE\n 1: NEGATIVE",
"## How to use",
"## Dataset used for model training\n\nRuTweetCorp\n\n> Рубцова Ю. Автоматическое построение и анализ корпуса коротких текстов (постов микроблогов) для задачи разработки и тренировки тонового классификатора // Инженерия знаний и технологии семантического веба. – 2012. – Т. 1. – С. 109-116."
] |
[
"TAGS\n#transformers #pytorch #tf #jax #safetensors #bert #text-classification #sentiment #ru #dataset-RuTweetCorp #autotrain_compatible #endpoints_compatible #region-us \n",
"# RuBERT for Sentiment Analysis of Tweets\n\nThis is a DeepPavlov/rubert-base-cased-conversational model trained on RuTweetCorp.",
"## Labels\n 0: POSITIVE\n 1: NEGATIVE",
"## How to use",
"## Dataset used for model training\n\nRuTweetCorp\n\n> Рубцова Ю. Автоматическое построение и анализ корпуса коротких текстов (постов микроблогов) для задачи разработки и тренировки тонового классификатора // Инженерия знаний и технологии семантического веба. – 2012. – Т. 1. – С. 109-116."
] |
[
62,
42,
11,
4,
72
] |
[
"passage: TAGS\n#transformers #pytorch #tf #jax #safetensors #bert #text-classification #sentiment #ru #dataset-RuTweetCorp #autotrain_compatible #endpoints_compatible #region-us \n# RuBERT for Sentiment Analysis of Tweets\n\nThis is a DeepPavlov/rubert-base-cased-conversational model trained on RuTweetCorp.## Labels\n 0: POSITIVE\n 1: NEGATIVE## How to use## Dataset used for model training\n\nRuTweetCorp\n\n> Рубцова Ю. Автоматическое построение и анализ корпуса коротких текстов (постов микроблогов) для задачи разработки и тренировки тонового классификатора // Инженерия знаний и технологии семантического веба. – 2012. – Т. 1. – С. 109-116."
] |
[
0.025558527559041977,
-0.045972712337970734,
-0.004710712470114231,
0.06337469071149826,
0.10670731216669083,
0.018927108496427536,
0.05995048210024834,
0.08915482461452484,
-0.07575318217277527,
0.002483274322003126,
0.12583892047405243,
0.05620503053069115,
-0.0016999297076836228,
0.1388973444700241,
-0.04930096119642258,
-0.24248525500297546,
-0.0026500411331653595,
-0.025265490636229515,
0.07787369191646576,
0.1702525019645691,
0.1873670071363449,
-0.04059266671538353,
0.09492813795804977,
-0.043769922107458115,
-0.14995314180850983,
0.022727936506271362,
0.07007880508899689,
-0.07199735939502716,
0.11235007643699646,
0.007741948124021292,
0.09644142538309097,
0.05486490577459335,
0.03216814249753952,
-0.10565026849508286,
0.06053467094898224,
0.07255661487579346,
-0.050391849130392075,
0.06265436857938766,
0.06346035748720169,
-0.1035839319229126,
0.1945720613002777,
-0.041392359882593155,
0.052917610853910446,
0.09169550985097885,
-0.1302117556333542,
0.009850496426224709,
-0.07876203954219818,
0.13735061883926392,
0.09672398865222931,
0.10737147182226181,
-0.058657076209783554,
0.19948549568653107,
-0.10026593506336212,
0.09042170643806458,
0.13161145150661469,
-0.2848965525627136,
-0.0838460922241211,
0.04009983316063881,
-0.037011437118053436,
0.08208321034908295,
-0.09919759631156921,
0.019203368574380875,
0.08276467025279999,
0.07141871750354767,
0.02902659960091114,
-0.06581759452819824,
-0.028584277257323265,
-0.030150124803185463,
-0.09373628348112106,
0.012269060127437115,
0.21309861540794373,
0.0697975754737854,
-0.02687903679907322,
-0.036968059837818146,
-0.04838307201862335,
-0.03797491639852524,
-0.015658477321267128,
-0.09811992943286896,
-0.08110427111387253,
0.023267140612006187,
-0.09351862221956253,
0.0025232082698494196,
-0.13287311792373657,
0.031493280082941055,
-0.09270670264959335,
0.05940590053796768,
-0.021655956283211708,
0.009909610264003277,
-0.08679795265197754,
0.09248451143503189,
0.0385054387152195,
-0.10019490867853165,
0.039911579340696335,
-0.10896870493888855,
0.003913981840014458,
-0.050390295684337616,
-0.07759840041399002,
-0.09575004875659943,
0.02734445594251156,
0.08695176988840103,
-0.0024015975650399923,
0.033355168998241425,
0.053520627319812775,
-0.008162585087120533,
0.06034835800528526,
0.03300381079316139,
-0.08961901068687439,
-0.06519564241170883,
0.041031431406736374,
-0.05356530845165253,
-0.06455205380916595,
-0.009185769595205784,
-0.01966453343629837,
-0.021489489823579788,
0.10678713023662567,
0.008222531527280807,
-0.030166257172822952,
0.11006280034780502,
-0.09264639019966125,
-0.02038157917559147,
-0.0930095836520195,
-0.05894900858402252,
-0.03653085231781006,
0.018224233761429787,
-0.06691810488700867,
0.0910118967294693,
0.003845776664093137,
0.04238408803939819,
-0.0447654128074646,
0.021056506782770157,
-0.04013441875576973,
-0.04082738980650902,
0.002029231982305646,
-0.09390245378017426,
0.03225940093398094,
-0.042200203984975815,
0.01465978566557169,
-0.22089141607284546,
-0.15098579227924347,
-0.05657092109322548,
0.020549237728118896,
-0.04332375153899193,
-0.019130412489175797,
-0.051331933587789536,
-0.062237150967121124,
0.03506845608353615,
0.016321947798132896,
-0.029764367267489433,
-0.021840373054146767,
0.09300640225410461,
-0.09325849264860153,
0.04610038176178932,
-0.04422605782747269,
0.022505128756165504,
-0.17693381011486053,
-0.04966472461819649,
-0.008055726997554302,
0.1211903914809227,
-0.054776813834905624,
0.1626196801662445,
-0.12385658919811249,
-0.07571378350257874,
-0.06774194538593292,
0.00436060968786478,
0.001326843979768455,
0.17948688566684723,
-0.20710377395153046,
-0.03987080976366997,
0.08935993909835815,
-0.08407191932201385,
-0.09721113741397858,
0.18528087437152863,
-0.0724976658821106,
0.2215643674135208,
0.1638076901435852,
0.14010953903198242,
0.12885424494743347,
-0.0038410339038819075,
-0.02291012555360794,
-0.024491455405950546,
-0.10344938188791275,
0.10356241464614868,
0.004324676934629679,
0.12271081656217575,
-0.11100832372903824,
-0.0003647025441750884,
0.026247475296258926,
0.02991677075624466,
-0.08846910297870636,
-0.056411262601614,
0.01188220176845789,
-0.057398732751607895,
0.16723282635211945,
0.016739849001169205,
0.015570483170449734,
-0.13437098264694214,
-0.12540072202682495,
-0.11311350017786026,
0.050857894122600555,
0.017212899401783943,
-0.009497486986219883,
-0.14813697338104248,
-0.0031794761307537556,
0.15048757195472717,
-0.002936186268925667,
-0.12713606655597687,
-0.032229289412498474,
-0.046717818826436996,
0.222809299826622,
0.11876564472913742,
0.11112648248672485,
0.049787405878305435,
-0.06798437982797623,
-0.05438975244760513,
-0.031296249479055405,
-0.011782198213040829,
0.015480843372642994,
-0.03666093200445175,
-0.1360694020986557,
0.056599125266075134,
-0.08175207674503326,
0.08444451540708542,
-0.12950660288333893,
0.012813957408070564,
0.13257035613059998,
0.10439161956310272,
0.030721046030521393,
0.06653620302677155,
0.008190984837710857,
0.014033723622560501,
-0.021897606551647186,
-0.03633173555135727,
0.057494573295116425,
-0.00010701848077587783,
-0.09669214487075806,
0.0500948503613472,
-0.03174157440662384,
0.0965762510895729,
0.11003132909536362,
-0.06073760986328125,
-0.18107521533966064,
0.08757670223712921,
-0.04513823240995407,
0.010485333390533924,
0.012546832673251629,
-0.001280726632103324,
0.20502430200576782,
-0.03389875963330269,
0.027417292818427086,
-0.0382252000272274,
-0.038159195333719254,
0.019806111231446266,
-0.10331263393163681,
-0.03450866416096687,
0.08413581550121307,
-0.1150171086192131,
-0.33889323472976685,
0.08665776252746582,
0.14000211656093597,
-0.028337888419628143,
0.15557777881622314,
-0.00999861303716898,
0.02242608740925789,
-0.005441433750092983,
-0.021778330206871033,
-0.03870059922337532,
-0.021655412390828133,
-0.00762598542496562,
-0.015356648713350296,
0.01171762403100729,
-0.036923397332429886,
-0.000938552781008184,
-0.0541064478456974,
-0.04395603388547897,
0.016878604888916016,
0.03571082651615143,
0.014906374737620354,
0.08263985812664032,
0.022325605154037476,
0.1374601423740387,
-0.001488604350015521,
-0.04399489238858223,
0.05422093719244003,
-0.008032234385609627,
-0.16132979094982147,
0.14075493812561035,
-0.10978367179632187,
-0.28336772322654724,
-0.002821685280650854,
-0.016464591026306152,
0.003112961770966649,
-0.011844729073345661,
0.08908279240131378,
-0.22174789011478424,
-0.02041292004287243,
-0.05709966644644737,
0.008189556188881397,
0.017336444929242134,
0.037308357656002045,
-0.009771298617124557,
0.014630931429564953,
-0.0026020631194114685,
-0.04616004601120949,
-0.011213935911655426,
-0.016884820535779,
-0.10075534135103226,
0.113853819668293,
-0.08117182552814484,
0.031078895553946495,
0.14436273276805878,
-0.05175367370247841,
0.009950521402060986,
-0.10168527066707611,
0.15290334820747375,
-0.09888911247253418,
0.07658543437719345,
0.11787690222263336,
-0.026011237874627113,
0.05990178510546684,
0.11185869574546814,
-0.017725778743624687,
-0.08163794130086899,
0.13255728781223297,
0.011969826184213161,
-0.045587196946144104,
-0.20100919902324677,
-0.0968412458896637,
-0.001978259300813079,
0.11779715120792389,
0.04434782639145851,
0.030004311352968216,
0.11725971102714539,
0.06639420986175537,
-0.009068786166608334,
-0.07647070288658142,
0.04861365258693695,
0.06314507871866226,
0.009187569841742516,
0.005727152805775404,
0.09486782550811768,
-0.0700918659567833,
-0.012973887845873833,
0.1467778980731964,
-0.1325371116399765,
0.03299469128251076,
-0.012471153400838375,
-0.01764248125255108,
0.033138975501060486,
0.2378629893064499,
0.058008622378110886,
0.05562368035316467,
-0.036628078669309616,
-0.032633308321237564,
-0.039790116250514984,
-0.016657859086990356,
-0.10548003762960434,
0.07149887084960938,
-0.02513006515800953,
-0.023989398032426834,
-0.06134253367781639,
0.032885972410440445,
0.11613938212394714,
0.20673489570617676,
0.08421902358531952,
-0.25571271777153015,
-0.170228511095047,
-0.02142494171857834,
-0.07696127891540527,
0.005550026893615723,
0.0725526437163353,
0.10112874209880829,
-0.1165926456451416,
0.07451054453849792,
-0.029145464301109314,
0.0790679082274437,
0.053303543478250504,
0.05585816875100136,
0.02094140835106373,
0.00014893470506649464,
-0.037390194833278656,
0.0818738341331482,
-0.2674173414707184,
0.16309067606925964,
-0.04100517928600311,
0.028387531638145447,
-0.05428558588027954,
-0.09671616554260254,
0.061691757291555405,
0.04884535074234009,
0.08672714978456497,
0.01686873659491539,
0.03732350841164589,
-0.03142431378364563,
-0.003452235134318471,
0.01809106580913067,
0.08252253383398056,
-0.03653579577803612,
0.06563844531774521,
-0.044556908309459686,
0.04589718207716942,
0.042864393442869186,
0.0056475261226296425,
-0.1632169932126999,
-0.03599445894360542,
-0.02085944078862667,
-0.00388320186175406,
0.07525882124900818,
-0.014126046560704708,
-0.0718344897031784,
0.0632006824016571,
0.1347436159849167,
0.005151896737515926,
-0.017022136598825455,
-0.14673106372356415,
0.11198750883340836,
-0.0391385443508625,
-0.09125307947397232,
-0.009082992561161518,
0.01351216621696949,
0.033044781535863876,
0.008810855448246002,
-0.1345779299736023,
0.09640970081090927,
-0.07903057336807251,
-0.13521449267864227,
-0.010092931799590588,
0.12257231771945953,
0.13893872499465942,
0.040605682879686356,
0.07305554300546646,
-0.03281240910291672,
-0.03858407214283943,
-0.09353005886077881,
0.058893050998449326,
-0.05976572260260582,
-0.06341791898012161,
0.0043518212623894215,
0.033037781715393066,
-0.17281536757946014,
-0.1565578430891037,
0.015272757969796658,
0.19893161952495575,
0.10435719788074493,
-0.09144868701696396,
0.10891757160425186,
0.07817872613668442,
0.0035969193559139967,
-0.27507278323173523,
-0.020064545795321465,
0.04830702394247055,
0.015443868935108185,
0.07309331744909286,
-0.08806271851062775,
0.09212519973516464,
-0.035280317068099976,
0.052875831723213196,
-0.1183423325419426,
-0.1429256945848465,
-0.08745541423559189,
0.13727614283561707,
0.0189875066280365,
0.21066148579120636,
-0.04347187280654907,
0.0019667320884764194,
-0.044380877166986465,
-0.05950571969151497,
0.17865459620952606,
-0.1956339031457901,
0.04961902275681496,
0.05438094586133957,
0.214377298951149,
0.019547220319509506,
0.04731345921754837,
0.02756267413496971,
-0.007300324738025665,
0.04663342982530594,
-0.13624568283557892,
-0.07485011965036392,
0.09574764966964722,
-0.009669885039329529,
0.036689724773168564,
0.012164338491857052,
0.0866749957203865,
-0.14008833467960358,
-0.007515625562518835,
-0.13019795715808868,
0.06839235126972198,
-0.021714793518185616,
-0.07458215206861496,
-0.07373210042715073,
0.0798405110836029,
0.08683372288942337,
-0.05733654275536537,
-0.013695769011974335,
-0.03859178349375725,
0.13846993446350098,
0.12118741869926453,
0.19152942299842834,
0.05900220572948456,
0.04175936430692673,
-0.034670375287532806,
-0.06952821463346481,
0.04769356921315193,
-0.18807534873485565,
0.020870018750429153,
0.0680607333779335,
-0.002150364685803652,
0.08903738111257553,
0.03629793971776962,
-0.08011551946401596,
0.04399535804986954,
0.10386013239622116,
-0.1898609846830368,
-0.06511889398097992,
-0.06429163366556168,
0.0526643805205822,
-0.041252825409173965,
0.019886454567313194,
0.1932385116815567,
-0.12732698023319244,
-0.03236597776412964,
0.00790793914347887,
0.0452803410589695,
-0.09739747643470764,
0.06911975145339966,
0.05221946910023689,
0.0022738713305443525,
-0.05458517745137215,
0.03067435882985592,
0.016338160261511803,
-0.056446224451065063,
0.0644168108701706,
0.13166113197803497,
-0.15550319850444794,
-0.09334512799978256,
0.03821466863155365,
0.05500608682632446,
-0.0665060356259346,
0.011343833059072495,
-0.00941879115998745,
-0.1226477101445198,
0.02817683480679989,
0.23851744830608368,
0.05508787930011749,
0.0805947482585907,
-0.06783190369606018,
-0.004531079437583685,
-0.009062249213457108,
0.045903291553258896,
0.2017117738723755,
-0.05156254395842552,
-0.15623483061790466,
0.12221348285675049,
-0.018699873238801956,
0.09763184189796448,
-0.07795091718435287,
0.006255471147596836,
-0.09282002598047256,
-0.0059342095628380775,
-0.13379782438278198,
-0.018159916624426842,
-0.030537504702806473,
0.002631204202771187,
0.017073629423975945,
-0.06012909859418869,
-0.032165296375751495,
-0.03163040429353714,
-0.056336309760808945,
0.017739731818437576,
0.040727321058511734,
0.07036641985177994,
-0.06847959756851196,
0.002031113486737013,
0.07526355981826782,
-0.004879999440163374,
0.13648657500743866,
0.10046308487653732,
-0.09751340746879578,
0.10373988002538681,
-0.23686367273330688,
-0.005671244114637375,
0.06740579009056091,
-0.03981019929051399,
0.024975579231977463,
-0.02056545950472355,
0.002985406666994095,
-0.012140944600105286,
0.013671748340129852,
0.04810625687241554,
0.17283384501934052,
-0.053665537387132645,
0.10189574956893921,
0.025540700182318687,
-0.0765426978468895,
-0.12395306676626205,
-0.029181523248553276,
0.028671035543084145,
0.06628081947565079,
0.1996874213218689,
-0.12184695899486542,
0.08231177181005478,
-0.1056574359536171,
0.0002979121927637607,
0.013141323812305927,
-0.04031350091099739,
-0.15582410991191864,
-0.05240136757493019,
0.04915161803364754,
-0.04592007398605347,
-0.010311664082109928,
0.029974928125739098,
-0.10562929511070251,
0.0605769120156765,
0.001063059433363378,
-0.05228623002767563,
0.022587362676858902,
0.03968821093440056,
-0.0035139350220561028,
-0.0695679783821106,
-0.07286892831325531,
-0.03211522847414017,
-0.014842740260064602,
-0.07803261280059814,
0.20119406282901764,
0.10073159635066986,
0.08740589022636414,
0.007718748413026333,
-0.03386653959751129,
0.07865837216377258,
0.06678864359855652,
-0.020939121022820473,
-0.07585768401622772,
-0.005048844963312149,
-0.044209908694028854,
0.12272460013628006,
0.21235601603984833,
-0.051653649657964706,
-0.0036380658857524395,
-0.08580654859542847,
-0.05142153799533844,
-0.04481826350092888,
-0.200025737285614,
-0.09834099560976028,
-0.11209873110055923,
0.017205113545060158,
-0.10877356678247452,
-0.0371769517660141,
0.012797643430531025,
0.07653152942657471,
-0.06528592109680176,
0.08033115416765213,
-0.09739790856838226,
-0.1211148053407669,
0.19623155891895294,
-0.0052859121933579445,
-0.036107100546360016,
0.022933464497327805,
-0.018887728452682495,
-0.05796179547905922,
0.09148462861776352,
-0.0018295975169166923,
0.009434396401047707,
-0.04146512970328331,
0.01223263144493103,
-0.08999349921941757,
-0.130598247051239,
0.02085820771753788,
0.0401027649641037,
-0.018111173063516617,
0.0028780284337699413,
0.049033891409635544,
0.010427691973745823,
0.0190680380910635,
0.24763059616088867,
-0.033434368669986725,
-0.05743516981601715,
-0.15798956155776978,
0.12652499973773956,
-0.038817744702100754,
0.010419078171253204,
-0.00004668037217925303,
-0.0849219337105751,
-0.0015345513820648193,
0.17113474011421204,
0.24390390515327454,
-0.07714328169822693,
0.032969869673252106,
-0.10294020175933838,
0.04939815774559975,
0.016036977991461754,
0.12343519181013107,
0.06088636443018913,
0.08646813780069351,
-0.09818143397569656,
0.046623822301626205,
-0.0560733862221241,
-0.016733963042497635,
0.0004261616850271821,
0.011414527893066406,
0.04609432443976402,
-0.010860965587198734,
-0.08754871785640717,
0.19115550816059113,
-0.1561056524515152,
-0.19397525489330292,
0.007003073580563068,
-0.10928956419229507,
-0.0787307620048523,
-0.008797450922429562,
0.025710662826895714,
0.08586735278367996,
0.1426704227924347,
-0.005377746652811766,
0.002573777223005891,
0.022444719448685646,
0.05479026585817337,
-0.08717551082372665,
0.029129818081855774,
0.1311614066362381,
-0.11304248124361038,
0.04555349051952362,
-0.04308686777949333,
0.06430339813232422,
0.11284255981445312,
-0.008502566255629063,
-0.05108147859573364,
0.052157074213027954,
0.031030217185616493,
0.026843035593628883,
0.000918483710847795,
0.09105638414621353,
0.017418328672647476,
0.07538025081157684,
0.0891391932964325,
-0.24604716897010803,
-0.0014869109727442265,
0.019281119108200073,
0.005675438791513443,
-0.08868338912725449,
0.15556898713111877,
-0.038564637303352356,
0.11583098024129868,
0.15986481308937073,
-0.1106000766158104,
0.024750402197241783,
-0.08883162587881088,
-0.02848377637565136,
0.006770382635295391,
-0.08529198169708252,
0.010655042715370655,
-0.14975692331790924,
-0.05001930147409439,
0.030506353825330734,
0.00995415449142456,
-0.1480487734079361,
0.016403740271925926,
-0.09488062560558319,
0.01901303604245186,
-0.07542853057384491,
0.17529179155826569,
0.03486116975545883,
0.00011221046588616446,
0.04051154479384422,
-0.0004300205619074404,
0.028136588633060455,
0.1381056010723114,
-0.07458612322807312,
-0.04942479729652405
] |
null | null |
transformers
|
# RuBERT for Sentiment Analysis of Product Reviews
This is a [DeepPavlov/rubert-base-cased-conversational](https://huggingface.co/DeepPavlov/rubert-base-cased-conversational) model trained on [RuReviews](https://github.com/sismetanin/rureviews).
## Labels
0: NEUTRAL
1: POSITIVE
2: NEGATIVE
## How to use
```python
import torch
from transformers import AutoModelForSequenceClassification
from transformers import BertTokenizerFast
tokenizer = BertTokenizerFast.from_pretrained('blanchefort/rubert-base-cased-sentiment-rurewiews')
model = AutoModelForSequenceClassification.from_pretrained('blanchefort/rubert-base-cased-sentiment-rurewiews', return_dict=True)
@torch.no_grad()
def predict(text):
inputs = tokenizer(text, max_length=512, padding=True, truncation=True, return_tensors='pt')
outputs = model(**inputs)
predicted = torch.nn.functional.softmax(outputs.logits, dim=1)
predicted = torch.argmax(predicted, dim=1).numpy()
return predicted
```
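A minimal usage sketch (the product reviews and the expected output are illustrative only; `label_names` mirrors the labels listed above):

```python
# Illustrative example: map predicted class indices back to human-readable labels.
label_names = {0: 'NEUTRAL', 1: 'POSITIVE', 2: 'NEGATIVE'}

reviews = [
    'Товар пришёл быстро, качество отличное!',  # 'Arrived quickly, great quality!'
    'Кофта расползлась после первой стирки.',   # 'The sweater fell apart after the first wash.'
]
print([label_names[int(i)] for i in predict(reviews)])  # e.g. ['POSITIVE', 'NEGATIVE']
```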
## Dataset used for model training
**[RuReviews](https://github.com/sismetanin/rureviews)**
> RuReviews: An Automatically Annotated Sentiment Analysis Dataset for Product Reviews in Russian.
|
{"language": ["ru"], "tags": ["sentiment", "text-classification"], "datasets": ["RuReviews"]}
|
text-classification
|
blanchefort/rubert-base-cased-sentiment-rurewiews
|
[
"transformers",
"pytorch",
"tf",
"jax",
"safetensors",
"bert",
"text-classification",
"sentiment",
"ru",
"dataset:RuReviews",
"autotrain_compatible",
"endpoints_compatible",
"has_space",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"ru"
] |
TAGS
#transformers #pytorch #tf #jax #safetensors #bert #text-classification #sentiment #ru #dataset-RuReviews #autotrain_compatible #endpoints_compatible #has_space #region-us
|
# RuBERT for Sentiment Analysis of Product Reviews
This is a DeepPavlov/rubert-base-cased-conversational model trained on RuReviews.
## Labels
0: NEUTRAL
1: POSITIVE
2: NEGATIVE
## How to use
## Dataset used for model training
RuReviews
> RuReviews: An Automatically Annotated Sentiment Analysis Dataset for Product Reviews in Russian.
|
[
"# RuBERT for Sentiment Analysis of Product Reviews\n\nThis is a DeepPavlov/rubert-base-cased-conversational model trained on RuReviews.",
"## Labels\n 0: NEUTRAL\n 1: POSITIVE\n 2: NEGATIVE",
"## How to use",
"## Dataset used for model training\n\nRuReviews\n\n> RuReviews: An Automatically Annotated Sentiment Analysis Dataset for Product Reviews in Russian."
] |
[
"TAGS\n#transformers #pytorch #tf #jax #safetensors #bert #text-classification #sentiment #ru #dataset-RuReviews #autotrain_compatible #endpoints_compatible #has_space #region-us \n",
"# RuBERT for Sentiment Analysis of Product Reviews\n\nThis is a DeepPavlov/rubert-base-cased-conversational model trained on RuReviews.",
"## Labels\n 0: NEUTRAL\n 1: POSITIVE\n 2: NEGATIVE",
"## How to use",
"## Dataset used for model training\n\nRuReviews\n\n> RuReviews: An Automatically Annotated Sentiment Analysis Dataset for Product Reviews in Russian."
] |
[
64,
41,
15,
4,
36
] |
[
"passage: TAGS\n#transformers #pytorch #tf #jax #safetensors #bert #text-classification #sentiment #ru #dataset-RuReviews #autotrain_compatible #endpoints_compatible #has_space #region-us \n# RuBERT for Sentiment Analysis of Product Reviews\n\nThis is a DeepPavlov/rubert-base-cased-conversational model trained on RuReviews.## Labels\n 0: NEUTRAL\n 1: POSITIVE\n 2: NEGATIVE## How to use## Dataset used for model training\n\nRuReviews\n\n> RuReviews: An Automatically Annotated Sentiment Analysis Dataset for Product Reviews in Russian."
] |
[
0.01909596100449562,
-0.04279862344264984,
-0.0030845000874251127,
0.046009913086891174,
0.16973072290420532,
0.03818795084953308,
0.08680471777915955,
0.07083891332149506,
0.02048339881002903,
-0.011602367274463177,
0.04581345245242119,
0.03429652377963066,
0.018358655273914337,
0.17376966774463654,
-0.07073284685611725,
-0.2346014380455017,
0.022707553580403328,
-0.038033608347177505,
0.13599446415901184,
0.16163000464439392,
0.19269712269306183,
-0.07540205121040344,
0.1244019940495491,
-0.0049293553456664085,
-0.12556597590446472,
0.0058530583046376705,
0.09489788860082626,
-0.07623855024576187,
0.120326928794384,
-0.0029499181546270847,
0.11099187284708023,
0.059822916984558105,
0.019338196143507957,
-0.1551407426595688,
0.06105465814471245,
0.029354099184274673,
-0.040825676172971725,
0.0441424623131752,
0.0983818769454956,
-0.11251191794872284,
0.2596767842769623,
-0.020957985892891884,
0.05061078816652298,
0.10009501874446869,
-0.1092114970088005,
-0.16656796634197235,
-0.07674983888864517,
0.18426407873630524,
0.02601286582648754,
0.11625684052705765,
-0.06954175233840942,
0.14573600888252258,
-0.1380101442337036,
0.09444472938776016,
0.21993640065193176,
-0.27775391936302185,
-0.09177659451961517,
0.10674764215946198,
-0.07233962416648865,
0.03496703505516052,
-0.1158839613199234,
0.03270931914448738,
0.026371818035840988,
0.013711526989936829,
0.02481597103178501,
-0.059893541038036346,
-0.032828692346811295,
-0.013466141186654568,
-0.09015598148107529,
0.03804857283830643,
0.18943114578723907,
0.07054056972265244,
-0.04832359030842781,
-0.11705750226974487,
-0.015925783663988113,
-0.12551277875900269,
-0.03487133979797363,
-0.09939223527908325,
-0.05031050741672516,
-0.009791960939764977,
-0.12686321139335632,
0.01927943341434002,
-0.12104349583387375,
-0.006189131643623114,
-0.03591662645339966,
0.14043350517749786,
-0.02689027227461338,
0.034805841743946075,
0.0023557059466838837,
0.04521852359175682,
-0.05811959505081177,
-0.07802260667085648,
-0.06369350105524063,
-0.1473434716463089,
-0.03162062540650368,
-0.03558493033051491,
-0.08912926912307739,
-0.02279437519609928,
0.06170552596449852,
0.07280126214027405,
0.011530191637575626,
0.010007530450820923,
0.02582765370607376,
-0.006653832737356424,
0.018601352348923683,
0.17544066905975342,
0.03298027813434601,
-0.08988382667303085,
-0.06811250001192093,
-0.031911782920360565,
0.0019215968204662204,
0.00881605502218008,
-0.03692340850830078,
-0.062492016702890396,
0.10293249040842056,
0.02365230768918991,
-0.13274946808815002,
0.09492646157741547,
-0.12444684654474258,
-0.0034134623128920794,
-0.06961147487163544,
-0.03223973512649536,
-0.005550547502934933,
0.04973301291465759,
-0.06347604840993881,
0.10305887460708618,
-0.06904663890600204,
-0.0004915834288112819,
-0.004084364511072636,
0.07970239967107773,
0.0013613009359687567,
-0.007898879237473011,
-0.05114102363586426,
-0.06725745648145676,
0.04066082462668419,
-0.0922463908791542,
0.05899816006422043,
-0.20760901272296906,
-0.1268680989742279,
-0.054044079035520554,
0.0201832577586174,
-0.0907241627573967,
-0.016743045300245285,
-0.007320082280784845,
-0.038473304361104965,
-0.011377308517694473,
-0.005022083874791861,
-0.043690964579582214,
-0.05201646313071251,
0.06516526639461517,
-0.045933596789836884,
0.094869464635849,
0.05044379085302353,
0.02786877565085888,
-0.18361589312553406,
-0.07331061363220215,
0.0022376065608114004,
0.06267049163579941,
-0.11689455807209015,
0.11497002840042114,
-0.08624877035617828,
-0.10991128534078598,
-0.038104064762592316,
0.007053051143884659,
-0.06014684587717056,
0.13672465085983276,
-0.22792990505695343,
-0.05342205986380577,
0.09133446216583252,
-0.14880618453025818,
-0.05948375537991524,
0.14249545335769653,
-0.05979530140757561,
0.19516637921333313,
0.12234541773796082,
0.14452630281448364,
0.026172323152422905,
-0.012252598069608212,
-0.060648489743471146,
-0.034163229167461395,
-0.12074264883995056,
0.09598717093467712,
0.02653455175459385,
0.12971214950084686,
-0.05779898539185524,
0.05233240872621536,
0.015373371541500092,
-0.03271607309579849,
-0.06203598156571388,
-0.09690122306346893,
-0.015154181979596615,
-0.058451782912015915,
0.18706965446472168,
0.0263688862323761,
0.009985841810703278,
-0.14444385468959808,
-0.1411522477865219,
-0.18226976692676544,
0.06276609003543854,
0.020270735025405884,
-0.02121923863887787,
-0.1144593209028244,
0.13669732213020325,
0.1669774055480957,
0.009639125317335129,
-0.14680947363376617,
-0.012507534585893154,
-0.012815906666219234,
0.1083000898361206,
0.05100144073367119,
0.15663841366767883,
0.03500751778483391,
-0.11875718086957932,
-0.0666397362947464,
-0.03490826115012169,
-0.08726705610752106,
0.0064361379481852055,
0.03561578691005707,
-0.18405809998512268,
0.05421094968914986,
-0.06901545822620392,
0.13342948257923126,
-0.14098234474658966,
-0.025701934471726418,
0.16099639236927032,
0.07012100517749786,
0.08167380094528198,
0.04959859699010849,
-0.02723730355501175,
0.04063284769654274,
-0.02268243208527565,
-0.0034520854242146015,
0.098507359623909,
-0.043140310794115067,
-0.08240014314651489,
0.06188952922821045,
0.000007475207439711085,
0.12372405081987381,
0.11057915538549423,
-0.061305977404117584,
-0.18718139827251434,
0.01816718466579914,
-0.02546386979520321,
0.04670272022485733,
-0.026913771405816078,
0.10641457885503769,
0.15269100666046143,
0.013727220706641674,
-0.012019324116408825,
-0.015192420221865177,
0.030202405527234077,
0.026104379445314407,
-0.10561753809452057,
-0.05721108615398407,
0.09259369969367981,
-0.10325849056243896,
-0.27174103260040283,
0.10290057957172394,
0.16753028333187103,
-0.02424006536602974,
0.14131836593151093,
0.0033643164206296206,
0.016043981537222862,
0.01764044724404812,
-0.04405703768134117,
-0.0448923297226429,
0.09040209650993347,
-0.07824084907770157,
-0.036105673760175705,
0.0626855194568634,
-0.08386141061782837,
-0.028230812400579453,
-0.065299853682518,
-0.013566252775490284,
0.007008039858192205,
-0.013920028693974018,
0.0007317409035749733,
0.15039534866809845,
0.030567752197384834,
0.10977524518966675,
0.027282528579235077,
-0.05312936007976532,
0.05384700000286102,
-0.020807165652513504,
-0.08204754441976547,
0.12111083418130875,
-0.04607276991009712,
-0.26875075697898865,
-0.07839751243591309,
-0.03947462886571884,
-0.08641813695430756,
-0.013714798726141453,
0.07011066377162933,
-0.21121151745319366,
-0.07848495990037918,
-0.021325329318642616,
0.08765366673469543,
0.07873431593179703,
-0.003371364437043667,
-0.0694793090224266,
0.06079574301838875,
0.013587954454123974,
-0.051768023520708084,
-0.05649719014763832,
-0.04081811383366585,
-0.09830547869205475,
0.12434867024421692,
-0.03111603669822216,
0.06075344607234001,
0.07960470020771027,
-0.04835200682282448,
0.022136840969324112,
-0.09643616527318954,
0.14645519852638245,
-0.11957468837499619,
-0.035735905170440674,
0.18628880381584167,
-0.037640638649463654,
0.015542453154921532,
0.21510665118694305,
-0.017532823607325554,
-0.07332280278205872,
0.098973348736763,
0.016256151720881462,
-0.06268526613712311,
-0.19612109661102295,
-0.09609745442867279,
-0.01917843706905842,
0.07729215919971466,
-0.03610522300004959,
0.03562058508396149,
0.036998093128204346,
0.09815355390310287,
0.003218315774574876,
-0.1676959991455078,
0.015693239867687225,
0.09735532104969025,
0.11704286932945251,
-0.035862091928720474,
0.12404762953519821,
-0.0527050606906414,
0.0023873099125921726,
0.13607707619667053,
-0.13470962643623352,
0.1457475870847702,
-0.01730281487107277,
0.04100672900676727,
0.039930183440446854,
0.21250221133232117,
0.09608157724142075,
0.027760274708271027,
0.026498472318053246,
-0.045530665665864944,
-0.008521516807377338,
-0.006330732721835375,
-0.1483297497034073,
0.12871302664279938,
-0.03576245903968811,
0.0033724268432706594,
-0.06902379542589188,
-0.011384326964616776,
0.13669048249721527,
0.25267913937568665,
0.04590652510523796,
-0.2224614918231964,
-0.17470861971378326,
-0.002589399227872491,
-0.013459376990795135,
0.0014078057138249278,
0.031952451914548874,
-0.03564124181866646,
-0.15874963998794556,
0.08165698498487473,
-0.06505491584539413,
0.07721502333879471,
0.0013528098352253437,
0.0731067806482315,
0.03160455450415611,
-0.030891316011548042,
-0.011617423966526985,
0.08489049971103668,
-0.24169348180294037,
0.21158026158809662,
-0.04428578540682793,
0.019108736887574196,
-0.06965041160583496,
-0.08464308083057404,
0.07559192925691605,
0.06575935333967209,
0.08084627240896225,
-0.003200691659003496,
-0.029994765296578407,
-0.10887790471315384,
0.06317777931690216,
0.013954454101622105,
0.08301471918821335,
0.0173270832747221,
0.06110767647624016,
-0.07811637967824936,
0.04029081016778946,
0.005517605692148209,
0.12619861960411072,
-0.16071970760822296,
-0.047765057533979416,
0.002944035455584526,
0.0031278275419026613,
0.12299671024084091,
-0.06201780214905739,
-0.10615310817956924,
0.038116730749607086,
0.16322168707847595,
0.11905521899461746,
-0.006782611832022667,
-0.12325862795114517,
0.08872967213392258,
-0.03654351457953453,
-0.08964165300130844,
0.01244890782982111,
-0.01586247980594635,
0.10606635361909866,
0.05950678884983063,
-0.09860321134328842,
0.05888668820261955,
-0.055175554007291794,
-0.18822315335273743,
0.03732050955295563,
0.09078913182020187,
0.08887485414743423,
0.04143589735031128,
0.041327960789203644,
0.006587371230125427,
-0.04104981943964958,
-0.047165192663669586,
0.062247343361377716,
0.03447297215461731,
-0.09829649329185486,
0.05265814810991287,
0.015934234485030174,
-0.2357962429523468,
-0.15679143369197845,
-0.0330362468957901,
0.15833483636379242,
0.11424865573644638,
-0.11930598318576813,
0.144158273935318,
-0.014924702234566212,
0.03737032786011696,
-0.232086643576622,
0.0035663708113133907,
0.05326130986213684,
0.055466428399086,
0.1078733429312706,
-0.039404984563589096,
0.08821380138397217,
-0.07083884626626968,
-0.024275533854961395,
-0.09435070306062698,
-0.03394189104437828,
-0.08685149252414703,
0.17568370699882507,
0.0807495266199112,
0.22289463877677917,
-0.02518506534397602,
0.02407011203467846,
-0.015306132845580578,
-0.08576059341430664,
0.09097587317228317,
-0.12071029096841812,
0.07038721442222595,
0.007585871499031782,
0.26335084438323975,
0.057200681418180466,
0.03971308842301369,
0.04829171299934387,
-0.04330585524439812,
0.06821385771036148,
-0.12001629173755646,
-0.04101733863353729,
-0.012947717681527138,
-0.002146228449419141,
0.00850869994610548,
0.01365711446851492,
0.04373073950409889,
-0.009371848776936531,
-0.05175623670220375,
-0.08451142907142639,
0.11035550385713577,
-0.01808159425854683,
-0.09741003066301346,
-0.0840623751282692,
0.11609910428524017,
0.023618139326572418,
-0.06736738234758377,
0.1066264733672142,
-0.08573652803897858,
0.14170174300670624,
0.10891073942184448,
0.1399415135383606,
0.06937536597251892,
0.0004531969898380339,
-0.024975256994366646,
-0.06007517874240875,
0.04135765880346298,
-0.10602115094661713,
0.02566472440958023,
0.0583544559776783,
-0.04317718371748924,
0.10718978196382523,
0.04688268527388573,
-0.08631221204996109,
0.03298911452293396,
0.10968806594610214,
-0.1485910415649414,
-0.12503372132778168,
-0.0712796002626419,
0.006850140634924173,
-0.06935280561447144,
-0.008980215527117252,
0.18486414849758148,
-0.10756053030490875,
-0.03254861384630203,
-0.013546517118811607,
0.03554452955722809,
-0.043630920350551605,
0.028726503252983093,
0.0008539793780073524,
0.020243190228939056,
-0.08604985475540161,
0.014129431918263435,
-0.0031166409607976675,
-0.14223580062389374,
0.015486808493733406,
0.01948358118534088,
-0.16861100494861603,
-0.11130277067422867,
0.023867612704634666,
0.15321628749370575,
-0.13579033315181732,
-0.060859404504299164,
-0.05558286979794502,
-0.19765503704547882,
0.020488182082772255,
0.12275464087724686,
0.14161884784698486,
0.04870954528450966,
-0.05201077461242676,
-0.015964651480317116,
0.046132609248161316,
0.10350613296031952,
0.18616697192192078,
-0.04480809345841408,
-0.1784822940826416,
-0.04441706836223602,
-0.017215095460414886,
0.07077741622924805,
-0.09718802571296692,
0.007567858323454857,
-0.08590252697467804,
-0.042694028466939926,
-0.19279694557189941,
-0.0283402930945158,
-0.04686276614665985,
0.04248018190264702,
0.008574252016842365,
-0.024675244465470314,
-0.03537391498684883,
-0.0029456615447998047,
-0.0949653685092926,
0.01660640351474285,
0.009943199343979359,
0.133184552192688,
-0.0799974873661995,
0.02069680206477642,
0.051277898252010345,
-0.001283374847844243,
0.1628943234682083,
0.1390777826309204,
-0.054139845073223114,
0.132205069065094,
-0.19926731288433075,
0.05116503685712814,
0.05957670882344246,
-0.009285171516239643,
0.036865878850221634,
-0.06888845562934875,
0.010399706661701202,
-0.015003579668700695,
-0.0024062637239694595,
0.055995918810367584,
0.15596632659435272,
-0.07581081241369247,
0.07184183597564697,
-0.0015020972350612283,
-0.07266339659690857,
-0.0776347666978836,
0.007291131187230349,
0.03477064520120621,
0.1302216351032257,
0.21266014873981476,
-0.12807759642601013,
0.08077635616064072,
-0.026179222390055656,
0.028463684022426605,
-0.028666749596595764,
-0.07658309489488602,
-0.2188999354839325,
-0.0779753103852272,
0.024010661989450455,
-0.053832244127988815,
0.10979700088500977,
0.12352296710014343,
0.03683297336101532,
0.045896977186203,
-0.00036668384564109147,
0.017862526699900627,
-0.008953872136771679,
0.033374518156051636,
0.028617607429623604,
-0.03350212052464485,
-0.009783553890883923,
-0.005604594945907593,
-0.011449100449681282,
-0.06249605864286423,
0.18889938294887543,
0.19054171442985535,
0.07211478799581528,
0.03294198587536812,
-0.0376351960003376,
-0.012510765343904495,
0.1149529442191124,
-0.02131655625998974,
-0.09897173941135406,
0.022237401455640793,
-0.06626006215810776,
0.2211637645959854,
0.18708305060863495,
-0.0778861939907074,
0.04321907088160515,
-0.11395927518606186,
-0.05376432090997696,
-0.059374816715717316,
-0.1855081170797348,
-0.1335911750793457,
-0.18904124200344086,
0.046140819787979126,
-0.08593328297138214,
-0.05430986359715462,
-0.029070191085338593,
0.03841288387775421,
-0.09473070502281189,
0.074925996363163,
-0.17524586617946625,
-0.08863675594329834,
0.18084704875946045,
0.021566787734627724,
-0.09477508068084717,
-0.0018262192606925964,
0.00950950663536787,
-0.09203103184700012,
0.04776192083954811,
-0.019238267093896866,
-0.008232924155890942,
-0.06030280143022537,
-0.038856782019138336,
-0.08753646165132523,
-0.0919526070356369,
0.02750963717699051,
0.03491726517677307,
0.0015358689706772566,
0.0766892284154892,
0.01340468879789114,
0.03431175276637077,
0.012587825767695904,
0.3163726031780243,
-0.044734325259923935,
0.011957264505326748,
-0.12771394848823547,
0.12825194001197815,
-0.011227869428694248,
0.014709529466927052,
-0.00538463843986392,
-0.09356451779603958,
-0.016042063012719154,
0.15074528753757477,
0.2878788411617279,
-0.02470250055193901,
0.04653436318039894,
-0.08390259742736816,
0.06258079409599304,
-0.04252298176288605,
0.09157765656709671,
0.03573456034064293,
0.04081976041197777,
-0.06792666018009186,
0.09324761480093002,
-0.06170511618256569,
0.002059442922472954,
-0.00046278611989691854,
-0.00292415963485837,
0.12643498182296753,
-0.06724483519792557,
-0.1277264654636383,
0.17645718157291412,
-0.17160317301750183,
-0.1263885349035263,
0.08123678714036942,
-0.15172824263572693,
-0.11621557176113129,
-0.029714493080973625,
0.10705086588859558,
0.10014641284942627,
0.14524604380130768,
-0.027797147631645203,
-0.008267938159406185,
0.09848343580961227,
0.05390087142586708,
-0.11474159359931946,
-0.006790789309889078,
0.14172542095184326,
-0.05516757071018219,
0.04933391511440277,
-0.06868510693311691,
0.03197646513581276,
0.13462728261947632,
-0.005024223122745752,
-0.044194698333740234,
0.07892083376646042,
0.03943885117769241,
-0.10699889808893204,
0.005424229893833399,
0.16662035882472992,
-0.004440232180058956,
0.04590193182229996,
0.09934759885072708,
-0.26621773838996887,
0.02934873290359974,
-0.06722232699394226,
-0.013956158421933651,
-0.08858734369277954,
0.1973341703414917,
-0.09522280097007751,
0.06754899024963379,
0.2125224471092224,
-0.060862068086862564,
-0.015042050741612911,
-0.05858493223786354,
0.04019521549344063,
0.06526153534650803,
-0.06817497313022614,
0.04603058844804764,
-0.09810001403093338,
-0.04290511831641197,
0.07978151738643646,
-0.07046565413475037,
-0.18165013194084167,
-0.015879541635513306,
-0.1047896072268486,
0.0680401548743248,
-0.044817011803388596,
0.13139624893665314,
-0.012389212846755981,
0.005909129045903683,
0.056409623473882675,
-0.08493561297655106,
0.035143446177244186,
0.134639710187912,
-0.10440575331449509,
-0.08133921027183533
] |
null | null |
transformers
|
# RuBERT for Sentiment Analysis
This is a [DeepPavlov/rubert-base-cased-conversational](https://huggingface.co/DeepPavlov/rubert-base-cased-conversational) model trained on [RuSentiment](http://text-machine.cs.uml.edu/projects/rusentiment/).
## Labels
0: NEUTRAL
1: POSITIVE
2: NEGATIVE
## How to use
```python
import torch
from transformers import AutoModelForSequenceClassification
from transformers import BertTokenizerFast
tokenizer = BertTokenizerFast.from_pretrained('blanchefort/rubert-base-cased-sentiment-rusentiment')
model = AutoModelForSequenceClassification.from_pretrained('blanchefort/rubert-base-cased-sentiment-rusentiment', return_dict=True)
@torch.no_grad()
def predict(text):
    # Tokenize, run the model, and return the predicted label id(s):
    # 0 = NEUTRAL, 1 = POSITIVE, 2 = NEGATIVE
    inputs = tokenizer(text, max_length=512, padding=True, truncation=True, return_tensors='pt')
    outputs = model(**inputs)
    predicted = torch.nn.functional.softmax(outputs.logits, dim=1)
    predicted = torch.argmax(predicted, dim=1).numpy()
    return predicted
```
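As a quick sanity check, the `predict` helper above can be applied to a list of sentences; the sentences below are arbitrary examples, and the index-to-label mapping follows the Labels section:
```python
label_names = ['NEUTRAL', 'POSITIVE', 'NEGATIVE']  # index order from the Labels section

examples = ['Какой чудесный день!', 'Мне всё равно.', 'Это просто ужасно.']
for sentence, label_id in zip(examples, predict(examples)):
    print(f'{sentence} -> {label_names[label_id]}')
```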
## Dataset used for model training
**[RuSentiment](http://text-machine.cs.uml.edu/projects/rusentiment/)**
> A. Rogers A. Romanov A. Rumshisky S. Volkova M. Gronas A. Gribov RuSentiment: An Enriched Sentiment Analysis Dataset for Social Media in Russian. Proceedings of COLING 2018.
|
{"language": ["ru"], "tags": ["sentiment", "text-classification"], "datasets": ["RuSentiment"]}
|
text-classification
|
blanchefort/rubert-base-cased-sentiment-rusentiment
|
[
"transformers",
"pytorch",
"tf",
"jax",
"safetensors",
"bert",
"text-classification",
"sentiment",
"ru",
"dataset:RuSentiment",
"autotrain_compatible",
"endpoints_compatible",
"has_space",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"ru"
] |
TAGS
#transformers #pytorch #tf #jax #safetensors #bert #text-classification #sentiment #ru #dataset-RuSentiment #autotrain_compatible #endpoints_compatible #has_space #region-us
|
# RuBERT for Sentiment Analysis
This is a DeepPavlov/rubert-base-cased-conversational model trained on RuSentiment.
## Labels
0: NEUTRAL
1: POSITIVE
2: NEGATIVE
## How to use
## Dataset used for model training
RuSentiment
> A. Rogers A. Romanov A. Rumshisky S. Volkova M. Gronas A. Gribov RuSentiment: An Enriched Sentiment Analysis Dataset for Social Media in Russian. Proceedings of COLING 2018.
|
[
"# RuBERT for Sentiment Analysis\n\nThis is a DeepPavlov/rubert-base-cased-conversational model trained on RuSentiment.",
"## Labels\n 0: NEUTRAL\n 1: POSITIVE\n 2: NEGATIVE",
"## How to use",
"## Dataset used for model training\n\nRuSentiment\n\n> A. Rogers A. Romanov A. Rumshisky S. Volkova M. Gronas A. Gribov RuSentiment: An Enriched Sentiment Analysis Dataset for Social Media in Russian. Proceedings of COLING 2018."
] |
[
"TAGS\n#transformers #pytorch #tf #jax #safetensors #bert #text-classification #sentiment #ru #dataset-RuSentiment #autotrain_compatible #endpoints_compatible #has_space #region-us \n",
"# RuBERT for Sentiment Analysis\n\nThis is a DeepPavlov/rubert-base-cased-conversational model trained on RuSentiment.",
"## Labels\n 0: NEUTRAL\n 1: POSITIVE\n 2: NEGATIVE",
"## How to use",
"## Dataset used for model training\n\nRuSentiment\n\n> A. Rogers A. Romanov A. Rumshisky S. Volkova M. Gronas A. Gribov RuSentiment: An Enriched Sentiment Analysis Dataset for Social Media in Russian. Proceedings of COLING 2018."
] |
[
64,
37,
15,
4,
65
] |
[
"passage: TAGS\n#transformers #pytorch #tf #jax #safetensors #bert #text-classification #sentiment #ru #dataset-RuSentiment #autotrain_compatible #endpoints_compatible #has_space #region-us \n# RuBERT for Sentiment Analysis\n\nThis is a DeepPavlov/rubert-base-cased-conversational model trained on RuSentiment.## Labels\n 0: NEUTRAL\n 1: POSITIVE\n 2: NEGATIVE## How to use## Dataset used for model training\n\nRuSentiment\n\n> A. Rogers A. Romanov A. Rumshisky S. Volkova M. Gronas A. Gribov RuSentiment: An Enriched Sentiment Analysis Dataset for Social Media in Russian. Proceedings of COLING 2018."
] |
[
0.006229622755199671,
0.02619825303554535,
-0.004815291613340378,
0.011574309319257736,
0.13599705696105957,
-0.012778791598975658,
0.13296756148338318,
0.04113146290183067,
0.02868250012397766,
0.027296418324112892,
0.12344759702682495,
0.007844585925340652,
-0.01574895903468132,
0.14844882488250732,
-0.05664916709065437,
-0.22085611522197723,
0.03696659207344055,
-0.05547572672367096,
0.010814458131790161,
0.12199821323156357,
0.1822632998228073,
-0.0757126733660698,
0.06644799560308456,
-0.02840319275856018,
-0.09145986288785934,
0.03206548094749451,
0.03356045484542847,
-0.055689841508865356,
0.1108686551451683,
0.02346929907798767,
0.06832683831453323,
0.0701952651143074,
0.04955281317234039,
-0.11135091632604599,
0.04644738510251045,
0.00023735004651825875,
-0.09412803500890732,
0.03552282229065895,
0.12774373590946198,
-0.14945796132087708,
0.2844430208206177,
-0.13820400834083557,
0.033286888152360916,
0.03689717873930931,
-0.16196362674236298,
-0.05547553673386574,
-0.0911451205611229,
0.11985254287719727,
0.06418949365615845,
0.11712724715471268,
-0.08252093940973282,
0.0822005495429039,
-0.09745899587869644,
0.0851658284664154,
0.13100919127464294,
-0.2773370146751404,
-0.09610242396593094,
0.10806107521057129,
-0.029442673549056053,
0.07418317347764969,
-0.08594018220901489,
0.0871286690235138,
0.02949591539800167,
-0.0034338354598730803,
0.050911203026771545,
-0.0740273967385292,
0.03158947452902794,
0.021621352061629295,
-0.12872865796089172,
0.006124636624008417,
0.23580384254455566,
0.06695278733968735,
0.023843025788664818,
-0.08929958939552307,
-0.00657116062939167,
-0.052421245723962784,
-0.006372328847646713,
-0.053405433893203735,
-0.04607968404889107,
-0.010005715303122997,
-0.07088270783424377,
-0.01721133477985859,
-0.08863747119903564,
0.010412059724330902,
-0.03755726292729378,
0.19065485894680023,
-0.038451626896858215,
0.020713286474347115,
-0.012899789027869701,
0.018199050799012184,
-0.013153796084225178,
-0.06726576387882233,
0.05981883406639099,
-0.11064323782920837,
-0.059556376188993454,
-0.008477064780890942,
-0.09905800223350525,
-0.008038698695600033,
0.0060644447803497314,
0.07394473999738693,
0.003988262731581926,
0.0296791885048151,
0.06488067656755447,
0.0062189712189137936,
0.0722908303141594,
0.0730743482708931,
-0.029687562957406044,
-0.052244883030653,
-0.052462268620729446,
-0.0284732673317194,
-0.016571292653679848,
0.005254173185676336,
-0.07730606943368912,
-0.034258853644132614,
0.0569758303463459,
0.036624785512685776,
-0.049567773938179016,
0.06878911703824997,
-0.09687548130750656,
0.014045272022485733,
-0.09841673821210861,
-0.02180529572069645,
0.004170675296336412,
0.029240652918815613,
-0.07258477061986923,
0.033325061202049255,
-0.050489503890275955,
0.027189141139388084,
0.02408703975379467,
0.06549298763275146,
0.006066668312996626,
0.03457286208868027,
-0.006563947070389986,
-0.10666372627019882,
0.050919175148010254,
-0.15021224319934845,
0.04350108280777931,
-0.20776112377643585,
-0.1226091980934143,
-0.08798635005950928,
0.010207644663751125,
-0.09780827164649963,
0.0012629929697141051,
-0.0940064862370491,
-0.07062643021345139,
0.018249139189720154,
-0.01686290092766285,
0.005287972744554281,
-0.05190984532237053,
0.020698463544249535,
-0.07450196146965027,
0.11106184124946594,
0.10113266855478287,
0.023981144651770592,
-0.12270719558000565,
-0.047681767493486404,
0.03107476234436035,
0.14414140582084656,
-0.1271962970495224,
0.06886967271566391,
-0.06493449956178665,
-0.05618754029273987,
-0.02875409834086895,
0.02028081752359867,
0.0045126029290258884,
0.23562674224376678,
-0.22135664522647858,
-0.05984607711434364,
0.11854967474937439,
-0.06882094591856003,
0.006428766530007124,
0.14701025187969208,
-0.07581935077905655,
0.11161322146654129,
0.1312820315361023,
0.13898657262325287,
-0.02139049954712391,
0.03956196457147598,
-0.05011007562279701,
-0.06512352079153061,
-0.06289827823638916,
0.13699112832546234,
0.06532899290323257,
0.12919923663139343,
-0.03911896422505379,
-0.0018677696352824569,
0.029890114441514015,
-0.017250383272767067,
-0.07782896608114243,
-0.09240510314702988,
0.03223947435617447,
-0.04687545821070671,
0.21528033912181854,
0.02057620696723461,
0.01795879192650318,
-0.11713793128728867,
-0.12269667536020279,
-0.1976177841424942,
0.06617651134729385,
0.024523742496967316,
0.020841466262936592,
-0.12341656535863876,
0.06873733550310135,
0.071995310485363,
0.024625560268759727,
-0.15722353756427765,
0.04094957187771797,
-0.05196945369243622,
0.18273574113845825,
0.08132202178239822,
0.16103801131248474,
0.053092360496520996,
-0.08483728021383286,
-0.07626453787088394,
-0.018935004249215126,
-0.04961080849170685,
0.009694564156234264,
-0.0075723170302808285,
-0.1994420439004898,
0.06233528256416321,
-0.08440721780061722,
0.174354687333107,
-0.17653228342533112,
0.0015709762228652835,
0.10957818478345871,
0.10121259838342667,
0.010312533937394619,
0.041938070207834244,
-0.00759179936721921,
0.02709910273551941,
-0.03261566534638405,
-0.002686417894437909,
0.10304972529411316,
-0.036886487156152725,
-0.12715893983840942,
0.09223871678113937,
-0.014799215830862522,
0.053060729056596756,
0.0833447054028511,
-0.09359908103942871,
-0.12728045880794525,
0.022511212155222893,
-0.045196592807769775,
0.056791484355926514,
0.048850636929273605,
0.08141165226697922,
0.17637531459331512,
-0.012242592871189117,
0.01837972365319729,
-0.012862928211688995,
0.0087523078545928,
0.007836814038455486,
-0.08664894849061966,
-0.09530633687973022,
0.17914478480815887,
-0.08251231163740158,
-0.25814905762672424,
0.1107456162571907,
0.09467655420303345,
-0.005406382959336042,
0.17866122722625732,
0.015894317999482155,
0.015261736698448658,
-0.03757775202393532,
-0.06828202307224274,
-0.053896937519311905,
0.06857968121767044,
-0.050810884684324265,
-0.08073491603136063,
0.013729932717978954,
-0.045974571257829666,
-0.005459636449813843,
-0.0753619447350502,
-0.06185274198651314,
0.01490712258964777,
0.0068642920814454556,
-0.007125355303287506,
0.09241051226854324,
0.013177434913814068,
0.12864908576011658,
0.038284461945295334,
-0.03913236781954765,
0.03401852026581764,
-0.041452571749687195,
-0.1207318902015686,
0.08627000451087952,
-0.08644145727157593,
-0.21774792671203613,
0.017893822863698006,
-0.0512392558157444,
-0.06381289660930634,
0.02301689051091671,
0.04959658905863762,
-0.21877968311309814,
-0.022144466638565063,
-0.016996625810861588,
0.06608019024133682,
-0.016151374205946922,
-0.011774188838899136,
0.017096584662795067,
0.04466906189918518,
-0.013828381896018982,
-0.011259373277425766,
-0.044077713042497635,
-0.08299608528614044,
-0.061412665992975235,
0.11153239011764526,
-0.026575440540909767,
0.06321194767951965,
0.04166854918003082,
0.0015522720059379935,
-0.005054021254181862,
-0.09201004356145859,
0.14419807493686676,
-0.12707285583019257,
0.009197771549224854,
0.08794010430574417,
-0.038230907171964645,
0.022147109732031822,
0.12205374240875244,
-0.00598510866984725,
-0.054394375532865524,
0.07725562155246735,
0.004135661292821169,
-0.05518600717186928,
-0.2096731811761856,
-0.2023666948080063,
-0.02985573373734951,
0.10247417539358139,
0.026580074802041054,
-0.002866750583052635,
0.014169176109135151,
0.06079568341374397,
-0.029155803844332695,
-0.13514220714569092,
0.06264275312423706,
0.0872216746211052,
0.11121302098035812,
-0.01051801722496748,
0.12590883672237396,
-0.057166825979948044,
-0.009937525726854801,
0.10972187668085098,
-0.14858193695545197,
0.1366914063692093,
0.050650328397750854,
-0.024999013170599937,
0.04054456949234009,
0.12730632722377777,
0.03903711587190628,
0.012454033829271793,
0.06857442110776901,
-0.040779564529657364,
-0.035356584936380386,
-0.03128296136856079,
-0.09294735640287399,
0.12827427685260773,
0.031964246183633804,
-0.0758088007569313,
-0.08566951751708984,
-0.01834527961909771,
0.1379537135362625,
0.16273371875286102,
0.0525280125439167,
-0.18336379528045654,
-0.14366990327835083,
0.061428796499967575,
-0.029376976191997528,
0.00978273618966341,
0.034631773829460144,
0.04019056260585785,
-0.1432463675737381,
0.10776803642511368,
-0.02844277024269104,
0.0830644816160202,
0.04447169229388237,
0.05488422513008118,
-0.10252392292022705,
-0.03492395579814911,
-0.012343761511147022,
0.09497300535440445,
-0.2376324087381363,
0.2979264259338379,
-0.01778995431959629,
-0.016014577820897102,
-0.10866744071245193,
-0.09155375510454178,
0.06244343891739845,
0.026060504838824272,
0.13216422498226166,
0.0031552501022815704,
-0.05698874220252037,
-0.06620404124259949,
-0.004484506789594889,
0.012321941554546356,
0.07110283523797989,
-0.08867338299751282,
0.09241146594285965,
-0.02361309342086315,
0.02711299993097782,
0.004684858489781618,
0.0654892697930336,
-0.12457022070884705,
-0.10240417718887329,
0.002741185249760747,
-0.019991615787148476,
0.05880825221538544,
-0.020205924287438393,
-0.10319552570581436,
-0.08645859360694885,
0.10383493453264236,
0.04471304640173912,
0.008413853123784065,
-0.09634285420179367,
0.12142661213874817,
-0.02994239330291748,
-0.05303927883505821,
-0.02085837721824646,
0.025804340839385986,
0.07595210522413254,
-0.02963036298751831,
-0.06431430578231812,
0.06746727973222733,
-0.09600633382797241,
-0.1748911738395691,
0.010085989721119404,
0.18076638877391815,
0.17794491350650787,
0.07488218694925308,
0.018395153805613518,
0.07799527049064636,
-0.02453925460577011,
-0.059797272086143494,
0.09538950771093369,
0.05257537588477135,
-0.048792943358421326,
0.07177717238664627,
0.006445137783885002,
-0.1796608716249466,
-0.15979517996311188,
-0.06304093450307846,
0.17295245826244354,
0.2490795999765396,
-0.14214058220386505,
0.18305820226669312,
-0.013989903032779694,
-0.014748644083738327,
-0.25808748602867126,
0.04165841266512871,
0.03409324213862419,
0.029990335926413536,
0.08910626918077469,
-0.14790908992290497,
-0.003671275218948722,
-0.054287638515233994,
0.0027165876235812902,
-0.14185155928134918,
-0.09062813967466354,
-0.09832846373319626,
0.09927219152450562,
-0.020707396790385246,
0.2760681211948395,
-0.031143588945269585,
0.009656558744609356,
-0.0608128122985363,
0.03700797259807587,
0.13467051088809967,
-0.05277116969227791,
0.05938085913658142,
0.034593332558870316,
0.1443946808576584,
0.05664704367518425,
0.03929801657795906,
0.12609492242336273,
-0.03738483414053917,
0.028307035565376282,
-0.12949104607105255,
-0.09745093435049057,
0.053862567991018295,
-0.023463336750864983,
-0.041274916380643845,
-0.007793745491653681,
0.011953379027545452,
-0.16537898778915405,
-0.03861468657851219,
-0.0878237709403038,
0.05102676525712013,
-0.002969882683828473,
-0.042408596724271774,
-0.11295539885759354,
0.10846110433340073,
0.09930302947759628,
-0.03619156405329704,
0.08704060316085815,
-0.09083550423383713,
0.10952536016702652,
0.028503812849521637,
0.20749032497406006,
0.08042780309915543,
0.07105699926614761,
0.001864725723862648,
-0.04820080101490021,
0.011202715337276459,
-0.18826782703399658,
-0.016021648421883583,
0.10608813911676407,
-0.033186230808496475,
0.09028679132461548,
0.04212280735373497,
-0.06401792913675308,
0.014850017614662647,
0.07833444327116013,
-0.18912972509860992,
-0.11098784953355789,
-0.060325752943754196,
0.03377853333950043,
-0.007534907665103674,
-0.03277161344885826,
0.18625396490097046,
-0.11119354516267776,
-0.016767220571637154,
-0.00563926063477993,
0.06691890954971313,
-0.04194324091076851,
0.028255274519324303,
0.004867710173130035,
0.012773863971233368,
-0.07318618893623352,
0.06711981445550919,
-0.007508139591664076,
-0.12195134907960892,
0.07285749912261963,
0.09956914186477661,
-0.12561489641666412,
-0.09164796024560928,
-0.07642749696969986,
0.18260829150676727,
-0.07834506779909134,
-0.03475411236286163,
-0.0165999922901392,
-0.169281467795372,
0.06643923372030258,
0.1315828114748001,
0.08963008970022202,
-0.011191882193088531,
-0.04226204752922058,
0.019057946279644966,
-0.0014847799902781844,
0.09196919202804565,
0.2046058624982834,
-0.12002643942832947,
-0.10069268196821213,
0.041604023426771164,
-0.05125537887215614,
0.07834438234567642,
-0.07881172001361847,
0.0020249050576239824,
-0.07817527651786804,
0.015797792002558708,
-0.1475464552640915,
-0.05392850935459137,
-0.08027540892362595,
-0.0018404374131932855,
-0.024556376039981842,
-0.056478649377822876,
-0.04939444363117218,
-0.03735413774847984,
-0.09637241810560226,
0.047030527144670486,
-0.006184786092489958,
0.08767340332269669,
-0.06244403123855591,
-0.030949080362915993,
0.012970037758350372,
-0.02287924289703369,
0.10137119144201279,
0.14436519145965576,
-0.007539310026913881,
0.09990161657333374,
-0.17373661696910858,
0.041119419038295746,
0.07512594014406204,
-0.03960344195365906,
0.004018055275082588,
-0.09818414598703384,
-0.029564060270786285,
0.004030977841466665,
0.03626984730362892,
0.054363030940294266,
0.09029915928840637,
-0.00800494384020567,
0.09174871444702148,
0.019381308928132057,
-0.06885726004838943,
-0.06254164129495621,
0.021761396899819374,
0.036229174584150314,
0.09256795048713684,
0.20936326682567596,
-0.13373367488384247,
0.11005555838346481,
-0.04796665906906128,
0.03902736306190491,
-0.03539835661649704,
-0.0987531766295433,
-0.14648912847042084,
-0.09872207790613174,
0.08206501603126526,
-0.06147459149360657,
0.12266996502876282,
0.0881728008389473,
-0.060401249676942825,
0.07002193480730057,
0.0474516786634922,
-0.04979942366480827,
0.04497602954506874,
-0.015481994487345219,
0.05911560356616974,
-0.03965708240866661,
-0.056382518261671066,
0.0404176339507103,
0.02224123291671276,
-0.0016053082654252648,
0.17599959671497345,
0.1353924423456192,
0.1638847142457962,
0.04447171464562416,
-0.029916124418377876,
-0.05076989904046059,
0.06970963627099991,
0.025477716699242592,
-0.13883773982524872,
0.01034765038639307,
-0.09536280483007431,
0.19962717592716217,
0.12334784865379333,
-0.09289387613534927,
0.04320278763771057,
-0.07773945480585098,
-0.031045513227581978,
-0.06802713125944138,
-0.1737816333770752,
-0.09281980991363525,
-0.11657779663801193,
0.011936433613300323,
-0.09680070728063583,
-0.023394210264086723,
-0.012535466812551022,
0.04583917185664177,
-0.083648182451725,
0.029359623789787292,
-0.10738233476877213,
-0.0774131566286087,
0.21430326998233795,
-0.004339373204857111,
-0.03126654401421547,
-0.009192481637001038,
-0.03773389384150505,
-0.04767385497689247,
0.017573880031704903,
-0.013445374555885792,
0.02317655272781849,
-0.03609461337327957,
-0.017316604033112526,
-0.05708296224474907,
-0.12122990936040878,
0.02576843835413456,
0.06963322311639786,
0.04854455962777138,
0.11806508898735046,
0.01409400999546051,
0.02270159125328064,
-0.004897697828710079,
0.2904946506023407,
-0.011432409286499023,
0.05248141661286354,
-0.09063329547643661,
0.08973617106676102,
-0.037785161286592484,
0.045076966285705566,
-0.0058855097740888596,
-0.07292469590902328,
-0.019368581473827362,
0.14078141748905182,
0.3305225670337677,
-0.006383465602993965,
0.031523432582616806,
-0.10000985860824585,
0.06488043814897537,
0.032818641513586044,
0.00946511048823595,
0.04406574368476868,
0.06461116671562195,
-0.10930293798446655,
0.04451306536793709,
-0.0488133542239666,
0.004241807386279106,
0.019635004922747612,
0.0081178592517972,
0.07478626817464828,
-0.0595775730907917,
-0.08237531781196594,
0.15264473855495453,
-0.1494378298521042,
-0.1452590823173523,
0.029025651514530182,
-0.21968846023082733,
-0.08643411844968796,
0.003295646281912923,
0.05131905898451805,
0.1300671547651291,
0.1719152331352234,
0.01843828521668911,
-0.02453923411667347,
-0.008111299015581608,
0.0551164448261261,
-0.05192531645298004,
-0.012055347673594952,
0.14779497683048248,
-0.02958420105278492,
0.04206708073616028,
-0.0662493109703064,
0.09653782099485397,
0.12767137587070465,
-0.01011603232473135,
-0.0607469268143177,
0.024974577128887177,
0.051638681441545486,
-0.05586973950266838,
-0.007629158440977335,
0.12894217669963837,
-0.01754317805171013,
0.05781402066349983,
0.158617302775383,
-0.22081607580184937,
0.034373681992292404,
-0.018204867839813232,
-0.03637102618813515,
-0.03404620662331581,
0.2195184975862503,
-0.11406756192445755,
0.06444921344518661,
0.17221541702747345,
-0.06717833131551743,
-0.018534936010837555,
-0.04837888479232788,
-0.004320778418332338,
0.003368981182575226,
0.016019878908991814,
0.017678236588835716,
-0.1598517894744873,
-0.03478873893618584,
0.053890157490968704,
0.020141929388046265,
-0.13270485401153564,
-0.029039621353149414,
-0.11398482322692871,
0.03130050748586655,
-0.027177607640624046,
0.12346062809228897,
0.005419560242444277,
-0.02188008464872837,
0.02268177457153797,
-0.17447131872177124,
0.056068193167448044,
0.12395492941141129,
-0.04762169346213341,
-0.03049449436366558
] |
null | null |
transformers
|
# RuBERT for Sentiment Analysis
Short Russian texts sentiment classification
This is a [DeepPavlov/rubert-base-cased-conversational](https://huggingface.co/DeepPavlov/rubert-base-cased-conversational) model trained on aggregated corpus of 351.797 texts.
## Labels
0: NEUTRAL
1: POSITIVE
2: NEGATIVE
## How to use
```python
import torch
from transformers import AutoModelForSequenceClassification
from transformers import BertTokenizerFast
tokenizer = BertTokenizerFast.from_pretrained('blanchefort/rubert-base-cased-sentiment')
model = AutoModelForSequenceClassification.from_pretrained('blanchefort/rubert-base-cased-sentiment', return_dict=True)
@torch.no_grad()
def predict(text):
    # Tokenize, run the model, and return the predicted label id(s):
    # 0 = NEUTRAL, 1 = POSITIVE, 2 = NEGATIVE
    inputs = tokenizer(text, max_length=512, padding=True, truncation=True, return_tensors='pt')
    outputs = model(**inputs)
    predicted = torch.nn.functional.softmax(outputs.logits, dim=1)
    predicted = torch.argmax(predicted, dim=1).numpy()
    return predicted
```
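If class probabilities are needed rather than only the predicted label id, a minimal variant of the helper above can return the full softmax distribution (this sketch assumes the same `tokenizer` and `model` objects and the label order from the Labels section):
```python
@torch.no_grad()
def predict_proba(text):
    # Softmax probabilities over [NEUTRAL, POSITIVE, NEGATIVE]
    inputs = tokenizer(text, max_length=512, padding=True, truncation=True, return_tensors='pt')
    logits = model(**inputs).logits
    return torch.nn.functional.softmax(logits, dim=1).numpy()

print(predict_proba(['Отличный сервис, всем рекомендую!']))
```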
## Datasets used for model training
**[RuTweetCorp](https://study.mokoron.com/)**
> Рубцова Ю. Автоматическое построение и анализ корпуса коротких текстов (постов микроблогов) для задачи разработки и тренировки тонового классификатора //Инженерия знаний и технологии семантического веба. – 2012. – Т. 1. – С. 109-116.
**[RuReviews](https://github.com/sismetanin/rureviews)**
> RuReviews: An Automatically Annotated Sentiment Analysis Dataset for Product Reviews in Russian.
**[RuSentiment](http://text-machine.cs.uml.edu/projects/rusentiment/)**
> A. Rogers A. Romanov A. Rumshisky S. Volkova M. Gronas A. Gribov RuSentiment: An Enriched Sentiment Analysis Dataset for Social Media in Russian. Proceedings of COLING 2018.
**[Отзывы о медучреждениях](https://github.com/blanchefort/datasets/tree/master/medical_comments)**
> Датасет содержит пользовательские отзывы о медицинских учреждениях. Датасет собран в мае 2019 года с сайта prodoctorov.ru
|
{"language": ["ru"], "tags": ["sentiment", "text-classification"]}
|
text-classification
|
blanchefort/rubert-base-cased-sentiment
|
[
"transformers",
"pytorch",
"tf",
"jax",
"safetensors",
"bert",
"text-classification",
"sentiment",
"ru",
"autotrain_compatible",
"endpoints_compatible",
"has_space",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"ru"
] |
TAGS
#transformers #pytorch #tf #jax #safetensors #bert #text-classification #sentiment #ru #autotrain_compatible #endpoints_compatible #has_space #region-us
|
# RuBERT for Sentiment Analysis
Short Russian texts sentiment classification
This is a DeepPavlov/rubert-base-cased-conversational model trained on aggregated corpus of 351.797 texts.
## Labels
0: NEUTRAL
1: POSITIVE
2: NEGATIVE
## How to use
## Datasets used for model training
RuTweetCorp
> Рубцова Ю. Автоматическое построение и анализ корпуса коротких текстов (постов микроблогов) для задачи разработки и тренировки тонового классификатора //Инженерия знаний и технологии семантического веба. – 2012. – Т. 1. – С. 109-116.
RuReviews
> RuReviews: An Automatically Annotated Sentiment Analysis Dataset for Product Reviews in Russian.
RuSentiment
> A. Rogers A. Romanov A. Rumshisky S. Volkova M. Gronas A. Gribov RuSentiment: An Enriched Sentiment Analysis Dataset for Social Media in Russian. Proceedings of COLING 2018.
Отзывы о медучреждениях
> Датасет содержит пользовательские отзывы о медицинских учреждениях. Датасет собран в мае 2019 года с сайта URL
|
[
"# RuBERT for Sentiment Analysis\nShort Russian texts sentiment classification\n\nThis is a DeepPavlov/rubert-base-cased-conversational model trained on aggregated corpus of 351.797 texts.",
"## Labels\n 0: NEUTRAL\n 1: POSITIVE\n 2: NEGATIVE",
"## How to use",
"## Datasets used for model training\n\nRuTweetCorp\n\n> Рубцова Ю. Автоматическое построение и анализ корпуса коротких текстов (постов микроблогов) для задачи разработки и тренировки тонового классификатора //Инженерия знаний и технологии семантического веба. – 2012. – Т. 1. – С. 109-116.\n\nRuReviews\n\n> RuReviews: An Automatically Annotated Sentiment Analysis Dataset for Product Reviews in Russian.\n\nRuSentiment\n\n> A. Rogers A. Romanov A. Rumshisky S. Volkova M. Gronas A. Gribov RuSentiment: An Enriched Sentiment Analysis Dataset for Social Media in Russian. Proceedings of COLING 2018.\n\nОтзывы о медучреждениях\n\n> Датасет содержит пользовательские отзывы о медицинских учреждениях. Датасет собран в мае 2019 года с сайта URL"
] |
[
"TAGS\n#transformers #pytorch #tf #jax #safetensors #bert #text-classification #sentiment #ru #autotrain_compatible #endpoints_compatible #has_space #region-us \n",
"# RuBERT for Sentiment Analysis\nShort Russian texts sentiment classification\n\nThis is a DeepPavlov/rubert-base-cased-conversational model trained on aggregated corpus of 351.797 texts.",
"## Labels\n 0: NEUTRAL\n 1: POSITIVE\n 2: NEGATIVE",
"## How to use",
"## Datasets used for model training\n\nRuTweetCorp\n\n> Рубцова Ю. Автоматическое построение и анализ корпуса коротких текстов (постов микроблогов) для задачи разработки и тренировки тонового классификатора //Инженерия знаний и технологии семантического веба. – 2012. – Т. 1. – С. 109-116.\n\nRuReviews\n\n> RuReviews: An Automatically Annotated Sentiment Analysis Dataset for Product Reviews in Russian.\n\nRuSentiment\n\n> A. Rogers A. Romanov A. Rumshisky S. Volkova M. Gronas A. Gribov RuSentiment: An Enriched Sentiment Analysis Dataset for Social Media in Russian. Proceedings of COLING 2018.\n\nОтзывы о медучреждениях\n\n> Датасет содержит пользовательские отзывы о медицинских учреждениях. Датасет собран в мае 2019 года с сайта URL"
] |
[
56,
50,
15,
4,
194
] |
[
"passage: TAGS\n#transformers #pytorch #tf #jax #safetensors #bert #text-classification #sentiment #ru #autotrain_compatible #endpoints_compatible #has_space #region-us \n# RuBERT for Sentiment Analysis\nShort Russian texts sentiment classification\n\nThis is a DeepPavlov/rubert-base-cased-conversational model trained on aggregated corpus of 351.797 texts.## Labels\n 0: NEUTRAL\n 1: POSITIVE\n 2: NEGATIVE## How to use## Datasets used for model training\n\nRuTweetCorp\n\n> Рубцова Ю. Автоматическое построение и анализ корпуса коротких текстов (постов микроблогов) для задачи разработки и тренировки тонового классификатора //Инженерия знаний и технологии семантического веба. – 2012. – Т. 1. – С. 109-116.\n\nRuReviews\n\n> RuReviews: An Automatically Annotated Sentiment Analysis Dataset for Product Reviews in Russian.\n\nRuSentiment\n\n> A. Rogers A. Romanov A. Rumshisky S. Volkova M. Gronas A. Gribov RuSentiment: An Enriched Sentiment Analysis Dataset for Social Media in Russian. Proceedings of COLING 2018.\n\nОтзывы о медучреждениях\n\n> Датасет содержит пользовательские отзывы о медицинских учреждениях. Датасет собран в мае 2019 года с сайта URL"
] |
[
0.021753843873739243,
0.08815719932317734,
-0.008418937213718891,
-0.006120869889855385,
0.11941903829574585,
0.0016556759364902973,
0.18408173322677612,
0.07886433601379395,
0.05178210511803627,
0.047015607357025146,
0.05964174494147301,
0.008659319020807743,
0.06562689691781998,
0.24653925001621246,
-0.031530510634183884,
-0.18228240311145782,
0.055644065141677856,
-0.06900465488433838,
0.06354685127735138,
0.1450873762369156,
0.17494450509548187,
-0.04042363166809082,
0.09865588694810867,
-0.07497570663690567,
-0.030929051339626312,
-0.00010140358790522441,
0.054537106305360794,
-0.04395003244280815,
0.055352065712213516,
0.001038180897012353,
0.06044509634375572,
0.07886934280395508,
-0.04074712470173836,
-0.14981020987033844,
0.038945335894823074,
0.030483193695545197,
-0.07360298186540604,
0.025390751659870148,
0.11993736773729324,
-0.10458406805992126,
0.34255251288414,
-0.15408577024936676,
0.007370787672698498,
0.06445160508155823,
-0.11352405697107315,
-0.021799949929118156,
-0.12956388294696808,
0.09846043586730957,
0.09709209948778152,
0.10236131399869919,
-0.07140237838029861,
0.0952923521399498,
-0.03344953432679176,
0.06428905576467514,
0.08539528399705887,
-0.18157564103603363,
-0.08611321449279785,
0.09082692116498947,
-0.05450315400958061,
0.10352317243814468,
-0.06878899037837982,
0.05409913510084152,
0.019650457426905632,
0.027223771438002586,
0.054794177412986755,
-0.05573861300945282,
0.08745178580284119,
-0.009015360847115517,
-0.13649266958236694,
-0.004411844536662102,
0.19462069869041443,
0.08114562183618546,
-0.027876801788806915,
-0.10539252310991287,
0.020038967952132225,
-0.11010005325078964,
-0.05325507000088692,
-0.05479498952627182,
-0.031873784959316254,
-0.024849282577633858,
-0.035240937024354935,
0.0012277507921680808,
-0.09437720477581024,
-0.02504454366862774,
-0.048498187214136124,
0.1620703637599945,
-0.02461620420217514,
0.006717102602124214,
0.01383919920772314,
-0.03349487856030464,
0.00286958459764719,
-0.07194776833057404,
0.015156399458646774,
-0.11691667884588242,
-0.029896864667534828,
0.03589462861418724,
-0.050439588725566864,
-0.10193996876478195,
0.05448644980788231,
0.07733294367790222,
-0.036462776362895966,
0.05754837766289711,
0.04836495965719223,
-0.012823031283915043,
0.0703769326210022,
0.038844119757413864,
-0.059897277504205704,
-0.07435312122106552,
-0.07506093382835388,
0.03719988465309143,
0.02563518099486828,
-0.018948882818222046,
-0.01813322864472866,
0.003288946347311139,
0.01946471817791462,
0.015566762536764145,
-0.013144759461283684,
0.050190508365631104,
-0.14246000349521637,
-0.03885464742779732,
0.007424989249557257,
-0.0855875313282013,
-0.010533709079027176,
-0.005396036431193352,
-0.10767770558595657,
0.006308873649686575,
-0.08373406529426575,
0.0005136237014085054,
-0.023822937160730362,
0.10149691253900528,
0.008835024200379848,
0.04756028205156326,
-0.049096960574388504,
-0.08066491782665253,
0.02803780511021614,
-0.08622125536203384,
-0.010581398382782936,
-0.16347962617874146,
-0.11634749174118042,
-0.12463682889938354,
0.004120124038308859,
-0.07454285770654678,
-0.0048499321565032005,
-0.04134339839220047,
-0.03952108696103096,
0.009132083505392075,
-0.032115958631038666,
0.0010682567954063416,
-0.05057642608880997,
0.024050189182162285,
-0.12698791921138763,
0.10042670369148254,
0.038931023329496384,
0.03567354381084442,
-0.11829230934381485,
-0.06986045092344284,
0.017546338960528374,
0.14200051128864288,
-0.18049004673957825,
0.04670475050806999,
-0.14429134130477905,
-0.04167490452528,
-0.06753788143396378,
0.06086698919534683,
0.022438958287239075,
0.16654643416404724,
-0.2727508246898651,
-0.05198892951011658,
0.023330524563789368,
-0.11063089966773987,
-0.034343212842941284,
0.1279127597808838,
-0.029299268499016762,
0.08401602506637573,
0.11805294454097748,
0.14317739009857178,
0.007303363177925348,
0.0617205947637558,
-0.11589547246694565,
-0.10938405990600586,
-0.10192407667636871,
0.14422249794006348,
0.037643127143383026,
0.08081167936325073,
-0.006068553775548935,
0.011282629333436489,
-0.022145608440041542,
-0.08374559134244919,
-0.06594258546829224,
-0.07987711578607559,
0.04806561395525932,
-0.04470545053482056,
0.16409146785736084,
-0.01599150523543358,
-0.019614020362496376,
-0.0934339091181755,
-0.11289454996585846,
-0.12656430900096893,
0.07138792425394058,
0.054044004529714584,
0.008880991488695145,
-0.1525590717792511,
0.019632915034890175,
0.19941435754299164,
0.045074451714754105,
-0.14334484934806824,
-0.0013668383471667767,
0.015848957002162933,
0.04220068082213402,
0.0989142581820488,
0.018047500401735306,
0.04286488890647888,
-0.0997680127620697,
-0.03476344794034958,
-0.05641278997063637,
-0.1097249686717987,
-0.009587054140865803,
-0.0027311204466968775,
-0.17038032412528992,
0.02742592990398407,
-0.047153741121292114,
0.114105224609375,
-0.04854288697242737,
0.011159571819007397,
0.13967075943946838,
0.09089186042547226,
-0.0038051323499530554,
0.00819949246942997,
0.004662640392780304,
0.07646586745977402,
-0.030127139762043953,
0.031914081424474716,
0.06614141911268234,
-0.03983017057180405,
-0.09378998726606369,
0.07507366687059402,
-0.002643332816660404,
0.048261504620313644,
0.09809129685163498,
0.01708274707198143,
-0.14074818789958954,
0.04492718726396561,
-0.014845684170722961,
0.016917210072278976,
0.00311723118647933,
0.06303133815526962,
0.14609284698963165,
0.012918997555971146,
-0.02465604990720749,
-0.00692306412383914,
0.05402559041976929,
0.020919162780046463,
-0.059340715408325195,
-0.08823946863412857,
0.12337391078472137,
-0.12295765429735184,
-0.23766691982746124,
0.08416802436113358,
0.048923540860414505,
0.03918738290667534,
0.19114693999290466,
-0.0019725156016647816,
-0.005231785122305155,
-0.08140566200017929,
-0.06973087787628174,
-0.05271148681640625,
0.04547692462801933,
0.01284000463783741,
-0.003177367150783539,
0.04249200597405434,
-0.04596538096666336,
0.017679518088698387,
0.008738472126424313,
-0.03284192830324173,
-0.019846990704536438,
-0.0493476502597332,
-0.021333519369363785,
0.041138842701911926,
0.04926184192299843,
0.11456586420536041,
0.01925504207611084,
-0.001663349918089807,
-0.012853474356234074,
-0.06562542170286179,
-0.10430870950222015,
0.07504882663488388,
-0.08835911005735397,
-0.25684815645217896,
-0.0241928081959486,
0.031068105250597,
-0.10784687101840973,
0.004796498920768499,
0.034462954849004745,
-0.1599365472793579,
-0.08986866474151611,
-0.0731242299079895,
0.05947733297944069,
0.04779073968529701,
0.0003683950926642865,
-0.00948234274983406,
0.06223165988922119,
0.007431612350046635,
-0.011594508774578571,
-0.030560985207557678,
-0.07419058680534363,
-0.014346206560730934,
0.09130343049764633,
0.006717209238559008,
0.09847640991210938,
0.021969003602862358,
-0.012479817494750023,
0.0034935276489704847,
-0.07597268372774124,
0.11417702585458755,
-0.10556817799806595,
0.013148143887519836,
0.10594683885574341,
0.034169040620326996,
0.04794245585799217,
0.14363868534564972,
0.01042211800813675,
-0.08689893037080765,
0.08958227187395096,
0.026234816759824753,
-0.0492023229598999,
-0.22763971984386444,
-0.20066051185131073,
0.007111281622201204,
0.08955159783363342,
0.035241689532995224,
0.009866636246442795,
0.043422091752290726,
0.039228182286024094,
-0.05601960048079491,
-0.11684312671422958,
0.05299092084169388,
0.06675803661346436,
0.07815894484519958,
0.029161494225263596,
0.1290634274482727,
-0.06604090332984924,
0.05817274749279022,
0.13916923105716705,
-0.16842947900295258,
0.15479372441768646,
0.01965155079960823,
0.06721897423267365,
0.0674838125705719,
0.11006127297878265,
0.052561789751052856,
-0.042745962738990784,
0.0768178254365921,
-0.0030380261596292257,
-0.0061151632107794285,
-0.042064130306243896,
-0.11473935097455978,
0.14330534636974335,
-0.005575645714998245,
-0.09470608830451965,
-0.025824332609772682,
0.040393099188804626,
0.15850071609020233,
0.19196577370166779,
0.008507481776177883,
-0.19677944481372833,
-0.15306492149829865,
0.00725584477186203,
0.0360751748085022,
0.018160436302423477,
0.010576938278973103,
0.037079401314258575,
-0.12455232441425323,
0.17451371252536774,
-0.015539441257715225,
0.08685828745365143,
-0.0068919481709599495,
0.06066400557756424,
-0.008454368449747562,
-0.005834430921822786,
-0.047808289527893066,
0.06252118200063705,
-0.23850782215595245,
0.2579784095287323,
-0.01800188608467579,
0.020953644067049026,
-0.032336920499801636,
-0.06731933355331421,
0.07965830713510513,
-0.0733310654759407,
0.08670162409543991,
0.018315261229872704,
-0.17597690224647522,
-0.09418747574090958,
-0.0605558380484581,
-0.0048713963478803635,
0.08448400348424911,
-0.08828439563512802,
0.07849918305873871,
0.002547952113673091,
0.017214497551321983,
-0.05472397804260254,
-0.07636842876672745,
-0.16871732473373413,
-0.07243005186319351,
0.05252429097890854,
0.028341243043541908,
0.08699385821819305,
-0.018357370048761368,
-0.07399290800094604,
0.025752974674105644,
0.1307707577943802,
-0.02644333243370056,
-0.029457490891218185,
-0.09910415858030319,
0.10405299067497253,
0.004234613385051489,
-0.08968670666217804,
0.003890444990247488,
0.012144645676016808,
0.09577280282974243,
0.015162517316639423,
-0.068987175822258,
0.0416196770966053,
-0.036371294409036636,
-0.15044192969799042,
0.01266459934413433,
0.12052589654922485,
0.16352134943008423,
0.0507863350212574,
0.032188739627599716,
0.03613089770078659,
0.024296125397086143,
-0.09644214808940887,
0.09292421489953995,
0.05437197908759117,
-0.06420673429965973,
0.14046742022037506,
0.03158007934689522,
-0.3120870888233185,
-0.20959940552711487,
-0.06275417655706406,
0.07954537123441696,
0.17165875434875488,
-0.09893756359815598,
0.13294892013072968,
0.011635268107056618,
-0.02438683807849884,
-0.21025006473064423,
-0.010064776986837387,
0.05791926383972168,
0.008620884269475937,
0.10702330619096756,
-0.17418785393238068,
-0.04166039079427719,
0.012700331397354603,
0.009750504978001118,
-0.1029987782239914,
-0.12439129501581192,
-0.12213565409183502,
0.05512043461203575,
-0.023834289982914925,
0.006507860962301493,
-0.03622718155384064,
-0.08235348761081696,
-0.05784336477518082,
0.027613408863544464,
0.15602271258831024,
0.0038706096820533276,
0.026324626058340073,
0.0050194039940834045,
0.15213124454021454,
0.06696777790784836,
0.065253347158432,
0.14142797887325287,
0.050786036998033524,
0.06378114968538284,
-0.08118794113397598,
-0.025907527655363083,
0.031302087008953094,
-0.04111574962735176,
0.0627664253115654,
0.02098066732287407,
0.00996698159724474,
-0.12012278288602829,
0.0020571681670844555,
-0.07199036329984665,
0.052520833909511566,
-0.009695565328001976,
-0.02833505906164646,
-0.09562821686267853,
0.11957807838916779,
0.049010902643203735,
-0.018678029999136925,
0.12876315414905548,
-0.062376637011766434,
0.07888921350240707,
0.13654044270515442,
0.13861152529716492,
0.07849305123090744,
-0.0094536654651165,
-0.01891038380563259,
-0.00956981722265482,
0.039528295397758484,
-0.16724057495594025,
-0.004300865810364485,
0.08859091252088547,
-0.0041511524468660355,
0.051342807710170746,
-0.013363271951675415,
-0.09340301156044006,
0.022500135004520416,
0.1116461455821991,
-0.17190653085708618,
-0.09985169023275375,
-0.020764140412211418,
0.13192522525787354,
-0.013560498133301735,
0.006604458671063185,
0.15655019879341125,
-0.0658533051609993,
-0.021520989015698433,
0.007247593719512224,
0.08425793051719666,
0.0028599377255886793,
0.06546726077795029,
-0.030843371525406837,
0.014924375340342522,
-0.09822490066289902,
0.08717896044254303,
0.06145501881837845,
-0.134102001786232,
0.055329855531454086,
0.09175685793161392,
-0.10299449414014816,
-0.07743915170431137,
-0.03107185661792755,
0.09081651270389557,
-0.09324415773153305,
-0.055075883865356445,
-0.003114165738224983,
-0.1483277678489685,
0.02896927110850811,
0.08815521001815796,
0.09620720148086548,
0.06081800535321236,
0.013244443573057652,
-0.01026095449924469,
0.03817175328731537,
0.044923167675733566,
0.1794895976781845,
-0.06388889998197556,
-0.07072493433952332,
-0.009710883721709251,
0.0057542514987289906,
0.01004678476601839,
-0.06290930509567261,
-0.025103682652115822,
-0.02193712815642357,
0.0022811037488281727,
-0.12085793912410736,
0.006959777791053057,
-0.1134529560804367,
-0.002412262372672558,
-0.016910942271351814,
-0.03738686069846153,
-0.0943077877163887,
-0.00784482341259718,
-0.07545755058526993,
-0.019423048943281174,
-0.028341641649603844,
0.12315280735492706,
-0.11629121005535126,
0.002590334275737405,
0.020371299237012863,
-0.018517866730690002,
0.07928839325904846,
0.14854741096496582,
-0.004458489827811718,
0.07314103096723557,
-0.15347647666931152,
0.08032482862472534,
0.045370832085609436,
-0.015657326206564903,
0.00030949656502343714,
-0.06165752559900284,
-0.0034095137380063534,
-0.00765856122598052,
-0.030500223860144615,
0.04755054786801338,
0.12395583838224411,
-0.09254036098718643,
0.1144014522433281,
0.02112824097275734,
-0.02500327304005623,
-0.0442749485373497,
0.021217089146375656,
0.07983870059251785,
0.12239442020654678,
0.20708021521568298,
-0.0971512719988823,
0.06596305221319199,
-0.0667525976896286,
0.03542226180434227,
-0.040135521441698074,
-0.08374162763357162,
-0.1481085866689682,
-0.048331618309020996,
0.07001634687185287,
-0.028169211000204086,
0.10406941175460815,
0.11045175045728683,
-0.03463149443268776,
0.06892064213752747,
0.06489942967891693,
-0.058101337403059006,
0.06096350774168968,
-0.07620623707771301,
-0.0016864048084244132,
-0.05181121081113815,
-0.009859911166131496,
-0.04412961006164551,
-0.017300060018897057,
-0.04819229245185852,
0.13718962669372559,
0.12750062346458435,
0.1451440155506134,
0.044536277651786804,
-0.014293824322521687,
-0.03165212646126747,
0.04815862700343132,
0.09288565069437027,
-0.14973203837871552,
0.03459286689758301,
-0.04483877494931221,
0.11381491273641586,
0.08502554893493652,
-0.13867484033107758,
0.09540736675262451,
-0.04910305142402649,
-0.0323583148419857,
0.013410670682787895,
-0.17200659215450287,
-0.10870574414730072,
-0.1279977262020111,
0.0008731294074095786,
-0.10539241135120392,
0.006180607248097658,
0.053604189306497574,
-0.0028868233785033226,
-0.04914369061589241,
0.02023397758603096,
-0.1624484658241272,
-0.07973185181617737,
0.19695453345775604,
-0.019377190619707108,
-0.0471753366291523,
0.009612440131604671,
-0.03462488204240799,
-0.014807955361902714,
0.0695224478840828,
-0.002003123052418232,
0.04026491194963455,
0.0007438326720148325,
-0.024758832529187202,
-0.06924411654472351,
-0.1258479207754135,
0.03829048201441765,
-0.005463988520205021,
0.0195083599537611,
0.06876041740179062,
0.04185711219906807,
0.0005957988905720413,
0.027029482647776604,
0.2754714787006378,
0.044019266963005066,
0.052904386073350906,
-0.08341079205274582,
0.07979601621627808,
0.007384920492768288,
0.023778298869729042,
-0.016664311289787292,
-0.058652326464653015,
-0.04401794448494911,
0.026045147329568863,
0.23700854182243347,
0.019481338560581207,
0.024873774498701096,
-0.1370595097541809,
0.0605630949139595,
0.02222520485520363,
0.033658452332019806,
0.002449186984449625,
0.07943939417600632,
-0.03404203802347183,
0.0731743648648262,
-0.0762086883187294,
0.023383552208542824,
-0.0015733905602246523,
0.002282721223309636,
0.04563295096158981,
-0.03630226105451584,
-0.08814463019371033,
0.19842274487018585,
-0.1421094387769699,
-0.14523400366306305,
0.0162935983389616,
-0.2007470428943634,
-0.09988729655742645,
0.025039348751306534,
0.043240778148174286,
0.13377776741981506,
0.14848706126213074,
0.031763337552547455,
-0.03727589175105095,
-0.02036668173968792,
0.03655630722641945,
-0.08320631831884384,
-0.04933401197195053,
0.09815956652164459,
-0.08391828835010529,
0.1562889814376831,
-0.038446202874183655,
0.08924609422683716,
0.09888634830713272,
-0.02152174897491932,
-0.07384323328733444,
0.07172012329101562,
0.052773524075746536,
0.0003273329930379987,
0.06332213431596756,
0.12930245697498322,
-0.04372035339474678,
0.10443461686372757,
0.14188988506793976,
-0.11397624760866165,
0.021797524765133858,
-0.032443203032016754,
0.002611944219097495,
-0.016344472765922546,
0.17167998850345612,
-0.10159535706043243,
0.05730656534433365,
0.1702827513217926,
-0.06157321110367775,
-0.022742873057723045,
-0.026081997901201248,
0.02542594075202942,
-0.051642101258039474,
0.03628448024392128,
0.018076153472065926,
-0.15026919543743134,
-0.01899113319814205,
0.07063820958137512,
0.04299807548522949,
-0.1343819499015808,
0.010646301321685314,
-0.09100048243999481,
0.07329163700342178,
0.016334012150764465,
0.11770020425319672,
-0.05920564755797386,
-0.04677962139248848,
-0.000821371388155967,
-0.08984795957803726,
0.0787242203950882,
0.1186550110578537,
-0.05150661617517471,
-0.05811993032693863
] |
null | null |
transformers
|
# ss
|
{"tags": ["conversational"]}
|
text-generation
|
bleachybrain/DialoGPT-med-ss
|
[
"transformers",
"pytorch",
"gpt2",
"text-generation",
"conversational",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
# ss
|
[
"# ss"
] |
[
"TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"# ss"
] |
[
51,
3
] |
[
"passage: TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n# ss"
] |
[
-0.024268031120300293,
0.027011793106794357,
-0.007145066745579243,
-0.006262112408876419,
0.16064044833183289,
0.009302747435867786,
0.10980533808469772,
0.12710551917552948,
0.02069292590022087,
-0.01333542913198471,
0.15093548595905304,
0.1832849532365799,
0.012487837113440037,
0.09680309891700745,
-0.07103877514600754,
-0.24209894239902496,
0.08531837165355682,
0.03281093388795853,
-0.007681690622121096,
0.12081203609704971,
0.07941875606775284,
-0.050385478883981705,
0.08489038050174713,
-0.04121847078204155,
-0.1165800541639328,
0.015547600574791431,
0.037540026009082794,
-0.11837701499462128,
0.10854952782392502,
0.04466035217046738,
0.056620195508003235,
0.03936349228024483,
-0.045516129583120346,
-0.1499272584915161,
0.02885846234858036,
0.00885354820638895,
-0.052487775683403015,
0.045398060232400894,
0.03502877056598663,
-0.09280680865049362,
0.06798171252012253,
0.07122685760259628,
-0.012543727643787861,
0.05937418341636658,
-0.1514044851064682,
-0.03780992329120636,
-0.006886673159897327,
0.035176362842321396,
0.07219888269901276,
0.0937192440032959,
-0.029436448588967323,
0.0943518802523613,
-0.09776225686073303,
0.10611098259687424,
0.10506439954042435,
-0.29938361048698425,
0.006424235180020332,
0.0819718986749649,
0.012285110540688038,
0.07976081222295761,
-0.03184885159134865,
0.06454131752252579,
0.007828420959413052,
0.011158140376210213,
0.008893446996808052,
-0.06672374159097672,
-0.07706741988658905,
0.06644552201032639,
-0.08739598840475082,
-0.06612685322761536,
0.23983970284461975,
-0.041728682816028595,
0.06548239290714264,
-0.05176616460084915,
-0.10644427686929703,
-0.03555995970964432,
-0.030517691746354103,
0.0030495638493448496,
-0.07415011525154114,
0.08121100813150406,
-0.004797299392521381,
-0.08838225901126862,
-0.1371649205684662,
-0.04835277423262596,
-0.15883855521678925,
0.1533229798078537,
0.02127540111541748,
0.060761839151382446,
-0.1730431318283081,
0.09149179607629776,
0.01687481813132763,
-0.0860050767660141,
0.012027615681290627,
-0.10260157287120819,
0.014192705973982811,
0.03979009762406349,
-0.047402337193489075,
-0.0627749040722847,
0.07755976915359497,
0.13940271735191345,
0.011283157393336296,
0.03042178973555565,
-0.0007723315502516925,
0.08106172829866409,
0.04388973489403725,
0.08607227355241776,
0.0019952994771301746,
-0.026010926812887192,
0.0329773835837841,
-0.1092824935913086,
0.003186425194144249,
-0.08465849608182907,
-0.15654674172401428,
-0.005866366904228926,
0.02470407821238041,
0.07146473228931427,
0.013881138525903225,
0.12377350777387619,
-0.024917885661125183,
-0.015802158042788506,
0.07767338305711746,
-0.05810375139117241,
-0.023491494357585907,
0.0331781841814518,
0.022129487246274948,
0.11624740809202194,
0.0022865822538733482,
0.024923495948314667,
-0.13533915579319,
0.04765068739652634,
-0.06696532666683197,
-0.010906199924647808,
-0.04398685321211815,
-0.02576323226094246,
-0.003859475255012512,
-0.06133740395307541,
-0.011197809129953384,
-0.1451047658920288,
-0.19947262108325958,
-0.001854741363786161,
-0.03174616023898125,
-0.033383965492248535,
-0.052329543977975845,
-0.07704316079616547,
-0.023450206965208054,
0.042877841740846634,
-0.0807124450802803,
-0.03356143832206726,
-0.06385777145624161,
0.10874069482088089,
-0.05451696366071701,
0.074992336332798,
-0.13399973511695862,
0.07266205549240112,
-0.1321326196193695,
-0.02536816895008087,
-0.0685078352689743,
0.11164544522762299,
-0.036492183804512024,
0.09808295220136642,
0.0024155059363693,
-0.033416908234357834,
-0.08127271384000778,
0.06849975883960724,
-0.04150203987956047,
0.24495524168014526,
-0.08882873505353928,
-0.11048338562250137,
0.2919630706310272,
-0.07200176268815994,
-0.12365991622209549,
0.128501757979393,
0.014730436727404594,
0.015540643595159054,
0.10424830764532089,
0.22670018672943115,
-0.03096456453204155,
0.038475681096315384,
0.09057100862264633,
0.12166641652584076,
-0.08995208144187927,
-0.02842872217297554,
0.03737137094140053,
-0.03208914026618004,
-0.08911428600549698,
0.05934300273656845,
0.06902579963207245,
0.05069148167967796,
-0.043647732585668564,
-0.03463219106197357,
-0.013963082805275917,
-0.004955141339451075,
0.06615281850099564,
-0.00282881548628211,
0.10119900107383728,
-0.0570504292845726,
-0.039986442774534225,
-0.0458599217236042,
-0.00887943897396326,
-0.04201749712228775,
0.02266622893512249,
-0.05465995520353317,
0.09875962138175964,
0.03278006240725517,
0.07128196209669113,
-0.14711028337478638,
-0.08074823021888733,
-0.025701623409986496,
0.1688879132270813,
0.0343855582177639,
0.09507913142442703,
0.04846523702144623,
-0.04273608699440956,
-0.0010058258194476366,
0.0037323881406337023,
0.15831835567951202,
-0.02070312574505806,
-0.05467471852898598,
-0.060366202145814896,
0.06568916887044907,
-0.0476500578224659,
0.036546435207128525,
-0.03446141630411148,
0.0362018421292305,
0.0626426711678505,
0.11168773472309113,
-0.01091018971055746,
0.013579458929598331,
-0.00105760945007205,
0.004092223010957241,
-0.052907880395650864,
0.008195714093744755,
0.09242405742406845,
0.004541459959000349,
-0.08535666018724442,
0.20757503807544708,
-0.1790429651737213,
0.19169127941131592,
0.17000971734523773,
-0.2681911289691925,
0.013235366903245449,
-0.07170198857784271,
-0.04368366301059723,
0.026432517915964127,
0.04983144253492355,
-0.06601135432720184,
0.14868058264255524,
-0.013306671753525734,
0.17984220385551453,
-0.050103794783353806,
-0.04542224481701851,
-0.023890536278486252,
-0.05909949913620949,
0.015268097631633282,
0.08216018974781036,
0.09758166968822479,
-0.15027788281440735,
0.20771674811840057,
0.13294142484664917,
0.048231564462184906,
0.19330601394176483,
0.009601613506674767,
-0.01208505965769291,
0.08933477103710175,
-0.007376394234597683,
-0.0682622566819191,
-0.04650835320353508,
-0.2413666546344757,
-0.039422087371349335,
0.08798046410083771,
0.04083770141005516,
0.10125806927680969,
-0.1010640412569046,
-0.04166535288095474,
-0.015557243488729,
-0.02481621503829956,
-0.00511783454567194,
0.07813560217618942,
0.05554545298218727,
0.13157275319099426,
-0.0073541151359677315,
0.0015493420651182532,
0.0888277217745781,
0.0069648707285523415,
-0.11071965843439102,
0.1987047791481018,
-0.13818398118019104,
-0.3603856861591339,
-0.15580815076828003,
-0.11582539230585098,
-0.03985719382762909,
0.06843811273574829,
0.11778740584850311,
-0.11838283389806747,
-0.030825117602944374,
-0.002283075824379921,
0.09331144392490387,
-0.06041892617940903,
0.010781548917293549,
-0.0578308179974556,
0.02273452654480934,
-0.09905605018138885,
-0.08011601120233536,
-0.0622873418033123,
-0.03020497038960457,
-0.07762395590543747,
0.13686612248420715,
-0.09336261451244354,
0.043330006301403046,
0.19687823951244354,
0.060075849294662476,
0.04752305522561073,
-0.04574299976229668,
0.1839854121208191,
-0.09531563520431519,
-0.007686374709010124,
0.1866266131401062,
-0.0666680708527565,
0.07474011927843094,
0.1105804294347763,
-0.0004101310914848,
-0.0849507674574852,
0.02116120606660843,
-0.03776392340660095,
-0.09677848219871521,
-0.2225404977798462,
-0.13988642394542694,
-0.11570078879594803,
0.12547017633914948,
-0.0021569328382611275,
0.04537690803408623,
0.15676432847976685,
0.07856990396976471,
-0.05252126604318619,
-0.010827393271028996,
0.022928927093744278,
0.07986719906330109,
0.2192031592130661,
-0.05187702178955078,
0.15564759075641632,
-0.04776986315846443,
-0.13077720999717712,
0.08521062880754471,
0.04301636666059494,
0.06662065535783768,
0.03643031418323517,
0.06322361528873444,
0.01556673739105463,
0.07898270338773727,
0.11749552190303802,
0.07961716502904892,
0.026451440528035164,
-0.02111544832587242,
-0.03906114399433136,
-0.02574598789215088,
-0.09389963001012802,
0.05184121057391167,
0.04290405288338661,
-0.1653548777103424,
-0.04323459789156914,
-0.07476823776960373,
0.09412417560815811,
0.09585744142532349,
0.05606288090348244,
-0.18611839413642883,
-0.037686239928007126,
0.09458617866039276,
-0.02420445717871189,
-0.12132491916418076,
0.09866137057542801,
0.04353242740035057,
-0.1465151160955429,
0.03587229177355766,
-0.012920631095767021,
0.11577706784009933,
-0.06023438647389412,
0.1078072339296341,
-0.06803807616233826,
-0.08223535865545273,
0.022733423858880997,
0.13038606941699982,
-0.278401255607605,
0.2012343555688858,
-0.008860491216182709,
-0.06428548693656921,
-0.12308923155069351,
0.0005182449240237474,
-0.007249746005982161,
0.0769442617893219,
0.12268824130296707,
0.0031988415867090225,
-0.03227590397000313,
-0.08203635364770889,
-0.02603851445019245,
0.032092563807964325,
0.13195578753948212,
-0.05871544033288956,
-0.026498889550566673,
-0.03937076777219772,
0.0003831258218269795,
-0.03652742877602577,
-0.06524322181940079,
0.060196079313755035,
-0.17984327673912048,
0.09382174164056778,
0.04198804497718811,
0.08336655050516129,
0.015701215714216232,
0.014070750214159489,
-0.12385759502649307,
0.2541564404964447,
-0.099178247153759,
-0.11063076555728912,
-0.11317944526672363,
-0.03235611692070961,
0.030866172164678574,
-0.060586266219615936,
0.03470173850655556,
-0.06625065207481384,
0.024144157767295837,
-0.03856467828154564,
-0.19449910521507263,
0.11973755806684494,
-0.10729293525218964,
-0.07043680548667908,
-0.023845002055168152,
0.22623741626739502,
-0.05396386608481407,
0.011723590083420277,
0.0203146543353796,
0.013499835506081581,
-0.10957565903663635,
-0.11712498962879181,
0.05556901544332504,
-0.002776527311652899,
0.04714147746562958,
0.06397543102502823,
-0.05337216705083847,
-0.050096601247787476,
-0.03766106069087982,
-0.02195214107632637,
0.31545141339302063,
0.16391988098621368,
-0.054980531334877014,
0.20870721340179443,
0.12490818649530411,
-0.057133592665195465,
-0.3431239426136017,
-0.11950373649597168,
-0.11493449658155441,
-0.036498330533504486,
-0.051226720213890076,
-0.19787707924842834,
0.06729641556739807,
0.009913084097206593,
-0.01734332926571369,
0.06699828058481216,
-0.22672124207019806,
-0.07843688130378723,
0.15967783331871033,
-0.043054886162281036,
0.37032991647720337,
-0.12050960958003998,
-0.11182446777820587,
-0.02878999151289463,
-0.1429917961359024,
0.1574047952890396,
-0.011103829368948936,
0.09690950065851212,
-0.0006836394313722849,
0.1485157459974289,
0.060538068413734436,
-0.03263518959283829,
0.08523623645305634,
0.010735164396464825,
-0.03119945153594017,
-0.10004854947328568,
-0.029138678684830666,
0.024376684799790382,
0.027660666033625603,
0.026611147448420525,
-0.048055727034807205,
0.04746783897280693,
-0.12246908247470856,
-0.03728354349732399,
-0.09166543930768967,
0.02614356204867363,
0.04304727539420128,
-0.07286635041236877,
-0.014744113199412823,
-0.07952721416950226,
0.006862805690616369,
0.018402790650725365,
0.20288556814193726,
-0.08389861136674881,
0.1699306219816208,
0.09398119896650314,
0.13292407989501953,
-0.15138639509677887,
0.00654979981482029,
-0.0796615332365036,
-0.06715741753578186,
0.06897526979446411,
-0.10445471107959747,
0.06474717706441879,
0.10615669935941696,
-0.03601720184087753,
0.07766455411911011,
0.10932670533657074,
0.028468266129493713,
-0.009001065976917744,
0.10852357745170593,
-0.2824181318283081,
-0.034257933497428894,
-0.0714423805475235,
0.027731146663427353,
0.0865405723452568,
0.10789098590612411,
0.18073149025440216,
0.022042466327548027,
-0.042730912566185,
-0.005179823376238346,
0.03999040275812149,
-0.02978523075580597,
0.06467133015394211,
-0.0048056854866445065,
0.04184240847826004,
-0.15815049409866333,
0.0714598223567009,
-0.011925095692276955,
-0.13591061532497406,
0.032083746045827866,
0.15981408953666687,
-0.14080284535884857,
-0.11828061938285828,
-0.07240236550569534,
0.05554526299238205,
-0.10478448122739792,
-0.014383101835846901,
-0.05614219978451729,
-0.13817396759986877,
0.07814312726259232,
0.12420906126499176,
0.055777642875909805,
0.09087445586919785,
-0.05709662288427353,
-0.006282532121986151,
0.0063201989978551865,
-0.012741596437990665,
-0.004074475262314081,
-0.007957253605127335,
-0.048552583903074265,
0.07858546078205109,
-0.047766413539648056,
0.1414571851491928,
-0.10036858171224594,
-0.08428279310464859,
-0.16032297909259796,
0.04020966589450836,
-0.07388628274202347,
-0.09349475800991058,
-0.08977387100458145,
-0.06596460938453674,
0.00012681505177170038,
-0.04290986433625221,
-0.035759102553129196,
-0.04994808882474899,
-0.12140975147485733,
0.03765852004289627,
-0.0471552275121212,
0.01932341977953911,
-0.08195336163043976,
0.020672017708420753,
0.0900702178478241,
-0.038459256291389465,
0.15537282824516296,
0.16103778779506683,
-0.10767579823732376,
0.10398010909557343,
-0.14625605940818787,
-0.07496648281812668,
0.11500822007656097,
0.036835528910160065,
0.023959899321198463,
0.1438291370868683,
0.005669886711984873,
0.07027100026607513,
0.01961391419172287,
0.053974539041519165,
0.030283953994512558,
-0.09718285501003265,
0.09383751451969147,
-0.0156764667481184,
-0.1272079050540924,
-0.0504964217543602,
-0.062105681747198105,
0.011851956136524677,
0.029654404148459435,
0.08530537784099579,
-0.058082215487957,
0.09631185233592987,
-0.054098304361104965,
0.03246155008673668,
0.03464706987142563,
-0.16565796732902527,
-0.07379919290542603,
-0.08632197976112366,
0.05128682777285576,
0.008639967069029808,
0.2644043266773224,
0.012599308975040913,
-0.01621164381504059,
0.0472615510225296,
0.09085188806056976,
0.10151219367980957,
0.025017671287059784,
0.17036928236484528,
0.11075339466333389,
-0.07826569676399231,
-0.10676500201225281,
0.04788963496685028,
-0.0014603791059926152,
-0.0136743588373065,
0.14199340343475342,
0.006250111386179924,
0.025992948561906815,
0.07987035810947418,
-0.03219142183661461,
0.015286954119801521,
-0.11402019113302231,
-0.15447421371936798,
-0.05192037299275398,
0.04448696970939636,
-0.04769163206219673,
0.13649581372737885,
0.12369124591350555,
-0.022347165271639824,
0.04267851263284683,
-0.023956747725605965,
-0.05180199816823006,
-0.1766156554222107,
-0.14132703840732574,
-0.064317986369133,
-0.13907060027122498,
0.012778082862496376,
-0.10661455988883972,
0.052397195249795914,
0.07421161234378815,
0.06465309113264084,
-0.05073340982198715,
0.10953763127326965,
0.019056569784879684,
-0.08251703530550003,
0.049809012562036514,
-0.025853754952549934,
0.06165604293346405,
-0.0157008059322834,
-0.0268540158867836,
-0.060162220150232315,
0.02662903629243374,
0.02063651755452156,
0.049377985298633575,
-0.032818570733070374,
-0.0048751626163721085,
-0.1430857628583908,
-0.08124478906393051,
-0.05115024000406265,
0.060624927282333374,
-0.057196393609046936,
0.11240284144878387,
0.003756516380235553,
-0.019728640094399452,
0.042836155742406845,
0.23554660379886627,
-0.08825033903121948,
-0.055903855711221695,
-0.06434692442417145,
0.19537529349327087,
0.019663382321596146,
0.11173838376998901,
-0.018504435196518898,
-0.012634359300136566,
-0.0758928582072258,
0.34907713532447815,
0.29450488090515137,
-0.08863932639360428,
0.015933595597743988,
0.007259908597916365,
0.04543529450893402,
0.11870652437210083,
0.12271913886070251,
0.09157324582338333,
0.30979326367378235,
-0.07167954742908478,
-0.05022823438048363,
-0.005466282833367586,
-0.03846098482608795,
-0.09374968707561493,
0.0914185494184494,
0.030149919912219048,
-0.063329316675663,
-0.044440653175115585,
0.08300250768661499,
-0.26587143540382385,
0.11132583767175674,
-0.1133488267660141,
-0.1749456524848938,
-0.05295577645301819,
0.018056942149996758,
0.10225091874599457,
0.01827000267803669,
0.0806976780295372,
0.013639360666275024,
-0.09209353476762772,
0.06543305516242981,
0.028606003150343895,
-0.20558451116085052,
0.008970463648438454,
0.058175042271614075,
-0.0804997906088829,
0.003887389786541462,
-0.032488688826560974,
0.06745591759681702,
0.05777716264128685,
0.055230461061000824,
-0.0009713113540783525,
0.035029880702495575,
0.008401795290410519,
-0.0706632137298584,
-0.0034695914946496487,
0.06460735201835632,
0.018721621483564377,
-0.0710587203502655,
0.06188775599002838,
-0.1686355471611023,
0.02929331548511982,
0.00760743347927928,
-0.051521748304367065,
0.014237833209335804,
0.00666881212964654,
-0.060390569269657135,
0.04902846738696098,
0.05384402349591255,
-0.009546562097966671,
-0.010779032483696938,
-0.05317744240164757,
-0.023879583925008774,
-0.035100728273391724,
-0.09192361682653427,
-0.08633962273597717,
-0.1707318127155304,
-0.10835839807987213,
0.085739865899086,
0.005838847253471613,
-0.17646247148513794,
0.02303032949566841,
-0.0991864800453186,
0.09743867814540863,
-0.15571022033691406,
0.08729267865419388,
0.06314696371555328,
0.007816470228135586,
-0.0031242945697158575,
-0.03728900104761124,
0.0634688287973404,
0.08819102495908737,
-0.09193562716245651,
-0.07640274614095688
] |
null | null |
transformers
|
# RoBERTa-like language model trained on part of the TAIGA corpus
## Training Details
- about 60k steps
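The training script is not published with this checkpoint. For orientation only, the following is a minimal sketch of how a RoBERTa-like masked language model can be pretrained with the `Trainer` API; the corpus path, tokenizer location and every hyperparameter other than the step count are illustrative assumptions, not the actual setup used for this model.

```python
from datasets import load_dataset
from transformers import (
    DataCollatorForLanguageModeling,
    RobertaConfig,
    RobertaForMaskedLM,
    RobertaTokenizerFast,
    Trainer,
    TrainingArguments,
)

# Assumed: a byte-level BPE tokenizer has already been trained on the corpus.
tokenizer = RobertaTokenizerFast.from_pretrained("./ru-tokenizer", model_max_length=512)

config = RobertaConfig(          # roberta-base-sized configuration
    vocab_size=tokenizer.vocab_size,
    max_position_embeddings=514,
    hidden_size=768,
    num_hidden_layers=12,
    num_attention_heads=12,
)
model = RobertaForMaskedLM(config)

# Assumed: the corpus slice is available as plain-text files.
ds = load_dataset("text", data_files={"train": "taiga_part.txt"})["train"]
ds = ds.map(
    lambda batch: tokenizer(batch["text"], truncation=True, max_length=512),
    batched=True,
    remove_columns=["text"],
)

collator = DataCollatorForLanguageModeling(tokenizer=tokenizer, mlm_probability=0.15)
args = TrainingArguments(
    output_dir="./roberta-ru",
    max_steps=60_000,                 # "about 60k steps", per this card
    per_device_train_batch_size=32,   # assumption
    learning_rate=1e-4,               # assumption
    save_steps=10_000,
)
Trainer(model=model, args=args, data_collator=collator, train_dataset=ds).train()
```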
## Example pipeline
```python
from transformers import pipeline
from transformers import RobertaTokenizerFast
tokenizer = RobertaTokenizerFast.from_pretrained('blinoff/roberta-base-russian-v0', model_max_length=512)
fill_mask = pipeline(
"fill-mask",
model="blinoff/roberta-base-russian-v0",
tokenizer=tokenizer
)
fill_mask("Мозг — это машина <mask>, которая пытается снизить ошибку в прогнозе.")
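# (Rough English gloss of the prompt: "The brain is a <mask> machine that tries to reduce prediction error."; top predictions follow.)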
# {
# 'sequence': '<s>Мозг — это машина города, которая пытается снизить ошибку в прогнозе.</s>',
# 'score': 0.012859329581260681,
# 'token': 2144,
# 'token_str': 'ĠгоÑĢода'
# },
# {
# 'sequence': '<s>Мозг — это машина человека, которая пытается снизить ошибку в прогнозе.</s>',
# 'score': 0.01185101643204689,
# 'token': 1470,
# 'token_str': 'ĠÑĩеловека'
# },
# {
# 'sequence': '<s>Мозг — это машина дома, которая пытается снизить ошибку в прогнозе.</s>',
# 'score': 0.009940559044480324,
# 'token': 1411,
# 'token_str': 'Ġдома'
# },
# {
# 'sequence': '<s>Мозг — это машина женщина, которая пытается снизить ошибку в прогнозе.</s>',
# 'score': 0.007794599514454603,
# 'token': 2707,
# 'token_str': 'ĠженÑīина'
# },
# {
# 'sequence': '<s>Мозг — это машина женщины, которая пытается снизить ошибку в прогнозе.</s>',
# 'score': 0.007725382689386606,
# 'token': 3546,
# 'token_str': 'ĠженÑīинÑĭ'
# }
```
|
{"language": "ru", "widget": [{"text": "\u041c\u043e\u0437\u0433 \u2014 \u044d\u0442\u043e \u043c\u0430\u0448\u0438\u043d\u0430 \u0432\u044b\u0432\u043e\u0434\u0430, \u043a\u043e\u0442\u043e\u0440\u0430\u044f \u043f\u044b\u0442\u0430\u0435\u0442\u0441\u044f <mask> \u043e\u0448\u0438\u0431\u043a\u0443 \u0432 \u043f\u0440\u043e\u0433\u043d\u043e\u0437\u0435.", "example_title": "brain_example"}, {"text": "\u041d\u0438\u043a\u043e\u0433\u0434\u0430 \u043d\u0435 \u0441\u043f\u043e\u0440\u044c\u0442\u0435 \u0441 \u0438\u0434\u0438\u043e\u0442\u0430\u043c\u0438, <mask> \u043e\u043f\u0443\u0441\u0442\u0438\u0442\u0435\u0441\u044c \u0434\u043e \u0438\u0445 \u0443\u0440\u043e\u0432\u043d\u044f, \u0433\u0434\u0435 \u043e\u043d\u0438 \u0432\u0430\u0441 \u0437\u0430\u0434\u0430\u0432\u044f\u0442 \u0441\u0432\u043e\u0438\u043c \u043e\u043f\u044b\u0442\u043e\u043c.", "example_title": "idiot_example"}]}
|
fill-mask
|
blinoff/roberta-base-russian-v0
|
[
"transformers",
"pytorch",
"jax",
"safetensors",
"roberta",
"fill-mask",
"ru",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"ru"
] |
TAGS
#transformers #pytorch #jax #safetensors #roberta #fill-mask #ru #autotrain_compatible #endpoints_compatible #region-us
|
# RoBERTa-like language model trained on part of the TAIGA corpus
## Training Details
- about 60k steps
## Example pipeline
|
[
"# RoBERTa-like language model trained on part of part of TAIGA corpus",
"## Training Details\n\n- about 60k steps\n\n![]()",
"## Example pipeline"
] |
[
"TAGS\n#transformers #pytorch #jax #safetensors #roberta #fill-mask #ru #autotrain_compatible #endpoints_compatible #region-us \n",
"# RoBERTa-like language model trained on part of part of TAIGA corpus",
"## Training Details\n\n- about 60k steps\n\n![]()",
"## Example pipeline"
] |
[
47,
18,
13,
5
] |
[
"passage: TAGS\n#transformers #pytorch #jax #safetensors #roberta #fill-mask #ru #autotrain_compatible #endpoints_compatible #region-us \n# RoBERTa-like language model trained on part of part of TAIGA corpus## Training Details\n\n- about 60k steps\n\n![]()## Example pipeline"
] |
[
-0.05197075009346008,
0.0546780601143837,
-0.00275677302852273,
0.07696351408958435,
0.15865740180015564,
-0.010463759303092957,
0.10704828053712845,
0.07477546483278275,
-0.006662611849606037,
-0.004247839562594891,
0.18418718874454498,
0.14665795862674713,
-0.009748551063239574,
0.1766991764307022,
0.007056348957121372,
-0.3689807653427124,
0.029041461646556854,
-0.034170642495155334,
-0.02394658885896206,
0.11953102052211761,
0.12561830878257751,
-0.05511299520730972,
0.08010074496269226,
-0.030962252989411354,
-0.05533578246831894,
0.019940149039030075,
0.003845985047519207,
-0.15166425704956055,
0.12036321312189102,
-0.022971512749791145,
0.14798469841480255,
-0.020958343520760536,
0.039514992386102676,
-0.06735240668058395,
0.039743900299072266,
-0.01311426516622305,
0.007942777127027512,
0.040874432772397995,
-0.03522133454680443,
-0.06698166579008102,
0.08581246435642242,
0.02379457838833332,
0.028864964842796326,
0.008031955920159817,
-0.20521250367164612,
-0.0740075409412384,
-0.009527136571705341,
-0.02764965407550335,
0.12078934907913208,
0.14851970970630646,
-0.010886063799262047,
0.16382482647895813,
-0.13935618102550507,
0.08809789270162582,
0.17571890354156494,
-0.28988751769065857,
-0.0638483390212059,
0.10264424234628677,
0.14501576125621796,
0.1094452366232872,
-0.03742362558841705,
0.027771497145295143,
0.04093703627586365,
0.025657199323177338,
-0.04024462029337883,
-0.10839295387268066,
-0.08252114802598953,
0.009367680177092552,
-0.1196134090423584,
0.005108705256134272,
0.24270708858966827,
-0.05399155244231224,
-0.030677326023578644,
-0.02567952871322632,
-0.03787555173039436,
-0.06763073056936264,
-0.06484044343233109,
-0.03893233463168144,
-0.0479765459895134,
0.017511989921331406,
-0.056141145527362823,
-0.047617897391319275,
-0.08533187210559845,
-0.05964038521051407,
-0.06922534853219986,
0.18187101185321808,
0.03692319616675377,
0.021129826083779335,
-0.13971757888793945,
0.07543621957302094,
-0.037325721234083176,
-0.10415834933519363,
0.03477140888571739,
-0.08009900152683258,
0.00018060642469208688,
-0.014850770123302937,
-0.0034282177221029997,
-0.06668450683355331,
0.10136708617210388,
0.1629481315612793,
0.007639175746589899,
0.040529076009988785,
0.10584267973899841,
0.062923863530159,
-0.03839685395359993,
0.1611441671848297,
-0.05893692374229431,
-0.10154415667057037,
0.0228936318308115,
-0.01594717800617218,
-0.02336837537586689,
-0.024252181872725487,
-0.16982947289943695,
-0.06214306876063347,
-0.022586528211832047,
0.09342797100543976,
-0.05660157650709152,
0.07305943965911865,
-0.028074923902750015,
0.0029067411087453365,
-0.060356322675943375,
-0.07885689288377762,
-0.024693835526704788,
-0.054336436092853546,
-0.014088436029851437,
0.008083240129053593,
-0.007085663732141256,
-0.012858894653618336,
-0.060754407197237015,
-0.008191457949578762,
-0.04145531728863716,
-0.033355772495269775,
-0.11804202198982239,
-0.10257319360971451,
-0.0181892067193985,
-0.14348889887332916,
0.038010988384485245,
-0.18178901076316833,
-0.15733599662780762,
0.01369717437773943,
0.1085587814450264,
-0.028782833367586136,
0.005286590661853552,
-0.09204225987195969,
-0.04311036318540573,
-0.0012154413852840662,
-0.002211417304351926,
-0.011464763432741165,
-0.05210142955183983,
0.05636703222990036,
0.014097054488956928,
0.11790911108255386,
-0.07028286159038544,
0.012826552614569664,
-0.10506884008646011,
0.015851300209760666,
-0.18544310331344604,
0.03652250021696091,
0.006753937806934118,
0.12228785455226898,
-0.05485574156045914,
-0.04136241599917412,
-0.09240109473466873,
0.08758267015218735,
0.039113059639930725,
0.1695040762424469,
-0.12412892282009125,
-0.04753424599766731,
0.2855105400085449,
-0.06196439266204834,
-0.05268082022666931,
0.1408747285604477,
-0.04558127745985985,
0.19216805696487427,
0.09629523009061813,
0.12744557857513428,
0.03148713335394859,
-0.10439129918813705,
0.1495220959186554,
0.06488244980573654,
-0.07315833121538162,
-0.056351207196712494,
0.050150178372859955,
-0.006864290684461594,
-0.02861231565475464,
0.07016746699810028,
0.019391681998968124,
0.07017166912555695,
-0.08602207154035568,
-0.06173517554998398,
0.026401923969388008,
-0.07600235939025879,
0.029861724004149437,
-0.001079966896213591,
0.09753286093473434,
-0.07049164175987244,
-0.06391825526952744,
-0.12101525813341141,
0.09233580529689789,
-0.013404229655861855,
-0.02801358513534069,
-0.10646744817495346,
0.037779174745082855,
0.002388204215094447,
0.022937782108783722,
-0.14925269782543182,
0.0134735107421875,
-0.01601649448275566,
0.1129499077796936,
0.08058150857686996,
0.0866771936416626,
0.09231331944465637,
0.0027752723544836044,
-0.01470104604959488,
0.008893932215869427,
0.046064343303442,
-0.00646181171759963,
-0.05963389202952385,
-0.128440722823143,
0.04777141660451889,
-0.09529300034046173,
0.0614197812974453,
-0.13038696348667145,
0.024064185097813606,
-0.10872635245323181,
0.024927275255322456,
0.02958645671606064,
0.05283011868596077,
-0.009296000935137272,
0.06109055131673813,
-0.06935496628284454,
-0.0249498151242733,
0.06952923536300659,
-0.022360075265169144,
-0.06179020553827286,
0.06556419283151627,
-0.10833007097244263,
0.20538903772830963,
0.15081210434436798,
-0.09857691079378128,
-0.09035474807024002,
0.07409083098173141,
-0.0100089181214571,
0.025025641545653343,
0.010362592525780201,
0.029196329414844513,
0.10016059875488281,
-0.012586730532348156,
0.16334620118141174,
-0.04384025186300278,
0.019989727064967155,
0.04477141052484512,
-0.15616586804389954,
0.03303370997309685,
0.15894292294979095,
-0.01615186408162117,
-0.21792535483837128,
0.13837140798568726,
0.1077081561088562,
-0.12369237095117569,
0.21221396327018738,
0.030830688774585724,
-0.034999433904886246,
-0.030501138418912888,
0.07010622322559357,
0.03746984899044037,
0.11085742712020874,
-0.14053834974765778,
-0.06959391385316849,
0.010314635001122952,
0.012902013026177883,
0.028963064774870872,
-0.12661899626255035,
-0.06816820800304413,
-0.017220379784703255,
-0.007821592502295971,
0.005622264463454485,
0.08054839074611664,
-0.06526501476764679,
0.09431889653205872,
-0.004882481414824724,
-0.16555126011371613,
0.04582889378070831,
0.0008851938764564693,
-0.030880039557814598,
0.24728594720363617,
-0.06758300960063934,
-0.28694161772727966,
-0.12306506931781769,
-0.1660207211971283,
0.04980246350169182,
0.046090465039014816,
0.03334306925535202,
-0.19722047448158264,
-0.015538224019110203,
0.04976798593997955,
-0.03387158736586571,
-0.05633745342493057,
-0.007273457013070583,
-0.11824905872344971,
0.10371869057416916,
-0.05896372348070145,
-0.051911819726228714,
-0.04988306015729904,
-0.07315121591091156,
-0.08801776170730591,
0.0979166105389595,
-0.13793054223060608,
0.09022872149944305,
0.14141252636909485,
-0.01174716092646122,
0.07746418565511703,
-0.031034080311655998,
0.1533607840538025,
-0.09205331653356552,
-0.0020454111509025097,
0.1888652741909027,
-0.05452445521950722,
0.03578510880470276,
0.13824962079524994,
-0.002022399799898267,
-0.08019967377185822,
0.03594481572508812,
-0.057293180376291275,
-0.12734933197498322,
-0.21862439811229706,
-0.04251200705766678,
-0.11181294173002243,
0.07559436559677124,
0.06563176214694977,
0.03284969553351402,
0.03717895597219467,
0.10174889862537384,
0.06853580474853516,
0.021215340122580528,
0.025068232789635658,
0.06413266807794571,
-0.01646377332508564,
-0.045779094099998474,
0.1046786978840828,
-0.05963196977972984,
-0.189724862575531,
0.02101750858128071,
0.08540884405374527,
0.20326559245586395,
0.10785797238349915,
0.07721914350986481,
0.02077079378068447,
0.13599897921085358,
0.10310178250074387,
0.11323248594999313,
0.03765176236629486,
-0.05156496539711952,
-0.05465150624513626,
-0.039535973221063614,
-0.03105730377137661,
0.041853442788124084,
0.007680489681661129,
-0.03784731402993202,
-0.045906368643045425,
0.05514281615614891,
0.05383268743753433,
0.1506529152393341,
0.07493336498737335,
-0.2941362261772156,
-0.057759907096624374,
0.021406810730695724,
-0.007132121827453375,
-0.0709068775177002,
0.08540627360343933,
0.042006608098745346,
-0.14110782742500305,
-0.032329268753528595,
-0.048221003264188766,
0.09629875421524048,
-0.004409881308674812,
0.05261153355240822,
-0.06812834739685059,
0.08553284406661987,
-0.014683796092867851,
0.08615756034851074,
-0.3281267583370209,
0.35533758997917175,
-0.012396215461194515,
0.0734466016292572,
-0.06206270307302475,
-0.029037175700068474,
0.07281610369682312,
0.038129497319459915,
0.18971897661685944,
0.003664253745228052,
-0.13758377730846405,
-0.13151815533638,
-0.0775466039776802,
0.03204089403152466,
0.09256472438573837,
-0.025195052847266197,
0.0730566680431366,
-0.019818134605884552,
-0.006102312821894884,
0.005366210360080004,
-0.024108562618494034,
-0.06574662029743195,
-0.125741109251976,
-0.01686142571270466,
0.039553504437208176,
-0.0518348403275013,
-0.030010512098670006,
-0.01874459907412529,
0.016380880028009415,
0.149725541472435,
0.041730768978595734,
-0.014226449653506279,
-0.07478255033493042,
0.013933354988694191,
0.10319709777832031,
-0.10534513741731644,
0.04259045049548149,
-0.06568454205989838,
-0.008122086524963379,
-0.060431431978940964,
-0.051132041960954666,
0.13647951185703278,
-0.13214078545570374,
-0.010834287852048874,
-0.06781769543886185,
0.07098045945167542,
0.04570788890123367,
0.009571687318384647,
0.03042326681315899,
-0.0011855767806991935,
-0.06912793964147568,
-0.07012232393026352,
-0.02581581100821495,
-0.04395538941025734,
-0.01677955873310566,
0.06982512772083282,
-0.12592923641204834,
-0.08425233513116837,
-0.08011914789676666,
-0.03768478333950043,
0.2288094162940979,
0.18290171027183533,
-0.05857628211379051,
0.037223316729068756,
0.22955259680747986,
-0.04856827110052109,
-0.3177202343940735,
-0.0498582124710083,
-0.037608399987220764,
0.06802879273891449,
0.007571164984256029,
-0.14734923839569092,
0.05487196147441864,
0.024675119668245316,
-0.03875473514199257,
0.06054544821381569,
-0.24268090724945068,
-0.09447697550058365,
0.23863252997398376,
0.06503092497587204,
0.34327876567840576,
-0.14051753282546997,
-0.010554872453212738,
-0.08712632209062576,
-0.011893556453287601,
0.022794760763645172,
-0.1541036069393158,
0.14369913935661316,
0.016607632860541344,
0.08242599666118622,
0.015918923541903496,
-0.06341269612312317,
0.08468739688396454,
-0.0561232715845108,
0.023351620882749557,
-0.07944664359092712,
-0.09144569933414459,
0.1280016154050827,
-0.0026056950446218252,
0.048525530844926834,
-0.00604978296905756,
0.016463754698634148,
-0.021735627204179764,
-0.04448311775922775,
-0.03243892639875412,
0.0717770978808403,
0.017171967774629593,
-0.09769436717033386,
0.0020955600775778294,
0.04808271676301956,
-0.03132036700844765,
0.005982756149023771,
0.10211735963821411,
-0.04559141770005226,
0.14772509038448334,
0.01199418492615223,
0.10797091573476791,
-0.0020019980147480965,
0.028125427663326263,
0.023697927594184875,
-0.058938201516866684,
0.08004774898290634,
-0.048294372856616974,
-0.03755711019039154,
0.05712687596678734,
0.009828793816268444,
0.09130717813968658,
0.07591357082128525,
-0.04810812696814537,
0.046569522470235825,
0.11197149008512497,
-0.11881797015666962,
-0.055188871920108795,
-0.027304574847221375,
-0.024606289342045784,
0.02672877162694931,
0.05884787067770958,
0.10266101360321045,
-0.044405799359083176,
-0.02474259026348591,
-0.04017772153019905,
-0.03672410547733307,
-0.09059006720781326,
0.11367247253656387,
0.09130055457353592,
0.03619002178311348,
-0.07786738872528076,
0.0311396736651659,
0.02323867753148079,
-0.09386405348777771,
0.03398832306265831,
0.12441933155059814,
-0.11013668775558472,
-0.09371691197156906,
0.01971607469022274,
0.17998063564300537,
-0.06251515448093414,
-0.06592237204313278,
-0.12928186357021332,
-0.10518412292003632,
0.019497888162732124,
0.10729169100522995,
0.09088790416717529,
-0.02900189720094204,
-0.04077219218015671,
0.047641851007938385,
-0.06977438926696777,
0.04830286651849747,
0.07562756538391113,
-0.022047458216547966,
-0.12564638257026672,
0.04261534661054611,
0.010895291343331337,
0.07571001350879669,
-0.05666119232773781,
-0.06493066251277924,
-0.20434829592704773,
0.1040063351392746,
-0.03156694024801254,
-0.001046599238179624,
-0.07757451385259628,
-0.043865084648132324,
-0.03331741690635681,
-0.04044710099697113,
-0.04261244833469391,
-0.01709180511534214,
-0.10464277118444443,
0.025630272924900055,
-0.018092883750796318,
0.011382224969565868,
0.021042505279183388,
-0.037342049181461334,
0.05273730680346489,
-0.04626418277621269,
0.06684254109859467,
0.11042256653308868,
-0.06871714442968369,
0.08097018301486969,
-0.19203044474124908,
-0.008699951693415642,
0.060698751360177994,
-0.0022309073247015476,
0.09433228522539139,
0.007302462588995695,
0.04643864557147026,
-0.002359511097893119,
0.08589326590299606,
0.024329669773578644,
0.03473345562815666,
-0.09452484548091888,
0.02992192842066288,
-0.0005295298178680241,
-0.14352448284626007,
-0.05391319841146469,
0.02738800272345543,
0.08809973299503326,
0.03407025709748268,
0.11283417046070099,
-0.10458403825759888,
0.08770472556352615,
-0.07690533250570297,
0.0010911391582340002,
-0.01735224761068821,
-0.12865030765533447,
-0.11071605980396271,
-0.0808483436703682,
0.040114372968673706,
-0.06361198425292969,
0.14319263398647308,
0.04143035039305687,
0.02807111106812954,
-0.02800118550658226,
-0.04567030444741249,
0.003977281507104635,
0.04194261133670807,
0.15215964615345,
0.09443017095327377,
-0.04055815562605858,
-0.11737241595983505,
0.06568171828985214,
0.02883158065378666,
0.04236815869808197,
0.06466427445411682,
0.10467424243688583,
0.11577664315700531,
0.12818346917629242,
-0.004742298275232315,
0.06961045414209366,
-0.08844640851020813,
-0.050286807119846344,
0.019701140001416206,
0.013996421359479427,
-0.0006243295501917601,
0.024615680798888206,
0.2989015579223633,
-0.06959868222475052,
0.045809779316186905,
-0.04788273945450783,
-0.07004697620868683,
-0.1892378032207489,
-0.20585694909095764,
-0.09226860851049423,
-0.028284676373004913,
0.011739946901798248,
-0.08558256924152374,
-0.007978308014571667,
0.021613122895359993,
0.050330787897109985,
-0.023157142102718353,
0.1325352042913437,
0.06447798758745193,
-0.0727059543132782,
0.07329224050045013,
-0.021994056180119514,
0.05654458701610565,
0.03454988822340965,
-0.022428182885050774,
-0.10610781610012054,
-0.07280024141073227,
-0.0223271232098341,
-0.039586566388607025,
-0.08776574581861496,
0.02875405177474022,
-0.10534988343715668,
-0.05585722625255585,
-0.043275464326143265,
0.05519679933786392,
0.05938879773020744,
0.06934686750173569,
0.027212025597691536,
-0.10379766672849655,
0.02558431215584278,
0.19248872995376587,
-0.032162100076675415,
-0.1884964555501938,
-0.1268942952156067,
0.30712032318115234,
0.026301391422748566,
0.061816055327653885,
-0.08027289062738419,
0.022338831797242165,
-0.0053219273686409,
0.35080885887145996,
0.32489556074142456,
-0.0959567129611969,
0.013057463802397251,
0.020144760608673096,
0.01090316753834486,
-0.012547471560537815,
0.14505507051944733,
0.08681005984544754,
0.2891113758087158,
-0.07992883771657944,
-0.029847295954823494,
-0.06103777140378952,
-0.034277088940143585,
-0.16493357717990875,
0.010512910783290863,
0.09894195199012756,
-0.009068612940609455,
-0.039400771260261536,
0.1331646591424942,
-0.1803329735994339,
0.012991991825401783,
-0.020007561892271042,
-0.18579721450805664,
-0.12438517063856125,
-0.049753155559301376,
0.06308048963546753,
0.07751084119081497,
0.11878927797079086,
-0.07040722668170929,
-0.04940363019704819,
-0.09114145487546921,
0.01841490902006626,
-0.06798754632472992,
-0.07398400455713272,
0.10384278744459152,
0.09512092173099518,
0.0804273709654808,
-0.0390830896794796,
0.02130972594022751,
0.13632193207740784,
0.012451530434191227,
-0.007471355143934488,
0.12948563694953918,
0.014298303984105587,
0.04941102862358093,
0.0281495563685894,
0.014484085142612457,
-0.049641165882349014,
-0.043116893619298935,
0.0194392129778862,
-0.10787808150053024,
0.08290032297372818,
-0.013884318992495537,
-0.02362656407058239,
-0.07023096829652786,
0.12260425090789795,
-0.05615215003490448,
0.10657071322202682,
0.14869646728038788,
-0.00469556450843811,
0.01875593699514866,
-0.04485579952597618,
0.06097223609685898,
0.06572280079126358,
-0.10268702358007431,
-0.11858800798654556,
-0.12468359619379044,
-0.09141256660223007,
0.05915571004152298,
-0.023102540522813797,
-0.2034868746995926,
-0.0044338032603263855,
-0.08981695771217346,
-0.0005302767385728657,
-0.11224491894245148,
0.06454040110111237,
0.1465035229921341,
0.06683658808469772,
0.011234622448682785,
0.03389599546790123,
-0.008171062916517258,
0.09155454486608505,
-0.16185560822486877,
-0.1152130737900734
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext-finetuned-pubmedqa-1
This model is a fine-tuned version of [microsoft/BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext](https://huggingface.co/microsoft/BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext) on the None dataset.
It achieves the following results on the evaluation set:
- Loss: 0.6660
- Accuracy: 0.7
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 1e-05
- train_batch_size: 8
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 10
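These settings map roughly onto the `TrainingArguments` below. This is an illustrative reconstruction rather than the original training script: dataset preparation is omitted, and the label count (3, for PubMedQA-style yes/no/maybe) is an assumption.

```python
import numpy as np
from transformers import (
    AutoTokenizer,
    AutoModelForSequenceClassification,
    Trainer,
    TrainingArguments,
)

base = "microsoft/BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext"
tokenizer = AutoTokenizer.from_pretrained(base)
model = AutoModelForSequenceClassification.from_pretrained(base, num_labels=3)  # label count assumed

args = TrainingArguments(
    output_dir="./pubmedqa-finetuned",
    learning_rate=1e-5,
    per_device_train_batch_size=8,
    per_device_eval_batch_size=8,
    num_train_epochs=10,
    seed=42,
    evaluation_strategy="epoch",  # one evaluation per epoch, as in the results table
    # AdamW with betas=(0.9, 0.999), eps=1e-8 and a linear LR schedule are the Trainer defaults.
)

def compute_metrics(eval_pred):
    logits, labels = eval_pred
    return {"accuracy": float((np.argmax(logits, axis=-1) == labels).mean())}

# trainer = Trainer(model=model, args=args, compute_metrics=compute_metrics,
#                   train_dataset=train_ds, eval_dataset=eval_ds)  # tokenized PubMedQA splits (not shown)
# trainer.train()
```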
### Training results
| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| No log | 1.0 | 57 | 0.8471 | 0.58 |
| No log | 2.0 | 114 | 0.8450 | 0.58 |
| No log | 3.0 | 171 | 0.7846 | 0.58 |
| No log | 4.0 | 228 | 0.8649 | 0.58 |
| No log | 5.0 | 285 | 0.7220 | 0.68 |
| No log | 6.0 | 342 | 0.7395 | 0.66 |
| No log | 7.0 | 399 | 0.7198 | 0.72 |
| No log | 8.0 | 456 | 0.6417 | 0.72 |
| 0.7082 | 9.0 | 513 | 0.6265 | 0.74 |
| 0.7082 | 10.0 | 570 | 0.6660 | 0.7 |
### Framework versions
- Transformers 4.10.2
- Pytorch 1.9.0+cu102
- Datasets 1.12.0
- Tokenizers 0.10.3
|
{"license": "mit", "tags": ["generated_from_trainer"], "datasets": [], "metrics": ["accuracy"]}
|
text-classification
|
blizrys/BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext-finetuned-pubmedqa-1
|
[
"transformers",
"pytorch",
"tensorboard",
"bert",
"text-classification",
"generated_from_trainer",
"license:mit",
"model-index",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #bert #text-classification #generated_from_trainer #license-mit #model-index #autotrain_compatible #endpoints_compatible #region-us
|
BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext-finetuned-pubmedqa-1
========================================================================
This model is a fine-tuned version of microsoft/BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext on the None dataset.
It achieves the following results on the evaluation set:
* Loss: 0.6660
* Accuracy: 0.7
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 1e-05
* train\_batch\_size: 8
* eval\_batch\_size: 8
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 10
### Training results
### Framework versions
* Transformers 4.10.2
* Pytorch 1.9.0+cu102
* Datasets 1.12.0
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 1e-05\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 10",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.10.2\n* Pytorch 1.9.0+cu102\n* Datasets 1.12.0\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #bert #text-classification #generated_from_trainer #license-mit #model-index #autotrain_compatible #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 1e-05\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 10",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.10.2\n* Pytorch 1.9.0+cu102\n* Datasets 1.12.0\n* Tokenizers 0.10.3"
] |
[
56,
98,
4,
34
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #bert #text-classification #generated_from_trainer #license-mit #model-index #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 1e-05\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 10### Training results### Framework versions\n\n\n* Transformers 4.10.2\n* Pytorch 1.9.0+cu102\n* Datasets 1.12.0\n* Tokenizers 0.10.3"
] |
[
-0.10087471455335617,
0.07445205003023148,
-0.0018672136357054114,
0.11826130747795105,
0.18967922031879425,
0.030420133844017982,
0.12296590954065323,
0.11705076694488525,
-0.0897965282201767,
0.012555219233036041,
0.12275653332471848,
0.1861649751663208,
0.0020518277306109667,
0.10130710899829865,
-0.05488329753279686,
-0.2777828276157379,
-0.021608062088489532,
0.05884600803256035,
-0.06829068064689636,
0.13765493035316467,
0.09370884299278259,
-0.14887546002864838,
0.08389528095722198,
0.008579831570386887,
-0.2360214740037918,
0.01208409108221531,
0.029537731781601906,
-0.06604434549808502,
0.15655776858329773,
0.020534250885248184,
0.13675643503665924,
0.010262326337397099,
0.08835168927907944,
-0.1649406999349594,
0.01310371607542038,
0.04397793859243393,
0.00755735021084547,
0.0942433774471283,
0.06052087992429733,
-0.007056636270135641,
0.11515629291534424,
-0.07371179014444351,
0.05946892872452736,
0.021689768880605698,
-0.13217361271381378,
-0.2126310020685196,
-0.06642154604196548,
0.02727845311164856,
0.05743500962853432,
0.09789297729730606,
-0.006133230868726969,
0.13622774183750153,
-0.09938891977071762,
0.09681244194507599,
0.22032880783081055,
-0.28256186842918396,
-0.06461110711097717,
0.043785568326711655,
0.014646513387560844,
0.07640674710273743,
-0.12039827555418015,
-0.014144625514745712,
0.0585813969373703,
0.04299136623740196,
0.13209235668182373,
-0.03688430413603783,
-0.08971057087182999,
0.023791104555130005,
-0.141779825091362,
-0.02241506800055504,
0.1186365932226181,
0.026175886392593384,
-0.027931388467550278,
-0.03819325193762779,
-0.05127131938934326,
-0.16317535936832428,
-0.0438343770802021,
-0.017387235537171364,
0.04449324682354927,
-0.04007164388895035,
-0.0830298662185669,
-0.0016397204017266631,
-0.1107645034790039,
-0.06427336484193802,
-0.06960709393024445,
0.1529645323753357,
0.03561661019921303,
0.008191050961613655,
-0.03942060098052025,
0.1089097410440445,
-0.003116583451628685,
-0.1283807009458542,
0.037175390869379044,
0.03050795942544937,
0.0017121904529631138,
-0.06318014860153198,
-0.06389949470758438,
-0.07560490071773529,
-0.0054520536214113235,
0.08795551210641861,
-0.05356671288609505,
0.04373497888445854,
0.039178043603897095,
0.046573515981435776,
-0.09148012101650238,
0.20813997089862823,
-0.036197729408741,
-0.009130986407399178,
-0.000954429735429585,
0.04212907329201698,
-0.012447568587958813,
-0.009528695605695248,
-0.13113956153392792,
-0.003041745163500309,
0.11384139209985733,
0.010072685778141022,
-0.08009220659732819,
0.07969297468662262,
-0.04706133157014847,
-0.03210445120930672,
0.004182311240583658,
-0.09002657979726791,
0.038868069648742676,
0.00031449150992557406,
-0.08443127572536469,
-0.01521297823637724,
0.01800416223704815,
0.013403691351413727,
-0.020411815494298935,
0.1369602233171463,
-0.09691087156534195,
0.04585041105747223,
-0.10298489034175873,
-0.12360896915197372,
0.005544786807149649,
-0.09445403516292572,
0.028048787266016006,
-0.09776705503463745,
-0.1468457728624344,
-0.01642313413321972,
0.048322275280952454,
-0.021601524204015732,
-0.04933108389377594,
-0.05346988886594772,
-0.06828141957521439,
0.002529844641685486,
-0.012349748983979225,
0.14158034324645996,
-0.05274554714560509,
0.11273174732923508,
0.03212076053023338,
0.06280422955751419,
-0.05475148931145668,
0.0652107521891594,
-0.09677635133266449,
-0.0034415952395647764,
-0.19186139106750488,
0.04712645709514618,
-0.040700025856494904,
0.07201069593429565,
-0.0821763426065445,
-0.11528884619474411,
0.017881454899907112,
0.0045051840133965015,
0.07057970762252808,
0.09806141257286072,
-0.162412628531456,
-0.08723504841327667,
0.15380562841892242,
-0.058673568069934845,
-0.10650723427534103,
0.11127647757530212,
-0.07199294865131378,
0.06915052980184555,
0.08555793762207031,
0.17237983644008636,
0.07330083101987839,
-0.07016393542289734,
0.03746413066983223,
-0.004778741858899593,
0.04778588190674782,
-0.07594197243452072,
0.05105135962367058,
0.0015088138170540333,
-0.0023246698547154665,
0.03326098620891571,
-0.024722347036004066,
0.059644389897584915,
-0.10566816478967667,
-0.08970538526773453,
-0.026380063965916634,
-0.096671462059021,
0.07834067940711975,
0.08191876858472824,
0.09922488778829575,
-0.09693557769060135,
-0.07112260907888412,
0.10247140377759933,
0.060264505445957184,
-0.05659811198711395,
0.025869399309158325,
-0.056365933269262314,
0.06030962988734245,
-0.042948611080646515,
-0.03022135980427265,
-0.19571557641029358,
-0.011244597844779491,
0.005565489176660776,
0.020473124459385872,
0.030352793633937836,
0.037365250289440155,
0.0727711096405983,
0.059891607612371445,
-0.057853590697050095,
-0.009954105131328106,
-0.013413921929895878,
0.00007325362821575254,
-0.1546209156513214,
-0.18935628235340118,
-0.02566433697938919,
-0.015149794518947601,
0.11270678043365479,
-0.21750064194202423,
0.04049839451909065,
-0.01805323362350464,
0.06106065958738327,
0.0055373902432620525,
-0.009839157573878765,
-0.05142056941986084,
0.0920400619506836,
-0.03185184299945831,
-0.04095391184091568,
0.08654773235321045,
-0.003454808611422777,
-0.08718836307525635,
-0.04336337372660637,
-0.10840009897947311,
0.18414044380187988,
0.14098884165287018,
-0.1454484462738037,
-0.09236498177051544,
-0.012755293399095535,
-0.05156616494059563,
-0.02170892059803009,
-0.04523766413331032,
0.03586788475513458,
0.20059514045715332,
-0.012268475256860256,
0.1612502485513687,
-0.06689981371164322,
-0.04583900421857834,
0.0223530326038599,
-0.03364070504903793,
0.03314638137817383,
0.12456385791301727,
0.12383009493350983,
-0.08955555409193039,
0.1395193487405777,
0.1415599286556244,
-0.08706090599298477,
0.14398406445980072,
-0.030128633603453636,
-0.06498973071575165,
-0.012251322157680988,
-0.03653278574347496,
-0.0023718972224742174,
0.09199409186840057,
-0.15647822618484497,
-0.01872975192964077,
0.015141311101615429,
0.015821296721696854,
0.02420075424015522,
-0.22974810004234314,
-0.04504149779677391,
0.03679995611310005,
-0.02951870672404766,
-0.011743026785552502,
-0.017482250928878784,
0.013256818056106567,
0.1132611334323883,
0.004455277696251869,
-0.08083771914243698,
0.03977440297603607,
0.007252393290400505,
-0.08455335348844528,
0.22437596321105957,
-0.07326865196228027,
-0.14848440885543823,
-0.1264139711856842,
-0.08501695841550827,
-0.03692084550857544,
0.01439812034368515,
0.05887556076049805,
-0.10111205279827118,
-0.021752193570137024,
-0.04804990068078041,
0.022130535915493965,
-0.016008267179131508,
0.035250235348939896,
-0.01606730930507183,
0.000024005617888178676,
0.054995566606521606,
-0.10992296785116196,
-0.012746360152959824,
-0.06429212540388107,
-0.06623243540525436,
0.05292920023202896,
0.027794810011982918,
0.108037568628788,
0.1753002107143402,
-0.04140227288007736,
0.011297591961920261,
-0.03701674938201904,
0.2273317575454712,
-0.0702124610543251,
-0.029910041019320488,
0.12312145531177521,
-0.014154007658362389,
0.051207881420850754,
0.10723748803138733,
0.08148995786905289,
-0.08613479882478714,
0.0070084636099636555,
0.026971017941832542,
-0.042264122515916824,
-0.22022873163223267,
-0.04610889405012131,
-0.0532478466629982,
-0.02569865994155407,
0.09923196583986282,
0.0243906881660223,
0.053167764097452164,
0.07749416679143906,
0.054372914135456085,
0.07975420355796814,
-0.042837146669626236,
0.05561673268675804,
0.11992864310741425,
0.041178494691848755,
0.13108029961585999,
-0.04205411672592163,
-0.07842884212732315,
0.027184495702385902,
-0.03142145276069641,
0.21111464500427246,
0.012256471440196037,
0.12763746082782745,
0.054450590163469315,
0.16881492733955383,
0.016810424625873566,
0.08852731436491013,
-0.000012786502338713035,
-0.05023816600441933,
-0.009649022482335567,
-0.036660656332969666,
-0.039399489760398865,
0.013165823183953762,
-0.0456901378929615,
0.05036661773920059,
-0.13636426627635956,
-0.02900124527513981,
0.04399678111076355,
0.22730059921741486,
0.030904410406947136,
-0.3303312361240387,
-0.08747995644807816,
-0.004409965593367815,
-0.02814365178346634,
-0.016618741676211357,
0.015298642218112946,
0.09671124070882797,
-0.10410208255052567,
0.019142666831612587,
-0.07129934430122375,
0.09567778557538986,
-0.047720976173877716,
0.05531203746795654,
0.06544215232133865,
0.0835597962141037,
-0.005873092450201511,
0.08351713418960571,
-0.3126921057701111,
0.27711209654808044,
0.005778764374554157,
0.06630592793226242,
-0.08003444224596024,
-0.01509715337306261,
0.030926426872611046,
0.07679086923599243,
0.055114392191171646,
-0.014640505425632,
-0.004996994510293007,
-0.22130127251148224,
-0.037120431661605835,
0.023882251232862473,
0.09651049226522446,
-0.033639054745435715,
0.09181001037359238,
-0.025505540892481804,
0.0076469918712973595,
0.0789426639676094,
-0.020320424810051918,
-0.03980683535337448,
-0.08821604400873184,
-0.016357718035578728,
0.008503307588398457,
-0.0370405875146389,
-0.054707664996385574,
-0.12074162811040878,
-0.13418833911418915,
0.15585695207118988,
-0.0054986304603517056,
-0.03657561168074608,
-0.11631700396537781,
0.09414570778608322,
0.06403425335884094,
-0.09031129628419876,
0.04169662296772003,
0.007905388250946999,
0.0650867372751236,
0.023276949301362038,
-0.06935807317495346,
0.11569317430257797,
-0.05001915246248245,
-0.15496185421943665,
-0.060223743319511414,
0.10384432971477509,
0.04059140384197235,
0.06356358528137207,
-0.002912263385951519,
0.01480117253959179,
-0.03248436748981476,
-0.09006933122873306,
0.019769685342907906,
-0.020521096885204315,
0.07017780840396881,
0.015280917286872864,
-0.06729315966367722,
0.012287702411413193,
-0.06670353561639786,
-0.020423775538802147,
0.20356030762195587,
0.21988023817539215,
-0.10152546316385269,
0.017125777900218964,
0.038257524371147156,
-0.07485830783843994,
-0.2050040066242218,
0.06558529287576675,
0.06073904037475586,
0.0072907558642327785,
0.048449527472257614,
-0.17169135808944702,
0.13440638780593872,
0.09380823373794556,
-0.005063493736088276,
0.11106094717979431,
-0.3216880261898041,
-0.13513824343681335,
0.12704159319400787,
0.15944857895374298,
0.13189969956874847,
-0.1420527845621109,
-0.01899045519530773,
-0.020513547584414482,
-0.11597258597612381,
0.10896537452936172,
-0.06535527110099792,
0.12542490661144257,
-0.028553416952490807,
0.09781292825937271,
0.013191691599786282,
-0.059483449906110764,
0.1041710376739502,
0.026370512321591377,
0.09837990999221802,
-0.06412239372730255,
-0.07025934010744095,
0.04213874414563179,
-0.030555158853530884,
0.0063324798829853535,
-0.05739928409457207,
0.01748286373913288,
-0.10120146721601486,
-0.0273833479732275,
-0.08622881025075912,
0.04288238286972046,
-0.037829507142305374,
-0.06904426217079163,
-0.031890619546175,
0.025272948667407036,
0.03649219870567322,
-0.01712815649807453,
0.12754672765731812,
-0.00017206979100592434,
0.16839542984962463,
0.11143731325864792,
0.08624520897865295,
-0.06643806397914886,
-0.0680655762553215,
-0.013635866343975067,
-0.015448175370693207,
0.05677620321512222,
-0.13587656617164612,
0.024703465402126312,
0.1463315188884735,
0.0293254517018795,
0.13956840336322784,
0.09108337014913559,
-0.016175590455532074,
0.009848171845078468,
0.062446098774671555,
-0.15549932420253754,
-0.07894393801689148,
-0.012652570381760597,
-0.08286992460489273,
-0.10476948320865631,
0.05083287134766579,
0.08412737399339676,
-0.07291921973228455,
-0.01228847075253725,
-0.01220008172094822,
-0.012532250955700874,
-0.05991440266370773,
0.20119494199752808,
0.07473272830247879,
0.049112964421510696,
-0.10324537009000778,
0.05423522740602493,
0.059235602617263794,
-0.06756359338760376,
-0.005590261425822973,
0.07764670997858047,
-0.08241984248161316,
-0.04313085973262787,
0.09108132123947144,
0.19661317765712738,
-0.07557334750890732,
-0.026381639763712883,
-0.13812591135501862,
-0.12503010034561157,
0.0752948746085167,
0.17032550275325775,
0.1168777346611023,
0.007034961134195328,
-0.06915662437677383,
0.011166122741997242,
-0.13221776485443115,
0.07713543623685837,
0.03492147848010063,
0.0688253864645958,
-0.12926004827022552,
0.20082725584506989,
0.006491682026535273,
0.04704971984028816,
-0.026859266683459282,
0.018505925312638283,
-0.1156233549118042,
0.01872982084751129,
-0.11804894357919693,
-0.030524052679538727,
-0.020023100078105927,
0.002488550031557679,
-0.01170563418418169,
-0.06073109805583954,
-0.05017226189374924,
0.0036896364763379097,
-0.12547366321086884,
-0.01566261053085327,
0.03781323879957199,
0.051480237394571304,
-0.1106124147772789,
-0.0357719361782074,
0.015596827492117882,
-0.049096908420324326,
0.060551486909389496,
0.040603190660476685,
0.011029127053916454,
0.0704314112663269,
-0.14901046454906464,
0.00571257621049881,
0.06861218065023422,
0.003913934342563152,
0.07239187508821487,
-0.06439506262540817,
-0.0014872022438794374,
-0.004179536364972591,
0.07989631593227386,
0.030029090121388435,
0.06473201513290405,
-0.1428796350955963,
0.003717373125255108,
-0.023107990622520447,
-0.08673097938299179,
-0.06917595863342285,
0.037239108234643936,
0.07201912254095078,
0.008112939074635506,
0.18997791409492493,
-0.08107025921344757,
0.04985703527927399,
-0.2123512625694275,
-0.0034258824307471514,
-0.015839487314224243,
-0.11317837983369827,
-0.11147201061248779,
-0.07770148664712906,
0.06653372943401337,
-0.05245012044906616,
0.1361333280801773,
0.0531688816845417,
0.04716285690665245,
0.03165140748023987,
-0.01215999387204647,
0.013594666495919228,
0.024500884115695953,
0.21904048323631287,
0.036557089537382126,
-0.041461773216724396,
0.0626106783747673,
0.07423950731754303,
0.0950976088643074,
0.12400675565004349,
0.2043699473142624,
0.14615650475025177,
-0.039489734917879105,
0.08055940270423889,
0.025782953947782516,
-0.036622971296310425,
-0.14954398572444916,
0.030974775552749634,
-0.043097931891679764,
0.09213200956583023,
-0.03084328956902027,
0.2113218605518341,
0.05836709961295128,
-0.17056569457054138,
0.05148068070411682,
-0.053073156625032425,
-0.09523463249206543,
-0.10976426303386688,
-0.040263086557388306,
-0.08308563381433487,
-0.1302037239074707,
0.003760353894904256,
-0.10824208706617355,
0.016063503921031952,
0.1118280291557312,
0.007795763202011585,
-0.034358538687229156,
0.15871919691562653,
0.01847602240741253,
0.02101091295480728,
0.06819093227386475,
0.003941171802580357,
-0.01705554872751236,
-0.12275547534227371,
-0.05359440669417381,
-0.025766249746084213,
-0.011823983862996101,
0.027921436354517937,
-0.062184035778045654,
-0.07204703241586685,
0.02614426054060459,
-0.025676319375634193,
-0.10380465537309647,
0.01753372885286808,
0.024549825116991997,
0.07222782075405121,
0.03785202279686928,
0.0025171011220663786,
0.010276084765791893,
-0.021117808297276497,
0.21757149696350098,
-0.0725889801979065,
-0.0768909826874733,
-0.07946910709142685,
0.2860095202922821,
0.05348288267850876,
-0.0017552237259224057,
0.03149405121803284,
-0.0645890161395073,
0.017016947269439697,
0.26553279161453247,
0.21850821375846863,
-0.09494562447071075,
0.0003310833708383143,
0.004261404741555452,
-0.009674965403974056,
-0.000014652546269644517,
0.13021469116210938,
0.11598257720470428,
0.04604387283325195,
-0.10280904173851013,
-0.026232212781906128,
-0.053591545671224594,
-0.012847300618886948,
-0.037330057471990585,
0.07293705642223358,
0.061060208827257156,
0.005256067030131817,
-0.04757178574800491,
0.06412451714277267,
-0.07306212931871414,
-0.09998247772455215,
0.06492608785629272,
-0.2123156487941742,
-0.15849162638187408,
-0.01606045477092266,
0.09352979063987732,
0.00341704860329628,
0.07219978421926498,
-0.025162888690829277,
0.0015745960408821702,
0.051409583538770676,
-0.017110226675868034,
-0.09932514280080795,
-0.07319658994674683,
0.09871815890073776,
-0.1166648268699646,
0.18572978675365448,
-0.04479580745100975,
0.06175870820879936,
0.11920980364084244,
0.06929562985897064,
-0.052779149264097214,
0.06122441589832306,
0.03398152068257332,
-0.07694023847579956,
0.038557518273591995,
0.08231307566165924,
-0.028508564457297325,
0.036955755203962326,
0.03263469412922859,
-0.14628484845161438,
0.03771299123764038,
-0.09448335319757462,
-0.06003115326166153,
-0.04207547754049301,
-0.043769653886556625,
-0.05183563008904457,
0.12642541527748108,
0.22688649594783783,
-0.019478969275951385,
0.006698258686810732,
-0.0734444409608841,
0.0020292436238378286,
0.05666620284318924,
0.012660848908126354,
-0.08469624072313309,
-0.22923652827739716,
0.0015962637262418866,
0.06408537924289703,
-0.02862069196999073,
-0.25650307536125183,
-0.08264486491680145,
-0.001982743851840496,
-0.07134432345628738,
-0.0932161808013916,
0.08230649679899216,
0.07807870954275131,
0.053940895944833755,
-0.053267452865839005,
-0.07987610250711441,
-0.07627292722463608,
0.16460570693016052,
-0.15345199406147003,
-0.09168349951505661
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext-finetuned-pubmedqa-2
This model is a fine-tuned version of [microsoft/BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext](https://huggingface.co/microsoft/BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext) on the None dataset.
It achieves the following results on the evaluation set:
- Loss: 1.0005
- Accuracy: 0.54
## Model description
More information needed
## Intended uses & limitations
More information needed
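No usage example is published for this checkpoint. A minimal loading sketch is shown below for orientation only; the sentence-pair input format and the PubMedQA-style label set are assumptions, since neither is documented in this card.

```python
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

repo = "blizrys/BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext-finetuned-pubmedqa-2"
tokenizer = AutoTokenizer.from_pretrained(repo)
model = AutoModelForSequenceClassification.from_pretrained(repo)

question = "Do mitochondria play a role in remodelling lace plant leaves during programmed cell death?"
abstract = "Programmed cell death (PCD) is the regulated death of cells within an organism..."

# Question and abstract encoded as a sentence pair -- this input format is an assumption.
inputs = tokenizer(question, abstract, truncation=True, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits
pred = logits.argmax(dim=-1).item()
print(pred, model.config.id2label.get(pred, pred))
```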
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 0.003
- train_batch_size: 8
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 5
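For reference, the optimizer and scheduler bullets above correspond roughly to the setup sketched below, which the `Trainer` builds internally; the label count is an assumption, and the total of 285 steps is taken from the results table that follows.

```python
import torch
from transformers import AutoModelForSequenceClassification, get_linear_schedule_with_warmup

model = AutoModelForSequenceClassification.from_pretrained(
    "microsoft/BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext",
    num_labels=3,  # label count assumed
)

# "Adam with betas=(0.9,0.999) and epsilon=1e-08": the Trainer actually instantiates AdamW.
optimizer = torch.optim.AdamW(model.parameters(), lr=3e-3, betas=(0.9, 0.999), eps=1e-8)

# Linear decay with no warmup over the whole run: 57 steps/epoch x 5 epochs = 285 steps.
scheduler = get_linear_schedule_with_warmup(
    optimizer, num_warmup_steps=0, num_training_steps=285
)
```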
### Training results
| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| No log | 1.0 | 57 | 1.3510 | 0.54 |
| No log | 2.0 | 114 | 0.9606 | 0.54 |
| No log | 3.0 | 171 | 0.9693 | 0.54 |
| No log | 4.0 | 228 | 1.0445 | 0.54 |
| No log | 5.0 | 285 | 1.0005 | 0.54 |
### Framework versions
- Transformers 4.10.2
- Pytorch 1.9.0+cu102
- Datasets 1.12.1
- Tokenizers 0.10.3
|
{"license": "mit", "tags": ["generated_from_trainer"], "datasets": [], "metrics": ["accuracy"]}
|
text-classification
|
blizrys/BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext-finetuned-pubmedqa-2
|
[
"transformers",
"pytorch",
"tensorboard",
"bert",
"text-classification",
"generated_from_trainer",
"license:mit",
"model-index",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #bert #text-classification #generated_from_trainer #license-mit #model-index #autotrain_compatible #endpoints_compatible #region-us
|
BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext-finetuned-pubmedqa-2
========================================================================
This model is a fine-tuned version of microsoft/BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext on the None dataset.
It achieves the following results on the evaluation set:
* Loss: 1.0005
* Accuracy: 0.54
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 0.003
* train\_batch\_size: 8
* eval\_batch\_size: 8
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 5
### Training results
### Framework versions
* Transformers 4.10.2
* Pytorch 1.9.0+cu102
* Datasets 1.12.1
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.003\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 5",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.10.2\n* Pytorch 1.9.0+cu102\n* Datasets 1.12.1\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #bert #text-classification #generated_from_trainer #license-mit #model-index #autotrain_compatible #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.003\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 5",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.10.2\n* Pytorch 1.9.0+cu102\n* Datasets 1.12.1\n* Tokenizers 0.10.3"
] |
[
56,
97,
4,
34
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #bert #text-classification #generated_from_trainer #license-mit #model-index #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.003\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 5### Training results### Framework versions\n\n\n* Transformers 4.10.2\n* Pytorch 1.9.0+cu102\n* Datasets 1.12.1\n* Tokenizers 0.10.3"
] |
[
-0.10349975526332855,
0.06344456970691681,
-0.0019184647826477885,
0.11987123638391495,
0.19383825361728668,
0.03553709015250206,
0.11946290731430054,
0.11506819725036621,
-0.0921780988574028,
0.011194009333848953,
0.12322049587965012,
0.18850217759609222,
0.0010970896109938622,
0.08295973390340805,
-0.05663697421550751,
-0.2735445201396942,
-0.0246647410094738,
0.05330060049891472,
-0.06946317851543427,
0.135460764169693,
0.09445960074663162,
-0.146188884973526,
0.08417535573244095,
0.005692576989531517,
-0.23398488759994507,
0.016236644238233566,
0.03216125816106796,
-0.059385690838098526,
0.15740671753883362,
0.02042636275291443,
0.14010050892829895,
0.006836684420704842,
0.0890783816576004,
-0.16948877274990082,
0.014642707072198391,
0.04968752712011337,
0.005074121057987213,
0.09238757938146591,
0.06538393348455429,
-0.011776994913816452,
0.12884695827960968,
-0.07749283313751221,
0.056903284043073654,
0.023865727707743645,
-0.13188563287258148,
-0.20555731654167175,
-0.06734432280063629,
0.013664310798048973,
0.056952737271785736,
0.10089671611785889,
-0.004098115023225546,
0.13478153944015503,
-0.10412298887968063,
0.09907609969377518,
0.22220546007156372,
-0.28443679213523865,
-0.06638854742050171,
0.04972411319613457,
0.007902383804321289,
0.0806228369474411,
-0.11729621142148972,
-0.008129745721817017,
0.054393719881772995,
0.04416169598698616,
0.13132822513580322,
-0.038020092993974686,
-0.09691240638494492,
0.02836534008383751,
-0.1407090276479721,
-0.020156629383563995,
0.11667231470346451,
0.023953180760145187,
-0.03011619672179222,
-0.03211808204650879,
-0.05178442969918251,
-0.15914560854434967,
-0.040777549147605896,
-0.01471991278231144,
0.04294143617153168,
-0.036381661891937256,
-0.07734332233667374,
-0.0053032273426651955,
-0.11117502301931381,
-0.07190420478582382,
-0.07392175495624542,
0.1590186357498169,
0.036149680614471436,
0.01088380441069603,
-0.03767021745443344,
0.11194378137588501,
-0.0015582783380523324,
-0.12744517624378204,
0.03522256016731262,
0.031644754111766815,
-0.005154567304998636,
-0.062154751271009445,
-0.06587875634431839,
-0.0824107974767685,
-0.00172227225266397,
0.07819213718175888,
-0.05011265352368355,
0.0448751337826252,
0.04172651097178459,
0.04531484469771385,
-0.09778149425983429,
0.2005617618560791,
-0.03291850909590721,
-0.006867273710668087,
-0.0019516113679856062,
0.03737768158316612,
-0.015111275017261505,
-0.013271884992718697,
-0.12827883660793304,
-0.005179877392947674,
0.10421895980834961,
0.012189898639917374,
-0.08036947250366211,
0.07848885655403137,
-0.04792110621929169,
-0.03147523105144501,
-0.008730198256671429,
-0.0914900004863739,
0.0396302305161953,
0.003149918280541897,
-0.08248874545097351,
-0.022175395861268044,
0.018364133313298225,
0.016891784965991974,
-0.022277481853961945,
0.13812187314033508,
-0.09204432368278503,
0.046068254858255386,
-0.10505478829145432,
-0.12402471154928207,
-0.00013229201431386173,
-0.0990481972694397,
0.02580234967172146,
-0.09585228562355042,
-0.15151406824588776,
-0.018140273168683052,
0.050865791738033295,
-0.02352742664515972,
-0.045944612473249435,
-0.05282685533165932,
-0.06611424684524536,
0.0026201284490525723,
-0.009046456776559353,
0.15165206789970398,
-0.05405573919415474,
0.10976162552833557,
0.030187658965587616,
0.05825664848089218,
-0.05620517209172249,
0.06406164914369583,
-0.09123047441244125,
-0.005086524877697229,
-0.17983601987361908,
0.04769642651081085,
-0.0392010435461998,
0.07152712345123291,
-0.08167752623558044,
-0.10967840999364853,
0.011800778098404408,
0.003535629017278552,
0.07156714797019958,
0.09769883751869202,
-0.17325372993946075,
-0.08298186212778091,
0.15089890360832214,
-0.054416049271821976,
-0.10276492685079575,
0.11342766880989075,
-0.0751129686832428,
0.06410781294107437,
0.08543961495161057,
0.16500547528266907,
0.08148369938135147,
-0.06606651097536087,
0.04554460942745209,
0.0007238585967570543,
0.04752262309193611,
-0.07413946092128754,
0.050699129700660706,
-0.0024784456472843885,
-0.014704499393701553,
0.03344373404979706,
-0.02159540168941021,
0.05575539916753769,
-0.10607803612947464,
-0.09387324005365372,
-0.031494129449129105,
-0.09718593209981918,
0.07324177026748657,
0.08279352635145187,
0.09782934188842773,
-0.09788758307695389,
-0.06564809381961823,
0.08608832210302353,
0.06179545074701309,
-0.05282195657491684,
0.025414662435650826,
-0.050718821585178375,
0.058081161230802536,
-0.03951388970017433,
-0.02988150715827942,
-0.19815365970134735,
-0.011575696058571339,
0.004626043606549501,
0.030264893546700478,
0.030450081452727318,
0.04323921725153923,
0.0743846520781517,
0.060396213084459305,
-0.05827020853757858,
-0.003216044045984745,
-0.011834768578410149,
-0.0009273603209294379,
-0.15712593495845795,
-0.19882987439632416,
-0.023856354877352715,
-0.016807403415441513,
0.10770367830991745,
-0.22501292824745178,
0.03791084513068199,
-0.022971801459789276,
0.0587056465446949,
0.0024185371585190296,
-0.010020165704190731,
-0.053585853427648544,
0.0935823991894722,
-0.03260447829961777,
-0.03903326019644737,
0.08782321214675903,
-0.004697255324572325,
-0.09051459282636642,
-0.046109575778245926,
-0.10222547501325607,
0.1844615489244461,
0.1352466195821762,
-0.15305031836032867,
-0.08912665396928787,
-0.012967649847269058,
-0.04944417253136635,
-0.022238854318857193,
-0.052325401455163956,
0.04167763888835907,
0.20627228915691376,
-0.01140100322663784,
0.16016677021980286,
-0.06499595195055008,
-0.04146505892276764,
0.02113158255815506,
-0.032578740268945694,
0.037301573902368546,
0.12742920219898224,
0.12302359938621521,
-0.0946425274014473,
0.1319817453622818,
0.13859876990318298,
-0.09339684247970581,
0.14339867234230042,
-0.029006466269493103,
-0.06436686217784882,
-0.01003218162804842,
-0.030121903866529465,
-0.00024357566144317389,
0.08951583504676819,
-0.15847282111644745,
-0.0195428729057312,
0.013582766987383366,
0.018528880551457405,
0.023226063698530197,
-0.23057302832603455,
-0.04769810661673546,
0.04078400135040283,
-0.031961839646101,
-0.012888779863715172,
-0.017164386808872223,
0.011581255123019218,
0.11415606737136841,
0.0019458717433735728,
-0.0836932510137558,
0.03800475597381592,
0.0032988088205456734,
-0.08583945780992508,
0.22544832527637482,
-0.07000870257616043,
-0.13695409893989563,
-0.12508736550807953,
-0.0814225897192955,
-0.04510823264718056,
0.01567450538277626,
0.057074639946222305,
-0.10383432358503342,
-0.021394316107034683,
-0.04927509278059006,
0.015486709773540497,
-0.02213936112821102,
0.03974810615181923,
-0.024948112666606903,
0.0009653024026192725,
0.054925620555877686,
-0.11074631661176682,
-0.011730954982340336,
-0.06496027112007141,
-0.07545340061187744,
0.04962506890296936,
0.029452083632349968,
0.10861045867204666,
0.1703566461801529,
-0.03815723955631256,
0.010563071817159653,
-0.03295049071311951,
0.22508935630321503,
-0.06789840757846832,
-0.03588700294494629,
0.12103001028299332,
-0.013732951134443283,
0.04993169382214546,
0.10703761130571365,
0.08411391824483871,
-0.08491749316453934,
0.007716527674347162,
0.03330645337700844,
-0.04401382803916931,
-0.22557516396045685,
-0.04345863312482834,
-0.0602383054792881,
-0.03406121954321861,
0.09771441668272018,
0.02816399373114109,
0.053060244768857956,
0.0778566524386406,
0.054240692406892776,
0.08224654942750931,
-0.04418935254216194,
0.0515282042324543,
0.10873981565237045,
0.04451474919915199,
0.13151493668556213,
-0.04425540566444397,
-0.07655186206102371,
0.029729070141911507,
-0.03721974790096283,
0.22580046951770782,
0.014185202307999134,
0.1280866116285324,
0.058364272117614746,
0.176515132188797,
0.013077834621071815,
0.09060919284820557,
-0.0037198341451585293,
-0.050707995891571045,
-0.009184770286083221,
-0.03844541683793068,
-0.03788617253303528,
0.012511059641838074,
-0.047076378017663956,
0.05248333513736725,
-0.1360103189945221,
-0.02391052059829235,
0.043538209050893784,
0.23324742913246155,
0.027890371158719063,
-0.3213478922843933,
-0.07819756120443344,
-0.0009721650276333094,
-0.024758731946349144,
-0.017275214195251465,
0.014492114074528217,
0.10018511861562729,
-0.10297562927007675,
0.017284579575061798,
-0.06827205419540405,
0.09886271506547928,
-0.04578384384512901,
0.05316316336393356,
0.0660601332783699,
0.08744175732135773,
-0.006892805453389883,
0.08589950948953629,
-0.3237849771976471,
0.28395694494247437,
0.008242788724601269,
0.07048019021749496,
-0.08349333703517914,
-0.014894912950694561,
0.035522766411304474,
0.07905026525259018,
0.05296725034713745,
-0.013275490142405033,
-0.004813314415514469,
-0.21648839116096497,
-0.032750893384218216,
0.027373407036066055,
0.09964877367019653,
-0.035447102040052414,
0.08686181157827377,
-0.02772245556116104,
0.010609190911054611,
0.08096246421337128,
-0.012654272839426994,
-0.04776868224143982,
-0.08864118903875351,
-0.01847808063030243,
0.014668532647192478,
-0.05305170640349388,
-0.052875593304634094,
-0.11699574440717697,
-0.1332307755947113,
0.15767274796962738,
-0.006962991785258055,
-0.031018724665045738,
-0.11835809051990509,
0.08896848559379578,
0.061894603073596954,
-0.08979585021734238,
0.04040545970201492,
0.005989388562738895,
0.05763566493988037,
0.027232542634010315,
-0.0707092359662056,
0.11401735991239548,
-0.05131986737251282,
-0.15252405405044556,
-0.062492355704307556,
0.09780584275722504,
0.04160023853182793,
0.06226802617311478,
-0.0034040058963000774,
0.016458183526992798,
-0.03671538457274437,
-0.09535864740610123,
0.022061074152588844,
-0.023874713107943535,
0.07350242882966995,
0.022867076098918915,
-0.06791398674249649,
0.016171369701623917,
-0.06489842385053635,
-0.02616119384765625,
0.2027081847190857,
0.2157335877418518,
-0.10244793444871902,
0.015262852422893047,
0.031168514862656593,
-0.07793587446212769,
-0.2035345435142517,
0.06646257638931274,
0.06060874089598656,
0.008866299875080585,
0.05186406522989273,
-0.17516878247261047,
0.1359560638666153,
0.0907803401350975,
-0.005239305552095175,
0.1095181480050087,
-0.3182474374771118,
-0.1372329145669937,
0.13015657663345337,
0.16456176340579987,
0.13879410922527313,
-0.14098799228668213,
-0.021381327882409096,
-0.023133128881454468,
-0.11130993068218231,
0.10280897468328476,
-0.09105870872735977,
0.12661361694335938,
-0.028904959559440613,
0.09301742166280746,
0.01325486321002245,
-0.0581507571041584,
0.1092018336057663,
0.0267864428460598,
0.10427618026733398,
-0.06599090993404388,
-0.0636872723698616,
0.032897572964429855,
-0.03075271286070347,
0.004123343154788017,
-0.06993968039751053,
0.021730398759245872,
-0.10300187766551971,
-0.02576817199587822,
-0.08291847258806229,
0.043942566961050034,
-0.03852805867791176,
-0.06769789755344391,
-0.03018389642238617,
0.0200948603451252,
0.037443868815898895,
-0.01772717572748661,
0.1294618397951126,
-0.0015514512779191136,
0.17265525460243225,
0.10809183865785599,
0.09099723398685455,
-0.06387651711702347,
-0.06479719281196594,
-0.013149767182767391,
-0.011372932232916355,
0.05671946704387665,
-0.1345275193452835,
0.02564227394759655,
0.14260244369506836,
0.028693046420812607,
0.13720816373825073,
0.09243103116750717,
-0.012986830435693264,
0.008091685362160206,
0.06351008266210556,
-0.15343067049980164,
-0.08028871566057205,
-0.01250691618770361,
-0.08557295054197311,
-0.10487332195043564,
0.05245267227292061,
0.08163101971149445,
-0.06932425498962402,
-0.008498728275299072,
-0.013976024463772774,
-0.01608145423233509,
-0.06710995733737946,
0.20318272709846497,
0.07398897409439087,
0.049744486808776855,
-0.10384009033441544,
0.055352553725242615,
0.059327125549316406,
-0.05742562562227249,
-0.007980190217494965,
0.07562851905822754,
-0.08358216285705566,
-0.04526730626821518,
0.1012553796172142,
0.2069450169801712,
-0.06837275624275208,
-0.0266976710408926,
-0.13731977343559265,
-0.12614916265010834,
0.07359807193279266,
0.1659143716096878,
0.11819646507501602,
0.008722851052880287,
-0.07412701100111008,
0.007998622953891754,
-0.12897785007953644,
0.07469549030065536,
0.03817104548215866,
0.06889194995164871,
-0.13185124099254608,
0.19969190657138824,
0.007616586051881313,
0.047689441591501236,
-0.02575719729065895,
0.018698519095778465,
-0.11495286971330643,
0.0216696597635746,
-0.12123223394155502,
-0.03136717900633812,
-0.0174233578145504,
0.0043129767291247845,
-0.010287737473845482,
-0.0631583034992218,
-0.05078831687569618,
0.006869135890156031,
-0.1237027496099472,
-0.016879746690392494,
0.03676268830895424,
0.05450427532196045,
-0.10820280015468597,
-0.03752788156270981,
0.016092706471681595,
-0.04827384278178215,
0.05569826066493988,
0.044024426490068436,
0.010831336490809917,
0.07103770971298218,
-0.1566322147846222,
0.01091611199080944,
0.06408701837062836,
0.0065675838850438595,
0.07160694897174835,
-0.06189625337719917,
0.00038459699135273695,
-0.004447216633707285,
0.0845872089266777,
0.0307387113571167,
0.06613218039274216,
-0.14237742125988007,
0.002804307034239173,
-0.026980813592672348,
-0.09023161232471466,
-0.06734411418437958,
0.03833518177270889,
0.07446122169494629,
0.012540155090391636,
0.19040116667747498,
-0.0822015032172203,
0.048333026468753815,
-0.2137441486120224,
-0.0020784721709787846,
-0.014987442642450333,
-0.11221219599246979,
-0.11837665736675262,
-0.08299413323402405,
0.06925524771213531,
-0.054399386048316956,
0.13711559772491455,
0.05725104734301567,
0.04679156467318535,
0.02790519781410694,
-0.009583220817148685,
0.023688701912760735,
0.021298862993717194,
0.21829554438591003,
0.04105158895254135,
-0.04167182743549347,
0.06305820494890213,
0.07212510704994202,
0.09354648739099503,
0.13008879125118256,
0.20709894597530365,
0.14616477489471436,
-0.03529706224799156,
0.08035825937986374,
0.027017630636692047,
-0.0451776385307312,
-0.14040915668010712,
0.02581770531833172,
-0.040236979722976685,
0.0881812646985054,
-0.03370872884988785,
0.20639170706272125,
0.06146164610981941,
-0.17317695915699005,
0.05297083035111427,
-0.0525209978222847,
-0.09751299023628235,
-0.11856675148010254,
-0.04103818163275719,
-0.08384183794260025,
-0.12624229490756989,
0.00286424090154469,
-0.1040872186422348,
0.02061736397445202,
0.11337330937385559,
0.007630578707903624,
-0.031064530834555626,
0.15981294214725494,
0.0249136034399271,
0.023781869560480118,
0.06896806508302689,
0.001138563035055995,
-0.0135880121961236,
-0.11822670698165894,
-0.054225388914346695,
-0.028463106602430344,
-0.020939169451594353,
0.026710253208875656,
-0.06464750319719315,
-0.07250060141086578,
0.028910687193274498,
-0.023940807208418846,
-0.10527394711971283,
0.019085530191659927,
0.020464634522795677,
0.07144217938184738,
0.047668907791376114,
0.006640610750764608,
0.011469862423837185,
-0.02051307074725628,
0.2201468050479889,
-0.07415705174207687,
-0.0739796981215477,
-0.07692272961139679,
0.2832767367362976,
0.05276748165488243,
-0.0030851862393319607,
0.029309876263141632,
-0.06301488727331161,
0.022996824234724045,
0.2644723057746887,
0.22198401391506195,
-0.10008440166711807,
0.0017156625399366021,
0.004613433964550495,
-0.010388023220002651,
0.0018297599162906408,
0.1325909048318863,
0.11125807464122772,
0.046571020036935806,
-0.10323699563741684,
-0.023755885660648346,
-0.049729663878679276,
-0.010268076322972775,
-0.04195462912321091,
0.07689587026834488,
0.06326907128095627,
0.006929346360266209,
-0.04516974464058876,
0.06462287157773972,
-0.072083480656147,
-0.09242860227823257,
0.06921996176242828,
-0.20216409862041473,
-0.15772706270217896,
-0.0206749327480793,
0.09373711049556732,
0.0023578680120408535,
0.0706709697842598,
-0.029256004840135574,
0.0015626437962055206,
0.0473693385720253,
-0.018484927713871002,
-0.09992912411689758,
-0.07969307154417038,
0.10250192135572433,
-0.10921120643615723,
0.18491916358470917,
-0.04515466466546059,
0.061046674847602844,
0.11924148350954056,
0.07198408246040344,
-0.052894286811351776,
0.06195247173309326,
0.03253096342086792,
-0.08159077167510986,
0.0403362400829792,
0.08183104544878006,
-0.03156834840774536,
0.029758037999272346,
0.03218013048171997,
-0.13679613173007965,
0.04281188175082207,
-0.09324603527784348,
-0.06201579049229622,
-0.04260540008544922,
-0.04307807609438896,
-0.05386063829064369,
0.12598423659801483,
0.22882278263568878,
-0.016057999804615974,
0.009294208139181137,
-0.07663276046514511,
0.0008400149527005851,
0.058726128190755844,
0.01929803192615509,
-0.08731043338775635,
-0.22688482701778412,
0.004816829692572355,
0.07894548028707504,
-0.029542788863182068,
-0.25978630781173706,
-0.08525197952985764,
0.0028680867981165648,
-0.07483229786157608,
-0.08833763003349304,
0.07849226146936417,
0.08229783922433853,
0.05503910779953003,
-0.05322801694273949,
-0.0993819385766983,
-0.07566752284765244,
0.1634988933801651,
-0.15366455912590027,
-0.09342646598815918
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext-finetuned-pubmedqa
This model is a fine-tuned version of [microsoft/BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext](https://huggingface.co/microsoft/BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext) on the None dataset.
It achieves the following results on the evaluation set:
- Loss: 0.6748
- Accuracy: 0.72
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 1e-05
- train_batch_size: 8
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 10
### Training results
| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| No log | 1.0 | 57 | 0.8396 | 0.58 |
| No log | 2.0 | 114 | 0.8608 | 0.58 |
| No log | 3.0 | 171 | 0.7642 | 0.68 |
| No log | 4.0 | 228 | 0.8196 | 0.64 |
| No log | 5.0 | 285 | 0.6477 | 0.72 |
| No log | 6.0 | 342 | 0.6861 | 0.72 |
| No log | 7.0 | 399 | 0.6735 | 0.74 |
| No log | 8.0 | 456 | 0.6516 | 0.72 |
| 0.6526 | 9.0 | 513 | 0.6707 | 0.72 |
| 0.6526 | 10.0 | 570 | 0.6748 | 0.72 |
### Framework versions
- Transformers 4.10.2
- Pytorch 1.9.0+cu102
- Datasets 1.12.0
- Tokenizers 0.10.3
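### Usage example (sketch)

The card does not include a usage snippet, so the following is only a plausible sketch: it assumes the checkpoint is available on the Hub under the id shown below with a standard sequence-classification head, and the question/abstract pair and label handling are illustrative rather than taken from the card.

```python
# Illustrative inference sketch; the example inputs are not from the card.
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

model_id = "blizrys/BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext-finetuned-pubmedqa"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForSequenceClassification.from_pretrained(model_id)
model.eval()

question = "Does aspirin reduce the risk of cardiovascular events?"
abstract = "Several randomized trials report a modest reduction in event rates."

inputs = tokenizer(question, abstract, truncation=True, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits
pred = logits.argmax(dim=-1).item()
print(pred, model.config.id2label.get(pred, pred))
```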
|
{"license": "mit", "tags": ["generated_from_trainer"], "datasets": [], "metrics": ["accuracy"]}
|
text-classification
|
blizrys/BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext-finetuned-pubmedqa
|
[
"transformers",
"pytorch",
"tensorboard",
"bert",
"text-classification",
"generated_from_trainer",
"license:mit",
"model-index",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #bert #text-classification #generated_from_trainer #license-mit #model-index #autotrain_compatible #endpoints_compatible #region-us
|
BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext-finetuned-pubmedqa
======================================================================
This model is a fine-tuned version of microsoft/BiomedNLP-PubMedBERT-base-uncased-abstract-fulltext on the None dataset.
It achieves the following results on the evaluation set:
* Loss: 0.6748
* Accuracy: 0.72
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 1e-05
* train\_batch\_size: 8
* eval\_batch\_size: 8
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 10
### Training results
### Framework versions
* Transformers 4.10.2
* Pytorch 1.9.0+cu102
* Datasets 1.12.0
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 1e-05\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 10",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.10.2\n* Pytorch 1.9.0+cu102\n* Datasets 1.12.0\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #bert #text-classification #generated_from_trainer #license-mit #model-index #autotrain_compatible #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 1e-05\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 10",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.10.2\n* Pytorch 1.9.0+cu102\n* Datasets 1.12.0\n* Tokenizers 0.10.3"
] |
[
56,
98,
4,
34
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #bert #text-classification #generated_from_trainer #license-mit #model-index #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 1e-05\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 10### Training results### Framework versions\n\n\n* Transformers 4.10.2\n* Pytorch 1.9.0+cu102\n* Datasets 1.12.0\n* Tokenizers 0.10.3"
] |
[
-0.10087471455335617,
0.07445205003023148,
-0.0018672136357054114,
0.11826130747795105,
0.18967922031879425,
0.030420133844017982,
0.12296590954065323,
0.11705076694488525,
-0.0897965282201767,
0.012555219233036041,
0.12275653332471848,
0.1861649751663208,
0.0020518277306109667,
0.10130710899829865,
-0.05488329753279686,
-0.2777828276157379,
-0.021608062088489532,
0.05884600803256035,
-0.06829068064689636,
0.13765493035316467,
0.09370884299278259,
-0.14887546002864838,
0.08389528095722198,
0.008579831570386887,
-0.2360214740037918,
0.01208409108221531,
0.029537731781601906,
-0.06604434549808502,
0.15655776858329773,
0.020534250885248184,
0.13675643503665924,
0.010262326337397099,
0.08835168927907944,
-0.1649406999349594,
0.01310371607542038,
0.04397793859243393,
0.00755735021084547,
0.0942433774471283,
0.06052087992429733,
-0.007056636270135641,
0.11515629291534424,
-0.07371179014444351,
0.05946892872452736,
0.021689768880605698,
-0.13217361271381378,
-0.2126310020685196,
-0.06642154604196548,
0.02727845311164856,
0.05743500962853432,
0.09789297729730606,
-0.006133230868726969,
0.13622774183750153,
-0.09938891977071762,
0.09681244194507599,
0.22032880783081055,
-0.28256186842918396,
-0.06461110711097717,
0.043785568326711655,
0.014646513387560844,
0.07640674710273743,
-0.12039827555418015,
-0.014144625514745712,
0.0585813969373703,
0.04299136623740196,
0.13209235668182373,
-0.03688430413603783,
-0.08971057087182999,
0.023791104555130005,
-0.141779825091362,
-0.02241506800055504,
0.1186365932226181,
0.026175886392593384,
-0.027931388467550278,
-0.03819325193762779,
-0.05127131938934326,
-0.16317535936832428,
-0.0438343770802021,
-0.017387235537171364,
0.04449324682354927,
-0.04007164388895035,
-0.0830298662185669,
-0.0016397204017266631,
-0.1107645034790039,
-0.06427336484193802,
-0.06960709393024445,
0.1529645323753357,
0.03561661019921303,
0.008191050961613655,
-0.03942060098052025,
0.1089097410440445,
-0.003116583451628685,
-0.1283807009458542,
0.037175390869379044,
0.03050795942544937,
0.0017121904529631138,
-0.06318014860153198,
-0.06389949470758438,
-0.07560490071773529,
-0.0054520536214113235,
0.08795551210641861,
-0.05356671288609505,
0.04373497888445854,
0.039178043603897095,
0.046573515981435776,
-0.09148012101650238,
0.20813997089862823,
-0.036197729408741,
-0.009130986407399178,
-0.000954429735429585,
0.04212907329201698,
-0.012447568587958813,
-0.009528695605695248,
-0.13113956153392792,
-0.003041745163500309,
0.11384139209985733,
0.010072685778141022,
-0.08009220659732819,
0.07969297468662262,
-0.04706133157014847,
-0.03210445120930672,
0.004182311240583658,
-0.09002657979726791,
0.038868069648742676,
0.00031449150992557406,
-0.08443127572536469,
-0.01521297823637724,
0.01800416223704815,
0.013403691351413727,
-0.020411815494298935,
0.1369602233171463,
-0.09691087156534195,
0.04585041105747223,
-0.10298489034175873,
-0.12360896915197372,
0.005544786807149649,
-0.09445403516292572,
0.028048787266016006,
-0.09776705503463745,
-0.1468457728624344,
-0.01642313413321972,
0.048322275280952454,
-0.021601524204015732,
-0.04933108389377594,
-0.05346988886594772,
-0.06828141957521439,
0.002529844641685486,
-0.012349748983979225,
0.14158034324645996,
-0.05274554714560509,
0.11273174732923508,
0.03212076053023338,
0.06280422955751419,
-0.05475148931145668,
0.0652107521891594,
-0.09677635133266449,
-0.0034415952395647764,
-0.19186139106750488,
0.04712645709514618,
-0.040700025856494904,
0.07201069593429565,
-0.0821763426065445,
-0.11528884619474411,
0.017881454899907112,
0.0045051840133965015,
0.07057970762252808,
0.09806141257286072,
-0.162412628531456,
-0.08723504841327667,
0.15380562841892242,
-0.058673568069934845,
-0.10650723427534103,
0.11127647757530212,
-0.07199294865131378,
0.06915052980184555,
0.08555793762207031,
0.17237983644008636,
0.07330083101987839,
-0.07016393542289734,
0.03746413066983223,
-0.004778741858899593,
0.04778588190674782,
-0.07594197243452072,
0.05105135962367058,
0.0015088138170540333,
-0.0023246698547154665,
0.03326098620891571,
-0.024722347036004066,
0.059644389897584915,
-0.10566816478967667,
-0.08970538526773453,
-0.026380063965916634,
-0.096671462059021,
0.07834067940711975,
0.08191876858472824,
0.09922488778829575,
-0.09693557769060135,
-0.07112260907888412,
0.10247140377759933,
0.060264505445957184,
-0.05659811198711395,
0.025869399309158325,
-0.056365933269262314,
0.06030962988734245,
-0.042948611080646515,
-0.03022135980427265,
-0.19571557641029358,
-0.011244597844779491,
0.005565489176660776,
0.020473124459385872,
0.030352793633937836,
0.037365250289440155,
0.0727711096405983,
0.059891607612371445,
-0.057853590697050095,
-0.009954105131328106,
-0.013413921929895878,
0.00007325362821575254,
-0.1546209156513214,
-0.18935628235340118,
-0.02566433697938919,
-0.015149794518947601,
0.11270678043365479,
-0.21750064194202423,
0.04049839451909065,
-0.01805323362350464,
0.06106065958738327,
0.0055373902432620525,
-0.009839157573878765,
-0.05142056941986084,
0.0920400619506836,
-0.03185184299945831,
-0.04095391184091568,
0.08654773235321045,
-0.003454808611422777,
-0.08718836307525635,
-0.04336337372660637,
-0.10840009897947311,
0.18414044380187988,
0.14098884165287018,
-0.1454484462738037,
-0.09236498177051544,
-0.012755293399095535,
-0.05156616494059563,
-0.02170892059803009,
-0.04523766413331032,
0.03586788475513458,
0.20059514045715332,
-0.012268475256860256,
0.1612502485513687,
-0.06689981371164322,
-0.04583900421857834,
0.0223530326038599,
-0.03364070504903793,
0.03314638137817383,
0.12456385791301727,
0.12383009493350983,
-0.08955555409193039,
0.1395193487405777,
0.1415599286556244,
-0.08706090599298477,
0.14398406445980072,
-0.030128633603453636,
-0.06498973071575165,
-0.012251322157680988,
-0.03653278574347496,
-0.0023718972224742174,
0.09199409186840057,
-0.15647822618484497,
-0.01872975192964077,
0.015141311101615429,
0.015821296721696854,
0.02420075424015522,
-0.22974810004234314,
-0.04504149779677391,
0.03679995611310005,
-0.02951870672404766,
-0.011743026785552502,
-0.017482250928878784,
0.013256818056106567,
0.1132611334323883,
0.004455277696251869,
-0.08083771914243698,
0.03977440297603607,
0.007252393290400505,
-0.08455335348844528,
0.22437596321105957,
-0.07326865196228027,
-0.14848440885543823,
-0.1264139711856842,
-0.08501695841550827,
-0.03692084550857544,
0.01439812034368515,
0.05887556076049805,
-0.10111205279827118,
-0.021752193570137024,
-0.04804990068078041,
0.022130535915493965,
-0.016008267179131508,
0.035250235348939896,
-0.01606730930507183,
0.000024005617888178676,
0.054995566606521606,
-0.10992296785116196,
-0.012746360152959824,
-0.06429212540388107,
-0.06623243540525436,
0.05292920023202896,
0.027794810011982918,
0.108037568628788,
0.1753002107143402,
-0.04140227288007736,
0.011297591961920261,
-0.03701674938201904,
0.2273317575454712,
-0.0702124610543251,
-0.029910041019320488,
0.12312145531177521,
-0.014154007658362389,
0.051207881420850754,
0.10723748803138733,
0.08148995786905289,
-0.08613479882478714,
0.0070084636099636555,
0.026971017941832542,
-0.042264122515916824,
-0.22022873163223267,
-0.04610889405012131,
-0.0532478466629982,
-0.02569865994155407,
0.09923196583986282,
0.0243906881660223,
0.053167764097452164,
0.07749416679143906,
0.054372914135456085,
0.07975420355796814,
-0.042837146669626236,
0.05561673268675804,
0.11992864310741425,
0.041178494691848755,
0.13108029961585999,
-0.04205411672592163,
-0.07842884212732315,
0.027184495702385902,
-0.03142145276069641,
0.21111464500427246,
0.012256471440196037,
0.12763746082782745,
0.054450590163469315,
0.16881492733955383,
0.016810424625873566,
0.08852731436491013,
-0.000012786502338713035,
-0.05023816600441933,
-0.009649022482335567,
-0.036660656332969666,
-0.039399489760398865,
0.013165823183953762,
-0.0456901378929615,
0.05036661773920059,
-0.13636426627635956,
-0.02900124527513981,
0.04399678111076355,
0.22730059921741486,
0.030904410406947136,
-0.3303312361240387,
-0.08747995644807816,
-0.004409965593367815,
-0.02814365178346634,
-0.016618741676211357,
0.015298642218112946,
0.09671124070882797,
-0.10410208255052567,
0.019142666831612587,
-0.07129934430122375,
0.09567778557538986,
-0.047720976173877716,
0.05531203746795654,
0.06544215232133865,
0.0835597962141037,
-0.005873092450201511,
0.08351713418960571,
-0.3126921057701111,
0.27711209654808044,
0.005778764374554157,
0.06630592793226242,
-0.08003444224596024,
-0.01509715337306261,
0.030926426872611046,
0.07679086923599243,
0.055114392191171646,
-0.014640505425632,
-0.004996994510293007,
-0.22130127251148224,
-0.037120431661605835,
0.023882251232862473,
0.09651049226522446,
-0.033639054745435715,
0.09181001037359238,
-0.025505540892481804,
0.0076469918712973595,
0.0789426639676094,
-0.020320424810051918,
-0.03980683535337448,
-0.08821604400873184,
-0.016357718035578728,
0.008503307588398457,
-0.0370405875146389,
-0.054707664996385574,
-0.12074162811040878,
-0.13418833911418915,
0.15585695207118988,
-0.0054986304603517056,
-0.03657561168074608,
-0.11631700396537781,
0.09414570778608322,
0.06403425335884094,
-0.09031129628419876,
0.04169662296772003,
0.007905388250946999,
0.0650867372751236,
0.023276949301362038,
-0.06935807317495346,
0.11569317430257797,
-0.05001915246248245,
-0.15496185421943665,
-0.060223743319511414,
0.10384432971477509,
0.04059140384197235,
0.06356358528137207,
-0.002912263385951519,
0.01480117253959179,
-0.03248436748981476,
-0.09006933122873306,
0.019769685342907906,
-0.020521096885204315,
0.07017780840396881,
0.015280917286872864,
-0.06729315966367722,
0.012287702411413193,
-0.06670353561639786,
-0.020423775538802147,
0.20356030762195587,
0.21988023817539215,
-0.10152546316385269,
0.017125777900218964,
0.038257524371147156,
-0.07485830783843994,
-0.2050040066242218,
0.06558529287576675,
0.06073904037475586,
0.0072907558642327785,
0.048449527472257614,
-0.17169135808944702,
0.13440638780593872,
0.09380823373794556,
-0.005063493736088276,
0.11106094717979431,
-0.3216880261898041,
-0.13513824343681335,
0.12704159319400787,
0.15944857895374298,
0.13189969956874847,
-0.1420527845621109,
-0.01899045519530773,
-0.020513547584414482,
-0.11597258597612381,
0.10896537452936172,
-0.06535527110099792,
0.12542490661144257,
-0.028553416952490807,
0.09781292825937271,
0.013191691599786282,
-0.059483449906110764,
0.1041710376739502,
0.026370512321591377,
0.09837990999221802,
-0.06412239372730255,
-0.07025934010744095,
0.04213874414563179,
-0.030555158853530884,
0.0063324798829853535,
-0.05739928409457207,
0.01748286373913288,
-0.10120146721601486,
-0.0273833479732275,
-0.08622881025075912,
0.04288238286972046,
-0.037829507142305374,
-0.06904426217079163,
-0.031890619546175,
0.025272948667407036,
0.03649219870567322,
-0.01712815649807453,
0.12754672765731812,
-0.00017206979100592434,
0.16839542984962463,
0.11143731325864792,
0.08624520897865295,
-0.06643806397914886,
-0.0680655762553215,
-0.013635866343975067,
-0.015448175370693207,
0.05677620321512222,
-0.13587656617164612,
0.024703465402126312,
0.1463315188884735,
0.0293254517018795,
0.13956840336322784,
0.09108337014913559,
-0.016175590455532074,
0.009848171845078468,
0.062446098774671555,
-0.15549932420253754,
-0.07894393801689148,
-0.012652570381760597,
-0.08286992460489273,
-0.10476948320865631,
0.05083287134766579,
0.08412737399339676,
-0.07291921973228455,
-0.01228847075253725,
-0.01220008172094822,
-0.012532250955700874,
-0.05991440266370773,
0.20119494199752808,
0.07473272830247879,
0.049112964421510696,
-0.10324537009000778,
0.05423522740602493,
0.059235602617263794,
-0.06756359338760376,
-0.005590261425822973,
0.07764670997858047,
-0.08241984248161316,
-0.04313085973262787,
0.09108132123947144,
0.19661317765712738,
-0.07557334750890732,
-0.026381639763712883,
-0.13812591135501862,
-0.12503010034561157,
0.0752948746085167,
0.17032550275325775,
0.1168777346611023,
0.007034961134195328,
-0.06915662437677383,
0.011166122741997242,
-0.13221776485443115,
0.07713543623685837,
0.03492147848010063,
0.0688253864645958,
-0.12926004827022552,
0.20082725584506989,
0.006491682026535273,
0.04704971984028816,
-0.026859266683459282,
0.018505925312638283,
-0.1156233549118042,
0.01872982084751129,
-0.11804894357919693,
-0.030524052679538727,
-0.020023100078105927,
0.002488550031557679,
-0.01170563418418169,
-0.06073109805583954,
-0.05017226189374924,
0.0036896364763379097,
-0.12547366321086884,
-0.01566261053085327,
0.03781323879957199,
0.051480237394571304,
-0.1106124147772789,
-0.0357719361782074,
0.015596827492117882,
-0.049096908420324326,
0.060551486909389496,
0.040603190660476685,
0.011029127053916454,
0.0704314112663269,
-0.14901046454906464,
0.00571257621049881,
0.06861218065023422,
0.003913934342563152,
0.07239187508821487,
-0.06439506262540817,
-0.0014872022438794374,
-0.004179536364972591,
0.07989631593227386,
0.030029090121388435,
0.06473201513290405,
-0.1428796350955963,
0.003717373125255108,
-0.023107990622520447,
-0.08673097938299179,
-0.06917595863342285,
0.037239108234643936,
0.07201912254095078,
0.008112939074635506,
0.18997791409492493,
-0.08107025921344757,
0.04985703527927399,
-0.2123512625694275,
-0.0034258824307471514,
-0.015839487314224243,
-0.11317837983369827,
-0.11147201061248779,
-0.07770148664712906,
0.06653372943401337,
-0.05245012044906616,
0.1361333280801773,
0.0531688816845417,
0.04716285690665245,
0.03165140748023987,
-0.01215999387204647,
0.013594666495919228,
0.024500884115695953,
0.21904048323631287,
0.036557089537382126,
-0.041461773216724396,
0.0626106783747673,
0.07423950731754303,
0.0950976088643074,
0.12400675565004349,
0.2043699473142624,
0.14615650475025177,
-0.039489734917879105,
0.08055940270423889,
0.025782953947782516,
-0.036622971296310425,
-0.14954398572444916,
0.030974775552749634,
-0.043097931891679764,
0.09213200956583023,
-0.03084328956902027,
0.2113218605518341,
0.05836709961295128,
-0.17056569457054138,
0.05148068070411682,
-0.053073156625032425,
-0.09523463249206543,
-0.10976426303386688,
-0.040263086557388306,
-0.08308563381433487,
-0.1302037239074707,
0.003760353894904256,
-0.10824208706617355,
0.016063503921031952,
0.1118280291557312,
0.007795763202011585,
-0.034358538687229156,
0.15871919691562653,
0.01847602240741253,
0.02101091295480728,
0.06819093227386475,
0.003941171802580357,
-0.01705554872751236,
-0.12275547534227371,
-0.05359440669417381,
-0.025766249746084213,
-0.011823983862996101,
0.027921436354517937,
-0.062184035778045654,
-0.07204703241586685,
0.02614426054060459,
-0.025676319375634193,
-0.10380465537309647,
0.01753372885286808,
0.024549825116991997,
0.07222782075405121,
0.03785202279686928,
0.0025171011220663786,
0.010276084765791893,
-0.021117808297276497,
0.21757149696350098,
-0.0725889801979065,
-0.0768909826874733,
-0.07946910709142685,
0.2860095202922821,
0.05348288267850876,
-0.0017552237259224057,
0.03149405121803284,
-0.0645890161395073,
0.017016947269439697,
0.26553279161453247,
0.21850821375846863,
-0.09494562447071075,
0.0003310833708383143,
0.004261404741555452,
-0.009674965403974056,
-0.000014652546269644517,
0.13021469116210938,
0.11598257720470428,
0.04604387283325195,
-0.10280904173851013,
-0.026232212781906128,
-0.053591545671224594,
-0.012847300618886948,
-0.037330057471990585,
0.07293705642223358,
0.061060208827257156,
0.005256067030131817,
-0.04757178574800491,
0.06412451714277267,
-0.07306212931871414,
-0.09998247772455215,
0.06492608785629272,
-0.2123156487941742,
-0.15849162638187408,
-0.01606045477092266,
0.09352979063987732,
0.00341704860329628,
0.07219978421926498,
-0.025162888690829277,
0.0015745960408821702,
0.051409583538770676,
-0.017110226675868034,
-0.09932514280080795,
-0.07319658994674683,
0.09871815890073776,
-0.1166648268699646,
0.18572978675365448,
-0.04479580745100975,
0.06175870820879936,
0.11920980364084244,
0.06929562985897064,
-0.052779149264097214,
0.06122441589832306,
0.03398152068257332,
-0.07694023847579956,
0.038557518273591995,
0.08231307566165924,
-0.028508564457297325,
0.036955755203962326,
0.03263469412922859,
-0.14628484845161438,
0.03771299123764038,
-0.09448335319757462,
-0.06003115326166153,
-0.04207547754049301,
-0.043769653886556625,
-0.05183563008904457,
0.12642541527748108,
0.22688649594783783,
-0.019478969275951385,
0.006698258686810732,
-0.0734444409608841,
0.0020292436238378286,
0.05666620284318924,
0.012660848908126354,
-0.08469624072313309,
-0.22923652827739716,
0.0015962637262418866,
0.06408537924289703,
-0.02862069196999073,
-0.25650307536125183,
-0.08264486491680145,
-0.001982743851840496,
-0.07134432345628738,
-0.0932161808013916,
0.08230649679899216,
0.07807870954275131,
0.053940895944833755,
-0.053267452865839005,
-0.07987610250711441,
-0.07627292722463608,
0.16460570693016052,
-0.15345199406147003,
-0.09168349951505661
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# biobert-base-cased-v1.1-finetuned-pubmedqa
This model is a fine-tuned version of [dmis-lab/biobert-base-cased-v1.1](https://huggingface.co/dmis-lab/biobert-base-cased-v1.1) on the None dataset.
It achieves the following results on the evaluation set:
- Loss: 2.3182
- Accuracy: 0.5
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 8
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 10
### Training results
| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| No log | 1.0 | 57 | 0.8591 | 0.58 |
| No log | 2.0 | 114 | 0.9120 | 0.58 |
| No log | 3.0 | 171 | 0.8159 | 0.62 |
| No log | 4.0 | 228 | 1.1651 | 0.54 |
| No log | 5.0 | 285 | 1.2350 | 0.6 |
| No log | 6.0 | 342 | 1.5563 | 0.68 |
| No log | 7.0 | 399 | 2.0233 | 0.58 |
| No log | 8.0 | 456 | 2.2054 | 0.5 |
| 0.4463 | 9.0 | 513 | 2.2434 | 0.5 |
| 0.4463 | 10.0 | 570 | 2.3182 | 0.5 |
### Framework versions
- Transformers 4.10.2
- Pytorch 1.9.0+cu102
- Datasets 1.11.0
- Tokenizers 0.10.3
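### Reproducing the configuration (sketch)

The hyperparameters above map onto `TrainingArguments` in the Transformers 4.10-era API roughly as follows; the output directory and the per-epoch evaluation strategy are assumptions (the card reports results per epoch but does not show the actual training script).

```python
# Sketch of the run configuration implied by the hyperparameters above.
from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="biobert-base-cased-v1.1-finetuned-pubmedqa",  # placeholder
    learning_rate=5e-5,
    per_device_train_batch_size=8,
    per_device_eval_batch_size=8,
    num_train_epochs=10,
    seed=42,
    lr_scheduler_type="linear",
    evaluation_strategy="epoch",  # assumption, matching the per-epoch results table
)
# Adam with betas=(0.9, 0.999) and epsilon=1e-08 is the Trainer's default
# optimizer, so it needs no explicit configuration here.
print(args.learning_rate, args.num_train_epochs)
```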
|
{"tags": ["generated_from_trainer"], "datasets": [], "metrics": ["accuracy"]}
|
text-classification
|
blizrys/biobert-base-cased-v1.1-finetuned-pubmedqa
|
[
"transformers",
"pytorch",
"tensorboard",
"bert",
"text-classification",
"generated_from_trainer",
"model-index",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #bert #text-classification #generated_from_trainer #model-index #autotrain_compatible #endpoints_compatible #region-us
|
biobert-base-cased-v1.1-finetuned-pubmedqa
==========================================
This model is a fine-tuned version of dmis-lab/biobert-base-cased-v1.1 on the None dataset.
It achieves the following results on the evaluation set:
* Loss: 2.3182
* Accuracy: 0.5
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 5e-05
* train\_batch\_size: 8
* eval\_batch\_size: 8
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 10
### Training results
### Framework versions
* Transformers 4.10.2
* Pytorch 1.9.0+cu102
* Datasets 1.11.0
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5e-05\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 10",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.10.2\n* Pytorch 1.9.0+cu102\n* Datasets 1.11.0\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #bert #text-classification #generated_from_trainer #model-index #autotrain_compatible #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5e-05\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 10",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.10.2\n* Pytorch 1.9.0+cu102\n* Datasets 1.11.0\n* Tokenizers 0.10.3"
] |
[
51,
98,
4,
34
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #bert #text-classification #generated_from_trainer #model-index #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5e-05\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 10### Training results### Framework versions\n\n\n* Transformers 4.10.2\n* Pytorch 1.9.0+cu102\n* Datasets 1.11.0\n* Tokenizers 0.10.3"
] |
[
-0.10358120501041412,
0.05779881402850151,
-0.0014367330586537719,
0.11845432966947556,
0.2032744437456131,
0.038293831050395966,
0.11997881531715393,
0.10927186161279678,
-0.08357654511928558,
0.0175037644803524,
0.125962033867836,
0.17450492084026337,
0.0028766244649887085,
0.08918216079473495,
-0.06106366962194443,
-0.27924051880836487,
-0.029644381254911423,
0.04818357154726982,
-0.06014212965965271,
0.13543303310871124,
0.08322501182556152,
-0.1554640680551529,
0.08051394671201706,
-0.0055994861759245396,
-0.23335811495780945,
0.01267528161406517,
0.025751495733857155,
-0.060206327587366104,
0.1552467793226242,
0.006335372570902109,
0.15761038661003113,
-0.00380586925894022,
0.09591299295425415,
-0.16449497640132904,
0.014347254298627377,
0.05021455138921738,
0.015027262270450592,
0.09212793409824371,
0.06769777834415436,
-0.019139522686600685,
0.10329241305589676,
-0.08686091750860214,
0.06546257436275482,
0.0040175532922148705,
-0.13088800013065338,
-0.1896478831768036,
-0.05724064260721207,
0.008376134559512138,
0.054365139454603195,
0.1043473333120346,
-0.013850980438292027,
0.15171320736408234,
-0.10219620168209076,
0.10463982075452805,
0.21729592978954315,
-0.26961514353752136,
-0.07772115617990494,
0.04502367973327637,
0.004122905433177948,
0.10101787745952606,
-0.12118792533874512,
-0.014215854927897453,
0.058744288980960846,
0.05108538269996643,
0.12140806019306183,
-0.03361033275723457,
-0.09643658995628357,
0.027054214850068092,
-0.14937879145145416,
-0.01112122368067503,
0.09733385592699051,
0.023973509669303894,
-0.021556777879595757,
-0.024815188720822334,
-0.05791473016142845,
-0.15791136026382446,
-0.038849323987960815,
-0.019157283008098602,
0.037496138364076614,
-0.04806632176041603,
-0.09159994125366211,
-0.007373708765953779,
-0.10700786858797073,
-0.06526284664869308,
-0.06181173026561737,
0.16220834851264954,
0.03662514314055443,
0.008512559346854687,
-0.04245437681674957,
0.11458199471235275,
-0.003350496059283614,
-0.133348286151886,
0.053487423807382584,
0.025250129401683807,
-0.014697890728712082,
-0.06570432335138321,
-0.07392476499080658,
-0.10171090066432953,
-0.009235399775207043,
0.06686010956764221,
-0.04496660828590393,
0.05282846465706825,
0.042839858680963516,
0.036449678242206573,
-0.08843734860420227,
0.21380244195461273,
-0.03690135478973389,
-0.024349626153707504,
-0.009545058012008667,
0.05731474608182907,
-0.022657323628664017,
-0.01921640895307064,
-0.12238283455371857,
0.006944836582988501,
0.12563134729862213,
0.0012571512488648295,
-0.08072689920663834,
0.0727684423327446,
-0.04100833088159561,
-0.03382868319749832,
-0.03686776012182236,
-0.0932091623544693,
0.04552388936281204,
-0.003196057165041566,
-0.09151236712932587,
-0.007886800915002823,
0.008247323334217072,
0.020258111879229546,
-0.02594398334622383,
0.1552378088235855,
-0.09547682851552963,
0.0518038310110569,
-0.11169857531785965,
-0.12693685293197632,
-0.00496011134237051,
-0.1059439405798912,
0.023099813610315323,
-0.09370430558919907,
-0.15093901753425598,
-0.028938081115484238,
0.04941681772470474,
-0.027567783370614052,
-0.03983130306005478,
-0.06673035025596619,
-0.06846670806407928,
0.00897375401109457,
-0.005817160941660404,
0.15628941357135773,
-0.053763628005981445,
0.11330036073923111,
0.031120665371418,
0.06288092583417892,
-0.04962730035185814,
0.05798499658703804,
-0.08561678975820541,
-0.004269047174602747,
-0.18587784469127655,
0.06094137206673622,
-0.04072776064276695,
0.07820829749107361,
-0.06904663890600204,
-0.11218378692865372,
0.009949704632163048,
0.0026753328274935484,
0.08356212079524994,
0.09783729910850525,
-0.17665770649909973,
-0.08864407986402512,
0.1535259485244751,
-0.04775223508477211,
-0.09175172448158264,
0.12037567794322968,
-0.0823303833603859,
0.04928178712725639,
0.0928083062171936,
0.18256120383739471,
0.06289060413837433,
-0.0653446763753891,
0.03817461058497429,
-0.010867072269320488,
0.06702029705047607,
-0.052108053117990494,
0.0367637500166893,
0.012326628901064396,
-0.013696584850549698,
0.0323793962597847,
-0.0234251506626606,
0.06526126712560654,
-0.11451811343431473,
-0.08713994920253754,
-0.030182328075170517,
-0.10625958442687988,
0.08034178614616394,
0.08577790856361389,
0.1052914559841156,
-0.099321648478508,
-0.06550661474466324,
0.08058061450719833,
0.05808926746249199,
-0.05795709043741226,
0.018411261960864067,
-0.04723328724503517,
0.06610044091939926,
-0.051788970828056335,
-0.029738498851656914,
-0.20707513391971588,
-0.019852740690112114,
0.004539571236819029,
0.0520206019282341,
0.0319938138127327,
0.03623618930578232,
0.08406323939561844,
0.06097216159105301,
-0.057649727910757065,
-0.011180181056261063,
-0.014188485220074654,
-0.006001572124660015,
-0.1615845113992691,
-0.1984742134809494,
-0.019902490079402924,
-0.021158013492822647,
0.11978741735219955,
-0.22799323499202728,
0.033405572175979614,
-0.03615825995802879,
0.06790813058614731,
0.010518128983676434,
-0.013540919870138168,
-0.0509248711168766,
0.0986810103058815,
-0.02507414110004902,
-0.039860792458057404,
0.07973726093769073,
-0.009117214940488338,
-0.09249839931726456,
-0.0723894014954567,
-0.10825476795434952,
0.17906194925308228,
0.13616447150707245,
-0.16651490330696106,
-0.09619612246751785,
-0.002863527275621891,
-0.048838287591934204,
-0.016910716891288757,
-0.05081924423575401,
0.04224579036235809,
0.2270447313785553,
-0.007492275908589363,
0.1574716418981552,
-0.06219034269452095,
-0.04254503548145294,
0.012754134833812714,
-0.034391503781080246,
0.04105374589562416,
0.13054323196411133,
0.11638695001602173,
-0.08979535847902298,
0.12640593945980072,
0.1355987787246704,
-0.1089436262845993,
0.15261265635490417,
-0.019842378795146942,
-0.06898549944162369,
-0.0063765281811356544,
-0.03621821105480194,
-0.004963691346347332,
0.09145249426364899,
-0.15570323169231415,
-0.030555270612239838,
0.006573381833732128,
0.021720459684729576,
0.026595700532197952,
-0.23055841028690338,
-0.04811711609363556,
0.03896999731659889,
-0.011698192916810513,
-0.008991112932562828,
-0.02756485342979431,
0.021167166531085968,
0.1220044419169426,
0.002735287183895707,
-0.08033424615859985,
0.024990001693367958,
-0.00029730083770118654,
-0.07924489676952362,
0.22489485144615173,
-0.07136142998933792,
-0.13784609735012054,
-0.10908220708370209,
-0.08577176928520203,
-0.046970680356025696,
0.019639847800135612,
0.0420682355761528,
-0.12284725904464722,
-0.012824799865484238,
-0.04073143005371094,
0.02746402472257614,
-0.008404524996876717,
0.04508436843752861,
-0.010164260864257812,
-0.008197873830795288,
0.05470839887857437,
-0.10061901062726974,
-0.007501838728785515,
-0.07617358863353729,
-0.06759504228830338,
0.05231361463665962,
0.035329438745975494,
0.11582162976264954,
0.18027082085609436,
-0.038493577390909195,
0.011821316555142403,
-0.02742757461965084,
0.2407606840133667,
-0.07797988504171371,
-0.03681034594774246,
0.09842994809150696,
-0.0379837267100811,
0.047346822917461395,
0.10060150921344757,
0.07904551923274994,
-0.09086708724498749,
0.011431241407990456,
0.03267475590109825,
-0.04221460595726967,
-0.21650981903076172,
-0.03894919902086258,
-0.05174864083528519,
-0.038152799010276794,
0.09539563953876495,
0.016421878710389137,
0.046148303896188736,
0.07809045165777206,
0.06356512010097504,
0.0838223472237587,
-0.0518646202981472,
0.04299366846680641,
0.08933112025260925,
0.039159052073955536,
0.1305849403142929,
-0.03447888046503067,
-0.09191035479307175,
0.025252796709537506,
-0.040617819875478745,
0.2169671654701233,
-0.0021038793493062258,
0.09981096535921097,
0.04110785946249962,
0.16662055253982544,
0.010257394053041935,
0.09135090559720993,
-0.003970564808696508,
-0.057415321469306946,
-0.006888309959322214,
-0.028545306995511055,
-0.046511996537446976,
0.012562445364892483,
-0.032514605671167374,
0.056526925414800644,
-0.13765273988246918,
-0.01965603046119213,
0.05394313111901283,
0.2214595228433609,
0.03305966779589653,
-0.32579782605171204,
-0.07383890450000763,
0.0005856975330971181,
-0.028993036597967148,
-0.020345287397503853,
0.011748982593417168,
0.11363188177347183,
-0.10908517241477966,
0.018202481791377068,
-0.0682215467095375,
0.09620372951030731,
-0.06325005739927292,
0.05663718283176422,
0.053700607270002365,
0.09599097073078156,
-0.01341578271239996,
0.07743900269269943,
-0.3074696958065033,
0.2772078812122345,
0.007324158679693937,
0.0694822371006012,
-0.08479763567447662,
-0.020560884848237038,
0.041285574436187744,
0.05624337121844292,
0.04023124650120735,
-0.012893634848296642,
0.0070214285515248775,
-0.21793904900550842,
-0.03890835866332054,
0.03220421075820923,
0.1194089725613594,
-0.0351385623216629,
0.09790105372667313,
-0.019403163343667984,
0.00789687130600214,
0.07557141035795212,
-0.031216232106089592,
-0.04369715601205826,
-0.08376059681177139,
-0.023946966975927353,
0.011385948397219181,
-0.05899564176797867,
-0.04690735042095184,
-0.12296503782272339,
-0.13951845467090607,
0.14385247230529785,
0.010496153496205807,
-0.02496294490993023,
-0.11953537911176682,
0.11456288397312164,
0.06678125262260437,
-0.08298949152231216,
0.03005019947886467,
0.017587194219231606,
0.05982642248272896,
0.028287725523114204,
-0.05978679284453392,
0.11112657189369202,
-0.05146387219429016,
-0.15235188603401184,
-0.06381435692310333,
0.09268934279680252,
0.05298261716961861,
0.06756245344877243,
-0.009523740969598293,
0.015028056688606739,
-0.03646937757730484,
-0.09334655106067657,
0.02851441502571106,
-0.025178497657179832,
0.061287425458431244,
0.0379624143242836,
-0.05732453987002373,
0.007853973656892776,
-0.07039256393909454,
-0.006893026642501354,
0.21044211089611053,
0.21281206607818604,
-0.09033837914466858,
-0.003865275764837861,
0.03475711867213249,
-0.07382605224847794,
-0.1968725323677063,
0.0866541787981987,
0.08406449109315872,
0.011555365286767483,
0.04062666371464729,
-0.17220023274421692,
0.14267829060554504,
0.08262895047664642,
0.002495020627975464,
0.11266102641820908,
-0.3126635253429413,
-0.13091379404067993,
0.12286323308944702,
0.1700405329465866,
0.1361604481935501,
-0.13795945048332214,
-0.018454663455486298,
-0.019718777388334274,
-0.0944688618183136,
0.09386829286813736,
-0.0828736200928688,
0.12076596915721893,
-0.019258864223957062,
0.10453358292579651,
0.019785910844802856,
-0.06097566336393356,
0.10635937005281448,
0.024173857644200325,
0.10483340919017792,
-0.06837600469589233,
-0.06903586536645889,
0.025259824469685555,
-0.032304923981428146,
-0.007949736900627613,
-0.0509500652551651,
0.0168524868786335,
-0.10440470278263092,
-0.025829771533608437,
-0.09139425307512283,
0.028449513018131256,
-0.03247161582112312,
-0.06698127090930939,
-0.015594561584293842,
0.020512768998742104,
0.039164949208498,
-0.011343440040946007,
0.11263182014226913,
-0.006690685637295246,
0.17952118813991547,
0.08753842115402222,
0.09378490597009659,
-0.06760915368795395,
-0.04682798683643341,
-0.0054016560316085815,
-0.005483238026499748,
0.055369630455970764,
-0.12574586272239685,
0.02140573039650917,
0.15206371247768402,
0.027437157928943634,
0.1392446905374527,
0.09572677314281464,
-0.01002343650907278,
0.011282188817858696,
0.06415637582540512,
-0.1708742380142212,
-0.06335358321666718,
-0.017778340727090836,
-0.08775640279054642,
-0.10644841194152832,
0.046417608857154846,
0.09124995768070221,
-0.062323570251464844,
-0.011985747143626213,
-0.02009192481637001,
-0.01864285208284855,
-0.061936210840940475,
0.21270275115966797,
0.0798899456858635,
0.0487823449075222,
-0.10338003933429718,
0.0544208288192749,
0.05980629473924637,
-0.0765879824757576,
0.0028082167264074087,
0.08592244982719421,
-0.08230799436569214,
-0.036821164190769196,
0.10170594602823257,
0.23120997846126556,
-0.057931143790483475,
-0.019019480794668198,
-0.14427818357944489,
-0.11892393231391907,
0.07103845477104187,
0.1733740121126175,
0.1171092689037323,
-0.005839540623128414,
-0.0781860277056694,
0.017051292583346367,
-0.1357995718717575,
0.07921002805233002,
0.05481657385826111,
0.07064352929592133,
-0.13647082448005676,
0.19791297614574432,
0.0011403782991692424,
0.045521605759859085,
-0.030771609395742416,
0.020187625661492348,
-0.11848779767751694,
0.025982927531003952,
-0.12537704408168793,
-0.03957723081111908,
0.0030198481399565935,
0.00038959976518526673,
-0.007891620509326458,
-0.06730910390615463,
-0.04862106218934059,
-0.005728733725845814,
-0.13209784030914307,
-0.017553111538290977,
0.0397711805999279,
0.03418998047709465,
-0.10584161430597305,
-0.03983399644494057,
0.019037194550037384,
-0.05284172669053078,
0.05895841866731644,
0.05172297731041908,
0.008803405798971653,
0.07545090466737747,
-0.15579217672348022,
0.0007440376793965697,
0.06471956521272659,
0.0016550584696233273,
0.0789731964468956,
-0.04842275381088257,
0.0012722990941256285,
-0.013131537474691868,
0.09663394093513489,
0.03374401852488518,
0.07917309552431107,
-0.13256274163722992,
0.015608618035912514,
-0.016414083540439606,
-0.09713869541883469,
-0.06776982545852661,
0.037636566907167435,
0.06371068209409714,
0.01853511482477188,
0.17896567285060883,
-0.08015838265419006,
0.058897633105516434,
-0.2132108360528946,
-0.005766621325165033,
-0.01234390027821064,
-0.11469864100217819,
-0.10086961090564728,
-0.08149390667676926,
0.07981782406568527,
-0.05808928236365318,
0.12016214430332184,
0.058535877615213394,
0.06569675356149673,
0.021144593134522438,
-0.003576872171834111,
0.01572355069220066,
0.03337302431464195,
0.1993437260389328,
0.041049495339393616,
-0.05117286741733551,
0.058990929275751114,
0.07962273061275482,
0.10832703113555908,
0.13652046024799347,
0.21315382421016693,
0.13573920726776123,
-0.027016043663024902,
0.08400540053844452,
0.021907443180680275,
-0.03915886580944061,
-0.14623205363750458,
0.016984866932034492,
-0.057730115950107574,
0.09302207082509995,
-0.029911041259765625,
0.21035070717334747,
0.04760585352778435,
-0.16937962174415588,
0.04426027089357376,
-0.057707369327545166,
-0.10626792907714844,
-0.1047947108745575,
-0.024974465370178223,
-0.08043865114450455,
-0.12768429517745972,
0.007662426680326462,
-0.11448001116514206,
0.011656730435788631,
0.11550837010145187,
0.01438788790255785,
-0.02949993498623371,
0.18109484016895294,
0.028293149545788765,
0.02873769961297512,
0.08521487563848495,
0.007845149375498295,
-0.01329805888235569,
-0.11325422674417496,
-0.06198659911751747,
-0.038480207324028015,
-0.00891659501940012,
0.029085911810398102,
-0.07633378356695175,
-0.0886649489402771,
0.02515975944697857,
-0.017547300085425377,
-0.10985255241394043,
0.022711994126439095,
0.016912780702114105,
0.07304808497428894,
0.041576627641916275,
-0.0014122213469818234,
0.008012368343770504,
-0.029682699590921402,
0.23680438101291656,
-0.08469341695308685,
-0.06940598785877228,
-0.09244325757026672,
0.27009961009025574,
0.0395711250603199,
-0.000009295958989241626,
0.022480009123682976,
-0.07133132219314575,
0.02010851353406906,
0.26389047503471375,
0.21620234847068787,
-0.1270969808101654,
-0.001363531337119639,
0.005314889829605818,
-0.0057011134922504425,
-0.00540531612932682,
0.13066235184669495,
0.11979489773511887,
0.04583674296736717,
-0.11488928645849228,
-0.023482102900743484,
-0.0508694052696228,
-0.014407274313271046,
-0.026326872408390045,
0.06806671619415283,
0.07365710288286209,
0.012738853693008423,
-0.06028333678841591,
0.07115283608436584,
-0.08745306730270386,
-0.100897878408432,
0.05681433528661728,
-0.225484699010849,
-0.16978615522384644,
-0.01960759423673153,
0.1007891446352005,
-0.002464213175699115,
0.07719725370407104,
-0.025936348363757133,
-0.006546495016664267,
0.04109171777963638,
-0.025039032101631165,
-0.07727918028831482,
-0.08605588972568512,
0.09963306039571762,
-0.1253133863210678,
0.17540030181407928,
-0.04685105383396149,
0.059091854840517044,
0.12223844975233078,
0.06608280539512634,
-0.03668865188956261,
0.0523231104016304,
0.03829411789774895,
-0.08152730762958527,
0.028756802901625633,
0.1113969087600708,
-0.03354546055197716,
0.034265872091054916,
0.04110616445541382,
-0.14446182548999786,
0.040228236466646194,
-0.09578108042478561,
-0.05457255616784096,
-0.03807707130908966,
-0.044947300106287,
-0.05534990504384041,
0.1267317682504654,
0.2381376028060913,
-0.009833847172558308,
0.02290351130068302,
-0.07301177829504013,
0.0009103852789849043,
0.04720219969749451,
0.03344864398241043,
-0.09810997545719147,
-0.24318012595176697,
-0.0026385621167719364,
0.07782167941331863,
-0.03417897969484329,
-0.25640392303466797,
-0.08660975098609924,
0.002089862711727619,
-0.06850263476371765,
-0.08996272087097168,
0.08269504457712173,
0.07856255024671555,
0.05460745096206665,
-0.05243023484945297,
-0.08494658023118973,
-0.07486661523580551,
0.16582080721855164,
-0.15162213146686554,
-0.09023240208625793
] |
null | null | null |
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# biobert-v1.1-finetuned-pubmedqa-adapter
This model is a fine-tuned version of [dmis-lab/biobert-v1.1](https://huggingface.co/dmis-lab/biobert-v1.1) on an unspecified dataset.
It achieves the following results on the evaluation set:
- Loss: 2.0910
- Accuracy: 0.48
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 0.003
- train_batch_size: 8
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 10
### Training results
| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| No log | 1.0 | 57 | 0.9848 | 0.58 |
| No log | 2.0 | 114 | 0.8537 | 0.58 |
| No log | 3.0 | 171 | 0.9565 | 0.42 |
| No log | 4.0 | 228 | 0.9659 | 0.56 |
| No log | 5.0 | 285 | 0.9763 | 0.6 |
| No log | 6.0 | 342 | 1.0647 | 0.66 |
| No log | 7.0 | 399 | 1.4305 | 0.6 |
| No log | 8.0 | 456 | 2.0545 | 0.56 |
| 0.6957 | 9.0 | 513 | 2.2438 | 0.5 |
| 0.6957 | 10.0 | 570 | 2.0910 | 0.48 |
### Framework versions
- Transformers 4.8.2
- Pytorch 1.9.0+cu102
- Datasets 1.11.0
- Tokenizers 0.10.3
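### Training setup (sketch)
The hyperparameters above can be expressed, roughly, as Hugging Face `TrainingArguments`. This is a hedged sketch, not the exact training script: the card does not say which adapter library or adapter configuration was used, and it does not name the training dataset, so those parts are left as commented placeholders; `num_labels=3` is an assumption based on the usual PubMedQA yes/no/maybe labels.
```python
# Hedged sketch of the Trainer configuration implied by the hyperparameters above.
# Adapter-specific setup is omitted because the card does not document it.
from transformers import (
    AutoModelForSequenceClassification,
    AutoTokenizer,
    Trainer,
    TrainingArguments,
)

base_model = "dmis-lab/biobert-v1.1"  # base checkpoint named in this card
tokenizer = AutoTokenizer.from_pretrained(base_model)
model = AutoModelForSequenceClassification.from_pretrained(
    base_model,
    num_labels=3,  # assumption: PubMedQA yes/no/maybe labels
)

training_args = TrainingArguments(
    output_dir="biobert-v1.1-finetuned-pubmedqa-adapter",
    learning_rate=3e-3,                # 0.003, as listed above
    per_device_train_batch_size=8,
    per_device_eval_batch_size=8,
    num_train_epochs=10,
    seed=42,
    lr_scheduler_type="linear",        # Adam betas/epsilon are the defaults listed above
    evaluation_strategy="epoch",       # matches the per-epoch validation table above
)

# The training/evaluation datasets are not named in the card, so they are left
# as placeholders here:
# trainer = Trainer(model=model, args=training_args,
#                   train_dataset=train_dataset, eval_dataset=eval_dataset)
# trainer.train()
```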
|
{"tags": ["generated_from_trainer"], "datasets": [], "metrics": ["accuracy"], "model_index": [{"name": "biobert-v1.1-finetuned-pubmedqa-adapter", "results": [{"task": {"name": "Text Classification", "type": "text-classification"}, "metric": {"name": "Accuracy", "type": "accuracy", "value": 0.48}}]}]}
| null |
blizrys/biobert-v1.1-finetuned-pubmedqa-adapter
|
[
"tensorboard",
"generated_from_trainer",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#tensorboard #generated_from_trainer #region-us
|
biobert-v1.1-finetuned-pubmedqa-adapter
=======================================
This model is a fine-tuned version of dmis-lab/biobert-v1.1 on an unspecified dataset.
It achieves the following results on the evaluation set:
* Loss: 2.0910
* Accuracy: 0.48
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 0.003
* train\_batch\_size: 8
* eval\_batch\_size: 8
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 10
### Training results
### Framework versions
* Transformers 4.8.2
* Pytorch 1.9.0+cu102
* Datasets 1.11.0
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.003\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 10",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.8.2\n* Pytorch 1.9.0+cu102\n* Datasets 1.11.0\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#tensorboard #generated_from_trainer #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.003\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 10",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.8.2\n* Pytorch 1.9.0+cu102\n* Datasets 1.11.0\n* Tokenizers 0.10.3"
] |
[
17,
97,
4,
34
] |
[
"passage: TAGS\n#tensorboard #generated_from_trainer #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.003\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 10### Training results### Framework versions\n\n\n* Transformers 4.8.2\n* Pytorch 1.9.0+cu102\n* Datasets 1.11.0\n* Tokenizers 0.10.3"
] |
[
-0.07540292292833328,
-0.0407540500164032,
-0.0007638538372702897,
0.10417835414409637,
0.21386995911598206,
0.020929459482431412,
0.12041391432285309,
0.05960815027356148,
-0.11139849573373795,
0.04753031209111214,
0.12600326538085938,
0.16068175435066223,
-0.0164119154214859,
0.0746191143989563,
-0.05064016953110695,
-0.2063300460577011,
-0.0313720740377903,
0.011737807653844357,
-0.0987095758318901,
0.11304520815610886,
0.0600661039352417,
-0.17274560034275055,
0.06235133111476898,
-0.05315553396940231,
-0.3211398720741272,
0.051080089062452316,
0.03451685607433319,
-0.014278470538556576,
0.130055233836174,
-0.0278825331479311,
0.19129490852355957,
-0.011346787214279175,
0.10811815410852432,
-0.14915968477725983,
0.014545340090990067,
0.08976539969444275,
0.010001152753829956,
0.05513457953929901,
0.0540725402534008,
0.018512023612856865,
0.12653547525405884,
-0.12958693504333496,
0.05577443540096283,
-0.00951188150793314,
-0.16156984865665436,
-0.19036009907722473,
-0.06963711977005005,
-0.11354638636112213,
0.04063698649406433,
0.08617784082889557,
-0.02351265214383602,
0.17979669570922852,
-0.08437442034482956,
0.10142321139574051,
0.264234721660614,
-0.2459867149591446,
-0.09237400442361832,
0.11832679063081741,
-0.00045039475662633777,
0.16661466658115387,
-0.11249158531427383,
0.007633537519723177,
0.08594544231891632,
0.04648778960108757,
0.1198543980717659,
-0.03381219506263733,
-0.13929429650306702,
0.036224741488695145,
-0.1555161029100418,
0.03422588109970093,
0.04127662628889084,
0.026364924386143684,
0.0037830935325473547,
0.05421006307005882,
-0.08157135546207428,
-0.15403760969638824,
-0.0553225614130497,
-0.04980766773223877,
0.10165238380432129,
-0.05228447541594505,
-0.10597879439592361,
-0.0002546927717048675,
-0.08875323832035065,
-0.07120480388402939,
-0.06462638080120087,
0.1725209653377533,
0.039076339453458786,
0.026793548837304115,
-0.05647261068224907,
0.0666598528623581,
-0.08949387818574905,
-0.11104155331850052,
0.06834730505943298,
0.05916077271103859,
-0.04287872835993767,
-0.08415906131267548,
-0.09422307461500168,
-0.17487271130084991,
0.021741295233368874,
0.03448034077882767,
-0.07911767810583115,
0.08478163182735443,
0.0504411906003952,
0.047945450991392136,
-0.1187279224395752,
0.1672830730676651,
-0.02943911775946617,
-0.01071485411375761,
-0.007788531016558409,
0.021129164844751358,
-0.023976262658834457,
-0.0047067697159945965,
-0.09585851430892944,
0.04409879073500633,
0.08758719265460968,
-0.010870939120650291,
-0.11718571931123734,
0.017976835370063782,
-0.03571650758385658,
0.011096933856606483,
-0.09930367022752762,
-0.08981684595346451,
0.05159278213977814,
-0.036842480301856995,
-0.08642379939556122,
-0.010579022578895092,
-0.0228084996342659,
0.020760204643011093,
0.016095055267214775,
0.14164018630981445,
-0.09103551506996155,
0.099054254591465,
-0.14040479063987732,
-0.13527043163776398,
-0.019193658605217934,
-0.10352548956871033,
0.022073796018958092,
-0.05755615234375,
-0.1064164936542511,
-0.056144025176763535,
0.0701432079076767,
-0.03947994112968445,
0.016730597242712975,
-0.06834331154823303,
-0.06927221268415451,
-0.02451368421316147,
-0.00951281189918518,
0.18900926411151886,
-0.05888645350933075,
0.07628524303436279,
0.02868831343948841,
0.10271748900413513,
-0.09960611909627914,
0.04419844225049019,
-0.05350414663553238,
-0.0032931652385741472,
-0.28065481781959534,
0.04552341252565384,
-0.05448014289140701,
0.10497833788394928,
-0.0702216699719429,
-0.08178103715181351,
-0.016344167292118073,
0.0007589504821226001,
0.12309321016073227,
0.04947720095515251,
-0.26578161120414734,
-0.06295822560787201,
0.16901031136512756,
-0.04827769845724106,
-0.0644942969083786,
0.08318650722503662,
-0.08107972145080566,
0.08798525482416153,
0.10340645909309387,
0.2469397485256195,
-0.06918397545814514,
-0.1020967960357666,
0.048719100654125214,
-0.05573670193552971,
0.03861979767680168,
-0.07956932485103607,
-0.006248386111110449,
-0.0044161975383758545,
0.0633380189538002,
0.02264682576060295,
0.01455390639603138,
0.04484495893120766,
-0.13214954733848572,
-0.07537616044282913,
-0.042812664061784744,
-0.10850367695093155,
-0.03989005833864212,
0.08649033308029175,
0.11597675830125809,
-0.12478949874639511,
-0.016444338485598564,
0.1433844119310379,
0.021548625081777573,
-0.07174298912286758,
0.05305642634630203,
-0.013327114284038544,
0.015310123562812805,
-0.0608859546482563,
-0.039868928492069244,
-0.20981533825397491,
-0.03347225859761238,
0.013085922226309776,
0.04747549444437027,
0.06502310931682587,
0.05626721307635307,
0.09150504320859909,
0.04812530055642128,
-0.06188230961561203,
0.054721374064683914,
-0.027734989300370216,
-0.009071946144104004,
-0.1867191046476364,
-0.21670475602149963,
0.023885639384388924,
-0.025263724848628044,
0.04754333943128586,
-0.22976519167423248,
0.012843701057136059,
-0.0387510284781456,
0.04171200841665268,
0.014631801284849644,
-0.0530463382601738,
-0.07455217093229294,
0.11016830801963806,
0.008182440884411335,
-0.08194399625062943,
0.0685906633734703,
-0.040534961968660355,
-0.07356078177690506,
-0.11834409832954407,
-0.10742011666297913,
0.14041677117347717,
0.1317167431116104,
-0.20770730078220367,
-0.11258680373430252,
0.03863916173577309,
-0.042478300631046295,
-0.018080858513712883,
-0.05195743963122368,
0.09448704868555069,
0.22235484421253204,
-0.010683506727218628,
0.1281178742647171,
-0.07659224420785904,
-0.01788313314318657,
-0.0003072007966693491,
-0.0354943722486496,
0.09637082368135452,
0.11253786832094193,
0.16702356934547424,
-0.042158182710409164,
0.08460161089897156,
0.15142568945884705,
-0.13097158074378967,
0.0938277542591095,
-0.041644513607025146,
-0.08092910051345825,
-0.020026428624987602,
-0.004083545878529549,
-0.005346597637981176,
0.11346876621246338,
-0.09460634738206863,
-0.016589175909757614,
-0.03487526625394821,
0.039654288440942764,
0.048165321350097656,
-0.25544267892837524,
-0.06200117990374565,
0.015172525309026241,
-0.026303140446543694,
0.013188456185162067,
-0.04981183260679245,
0.0016363146714866161,
0.11343445628881454,
-0.02812637947499752,
-0.06078604236245155,
-0.004290347918868065,
-0.0012530158273875713,
-0.05084790661931038,
0.2171054631471634,
-0.06437113136053085,
-0.0030145677737891674,
-0.025658538565039635,
-0.06196064129471779,
-0.02473778836429119,
0.005685406271368265,
0.04243761673569679,
-0.1604272723197937,
-0.011629024520516396,
-0.05361656844615936,
0.00975108053535223,
0.019464759156107903,
0.0504327230155468,
0.031034648418426514,
-0.0018319344380870461,
0.07145597040653229,
-0.11667506396770477,
0.007603433448821306,
-0.12119968235492706,
-0.09687710553407669,
0.05743147432804108,
0.06444384157657623,
0.12473998218774796,
0.17454397678375244,
-0.039331596344709396,
0.008900654502213001,
-0.01355142705142498,
0.2726666033267975,
-0.07242816686630249,
-0.05095749348402023,
0.03800138458609581,
0.0018093407852575183,
0.04908657819032669,
0.08044715970754623,
0.09286731481552124,
-0.1591760218143463,
0.00276382127776742,
0.07022116333246231,
-0.07755061239004135,
-0.2271192967891693,
-0.006002100184559822,
-0.028740592300891876,
-0.14268146455287933,
0.048304129391908646,
0.01500737015157938,
-0.014527836814522743,
0.04652322828769684,
0.12242753058671951,
0.1168152466416359,
-0.07774649560451508,
0.01730843260884285,
0.023732414469122887,
0.052827753126621246,
0.11671344935894012,
-0.05900882929563522,
-0.0685456395149231,
0.02825096994638443,
-0.0563257560133934,
0.32342272996902466,
0.03262082487344742,
0.08847816288471222,
0.08207894116640091,
0.1398409903049469,
-0.027127133682370186,
0.07087690383195877,
0.002843657974153757,
-0.09513632208108902,
-0.024021275341510773,
-0.040891874581575394,
0.0022840434685349464,
0.014064648188650608,
-0.09086038172245026,
0.019789496436715126,
-0.07372915744781494,
0.02947390265762806,
0.06477950513362885,
0.19266073405742645,
0.004885036498308182,
-0.2788732647895813,
0.009987286292016506,
-0.012363833375275135,
0.0032584427390247583,
0.019480597227811813,
0.018973642960190773,
0.1712685227394104,
-0.029159456491470337,
0.011294519528746605,
-0.03755699098110199,
0.09340877085924149,
-0.008688422851264477,
0.04579754173755646,
0.031124653294682503,
0.14987294375896454,
-0.02242123894393444,
0.02185308374464512,
-0.31134167313575745,
0.30907338857650757,
0.0228273905813694,
0.10176986455917358,
-0.041229479014873505,
-0.04172544181346893,
0.015104618854820728,
0.014905638061463833,
0.008005527779459953,
-0.00983278825879097,
-0.0942688137292862,
-0.2051517814397812,
-0.02343560755252838,
0.06359346210956573,
0.17262475192546844,
0.04525148868560791,
0.07372939586639404,
0.02608254738152027,
0.03344156965613365,
0.10858497023582458,
-0.050053101032972336,
-0.0966251790523529,
-0.02878417819738388,
-0.07967034727334976,
0.04267160966992378,
-0.16291704773902893,
-0.02918313629925251,
-0.10020717233419418,
-0.15343545377254486,
0.10881280153989792,
0.09767226874828339,
0.007931253872811794,
-0.10468226671218872,
0.14596866071224213,
0.07415571063756943,
-0.054381754249334335,
0.03300732001662254,
0.0415785051882267,
-0.017187314108014107,
0.06106949970126152,
-0.028272725641727448,
0.1302712857723236,
-0.013580458238720894,
-0.0871087834239006,
-0.059044249355793,
0.03197050839662552,
0.0658789575099945,
0.0651342123746872,
-0.031449172645807266,
0.03440834581851959,
-0.007460953202098608,
-0.10703036189079285,
0.04263526573777199,
-0.06700422614812851,
0.044085633009672165,
0.031504593789577484,
-0.04654023051261902,
0.0663435086607933,
-0.047373414039611816,
-0.01757749728858471,
0.16528993844985962,
0.3092266321182251,
-0.06796716898679733,
-0.03112334944307804,
-0.0026827373076230288,
-0.0863036960363388,
-0.16244173049926758,
0.1728551834821701,
0.08965694904327393,
-0.0210493765771389,
0.08571380376815796,
-0.16408966481685638,
0.1761562079191208,
0.14842402935028076,
0.024413958191871643,
0.1457737386226654,
-0.3107076585292816,
-0.14902549982070923,
0.05800584703683853,
0.22598125040531158,
0.2167191505432129,
-0.16781392693519592,
-0.010468961670994759,
-0.04028600826859474,
-0.07320404052734375,
0.06917630136013031,
-0.1597318798303604,
0.12024451047182083,
0.0034723172429949045,
0.0874713584780693,
-0.004793606698513031,
-0.06339716911315918,
0.11747953295707703,
0.05926220864057541,
0.1738085001707077,
-0.05432655289769173,
-0.03415319696068764,
0.07407408207654953,
-0.015313575975596905,
-0.0536612831056118,
-0.0050112903118133545,
-0.007672992069274187,
-0.030816784128546715,
-0.002359914593398571,
-0.07259311527013779,
0.0030875876545906067,
-0.020043686032295227,
-0.048648424446582794,
-0.0571993924677372,
-0.014066317118704319,
0.04160808026790619,
-0.002115703886374831,
0.12418558448553085,
0.008364807814359665,
0.1737319827079773,
0.03995068743824959,
-0.0017355235759168863,
-0.13684704899787903,
-0.01021052896976471,
0.03458636626601219,
0.022938989102840424,
0.037503432482481,
-0.19188223779201508,
0.01324955839663744,
0.1464499533176422,
0.019999174401164055,
0.10413375496864319,
0.08484811335802078,
-0.019423719495534897,
0.04121962934732437,
0.06214045733213425,
-0.13907986879348755,
-0.0873529389500618,
0.05787328630685806,
-0.11845579743385315,
-0.03863237053155899,
0.05593709647655487,
0.05696297809481621,
-0.04403683915734291,
0.017056917771697044,
-0.038148432970047,
-0.033026352524757385,
-0.08561299741268158,
0.2350780963897705,
0.0856560468673706,
0.02217838354408741,
-0.10922355204820633,
0.06935086846351624,
0.046558089554309845,
-0.054199956357479095,
0.016643639653921127,
0.06672138720750809,
-0.057183731347322464,
0.018879026174545288,
0.18709135055541992,
0.2813037931919098,
-0.032710105180740356,
-0.008996699005365372,
-0.16053158044815063,
-0.06859973073005676,
0.05275469273328781,
0.2024518996477127,
0.12178220599889755,
-0.04783656448125839,
-0.022090518847107887,
0.030057458207011223,
-0.14725632965564728,
0.022355875000357628,
0.00940045714378357,
0.07473617047071457,
-0.0891852080821991,
0.1901070922613144,
0.026736222207546234,
-0.007008718326687813,
-0.03166113421320915,
0.05142735689878464,
-0.13012731075286865,
0.04036532714962959,
-0.10192040354013443,
-0.05594978109002113,
0.05023106560111046,
-0.02291976474225521,
0.013167189434170723,
-0.0711427628993988,
-0.08832979202270508,
0.007461317349225283,
-0.15171509981155396,
-0.009823212400078773,
0.056093618273735046,
0.012740504927933216,
-0.11560475081205368,
-0.037594638764858246,
-0.001736511243507266,
-0.01386302337050438,
0.015964914113283157,
0.039910342544317245,
0.025803031399846077,
0.08547760546207428,
-0.21452215313911438,
-0.013886191882193089,
0.09329910576343536,
-0.013858133926987648,
0.10502718389034271,
0.00859109777957201,
-0.037042923271656036,
-0.012822093442082405,
0.18120169639587402,
0.028918607160449028,
0.051758017390966415,
-0.1149241179227829,
0.016467029228806496,
-0.0886506587266922,
-0.11518871039152145,
-0.03492087125778198,
-0.0029993592761456966,
0.0669509619474411,
0.04211168736219406,
0.1490532010793686,
-0.07325686514377594,
0.020855091512203217,
-0.21962454915046692,
-0.026204314082860947,
0.009564516134560108,
-0.08717731386423111,
-0.07059575617313385,
-0.05760791152715683,
0.08732055872678757,
-0.0682356134057045,
0.16237077116966248,
0.06437833607196808,
0.05997033417224884,
0.03532371670007706,
-0.007574375718832016,
-0.030756935477256775,
0.023957202211022377,
0.1942257136106491,
0.05947955325245857,
-0.030880866572260857,
0.08499255776405334,
0.10500096529722214,
0.13963094353675842,
0.11153025180101395,
0.2639656364917755,
0.18527282774448395,
-0.06884384155273438,
0.10423695296049118,
-0.0052407365292310715,
-0.05339404568076134,
-0.08566083759069443,
0.12701541185379028,
-0.06268858909606934,
0.0738227516412735,
-0.013411937281489372,
0.18957601487636566,
0.03223040699958801,
-0.17405839264392853,
0.04101511836051941,
-0.06325149536132812,
-0.11648455262184143,
-0.11235325038433075,
0.07099930197000504,
-0.09534357488155365,
-0.16005557775497437,
0.02617722377181053,
-0.09906303882598877,
0.04467145353555679,
0.17296357452869415,
0.011538594029843807,
0.020947569981217384,
0.21535098552703857,
0.035707876086235046,
0.06789708137512207,
0.03703564405441284,
-0.013132474385201931,
-0.04279688373208046,
-0.10562257468700409,
-0.08634672313928604,
-0.02038928121328354,
-0.0170089453458786,
0.022789180278778076,
-0.05446706339716911,
-0.0804004967212677,
0.026898004114627838,
-0.02428821660578251,
-0.1054673120379448,
0.010717920027673244,
0.04365181922912598,
0.06072191148996353,
-0.024277852848172188,
-0.003013943089172244,
0.007144404109567404,
-0.03807318955659866,
0.20958615839481354,
-0.054958492517471313,
-0.07443062961101532,
-0.051108021289110184,
0.20868521928787231,
0.052765071392059326,
0.019068563356995583,
-0.0236070454120636,
-0.07635506987571716,
0.011299913749098778,
0.21403248608112335,
0.1522139459848404,
-0.17055729031562805,
-0.01957658678293228,
-0.026408622041344643,
-0.013544818386435509,
-0.05316004902124405,
0.16938607394695282,
0.10870855301618576,
-0.06168519705533981,
-0.11907196044921875,
-0.07101713865995407,
-0.05959014222025871,
-0.011179366149008274,
-0.07138366997241974,
-0.00025022949557751417,
0.09879161417484283,
0.041489455848932266,
-0.061048779636621475,
0.08148474246263504,
-0.0410405695438385,
-0.12838029861450195,
0.08813507854938507,
-0.22052544355392456,
-0.17199298739433289,
-0.0030310340225696564,
0.12971392273902893,
-0.037605468183755875,
0.058834854513406754,
-0.05571949481964111,
-0.007746873889118433,
-0.006599841173738241,
-0.05590788647532463,
-0.07098731398582458,
-0.11524683237075806,
0.08715465664863586,
-0.20145808160305023,
0.17398875951766968,
-0.03988448902964592,
0.0951061099767685,
0.10677388310432434,
0.053148023784160614,
-0.030708055943250656,
0.0956207886338234,
0.022831792011857033,
-0.10675695538520813,
-0.020483775064349174,
0.10434159636497498,
-0.06203809380531311,
0.003600062569603324,
0.05044110491871834,
-0.1023913323879242,
0.03468707203865051,
-0.05547843128442764,
-0.044342901557683945,
-0.034060288220644,
-0.09358777850866318,
-0.09286151826381683,
0.07740537822246552,
0.20223161578178406,
0.014699838124215603,
0.08083294332027435,
-0.08546378463506699,
0.007906002923846245,
0.05386843532323837,
0.061114318668842316,
-0.13080130517482758,
-0.25710055232048035,
0.03019610233604908,
0.13149243593215942,
-0.07535038143396378,
-0.1940641701221466,
-0.08173292875289917,
0.015753837302327156,
-0.07820693403482437,
-0.06629078090190887,
0.08124746382236481,
0.08495938032865524,
0.06791015714406967,
-0.056135982275009155,
-0.19740448892116547,
-0.0688498318195343,
0.16961225867271423,
-0.12465069442987442,
-0.1050846055150032
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# biobert-v1.1-finetuned-pubmedqa
This model is a fine-tuned version of [dmis-lab/biobert-v1.1](https://huggingface.co/dmis-lab/biobert-v1.1) on an unspecified dataset.
It achieves the following results on the evaluation set:
- Loss: 0.7737
- Accuracy: 0.7
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 1e-05
- train_batch_size: 8
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 10
### Training results
| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| No log | 1.0 | 57 | 0.8810 | 0.56 |
| No log | 2.0 | 114 | 0.8139 | 0.62 |
| No log | 3.0 | 171 | 0.7963 | 0.68 |
| No log | 4.0 | 228 | 0.7709 | 0.66 |
| No log | 5.0 | 285 | 0.7931 | 0.64 |
| No log | 6.0 | 342 | 0.7420 | 0.7 |
| No log | 7.0 | 399 | 0.7654 | 0.7 |
| No log | 8.0 | 456 | 0.7756 | 0.68 |
| 0.5849 | 9.0 | 513 | 0.7605 | 0.68 |
| 0.5849 | 10.0 | 570 | 0.7737 | 0.7 |
### Framework versions
- Transformers 4.10.2
- Pytorch 1.9.0+cu102
- Datasets 1.11.0
- Tokenizers 0.10.3
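### Inference sketch
Since this repository is tagged for text classification, the checkpoint can be loaded directly with the `pipeline` API. This is a hedged sketch: the example input is illustrative only, and the card does not document the expected input format or the label names of the classification head.
```python
# Hedged sketch: load the fine-tuned checkpoint with the text-classification pipeline.
from transformers import pipeline

classifier = pipeline(
    "text-classification",
    model="blizrys/biobert-v1.1-finetuned-pubmedqa",
)

# Illustrative input only; the card does not specify how question/context were encoded.
print(classifier("Does metformin reduce the risk of cardiovascular events?"))
# -> [{'label': ..., 'score': ...}]
```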
|
{"tags": ["generated_from_trainer"], "datasets": [], "metrics": ["accuracy"]}
|
text-classification
|
blizrys/biobert-v1.1-finetuned-pubmedqa
|
[
"transformers",
"pytorch",
"tensorboard",
"bert",
"text-classification",
"generated_from_trainer",
"model-index",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #bert #text-classification #generated_from_trainer #model-index #autotrain_compatible #endpoints_compatible #region-us
|
biobert-v1.1-finetuned-pubmedqa
===============================
This model is a fine-tuned version of dmis-lab/biobert-v1.1 on an unspecified dataset.
It achieves the following results on the evaluation set:
* Loss: 0.7737
* Accuracy: 0.7
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 1e-05
* train\_batch\_size: 8
* eval\_batch\_size: 8
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 10
### Training results
### Framework versions
* Transformers 4.10.2
* Pytorch 1.9.0+cu102
* Datasets 1.11.0
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 1e-05\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 10",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.10.2\n* Pytorch 1.9.0+cu102\n* Datasets 1.11.0\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #bert #text-classification #generated_from_trainer #model-index #autotrain_compatible #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 1e-05\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 10",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.10.2\n* Pytorch 1.9.0+cu102\n* Datasets 1.11.0\n* Tokenizers 0.10.3"
] |
[
51,
98,
4,
34
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #bert #text-classification #generated_from_trainer #model-index #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 1e-05\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 10### Training results### Framework versions\n\n\n* Transformers 4.10.2\n* Pytorch 1.9.0+cu102\n* Datasets 1.11.0\n* Tokenizers 0.10.3"
] |
[
-0.10333510488271713,
0.05702367052435875,
-0.0014348356053233147,
0.11853138357400894,
0.20317751169204712,
0.038077495992183685,
0.12030661106109619,
0.11005394905805588,
-0.08334875106811523,
0.01738586463034153,
0.1266016662120819,
0.17484842240810394,
0.0019629206508398056,
0.08818419277667999,
-0.06047726050019264,
-0.2798804044723511,
-0.02948552370071411,
0.04776883125305176,
-0.06062687188386917,
0.13573315739631653,
0.08317619562149048,
-0.15439245104789734,
0.08042962104082108,
-0.005145539529621601,
-0.2319299578666687,
0.011662262491881847,
0.025726811960339546,
-0.06006883084774017,
0.15629544854164124,
0.006230210419744253,
0.15831570327281952,
-0.0037338598631322384,
0.09691574424505234,
-0.1647930145263672,
0.014197234995663166,
0.05011899024248123,
0.01537317968904972,
0.09184325486421585,
0.06602785736322403,
-0.01983288675546646,
0.10343275219202042,
-0.08560337871313095,
0.06584487110376358,
0.004430330358445644,
-0.1315222978591919,
-0.19033265113830566,
-0.05735154449939728,
0.008450263179838657,
0.0532824844121933,
0.10489889979362488,
-0.013479511253535748,
0.1510801613330841,
-0.10140783339738846,
0.104265496134758,
0.21655894815921783,
-0.26820361614227295,
-0.0772402435541153,
0.04448103532195091,
0.0032381871715188026,
0.10056646168231964,
-0.12118013203144073,
-0.014445674605667591,
0.05884551629424095,
0.05156652256846428,
0.12017728388309479,
-0.0337807834148407,
-0.09694581478834152,
0.026292230933904648,
-0.14902064204216003,
-0.01178751140832901,
0.0956500992178917,
0.02312684990465641,
-0.022163374349474907,
-0.02380567416548729,
-0.058582670986652374,
-0.15889225900173187,
-0.03921607881784439,
-0.01789977215230465,
0.03689814731478691,
-0.04750879481434822,
-0.0922863706946373,
-0.007046262267976999,
-0.10674437135457993,
-0.06481071561574936,
-0.06233888864517212,
0.163425475358963,
0.03646283596754074,
0.008820542134344578,
-0.041165757924318314,
0.11487581580877304,
-0.0017596159595996141,
-0.13416655361652374,
0.05370323732495308,
0.026829971000552177,
-0.014482252299785614,
-0.06519178301095963,
-0.07357905805110931,
-0.09987400472164154,
-0.009477591142058372,
0.06699084490537643,
-0.046508919447660446,
0.052640654146671295,
0.04331822693347931,
0.03718949109315872,
-0.08833340555429459,
0.2143067717552185,
-0.036887820810079575,
-0.02276570163667202,
-0.009054867550730705,
0.05715770274400711,
-0.021799236536026,
-0.019614920020103455,
-0.12307125329971313,
0.00826688576489687,
0.12587657570838928,
0.0016542241210117936,
-0.08059827983379364,
0.07320704311132431,
-0.04089144989848137,
-0.033569518476724625,
-0.035691823810338974,
-0.093243308365345,
0.04522720351815224,
-0.0036684435326606035,
-0.09126905351877213,
-0.008483055979013443,
0.008876276202499866,
0.0201288852840662,
-0.025210218504071236,
0.15420883893966675,
-0.09542032331228256,
0.05244506523013115,
-0.11129170656204224,
-0.12667179107666016,
-0.0045929765328764915,
-0.10442762076854706,
0.022627653554081917,
-0.09345044195652008,
-0.15143893659114838,
-0.02923569828271866,
0.05031495913863182,
-0.027707833796739578,
-0.03981734439730644,
-0.06654512137174606,
-0.06864488124847412,
0.009470849297940731,
-0.0058632041327655315,
0.15543855726718903,
-0.05403218790888786,
0.11376675963401794,
0.030092887580394745,
0.06277712434530258,
-0.049568165093660355,
0.058134350925683975,
-0.0859052985906601,
-0.0035354977007955313,
-0.1876055747270584,
0.0619121715426445,
-0.04126492142677307,
0.07807020097970963,
-0.06831524521112442,
-0.11189580708742142,
0.010486516170203686,
0.003127695992588997,
0.08374008536338806,
0.09741769731044769,
-0.17755825817584991,
-0.08870444446802139,
0.15290389955043793,
-0.04846508428454399,
-0.09173764288425446,
0.1207609698176384,
-0.08275116980075836,
0.05028669536113739,
0.09202835708856583,
0.18218542635440826,
0.06279102712869644,
-0.06449878215789795,
0.03783797845244408,
-0.010796234011650085,
0.06628751009702682,
-0.052140675485134125,
0.03635707497596741,
0.012767479754984379,
-0.012968302704393864,
0.031822700053453445,
-0.021735774353146553,
0.06524989753961563,
-0.11507508903741837,
-0.08704157173633575,
-0.029782578349113464,
-0.10558168590068817,
0.07902883738279343,
0.08608448505401611,
0.10451894998550415,
-0.09935860335826874,
-0.06709788739681244,
0.08053344488143921,
0.058314982801675797,
-0.058637835085392,
0.01919405534863472,
-0.04689939692616463,
0.06544087082147598,
-0.05145040154457092,
-0.029962055385112762,
-0.2074255496263504,
-0.021111855283379555,
0.004028416704386473,
0.05268774554133415,
0.03292553871870041,
0.03657953441143036,
0.0836450457572937,
0.06089824065566063,
-0.057963740080595016,
-0.010576806962490082,
-0.013779999688267708,
-0.00591406412422657,
-0.1611507087945938,
-0.19799606502056122,
-0.020075641572475433,
-0.021408716216683388,
0.12098619341850281,
-0.2272190898656845,
0.033250339329242706,
-0.03672114014625549,
0.0666489377617836,
0.01011990662664175,
-0.013625888153910637,
-0.05108533054590225,
0.09874691814184189,
-0.025838807225227356,
-0.04096517711877823,
0.07985758781433105,
-0.008662663400173187,
-0.09349264949560165,
-0.0704282745718956,
-0.10807916522026062,
0.17674817144870758,
0.1361951380968094,
-0.1675439476966858,
-0.09762129932641983,
-0.001348912832327187,
-0.04927917942404747,
-0.016456227749586105,
-0.0509905144572258,
0.041967131197452545,
0.22957931458950043,
-0.0067420112900435925,
0.15744639933109283,
-0.06183929368853569,
-0.04230789840221405,
0.013072337955236435,
-0.03526587039232254,
0.04160784184932709,
0.13063763082027435,
0.11636316031217575,
-0.089559406042099,
0.126482293009758,
0.13345250487327576,
-0.1086946502327919,
0.15137293934822083,
-0.020354852080345154,
-0.06817790120840073,
-0.006368008442223072,
-0.036672476679086685,
-0.004272323567420244,
0.09126418828964233,
-0.15537066757678986,
-0.030438680201768875,
0.0064015439711511135,
0.022530151531100273,
0.026909593492746353,
-0.23201541602611542,
-0.04865400493144989,
0.038824670016765594,
-0.012429386377334595,
-0.008724500425159931,
-0.028500637039542198,
0.02158854715526104,
0.12229250371456146,
0.002456776797771454,
-0.08088928461074829,
0.02520916238427162,
-0.00015051112859509885,
-0.07904611527919769,
0.22519707679748535,
-0.07110895216464996,
-0.13769660890102386,
-0.1104573979973793,
-0.08614935725927353,
-0.04640251770615578,
0.020293809473514557,
0.041831016540527344,
-0.12237180769443512,
-0.012459417805075645,
-0.0410735048353672,
0.028067993000149727,
-0.007950963452458382,
0.04434008151292801,
-0.00997576117515564,
-0.00831669382750988,
0.05484692379832268,
-0.10098852217197418,
-0.008465361781418324,
-0.07591063529253006,
-0.06796281039714813,
0.0532708615064621,
0.03474524989724159,
0.11525792628526688,
0.18218345940113068,
-0.03937524929642677,
0.011841380037367344,
-0.027008676901459694,
0.23933374881744385,
-0.07811737060546875,
-0.03584776073694229,
0.09817083925008774,
-0.03759152442216873,
0.04806971549987793,
0.10055387765169144,
0.07966278493404388,
-0.09132383018732071,
0.011379558593034744,
0.03318117931485176,
-0.04333256557583809,
-0.21562634408473969,
-0.039424967020750046,
-0.05090508237481117,
-0.037856269627809525,
0.09508489072322845,
0.016774019226431847,
0.04494569078087807,
0.07836788147687912,
0.06370441615581512,
0.08355195820331573,
-0.05223379656672478,
0.04300154745578766,
0.08948826789855957,
0.03885580599308014,
0.13031189143657684,
-0.03421208634972572,
-0.09222173690795898,
0.0258391872048378,
-0.04067836329340935,
0.21753312647342682,
-0.0026770075783133507,
0.10106001049280167,
0.04167122766375542,
0.1648472547531128,
0.01057074312120676,
0.09081575274467468,
-0.004037116654217243,
-0.05751541256904602,
-0.007257789373397827,
-0.02830929309129715,
-0.04765133187174797,
0.011872503906488419,
-0.034078359603881836,
0.056130703538656235,
-0.13754533231258392,
-0.01853160560131073,
0.05323673039674759,
0.2201623022556305,
0.03371912240982056,
-0.3259112238883972,
-0.07455416768789291,
0.0007563615217804909,
-0.029261145740747452,
-0.0205391813069582,
0.01231666561216116,
0.11326710879802704,
-0.10879725217819214,
0.01758621260523796,
-0.06807686388492584,
0.09659461677074432,
-0.0644911378622055,
0.05614618957042694,
0.05409802868962288,
0.09519852697849274,
-0.013433963991701603,
0.0780353918671608,
-0.30447444319725037,
0.27809396386146545,
0.006867044139653444,
0.06882482022047043,
-0.08501585572957993,
-0.020557178184390068,
0.041745226830244064,
0.05680801719427109,
0.03961426019668579,
-0.01244272105395794,
0.007546028587967157,
-0.21943014860153198,
-0.03937871381640434,
0.03169091418385506,
0.1185697540640831,
-0.03434011712670326,
0.09874742478132248,
-0.01946009323000908,
0.00858866237103939,
0.07544443011283875,
-0.030130185186862946,
-0.04164358600974083,
-0.08392094820737839,
-0.02469244785606861,
0.011642014607787132,
-0.059208814054727554,
-0.04726117104291916,
-0.12303232401609421,
-0.13956612348556519,
0.14507637917995453,
0.015589740127325058,
-0.025737805292010307,
-0.11928940564393997,
0.11470570415258408,
0.0675509050488472,
-0.08350997418165207,
0.02919490821659565,
0.01707438752055168,
0.059048037976026535,
0.027674803510308266,
-0.05907857045531273,
0.1103416234254837,
-0.050841230899095535,
-0.1524256318807602,
-0.06457290053367615,
0.09216798096895218,
0.052848294377326965,
0.06823704391717911,
-0.00989560317248106,
0.015363754704594612,
-0.037086084485054016,
-0.09386551380157471,
0.0278838649392128,
-0.027025455608963966,
0.06148187071084976,
0.03728257864713669,
-0.057296570390462875,
0.01126210018992424,
-0.07054423540830612,
-0.006965961307287216,
0.2108328491449356,
0.21323199570178986,
-0.09070395678281784,
-0.004337855614721775,
0.03502494469285011,
-0.07296591252088547,
-0.19800856709480286,
0.08477146178483963,
0.08442626148462296,
0.010853191837668419,
0.03931950405240059,
-0.1719275414943695,
0.143052339553833,
0.08316196501255035,
0.0022656084038317204,
0.11384981125593185,
-0.31259357929229736,
-0.13104449212551117,
0.12297260016202927,
0.16932375729084015,
0.1360408514738083,
-0.13860073685646057,
-0.017943745478987694,
-0.01969818025827408,
-0.09540431946516037,
0.09414840489625931,
-0.08027873933315277,
0.12215876579284668,
-0.0184260793030262,
0.10673542320728302,
0.019400903955101967,
-0.06092527508735657,
0.10643205046653748,
0.02546064741909504,
0.10441160202026367,
-0.06848146766424179,
-0.07049409300088882,
0.026021521538496017,
-0.03254416212439537,
-0.008019461296498775,
-0.04951552674174309,
0.01713758334517479,
-0.10520169138908386,
-0.02613876946270466,
-0.0907905101776123,
0.02829785645008087,
-0.032315418124198914,
-0.0677444115281105,
-0.016279034316539764,
0.020767666399478912,
0.03909635916352272,
-0.011866292916238308,
0.11367145925760269,
-0.007085889577865601,
0.1792486011981964,
0.08800794184207916,
0.09197632968425751,
-0.069939024746418,
-0.045966736972332,
-0.005441546905785799,
-0.005602903664112091,
0.05460225045681,
-0.12628479301929474,
0.02186565287411213,
0.15206392109394073,
0.027956273406744003,
0.13943220674991608,
0.09578590095043182,
-0.009799225255846977,
0.012240245006978512,
0.06344480812549591,
-0.16919302940368652,
-0.06292782723903656,
-0.017429303377866745,
-0.08771850913763046,
-0.10620854794979095,
0.04609373211860657,
0.09138722717761993,
-0.06233292073011398,
-0.011657155118882656,
-0.019649343565106392,
-0.018784336745738983,
-0.06197642907500267,
0.2118193805217743,
0.08067067712545395,
0.0492110438644886,
-0.10343965142965317,
0.05384765937924385,
0.060252800583839417,
-0.07571657001972198,
0.0023670257069170475,
0.08624976128339767,
-0.08204003423452377,
-0.03672831505537033,
0.10139722377061844,
0.23197482526302338,
-0.06002824753522873,
-0.01824226602911949,
-0.1446094661951065,
-0.11974175274372101,
0.07130279392004013,
0.17302726209163666,
0.11723332852125168,
-0.004689945839345455,
-0.07878560572862625,
0.016746526584029198,
-0.1362931728363037,
0.07873072475194931,
0.055105019360780716,
0.07018157094717026,
-0.135887011885643,
0.19825994968414307,
0.001642804592847824,
0.04675707593560219,
-0.030982766300439835,
0.019461356103420258,
-0.11782259494066238,
0.026673799380660057,
-0.12751083076000214,
-0.03864553943276405,
0.0025578688364475965,
0.0003236242919228971,
-0.007361225783824921,
-0.0673007071018219,
-0.04843315854668617,
-0.006322041619569063,
-0.1319664865732193,
-0.0178611408919096,
0.03919048234820366,
0.03337879478931427,
-0.10580906271934509,
-0.040259286761283875,
0.018730800598859787,
-0.05284161865711212,
0.05902258679270744,
0.051607176661491394,
0.008601696230471134,
0.07437563687562943,
-0.1547982543706894,
0.001038790331222117,
0.06496147066354752,
0.0017310940893366933,
0.07955073565244675,
-0.04891270026564598,
0.0009913129033520818,
-0.013812844641506672,
0.09653312712907791,
0.03368118777871132,
0.07905177772045135,
-0.1325932890176773,
0.016972340643405914,
-0.01613359898328781,
-0.09842365980148315,
-0.06750886142253876,
0.037747763097286224,
0.06325653940439224,
0.018319910392165184,
0.179081991314888,
-0.08022849261760712,
0.0599236823618412,
-0.21401728689670563,
-0.006066875532269478,
-0.012053675018250942,
-0.11378350853919983,
-0.0997389554977417,
-0.08168136328458786,
0.07933379709720612,
-0.05754625052213669,
0.11986849457025528,
0.05862629786133766,
0.06532897055149078,
0.020695026963949203,
-0.003337441710755229,
0.015563185326755047,
0.03292005881667137,
0.1992880403995514,
0.04163777455687523,
-0.05101238191127777,
0.058614131063222885,
0.07890242338180542,
0.10852319002151489,
0.1353907436132431,
0.2126520276069641,
0.1349097192287445,
-0.025735212489962578,
0.08318701386451721,
0.02147786132991314,
-0.039011627435684204,
-0.1464281529188156,
0.01681853085756302,
-0.05801264941692352,
0.09169962257146835,
-0.029018083587288857,
0.21045951545238495,
0.04669271782040596,
-0.16858497262001038,
0.044576458632946014,
-0.05728309974074364,
-0.10644634068012238,
-0.10488925129175186,
-0.02517641708254814,
-0.08016546815633774,
-0.12735244631767273,
0.007721861358731985,
-0.11418185383081436,
0.011699006892740726,
0.11641766130924225,
0.0147638488560915,
-0.02894366905093193,
0.18018078804016113,
0.026523573324084282,
0.028624361380934715,
0.08367429673671722,
0.008717789314687252,
-0.012278877198696136,
-0.11386305093765259,
-0.06199680641293526,
-0.03830888867378235,
-0.00809806864708662,
0.029031047597527504,
-0.07505437731742859,
-0.08757869899272919,
0.02542153373360634,
-0.01775652915239334,
-0.11014046519994736,
0.022286521270871162,
0.016725072637200356,
0.07393684983253479,
0.041485805064439774,
-0.0017586075700819492,
0.008969517424702644,
-0.029595864936709404,
0.23616357147693634,
-0.08412141352891922,
-0.07176029682159424,
-0.09218794107437134,
0.27329614758491516,
0.037953492254018784,
-0.00033307928242720664,
0.0222815852612257,
-0.07124337553977966,
0.019592618569731712,
0.26468992233276367,
0.21712864935398102,
-0.1283319592475891,
-0.0016012826235964894,
0.005828325171023607,
-0.00597271928563714,
-0.005322239361703396,
0.13096614181995392,
0.11956024914979935,
0.04664252698421478,
-0.11550343036651611,
-0.02535298839211464,
-0.05168354883790016,
-0.014181780628859997,
-0.027176709845662117,
0.06887350976467133,
0.07329646497964859,
0.013536307960748672,
-0.061406657099723816,
0.07035426795482635,
-0.08815789967775345,
-0.10260528326034546,
0.058076221495866776,
-0.2260308414697647,
-0.16918739676475525,
-0.019066134467720985,
0.10066826641559601,
-0.0025395252741873264,
0.07752075046300888,
-0.025568217039108276,
-0.006168636493384838,
0.04141569510102272,
-0.025332046672701836,
-0.07731741666793823,
-0.08380922675132751,
0.09888606518507004,
-0.12472832947969437,
0.1758694052696228,
-0.047695960849523544,
0.05871821567416191,
0.12237302958965302,
0.06629873067140579,
-0.037096355110406876,
0.05322537198662758,
0.03820768743753433,
-0.08083246648311615,
0.028272755444049835,
0.11016940325498581,
-0.03332331404089928,
0.032750632613897324,
0.04112745821475983,
-0.1459735929965973,
0.040147218853235245,
-0.0944279134273529,
-0.05508606508374214,
-0.03774937987327576,
-0.046977024525403976,
-0.05517815425992012,
0.12706756591796875,
0.23784291744232178,
-0.009720840491354465,
0.023290980607271194,
-0.07286100834608078,
-0.000002560910388638149,
0.04758267104625702,
0.03225008025765419,
-0.09822677820920944,
-0.24385014176368713,
-0.0030146981589496136,
0.078040212392807,
-0.03479839861392975,
-0.2570963501930237,
-0.08669403940439224,
0.0034068990498781204,
-0.06871112436056137,
-0.0888427346944809,
0.08299421519041061,
0.07673094421625137,
0.055366151034832,
-0.05232156440615654,
-0.0829143300652504,
-0.07450148463249207,
0.1662084013223648,
-0.15114925801753998,
-0.09026945382356644
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# distilbert-base-uncased-finetuned-cola
This model is a fine-tuned version of [distilbert-base-uncased](https://huggingface.co/distilbert-base-uncased) on the glue dataset.
It achieves the following results on the evaluation set:
- Loss: 0.6223
- Matthews Correlation: 0.5374
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 2e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 5
### Training results
| Training Loss | Epoch | Step | Validation Loss | Matthews Correlation |
|:-------------:|:-----:|:----:|:---------------:|:--------------------:|
| 0.5275 | 1.0 | 535 | 0.5456 | 0.3973 |
| 0.3481 | 2.0 | 1070 | 0.5401 | 0.5006 |
| 0.242 | 3.0 | 1605 | 0.6223 | 0.5374 |
| 0.1725 | 4.0 | 2140 | 0.7934 | 0.5229 |
| 0.1346 | 5.0 | 2675 | 0.8478 | 0.5367 |
### Framework versions
- Transformers 4.10.2
- Pytorch 1.9.0+cu102
- Datasets 1.11.0
- Tokenizers 0.10.3
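### Evaluation sketch
The reported Matthews correlation can, in principle, be recomputed on the GLUE CoLA validation split with the published checkpoint. The sketch below is hedged: it assumes the model's label ids follow the dataset's convention (0 = unacceptable, 1 = acceptable) and uses plain batching rather than the original evaluation script.
```python
# Hedged sketch: recompute Matthews correlation on GLUE/CoLA validation.
import torch
from datasets import load_dataset
from sklearn.metrics import matthews_corrcoef
from transformers import AutoModelForSequenceClassification, AutoTokenizer

model_id = "blizrys/distilbert-base-uncased-finetuned-cola"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForSequenceClassification.from_pretrained(model_id)
model.eval()

val = load_dataset("glue", "cola", split="validation")

preds = []
with torch.no_grad():
    for start in range(0, len(val), 32):
        batch = val[start : start + 32]  # slicing a Dataset returns a dict of lists
        inputs = tokenizer(
            batch["sentence"], padding=True, truncation=True, return_tensors="pt"
        )
        logits = model(**inputs).logits
        preds.extend(logits.argmax(dim=-1).tolist())

print("Matthews correlation:", matthews_corrcoef(val["label"], preds))
# The card reports 0.5374 for the selected checkpoint.
```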
|
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "datasets": ["glue"], "metrics": ["matthews_correlation"], "model-index": [{"name": "distilbert-base-uncased-finetuned-cola", "results": [{"task": {"type": "text-classification", "name": "Text Classification"}, "dataset": {"name": "glue", "type": "glue", "args": "cola"}, "metrics": [{"type": "matthews_correlation", "value": 0.5373623427702773, "name": "Matthews Correlation"}]}]}]}
|
text-classification
|
blizrys/distilbert-base-uncased-finetuned-cola
|
[
"transformers",
"pytorch",
"tensorboard",
"distilbert",
"text-classification",
"generated_from_trainer",
"dataset:glue",
"license:apache-2.0",
"model-index",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #distilbert #text-classification #generated_from_trainer #dataset-glue #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #region-us
|
distilbert-base-uncased-finetuned-cola
======================================
This model is a fine-tuned version of distilbert-base-uncased on the glue dataset.
It achieves the following results on the evaluation set:
* Loss: 0.6223
* Matthews Correlation: 0.5374
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 2e-05
* train\_batch\_size: 16
* eval\_batch\_size: 16
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 5
### Training results
### Framework versions
* Transformers 4.10.2
* Pytorch 1.9.0+cu102
* Datasets 1.11.0
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 5",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.10.2\n* Pytorch 1.9.0+cu102\n* Datasets 1.11.0\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #distilbert #text-classification #generated_from_trainer #dataset-glue #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 5",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.10.2\n* Pytorch 1.9.0+cu102\n* Datasets 1.11.0\n* Tokenizers 0.10.3"
] |
[
67,
98,
4,
34
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #distilbert #text-classification #generated_from_trainer #dataset-glue #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 5### Training results### Framework versions\n\n\n* Transformers 4.10.2\n* Pytorch 1.9.0+cu102\n* Datasets 1.11.0\n* Tokenizers 0.10.3"
] |
[
-0.10426512360572815,
0.10433709621429443,
-0.002259957604110241,
0.12249139696359634,
0.1660148948431015,
0.03348537161946297,
0.12569215893745422,
0.12749995291233063,
-0.08600743860006332,
0.022487498819828033,
0.12130539864301682,
0.15879149734973907,
0.02310153841972351,
0.11628648638725281,
-0.05104270204901695,
-0.26310062408447266,
-0.012797782197594643,
0.04824819043278694,
-0.05345110222697258,
0.13410483300685883,
0.09187425673007965,
-0.12191611528396606,
0.0906994491815567,
0.012523122131824493,
-0.19482818245887756,
-0.0029480960220098495,
0.00042572690290398896,
-0.053956933319568634,
0.14694012701511383,
0.02537601813673973,
0.12314296513795853,
-0.0004913617740385234,
0.08683395385742188,
-0.19422951340675354,
0.010315582156181335,
0.04781133309006691,
0.005009995307773352,
0.09460025280714035,
0.04737091809511185,
0.005455106031149626,
0.1160484328866005,
-0.08192814141511917,
0.05484170839190483,
0.02224828489124775,
-0.11514543741941452,
-0.20770631730556488,
-0.08039336651563644,
0.03676752373576164,
0.07877427339553833,
0.10625603795051575,
-0.005990809295326471,
0.11905906349420547,
-0.0787460058927536,
0.09247059375047684,
0.2190333902835846,
-0.286700576543808,
-0.06601723283529282,
0.04456251114606857,
0.013659088872373104,
0.04419851675629616,
-0.10103028267621994,
-0.03667899966239929,
0.04668722674250603,
0.05234873667359352,
0.12773050367832184,
-0.028970537707209587,
-0.1218077763915062,
0.0033802713733166456,
-0.14009326696395874,
-0.03411950543522835,
0.17008748650550842,
0.040434580296278,
-0.028836995363235474,
-0.054720740765333176,
-0.06081100180745125,
-0.14672358334064484,
-0.03682434558868408,
-0.011688660830259323,
0.04699467867612839,
-0.02180595137178898,
-0.04085034504532814,
-0.010947499424219131,
-0.10869567096233368,
-0.06319879740476608,
-0.07729531079530716,
0.10842012614011765,
0.036131877452135086,
0.008506380021572113,
-0.028935758396983147,
0.1114862859249115,
-0.006415044888854027,
-0.12363190948963165,
0.023490887135267258,
0.021571394056081772,
0.013030925765633583,
-0.04021803289651871,
-0.05357298627495766,
-0.06285636126995087,
0.011701199226081371,
0.1296256184577942,
-0.047466713935136795,
0.0417354553937912,
0.048748426139354706,
0.04871011897921562,
-0.09200433641672134,
0.19268843531608582,
-0.034906383603811264,
-0.030038712546229362,
0.010833265259861946,
0.0468958243727684,
0.019004084169864655,
-0.011806197464466095,
-0.12342752516269684,
0.003989899531006813,
0.08965563029050827,
0.007289408706128597,
-0.06077326461672783,
0.07437824457883835,
-0.05541825294494629,
-0.0253609586507082,
0.00386650743894279,
-0.09246677905321121,
0.022484445944428444,
-0.0007294954848475754,
-0.07128224521875381,
-0.020290985703468323,
0.03648412227630615,
0.015956250950694084,
-0.021084999665617943,
0.10925419628620148,
-0.08756697177886963,
0.02676578238606453,
-0.09414482116699219,
-0.10812705755233765,
0.018080338835716248,
-0.10612281411886215,
0.021790599450469017,
-0.09482768177986145,
-0.18786847591400146,
-0.017543887719511986,
0.06102675944566727,
-0.023772550746798515,
-0.061533089727163315,
-0.05492522567510605,
-0.06842752546072006,
0.012772813439369202,
-0.009758928790688515,
0.11891642212867737,
-0.06408089399337769,
0.09128986299037933,
0.019680099561810493,
0.05981069803237915,
-0.04414365068078041,
0.060395292937755585,
-0.10320717096328735,
0.015181172639131546,
-0.15193164348602295,
0.04124220833182335,
-0.05021706596016884,
0.06925851106643677,
-0.08305956423282623,
-0.10385692119598389,
0.007799788378179073,
-0.005036167800426483,
0.06286153197288513,
0.09392543137073517,
-0.1877855807542801,
-0.07497616112232208,
0.15703634917736053,
-0.07153391093015671,
-0.12171104550361633,
0.11983588337898254,
-0.059980932623147964,
0.05607984587550163,
0.05881010368466377,
0.17729079723358154,
0.08146750181913376,
-0.07674693316221237,
0.0017060274258255959,
0.02487725019454956,
0.051810041069984436,
-0.06745584309101105,
0.06833644211292267,
0.003987649455666542,
0.018164023756980896,
0.03621920198202133,
-0.02947813645005226,
0.06423578411340714,
-0.08516176044940948,
-0.09827269613742828,
-0.04194852337241173,
-0.0823812186717987,
0.04079892858862877,
0.07540690898895264,
0.06894330680370331,
-0.0905974954366684,
-0.07699081301689148,
0.05027666687965393,
0.08283647894859314,
-0.057200465351343155,
0.023797692731022835,
-0.04974832013249397,
0.07535454630851746,
-0.027170144021511078,
-0.02276477962732315,
-0.1820925772190094,
-0.03723650798201561,
0.007776295766234398,
-0.0001121047098422423,
0.016079852357506752,
0.028509872034192085,
0.06071171537041664,
0.05999923497438431,
-0.0476035438477993,
-0.017464617267251015,
-0.03203345090150833,
0.0009492220124229789,
-0.12850148975849152,
-0.19165997207164764,
-0.02964540384709835,
-0.023648712784051895,
0.15829363465309143,
-0.2059369534254074,
0.04867374897003174,
-0.016797875985503197,
0.07151833921670914,
0.012727910652756691,
-0.006638881750404835,
-0.03739942982792854,
0.07276139408349991,
-0.04526588320732117,
-0.05399049073457718,
0.0806112140417099,
0.018874136731028557,
-0.08801580965518951,
-0.049643103033304214,
-0.09715786576271057,
0.1531108021736145,
0.12844762206077576,
-0.10493320971727371,
-0.07572564482688904,
-0.020654376596212387,
-0.06728719919919968,
-0.03382715582847595,
-0.04939243197441101,
0.025807708501815796,
0.18722404539585114,
-0.004461523145437241,
0.1508566290140152,
-0.06760133057832718,
-0.04336853325366974,
0.01723160594701767,
-0.036057278513908386,
0.017328675836324692,
0.1265285760164261,
0.1374155431985855,
-0.059712447226047516,
0.15458841621875763,
0.14715515077114105,
-0.09026557952165604,
0.14469219744205475,
-0.04152258485555649,
-0.06488775461912155,
-0.015399081632494926,
-0.03186597302556038,
-0.011341072618961334,
0.10118255764245987,
-0.15199686586856842,
0.0017764876829460263,
0.03497067466378212,
0.01689252071082592,
0.025553934276103973,
-0.2243642807006836,
-0.03870183601975441,
0.034155867993831635,
-0.04194335639476776,
-0.003607292892411351,
-0.00692207645624876,
0.0071485997177660465,
0.10098173469305038,
0.0011031328467652202,
-0.08600062131881714,
0.039261557161808014,
0.0022273578215390444,
-0.08415757119655609,
0.21540571749210358,
-0.08334285765886307,
-0.17394869029521942,
-0.13083769381046295,
-0.07111518830060959,
-0.04867135360836983,
0.00039458886021748185,
0.06669750064611435,
-0.08735331147909164,
-0.03210105746984482,
-0.0726284459233284,
0.022122973576188087,
0.010409518145024776,
0.024539994075894356,
0.003938174340873957,
0.004626403097063303,
0.06458874046802521,
-0.11110340058803558,
-0.015550504438579082,
-0.05684101581573486,
-0.04388412460684776,
0.04362897574901581,
0.03219163045287132,
0.1114690750837326,
0.15295127034187317,
-0.014264507219195366,
0.011468439362943172,
-0.029248511418700218,
0.23971162736415863,
-0.06038505211472511,
-0.017323501408100128,
0.14577287435531616,
-0.01131322979927063,
0.05169711634516716,
0.11967752873897552,
0.07254404574632645,
-0.07740233093500137,
0.004438361153006554,
0.03531082719564438,
-0.036097221076488495,
-0.22965286672115326,
-0.058771245181560516,
-0.058080822229385376,
0.008482000790536404,
0.09279254078865051,
0.02489478886127472,
0.030599800869822502,
0.07268312573432922,
0.04093388840556145,
0.07906527817249298,
-0.039519891142845154,
0.055318836122751236,
0.1321485936641693,
0.03393073379993439,
0.12528666853904724,
-0.04521327093243599,
-0.0631549209356308,
0.04426415637135506,
-0.010454464703798294,
0.22409671545028687,
0.004764808341860771,
0.12663747370243073,
0.06118015572428703,
0.16440026462078094,
-0.005794202908873558,
0.07819760590791702,
-0.009913095273077488,
-0.03396560251712799,
-0.01807704009115696,
-0.038802169263362885,
-0.04050105810165405,
0.025703569874167442,
-0.06718948483467102,
0.062140315771102905,
-0.11923284828662872,
0.015041983686387539,
0.05876095965504646,
0.24976108968257904,
0.03490309417247772,
-0.32359662652015686,
-0.09897902607917786,
0.0025236369110643864,
-0.03214254602789879,
-0.02410626783967018,
0.027108095586299896,
0.09401269257068634,
-0.10150056332349777,
0.02924281731247902,
-0.0762752816081047,
0.09722728282213211,
-0.05291596055030823,
0.04811663180589676,
0.08359494060277939,
0.09187682718038559,
0.01273881085216999,
0.09307749569416046,
-0.2858288884162903,
0.27187150716781616,
-0.0001095435582101345,
0.05727545917034149,
-0.07874415069818497,
0.01086416281759739,
0.04336170852184296,
0.06292960047721863,
0.08073702454566956,
-0.012330491095781326,
-0.027453524991869926,
-0.1826833188533783,
-0.07152741402387619,
0.028367064893245697,
0.06134333088994026,
-0.03791256994009018,
0.08296520262956619,
-0.033452264964580536,
0.007488494738936424,
0.07177787274122238,
-0.0007205126457847655,
-0.05129532516002655,
-0.10879101604223251,
-0.00538033340126276,
0.024950211867690086,
-0.0590340793132782,
-0.06024821102619171,
-0.11951345205307007,
-0.12641017138957977,
0.15740618109703064,
-0.03249810263514519,
-0.04079846665263176,
-0.10955542325973511,
0.08575702458620071,
0.061592940241098404,
-0.08935131877660751,
0.04640064388513565,
-0.0002651397662702948,
0.08131054788827896,
0.02311514876782894,
-0.07473637908697128,
0.10025379061698914,
-0.07616567611694336,
-0.15740104019641876,
-0.06600521504878998,
0.10558932274580002,
0.031837981194257736,
0.06433220207691193,
-0.011048474349081516,
0.008578725159168243,
-0.04882335662841797,
-0.09015624225139618,
0.015705324709415436,
0.01059509627521038,
0.0804133340716362,
0.01853850670158863,
-0.07612992823123932,
0.0060027409344911575,
-0.05939517542719841,
-0.03232140839099884,
0.20830915868282318,
0.21475538611412048,
-0.10217327624559402,
0.0258195698261261,
0.02200021594762802,
-0.07357484102249146,
-0.2013624608516693,
0.03310282528400421,
0.057223569601774216,
0.009338990785181522,
0.04134295508265495,
-0.18028351664543152,
0.1395263522863388,
0.10767275094985962,
-0.014254840090870857,
0.1049177423119545,
-0.31948742270469666,
-0.12232451885938644,
0.13675561547279358,
0.13327986001968384,
0.10010571032762527,
-0.12934616208076477,
-0.02182192914187908,
-0.019805684685707092,
-0.13585087656974792,
0.11871080845594406,
-0.09001870453357697,
0.11899729818105698,
-0.03491564840078354,
0.0815073773264885,
0.0023825892712920904,
-0.05845170468091965,
0.11969935894012451,
0.0289877038449049,
0.09193193167448044,
-0.05965009704232216,
-0.03315823897719383,
0.03096715360879898,
-0.04449234530329704,
0.03580469638109207,
-0.09313686937093735,
0.031442079693078995,
-0.10635531693696976,
-0.025107571855187416,
-0.06602425873279572,
0.04718546196818352,
-0.042435791343450546,
-0.06855174899101257,
-0.03725622221827507,
0.025839144363999367,
0.05012626573443413,
-0.008466712199151516,
0.12203888595104218,
0.02860250324010849,
0.1414310783147812,
0.09874077141284943,
0.07056708633899689,
-0.06843950599431992,
-0.07940129190683365,
-0.02658763900399208,
-0.01143626682460308,
0.050212763249874115,
-0.1347932070493698,
0.022326458245515823,
0.15249556303024292,
0.018951259553432465,
0.1510075181722641,
0.08182299882173538,
-0.018538322299718857,
0.000021221798306214623,
0.05697400122880936,
-0.16726034879684448,
-0.0875583365559578,
-0.014363158494234085,
-0.0649663656949997,
-0.12047193944454193,
0.04137254133820534,
0.09413165599107742,
-0.0674988403916359,
-0.007067597936838865,
-0.0040827360935509205,
0.01480003073811531,
-0.047181155532598495,
0.18542033433914185,
0.06140720844268799,
0.04556829482316971,
-0.09902192652225494,
0.07232040911912918,
0.04775995761156082,
-0.07217730581760406,
0.004505601711571217,
0.07354437559843063,
-0.08864618837833405,
-0.054575514048337936,
0.06766585260629654,
0.19024787843227386,
-0.04857509955763817,
-0.04677366837859154,
-0.14007768034934998,
-0.12211456149816513,
0.07924559712409973,
0.13740694522857666,
0.11964695900678635,
0.011042100377380848,
-0.06867233663797379,
0.00013753524399362504,
-0.10835038125514984,
0.10523269325494766,
0.05095338448882103,
0.06350406259298325,
-0.14289559423923492,
0.141910582780838,
0.017812194302678108,
0.048876430839300156,
-0.019941730424761772,
0.025252623483538628,
-0.09824269264936447,
0.005383232142776251,
-0.09809642285108566,
-0.0121102724224329,
-0.033991310745477676,
0.012102196924388409,
-0.005992238875478506,
-0.047347813844680786,
-0.05554035305976868,
0.010358233936131,
-0.10652101784944534,
-0.023995233699679375,
0.025617100298404694,
0.06924949586391449,
-0.10803163796663284,
-0.03712153807282448,
0.02782008796930313,
-0.06190326437354088,
0.077000193297863,
0.04481126740574837,
0.01620541326701641,
0.04950089380145073,
-0.13533835113048553,
0.016700081527233124,
0.07398871332406998,
0.03171689808368683,
0.06389815360307693,
-0.09730371832847595,
-0.006499884650111198,
-0.005229889880865812,
0.03809446096420288,
0.01969943195581436,
0.07728329300880432,
-0.14173462986946106,
0.002201495924964547,
-0.02328886091709137,
-0.08012913167476654,
-0.0682433471083641,
0.02571716532111168,
0.09044670313596725,
0.021472934633493423,
0.20141401886940002,
-0.07654982060194016,
0.05152589827775955,
-0.21565696597099304,
0.006225933320820332,
-0.009186693467199802,
-0.10887595266103745,
-0.1055423766374588,
-0.07114649564027786,
0.05583106353878975,
-0.05802374705672264,
0.1517743319272995,
0.04912406578660011,
0.022861337289214134,
0.02491481602191925,
-0.007246850058436394,
0.014773547649383545,
0.011061709374189377,
0.18983176350593567,
0.030938738957047462,
-0.03437184542417526,
0.0592007152736187,
0.0431998074054718,
0.10482235997915268,
0.11226430535316467,
0.20194236934185028,
0.14138156175613403,
-0.00624391995370388,
0.0932706668972969,
0.040944769978523254,
-0.05923188477754593,
-0.15989600121974945,
0.048051681369543076,
-0.037013355642557144,
0.11125864088535309,
-0.020855454728007317,
0.21790654957294464,
0.058761466294527054,
-0.1712712198495865,
0.04803154617547989,
-0.052442826330661774,
-0.0865674763917923,
-0.11406191438436508,
-0.05263666808605194,
-0.07929245382547379,
-0.127937912940979,
-0.005218966398388147,
-0.11683830618858337,
-0.002244236646220088,
0.12685494124889374,
0.0028031114488840103,
-0.028577234596014023,
0.15587973594665527,
0.006063263397663832,
0.021677058190107346,
0.05789635330438614,
0.012065466493368149,
-0.03534611314535141,
-0.13333582878112793,
-0.059983085840940475,
-0.017461296170949936,
-0.006359034217894077,
0.032993730157613754,
-0.06168423965573311,
-0.03825095295906067,
0.03244449943304062,
-0.022318247705698013,
-0.0928620770573616,
0.005188292358070612,
0.012640755623579025,
0.053713541477918625,
0.04606783762574196,
0.011239070445299149,
0.019917158409953117,
-0.0031351482030004263,
0.20080041885375977,
-0.07217791676521301,
-0.06660531461238861,
-0.10717114806175232,
0.22972801327705383,
0.03417762741446495,
-0.02237319014966488,
0.03579697757959366,
-0.06617016345262527,
0.0030825489666312933,
0.24917256832122803,
0.2160906195640564,
-0.08210866153240204,
-0.007621712051331997,
0.015840673819184303,
-0.00944583211094141,
-0.02301640249788761,
0.10099002718925476,
0.1437874436378479,
0.05389159917831421,
-0.09199661761522293,
-0.046872831881046295,
-0.05884753540158272,
-0.018054412677884102,
-0.03788604214787483,
0.07106056064367294,
0.04584185779094696,
0.0066960579715669155,
-0.034526970237493515,
0.05514732748270035,
-0.06887141615152359,
-0.09347423166036606,
0.054371193051338196,
-0.2162213772535324,
-0.16998834908008575,
-0.013261387124657631,
0.09828519821166992,
0.0034375409595668316,
0.05994460731744766,
-0.030700774863362312,
-0.0028944036457687616,
0.09509637206792831,
-0.021005388349294662,
-0.09704624861478806,
-0.06895597279071808,
0.08762237429618835,
-0.10917830467224121,
0.22248877584934235,
-0.04615882411599159,
0.05427993834018707,
0.12458188831806183,
0.06982076913118362,
-0.07060165703296661,
0.06350772827863693,
0.043261464685201645,
-0.040720079094171524,
0.02771449275314808,
0.07004562765359879,
-0.03564247861504555,
0.061437126249074936,
0.048388075083494186,
-0.13911570608615875,
0.019061563536524773,
-0.04958338290452957,
-0.06783602386713028,
-0.04561956971883774,
-0.023861533030867577,
-0.06201706826686859,
0.13243703544139862,
0.21538084745407104,
-0.02663380280137062,
-0.01076560840010643,
-0.0711965560913086,
0.010668067261576653,
0.05290389806032181,
0.022487344220280647,
-0.056119345128536224,
-0.20987090468406677,
0.016981951892375946,
0.03965592756867409,
-0.01915142871439457,
-0.2438865303993225,
-0.10073988139629364,
0.00047511851880699396,
-0.07343509048223495,
-0.09609877318143845,
0.07428222894668579,
0.08455246686935425,
0.04942093417048454,
-0.057058949023485184,
-0.04013773798942566,
-0.0767010897397995,
0.14584758877754211,
-0.1440131962299347,
-0.09217162430286407
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# distilbert-base-uncased-finetuned-mnli
This model is a fine-tuned version of [distilbert-base-uncased](https://huggingface.co/distilbert-base-uncased) on the MNLI task of the GLUE benchmark.
It achieves the following results on the evaluation set:
- Loss: 0.6753
- Accuracy: 0.8206
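As a hedged illustration (not part of the auto-generated card), the checkpoint can be loaded with the standard sequence-classification API. The entailment/neutral/contradiction label order below follows the GLUE MNLI convention and is an assumption; check `model.config.id2label` before relying on it.
```
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

model_id = "blizrys/distilbert-base-uncased-finetuned-mnli"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForSequenceClassification.from_pretrained(model_id)

premise = "A soccer game with multiple males playing."
hypothesis = "Some men are playing a sport."
inputs = tokenizer(premise, hypothesis, return_tensors="pt", truncation=True)
with torch.no_grad():
    logits = model(**inputs).logits

# Assumed GLUE MNLI label order; verify against model.config.id2label
labels = ["entailment", "neutral", "contradiction"]
print(labels[logits.argmax(dim=-1).item()])
```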
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 2e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 5
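For reference, a minimal sketch of how the hyperparameters above map onto `TrainingArguments` (the output directory is illustrative; this is an assumption about the setup, not the exact training script that was used):
```
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="distilbert-base-uncased-finetuned-mnli",  # illustrative path
    learning_rate=2e-05,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=16,
    num_train_epochs=5,
    seed=42,
    lr_scheduler_type="linear",
    # Adam betas=(0.9, 0.999) and epsilon=1e-08 are the library defaults
)
```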
### Training results
| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:------:|:---------------:|:--------:|
| 0.5146 | 1.0 | 24544 | 0.4925 | 0.8049 |
| 0.4093 | 2.0 | 49088 | 0.5090 | 0.8164 |
| 0.3122 | 3.0 | 73632 | 0.5299 | 0.8185 |
| 0.2286 | 4.0 | 98176 | 0.6753 | 0.8206 |
| 0.182 | 5.0 | 122720 | 0.8372 | 0.8195 |
### Framework versions
- Transformers 4.10.2
- Pytorch 1.9.0+cu102
- Datasets 1.11.0
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "datasets": ["glue"], "metrics": ["accuracy"], "model-index": [{"name": "distilbert-base-uncased-finetuned-mnli", "results": [{"task": {"type": "text-classification", "name": "Text Classification"}, "dataset": {"name": "glue", "type": "glue", "args": "mnli"}, "metrics": [{"type": "accuracy", "value": 0.8205807437595517, "name": "Accuracy"}]}]}]}
|
text-classification
|
blizrys/distilbert-base-uncased-finetuned-mnli
|
[
"transformers",
"pytorch",
"tensorboard",
"distilbert",
"text-classification",
"generated_from_trainer",
"dataset:glue",
"license:apache-2.0",
"model-index",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #distilbert #text-classification #generated_from_trainer #dataset-glue #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #region-us
|
distilbert-base-uncased-finetuned-mnli
======================================
This model is a fine-tuned version of distilbert-base-uncased on the MNLI task of the GLUE benchmark.
It achieves the following results on the evaluation set:
* Loss: 0.6753
* Accuracy: 0.8206
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 2e-05
* train\_batch\_size: 16
* eval\_batch\_size: 16
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 5
### Training results
### Framework versions
* Transformers 4.10.2
* Pytorch 1.9.0+cu102
* Datasets 1.11.0
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 5",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.10.2\n* Pytorch 1.9.0+cu102\n* Datasets 1.11.0\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #distilbert #text-classification #generated_from_trainer #dataset-glue #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 5",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.10.2\n* Pytorch 1.9.0+cu102\n* Datasets 1.11.0\n* Tokenizers 0.10.3"
] |
[
67,
98,
4,
34
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #distilbert #text-classification #generated_from_trainer #dataset-glue #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 5### Training results### Framework versions\n\n\n* Transformers 4.10.2\n* Pytorch 1.9.0+cu102\n* Datasets 1.11.0\n* Tokenizers 0.10.3"
] |
[
-0.10426512360572815,
0.10433709621429443,
-0.002259957604110241,
0.12249139696359634,
0.1660148948431015,
0.03348537161946297,
0.12569215893745422,
0.12749995291233063,
-0.08600743860006332,
0.022487498819828033,
0.12130539864301682,
0.15879149734973907,
0.02310153841972351,
0.11628648638725281,
-0.05104270204901695,
-0.26310062408447266,
-0.012797782197594643,
0.04824819043278694,
-0.05345110222697258,
0.13410483300685883,
0.09187425673007965,
-0.12191611528396606,
0.0906994491815567,
0.012523122131824493,
-0.19482818245887756,
-0.0029480960220098495,
0.00042572690290398896,
-0.053956933319568634,
0.14694012701511383,
0.02537601813673973,
0.12314296513795853,
-0.0004913617740385234,
0.08683395385742188,
-0.19422951340675354,
0.010315582156181335,
0.04781133309006691,
0.005009995307773352,
0.09460025280714035,
0.04737091809511185,
0.005455106031149626,
0.1160484328866005,
-0.08192814141511917,
0.05484170839190483,
0.02224828489124775,
-0.11514543741941452,
-0.20770631730556488,
-0.08039336651563644,
0.03676752373576164,
0.07877427339553833,
0.10625603795051575,
-0.005990809295326471,
0.11905906349420547,
-0.0787460058927536,
0.09247059375047684,
0.2190333902835846,
-0.286700576543808,
-0.06601723283529282,
0.04456251114606857,
0.013659088872373104,
0.04419851675629616,
-0.10103028267621994,
-0.03667899966239929,
0.04668722674250603,
0.05234873667359352,
0.12773050367832184,
-0.028970537707209587,
-0.1218077763915062,
0.0033802713733166456,
-0.14009326696395874,
-0.03411950543522835,
0.17008748650550842,
0.040434580296278,
-0.028836995363235474,
-0.054720740765333176,
-0.06081100180745125,
-0.14672358334064484,
-0.03682434558868408,
-0.011688660830259323,
0.04699467867612839,
-0.02180595137178898,
-0.04085034504532814,
-0.010947499424219131,
-0.10869567096233368,
-0.06319879740476608,
-0.07729531079530716,
0.10842012614011765,
0.036131877452135086,
0.008506380021572113,
-0.028935758396983147,
0.1114862859249115,
-0.006415044888854027,
-0.12363190948963165,
0.023490887135267258,
0.021571394056081772,
0.013030925765633583,
-0.04021803289651871,
-0.05357298627495766,
-0.06285636126995087,
0.011701199226081371,
0.1296256184577942,
-0.047466713935136795,
0.0417354553937912,
0.048748426139354706,
0.04871011897921562,
-0.09200433641672134,
0.19268843531608582,
-0.034906383603811264,
-0.030038712546229362,
0.010833265259861946,
0.0468958243727684,
0.019004084169864655,
-0.011806197464466095,
-0.12342752516269684,
0.003989899531006813,
0.08965563029050827,
0.007289408706128597,
-0.06077326461672783,
0.07437824457883835,
-0.05541825294494629,
-0.0253609586507082,
0.00386650743894279,
-0.09246677905321121,
0.022484445944428444,
-0.0007294954848475754,
-0.07128224521875381,
-0.020290985703468323,
0.03648412227630615,
0.015956250950694084,
-0.021084999665617943,
0.10925419628620148,
-0.08756697177886963,
0.02676578238606453,
-0.09414482116699219,
-0.10812705755233765,
0.018080338835716248,
-0.10612281411886215,
0.021790599450469017,
-0.09482768177986145,
-0.18786847591400146,
-0.017543887719511986,
0.06102675944566727,
-0.023772550746798515,
-0.061533089727163315,
-0.05492522567510605,
-0.06842752546072006,
0.012772813439369202,
-0.009758928790688515,
0.11891642212867737,
-0.06408089399337769,
0.09128986299037933,
0.019680099561810493,
0.05981069803237915,
-0.04414365068078041,
0.060395292937755585,
-0.10320717096328735,
0.015181172639131546,
-0.15193164348602295,
0.04124220833182335,
-0.05021706596016884,
0.06925851106643677,
-0.08305956423282623,
-0.10385692119598389,
0.007799788378179073,
-0.005036167800426483,
0.06286153197288513,
0.09392543137073517,
-0.1877855807542801,
-0.07497616112232208,
0.15703634917736053,
-0.07153391093015671,
-0.12171104550361633,
0.11983588337898254,
-0.059980932623147964,
0.05607984587550163,
0.05881010368466377,
0.17729079723358154,
0.08146750181913376,
-0.07674693316221237,
0.0017060274258255959,
0.02487725019454956,
0.051810041069984436,
-0.06745584309101105,
0.06833644211292267,
0.003987649455666542,
0.018164023756980896,
0.03621920198202133,
-0.02947813645005226,
0.06423578411340714,
-0.08516176044940948,
-0.09827269613742828,
-0.04194852337241173,
-0.0823812186717987,
0.04079892858862877,
0.07540690898895264,
0.06894330680370331,
-0.0905974954366684,
-0.07699081301689148,
0.05027666687965393,
0.08283647894859314,
-0.057200465351343155,
0.023797692731022835,
-0.04974832013249397,
0.07535454630851746,
-0.027170144021511078,
-0.02276477962732315,
-0.1820925772190094,
-0.03723650798201561,
0.007776295766234398,
-0.0001121047098422423,
0.016079852357506752,
0.028509872034192085,
0.06071171537041664,
0.05999923497438431,
-0.0476035438477993,
-0.017464617267251015,
-0.03203345090150833,
0.0009492220124229789,
-0.12850148975849152,
-0.19165997207164764,
-0.02964540384709835,
-0.023648712784051895,
0.15829363465309143,
-0.2059369534254074,
0.04867374897003174,
-0.016797875985503197,
0.07151833921670914,
0.012727910652756691,
-0.006638881750404835,
-0.03739942982792854,
0.07276139408349991,
-0.04526588320732117,
-0.05399049073457718,
0.0806112140417099,
0.018874136731028557,
-0.08801580965518951,
-0.049643103033304214,
-0.09715786576271057,
0.1531108021736145,
0.12844762206077576,
-0.10493320971727371,
-0.07572564482688904,
-0.020654376596212387,
-0.06728719919919968,
-0.03382715582847595,
-0.04939243197441101,
0.025807708501815796,
0.18722404539585114,
-0.004461523145437241,
0.1508566290140152,
-0.06760133057832718,
-0.04336853325366974,
0.01723160594701767,
-0.036057278513908386,
0.017328675836324692,
0.1265285760164261,
0.1374155431985855,
-0.059712447226047516,
0.15458841621875763,
0.14715515077114105,
-0.09026557952165604,
0.14469219744205475,
-0.04152258485555649,
-0.06488775461912155,
-0.015399081632494926,
-0.03186597302556038,
-0.011341072618961334,
0.10118255764245987,
-0.15199686586856842,
0.0017764876829460263,
0.03497067466378212,
0.01689252071082592,
0.025553934276103973,
-0.2243642807006836,
-0.03870183601975441,
0.034155867993831635,
-0.04194335639476776,
-0.003607292892411351,
-0.00692207645624876,
0.0071485997177660465,
0.10098173469305038,
0.0011031328467652202,
-0.08600062131881714,
0.039261557161808014,
0.0022273578215390444,
-0.08415757119655609,
0.21540571749210358,
-0.08334285765886307,
-0.17394869029521942,
-0.13083769381046295,
-0.07111518830060959,
-0.04867135360836983,
0.00039458886021748185,
0.06669750064611435,
-0.08735331147909164,
-0.03210105746984482,
-0.0726284459233284,
0.022122973576188087,
0.010409518145024776,
0.024539994075894356,
0.003938174340873957,
0.004626403097063303,
0.06458874046802521,
-0.11110340058803558,
-0.015550504438579082,
-0.05684101581573486,
-0.04388412460684776,
0.04362897574901581,
0.03219163045287132,
0.1114690750837326,
0.15295127034187317,
-0.014264507219195366,
0.011468439362943172,
-0.029248511418700218,
0.23971162736415863,
-0.06038505211472511,
-0.017323501408100128,
0.14577287435531616,
-0.01131322979927063,
0.05169711634516716,
0.11967752873897552,
0.07254404574632645,
-0.07740233093500137,
0.004438361153006554,
0.03531082719564438,
-0.036097221076488495,
-0.22965286672115326,
-0.058771245181560516,
-0.058080822229385376,
0.008482000790536404,
0.09279254078865051,
0.02489478886127472,
0.030599800869822502,
0.07268312573432922,
0.04093388840556145,
0.07906527817249298,
-0.039519891142845154,
0.055318836122751236,
0.1321485936641693,
0.03393073379993439,
0.12528666853904724,
-0.04521327093243599,
-0.0631549209356308,
0.04426415637135506,
-0.010454464703798294,
0.22409671545028687,
0.004764808341860771,
0.12663747370243073,
0.06118015572428703,
0.16440026462078094,
-0.005794202908873558,
0.07819760590791702,
-0.009913095273077488,
-0.03396560251712799,
-0.01807704009115696,
-0.038802169263362885,
-0.04050105810165405,
0.025703569874167442,
-0.06718948483467102,
0.062140315771102905,
-0.11923284828662872,
0.015041983686387539,
0.05876095965504646,
0.24976108968257904,
0.03490309417247772,
-0.32359662652015686,
-0.09897902607917786,
0.0025236369110643864,
-0.03214254602789879,
-0.02410626783967018,
0.027108095586299896,
0.09401269257068634,
-0.10150056332349777,
0.02924281731247902,
-0.0762752816081047,
0.09722728282213211,
-0.05291596055030823,
0.04811663180589676,
0.08359494060277939,
0.09187682718038559,
0.01273881085216999,
0.09307749569416046,
-0.2858288884162903,
0.27187150716781616,
-0.0001095435582101345,
0.05727545917034149,
-0.07874415069818497,
0.01086416281759739,
0.04336170852184296,
0.06292960047721863,
0.08073702454566956,
-0.012330491095781326,
-0.027453524991869926,
-0.1826833188533783,
-0.07152741402387619,
0.028367064893245697,
0.06134333088994026,
-0.03791256994009018,
0.08296520262956619,
-0.033452264964580536,
0.007488494738936424,
0.07177787274122238,
-0.0007205126457847655,
-0.05129532516002655,
-0.10879101604223251,
-0.00538033340126276,
0.024950211867690086,
-0.0590340793132782,
-0.06024821102619171,
-0.11951345205307007,
-0.12641017138957977,
0.15740618109703064,
-0.03249810263514519,
-0.04079846665263176,
-0.10955542325973511,
0.08575702458620071,
0.061592940241098404,
-0.08935131877660751,
0.04640064388513565,
-0.0002651397662702948,
0.08131054788827896,
0.02311514876782894,
-0.07473637908697128,
0.10025379061698914,
-0.07616567611694336,
-0.15740104019641876,
-0.06600521504878998,
0.10558932274580002,
0.031837981194257736,
0.06433220207691193,
-0.011048474349081516,
0.008578725159168243,
-0.04882335662841797,
-0.09015624225139618,
0.015705324709415436,
0.01059509627521038,
0.0804133340716362,
0.01853850670158863,
-0.07612992823123932,
0.0060027409344911575,
-0.05939517542719841,
-0.03232140839099884,
0.20830915868282318,
0.21475538611412048,
-0.10217327624559402,
0.0258195698261261,
0.02200021594762802,
-0.07357484102249146,
-0.2013624608516693,
0.03310282528400421,
0.057223569601774216,
0.009338990785181522,
0.04134295508265495,
-0.18028351664543152,
0.1395263522863388,
0.10767275094985962,
-0.014254840090870857,
0.1049177423119545,
-0.31948742270469666,
-0.12232451885938644,
0.13675561547279358,
0.13327986001968384,
0.10010571032762527,
-0.12934616208076477,
-0.02182192914187908,
-0.019805684685707092,
-0.13585087656974792,
0.11871080845594406,
-0.09001870453357697,
0.11899729818105698,
-0.03491564840078354,
0.0815073773264885,
0.0023825892712920904,
-0.05845170468091965,
0.11969935894012451,
0.0289877038449049,
0.09193193167448044,
-0.05965009704232216,
-0.03315823897719383,
0.03096715360879898,
-0.04449234530329704,
0.03580469638109207,
-0.09313686937093735,
0.031442079693078995,
-0.10635531693696976,
-0.025107571855187416,
-0.06602425873279572,
0.04718546196818352,
-0.042435791343450546,
-0.06855174899101257,
-0.03725622221827507,
0.025839144363999367,
0.05012626573443413,
-0.008466712199151516,
0.12203888595104218,
0.02860250324010849,
0.1414310783147812,
0.09874077141284943,
0.07056708633899689,
-0.06843950599431992,
-0.07940129190683365,
-0.02658763900399208,
-0.01143626682460308,
0.050212763249874115,
-0.1347932070493698,
0.022326458245515823,
0.15249556303024292,
0.018951259553432465,
0.1510075181722641,
0.08182299882173538,
-0.018538322299718857,
0.000021221798306214623,
0.05697400122880936,
-0.16726034879684448,
-0.0875583365559578,
-0.014363158494234085,
-0.0649663656949997,
-0.12047193944454193,
0.04137254133820534,
0.09413165599107742,
-0.0674988403916359,
-0.007067597936838865,
-0.0040827360935509205,
0.01480003073811531,
-0.047181155532598495,
0.18542033433914185,
0.06140720844268799,
0.04556829482316971,
-0.09902192652225494,
0.07232040911912918,
0.04775995761156082,
-0.07217730581760406,
0.004505601711571217,
0.07354437559843063,
-0.08864618837833405,
-0.054575514048337936,
0.06766585260629654,
0.19024787843227386,
-0.04857509955763817,
-0.04677366837859154,
-0.14007768034934998,
-0.12211456149816513,
0.07924559712409973,
0.13740694522857666,
0.11964695900678635,
0.011042100377380848,
-0.06867233663797379,
0.00013753524399362504,
-0.10835038125514984,
0.10523269325494766,
0.05095338448882103,
0.06350406259298325,
-0.14289559423923492,
0.141910582780838,
0.017812194302678108,
0.048876430839300156,
-0.019941730424761772,
0.025252623483538628,
-0.09824269264936447,
0.005383232142776251,
-0.09809642285108566,
-0.0121102724224329,
-0.033991310745477676,
0.012102196924388409,
-0.005992238875478506,
-0.047347813844680786,
-0.05554035305976868,
0.010358233936131,
-0.10652101784944534,
-0.023995233699679375,
0.025617100298404694,
0.06924949586391449,
-0.10803163796663284,
-0.03712153807282448,
0.02782008796930313,
-0.06190326437354088,
0.077000193297863,
0.04481126740574837,
0.01620541326701641,
0.04950089380145073,
-0.13533835113048553,
0.016700081527233124,
0.07398871332406998,
0.03171689808368683,
0.06389815360307693,
-0.09730371832847595,
-0.006499884650111198,
-0.005229889880865812,
0.03809446096420288,
0.01969943195581436,
0.07728329300880432,
-0.14173462986946106,
0.002201495924964547,
-0.02328886091709137,
-0.08012913167476654,
-0.0682433471083641,
0.02571716532111168,
0.09044670313596725,
0.021472934633493423,
0.20141401886940002,
-0.07654982060194016,
0.05152589827775955,
-0.21565696597099304,
0.006225933320820332,
-0.009186693467199802,
-0.10887595266103745,
-0.1055423766374588,
-0.07114649564027786,
0.05583106353878975,
-0.05802374705672264,
0.1517743319272995,
0.04912406578660011,
0.022861337289214134,
0.02491481602191925,
-0.007246850058436394,
0.014773547649383545,
0.011061709374189377,
0.18983176350593567,
0.030938738957047462,
-0.03437184542417526,
0.0592007152736187,
0.0431998074054718,
0.10482235997915268,
0.11226430535316467,
0.20194236934185028,
0.14138156175613403,
-0.00624391995370388,
0.0932706668972969,
0.040944769978523254,
-0.05923188477754593,
-0.15989600121974945,
0.048051681369543076,
-0.037013355642557144,
0.11125864088535309,
-0.020855454728007317,
0.21790654957294464,
0.058761466294527054,
-0.1712712198495865,
0.04803154617547989,
-0.052442826330661774,
-0.0865674763917923,
-0.11406191438436508,
-0.05263666808605194,
-0.07929245382547379,
-0.127937912940979,
-0.005218966398388147,
-0.11683830618858337,
-0.002244236646220088,
0.12685494124889374,
0.0028031114488840103,
-0.028577234596014023,
0.15587973594665527,
0.006063263397663832,
0.021677058190107346,
0.05789635330438614,
0.012065466493368149,
-0.03534611314535141,
-0.13333582878112793,
-0.059983085840940475,
-0.017461296170949936,
-0.006359034217894077,
0.032993730157613754,
-0.06168423965573311,
-0.03825095295906067,
0.03244449943304062,
-0.022318247705698013,
-0.0928620770573616,
0.005188292358070612,
0.012640755623579025,
0.053713541477918625,
0.04606783762574196,
0.011239070445299149,
0.019917158409953117,
-0.0031351482030004263,
0.20080041885375977,
-0.07217791676521301,
-0.06660531461238861,
-0.10717114806175232,
0.22972801327705383,
0.03417762741446495,
-0.02237319014966488,
0.03579697757959366,
-0.06617016345262527,
0.0030825489666312933,
0.24917256832122803,
0.2160906195640564,
-0.08210866153240204,
-0.007621712051331997,
0.015840673819184303,
-0.00944583211094141,
-0.02301640249788761,
0.10099002718925476,
0.1437874436378479,
0.05389159917831421,
-0.09199661761522293,
-0.046872831881046295,
-0.05884753540158272,
-0.018054412677884102,
-0.03788604214787483,
0.07106056064367294,
0.04584185779094696,
0.0066960579715669155,
-0.034526970237493515,
0.05514732748270035,
-0.06887141615152359,
-0.09347423166036606,
0.054371193051338196,
-0.2162213772535324,
-0.16998834908008575,
-0.013261387124657631,
0.09828519821166992,
0.0034375409595668316,
0.05994460731744766,
-0.030700774863362312,
-0.0028944036457687616,
0.09509637206792831,
-0.021005388349294662,
-0.09704624861478806,
-0.06895597279071808,
0.08762237429618835,
-0.10917830467224121,
0.22248877584934235,
-0.04615882411599159,
0.05427993834018707,
0.12458188831806183,
0.06982076913118362,
-0.07060165703296661,
0.06350772827863693,
0.043261464685201645,
-0.040720079094171524,
0.02771449275314808,
0.07004562765359879,
-0.03564247861504555,
0.061437126249074936,
0.048388075083494186,
-0.13911570608615875,
0.019061563536524773,
-0.04958338290452957,
-0.06783602386713028,
-0.04561956971883774,
-0.023861533030867577,
-0.06201706826686859,
0.13243703544139862,
0.21538084745407104,
-0.02663380280137062,
-0.01076560840010643,
-0.0711965560913086,
0.010668067261576653,
0.05290389806032181,
0.022487344220280647,
-0.056119345128536224,
-0.20987090468406677,
0.016981951892375946,
0.03965592756867409,
-0.01915142871439457,
-0.2438865303993225,
-0.10073988139629364,
0.00047511851880699396,
-0.07343509048223495,
-0.09609877318143845,
0.07428222894668579,
0.08455246686935425,
0.04942093417048454,
-0.057058949023485184,
-0.04013773798942566,
-0.0767010897397995,
0.14584758877754211,
-0.1440131962299347,
-0.09217162430286407
] |
null | null |
transformers
|
# Keyphrase Boundary Infilling with Replacement (KBIR)
The KBIR model as described in "Learning Rich Representations of Keyphrases from Text" from Findings of NAACL 2022 (https://aclanthology.org/2022.findings-naacl.67.pdf) builds on top of the RoBERTa architecture by adding an Infilling head and a Replacement Classification head that are used during pre-training. However, these heads are not used during the downstream evaluation of the model; we only leverage the pre-trained embeddings. Discarding the heads keeps the model compatible with all AutoModel classes that RoBERTa supports.
We provide examples on how to perform downstream evaluation on some of the tasks reported in the paper.
## Downstream Evaluation
### Keyphrase Extraction
```
from transformers import AutoTokenizer, AutoModelForTokenClassification
from datasets import load_dataset

# Pre-trained KBIR encoder with a (randomly initialized) token-classification head
tokenizer = AutoTokenizer.from_pretrained("bloomberg/KBIR")
model = AutoModelForTokenClassification.from_pretrained("bloomberg/KBIR")

# SemEval 2017 keyphrase extraction corpus with token-level keyphrase tags
dataset = load_dataset("midas/semeval2017_ke_tagged")
```
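Note that `AutoModelForTokenClassification` attaches a freshly initialized classification head to the pre-trained KBIR encoder, so the model has to be fine-tuned on the tagged dataset before its tags are meaningful. Continuing from the snippet above, a minimal forward-pass sketch (the example sentence is illustrative; `model.config.id2label` stays generic until fine-tuning):
```
import torch

text = "We propose a keyphrase boundary infilling objective for pre-training."
inputs = tokenizer(text, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits          # shape: (1, seq_len, num_labels)
pred_ids = logits.argmax(dim=-1)[0].tolist()

# Labels remain generic (LABEL_0, LABEL_1, ...) until the head is fine-tuned
tokens = tokenizer.convert_ids_to_tokens(inputs["input_ids"][0])
print(list(zip(tokens, [model.config.id2label[i] for i in pred_ids])))
```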
Reported Results:
| Model | Inspec | SE10 | SE17 |
|-----------------------|--------|-------|-------|
| RoBERTa+BiLSTM-CRF | 59.5 | 27.8 | 50.8 |
| RoBERTa+TG-CRF | 60.4 | 29.7 | 52.1 |
| SciBERT+Hypernet-CRF | 62.1 | 36.7 | 54.4 |
| RoBERTa+Hypernet-CRF | 62.3 | 34.8 | 53.3 |
| RoBERTa-extended-CRF* | 62.09 | 40.61 | 52.32 |
| KBI-CRF* | 62.61 | 40.81 | 59.7 |
| KBIR-CRF* | 62.72 | 40.15 | 62.56 |
### Named Entity Recognition
```
from transformers import AutoTokenizer, AutoModelForTokenClassification
from datasets import load_dataset

# Pre-trained KBIR encoder with a (randomly initialized) token-classification head
tokenizer = AutoTokenizer.from_pretrained("bloomberg/KBIR")
model = AutoModelForTokenClassification.from_pretrained("bloomberg/KBIR")

# CoNLL-2003 named entity recognition benchmark
dataset = load_dataset("conll2003")
```
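A hedged sketch of a fine-tuning setup on CoNLL-2003 follows; the hyperparameters and output directory are illustrative (not the ones used in the paper), and the model is re-loaded with `num_labels` so the classification head matches the tag set:
```
from transformers import (AutoTokenizer, AutoModelForTokenClassification,
                          DataCollatorForTokenClassification, TrainingArguments, Trainer)
from datasets import load_dataset

dataset = load_dataset("conll2003")
label_list = dataset["train"].features["ner_tags"].feature.names

# add_prefix_space is needed for RoBERTa-style tokenizers on pre-split words
tokenizer = AutoTokenizer.from_pretrained("bloomberg/KBIR", add_prefix_space=True)
model = AutoModelForTokenClassification.from_pretrained("bloomberg/KBIR", num_labels=len(label_list))

def tokenize_and_align(batch):
    tokenized = tokenizer(batch["tokens"], is_split_into_words=True, truncation=True)
    labels = []
    for i, tags in enumerate(batch["ner_tags"]):
        word_ids = tokenized.word_ids(batch_index=i)
        # Simple strategy: every sub-token inherits its word's tag; specials get -100
        labels.append([-100 if w is None else tags[w] for w in word_ids])
    tokenized["labels"] = labels
    return tokenized

encoded = dataset.map(tokenize_and_align, batched=True)

trainer = Trainer(
    model=model,
    args=TrainingArguments(output_dir="kbir-conll2003", learning_rate=2e-05,
                           per_device_train_batch_size=16, num_train_epochs=3),
    train_dataset=encoded["train"],
    eval_dataset=encoded["validation"],
    data_collator=DataCollatorForTokenClassification(tokenizer),
    tokenizer=tokenizer,
)
# trainer.train()
```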
Reported Results:
| Model | F1 |
|---------------------------------|-------|
| LSTM-CRF (Lample et al., 2016) | 91.0 |
| ELMo (Peters et al., 2018) | 92.2 |
| BERT (Devlin et al., 2018) | 92.8 |
| (Akbik et al., 2019) | 93.1 |
| (Baevski et al., 2019) | 93.5 |
| LUKE (Yamada et al., 2020) | 94.3 |
| LUKE w/o entity attention | 94.1 |
| RoBERTa (Yamada et al., 2020) | 92.4 |
| RoBERTa-extended* | 92.54 |
| KBI* | 92.73 |
| KBIR* | 92.97 |
### Question Answering
```
from transformers import AutoTokenizer, AutoModelForQuestionAnswering
from datasets import load_dataset

# Pre-trained KBIR encoder with a (randomly initialized) span-prediction head
tokenizer = AutoTokenizer.from_pretrained("bloomberg/KBIR")
model = AutoModelForQuestionAnswering.from_pretrained("bloomberg/KBIR")

# SQuAD question answering benchmark
dataset = load_dataset("squad")
```
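As with token classification, the span-prediction head here is freshly initialized, so fine-tuning on SQuAD is required before the answers are meaningful. Continuing from the snippet above, a hedged sketch of decoding start/end logits into an answer span (question and context are illustrative):
```
import torch

question = "What does KBIR stand for?"
context = "KBIR stands for Keyphrase Boundary Infilling with Replacement."
inputs = tokenizer(question, context, return_tensors="pt", truncation=True)
with torch.no_grad():
    outputs = model(**inputs)

# Pick the most likely start/end positions and decode the span between them
start = outputs.start_logits.argmax(dim=-1).item()
end = outputs.end_logits.argmax(dim=-1).item()
answer_ids = inputs["input_ids"][0][start:end + 1]
print(tokenizer.decode(answer_ids, skip_special_tokens=True))
```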
Reported Results:
| Model | EM | F1 |
|------------------------|-------|-------|
| BERT | 84.2 | 91.1 |
| XLNet | 89.0 | 94.5 |
| ALBERT | 89.3 | 94.8 |
| LUKE | 89.8 | 95.0 |
| LUKE w/o entity attention | 89.2 | 94.7 |
| RoBERTa | 88.9 | 94.6 |
| RoBERTa-extended* | 88.88 | 94.55 |
| KBI* | 88.97 | 94.7 |
| KBIR* | 89.04 | 94.75 |
## Any other classification task
As mentioned above since KBIR is built on top of the RoBERTa architecture, it is compatible with any AutoModel setting that RoBERTa is also compatible with.
We encourage you to try fine-tuning KBIR on different datasets and report the downstream results.
## Citation
Please cite this work using the following BibTeX entry:
```
@inproceedings{kulkarni-etal-2022-learning,
title = "Learning Rich Representation of Keyphrases from Text",
author = "Kulkarni, Mayank and
Mahata, Debanjan and
Arora, Ravneet and
Bhowmik, Rajarshi",
booktitle = "Findings of the Association for Computational Linguistics: NAACL 2022",
month = jul,
year = "2022",
address = "Seattle, United States",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2022.findings-naacl.67",
doi = "10.18653/v1/2022.findings-naacl.67",
pages = "891--906",
abstract = "In this work, we explore how to train task-specific language models aimed towards learning rich representation of keyphrases from text documents. We experiment with different masking strategies for pre-training transformer language models (LMs) in discriminative as well as generative settings. In the discriminative setting, we introduce a new pre-training objective - Keyphrase Boundary Infilling with Replacement (KBIR), showing large gains in performance (upto 8.16 points in F1) over SOTA, when the LM pre-trained using KBIR is fine-tuned for the task of keyphrase extraction. In the generative setting, we introduce a new pre-training setup for BART - KeyBART, that reproduces the keyphrases related to the input text in the CatSeq format, instead of the denoised original input. This also led to gains in performance (upto 4.33 points in F1@M) over SOTA for keyphrase generation. Additionally, we also fine-tune the pre-trained language models on named entity recognition (NER), question answering (QA), relation extraction (RE), abstractive summarization and achieve comparable performance with that of the SOTA, showing that learning rich representation of keyphrases is indeed beneficial for many other fundamental NLP tasks.",
}
```
## Contact
For any questions contact [email protected]
|
{"license": "apache-2.0"}
| null |
bloomberg/KBIR
|
[
"transformers",
"pytorch",
"roberta",
"license:apache-2.0",
"endpoints_compatible",
"has_space",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #roberta #license-apache-2.0 #endpoints_compatible #has_space #region-us
|
Keyphrase Boundary Infilling with Replacement (KBIR)
====================================================
The KBIR model as described in "Learning Rich Representations of Keyphrases from Text" from Findings of NAACL 2022 (URL) builds on top of the RoBERTa architecture by adding an Infilling head and a Replacement Classification head that are used during pre-training. However, these heads are not used during the downstream evaluation of the model; we only leverage the pre-trained embeddings. Discarding the heads keeps the model compatible with all AutoModel classes that RoBERTa supports.
We provide examples on how to perform downstream evaluation on some of the tasks reported in the paper.
Downstream Evaluation
---------------------
### Keyphrase Extraction
Reported Results:
### Named Entity Recognition
Reported Results:
### Question Answering
Reported Results:
Model: BERT, EM: 84.2, F1: 91.1
Model: XLNet, EM: 89.0, F1: 94.5
Model: ALBERT, EM: 89.3, F1: 94.8
Model: LUKE, EM: 89.8, F1: 95.0
Model: LUKE w/o entity attention, EM: 89.2, F1: 94.7
Model: RoBERTa, EM: 88.9, F1: 94.6
Model: RoBERTa-extended\*, EM: 88.88, F1: 94.55
Model: KBI\*, EM: 88.97, F1: 94.7
Model: KBIR\*, EM: 89.04, F1: 94.75
Any other classification task
-----------------------------
As mentioned above since KBIR is built on top of the RoBERTa architecture, it is compatible with any AutoModel setting that RoBERTa is also compatible with.
We encourage you to try fine-tuning KBIR on different datasets and report the downstream results.
Please cite this work using the following BibTeX entry:
Contact
-------
For any questions contact dmahata@URL
|
[
"### Keyphrase Extraction\n\n\nReported Results:",
"### Named Entity Recognition\n\n\nReported Results:",
"### Question Answering\n\n\nReported Results:\n\n\nModel: BERT, EM: 84.2, F1: 91.1\nModel: XLNet, EM: 89.0, F1: 94.5\nModel: ALBERT, EM: 89.3, F1: 94.8\nModel: LUKE, EM: 89.8, F1: 95.0\nModel: LUKE w/o entity attention, EM: 89.2, F1: 94.7\nModel: RoBERTa, EM: 88.9, F1: 94.6\nModel: RoBERTa-extended\\*, EM: 88.88, F1: 94.55\nModel: KBI\\*, EM: 88.97, F1: 94.7\nModel: KBIR\\*, EM: 89.04, F1: 94.75\n\n\nAny other classification task\n-----------------------------\n\n\nAs mentioned above since KBIR is built on top of the RoBERTa architecture, it is compatible with any AutoModel setting that RoBERTa is also compatible with.\n\n\nWe encourage you to try fine-tuning KBIR on different datasets and report the downstream results.\n\n\nPlease cite this work using the following BibTeX entry:\n\n\nContact\n-------\n\n\nFor any questions contact dmahata@URL"
] |
[
"TAGS\n#transformers #pytorch #roberta #license-apache-2.0 #endpoints_compatible #has_space #region-us \n",
"### Keyphrase Extraction\n\n\nReported Results:",
"### Named Entity Recognition\n\n\nReported Results:",
"### Question Answering\n\n\nReported Results:\n\n\nModel: BERT, EM: 84.2, F1: 91.1\nModel: XLNet, EM: 89.0, F1: 94.5\nModel: ALBERT, EM: 89.3, F1: 94.8\nModel: LUKE, EM: 89.8, F1: 95.0\nModel: LUKE w/o entity attention, EM: 89.2, F1: 94.7\nModel: RoBERTa, EM: 88.9, F1: 94.6\nModel: RoBERTa-extended\\*, EM: 88.88, F1: 94.55\nModel: KBI\\*, EM: 88.97, F1: 94.7\nModel: KBIR\\*, EM: 89.04, F1: 94.75\n\n\nAny other classification task\n-----------------------------\n\n\nAs mentioned above since KBIR is built on top of the RoBERTa architecture, it is compatible with any AutoModel setting that RoBERTa is also compatible with.\n\n\nWe encourage you to try fine-tuning KBIR on different datasets and report the downstream results.\n\n\nPlease cite this work using the following BibTeX entry:\n\n\nContact\n-------\n\n\nFor any questions contact dmahata@URL"
] |
[
36,
11,
13,
256
] |
[
"passage: TAGS\n#transformers #pytorch #roberta #license-apache-2.0 #endpoints_compatible #has_space #region-us \n### Keyphrase Extraction\n\n\nReported Results:### Named Entity Recognition\n\n\nReported Results:### Question Answering\n\n\nReported Results:\n\n\nModel: BERT, EM: 84.2, F1: 91.1\nModel: XLNet, EM: 89.0, F1: 94.5\nModel: ALBERT, EM: 89.3, F1: 94.8\nModel: LUKE, EM: 89.8, F1: 95.0\nModel: LUKE w/o entity attention, EM: 89.2, F1: 94.7\nModel: RoBERTa, EM: 88.9, F1: 94.6\nModel: RoBERTa-extended\\*, EM: 88.88, F1: 94.55\nModel: KBI\\*, EM: 88.97, F1: 94.7\nModel: KBIR\\*, EM: 89.04, F1: 94.75\n\n\nAny other classification task\n-----------------------------\n\n\nAs mentioned above since KBIR is built on top of the RoBERTa architecture, it is compatible with any AutoModel setting that RoBERTa is also compatible with.\n\n\nWe encourage you to try fine-tuning KBIR on different datasets and report the downstream results.\n\n\nPlease cite this work using the following BibTeX entry:\n\n\nContact\n-------\n\n\nFor any questions contact dmahata@URL"
] |
[
-0.07931769639253616,
0.06474899500608444,
-0.0050156740471720695,
0.048398640006780624,
0.04830460622906685,
0.01744413562119007,
0.04362933337688446,
0.10051337629556656,
0.1415790170431137,
0.09301519393920898,
0.05291101336479187,
0.18618980050086975,
0.06556149572134018,
0.2075665295124054,
-0.06128862872719765,
-0.14473885297775269,
-0.008821830153465271,
-0.006720613222569227,
0.054178208112716675,
0.08994743973016739,
0.12229394167661667,
-0.0955691784620285,
0.04944763705134392,
-0.026933016255497932,
-0.06550378352403641,
0.05051903799176216,
-0.011134016327559948,
-0.0391717255115509,
0.08231902867555618,
0.06881210952997208,
0.09616133570671082,
0.06816698610782623,
0.0008323327638208866,
-0.13865084946155548,
0.014338430017232895,
0.025955205783247948,
-0.028816580772399902,
0.050734441727399826,
0.08157385885715485,
-0.018000327050685883,
0.09757637977600098,
-0.09458666294813156,
0.007736813277006149,
0.06714779138565063,
-0.10069119930267334,
-0.2049741894006729,
-0.10807839035987854,
0.17510466277599335,
0.1887616068124771,
-0.010702353902161121,
-0.05272816866636276,
0.13779360055923462,
-0.05172402039170265,
0.09700678288936615,
0.09741972386837006,
-0.18133781850337982,
-0.025825457647442818,
0.09072888642549515,
0.0001259753480553627,
0.012267344631254673,
-0.03727275878190994,
-0.031861454248428345,
0.019636349752545357,
0.011192419566214085,
0.0245522428303957,
-0.05178631469607353,
0.02695469930768013,
-0.03366551920771599,
-0.10285016149282455,
-0.0482306033372879,
0.1214328482747078,
0.12113099545240402,
-0.07541167736053467,
-0.1662335842847824,
-0.06146042421460152,
-0.04827135428786278,
-0.015270540490746498,
-0.05348648130893707,
0.0007489968556910753,
-0.05328287556767464,
0.09914419054985046,
-0.03410431370139122,
-0.09849690645933151,
-0.05623304471373558,
-0.03915839642286301,
0.0543140210211277,
0.039049651473760605,
-0.013977701775729656,
0.05840903893113136,
0.09428142011165619,
-0.0767040103673935,
-0.14066873490810394,
-0.0861465185880661,
-0.09941238909959793,
-0.15566878020763397,
-0.026632722467184067,
0.05249162018299103,
0.05515042319893837,
0.043162498623132706,
0.2562284767627716,
0.03720777854323387,
0.07023046910762787,
0.014671210199594498,
-0.010857145301997662,
0.048094429075717926,
0.09622744470834732,
-0.07943487912416458,
-0.032741423696279526,
0.06134071573615074,
0.042125530540943146,
-0.0036359040532261133,
-0.02725849859416485,
-0.03754516690969467,
-0.03609462082386017,
0.10769670456647873,
0.09368711709976196,
0.08068016171455383,
0.011789746582508087,
-0.04914335533976555,
-0.07775665074586868,
0.2281723916530609,
-0.12418773025274277,
0.015079441480338573,
0.05892128124833107,
-0.026149798184633255,
-0.08958159387111664,
0.005829592701047659,
0.034053411334753036,
-0.01938316598534584,
0.02508728764951229,
-0.03656215965747833,
-0.06412528455257416,
-0.03904780372977257,
-0.12911999225616455,
0.07290012389421463,
-0.15322892367839813,
0.010705946013331413,
-0.15081541240215302,
-0.09622357785701752,
-0.07178984582424164,
0.003691115416586399,
-0.07279682159423828,
-0.03168537840247154,
0.014092718251049519,
-0.11405204981565475,
0.002021057764068246,
-0.05279199779033661,
0.1024545282125473,
-0.028338879346847534,
0.04387500882148743,
0.027010386809706688,
0.06493975222110748,
0.007720377296209335,
0.013153034262359142,
-0.03498804196715355,
0.026024892926216125,
-0.2291867434978485,
0.12603941559791565,
-0.1305142045021057,
-0.000438229413703084,
-0.17360804975032806,
-0.007188612595200539,
0.023896481841802597,
0.012492912821471691,
0.03369565308094025,
0.15058867633342743,
-0.14054234325885773,
0.0018454341916367412,
0.06850029528141022,
-0.03380044177174568,
-0.06958188861608505,
0.07069934159517288,
-0.019510600715875626,
-0.02242097072303295,
0.04792581871151924,
0.15868549048900604,
0.10258407145738602,
-0.052733372896909714,
-0.11388687789440155,
-0.04406067356467247,
-0.03389928489923477,
0.022337671369314194,
0.0733451098203659,
0.008929177187383175,
0.07364895939826965,
-0.003421294968575239,
-0.028120316565036774,
-0.017833352088928223,
-0.04678142070770264,
-0.04105633124709129,
-0.010008841753005981,
-0.04473156854510307,
-0.015406918711960316,
0.01817048341035843,
-0.008792083710432053,
-0.03688078373670578,
-0.10725793987512589,
-0.020708849653601646,
0.10611878335475922,
-0.02960810251533985,
-0.060800839215517044,
-0.1339792013168335,
0.08521804213523865,
-0.09111780673265457,
0.01584424078464508,
-0.16358281672000885,
-0.07132931053638458,
0.05272909626364708,
-0.07905061542987823,
0.015352693386375904,
0.055220551788806915,
0.060221437364816666,
0.015649789944291115,
-0.005917674396187067,
-0.09488239139318466,
-0.007742782589048147,
-0.0025870297104120255,
-0.060535237193107605,
-0.13065017759799957,
-0.10765152424573898,
-0.05208910256624222,
0.1394730508327484,
-0.21004365384578705,
0.006771341897547245,
0.08452671766281128,
0.161165252327919,
0.01718616858124733,
-0.034988876432180405,
0.01785859279334545,
0.003290732391178608,
-0.03169174864888191,
-0.028875773772597313,
0.02694113180041313,
-0.06344563513994217,
-0.08578527718782425,
0.0847637876868248,
-0.07463093847036362,
-0.025831449776887894,
0.05372552573680878,
0.04937634989619255,
-0.06306909024715424,
0.07161811739206314,
-0.06244368106126785,
-0.008836311288177967,
0.04028964415192604,
-0.08098170161247253,
0.11691748350858688,
0.060222480446100235,
0.07953333109617233,
-0.068771131336689,
-0.12234043329954147,
0.013763445429503918,
-0.0034111025743186474,
0.0015848075272515416,
0.21624405682086945,
0.004744145553559065,
-0.17483539879322052,
0.061192549765110016,
0.05011850595474243,
0.029878418892621994,
0.10617010295391083,
-0.02074703574180603,
-0.06304324418306351,
-0.044136568903923035,
0.05683713033795357,
0.027734529227018356,
0.05700322613120079,
-0.03520304709672928,
0.025543734431266785,
0.08367370814085007,
0.015925385057926178,
0.005832219496369362,
-0.04635731130838394,
0.031189702451229095,
0.020955443382263184,
-0.06748674064874649,
-0.01590578444302082,
0.09954377263784409,
0.05142251029610634,
0.0738382413983345,
-0.020071011036634445,
0.03578988090157509,
-0.018587110564112663,
-0.06613345444202423,
-0.09861644357442856,
0.19138242304325104,
-0.012117579579353333,
-0.21540707349777222,
-0.15042579174041748,
-0.041947100311517715,
-0.06614246964454651,
-0.012072764337062836,
0.06531281024217606,
-0.05935792624950409,
-0.08023491501808167,
-0.06938343495130539,
0.03182150423526764,
0.06437037885189056,
-0.03017074055969715,
-0.04780259728431702,
-0.007139076944440603,
0.05908830091357231,
-0.1499667912721634,
-0.027725612744688988,
0.004262096248567104,
-0.07785255461931229,
0.06305108219385147,
0.03470439463853836,
0.11716514080762863,
0.0999920442700386,
-0.017287475988268852,
0.0022041427437216043,
0.01677476242184639,
0.2307073175907135,
-0.06780869513750076,
0.09412382543087006,
0.17663249373435974,
-0.04119138419628143,
0.04277504235506058,
0.14121729135513306,
0.008191454224288464,
-0.06598539650440216,
0.015747051686048508,
0.08608290553092957,
-0.030253175646066666,
-0.2498117834329605,
-0.03989385813474655,
-0.02751333825290203,
0.0008598063723184168,
0.029086772352457047,
0.052935827523469925,
0.06826832890510559,
0.015465903095901012,
-0.05212875083088875,
-0.022334402427077293,
0.011461838148534298,
0.061514608561992645,
0.09047558903694153,
0.01809905841946602,
0.1269620805978775,
-0.0650632455945015,
0.004162282217293978,
0.06621702760457993,
-0.01958940550684929,
0.18579931557178497,
0.01128101721405983,
0.10727691650390625,
0.1186637133359909,
0.03044426441192627,
0.007265777792781591,
0.06699465960264206,
-0.04663223773241043,
0.02113982103765011,
-0.008655006065964699,
-0.07226558774709702,
-0.022721722722053528,
0.0597306452691555,
0.08267128467559814,
-0.04598443955183029,
-0.06835925579071045,
-0.11503268778324127,
0.0031403768807649612,
0.12136862426996231,
0.11734083294868469,
-0.21099330484867096,
-0.023062976077198982,
0.050564419478178024,
-0.09799331426620483,
-0.056319452822208405,
-0.08957978338003159,
0.05118107423186302,
-0.08324950933456421,
0.03006085194647312,
0.007723282091319561,
0.10087455809116364,
-0.12326505780220032,
-0.026134677231311798,
-0.09043464064598083,
0.03134729713201523,
0.02232505939900875,
0.06539913266897202,
-0.06960513442754745,
0.22308368980884552,
0.03455944359302521,
0.0682670995593071,
-0.07739122956991196,
0.0031795199029147625,
0.03880983963608742,
-0.07953500002622604,
0.14666283130645752,
-0.010365607216954231,
0.009953767992556095,
-0.22798624634742737,
-0.17119601368904114,
0.027753302827477455,
0.01305423304438591,
-0.13289885222911835,
0.1123519167304039,
0.017800748348236084,
-0.03261703997850418,
-0.027018481865525246,
0.057452429085969925,
-0.18124383687973022,
-0.07067844271659851,
0.016021134331822395,
0.021274231374263763,
-0.00065696204546839,
-0.05805331841111183,
-0.014800635166466236,
0.020339567214250565,
0.07490015774965286,
-0.15706034004688263,
-0.06844796240329742,
-0.11756784468889236,
0.012643760070204735,
0.1699339896440506,
-0.11399979144334793,
0.03586781397461891,
-0.04260185733437538,
0.0869513601064682,
0.009705774486064911,
-0.049464307725429535,
0.01843905821442604,
-0.07615500688552856,
-0.13014419376850128,
0.0026704110205173492,
0.15082688629627228,
0.08467373251914978,
0.04921920225024223,
0.05984535440802574,
0.019115494564175606,
0.02911810390651226,
-0.11331577599048615,
0.02835754118859768,
0.061991799622774124,
0.09897353500127792,
0.09728102385997772,
-0.03871433809399605,
-0.08085665851831436,
-0.09196237474679947,
-0.023215990513563156,
0.08416946232318878,
0.40462955832481384,
-0.06316537410020828,
0.08635824173688889,
0.09309450536966324,
-0.03859439119696617,
-0.19654160737991333,
-0.07334623485803604,
0.09505867958068848,
0.05729011818766594,
0.029275735840201378,
-0.10143384337425232,
0.058544572442770004,
0.08277984708547592,
-0.030616357922554016,
0.0777985006570816,
-0.13453629612922668,
-0.1356426477432251,
0.12379046529531479,
0.053680598735809326,
0.016917096450924873,
-0.12000677734613419,
-0.08969724178314209,
-0.013550229370594025,
-0.18323156237602234,
0.07828852534294128,
-0.019254932180047035,
0.09664978832006454,
-0.032279741019010544,
-0.04540124163031578,
0.034530188888311386,
-0.0352623350918293,
0.14564812183380127,
0.0341656394302845,
0.06641608476638794,
-0.04485469311475754,
-0.04403621703386307,
-0.005035695154219866,
-0.09030929207801819,
0.10149117559194565,
0.02164614200592041,
0.041850246489048004,
-0.2316976636648178,
-0.05822514742612839,
-0.0807143822312355,
0.09255137294530869,
-0.03042997233569622,
-0.038060180842876434,
-0.03918158635497093,
0.05006972327828407,
0.03929860517382622,
0.009940268471837044,
0.028406968340277672,
-0.09916304051876068,
0.020661011338233948,
0.17141474783420563,
0.13079044222831726,
-0.04689185321331024,
0.0013152469182386994,
0.0005807839334011078,
-0.043678443878889084,
-0.0031355624087154865,
-0.14701689779758453,
0.06603078544139862,
0.09223124384880066,
0.022318962961435318,
0.09286737442016602,
0.007428791373968124,
-0.11682198196649551,
-0.005664791911840439,
0.09366273880004883,
-0.09367470443248749,
-0.10700175166130066,
-0.005387555807828903,
-0.06681538373231888,
-0.08158905059099197,
-0.011303468607366085,
0.14239011704921722,
0.008522254414856434,
-0.04403627663850784,
0.02464165724813938,
0.014917643740773201,
0.01697819121181965,
0.13162043690681458,
0.09087306261062622,
0.09280355274677277,
-0.05820544809103012,
0.00039380797534249723,
0.042853206396102905,
-0.05390097200870514,
0.027907593175768852,
0.0481482557952404,
-0.0725618377327919,
-0.08588770776987076,
-0.059608303010463715,
0.11060654371976852,
-0.03412184491753578,
-0.03640833497047424,
0.01361022237688303,
-0.028018813580274582,
0.061439868062734604,
0.13915643095970154,
0.04106573015451431,
0.03721291199326515,
0.0708668902516365,
0.018703902140259743,
-0.05368500202894211,
0.11334429681301117,
0.08871953934431076,
0.03884172812104225,
-0.16163219511508942,
-0.016554901376366615,
-0.020198935642838478,
0.043256230652332306,
-0.015612464398145676,
0.02356105111539364,
-0.111177459359169,
-0.03023248352110386,
-0.1150692030787468,
-0.0095177898183465,
-0.02742180787026882,
-0.022934293374419212,
-0.009546393528580666,
-0.04122324287891388,
-0.06038666516542435,
0.01958608441054821,
-0.09474750608205795,
-0.0672186091542244,
-0.03943658992648125,
0.10865286737680435,
-0.14044098556041718,
-0.07425231486558914,
0.049701154232025146,
-0.10297458618879318,
0.12571987509727478,
0.02031954936683178,
0.00879837665706873,
0.017583008855581284,
-0.02085980214178562,
0.00426398916170001,
0.03438878059387207,
0.0482831634581089,
0.05531115457415581,
-0.20003946125507355,
0.03072470612823963,
-0.03745920583605766,
-0.0034246467985212803,
0.009622418321669102,
0.02693095989525318,
-0.10508588701486588,
-0.03668402135372162,
-0.07114879041910172,
-0.07725145667791367,
-0.07153458148241043,
0.07648687809705734,
0.11666543781757355,
0.08336049318313599,
0.1460193693637848,
-0.022808419540524483,
0.03030047006905079,
-0.19132432341575623,
-0.011465204879641533,
-0.03387416526675224,
-0.03508348390460014,
0.05598398670554161,
-0.06589513272047043,
0.05094302073121071,
-0.03916122391819954,
0.10251424461603165,
-0.036322616040706635,
0.009617987088859081,
0.03731071576476097,
-0.07995154708623886,
-0.01691826619207859,
0.03283284604549408,
0.1598980575799942,
0.006020986940711737,
-0.007107368670403957,
0.030417300760746002,
0.03820638731122017,
-0.022193849086761475,
-0.08022940903902054,
0.08536890149116516,
0.1867128163576126,
0.015742355957627296,
0.01731147989630699,
0.12366408854722977,
-0.11218413710594177,
-0.05884329974651337,
0.0061453282833099365,
-0.037022847682237625,
0.05034758150577545,
-0.04650471359491348,
0.1342131644487381,
0.11515169590711594,
-0.14259131252765656,
0.05338408425450325,
-0.008013640530407429,
-0.045363202691078186,
-0.11123468726873398,
-0.10281611979007721,
-0.09884646534919739,
-0.03345300629734993,
0.018568456172943115,
-0.07457384467124939,
0.03728804737329483,
0.053210023790597916,
0.029618997126817703,
0.006834977772086859,
0.1235840767621994,
-0.03210414946079254,
0.003722831839695573,
0.07131076604127884,
-0.005015102215111256,
-0.07328745722770691,
0.015650007873773575,
0.03528127446770668,
0.048105478286743164,
0.06652095913887024,
0.03763636574149132,
0.04326290637254715,
-0.05469845235347748,
0.021586937829852104,
-0.05053611844778061,
-0.11490076780319214,
-0.02198413573205471,
0.044577065855264664,
0.05934920534491539,
0.14026029407978058,
0.06424244493246078,
-0.0452519990503788,
-0.00009728087024996057,
0.1908976137638092,
-0.030667083337903023,
-0.05622010678052902,
-0.09033173322677612,
0.18519297242164612,
0.03243017569184303,
0.04607277363538742,
0.00463908864185214,
-0.13077549636363983,
0.025899067521095276,
0.12775073945522308,
0.09668684750795364,
0.0543668195605278,
-0.0142999067902565,
0.013736658729612827,
0.0026252446696162224,
-0.0357365719974041,
0.07861604541540146,
0.057854704558849335,
0.21630501747131348,
-0.040543098002672195,
0.07035188376903534,
0.002218768699094653,
-0.018290603533387184,
-0.019288266077637672,
0.159718856215477,
0.028297405689954758,
-0.04861034080386162,
-0.028711708262562752,
0.09168998897075653,
-0.01653251051902771,
-0.21072335541248322,
0.0629565492272377,
-0.10222672671079636,
-0.12952405214309692,
-0.012745248153805733,
-0.0181596539914608,
-0.020278966054320335,
0.05597078055143356,
-0.04395994916558266,
-0.02335400879383087,
0.11148198693990707,
-0.008555728942155838,
-0.0541459321975708,
-0.017972351983189583,
0.04986678063869476,
0.013263453729450703,
0.1897304356098175,
-0.014046430587768555,
0.13104109466075897,
0.16438037157058716,
-0.058218661695718765,
-0.14238102734088898,
0.03134315088391304,
0.028960300609469414,
-0.10341989994049072,
0.06754383444786072,
0.1932242214679718,
-0.0022988193668425083,
-0.017882373183965683,
0.034554190933704376,
-0.23204807937145233,
-0.017577288672327995,
0.03528048098087311,
-0.006776257883757353,
-0.10737474262714386,
0.09161096811294556,
-0.09262797236442566,
0.09199009090662003,
0.10139021277427673,
-0.0376540943980217,
0.014549591578543186,
-0.07558322697877884,
0.0493394210934639,
0.07309386134147644,
0.09660818427801132,
-0.004603053908795118,
-0.16581334173679352,
0.06559669226408005,
0.007986390963196754,
0.04600611701607704,
-0.263686865568161,
-0.05062156170606613,
-0.0014974564546719193,
-0.018685154616832733,
-0.03178830444812775,
0.12942911684513092,
0.02783055603504181,
0.044064488261938095,
-0.014589858241379261,
-0.1309649497270584,
-0.03884144127368927,
0.10439145565032959,
-0.1407584547996521,
-0.06045638397336006
] |
null | null |
transformers
|
# KeyBART
KeyBART, as described in "Learning Rich Representations of Keyphrases from Text" from the Findings of NAACL 2022 (https://aclanthology.org/2022.findings-naacl.67.pdf), pre-trains a BART-based architecture to produce a concatenated sequence of keyphrases in the CatSeqD format.
We provide some examples on Downstream Evaluations setups and and also how it can be used for Text-to-Text Generation in a zero-shot setting.
## Downstream Evaluation
### Keyphrase Generation
```
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
from datasets import load_dataset
# Load the KeyBART checkpoint and its tokenizer from the Hub
tokenizer = AutoTokenizer.from_pretrained("bloomberg/KeyBART")
model = AutoModelForSeq2SeqLM.from_pretrained("bloomberg/KeyBART")
# KP20k is the benchmark used for keyphrase generation evaluation
dataset = load_dataset("midas/kp20k")
```
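A minimal generation sketch, continuing from the setup above (added for illustration; the `test` split, the `document` field name, and the decoding settings are assumptions rather than the exact evaluation configuration used in the paper). Generating directly from the pre-trained checkpoint, as below, corresponds to the zero-shot rows in the tables; the other rows involve fine-tuning on the benchmark first:
```
# Pick one document from the evaluation split (check dataset.column_names for the actual schema)
sample = dataset["test"][0]
doc = sample["document"]
text = " ".join(doc) if isinstance(doc, list) else doc
# KeyBART emits the keyphrases as one ';'-separated sequence (CatSeq format)
inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=512)
outputs = model.generate(**inputs, num_beams=4, max_length=64)
keyphrases = [k.strip() for k in tokenizer.decode(outputs[0], skip_special_tokens=True).split(";") if k.strip()]
print(keyphrases)
```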
Reported Results:
#### Present Keyphrase Generation
| | Inspec | | NUS | | Krapivin | | SemEval | | KP20k | |
|---------------|--------|-------|-------|-------|----------|-------|---------|-------|-------|-------|
| Model | F1@5 | F1@M | F1@5 | F1@M | F1@5 | F1@M | F1@5 | F1@M | F1@5 | F1@M |
| catSeq | 22.5 | 26.2 | 32.3 | 39.7 | 26.9 | 35.4 | 24.2 | 28.3 | 29.1 | 36.7 |
| catSeqTG | 22.9 | 27 | 32.5 | 39.3 | 28.2 | 36.6 | 24.6 | 29.0 | 29.2 | 36.6 |
| catSeqTG-2RF1 | 25.3 | 30.1 | 37.5 | 43.3 | 30 | 36.9 | 28.7 | 32.9 | 32.1 | 38.6 |
| GANMR | 25.8 | 29.9 | 34.8 | 41.7 | 28.8 | 36.9 | N/A | N/A | 30.3 | 37.8 |
| ExHiRD-h | 25.3 | 29.1 | N/A | N/A | 28.6 | 34.7 | 28.4 | 33.5 | 31.1 | 37.4 |
| Transformer (Ye et al., 2021) | 28.15 | 32.56 | 37.07 | 41.91 | 31.58 | 36.55 | 28.71 | 32.52 | 33.21 | 37.71 |
| BART* | 23.59 | 28.46 | 35.00 | 42.65 | 26.91 | 35.37 | 26.72 | 31.91 | 29.25 | 37.51 |
| KeyBART-DOC* | 24.42 | 29.57 | 31.37 | 39.24 | 24.21 | 32.60 | 24.69 | 30.50 | 28.82 | 37.59 |
| KeyBART* | 24.49 | 29.69 | 34.77 | 43.57 | 29.24 | 38.62 | 27.47 | 33.54 | 30.71 | 39.76 |
| KeyBART* (Zero-shot) | 30.72 | 36.89 | 18.86 | 21.67 | 18.35 | 20.46 | 20.25 | 25.82 | 12.57 | 15.41 |
#### Absent Keyphrase Generation
| | Inspec | | NUS | | Krapivin | | SemEval | | KP20k | |
|---------------|--------|------|------|------|----------|------|---------|------|-------|------|
| Model | F1@5 | F1@M | F1@5 | F1@M | F1@5 | F1@M | F1@5 | F1@M | F1@5 | F1@M |
| catSeq | 0.4 | 0.8 | 1.6 | 2.8 | 1.8 | 3.6 | 1.6 | 2.8 | 1.5 | 3.2 |
| catSeqTG | 0.5 | 1.1 | 1.1 | 1.8 | 1.8 | 3.4 | 1.1 | 1.8 | 1.5 | 3.2 |
| catSeqTG-2RF1 | 1.2 | 2.1 | 1.9 | 3.1 | 3.0 | 5.3 | 2.1 | 3.0 | 2.7 | 5.0 |
| GANMR | 1.3 | 1.9 | 2.6 | 3.8 | 4.2 | 5.7 | N/A | N/A | 3.2 | 4.5 |
| ExHiRD-h | 1.1 | 2.2 | N/A | N/A | 2.2 | 4.3 | 1.7 | 2.5 | 1.6 | 3.2 |
| Transformer (Ye et al., 2021) | 1.02 | 1.94 | 2.82 | 4.82 | 3.21 | 6.04 | 2.05 | 2.33 | 2.31 | 4.61 |
| BART* | 1.08 | 1.96 | 1.80 | 2.75 | 2.59 | 4.91 | 1.34 | 1.75 | 1.77 | 3.56 |
| KeyBART-DOC* | 0.99 | 2.03 | 1.39 | 2.74 | 2.40 | 4.58 | 1.07 | 1.39 | 1.69 | 3.38 |
| KeyBART* | 0.95 | 1.81 | 1.23 | 1.90 | 3.09 | 6.08 | 1.96 | 2.65 | 2.03 | 4.26 |
| KeyBART* (Zero-shot) | 1.83 | 2.92 | 1.46 | 2.19 | 1.29 | 2.09 | 1.12 | 1.45 | 0.70 | 1.14 |
### Abstractive Summarization
```
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
from datasets import load_dataset
tokenizer = AutoTokenizer.from_pretrained("bloomberg/KeyBART")
model = AutoModelForSeq2SeqLM.from_pretrained("bloomberg/KeyBART")
# CNN/DailyMail requires a config name; "3.0.0" is the commonly used version
dataset = load_dataset("cnn_dailymail", "3.0.0")
```
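Per the paper, the summarization numbers below are obtained after fine-tuning the pre-trained checkpoint on this data. As an illustrative sketch only (not part of the original card; it assumes `model` has already been fine-tuned for summarization, and the decoding settings are assumptions):
```
# Assumes `model` has been fine-tuned on CNN/DailyMail; the pre-trained
# KeyBART checkpoint itself generates keyphrases, not summaries.
article = dataset["test"][0]["article"]
inputs = tokenizer(article, return_tensors="pt", truncation=True, max_length=1024)
summary_ids = model.generate(**inputs, num_beams=4, min_length=56, max_length=142)
print(tokenizer.decode(summary_ids[0], skip_special_tokens=True))
```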
Reported Results:
| Model | R1 | R2 | RL |
|--------------|-------|-------|-------|
| BART (Lewis et al., 2019) | 44.16 | 21.28 | 40.9 |
| BART* | 42.93 | 20.12 | 39.72 |
| KeyBART-DOC* | 42.92 | 20.07 | 39.69 |
| KeyBART* | 43.10 | 20.26 | 39.90 |
## Zero-shot settings
```
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
tokenizer = AutoTokenizer.from_pretrained("bloomberg/KeyBART")
model = AutoModelForSeq2SeqLM.from_pretrained("bloomberg/KeyBART")
```
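A minimal end-to-end sketch (added for illustration; the decoding settings are assumptions) that turns raw text into a ';'-separated keyphrase sequence like the sample shown below:
```
text = "In this work, we explore how to learn task specific language models aimed towards learning rich representation of keyphrases from text documents."
inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=512)
outputs = model.generate(**inputs, num_beams=4, max_length=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```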
Alternatively use the Hosted Inference API console provided in https://huggingface.co/bloomberg/KeyBART
Sample Zero Shot result:
```
Input: In this work, we explore how to learn task specific language models aimed towards learning rich representation of keyphrases from text documents.
We experiment with different masking strategies for pre-training transformer language models (LMs) in discriminative as well as generative settings.
In the discriminative setting, we introduce a new pre-training objective - Keyphrase Boundary Infilling with Replacement (KBIR),
showing large gains in performance (upto 9.26 points in F1) over SOTA, when LM pre-trained using KBIR is fine-tuned for the task of keyphrase extraction.
In the generative setting, we introduce a new pre-training setup for BART - KeyBART, that reproduces the keyphrases related to the input text in the CatSeq
format, instead of the denoised original input. This also led to gains in performance (upto 4.33 points in F1@M) over SOTA for keyphrase generation.
Additionally, we also fine-tune the pre-trained language models on named entity recognition (NER), question answering (QA), relation extraction (RE),
abstractive summarization and achieve comparable performance with that of the SOTA, showing that learning rich representation of keyphrases is indeed beneficial
for many other fundamental NLP tasks.
Output: language model;keyphrase generation;new pre-training objective;pre-training setup;
```
## Citation
Please cite this work using the following BibTeX entry:
```
@inproceedings{kulkarni-etal-2022-learning,
title = "Learning Rich Representation of Keyphrases from Text",
author = "Kulkarni, Mayank and
Mahata, Debanjan and
Arora, Ravneet and
Bhowmik, Rajarshi",
booktitle = "Findings of the Association for Computational Linguistics: NAACL 2022",
month = jul,
year = "2022",
address = "Seattle, United States",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2022.findings-naacl.67",
doi = "10.18653/v1/2022.findings-naacl.67",
pages = "891--906",
abstract = "In this work, we explore how to train task-specific language models aimed towards learning rich representation of keyphrases from text documents. We experiment with different masking strategies for pre-training transformer language models (LMs) in discriminative as well as generative settings. In the discriminative setting, we introduce a new pre-training objective - Keyphrase Boundary Infilling with Replacement (KBIR), showing large gains in performance (upto 8.16 points in F1) over SOTA, when the LM pre-trained using KBIR is fine-tuned for the task of keyphrase extraction. In the generative setting, we introduce a new pre-training setup for BART - KeyBART, that reproduces the keyphrases related to the input text in the CatSeq format, instead of the denoised original input. This also led to gains in performance (upto 4.33 points in F1@M) over SOTA for keyphrase generation. Additionally, we also fine-tune the pre-trained language models on named entity recognition (NER), question answering (QA), relation extraction (RE), abstractive summarization and achieve comparable performance with that of the SOTA, showing that learning rich representation of keyphrases is indeed beneficial for many other fundamental NLP tasks.",
}
```
Please direct all questions to [email protected]
|
{"license": "apache-2.0"}
|
text2text-generation
|
bloomberg/KeyBART
|
[
"transformers",
"pytorch",
"bart",
"text2text-generation",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"has_space",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #bart #text2text-generation #license-apache-2.0 #autotrain_compatible #endpoints_compatible #has_space #region-us
|
KeyBART
=======
KeyBART, as described in "Learning Rich Representation of Keyphrases from Text" published in the Findings of NAACL 2022 (URL), pre-trains a BART-based architecture to produce a concatenated sequence of keyphrases in the CatSeqD format.
We provide some examples of the Downstream Evaluation setups and also show how the model can be used for Text-to-Text Generation in a zero-shot setting.
Downstream Evaluation
---------------------
### Keyphrase Generation
Reported Results:
#### Present Keyphrase Generation
#### Absent Keyphrase Generation
### Abstractive Summarization
Reported Results:
Zero-shot settings
------------------
Alternatively use the Hosted Inference API console provided in URL
Sample Zero Shot result:
Please cite this work using the following BibTeX entry:
Please direct all questions to dmahata@URL
|
[
"### Keyphrase Generation\n\n\nReported Results:",
"#### Present Keyphrase Generation",
"#### Absent Keyphrase Generation",
"### Abstractive Summarization\n\n\nReported Results:\n\n\n\nZero-shot settings\n------------------\n\n\nAlternatively use the Hosted Inference API console provided in URL\n\n\nSample Zero Shot result:\n\n\nPlease cite this work using the following BibTeX entry:\n\n\nPlease direct all questions to dmahata@URL"
] |
[
"TAGS\n#transformers #pytorch #bart #text2text-generation #license-apache-2.0 #autotrain_compatible #endpoints_compatible #has_space #region-us \n",
"### Keyphrase Generation\n\n\nReported Results:",
"#### Present Keyphrase Generation",
"#### Absent Keyphrase Generation",
"### Abstractive Summarization\n\n\nReported Results:\n\n\n\nZero-shot settings\n------------------\n\n\nAlternatively use the Hosted Inference API console provided in URL\n\n\nSample Zero Shot result:\n\n\nPlease cite this work using the following BibTeX entry:\n\n\nPlease direct all questions to dmahata@URL"
] |
[
50,
10,
7,
8,
62
] |
[
"passage: TAGS\n#transformers #pytorch #bart #text2text-generation #license-apache-2.0 #autotrain_compatible #endpoints_compatible #has_space #region-us \n### Keyphrase Generation\n\n\nReported Results:#### Present Keyphrase Generation#### Absent Keyphrase Generation### Abstractive Summarization\n\n\nReported Results:\n\n\n\nZero-shot settings\n------------------\n\n\nAlternatively use the Hosted Inference API console provided in URL\n\n\nSample Zero Shot result:\n\n\nPlease cite this work using the following BibTeX entry:\n\n\nPlease direct all questions to dmahata@URL"
] |
[
-0.03612354397773743,
0.14009790122509003,
-0.0033765563275665045,
0.012954792007803917,
0.033167943358421326,
-0.03365686908364296,
0.1576407104730606,
0.13981017470359802,
-0.04622672498226166,
0.016338398680090904,
0.18928009271621704,
0.1881651133298874,
0.014518477022647858,
0.1301501840353012,
-0.08133605867624283,
-0.13671736419200897,
0.045433539897203445,
0.07948146015405655,
-0.01447795145213604,
0.1246146485209465,
0.05864924192428589,
-0.06514324247837067,
0.04284847900271416,
0.002100135665386915,
-0.05769442021846771,
0.07586433738470078,
-0.026156235486268997,
-0.09618176519870758,
0.08106768131256104,
-0.00902946013957262,
0.0030705619137734175,
0.0707215890288353,
-0.08472573757171631,
-0.20940764248371124,
0.024903589859604836,
0.0325428806245327,
-0.01733437180519104,
0.07669791579246521,
0.06984865665435791,
-0.0026830167043954134,
0.058046504855155945,
-0.020838508382439613,
-0.043971553444862366,
0.044952359050512314,
-0.07358140498399734,
-0.2610580027103424,
-0.12421144545078278,
0.10059311985969543,
0.06273634731769562,
0.04154183343052864,
-0.014304123818874359,
0.18389703333377838,
-0.041384320706129074,
0.08813422173261642,
0.09673044085502625,
-0.3096811771392822,
-0.004042193293571472,
-0.014200130477547646,
0.02705598622560501,
0.07197996228933334,
0.0004528597928583622,
0.056516192853450775,
0.039350252598524094,
-0.009813105687499046,
0.09090257436037064,
-0.05838705971837044,
-0.06845198571681976,
-0.007383738178759813,
-0.009234296157956123,
-0.07527131587266922,
0.20707987248897552,
0.04858122020959854,
-0.015273899771273136,
-0.06271659582853317,
-0.04889056459069252,
-0.004926982801407576,
-0.030623728409409523,
-0.0466129295527935,
0.018733056262135506,
0.04518846422433853,
-0.032654765993356705,
-0.038105085492134094,
-0.1248798668384552,
-0.04935746267437935,
-0.06703396141529083,
-0.05955830216407776,
0.007833035662770271,
0.04306000471115112,
-0.14660364389419556,
0.06360646337270737,
-0.0034221457317471504,
-0.14309218525886536,
-0.047919802367687225,
-0.019231431186199188,
0.09515275806188583,
0.06082535535097122,
0.00860692746937275,
0.0065750060603022575,
0.17744165658950806,
0.1175549253821373,
-0.06823551654815674,
-0.00807770062237978,
-0.07867499440908432,
0.017901157960295677,
0.003430178388953209,
0.07057098299264908,
-0.06090305373072624,
-0.08557125926017761,
0.11993944644927979,
-0.07384146749973297,
0.062370408326387405,
-0.032489679753780365,
-0.0741720199584961,
0.007472681347280741,
0.15201349556446075,
0.13274168968200684,
0.1339133232831955,
0.043790053576231,
0.012727193534374237,
-0.008968805894255638,
0.17599689960479736,
-0.048487309366464615,
-0.01028235349804163,
0.0035966162104159594,
0.007823601365089417,
0.06823263317346573,
0.032186757773160934,
0.06185529753565788,
-0.11081698536872864,
-0.007961775176227093,
-0.07459761947393417,
-0.04733049497008324,
-0.022410154342651367,
-0.07952143251895905,
0.06521552056074142,
-0.09966650605201721,
0.010817475616931915,
-0.10887689888477325,
-0.15986251831054688,
-0.011022472754120827,
-0.01250289473682642,
-0.027711886912584305,
-0.04954108968377113,
-0.009592187590897083,
-0.047667115926742554,
0.06160376965999603,
-0.10530941933393478,
0.043675195425748825,
-0.06645037233829498,
0.10371588170528412,
-0.009643248282372952,
0.03405076637864113,
-0.17814505100250244,
0.055329542607069016,
-0.1645977646112442,
0.004901670850813389,
0.013742746785283089,
0.08339011669158936,
-0.052581094205379486,
0.11334121227264404,
-0.16766035556793213,
-0.024526087567210197,
-0.008434711955487728,
-0.020278245210647583,
0.06765655428171158,
0.21415261924266815,
-0.16043545305728912,
-0.0332687646150589,
0.1858942061662674,
-0.03394848853349686,
-0.173444464802742,
0.0540299266576767,
-0.006077939178794622,
0.04758919030427933,
0.09273240715265274,
0.2444263994693756,
-0.036910660564899445,
-0.059184566140174866,
-0.02892986685037613,
0.025947242975234985,
-0.01628238335251808,
-0.11585143208503723,
0.07024440169334412,
-0.050494689494371414,
0.023375939577817917,
0.07046768814325333,
0.14870107173919678,
-0.05308590456843376,
-0.00931534729897976,
-0.05431697890162468,
-0.024878935888409615,
-0.02973121963441372,
-0.13000547885894775,
-0.0022680044639855623,
0.0764104425907135,
-0.08234997093677521,
-0.09044262021780014,
0.06398672610521317,
0.013052905909717083,
-0.00045670801773667336,
0.022646551951766014,
-0.03257765248417854,
0.0465729683637619,
-0.17344579100608826,
0.04345055669546127,
-0.08948265016078949,
0.02602093480527401,
0.005117969121783972,
0.0649513304233551,
0.055538028478622437,
0.03903493657708168,
0.012375995516777039,
-0.06756921857595444,
0.027703946456313133,
-0.019330743700265884,
0.15010817348957062,
0.032870035618543625,
-0.10217241197824478,
-0.12907849252223969,
0.027038218453526497,
-0.03039977140724659,
-0.0009337872033938766,
0.021634172648191452,
-0.007685631513595581,
0.08273903280496597,
0.08622165769338608,
-0.022827023640275,
0.0048355781473219395,
0.03648493438959122,
0.04667721316218376,
-0.03388581424951553,
-0.00018659117631614208,
0.05854552239179611,
0.014730775728821754,
-0.11869063973426819,
0.18173864483833313,
-0.1471458226442337,
0.12808330357074738,
0.176640123128891,
-0.019457204267382622,
0.06797422468662262,
-0.004586035385727882,
-0.02156640961766243,
-0.029152996838092804,
0.12796510756015778,
0.027787070721387863,
0.02004767768085003,
0.04437875747680664,
0.11108353734016418,
-0.10814014077186584,
-0.1150454506278038,
-0.01746813952922821,
-0.05975070595741272,
-0.06863921135663986,
0.10116027295589447,
0.00658586947247386,
-0.2653183043003082,
0.1845642328262329,
0.14377404749393463,
0.06650484353303909,
0.20200365781784058,
-0.0204872228205204,
-0.06970345973968506,
-0.014864778146147728,
-0.06683480739593506,
0.0037149647250771523,
0.061419833451509476,
-0.05429889261722565,
0.0302999597042799,
0.0868578851222992,
0.01948939822614193,
0.06683476269245148,
-0.07490185648202896,
-0.006880675908178091,
-0.017306655645370483,
-0.042355749756097794,
-0.08835256099700928,
0.11618073284626007,
0.02056412771344185,
0.09477842599153519,
0.024020781740546227,
0.09190912544727325,
0.03416898474097252,
-0.018019840121269226,
-0.10554226487874985,
0.12475107610225677,
-0.12035136669874191,
-0.2720053195953369,
-0.060319069772958755,
0.06334435939788818,
-0.03386674076318741,
-0.01608765311539173,
0.1735423356294632,
-0.08790124207735062,
0.0005320957861840725,
-0.07658564299345016,
-0.09437069296836853,
-0.09973689913749695,
-0.03757595270872116,
-0.07529077678918839,
-0.024934733286499977,
0.07307013124227524,
-0.18020892143249512,
-0.04019395634531975,
-0.015958812087774277,
-0.06586873531341553,
0.05911628156900406,
-0.033239465206861496,
0.10594835132360458,
0.0749359205365181,
0.0065398141741752625,
0.013956052251160145,
-0.033892903476953506,
0.21042901277542114,
-0.02977781556546688,
0.02487926557660103,
0.13974569737911224,
0.038526102900505066,
0.08663947135210037,
0.14718110859394073,
0.008710294961929321,
-0.04952022805809975,
0.001285351230762899,
0.04788965359330177,
-0.06905004382133484,
-0.21896196901798248,
-0.04478210583329201,
-0.06180319935083389,
0.06105807051062584,
-0.020457036793231964,
0.04982077330350876,
0.1320490539073944,
0.051891524344682693,
-0.09649024903774261,
0.028389176353812218,
-0.07423748075962067,
0.10007370263338089,
0.1959572583436966,
0.03388199955224991,
0.13746097683906555,
-0.0945284515619278,
-0.02359699085354805,
0.09626729041337967,
0.08948981761932373,
0.021029403433203697,
0.01831832341849804,
0.11371862888336182,
0.10774635523557663,
0.14068767428398132,
0.04078667238354683,
0.06133151054382324,
-0.0213924590498209,
0.04712783917784691,
-0.050209444016218185,
-0.07517042756080627,
-0.056723274290561676,
0.0591081902384758,
-0.00008112320210784674,
-0.0749962255358696,
0.003358258167281747,
-0.10362685471773148,
0.07072558999061584,
0.1994834542274475,
0.08157708495855331,
-0.15355128049850464,
-0.05955585837364197,
0.0663333386182785,
-0.07491129636764526,
-0.07263937592506409,
0.009971092455089092,
-0.08463084697723389,
-0.0608784556388855,
0.059256456792354584,
0.002556747989729047,
0.14823982119560242,
-0.048939049243927,
0.06974200904369354,
-0.1302759051322937,
-0.11985611170530319,
-0.016151512041687965,
0.09994734078645706,
-0.3016359508037567,
0.18467792868614197,
0.02276001125574112,
0.021554304286837578,
-0.06895334273576736,
-0.002264280803501606,
-0.0007880489574745297,
-0.06510735303163528,
0.09276900440454483,
-0.01086728647351265,
-0.020223286002874374,
-0.04248872399330139,
-0.0872742161154747,
0.08326680213212967,
-0.03488150238990784,
-0.07458367198705673,
0.03807326406240463,
0.0029300269670784473,
0.034634269773960114,
-0.018401142209768295,
0.07611675560474396,
-0.16219115257263184,
-0.10947073996067047,
0.05537501722574234,
0.12749359011650085,
0.006479071453213692,
-0.046867735683918,
0.004959702026098967,
0.019113248214125633,
0.0861508846282959,
-0.01990879327058792,
-0.09818284958600998,
-0.09891979396343231,
-0.038302384316921234,
0.10499295592308044,
-0.1033119410276413,
0.021049190312623978,
-0.12181538343429565,
-0.021344363689422607,
-0.015682745724916458,
-0.13291984796524048,
0.09827903658151627,
-0.06387464702129364,
-0.04480763152241707,
-0.04231487214565277,
0.12919749319553375,
-0.0197740625590086,
-0.022059908136725426,
0.04673280194401741,
0.04638611897826195,
-0.08704192191362381,
-0.08510010689496994,
0.07081691920757294,
-0.08848078548908234,
0.07092124223709106,
-0.10267233103513718,
0.01060937438160181,
-0.0019780502188950777,
-0.03599177673459053,
-0.0826689824461937,
0.1662372648715973,
0.2555430233478546,
-0.0902954638004303,
0.1432887464761734,
0.1603836715221405,
-0.09636478126049042,
-0.21743144094944,
-0.16165706515312195,
-0.06643813103437424,
-0.0639624148607254,
0.014932457357645035,
-0.12550361454486847,
0.03099403716623783,
0.0372600220143795,
-0.08807440102100372,
0.04170108959078789,
-0.07430560141801834,
-0.1030302345752716,
0.17272622883319855,
0.03145332634449005,
0.23432636260986328,
-0.20206713676452637,
-0.10711562633514404,
-0.08534889668226242,
-0.17491605877876282,
0.1738775372505188,
-0.03386486694216728,
0.05969035252928734,
-0.07290133833885193,
0.12348523736000061,
0.013625891879200935,
-0.04353712126612663,
0.08448042720556259,
-0.05557527020573616,
0.04062037542462349,
-0.08122362196445465,
-0.0147251533344388,
-0.015382559038698673,
-0.060952771455049515,
0.0780896246433258,
-0.1712600588798523,
0.054686520248651505,
-0.09426780790090561,
-0.0014866732526570559,
-0.07164379209280014,
0.06148312985897064,
-0.042630214244127274,
-0.07665025442838669,
-0.03791433572769165,
-0.048024747520685196,
0.0043962979689240456,
-0.01630742847919464,
0.18557605147361755,
-0.118923120200634,
0.13791143894195557,
0.18869252502918243,
0.03523675724864006,
-0.05067183077335358,
0.07466840744018555,
0.0008059043902903795,
-0.08121734857559204,
0.08994326740503311,
-0.21501706540584564,
0.08709626644849777,
0.08576567471027374,
-0.007307632360607386,
0.027049509808421135,
0.013255463913083076,
-0.06824545562267303,
0.007889441214501858,
0.054379403591156006,
-0.15004348754882812,
-0.04471324011683464,
0.008252245374023914,
-0.017288533970713615,
0.052815552800893784,
0.09424546360969543,
0.18229179084300995,
-0.0173537265509367,
-0.06939706206321716,
0.006611458957195282,
0.011400323361158371,
-0.05316765233874321,
0.07215427607297897,
0.018865613266825676,
0.048560310155153275,
-0.08996862173080444,
0.07948686927556992,
0.05816563218832016,
-0.039378441870212555,
0.09352699667215347,
0.0762273296713829,
-0.043904274702072144,
-0.07704076170921326,
-0.01922416314482689,
0.12002082914113998,
-0.07382621616125107,
-0.04817300662398338,
-0.04853921756148338,
-0.02732411026954651,
0.0803370475769043,
0.12845295667648315,
0.012872439809143543,
0.0914139449596405,
-0.0018160635372623801,
-0.02877371571958065,
-0.07232100516557693,
0.11536632478237152,
-0.025842083618044853,
-0.01955721713602543,
-0.06699780374765396,
-0.05779096856713295,
-0.012796303257346153,
0.03367457911372185,
-0.02870458923280239,
-0.007777603343129158,
-0.12426189333200455,
-0.02342887781560421,
-0.23241013288497925,
-0.028763219714164734,
-0.10660307109355927,
-0.014328447170555592,
0.002494771033525467,
-0.045951373875141144,
-0.05234208703041077,
0.026245852932333946,
-0.08618789911270142,
-0.03634015843272209,
-0.01340717263519764,
0.12330502271652222,
-0.16506104171276093,
-0.03305653855204582,
0.04317675530910492,
0.029568960890173912,
0.12330089509487152,
0.07415260374546051,
-0.06873731315135956,
0.03159867227077484,
-0.16515259444713593,
0.015713423490524292,
0.0016512330621480942,
0.01920952834188938,
0.03918415680527687,
0.019306888803839684,
-0.03775642439723015,
0.1060129702091217,
0.040333133190870285,
0.00553601561114192,
0.025139471516013145,
-0.13149988651275635,
-0.152694970369339,
0.0014142126310616732,
-0.04857000708580017,
-0.024331433698534966,
0.008490839041769505,
0.12074514478445053,
0.018611915409564972,
0.08875010162591934,
-0.05193427577614784,
0.0735427662730217,
-0.16456706821918488,
0.006607765331864357,
-0.0007884087972342968,
-0.12277994304895401,
-0.1514044553041458,
-0.03649927303195,
0.023236971348524094,
-0.02827509120106697,
0.21016596257686615,
-0.028686244040727615,
-0.016997961327433586,
0.04523305594921112,
-0.03900204971432686,
0.051636580377817154,
-0.00194521842058748,
0.3168668746948242,
0.013672231696546078,
-0.0049331411719322205,
-0.01595393754541874,
0.03602210059762001,
-0.04068160802125931,
0.09132599085569382,
0.03917260095477104,
0.11461345106363297,
0.162804514169693,
0.06284572184085846,
0.007072548847645521,
-0.00415934668853879,
-0.028049752116203308,
-0.06504323333501816,
0.06458821892738342,
0.10611701756715775,
-0.019041113555431366,
-0.0011256217258051038,
0.21757623553276062,
-0.08975345641374588,
0.006461225915700197,
-0.030804166570305824,
0.005251062568277121,
-0.14310844242572784,
-0.1533425748348236,
-0.08503621071577072,
-0.09080132842063904,
0.030156394466757774,
-0.09518786519765854,
0.05779530480504036,
0.06791268289089203,
0.04005436599254608,
-0.062295034527778625,
0.05823628604412079,
0.0977695882320404,
-0.04523409903049469,
0.06311196088790894,
-0.010909166187047958,
-0.05736808106303215,
0.0076039936393499374,
0.09740481525659561,
0.02610582299530506,
0.06498062610626221,
0.015275583602488041,
0.05029040202498436,
-0.07268904149532318,
0.05267883092164993,
-0.09816683828830719,
-0.10184156894683838,
-0.0429498665034771,
0.06884690374135971,
0.07523497194051743,
0.24576729536056519,
0.02468865178525448,
-0.02481023594737053,
0.045259345322847366,
0.19769148528575897,
-0.06680350750684738,
-0.05317080393433571,
-0.013390569016337395,
0.21476630866527557,
-0.0032044388353824615,
0.022287774831056595,
0.0022864635102450848,
-0.0347004234790802,
0.010393277741968632,
0.2016230672597885,
0.2677469551563263,
0.021540137007832527,
-0.010857372544705868,
-0.07332754135131836,
0.020630940794944763,
0.01938617415726185,
0.04788627102971077,
0.07064253091812134,
0.20743480324745178,
-0.07170651853084564,
0.0698992908000946,
-0.040579937398433685,
-0.017283815890550613,
-0.06326267868280411,
0.14618626236915588,
0.00002256972402392421,
-0.09867993742227554,
0.009940718300640583,
0.10195084661245346,
-0.15115350484848022,
-0.11577928811311722,
-0.03449495509266853,
-0.05055610090494156,
-0.05412786453962326,
0.0503908172249794,
0.09518781304359436,
-0.02752685546875,
-0.018117161467671394,
-0.016116390004754066,
0.06564104557037354,
0.09541520476341248,
-0.033675841987133026,
-0.10467945039272308,
-0.014175951480865479,
0.05918211117386818,
-0.05218911170959473,
0.11781125515699387,
-0.023803899064660072,
0.04841744154691696,
0.12276487052440643,
0.01592336967587471,
-0.10997837781906128,
0.08124788850545883,
0.03218913823366165,
-0.08847896009683609,
0.024625273421406746,
-0.03743845969438553,
0.019819317385554314,
0.04233383387327194,
0.08407670259475708,
-0.1319216638803482,
0.016166195273399353,
0.01607944257557392,
-0.06167178228497505,
-0.08181459456682205,
0.03564024344086647,
-0.08843029290437698,
0.06059562787413597,
0.008016175590455532,
-0.08969993144273758,
-0.01950952224433422,
-0.05638947710394859,
0.033014409244060516,
-0.019535869359970093,
-0.07601634413003922,
-0.0251154825091362,
-0.11467397212982178,
0.03130879998207092,
0.080115407705307,
0.02554045245051384,
-0.264175683259964,
0.027257530018687248,
-0.07010133564472198,
-0.023323724046349525,
-0.10931389778852463,
0.07069282233715057,
0.06514690071344376,
-0.03648250177502632,
-0.037509672343730927,
-0.16925501823425293,
-0.04153125733137131,
0.08305200934410095,
-0.07355722784996033,
-0.0312919020652771
] |
null | null | null |
# `paper-rec` Model Card
Last updated: 2022-02-04
## Model Details
The goal of `paper-rec` is to recommend to users which scientific papers to read next, based on their preferences. This is a test model used to explore Hugging Face Hub capabilities and identify requirements to enable support for the recommendation task in the ecosystem.
### Model date
2022-02-04
### Model type
Recommender System model with support of a Language Model for feature extraction.
### Paper & samples
The overall idea for `paper-rec` test model is inspired by this work: [NU:BRIEF – A Privacy-aware Newsletter Personalization Engine for Publishers](https://arxiv.org/abs/2109.03955).
However, for `paper-rec`, we use a different language model more suitable for longer text, namely *Sentence Transformers*: [Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks](https://arxiv.org/abs/1908.10084), in particular: [sentence-transformers/all-MiniLM-L6-v2](https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2).
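A rough sketch of how such a pipeline can be put together (added for illustration only; the cosine-similarity ranking, the example titles, and the user-profile text are assumptions, not the exact mechanism used by `paper-rec`):
```
from sentence_transformers import SentenceTransformer, util

encoder = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")
# Candidate papers, e.g. titles/abstracts collected from the arXiv RSS feeds
papers = [
    "Attention Is All You Need",
    "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
]
# A short textual description of the user's interests
user_profile = "sentence embeddings and transformer models for NLP"
# Embed both sides with the same model and rank candidates by cosine similarity
paper_emb = encoder.encode(papers, convert_to_tensor=True)
user_emb = encoder.encode(user_profile, convert_to_tensor=True)
scores = util.cos_sim(user_emb, paper_emb)[0]
for title, score in sorted(zip(papers, scores.tolist()), key=lambda x: -x[1]):
    print(f"{score:.3f}  {title}")
```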
## Model Use
The intended direct users are recommender systems' practitioners and enthusiasts that would like to experiment with the task of scientific paper recommendation.
## Data, Performance, and Limitations
### Data
The data used for this model corresponds to the [RSS news feeds for arXiv updates](https://arxiv.org/help/rss) accessed on 2022-02-04, in particular the ones related to Machine Learning and AI:
1. [Artificial Intelligence](http://arxiv.org/rss/cs.AI)
1. [Computation and Language](http://arxiv.org/rss/cs.CL)
1. [Computer Vision and Pattern Recognition](http://arxiv.org/rss/cs.CV)
1. [Information Retrieval](http://arxiv.org/rss/cs.IR)
1. [Machine Learning (cs)](http://arxiv.org/rss/cs.LG)
1. [Machine Learning (stat)](http://arxiv.org/rss/stat.ML)
### Performance
N/A
## Limitations
The model is limited to the papers fetched on 2022-02-04, that is, those papers are the only ones it can recommend.
|
{"language": ["en"], "license": "mit", "tags": ["recsys", "pytorch", "sentence_transformers"]}
| null |
bluebalam/paper-rec
|
[
"recsys",
"pytorch",
"sentence_transformers",
"en",
"arxiv:2109.03955",
"arxiv:1908.10084",
"license:mit",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"2109.03955",
"1908.10084"
] |
[
"en"
] |
TAGS
#recsys #pytorch #sentence_transformers #en #arxiv-2109.03955 #arxiv-1908.10084 #license-mit #region-us
|
# 'paper-rec' Model Card
Last updated: 2022-02-04
## Model Details
The goal of 'paper-rec' is to recommend to users which scientific papers to read next, based on their preferences. This is a test model used to explore Hugging Face Hub capabilities and identify requirements to enable support for the recommendation task in the ecosystem.
### Model date
2022-02-04
### Model type
Recommender System model with support of a Language Model for feature extraction.
### Paper & samples
The overall idea for 'paper-rec' test model is inspired by this work: NU:BRIEF – A Privacy-aware Newsletter Personalization Engine for Publishers.
However, for 'paper-rec', we use a different language model more suitable for longer text, namely *Sentence Transformers*: Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks, in particular: sentence-transformers/all-MiniLM-L6-v2.
## Model Use
The intended direct users are recommender systems' practitioners and enthusiasts that would like to experiment with the task of scientific paper recommendation.
## Data, Performance, and Limitations
### Data
The data used for this model corresponds to the RSS news feeds for arXiv updates accessed on 2022-02-04, in particular the ones related to Machine Learning and AI:
1. Artificial Intelligence
1. Computation and Language
1. Computer Vision and Pattern Recognition
1. Information Retrieval
1. Machine Learning (cs)
1. Machine Learning (stat)
### Performance
N/A
## Limitations
The model is limited to the papers fetched on 2022-02-04, that is, those papers are the only ones it can recommend.
|
[
"# 'paper-rec' Model Card\r\n\r\nLast updated: 2022-02-04",
"## Model Details\r\n'paper-rec' goal is to recommend users what scientific papers to read next based on their preferences. This is a test model used to explore Hugging Face Hub capabilities and identify requirements to enable support for recommendation task in the ecosystem.",
"### Model date\r\n2022-02-04",
"### Model type\r\nRecommender System model with support of a Language Model for feature extraction.",
"### Paper & samples\r\nThe overall idea for 'paper-rec' test model is inspired by this work: NU:BRIEF – A Privacy-aware Newsletter Personalization Engine for Publishers.\r\n\r\nHowever, for 'paper-rec', we use a different language model more suitable for longer text, namely *Sentence Transformers*: Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks, in particular: sentence-transformers/all-MiniLM-L6-v2.",
"## Model Use\r\nThe intended direct users are recommender systems' practitioners and enthusiasts that would like to experiment with the task of scientific paper recommendation.",
"## Data, Performance, and Limitations",
"### Data \r\nThe data used for this model corresponds to the RSS news feeds for arXiv updates accessed on 2022-02-04. In particular to the ones related to Machine Learning and AI:\r\n\r\n1. Artificial Intelligence\r\n1. Computation and Language\r\n1. Computer Vision and Pattern Recognition\r\n1. Information Retrieval\r\n1. Machine Learning (cs)\r\n1. Machine Learning (stat)",
"### Performance \r\nN/A",
"## Limitations\r\nThe model is limited to the papers fetched on 2022-02-04, that is, those papers are the only ones it can recommend."
] |
[
"TAGS\n#recsys #pytorch #sentence_transformers #en #arxiv-2109.03955 #arxiv-1908.10084 #license-mit #region-us \n",
"# 'paper-rec' Model Card\r\n\r\nLast updated: 2022-02-04",
"## Model Details\r\n'paper-rec' goal is to recommend users what scientific papers to read next based on their preferences. This is a test model used to explore Hugging Face Hub capabilities and identify requirements to enable support for recommendation task in the ecosystem.",
"### Model date\r\n2022-02-04",
"### Model type\r\nRecommender System model with support of a Language Model for feature extraction.",
"### Paper & samples\r\nThe overall idea for 'paper-rec' test model is inspired by this work: NU:BRIEF – A Privacy-aware Newsletter Personalization Engine for Publishers.\r\n\r\nHowever, for 'paper-rec', we use a different language model more suitable for longer text, namely *Sentence Transformers*: Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks, in particular: sentence-transformers/all-MiniLM-L6-v2.",
"## Model Use\r\nThe intended direct users are recommender systems' practitioners and enthusiasts that would like to experiment with the task of scientific paper recommendation.",
"## Data, Performance, and Limitations",
"### Data \r\nThe data used for this model corresponds to the RSS news feeds for arXiv updates accessed on 2022-02-04. In particular to the ones related to Machine Learning and AI:\r\n\r\n1. Artificial Intelligence\r\n1. Computation and Language\r\n1. Computer Vision and Pattern Recognition\r\n1. Information Retrieval\r\n1. Machine Learning (cs)\r\n1. Machine Learning (stat)",
"### Performance \r\nN/A",
"## Limitations\r\nThe model is limited to the papers fetched on 2022-02-04, that is, those papers are the only ones it can recommend."
] |
[
42,
14,
56,
7,
20,
114,
35,
8,
74,
6,
32
] |
[
"passage: TAGS\n#recsys #pytorch #sentence_transformers #en #arxiv-2109.03955 #arxiv-1908.10084 #license-mit #region-us \n# 'paper-rec' Model Card\r\n\r\nLast updated: 2022-02-04## Model Details\r\n'paper-rec' goal is to recommend users what scientific papers to read next based on their preferences. This is a test model used to explore Hugging Face Hub capabilities and identify requirements to enable support for recommendation task in the ecosystem.### Model date\r\n2022-02-04### Model type\r\nRecommender System model with support of a Language Model for feature extraction.### Paper & samples\r\nThe overall idea for 'paper-rec' test model is inspired by this work: NU:BRIEF – A Privacy-aware Newsletter Personalization Engine for Publishers.\r\n\r\nHowever, for 'paper-rec', we use a different language model more suitable for longer text, namely *Sentence Transformers*: Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks, in particular: sentence-transformers/all-MiniLM-L6-v2.## Model Use\r\nThe intended direct users are recommender systems' practitioners and enthusiasts that would like to experiment with the task of scientific paper recommendation.## Data, Performance, and Limitations### Data \r\nThe data used for this model corresponds to the RSS news feeds for arXiv updates accessed on 2022-02-04. In particular to the ones related to Machine Learning and AI:\r\n\r\n1. Artificial Intelligence\r\n1. Computation and Language\r\n1. Computer Vision and Pattern Recognition\r\n1. Information Retrieval\r\n1. Machine Learning (cs)\r\n1. Machine Learning (stat)### Performance \r\nN/A## Limitations\r\nThe model is limited to the papers fetched on 2022-02-04, that is, those papers are the only ones it can recommend."
] |
[
0.02965588867664337,
0.09337615966796875,
-0.004602936562150717,
-0.007138981483876705,
0.0221999604254961,
-0.0328727588057518,
0.06180863454937935,
0.06965789943933487,
0.022222883999347687,
0.07933159172534943,
0.0715288296341896,
0.02114756777882576,
0.04703914001584053,
0.0845261886715889,
0.015572513453662395,
-0.2279166728258133,
0.06458837538957596,
-0.021318286657333374,
0.08847469091415405,
0.0990225076675415,
0.12279490381479263,
-0.08349274098873138,
0.09894391149282455,
0.005429933313280344,
0.047845691442489624,
-0.007377343717962503,
-0.05701315775513649,
-0.028368325904011726,
0.07127149403095245,
0.05868170037865639,
0.07732202857732773,
0.03594131022691727,
0.0650327056646347,
-0.19913749396800995,
0.0015711031155660748,
0.03998497873544693,
0.007628429681062698,
0.05054013058543205,
0.08263286203145981,
0.002780663315206766,
0.2085932195186615,
-0.020854566246271133,
0.07944802194833755,
0.03233163803815842,
-0.0586731992661953,
-0.04714182764291763,
-0.05409770831465721,
-0.0019121966324746609,
0.05293973162770271,
0.10315583646297455,
-0.069841668009758,
0.15234149992465973,
-0.08683056384325027,
0.057803113013505936,
0.25343936681747437,
-0.016128791496157646,
-0.021694540977478027,
-0.021092718467116356,
0.1905481070280075,
0.07432033866643906,
-0.03762637451291084,
-0.01240839995443821,
0.020743418484926224,
0.04328816011548042,
0.05895819887518883,
-0.008158352226018906,
0.07712693512439728,
0.01747136376798153,
-0.168564572930336,
-0.07224193215370178,
0.12486013770103455,
0.01242249459028244,
-0.10069206357002258,
-0.11472808569669724,
-0.04283370450139046,
0.09515608102083206,
-0.014351090416312218,
-0.10942036658525467,
0.03854774683713913,
-0.03150617703795433,
0.05339616537094116,
-0.06639654189348221,
-0.11408267170190811,
-0.04477434232831001,
-0.031262315809726715,
0.11820512264966965,
0.028769806027412415,
0.014367533847689629,
-0.009657980874180794,
0.10105209052562714,
0.01823054440319538,
-0.032289791852235794,
-0.05513588339090347,
-0.07417279481887817,
-0.06094679608941078,
0.008237674832344055,
-0.06130998581647873,
-0.14626789093017578,
-0.0023195738904178143,
0.12332123517990112,
0.041565749794244766,
0.002439341973513365,
0.08967098593711853,
0.03154454380273819,
0.08702275156974792,
0.08907730877399445,
-0.10912247002124786,
0.0421433225274086,
-0.038818199187517166,
0.045085735619068146,
-0.015016148798167706,
-0.01681923307478428,
-0.014748923480510712,
0.08360949158668518,
0.105608269572258,
0.001650591497309506,
0.043093904852867126,
0.007296599913388491,
-0.02907579019665718,
0.0015935312258079648,
0.14046871662139893,
-0.0702083557844162,
-0.006563172675669193,
-0.016467126086354256,
-0.055435534566640854,
0.0011453646002337337,
0.029156867414712906,
0.03425445780158043,
-0.043072935193777084,
0.10427332669496536,
-0.10728594660758972,
-0.04436355084180832,
-0.09262911975383759,
-0.1440640240907669,
0.04921519011259079,
-0.048750001937150955,
-0.003888562321662903,
-0.13260819017887115,
-0.1710471212863922,
-0.09045588225126266,
0.04974401369690895,
-0.05775619298219681,
0.006868720520287752,
-0.027871785685420036,
-0.030322620645165443,
-0.03266233950853348,
0.02452203817665577,
0.05311763286590576,
-0.016662059351801872,
0.022977443411946297,
-0.09382262825965881,
0.06492623686790466,
-0.019171906635165215,
0.020267389714717865,
-0.13165700435638428,
0.031058643013238907,
-0.06746421009302139,
0.06926696002483368,
-0.11196654289960861,
0.03096778132021427,
-0.0971861332654953,
-0.06262508779764175,
0.005861404351890087,
0.0478891059756279,
0.036723073571920395,
0.13046810030937195,
-0.23272687196731567,
-0.021142244338989258,
0.084574393928051,
-0.10073354840278625,
-0.062143415212631226,
0.15419837832450867,
-0.053012698888778687,
0.1287001222372055,
0.11261744052171707,
0.1256183683872223,
0.019422566518187523,
-0.11410002410411835,
-0.09507373720407486,
-0.05260305106639862,
-0.061220359057188034,
0.12820352613925934,
0.09190376102924347,
-0.01106353010982275,
0.05679331347346306,
0.03170301020145416,
-0.09648709744215012,
-0.04322853311896324,
-0.03700694814324379,
-0.02421899139881134,
0.013473611325025558,
-0.05126562342047691,
0.014279479160904884,
-0.008515688590705395,
0.0007665676530450583,
-0.04019956290721893,
-0.11752408742904663,
0.005959345027804375,
0.05918844789266586,
-0.054839327931404114,
0.05155865103006363,
-0.08359742164611816,
0.038325920701026917,
0.052819713950157166,
0.03361111134290695,
-0.15695945918560028,
0.02203862927854061,
0.06528054922819138,
-0.13851068913936615,
0.08264636993408203,
0.06777845323085785,
0.01718478836119175,
0.011341911740601063,
-0.009230423718690872,
0.0051512336358428,
-0.06677987426519394,
-0.0008408640860579908,
-0.09635888785123825,
-0.11133139580488205,
0.0038895579054951668,
-0.06869952380657196,
0.09471580386161804,
-0.1426718533039093,
0.022476911544799805,
0.029891053214669228,
0.05053478851914406,
0.054032713174819946,
-0.06292293220758438,
0.016959071159362793,
-0.0020284391939640045,
-0.04077575355768204,
0.014652974903583527,
0.004510646685957909,
-0.027234967797994614,
-0.05969727411866188,
0.046506281942129135,
-0.11414194107055664,
-0.1519642174243927,
0.020084837451577187,
0.012526046484708786,
-0.036675915122032166,
0.024971643462777138,
-0.04089315980672836,
-0.025857601314783096,
-0.11321897804737091,
-0.16773708164691925,
0.19877301156520844,
0.03268011659383774,
0.020065398886799812,
-0.11917351931333542,
-0.07902023941278458,
-0.024385539814829826,
-0.07852210849523544,
-0.04065418615937233,
0.13003826141357422,
0.07305492460727692,
-0.11536800861358643,
0.024174589663743973,
-0.03038780391216278,
-0.08888112008571625,
0.06963616609573364,
0.029105456545948982,
-0.11660264432430267,
0.011678568087518215,
-0.058993592858314514,
-0.008281197398900986,
0.06369470804929733,
-0.04207374528050423,
0.019090980291366577,
0.07523784786462784,
-0.013696628622710705,
0.037897106260061264,
-0.06877222657203674,
0.03616686910390854,
0.019460873678326607,
-0.004401711281388998,
-0.04319978877902031,
-0.008895708248019218,
-0.03534386679530144,
0.09242098033428192,
-0.028178315609693527,
0.00879153236746788,
-0.06716763228178024,
-0.03217053413391113,
-0.1595647782087326,
0.17480574548244476,
-0.03460903465747833,
-0.21623007953166962,
-0.15018095076084137,
0.05101693421602249,
0.02306336537003517,
0.011079341173171997,
0.05564618483185768,
-0.050886120647192,
-0.1065480187535286,
-0.18167081475257874,
-0.004578659310936928,
0.04406411200761795,
-0.059865131974220276,
-0.09525545686483383,
0.011637234129011631,
-0.004746428225189447,
-0.11690284311771393,
0.002513585612177849,
-0.048683181405067444,
0.06807564198970795,
0.03627603128552437,
-0.0014964902075007558,
0.16477006673812866,
0.0970449149608612,
-0.0059868888929486275,
-0.03686366230249405,
-0.02670440822839737,
0.2960505187511444,
-0.06483100354671478,
0.12406963855028152,
0.0807785615324974,
-0.07726027071475983,
0.07186806946992874,
0.09646327793598175,
-0.005278557538986206,
-0.051967862993478775,
0.07442180067300797,
0.018653061240911484,
-0.06244366243481636,
-0.2591648995876312,
-0.07393904030323029,
0.010920686647295952,
0.004270114935934544,
0.03397400304675102,
0.026787878945469856,
0.05098714306950569,
0.046057216823101044,
-0.03781444951891899,
0.0037854397669434547,
-0.005890295375138521,
0.07827844470739365,
-0.007501030340790749,
-0.02135205641388893,
0.0881061851978302,
-0.04706880822777748,
-0.03231790289282799,
0.08731935173273087,
-0.07861508429050446,
0.3089583218097687,
-0.012524958699941635,
0.14449523389339447,
0.08392999321222305,
-0.11344568431377411,
0.017865225672721863,
0.10751096159219742,
-0.06081586331129074,
0.033877305686473846,
-0.06507404148578644,
-0.03864660859107971,
-0.011664851568639278,
0.1504179984331131,
0.011709203012287617,
-0.08787484467029572,
0.00279802642762661,
-0.003442585002630949,
0.04840686544775963,
0.09202840924263,
0.025029709562659264,
-0.15397994220256805,
-0.026693711057305336,
0.09986676275730133,
-0.07702454924583435,
-0.03164711594581604,
-0.01781410351395607,
0.13349302113056183,
-0.08991625905036926,
-0.018642934039235115,
-0.0062371306121349335,
0.10885374248027802,
-0.06559918820858002,
0.016986817121505737,
-0.06840427219867706,
0.05674328655004501,
-0.020644789561629295,
0.11494377255439758,
-0.14338089525699615,
0.20067453384399414,
-0.03497007489204407,
0.046675972640514374,
-0.09288494288921356,
0.005346009973436594,
-0.006471509579569101,
-0.009771150536835194,
0.1513986587524414,
0.016751118004322052,
-0.09455689042806625,
0.011686713434755802,
-0.07942376285791397,
0.024299027398228645,
0.03730843588709831,
-0.04561120271682739,
0.11044220626354218,
-0.01955711469054222,
0.012647566385567188,
-0.014514246955513954,
0.06447836756706238,
-0.10640842467546463,
-0.155714213848114,
0.05854855477809906,
-0.08763454109430313,
0.03946305066347122,
-0.0727861225605011,
-0.04932000860571861,
-0.00018471117073204368,
0.04536088928580284,
-0.08153563737869263,
-0.07054485380649567,
-0.09784576296806335,
0.004757724702358246,
0.1022992804646492,
-0.07274322956800461,
0.008551020175218582,
0.03419119492173195,
0.10343049466609955,
-0.03184320405125618,
0.008631563745439053,
0.008603432215750217,
-0.1029980331659317,
-0.15933099389076233,
-0.06637348234653473,
0.037152621895074844,
0.16897331178188324,
0.12621915340423584,
0.01598489284515381,
0.014756659977138042,
-0.09131941944360733,
-0.08796068280935287,
0.001135222497396171,
0.10287725925445557,
-0.010992673225700855,
0.008697683922946453,
-0.05829945206642151,
-0.04040323197841644,
-0.13506224751472473,
-0.08067728579044342,
0.1035103052854538,
0.15541891753673553,
-0.03935137391090393,
0.1083996593952179,
0.19777634739875793,
-0.09698016941547394,
-0.19741271436214447,
-0.06813541799783707,
0.07435709238052368,
-0.014230124652385712,
0.010021526366472244,
-0.13226386904716492,
0.05909352749586105,
0.07579505443572998,
-0.005199000239372253,
-0.07589361816644669,
-0.1565612256526947,
-0.1385691910982132,
0.12113644182682037,
0.04736538231372833,
-0.005715643987059593,
-0.10164876282215118,
-0.050831668078899384,
-0.04170121252536774,
-0.043722838163375854,
0.06995126605033875,
0.01818236894905567,
0.022957980632781982,
0.035766541957855225,
0.023972511291503906,
0.018162617459893227,
-0.06378182023763657,
0.16085825860500336,
0.027424393221735954,
0.05311393365263939,
-0.044534217566251755,
-0.06733732670545578,
0.010554861277341843,
-0.026175593957304955,
0.1648809015750885,
-0.023940542712807655,
0.036387138068675995,
-0.05780524015426636,
-0.05784907564520836,
-0.08500795811414719,
0.026384873315691948,
-0.01200985535979271,
-0.09517478197813034,
-0.15720579028129578,
0.055627863854169846,
0.07717812061309814,
0.002422185381874442,
-0.045628927648067474,
-0.10636229813098907,
-0.027716070413589478,
0.07972890138626099,
0.20791038870811462,
-0.047367893159389496,
-0.041552744805812836,
0.0436239130795002,
-0.01927303709089756,
0.02012799307703972,
-0.08980689942836761,
-0.007710047531872988,
0.11554265022277832,
-0.005902180913835764,
0.1658182144165039,
0.007625619415193796,
-0.16453351080417633,
0.012437950819730759,
0.05136188119649887,
-0.1239253506064415,
-0.2078874558210373,
-0.04566425457596779,
0.1389065533876419,
-0.1831781268119812,
-0.06349483877420425,
0.0674016922712326,
-0.06265603005886078,
-0.017719946801662445,
-0.029103843495249748,
0.10445750504732132,
0.039948608726263046,
0.15234145522117615,
0.06774956732988358,
0.04169447720050812,
-0.0507594496011734,
0.00880435574799776,
0.08301748335361481,
-0.09461404383182526,
0.049593936651945114,
0.07650553435087204,
-0.06204444542527199,
-0.03845622390508652,
-0.051581162959337234,
0.08389867842197418,
-0.033726658672094345,
-0.07288387417793274,
-0.0658981129527092,
-0.08623992651700974,
0.06618447601795197,
0.13416244089603424,
0.04784893989562988,
-0.026646435260772705,
-0.02001098357141018,
0.02682464011013508,
-0.03763421252369881,
0.10869935899972916,
0.06417670100927353,
0.039063725620508194,
-0.044901713728904724,
-0.005516502540558577,
0.012713262811303139,
0.002622725907713175,
-0.03960124030709267,
0.023646565154194832,
-0.06837794929742813,
-0.010934371501207352,
-0.08557049185037613,
0.0325358584523201,
-0.0575438030064106,
-0.03323384374380112,
0.01966780796647072,
-0.04652544483542442,
-0.011917360126972198,
-0.01574116013944149,
-0.060956090688705444,
-0.020607352256774902,
-0.005969776771962643,
0.09078469127416611,
-0.10959062725305557,
0.008064712397754192,
0.1154790148139,
-0.05730940401554108,
0.06288819015026093,
0.00010393991396995261,
-0.04459957405924797,
0.012048259377479553,
-0.08412324637174606,
0.0575941801071167,
0.036860667169094086,
0.06683368235826492,
-0.02648351527750492,
-0.17203134298324585,
-0.04643533378839493,
0.0006629446288570762,
-0.019115734845399857,
-0.013024074025452137,
-0.012587791308760643,
-0.04524051770567894,
0.14791716635227203,
0.08348393440246582,
-0.11730103194713593,
-0.07517518103122711,
-0.004257371183484793,
0.002485969103872776,
0.04253925010561943,
0.15685738623142242,
-0.015212969854474068,
0.040797293186187744,
-0.10509715229272842,
0.03369035944342613,
0.05174470692873001,
-0.015852047130465508,
0.0154797974973917,
-0.08196868002414703,
0.0011479253880679607,
-0.0108451247215271,
0.19638079404830933,
0.01294759102165699,
-0.04498673975467682,
0.05374569818377495,
0.12729208171367645,
0.033493056893348694,
0.04061519354581833,
0.014019829221069813,
-0.01906873658299446,
-0.03779061511158943,
-0.0807173028588295,
-0.04275454208254814,
0.002054703887552023,
-0.06390685588121414,
0.1656508445739746,
0.13883785903453827,
0.09169064462184906,
0.07073748856782913,
0.03570994362235069,
0.013012786395847797,
-0.09869316220283508,
-0.007661897223442793,
0.053389668464660645,
0.023371044546365738,
-0.00795950461179018,
0.25612661242485046,
0.11242927610874176,
-0.12461318075656891,
0.07056809961795807,
0.012737818993628025,
-0.05273723229765892,
-0.0441615916788578,
-0.11042289435863495,
-0.06539173424243927,
-0.022872252389788628,
-0.027239875867962837,
-0.11249284446239471,
0.019783353433012962,
0.04397248849272728,
-0.010869516059756279,
-0.017911698669195175,
0.06939087808132172,
-0.08455498516559601,
-0.06408633291721344,
0.029933340847492218,
-0.024573706090450287,
-0.05063151940703392,
-0.04670161381363869,
0.024667147547006607,
0.01258850283920765,
0.106484055519104,
0.05285094305872917,
0.07791433483362198,
-0.02740112505853176,
-0.023915497586131096,
-0.027953684329986572,
-0.08813487738370895,
-0.00952139776200056,
-0.006732533220201731,
-0.048346780240535736,
0.0754559263586998,
0.04725572094321251,
-0.014294661581516266,
0.023510929197072983,
0.17347052693367004,
-0.040281932801008224,
-0.07497592270374298,
-0.141855388879776,
0.14031922817230225,
0.04131476953625679,
0.039436016231775284,
0.06254512816667557,
-0.13854949176311493,
-0.032901838421821594,
0.2269284576177597,
0.11739255487918854,
-0.01343414094299078,
-0.03060724213719368,
-0.016211407259106636,
0.025265978649258614,
0.01326175406575203,
0.046197209507226944,
0.023500990122556686,
0.3016984760761261,
-0.08328347653150558,
0.06773390620946884,
-0.05419309437274933,
-0.0459883026778698,
-0.025526845827698708,
0.11307063698768616,
0.06665521115064621,
-0.04295928403735161,
-0.07535301148891449,
0.08262959867715836,
-0.09794194996356964,
-0.18109230697155,
-0.16252869367599487,
-0.030213739722967148,
-0.0571000874042511,
0.014310713857412338,
0.05021072179079056,
-0.034860000014305115,
0.08441253006458282,
0.01694876328110695,
0.0011040987446904182,
0.12270846962928772,
0.03884643688797951,
-0.1143137738108635,
0.022779032588005066,
0.13449406623840332,
-0.05143347010016441,
0.18891294300556183,
0.015135547146201134,
0.1025419607758522,
0.09859061986207962,
-0.03351140394806862,
-0.10817722231149673,
0.0965813398361206,
0.033563073724508286,
-0.03916473686695099,
0.04832909628748894,
0.1598898023366928,
0.01638099178671837,
0.03183954581618309,
0.07190227508544922,
-0.08769171684980392,
0.05995265021920204,
0.05221438780426979,
-0.011845177970826626,
-0.05096777528524399,
0.09368935227394104,
-0.13016246259212494,
0.12262754887342453,
0.10500330477952957,
-0.01420396938920021,
0.013517200946807861,
-0.044112566858530045,
0.03348751366138458,
-0.01771077886223793,
0.039496321231126785,
0.025753136724233627,
-0.1773928552865982,
-0.007053126581013203,
-0.0359080508351326,
0.04090791940689087,
-0.3173917531967163,
-0.037402763962745667,
-0.004091597627848387,
0.012528033927083015,
0.0191742442548275,
0.058226730674505234,
0.11128824949264526,
-0.03479233756661415,
-0.027521390467882156,
-0.008984300307929516,
0.0018666090909391642,
0.0944707989692688,
-0.08493294566869736,
-0.04493695870041847
] |
null | null |
transformers
|
# Harry Potter Bot
|
{"tags": ["conversational"]}
|
text-generation
|
bmdonnell/DialoGPT-medium-harrypotter
|
[
"transformers",
"pytorch",
"gpt2",
"text-generation",
"conversational",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
# Harry Potter Bot
|
[
"# Harry Potter Bot"
] |
[
"TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"# Harry Potter Bot"
] |
[
51,
4
] |
[
"passage: TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n# Harry Potter Bot"
] |
[
0.026644257828593254,
0.04486115649342537,
-0.007239907514303923,
0.0715867355465889,
0.12511606514453888,
0.05003383755683899,
0.17409706115722656,
0.12458180636167526,
0.058117836713790894,
-0.025065917521715164,
0.1273707151412964,
0.24712808430194855,
-0.008743669837713242,
0.06997672468423843,
-0.03584281727671623,
-0.24900785088539124,
0.07522143423557281,
-0.002419703174382448,
-0.08045702427625656,
0.12280640006065369,
0.07113043963909149,
-0.06013808026909828,
0.09089069068431854,
-0.03778485953807831,
-0.1609358936548233,
-0.036358632147312164,
0.04005742818117142,
-0.11779385805130005,
0.15534265339374542,
0.04441668093204498,
0.05863495171070099,
-0.020320208743214607,
-0.08583956211805344,
-0.10747655481100082,
0.043793532997369766,
-0.010841798037290573,
-0.025944126769900322,
0.06701670587062836,
0.013539012521505356,
-0.09156887233257294,
0.16711485385894775,
0.15960197150707245,
0.08539241552352905,
0.0694003626704216,
-0.1425613909959793,
-0.03885280340909958,
-0.01085843238979578,
0.026643088087439537,
-0.0282888263463974,
0.09693154692649841,
-0.012254759669303894,
0.17271825671195984,
-0.053220316767692566,
0.08892258256673813,
0.18252117931842804,
-0.41167914867401123,
-0.038506995886564255,
0.07551548629999161,
0.06055224686861038,
0.1383373737335205,
-0.11453831940889359,
0.02581668458878994,
-0.006924706045538187,
0.016510266810655594,
-0.018801067024469376,
-0.07294915616512299,
-0.09655632078647614,
0.027957431972026825,
-0.10966163873672485,
-0.015197026543319225,
0.24609871208667755,
-0.10645028203725815,
0.03352579101920128,
-0.06646103411912918,
-0.07975391298532486,
0.03365718945860863,
-0.049143753945827484,
-0.041836079210042953,
-0.05098585784435272,
0.04996693506836891,
-0.009373060427606106,
-0.054377418011426926,
-0.07857297360897064,
0.0002518565161153674,
-0.11960650980472565,
0.1907435655593872,
0.04968671500682831,
0.05503183603286743,
-0.21760711073875427,
0.07189825177192688,
0.11699838191270828,
-0.03934847190976143,
0.011600290425121784,
-0.11447799205780029,
0.034368351101875305,
-0.000668363063596189,
-0.038295820355415344,
-0.08910879492759705,
0.05982331186532974,
0.18480496108531952,
-0.028886623680591583,
0.04432336613535881,
-0.017378030344843864,
0.0812792181968689,
0.07440884411334991,
0.005712093319743872,
0.03315164893865585,
-0.028680745512247086,
0.03804953396320343,
-0.09166644513607025,
0.021570177748799324,
-0.06870333850383759,
-0.16176915168762207,
-0.04172314330935478,
0.03631668537855148,
0.03509429842233658,
0.05374181643128395,
0.07579994201660156,
-0.022113030776381493,
-0.04017810523509979,
0.023110009729862213,
-0.0012867824407294393,
-0.006856351625174284,
-0.00029581066337414086,
-0.02885865420103073,
0.14081421494483948,
0.005461954046040773,
-0.020193176344037056,
-0.11991868913173676,
0.1178806945681572,
-0.10927870869636536,
0.016830414533615112,
0.014794624410569668,
-0.04437526687979698,
0.02298947423696518,
0.0229865163564682,
0.010474804788827896,
-0.15042179822921753,
-0.05235809087753296,
-0.0026194893289357424,
-0.015458626672625542,
-0.03567874804139137,
-0.0823211744427681,
-0.05960208177566528,
-0.02006269432604313,
0.0513136200606823,
-0.0016826835926622152,
0.01700710505247116,
-0.05803972855210304,
0.12012499570846558,
-0.07340685278177261,
0.11122135072946548,
-0.0980512946844101,
0.04576306790113449,
-0.10810940712690353,
-0.07501768320798874,
-0.14561600983142853,
0.031440094113349915,
-0.03201880678534508,
0.14343596994876862,
0.02212178334593773,
-0.024380460381507874,
-0.018377522006630898,
0.04159699007868767,
-0.08780476450920105,
0.19369176030158997,
-0.038656651973724365,
-0.14727361500263214,
0.2604096829891205,
-0.09814494848251343,
-0.1923614740371704,
0.11250179260969162,
-0.01811501756310463,
0.05394316837191582,
0.12122917920351028,
0.188701793551445,
-0.06436058133840561,
-0.010896406136453152,
0.04232706129550934,
0.05325908586382866,
-0.13655726611614227,
0.05530673637986183,
0.012029468081891537,
0.0017772155115380883,
-0.09732513129711151,
0.05142798274755478,
0.08828238397836685,
0.014646314084529877,
-0.024600084871053696,
-0.01308117900043726,
-0.009565980173647404,
-0.019389217719435692,
0.13931477069854736,
-0.003182648681104183,
0.13428814709186554,
-0.09243739396333694,
-0.036720190197229385,
-0.04490841180086136,
0.018869085237383842,
-0.001107892720028758,
0.07185239344835281,
-0.05550629273056984,
0.12709151208400726,
0.12919344007968903,
0.07092631608247757,
-0.14745141565799713,
-0.03792351856827736,
0.0005955251399427652,
0.1268249899148941,
0.10735585540533066,
0.025982819497585297,
0.06517564505338669,
-0.0048388149589300156,
-0.043313972651958466,
0.028239645063877106,
0.11176642030477524,
-0.040163811296224594,
-0.11327924579381943,
-0.1784486621618271,
0.04297782853245735,
-0.042124271392822266,
0.10337288677692413,
-0.08325621485710144,
0.032590948045253754,
0.008576571010053158,
0.0956893041729927,
0.0013484901282936335,
0.013055279850959778,
0.008220192044973373,
-0.014299747534096241,
-0.0873546451330185,
-0.014386882074177265,
0.08442795276641846,
-0.030419031158089638,
-0.07512110471725464,
0.1417330503463745,
-0.1332201212644577,
0.16641481220722198,
0.2363349199295044,
-0.2982061207294464,
-0.013743776828050613,
-0.05964867025613785,
-0.023341897875070572,
0.02325216867029667,
0.07413502037525177,
0.018994420766830444,
0.14240199327468872,
-0.0019080779748037457,
0.15905846655368805,
-0.048961907625198364,
-0.0770333930850029,
-0.05929899960756302,
-0.032749202102422714,
-0.009320919401943684,
0.07407857477664948,
0.07373114675283432,
-0.11622817814350128,
0.1481752097606659,
0.1430744230747223,
0.061412300914525986,
0.15801750123500824,
0.10089734941720963,
0.0036135476548224688,
0.08271120488643646,
-0.036943502724170685,
-0.024332266300916672,
-0.07867594808340073,
-0.3081752061843872,
-0.057041510939598083,
0.0815851166844368,
-0.021721895784139633,
0.07857275009155273,
-0.08415580540895462,
-0.037019528448581696,
-0.0287320613861084,
0.017915649339556694,
0.048282161355018616,
0.07788433134555817,
0.04208124428987503,
0.17911803722381592,
0.009213428013026714,
-0.02161657065153122,
0.06814594566822052,
0.02594306878745556,
-0.09335170686244965,
0.15746989846229553,
-0.1817355751991272,
-0.29939475655555725,
-0.07110463082790375,
-0.16356302797794342,
-0.013118211179971695,
0.05377117544412613,
0.07819730788469315,
-0.1409452259540558,
-0.01593049429357052,
0.006671871989965439,
0.04665317386388779,
-0.1983163207769394,
-0.0029375492595136166,
-0.12669500708580017,
0.07760734856128693,
-0.1604452133178711,
-0.10116671025753021,
-0.027149681001901627,
-0.017256587743759155,
-0.07531195133924484,
0.17428265511989594,
-0.1043747067451477,
0.021894900128245354,
0.19296139478683472,
0.02979738637804985,
0.03650131821632385,
-0.04523328319191933,
0.18556824326515198,
-0.11009835451841354,
-0.028278881683945656,
0.08388353884220123,
-0.028901532292366028,
0.03908773511648178,
0.05114533379673958,
-0.002995833056047559,
-0.11386426538228989,
0.022571144625544548,
-0.061523761600255966,
-0.03882989659905434,
-0.27728310227394104,
-0.10157372057437897,
-0.09416108578443527,
0.13095995783805847,
0.06353706866502762,
0.08694369345903397,
0.13677479326725006,
0.03241296112537384,
-0.03486865013837814,
-0.006788457743823528,
0.11350718885660172,
0.13196755945682526,
0.1546081006526947,
-0.054797545075416565,
0.10400725156068802,
-0.00732640502974391,
-0.06723599880933762,
0.0775177925825119,
0.010489492677152157,
0.06473548710346222,
0.03582092002034187,
0.05439159646630287,
-0.03474049270153046,
0.12163657695055008,
0.12976029515266418,
0.06400913000106812,
0.07449053227901459,
0.0018762258114293218,
-0.047343239188194275,
-0.0023769382387399673,
-0.11009418219327927,
0.06357737630605698,
0.048745810985565186,
-0.1361396461725235,
-0.05840175598859787,
-0.023479383438825607,
0.10314234346151352,
0.030318239703774452,
0.04201050102710724,
-0.14829635620117188,
-0.055223651230335236,
0.08196567744016647,
-0.08182991296052933,
-0.14589855074882507,
0.123079814016819,
-0.0020069878082722425,
-0.21787337958812714,
0.059871673583984375,
-0.00445617875084281,
0.09279157966375351,
-0.05691356211900711,
0.05937489494681358,
-0.12336664646863937,
-0.14690734446048737,
-0.004597262945026159,
0.07678814977407455,
-0.3614218831062317,
0.10696420818567276,
-0.01492918562144041,
-0.034170228987932205,
-0.07811900973320007,
-0.029855877161026,
0.013830301351845264,
0.09424669295549393,
0.07196108996868134,
0.020148398354649544,
0.06785272806882858,
-0.0406607948243618,
0.04343333840370178,
-0.005423454567790031,
0.10249773412942886,
-0.0017375926254317164,
-0.0022391490638256073,
-0.042196162045001984,
-0.005115449894219637,
-0.07990812510251999,
-0.0676470398902893,
0.08787580579519272,
-0.19311031699180603,
0.10162363201379776,
-0.04432025924324989,
0.09911154210567474,
0.023297002539038658,
-0.009774526581168175,
-0.014873764477670193,
0.1903144121170044,
-0.061184998601675034,
-0.10480397194623947,
-0.07800080627202988,
0.003084047930315137,
0.04311311990022659,
-0.021921120584011078,
0.001714059617370367,
-0.05075491592288017,
0.04557185247540474,
-0.14364401996135712,
-0.16942542791366577,
0.10049116611480713,
-0.03986566886305809,
-0.11294279247522354,
-0.02521209791302681,
0.17721779644489288,
-0.0034283786080777645,
0.07834594696760178,
-0.0030608291272073984,
0.010859235189855099,
-0.1614042967557907,
-0.04489457979798317,
0.06781932711601257,
-0.01755790039896965,
0.01517956331372261,
0.06319505721330643,
-0.010474820621311665,
0.025478791445493698,
-0.11388683319091797,
0.002701080869883299,
0.3331870138645172,
0.15942682325839996,
-0.007533974014222622,
0.16389495134353638,
0.095169797539711,
-0.07701768726110458,
-0.20377296209335327,
-0.11687740683555603,
-0.14614079892635345,
-0.09839248657226562,
-0.11418759822845459,
-0.1840914934873581,
0.0953584611415863,
-0.01949978433549404,
0.0390242300927639,
0.1834079921245575,
-0.26367655396461487,
-0.10321856290102005,
0.11387986689805984,
0.02451213076710701,
0.34731602668762207,
-0.13654576241970062,
-0.05909457057714462,
-0.06329049915075302,
-0.13515006005764008,
0.10986652225255966,
-0.048275843262672424,
0.10932222008705139,
-0.07713272422552109,
0.1895398050546646,
0.014632279984652996,
0.00452010240405798,
0.08054868876934052,
0.04267682507634163,
-0.04309546574950218,
-0.0853574350476265,
-0.11971835047006607,
0.05094245821237564,
0.04378562048077583,
0.0012319941306486726,
0.00019107430125586689,
-0.00988979171961546,
-0.10830558836460114,
-0.02040279097855091,
-0.07845893502235413,
0.030188707634806633,
-0.0032264897599816322,
-0.012956078164279461,
-0.06363765895366669,
-0.01565135270357132,
-0.04096111282706261,
0.04395265132188797,
0.19462695717811584,
-0.03720684349536896,
0.23511044681072235,
0.02668285183608532,
0.12747381627559662,
-0.19071733951568604,
-0.025029415264725685,
-0.017844390124082565,
-0.04071100056171417,
0.08528535813093185,
-0.07439415901899338,
0.0042008887976408005,
0.1203632652759552,
-0.027254542335867882,
0.04513894021511078,
0.12052612006664276,
0.007183260750025511,
0.01924232766032219,
0.05866866931319237,
-0.2831539511680603,
-0.06260104477405548,
-0.013306834734976292,
0.003389947582036257,
0.07960876822471619,
0.029080945998430252,
0.19366589188575745,
-0.010552232153713703,
-0.10842732340097427,
0.017762793228030205,
0.03275830298662186,
-0.02030261978507042,
0.09357069432735443,
0.0017647672211751342,
0.0005022393306717277,
-0.1737007051706314,
0.07648489624261856,
0.028424246236681938,
-0.12154875695705414,
0.040282927453517914,
0.23473821580410004,
-0.14052385091781616,
-0.11216624826192856,
-0.0805232971906662,
0.06423265486955643,
-0.059285249561071396,
0.03525875136256218,
-0.01691962592303753,
-0.14410193264484406,
0.07709000259637833,
0.08645118772983551,
0.03991714492440224,
0.08244728296995163,
-0.07356026023626328,
-0.01683993451297283,
-0.02293669432401657,
-0.015764553099870682,
-0.007774999365210533,
0.02652132138609886,
-0.04291326180100441,
0.1613030880689621,
-0.045192670077085495,
0.09649375081062317,
-0.07671203464269638,
-0.10391981899738312,
-0.166996568441391,
0.034507136791944504,
-0.07013712078332901,
-0.07280486077070236,
-0.11721280962228775,
-0.054082468152046204,
-0.006301205139607191,
-0.03604762256145477,
-0.00764160230755806,
-0.07937610149383545,
-0.125620499253273,
-0.0030617837328463793,
-0.008651883341372013,
0.00847567804157734,
-0.09048979729413986,
0.024212220683693886,
0.08525878936052322,
-0.04752087965607643,
0.16319029033184052,
0.25355300307273865,
-0.12160678952932358,
0.12206751853227615,
-0.08359070867300034,
-0.1338375359773636,
0.052975479513406754,
0.012324461713433266,
0.04365278780460358,
0.07718737423419952,
-0.025997882708907127,
0.005234678741544485,
0.03798993304371834,
0.09308423846960068,
0.11760129779577255,
-0.08418448269367218,
0.03781552240252495,
-0.024084946140646935,
-0.16604818403720856,
-0.02558642439544201,
-0.04595145583152771,
0.08855043351650238,
-0.044821981340646744,
0.13422754406929016,
-0.05320816487073898,
0.0693182572722435,
-0.06421879678964615,
0.04796583205461502,
0.021037599071860313,
-0.15891779959201813,
-0.0296399537473917,
-0.055398084223270416,
0.036036357283592224,
-0.028313780203461647,
0.1761445850133896,
-0.023282049223780632,
0.0008308379910886288,
0.06561797112226486,
0.10294128954410553,
-0.06701570749282837,
0.01593802496790886,
0.09655700623989105,
0.12114819139242172,
-0.08782581239938736,
-0.020131045952439308,
0.037759892642498016,
0.08523480594158173,
0.04282563924789429,
0.14794856309890747,
-0.007124816998839378,
0.06112099066376686,
0.06061524152755737,
-0.055587492883205414,
0.06979596614837646,
-0.0864967554807663,
-0.07101257890462875,
0.013126743957400322,
0.016490669921040535,
-0.029582500457763672,
0.18183600902557373,
0.1958584189414978,
-0.019330326467752457,
0.001131141558289528,
-0.025882378220558167,
-0.08366626501083374,
-0.1313849687576294,
-0.040749166160821915,
-0.0728154107928276,
-0.12910713255405426,
0.007339666597545147,
-0.13646093010902405,
0.01886795088648796,
-0.02695559151470661,
0.07664940506219864,
-0.06360716372728348,
0.015104886144399643,
0.12090817838907242,
-0.09605427831411362,
0.07066947221755981,
-0.02154127135872841,
0.030984265729784966,
-0.0280345119535923,
-0.033058684319257736,
-0.11438123136758804,
0.001025551580823958,
0.009485900402069092,
0.09302517771720886,
-0.10594018548727036,
0.043476402759552,
-0.14587293565273285,
-0.10754816234111786,
-0.015195286832749844,
0.0850139856338501,
-0.08747610449790955,
0.12155783921480179,
0.03344615176320076,
-0.042450033128261566,
0.001519853831268847,
0.26724275946617126,
-0.056416358798742294,
-0.025422725826501846,
-0.02092345617711544,
0.17232413589954376,
0.04200929403305054,
0.054295506328344345,
0.006604219786822796,
0.036292217671871185,
-0.0919392928481102,
0.2822611927986145,
0.3500478267669678,
-0.13359901309013367,
-0.0014675784623250365,
0.017521612346172333,
0.04838532954454422,
0.14439399540424347,
0.10677091032266617,
0.11816283315420151,
0.2604312300682068,
-0.08925234526395798,
0.028950760141015053,
-0.047741133719682693,
-0.008379627019166946,
-0.09908035397529602,
0.03626544028520584,
0.06645840406417847,
-0.07684948295354843,
-0.03871142491698265,
0.05767429247498512,
-0.29083186388015747,
0.08294567465782166,
-0.09522955119609833,
-0.1747271865606308,
-0.04494473710656166,
-0.0051972949877381325,
0.02525089681148529,
0.012523583136498928,
0.11822178214788437,
0.03821945562958717,
-0.09951874613761902,
0.05810529738664627,
0.0248149074614048,
-0.2566261887550354,
-0.05607978627085686,
0.14945709705352783,
-0.13654758036136627,
0.04369102790951729,
-0.03613487258553505,
0.019433021545410156,
0.060110364109277725,
0.08029571175575256,
-0.053139183670282364,
-0.07863858342170715,
-0.006672888062894344,
-0.016477739438414574,
-0.007054716814309359,
0.09070750325918198,
0.06352376192808151,
-0.038155701011419296,
0.13140861690044403,
-0.06731497496366501,
0.045702286064624786,
0.008074460551142693,
0.00014444276166614145,
0.024647017940878868,
0.028699034824967384,
-0.07089978456497192,
0.03745313361287117,
0.14623993635177612,
-0.028336919844150543,
0.00939762219786644,
-0.044544633477926254,
-0.08701327443122864,
-0.024569304659962654,
-0.0800858587026596,
-0.1334099918603897,
-0.1918977051973343,
-0.1287958025932312,
0.008190964348614216,
-0.022250719368457794,
-0.20086853206157684,
0.015490429475903511,
-0.12334953248500824,
0.0500875748693943,
-0.16471049189567566,
0.10479007661342621,
0.06119938939809799,
-0.0035236906260252,
-0.0031029649544507265,
0.049145881086587906,
0.058569055050611496,
0.1525193303823471,
-0.14604724943637848,
-0.040041934698820114
] |
null | null |
speechbrain
|
# Conformer Encoder/Decoder for Speech Translation
This model was trained with [SpeechBrain](https://speechbrain.github.io) and is based on the Fisher Callhome recipe.
The performance of the model is the following:
| Release | CoVoSTv2 JA->EN Test BLEU | Custom Dataset Validation BLEU | Custom Dataset Test BLEU | GPUs |
|:-------------:|:--------------:|:--------------:|:--------------:|:--------:|
| 01-13-21 | 9.73 | 8.38 | 12.01 | 1xRTX 3090 |
This model was trained on subtitled audio downloaded from YouTube, and was not fine-tuned on the CoVoSTv2 training set.
When calculating the BLEU score for CoVoSTv2, the utterances were first preprocessed by the same pipeline used on the model's original training data: all punctuation other than apostrophes is removed and the text is lowercased, similar to the preprocessing applied to the Fisher Callhome dataset in the SpeechBrain recipe.
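For reference, a minimal sketch of this kind of text normalization (an illustration only, not the exact recipe code; it assumes that dropping non-apostrophe punctuation and lowercasing is all that is needed):
```python
import re

def normalize_text(text: str) -> str:
    # Drop all punctuation except apostrophes, then lowercase,
    # mirroring the Fisher Callhome-style preprocessing described above.
    text = re.sub(r"[^\w\s']", "", text)
    return text.lower().strip()

print(normalize_text("Hello, World! It's a test."))  # -> "hello world it's a test"
```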
## Pipeline description
The system is trained with recordings sampled at 16kHz (single channel).
The code will automatically normalize your audio (i.e., resampling + mono channel selection) when calling *transcribe_file* if needed.
## Install SpeechBrain
First of all, install SpeechBrain with the following command:
```
pip install speechbrain
```
### Transcribing your own audio files (Spoken Japanese, to written English)
```python
from speechbrain.pretrained import EncoderDecoderASR
st_model = EncoderDecoderASR.from_hparams(source="bob80333/speechbrain_ja2en_st_63M_yt600h")
st_model.transcribe_file("your_file_here.wav")
```
### Inference on GPU
To perform inference on the GPU, add `run_opts={"device":"cuda"}` when calling the `from_hparams` method.
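For example, combining the snippet above with `run_opts`:
```python
from speechbrain.pretrained import EncoderDecoderASR

st_model = EncoderDecoderASR.from_hparams(
    source="bob80333/speechbrain_ja2en_st_63M_yt600h",
    run_opts={"device": "cuda"},  # run inference on the GPU
)
st_model.transcribe_file("your_file_here.wav")
```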
### Limitations:
The model is likely to get caught in repetitions. The model is not very good at translation, which is reflected by its low BLEU scores.
The outputs of this model are unlikely to be correct; do not rely on it for any serious purpose.
This model was trained on data from YouTube, and has inherited whatever biases can be found in YouTube audio/subtitles.
The creator of this model doesn't actually know Japanese.
|
{"language": "en", "tags": ["speech-translation", "CTC", "Attention", "Transformer", "pytorch", "speechbrain", "automatic-speech-recognition"], "metrics": ["BLEU"]}
|
automatic-speech-recognition
|
bob80333/speechbrain_ja2en_st_63M_yt600h
|
[
"speechbrain",
"speech-translation",
"CTC",
"Attention",
"Transformer",
"pytorch",
"automatic-speech-recognition",
"en",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#speechbrain #speech-translation #CTC #Attention #Transformer #pytorch #automatic-speech-recognition #en #region-us
|
Conformer Encoder/Decoder for Speech Translation
================================================
This model was trained with SpeechBrain and is based on the Fisher Callhome recipe.
The performance of the model is the following:
This model was trained on subtitled audio downloaded from YouTube, and was not fine-tuned on the CoVoSTv2 training set.
When calculating the BLEU score for CoVoSTv2, the utterances were first preprocessed by the same pipeline used on the model's original training data: all punctuation other than apostrophes is removed and the text is lowercased, similar to the preprocessing applied to the Fisher Callhome dataset in the SpeechBrain recipe.
Pipeline description
--------------------
The system is trained with recordings sampled at 16kHz (single channel).
The code will automatically normalize your audio (i.e., resampling + mono channel selection) when calling *transcribe\_file* if needed.
Install SpeechBrain
-------------------
First of all, install SpeechBrain with the following command:
### Transcribing your own audio files (Spoken Japanese, to written English)
### Inference on GPU
To perform inference on the GPU, add 'run\_opts={"device":"cuda"}' when calling the 'from\_hparams' method.
### Limitations:
The model is likely to get caught in repetitions. The model is not very good at translation, which is reflected by its low BLEU scores.
The outputs of this model are unlikely to be correct; do not rely on it for any serious purpose.
This model was trained on data from YouTube, and has inherited whatever biases can be found in YouTube audio/subtitles.
The creator of this model doesn't actually know Japanese.
|
[
"### Transcribing your own audio files (Spoken Japanese, to written English)",
"### Inference on GPU\n\n\nTo perform inference on the GPU, add 'run\\_opts={\"device\":\"cuda\"}' when calling the 'from\\_hparams' method.",
"### Limitations:\n\n\nThe model is likely to get caught in repetitions. The model is not very good at translation, which is reflected by its low BLEU scores.\nThe outputs of this model are unlikely to be correct, do not rely on it for any serious purpose.\nThis model was trained on data from Youtube, and has inherited whatever biases can be found in Youtube audio/subtitles.\nThe creator of this model doesn't actually know Japanese."
] |
[
"TAGS\n#speechbrain #speech-translation #CTC #Attention #Transformer #pytorch #automatic-speech-recognition #en #region-us \n",
"### Transcribing your own audio files (Spoken Japanese, to written English)",
"### Inference on GPU\n\n\nTo perform inference on the GPU, add 'run\\_opts={\"device\":\"cuda\"}' when calling the 'from\\_hparams' method.",
"### Limitations:\n\n\nThe model is likely to get caught in repetitions. The model is not very good at translation, which is reflected by its low BLEU scores.\nThe outputs of this model are unlikely to be correct, do not rely on it for any serious purpose.\nThis model was trained on data from Youtube, and has inherited whatever biases can be found in Youtube audio/subtitles.\nThe creator of this model doesn't actually know Japanese."
] |
[
42,
19,
48,
104
] |
[
"passage: TAGS\n#speechbrain #speech-translation #CTC #Attention #Transformer #pytorch #automatic-speech-recognition #en #region-us \n### Transcribing your own audio files (Spoken Japanese, to written English)### Inference on GPU\n\n\nTo perform inference on the GPU, add 'run\\_opts={\"device\":\"cuda\"}' when calling the 'from\\_hparams' method.### Limitations:\n\n\nThe model is likely to get caught in repetitions. The model is not very good at translation, which is reflected by its low BLEU scores.\nThe outputs of this model are unlikely to be correct, do not rely on it for any serious purpose.\nThis model was trained on data from Youtube, and has inherited whatever biases can be found in Youtube audio/subtitles.\nThe creator of this model doesn't actually know Japanese."
] |
[
-0.07954481244087219,
-0.011822487227618694,
0.002179332310333848,
0.06890212744474411,
0.14924372732639313,
-0.010244171135127544,
0.09229147434234619,
0.06913883984088898,
0.09641464799642563,
0.08546993136405945,
0.042919889092445374,
0.02436748705804348,
0.11616445332765579,
0.051859237253665924,
0.031686846166849136,
-0.23322348296642303,
0.10423856228590012,
-0.015896271914243698,
0.19202342629432678,
0.06411951035261154,
0.03401999548077583,
-0.08342500776052475,
0.06222055107355118,
0.02586466260254383,
-0.18101131916046143,
-0.03758828341960907,
0.1083163470029831,
-0.05258438363671303,
0.07003124058246613,
0.09750740230083466,
-0.01502660196274519,
-0.00237064971588552,
0.056549716740846634,
-0.07512543350458145,
0.040053606033325195,
-0.012971118092536926,
0.029486458748579025,
0.01110592670738697,
0.0844758152961731,
0.03992439806461334,
0.04309484362602234,
0.12251543998718262,
-0.04722856357693672,
0.09595140069723129,
-0.08514861762523651,
-0.030419806018471718,
0.035568732768297195,
-0.036115653812885284,
0.014242229051887989,
0.038193777203559875,
-0.12027250975370407,
0.10370798408985138,
-0.13143527507781982,
0.07403664290904999,
0.1407710164785385,
-0.1631746143102646,
0.02980830892920494,
0.13094189763069153,
0.0431506484746933,
0.057485926896333694,
-0.01861502416431904,
0.08051221817731857,
0.09206219017505646,
0.005846075247973204,
-0.11829143017530441,
-0.019350668415427208,
-0.027322500944137573,
-0.07854057848453522,
-0.08592892438173294,
-0.02291545830667019,
0.1584797203540802,
0.022754020988941193,
-0.044232893735170364,
-0.08723276108503342,
-0.031754594296216965,
-0.08246825635433197,
-0.041634924709796906,
0.013274150900542736,
-0.037915829569101334,
0.0004365044878795743,
0.0793430432677269,
0.0069899242371320724,
-0.017635557800531387,
-0.17780640721321106,
-0.07925424724817276,
0.1449519246816635,
0.04640498384833336,
0.04843198508024216,
-0.10988323390483856,
0.09291622787714005,
-0.11371137946844101,
-0.03439721092581749,
0.008130471222102642,
-0.09558851271867752,
-0.0572160929441452,
0.08278008550405502,
-0.0525970384478569,
-0.020443812012672424,
0.009578658267855644,
-0.03588540852069855,
0.11445054411888123,
0.04109443724155426,
-0.03004968725144863,
0.10708967596292496,
0.06978492438793182,
0.09446416795253754,
-0.20930080115795135,
0.027530230581760406,
0.0853821188211441,
0.05639009550213814,
0.020762765780091286,
-0.060479599982500076,
-0.13597838580608368,
-0.05706574767827988,
0.04079277440905571,
0.03326070308685303,
-0.03306882083415985,
0.07544625550508499,
-0.12632334232330322,
-0.044415395706892014,
-0.021040653809905052,
-0.06613965332508087,
-0.06019217520952225,
0.05073246732354164,
-0.11220411956310272,
0.11304016411304474,
-0.0010393854463472962,
0.04437125474214554,
-0.05311140418052673,
-0.14634092152118683,
-0.079242043197155,
0.02019350603222847,
-0.05965514853596687,
-0.09299446642398834,
0.016301989555358887,
-0.07181639969348907,
0.03830311819911003,
-0.12917406857013702,
-0.06773832440376282,
-0.038143306970596313,
-0.0680522695183754,
-0.039383094757795334,
0.035743337124586105,
-0.13323692977428436,
0.010806427337229252,
-0.02113080956041813,
-0.06881215423345566,
-0.05234498158097267,
-0.04404522106051445,
0.08464222401380539,
-0.015174603089690208,
0.09066282957792282,
-0.08489947766065598,
0.08910360932350159,
-0.08993207663297653,
0.02446240559220314,
-0.1924072802066803,
0.16927661001682281,
-0.016678696498274803,
-0.08495752513408661,
-0.08243376016616821,
-0.02811649814248085,
-0.005816753022372723,
0.10565430670976639,
0.054089292883872986,
0.17987653613090515,
-0.29863011837005615,
-0.04512922465801239,
0.184193417429924,
-0.04840410500764847,
-0.00304081616923213,
0.23809057474136353,
-0.041080597788095474,
0.032387230545282364,
0.1308724284172058,
0.2463974952697754,
-0.16968902945518494,
-0.1056538000702858,
0.01605617068707943,
-0.011077916249632835,
-0.042838215827941895,
0.08617811650037766,
0.06294076144695282,
0.012104465626180172,
-0.132696270942688,
0.05335914343595505,
-0.05909033864736557,
0.04193840175867081,
-0.05120943486690521,
-0.05661796033382416,
0.04748300835490227,
-0.05404312163591385,
0.014219489879906178,
-0.0002963261504191905,
0.006487542297691107,
0.04411397501826286,
-0.07051482796669006,
-0.07481857389211655,
0.10592585057020187,
-0.11792401224374771,
0.029582474380731583,
-0.13997876644134521,
0.20660875737667084,
-0.1209975853562355,
0.021057773381471634,
-0.11756891757249832,
0.07090383768081665,
0.05563683062791824,
-0.042765773832798004,
0.16419093310832977,
0.005587830673903227,
-0.036280885338783264,
0.024152003228664398,
0.04958527535200119,
-0.011398998089134693,
0.023150838911533356,
0.030937986448407173,
0.025866717100143433,
-0.1487230658531189,
0.07466047257184982,
-0.011417152360081673,
0.09972889721393585,
-0.12093739956617355,
-0.08090536296367645,
0.0576937198638916,
0.06657591462135315,
-0.06739277392625809,
0.010715890675783157,
0.0495600625872612,
0.014814666472375393,
-0.026925355195999146,
0.021904518827795982,
0.04632339999079704,
-0.044782817363739014,
-0.11541301012039185,
0.19713614881038666,
-0.23885083198547363,
-0.11647248268127441,
0.1990269273519516,
-0.1327725201845169,
-0.057831957936286926,
-0.0071864137426018715,
0.09671179950237274,
-0.042558297514915466,
0.06208963319659233,
-0.0927620530128479,
0.29091593623161316,
-0.04901326820254326,
0.09491849690675735,
-0.14334648847579956,
0.14620858430862427,
0.11650034785270691,
-0.046312958002090454,
-0.01718171499669552,
0.08137809485197067,
0.14789673686027527,
-0.04988564923405647,
0.06830443441867828,
-0.11016316711902618,
-0.060729846358299255,
0.27627047896385193,
0.013299145735800266,
-0.10799024999141693,
-0.01371585950255394,
-0.026975620537996292,
-0.06665301322937012,
0.14050500094890594,
-0.10413913428783417,
-0.08210073411464691,
0.05327637866139412,
0.0352427214384079,
0.027433089911937714,
-0.12899833917617798,
-0.0357121042907238,
-0.07397107034921646,
-0.058546386659145355,
-0.06882315874099731,
0.04971412569284439,
-0.03277072310447693,
0.08723052591085434,
-0.03964921459555626,
-0.09287756681442261,
0.05514615401625633,
-0.05062952637672424,
-0.08751170337200165,
0.0818750262260437,
-0.12511689960956573,
-0.24371671676635742,
-0.14989067614078522,
-0.00957082025706768,
-0.11072362214326859,
0.10295049101114273,
-0.029757428914308548,
-0.0894089788198471,
-0.061592280864715576,
-0.041680049151182175,
0.0772169902920723,
0.01753944717347622,
-0.0637877881526947,
-0.13463161885738373,
-0.07248128950595856,
-0.047979988157749176,
-0.09730588644742966,
-0.008871202357113361,
-0.04971516132354736,
-0.053642529994249344,
0.017526401206851006,
-0.1166008934378624,
0.054482601583004,
0.22897730767726898,
0.06326111406087875,
0.02254331298172474,
-0.07259002327919006,
0.21697568893432617,
-0.12843528389930725,
-0.04250122234225273,
0.0927191823720932,
-0.04636223986744881,
-0.036787375807762146,
0.20129463076591492,
0.002268129028379917,
-0.09251614660024643,
0.04409940540790558,
-0.08267277479171753,
-0.07699604332447052,
-0.14540687203407288,
-0.07058946788311005,
-0.03940635547041893,
0.006777430884540081,
-0.004650958348065615,
-0.002881931606680155,
0.0872236043214798,
0.04609638825058937,
-0.06031568720936775,
0.07949879765510559,
0.007715041283518076,
0.019334757700562477,
0.08157121390104294,
-0.062446773052215576,
0.06759747117757797,
0.025574127212166786,
0.00011371263099135831,
0.0695406123995781,
0.0745631754398346,
0.129987433552742,
-0.014135709963738918,
0.1999099850654602,
0.06327516585588455,
0.05028686672449112,
0.17006172239780426,
0.016299372538924217,
0.018826937302947044,
0.036220483481884,
-0.06486008316278458,
-0.08470875024795532,
-0.09408680349588394,
0.10128111392259598,
0.19345442950725555,
-0.013570036739110947,
-0.050786782056093216,
0.05138168856501579,
0.0017437324859201908,
0.04144998639822006,
0.04565931856632233,
-0.25404855608940125,
-0.016983844339847565,
0.015516193583607674,
0.014649897813796997,
-0.07030282914638519,
0.1772056519985199,
0.14409375190734863,
-0.10048002749681473,
-0.08414031565189362,
0.0708254799246788,
0.06422644108533859,
-0.08435125648975372,
-0.0050196475349366665,
-0.1309605836868286,
0.07930358499288559,
-0.007090251427143812,
0.03475061058998108,
-0.2355629801750183,
0.12300760298967361,
0.01680217869579792,
0.06282088160514832,
-0.022200727835297585,
-0.06940560787916183,
0.085932657122612,
-0.016761135309934616,
0.17963463068008423,
-0.00677842739969492,
0.04752872884273529,
-0.14388182759284973,
-0.14124906063079834,
0.01700744405388832,
0.06036067381501198,
0.07938425987958908,
-0.029703976586461067,
-0.014000254683196545,
0.013312163762748241,
-0.025745388120412827,
-0.14837191998958588,
-0.08896655589342117,
-0.0010224820580333471,
0.012923382222652435,
0.11923465132713318,
0.12305209040641785,
0.040102191269397736,
-0.07396046817302704,
-0.1860281229019165,
-0.09308343380689621,
-0.17711542546749115,
-0.032691825181245804,
-0.02046893537044525,
-0.0027897499967366457,
0.11915668100118637,
-0.030060794204473495,
0.12019500881433487,
-0.043715737760066986,
0.0847192108631134,
0.005681863520294428,
-0.06265716254711151,
0.1601404994726181,
-0.051199302077293396,
-0.11174400895833969,
-0.03925763815641403,
0.1096055880188942,
0.060189031064510345,
0.07557588815689087,
0.03177616745233536,
0.0008986006141640246,
0.0010233779903501272,
-0.07737043499946594,
-0.07297759503126144,
0.12273530662059784,
-0.06773354858160019,
0.1613350659608841,
0.03105565905570984,
-0.1853315681219101,
-0.16246962547302246,
-0.018475700169801712,
0.10962995141744614,
0.2085297703742981,
-0.05896703153848648,
0.06076647713780403,
0.2654876410961151,
-0.04571891948580742,
-0.26314371824264526,
0.02120363898575306,
0.029255183413624763,
0.10069922357797623,
-0.05811123549938202,
-0.07097715884447098,
0.045075759291648865,
-0.06845264136791229,
-0.010437184944748878,
0.10918434709310532,
-0.14013169705867767,
-0.14821302890777588,
0.19834725558757782,
0.10465411096811295,
0.21212342381477356,
-0.02236531302332878,
-0.01788443885743618,
0.03885507211089134,
-0.07098511606454849,
0.0682714581489563,
-0.00696380203589797,
0.15321394801139832,
0.056563638150691986,
0.10476555675268173,
0.03073190152645111,
0.014756819233298302,
0.11182776093482971,
0.046610213816165924,
0.02371908724308014,
-0.03991066291928291,
-0.11749307811260223,
0.018487296998500824,
0.06391947716474533,
0.09251119941473007,
0.06449103355407715,
0.0003905933990608901,
0.002424629172310233,
-0.08990740776062012,
-0.07745575904846191,
-0.02028825506567955,
0.03144560381770134,
-0.06494533270597458,
-0.062309470027685165,
0.04845602065324783,
-0.013535453006625175,
0.07858093082904816,
-0.03391031175851822,
-0.16252262890338898,
-0.1661914736032486,
0.10708054900169373,
0.1310274749994278,
0.012227093800902367,
-0.022038932889699936,
0.03872804343700409,
-0.07594972103834152,
0.17932696640491486,
-0.012997687794268131,
0.053870704025030136,
0.09628364443778992,
0.0040625156834721565,
0.19630634784698486,
0.02098616398870945,
-0.12609270215034485,
0.10623648017644882,
0.007111336104571819,
-0.0737902969121933,
-0.09868207573890686,
-0.0018553833942860365,
-0.001126337330788374,
0.16011717915534973,
-0.07380331307649612,
0.10334479063749313,
-0.11458967626094818,
-0.00301090395078063,
-0.003032463835552335,
0.030447447672486305,
-0.0732710063457489,
0.049032580107450485,
0.09526214748620987,
-0.010447017848491669,
-0.17514003813266754,
0.10132269561290741,
0.011375985108315945,
-0.10692716389894485,
0.10050603002309799,
-0.06425639986991882,
-0.08860398083925247,
-0.035142682492733,
-0.22315800189971924,
0.05355178937315941,
-0.01713256537914276,
-0.13517789542675018,
-0.06977499276399612,
-0.09926154464483261,
-0.07552909106016159,
0.059919387102127075,
0.061308491975069046,
0.04378799721598625,
-0.08391937613487244,
-0.022978121414780617,
-0.07761359959840775,
0.050853706896305084,
0.043022315949201584,
-0.03458702936768532,
-0.12216313928365707,
0.059185564517974854,
0.1142624095082283,
0.08784614503383636,
-0.11321833729743958,
-0.09561251848936081,
-0.09477398544549942,
0.08905899524688721,
-0.14664316177368164,
0.08297516405582428,
-0.04435945674777031,
-0.013893960043787956,
0.01499367505311966,
-0.049183305352926254,
-0.03347240760922432,
0.02110724151134491,
-0.08296319842338562,
0.06693175435066223,
0.04142924025654793,
0.0665205642580986,
-0.009386545047163963,
-0.011937775649130344,
0.045313071459531784,
-0.021191727370023727,
0.0704374834895134,
0.1217922642827034,
-0.08027028292417526,
0.09092741459608078,
-0.17438900470733643,
0.0028799737337976694,
0.039309658110141754,
0.04734513536095619,
0.011031349189579487,
-0.02348785661160946,
0.0019520141649991274,
0.011512787081301212,
0.040572717785835266,
-0.06979445368051529,
0.044729720801115036,
-0.029519755393266678,
-0.03308916464447975,
-0.005887895822525024,
0.03560970723628998,
-0.07242888957262039,
0.06067878380417824,
0.011839181184768677,
0.10716722160577774,
0.018512187525629997,
-0.024184230715036392,
-0.01332188956439495,
0.0004985634586773813,
0.003053562482818961,
-0.0809641033411026,
-0.018294360488653183,
-0.03262251988053322,
-0.07243529707193375,
0.05039357766509056,
-0.0179976224899292,
0.25158366560935974,
0.024086931720376015,
-0.054409321397542953,
-0.06631424278020859,
0.03850764036178589,
0.0009821404237300158,
0.050552718341350555,
0.11568793654441833,
0.044267818331718445,
0.0561022087931633,
-0.10570181161165237,
0.05537153780460358,
0.09437142312526703,
0.17638035118579865,
-0.018295489251613617,
-0.002999740419909358,
0.045520663261413574,
0.11229097843170166,
0.08962257951498032,
-0.029524415731430054,
0.009022736921906471,
0.0209798701107502,
-0.08922287821769714,
0.03498232364654541,
-0.03613654896616936,
0.15051889419555664,
0.12386014312505722,
-0.022922901436686516,
0.023277226835489273,
-0.034800752997398376,
-0.11778292059898376,
-0.12440714240074158,
-0.15550824999809265,
-0.041541341692209244,
-0.11380346119403839,
0.027899425476789474,
-0.06355883181095123,
0.02563004568219185,
0.10497153550386429,
0.09025952219963074,
-0.12994708120822906,
0.22728461027145386,
-0.08952907472848892,
-0.10038238018751144,
0.12598752975463867,
0.0012202052166685462,
-0.021255088970065117,
-0.027028130367398262,
-0.0677415058016777,
0.1258530169725418,
-0.024715706706047058,
0.048781152814626694,
0.038388047367334366,
-0.04177582263946533,
0.03263822942972183,
-0.01102182362228632,
-0.06413386762142181,
-0.025517191737890244,
0.014643851667642593,
-0.022253481671214104,
0.14339183270931244,
0.09296143054962158,
-0.05880444496870041,
0.009855397045612335,
0.02723984234035015,
0.01849418878555298,
0.02306075394153595,
-0.10672786086797714,
0.18484434485435486,
-0.11989448964595795,
0.048481009900569916,
-0.03335437923669815,
-0.10952536016702652,
-0.04831404611468315,
0.23021617531776428,
0.15366797149181366,
-0.2009868025779724,
-0.0724000558257103,
-0.018919246271252632,
0.01976992003619671,
-0.08015040308237076,
0.16631542146205902,
-0.046228569000959396,
0.2110423743724823,
0.005566371139138937,
0.05287233367562294,
-0.0972972884774208,
-0.09378444403409958,
0.05278356745839119,
-0.07196957617998123,
0.031168339774012566,
-0.05986315757036209,
-0.1187124028801918,
0.02361956797540188,
-0.3093836307525635,
-0.0788097083568573,
-0.11014995723962784,
-0.02924686297774315,
-0.04805566370487213,
-0.030253905802965164,
-0.13420791923999786,
0.13843542337417603,
-0.010181653313338757,
-0.10684937983751297,
0.07470191270112991,
0.054375097155570984,
0.008205212652683258,
-0.07880719006061554,
0.004541712813079357,
0.021223336458206177,
-0.008341703563928604,
0.14314493536949158,
-0.04592818394303322,
0.18925197422504425,
-0.033394839614629745,
0.09972727298736572,
0.026377201080322266,
0.1312389224767685,
0.03273595869541168,
-0.08875701576471329,
-0.026913803070783615,
0.1999654322862625,
0.008863881230354309,
0.023353658616542816,
0.0016928567783907056,
0.014134151861071587,
0.08791115880012512,
-0.08666786551475525,
-0.09085451811552048,
-0.09694353491067886,
0.024899665266275406,
-0.08540429174900055,
0.14184173941612244,
0.034193314611911774,
-0.03146365284919739,
-0.06112964451313019,
-0.030112994834780693,
0.043825142085552216,
0.00426498195156455,
-0.00781983695924282,
0.035962894558906555,
-0.18995049595832825,
0.034903667867183685,
0.006210525520145893,
0.011815259233117104,
-0.22027012705802917,
0.041792500764131546,
-0.10795831680297852,
-0.04897880554199219,
-0.012163417413830757,
0.048701103776693344,
-0.025303814560174942,
0.014181411825120449,
-0.012559136375784874,
-0.06243595480918884,
0.016014160588383675,
0.06914369761943817,
-0.09835557639598846,
-0.19981130957603455
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# t5-small-finetuned-cnn-wei0
This model is a fine-tuned version of [t5-small](https://huggingface.co/t5-small) on the cnn_dailymail dataset.
It achieves the following results on the evaluation set:
- Loss: 1.7149
- Rouge1: 24.2324
- Rouge2: 11.7178
- Rougel: 20.0508
- Rougelsum: 22.8698
- Gen Len: 19.0
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training (a sketch of an equivalent configuration follows the list):
- learning_rate: 2e-05
- train_batch_size: 12
- eval_batch_size: 12
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 1
- mixed_precision_training: Native AMP
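For illustration, a hedged sketch of how these hyperparameters might map onto `Seq2SeqTrainingArguments` from `transformers`; the original training script is not included in this card, so details such as `output_dir` are assumptions, and the Adam betas/epsilon above match the library defaults:
```python
from transformers import Seq2SeqTrainingArguments

# Illustrative only: the values below mirror the list above; anything else is an assumption.
training_args = Seq2SeqTrainingArguments(
    output_dir="t5-small-finetuned-cnn-wei0",  # assumed output directory
    learning_rate=2e-5,
    per_device_train_batch_size=12,
    per_device_eval_batch_size=12,
    seed=42,
    lr_scheduler_type="linear",
    num_train_epochs=1,
    fp16=True,  # "Native AMP" mixed-precision training
)
```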
### Training results
| Training Loss | Epoch | Step | Validation Loss | Rouge1 | Rouge2 | Rougel | Rougelsum | Gen Len |
|:-------------:|:-----:|:----:|:---------------:|:-------:|:-------:|:-------:|:---------:|:-------:|
| 1.9068 | 1.0 | 4786 | 1.7149 | 24.2324 | 11.7178 | 20.0508 | 22.8698 | 19.0 |
### Framework versions
- Transformers 4.11.3
- Pytorch 1.9.0+cu111
- Datasets 1.14.0
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "datasets": ["cnn_dailymail"], "metrics": ["rouge"], "model-index": [{"name": "t5-small-finetuned-cnn-wei0", "results": [{"task": {"type": "text2text-generation", "name": "Sequence-to-sequence Language Modeling"}, "dataset": {"name": "cnn_dailymail", "type": "cnn_dailymail", "args": "3.0.0"}, "metrics": [{"type": "rouge", "value": 24.2324, "name": "Rouge1"}]}]}]}
|
text2text-generation
|
bochaowei/t5-small-finetuned-cnn-wei0
|
[
"transformers",
"pytorch",
"tensorboard",
"t5",
"text2text-generation",
"generated_from_trainer",
"dataset:cnn_dailymail",
"license:apache-2.0",
"model-index",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #t5 #text2text-generation #generated_from_trainer #dataset-cnn_dailymail #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
t5-small-finetuned-cnn-wei0
===========================
This model is a fine-tuned version of t5-small on the cnn\_dailymail dataset.
It achieves the following results on the evaluation set:
* Loss: 1.7149
* Rouge1: 24.2324
* Rouge2: 11.7178
* Rougel: 20.0508
* Rougelsum: 22.8698
* Gen Len: 19.0
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 2e-05
* train\_batch\_size: 12
* eval\_batch\_size: 12
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 1
* mixed\_precision\_training: Native AMP
### Training results
### Framework versions
* Transformers 4.11.3
* Pytorch 1.9.0+cu111
* Datasets 1.14.0
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 12\n* eval\\_batch\\_size: 12\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 1\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.9.0+cu111\n* Datasets 1.14.0\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #t5 #text2text-generation #generated_from_trainer #dataset-cnn_dailymail #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 12\n* eval\\_batch\\_size: 12\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 1\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.9.0+cu111\n* Datasets 1.14.0\n* Tokenizers 0.10.3"
] |
[
80,
113,
4,
34
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #t5 #text2text-generation #generated_from_trainer #dataset-cnn_dailymail #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 12\n* eval\\_batch\\_size: 12\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 1\n* mixed\\_precision\\_training: Native AMP### Training results### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.9.0+cu111\n* Datasets 1.14.0\n* Tokenizers 0.10.3"
] |
[
-0.1127171516418457,
0.15341074764728546,
-0.002246245974674821,
0.09727239608764648,
0.09984598308801651,
0.017721712589263916,
0.1817321926355362,
0.14620377123355865,
-0.08602169901132584,
0.06366996467113495,
0.14935088157653809,
0.11173245310783386,
0.050648096948862076,
0.21978752315044403,
-0.06035446375608444,
-0.2199099063873291,
0.05126430094242096,
0.036925435066223145,
-0.001842185971327126,
0.13711446523666382,
0.08659113943576813,
-0.11839822679758072,
0.09728552401065826,
0.020120376721024513,
-0.18150362372398376,
-0.04508300870656967,
-0.016465365886688232,
-0.0754384845495224,
0.11150124669075012,
0.014192890375852585,
0.08301691710948944,
0.041114043444395065,
0.05870150029659271,
-0.16204388439655304,
0.009155986830592155,
0.043869901448488235,
-0.002330191433429718,
0.10605956614017487,
0.04084789752960205,
-0.0076094698160886765,
0.05909062922000885,
-0.0852281004190445,
0.06243573874235153,
0.006728391628712416,
-0.13033364713191986,
-0.20905479788780212,
-0.1312320977449417,
0.057929567992687225,
0.06638659536838531,
0.08284080773591995,
-0.0085463160648942,
0.1664353907108307,
-0.014482613652944565,
0.09196094423532486,
0.2147097885608673,
-0.3066096901893616,
-0.054236650466918945,
0.009676262736320496,
0.03148699924349785,
0.08451021462678909,
-0.07289379835128784,
-0.03078625164926052,
0.03409109637141228,
0.041019849479198456,
0.147751584649086,
-0.0026598498225212097,
-0.03825406730175018,
-0.020998505875468254,
-0.13007739186286926,
-0.08343757688999176,
0.2024753987789154,
0.05593540892004967,
-0.04100653901696205,
-0.07400009781122208,
-0.05965980514883995,
-0.19607295095920563,
-0.047611054033041,
0.014362377114593983,
0.03880508989095688,
-0.036564383655786514,
-0.09609097987413406,
-0.020294569432735443,
-0.07789047062397003,
-0.02969660423696041,
-0.05299079418182373,
0.09938476979732513,
0.03418080136179924,
0.016328291967511177,
-0.048217806965112686,
0.0780869573354721,
-0.012094380334019661,
-0.16053146123886108,
-0.012791285291314125,
0.01261171419173479,
0.018313637003302574,
-0.0422900952398777,
-0.039047807455062866,
-0.09225558489561081,
0.020034972578287125,
0.15903329849243164,
-0.05961831659078598,
0.05872039124369621,
-0.026749055832624435,
0.029499609023332596,
-0.054534006863832474,
0.1705903857946396,
-0.024696800857782364,
-0.017381833866238594,
0.0210407767444849,
0.08997800201177597,
0.05195052921772003,
-0.037331290543079376,
-0.11293169111013412,
0.04490478336811066,
0.12546943128108978,
0.014773162081837654,
-0.02072584256529808,
0.06585612893104553,
-0.04818548262119293,
-0.03014608658850193,
0.0705544501543045,
-0.10014351457357407,
0.026670604944229126,
-0.011797181330621243,
-0.06146523356437683,
-0.02992326021194458,
0.0006290515884757042,
0.017647113651037216,
-0.030079422518610954,
0.06730319559574127,
-0.10088173300027847,
0.005309446714818478,
-0.06375973671674728,
-0.12381161004304886,
0.037335701286792755,
-0.10562703758478165,
-0.01004757359623909,
-0.08784832060337067,
-0.17283205687999725,
-0.023646358400583267,
0.03876015543937683,
-0.0408402718603611,
-0.07495994120836258,
-0.07305475324392319,
-0.08455639332532883,
0.043342288583517075,
-0.023724963888525963,
0.08177521824836731,
-0.07996534556150436,
0.08185800909996033,
0.018676646053791046,
0.06509216129779816,
-0.05034273862838745,
0.04146122187376022,
-0.08055446296930313,
0.038865476846694946,
-0.15881972014904022,
0.07387915253639221,
-0.047962699085474014,
0.0739850327372551,
-0.10969230532646179,
-0.0872822031378746,
0.03356415405869484,
-0.03820988908410072,
0.08652741461992264,
0.10676515102386475,
-0.1807413399219513,
-0.05825841799378395,
0.1987023949623108,
-0.07221613079309464,
-0.1576261818408966,
0.12026291340589523,
-0.057263292372226715,
0.020690545439720154,
0.06618187576532364,
0.21340778470039368,
0.03955359756946564,
-0.08542883396148682,
-0.0372554250061512,
-0.05950158089399338,
0.07778054475784302,
-0.07572226971387863,
0.0777360051870346,
0.012500923126935959,
0.053932853043079376,
0.01318095251917839,
-0.01097925566136837,
0.031160768121480942,
-0.08979466557502747,
-0.08551455289125443,
-0.0444706492125988,
-0.07910623401403427,
0.02567395567893982,
0.019792355597019196,
0.06559387594461441,
-0.1133706271648407,
-0.08731501549482346,
0.03658675029873848,
0.074671670794487,
-0.08194108307361603,
0.035473741590976715,
-0.10276716202497482,
0.10824460536241531,
-0.07719377428293228,
-0.0038342250045388937,
-0.17133000493049622,
-0.06278136372566223,
0.03472291678190231,
0.010635715909302235,
0.02468309924006462,
-0.06678080558776855,
0.060698747634887695,
0.05511076748371124,
-0.028651325032114983,
-0.0288949403911829,
-0.03014175221323967,
0.004714258946478367,
-0.11381798982620239,
-0.1869269162416458,
-0.04483078420162201,
-0.03828602656722069,
0.1481109857559204,
-0.1662156879901886,
0.037816114723682404,
0.04448672756552696,
0.112810879945755,
0.03882591426372528,
-0.03407115861773491,
-0.0009041057201102376,
0.0680093914270401,
-0.049132075160741806,
-0.07418784499168396,
0.05184933915734291,
0.03423738852143288,
-0.09944827109575272,
0.009396074339747429,
-0.14341802895069122,
0.13607822358608246,
0.13563169538974762,
0.02186487801373005,
-0.049915097653865814,
-0.017347551882267,
-0.053268563002347946,
-0.026406677439808846,
-0.03489028289914131,
0.016225695610046387,
0.13456794619560242,
0.02927866019308567,
0.14709988236427307,
-0.09305991977453232,
-0.04917750135064125,
0.048634544014930725,
-0.0263107530772686,
-0.012126985006034374,
0.09069766104221344,
0.022222505882382393,
-0.11632663756608963,
0.13992704451084137,
0.12382985651493073,
-0.028364116325974464,
0.11825427412986755,
-0.05800603702664375,
-0.07105551660060883,
-0.04294145852327347,
-0.023803124204277992,
0.022587649524211884,
0.09731710702180862,
-0.08807928115129471,
-0.01913522556424141,
0.048286885023117065,
0.036699745804071426,
0.004160511773079634,
-0.18095465004444122,
-0.007644291035830975,
0.031098362058401108,
-0.05969082564115524,
-0.05724310502409935,
-0.013202759437263012,
0.008685990236699581,
0.09951241314411163,
0.022534772753715515,
-0.05804502218961716,
0.0400666818022728,
0.012020844034850597,
-0.07279421389102936,
0.18109866976737976,
-0.09684115648269653,
-0.17997170984745026,
-0.11828076094388962,
-0.11681589484214783,
-0.06475292891263962,
-0.01068525668233633,
0.07093322277069092,
-0.06521469354629517,
-0.04819250479340553,
-0.1200002133846283,
-0.04541953280568123,
0.019996430724859238,
0.024280210956931114,
0.05296687036752701,
-0.022147227078676224,
0.05068432539701462,
-0.10043588280677795,
-0.02173735946416855,
-0.013581029139459133,
0.018858639523386955,
0.06513772159814835,
-0.0012753759510815144,
0.11506983637809753,
0.1211489737033844,
-0.01866767555475235,
0.04481997340917587,
-0.0303364098072052,
0.263048380613327,
-0.07532573491334915,
-0.017549019306898117,
0.10769882798194885,
-0.007706969510763884,
0.08157142996788025,
0.12728366255760193,
0.04756689816713333,
-0.09565040469169617,
-0.0011722553754225373,
-0.00021661422215402126,
-0.0440756157040596,
-0.2239195704460144,
-0.03868289664387703,
-0.05109730362892151,
-0.005952428560703993,
0.11704389750957489,
0.03462666645646095,
0.0474015548825264,
0.05424043908715248,
0.01598845236003399,
0.06915266066789627,
-0.01931917481124401,
0.10320506244897842,
0.12479857355356216,
0.07135371118783951,
0.12538142502307892,
-0.05152671039104462,
-0.02852017618715763,
0.053956691175699234,
0.02990339882671833,
0.21672998368740082,
-0.0030232020653784275,
0.21326205134391785,
0.03558330237865448,
0.16610227525234222,
0.02338865026831627,
0.06809856742620468,
-0.007377361413091421,
0.005570974666625261,
-0.017086636275053024,
-0.04099835827946663,
-0.039705563336610794,
0.018050074577331543,
-0.044852688908576965,
0.007371935527771711,
-0.09490254521369934,
0.008912727236747742,
0.04904188588261604,
0.3098996877670288,
0.057433802634477615,
-0.40055572986602783,
-0.09585826843976974,
-0.0009874544339254498,
-0.022295860573649406,
-0.04357500374317169,
-0.00950254499912262,
0.07843940705060959,
-0.07489658892154694,
0.07971851527690887,
-0.07080665230751038,
0.10710316896438599,
-0.0842641144990921,
0.02482457086443901,
0.0490499809384346,
0.060444679111242294,
-0.011822500266134739,
0.049018651247024536,
-0.25662821531295776,
0.2668873965740204,
0.019666846841573715,
0.07024122774600983,
-0.07818050682544708,
-0.005754182115197182,
0.010078954510390759,
0.02899661660194397,
0.06102212145924568,
-0.0018697647610679269,
-0.08142969757318497,
-0.1516873836517334,
-0.13723871111869812,
0.019393231719732285,
0.06324760615825653,
-0.012837988324463367,
0.12374676018953323,
-0.0059175435453653336,
-0.0050271316431462765,
0.028587812557816505,
-0.025390202179551125,
-0.04183600842952728,
-0.11514948308467865,
0.033790066838264465,
0.07117959856987,
0.015812858939170837,
-0.05759154632687569,
-0.10925765335559845,
-0.06754333525896072,
0.16228577494621277,
0.03835888206958771,
-0.06499329209327698,
-0.12329838424921036,
0.047430187463760376,
0.09260260313749313,
-0.07701953500509262,
0.03096967563033104,
0.003513289848342538,
0.13293607532978058,
0.010770302265882492,
-0.06514240801334381,
0.09036976844072342,
-0.03865601494908333,
-0.17244026064872742,
-0.04720024764537811,
0.11066277325153351,
0.02304462157189846,
0.05383772775530815,
0.004701063968241215,
0.046420980244874954,
-0.04093515872955322,
-0.06869208812713623,
0.03139866515994072,
0.0032853365410119295,
0.09625176340341568,
-0.02694268897175789,
0.004385477397590876,
0.021590398624539375,
-0.07466834038496017,
-0.027435969561338425,
0.17805656790733337,
0.26369741559028625,
-0.08219227194786072,
0.05621794983744621,
0.042735978960990906,
-0.046796027570962906,
-0.1423652023077011,
-0.007361493539065123,
0.053675781935453415,
0.012396350502967834,
-0.004573810379952192,
-0.1679341048002243,
0.03484123572707176,
0.1004922091960907,
-0.019177822396159172,
0.08148107677698135,
-0.3102376461029053,
-0.12155225872993469,
0.07936880737543106,
0.11214593052864075,
0.0941912978887558,
-0.13900908827781677,
-0.05653385445475578,
-0.021832283586263657,
-0.16832365095615387,
0.1543135643005371,
-0.10139442980289459,
0.11450023204088211,
-0.024574480950832367,
0.1144769936800003,
0.003011151449754834,
-0.05641049146652222,
0.12960593402385712,
0.03223623335361481,
0.06371571123600006,
-0.06223968043923378,
0.010605125688016415,
0.10887826979160309,
-0.0934986099600792,
0.055855054408311844,
-0.0854615792632103,
0.04512985423207283,
-0.11111181974411011,
-0.017898431047797203,
-0.05842852592468262,
-0.005957687273621559,
-0.026949966326355934,
-0.024291209876537323,
-0.037680212408304214,
0.019938543438911438,
0.07792314887046814,
-0.01518800389021635,
0.18982398509979248,
0.0326596274971962,
0.12819531559944153,
0.13391093909740448,
0.08922218531370163,
-0.10932261496782303,
-0.04711846262216568,
-0.01506128441542387,
-0.04393850639462471,
0.030044332146644592,
-0.14832289516925812,
0.030755091458559036,
0.13518010079860687,
0.009855675511062145,
0.13064728677272797,
0.058335840702056885,
-0.05186881870031357,
0.03301543369889259,
0.05582708492875099,
-0.1750430166721344,
-0.10894400626420975,
0.018164144828915596,
0.0228768028318882,
-0.14392323791980743,
0.03836728632450104,
0.13951626420021057,
-0.05007309839129448,
-0.03053818829357624,
-0.0006308932788670063,
0.02221360057592392,
-0.007001962978392839,
0.17317627370357513,
0.028491497039794922,
0.05725964531302452,
-0.12153720110654831,
0.07461048662662506,
0.0695771723985672,
-0.09296909719705582,
0.049985796213150024,
0.08657626062631607,
-0.11950115114450455,
-0.026524841785430908,
0.04090580344200134,
0.16352367401123047,
-0.06520577520132065,
-0.05312469229102135,
-0.13991856575012207,
-0.10799732059240341,
0.09668394178152084,
0.1555708348751068,
0.061639465391635895,
0.03168405964970589,
-0.026021098718047142,
-0.031619634479284286,
-0.10861308127641678,
0.08749142289161682,
0.06322938948869705,
0.07997239381074905,
-0.11397991329431534,
0.09315762668848038,
-0.016452927142381668,
0.051570307463407516,
-0.012723206542432308,
0.00801441166549921,
-0.09287605434656143,
-0.003027938539162278,
-0.12235543876886368,
0.024602191522717476,
-0.05379725247621536,
0.0000071418130573874805,
-0.023299532011151314,
-0.050523463636636734,
-0.0664849802851677,
0.005786942783743143,
-0.10777978599071503,
-0.040339261293411255,
0.010766078718006611,
0.04159075766801834,
-0.12577909231185913,
-0.01812530867755413,
0.010815037414431572,
-0.08709045499563217,
0.0940956398844719,
0.04958292096853256,
-0.012861877679824829,
0.01615908369421959,
-0.02473258227109909,
-0.000012697068086708896,
0.04478259012103081,
0.008582236245274544,
0.08598986268043518,
-0.11956754326820374,
-0.01954030618071556,
0.01813800074160099,
0.009258925914764404,
0.03522805497050285,
0.10788508504629135,
-0.11388880759477615,
-0.008751987479627132,
-0.012807938270270824,
-0.03656764328479767,
-0.06779191642999649,
0.05753354728221893,
0.10962601751089096,
0.02615269646048546,
0.18524421751499176,
-0.06306158006191254,
0.014520145952701569,
-0.1947062760591507,
-0.0028306220192462206,
0.0016176644712686539,
-0.13703492283821106,
-0.0842379629611969,
-0.04118683189153671,
0.07166960835456848,
-0.06507594883441925,
0.14122341573238373,
-0.012772402726113796,
0.016467662528157234,
0.03265083208680153,
-0.009280051104724407,
-0.050568316131830215,
0.01841423287987709,
0.18326367437839508,
0.02763141132891178,
-0.046026524156332016,
0.0868823379278183,
0.014990965835750103,
0.07216472923755646,
0.11981681734323502,
0.19044461846351624,
0.1084694042801857,
0.09339864552021027,
0.09783728420734406,
0.034254707396030426,
-0.03508615121245384,
-0.1620124876499176,
0.05516419932246208,
-0.04665059223771095,
0.14334283769130707,
0.002609225455671549,
0.20378132164478302,
0.09318152070045471,
-0.1603599488735199,
0.034275174140930176,
-0.03211866691708565,
-0.08542507141828537,
-0.0798996314406395,
-0.09561863541603088,
-0.10275674611330032,
-0.13440768420696259,
-0.0026920530945062637,
-0.12988868355751038,
0.03665440157055855,
0.09052115678787231,
0.017315106466412544,
-0.0012650134740397334,
0.0946437418460846,
0.028512977063655853,
0.01261973287910223,
0.06279406696557999,
-0.009050056338310242,
-0.0394403450191021,
-0.06814631074666977,
-0.0816609114408493,
0.013324709609150887,
0.018542736768722534,
0.06381061673164368,
-0.002912900410592556,
0.018292803317308426,
0.04972388222813606,
-0.030887888744473457,
-0.12590289115905762,
0.00988831277936697,
0.02174692414700985,
0.06710005551576614,
0.02541995979845524,
0.02317310869693756,
-0.000876443984452635,
-0.00351197412237525,
0.18907424807548523,
-0.05205057933926582,
-0.05412202328443527,
-0.1340809017419815,
0.17235451936721802,
0.00935029610991478,
-0.05573435127735138,
0.043563276529312134,
-0.07021105289459229,
-0.0027930978685617447,
0.18176406621932983,
0.18543009459972382,
-0.04737056791782379,
-0.0211543757468462,
-0.01992526650428772,
-0.010026715695858002,
-0.011007016524672508,
0.09523313492536545,
0.10617946088314056,
0.012774009257555008,
-0.07299768179655075,
-0.0285799577832222,
-0.06914211064577103,
-0.011816035956144333,
-0.033548057079315186,
0.06229354441165924,
0.010263923555612564,
-0.008470147848129272,
-0.04546530172228813,
0.06700771301984787,
-0.07246483862400055,
-0.057384029030799866,
0.0138785932213068,
-0.2069828063249588,
-0.18229326605796814,
0.006553020793944597,
0.06073649600148201,
-0.011366154067218304,
0.05163245648145676,
-0.0031370592769235373,
0.010043954476714134,
0.0898069366812706,
-0.033161710947752,
-0.06407734006643295,
-0.06848405301570892,
0.09515831619501114,
-0.12835517525672913,
0.20487958192825317,
-0.026783913373947144,
0.046021465212106705,
0.13477694988250732,
0.04493904113769531,
-0.12411235272884369,
0.05001579970121384,
0.04454034939408302,
-0.0028627654537558556,
0.0272645466029644,
0.10704801231622696,
-0.03250700235366821,
0.06183065474033356,
0.05827092379331589,
-0.10219856351613998,
-0.017647573724389076,
-0.06764949858188629,
-0.0030107570346444845,
-0.03233170881867409,
-0.05413708835840225,
-0.030678195878863335,
0.13508333265781403,
0.17549394071102142,
-0.05470341071486473,
-0.006186522077769041,
-0.04156433790922165,
0.014868799597024918,
0.05824936181306839,
-0.003988464362919331,
-0.05536478012800217,
-0.25413787364959717,
-0.010721497237682343,
0.07779601216316223,
0.001003031269647181,
-0.25871461629867554,
-0.0892442986369133,
-0.017223624512553215,
-0.04566342756152153,
-0.08355028182268143,
0.10642189532518387,
0.08730113506317139,
0.03643171116709709,
-0.070976622402668,
0.03161642700433731,
-0.06601471453905106,
0.157104030251503,
-0.13933831453323364,
-0.07699708640575409
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# t5-small-finetuned-cnn-wei1
This model is a fine-tuned version of [t5-small](https://huggingface.co/t5-small) on the cnn_dailymail dataset.
It achieves the following results on the evaluation set:
- Loss: 1.6819
- Rouge1: 41.1796
- Rouge2: 18.9426
- Rougel: 29.2338
- Rougelsum: 38.4087
- Gen Len: 72.7607
## Model description
More information needed
## Intended uses & limitations
More information needed
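
Until a fuller description is written, a minimal usage sketch is shown below; it assumes the checkpoint is used for abstractive summarization of news articles, as the CNN/DailyMail fine-tuning suggests. The placeholder article and the generation settings are illustrative assumptions, not values taken from the original run.

```python
from transformers import pipeline

# Checkpoint name is taken from this card; length limits below are illustrative assumptions.
summarizer = pipeline("summarization", model="bochaowei/t5-small-finetuned-cnn-wei1")

article = "..."  # a CNN/DailyMail-style news article goes here
summary = summarizer(article, max_length=100, min_length=30, do_sample=False)[0]["summary_text"]
print(summary)
```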
## Training and evaluation data
More information needed
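
For orientation, the dataset named in this card's metadata (cnn_dailymail, config 3.0.0) can be loaded as sketched below; how it was preprocessed for this particular run is not documented, so only the loading step is shown.

```python
from datasets import load_dataset

# Dataset name and config version come from this card's metadata; preprocessing details are undocumented.
dataset = load_dataset("cnn_dailymail", "3.0.0")
print(dataset)  # DatasetDict with train/validation/test splits of (article, highlights) pairs
```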
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 4e-05
- train_batch_size: 12
- eval_batch_size: 12
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 1
- mixed_precision_training: Native AMP
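
Expressed in code, these settings would roughly correspond to a `Seq2SeqTrainingArguments` configuration like the sketch below. The output directory and the `predict_with_generate` flag are assumptions, and any option not listed above is left at its default (the Adam betas and epsilon shown above are the Trainer defaults).

```python
from transformers import Seq2SeqTrainingArguments

# A sketch mirroring the hyperparameters listed above; output_dir and predict_with_generate are assumed.
training_args = Seq2SeqTrainingArguments(
    output_dir="t5-small-finetuned-cnn-wei1",
    learning_rate=4e-5,
    per_device_train_batch_size=12,
    per_device_eval_batch_size=12,
    seed=42,
    num_train_epochs=1,
    lr_scheduler_type="linear",
    fp16=True,  # mixed-precision training (Native AMP)
    predict_with_generate=True,
)
```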
### Training results
| Training Loss | Epoch | Step | Validation Loss | Rouge1 | Rouge2 | Rougel | Rougelsum | Gen Len |
|:-------------:|:-----:|:-----:|:---------------:|:-------:|:-------:|:-------:|:---------:|:-------:|
| 1.8582 | 1.0 | 23927 | 1.6819 | 41.1796 | 18.9426 | 29.2338 | 38.4087 | 72.7607 |
### Framework versions
- Transformers 4.11.3
- Pytorch 1.9.0+cu111
- Datasets 1.14.0
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "datasets": ["cnn_dailymail"], "metrics": ["rouge"], "model-index": [{"name": "t5-small-finetuned-cnn-wei1", "results": [{"task": {"type": "text2text-generation", "name": "Sequence-to-sequence Language Modeling"}, "dataset": {"name": "cnn_dailymail", "type": "cnn_dailymail", "args": "3.0.0"}, "metrics": [{"type": "rouge", "value": 41.1796, "name": "Rouge1"}]}]}]}
|
text2text-generation
|
bochaowei/t5-small-finetuned-cnn-wei1
|
[
"transformers",
"pytorch",
"tensorboard",
"t5",
"text2text-generation",
"generated_from_trainer",
"dataset:cnn_dailymail",
"license:apache-2.0",
"model-index",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #t5 #text2text-generation #generated_from_trainer #dataset-cnn_dailymail #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
t5-small-finetuned-cnn-wei1
===========================
This model is a fine-tuned version of t5-small on the cnn\_dailymail dataset.
It achieves the following results on the evaluation set:
* Loss: 1.6819
* Rouge1: 41.1796
* Rouge2: 18.9426
* Rougel: 29.2338
* Rougelsum: 38.4087
* Gen Len: 72.7607
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 4e-05
* train\_batch\_size: 12
* eval\_batch\_size: 12
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 1
* mixed\_precision\_training: Native AMP
### Training results
### Framework versions
* Transformers 4.11.3
* Pytorch 1.9.0+cu111
* Datasets 1.14.0
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 4e-05\n* train\\_batch\\_size: 12\n* eval\\_batch\\_size: 12\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 1\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.9.0+cu111\n* Datasets 1.14.0\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #t5 #text2text-generation #generated_from_trainer #dataset-cnn_dailymail #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 4e-05\n* train\\_batch\\_size: 12\n* eval\\_batch\\_size: 12\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 1\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.9.0+cu111\n* Datasets 1.14.0\n* Tokenizers 0.10.3"
] |
[
80,
113,
4,
34
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #t5 #text2text-generation #generated_from_trainer #dataset-cnn_dailymail #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 4e-05\n* train\\_batch\\_size: 12\n* eval\\_batch\\_size: 12\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 1\n* mixed\\_precision\\_training: Native AMP### Training results### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.9.0+cu111\n* Datasets 1.14.0\n* Tokenizers 0.10.3"
] |
[
-0.1134011298418045,
0.15276066958904266,
-0.002206424018368125,
0.09783855825662613,
0.10051389038562775,
0.018132874742150307,
0.18138355016708374,
0.14562676846981049,
-0.08657807111740112,
0.06374701857566833,
0.14927734434604645,
0.11157327890396118,
0.050521135330200195,
0.21909400820732117,
-0.060146965086460114,
-0.21945087611675262,
0.05092761293053627,
0.037173062562942505,
-0.0015638533513993025,
0.1367952823638916,
0.08612029999494553,
-0.11891677230596542,
0.0976884737610817,
0.019842661917209625,
-0.18163469433784485,
-0.04482431709766388,
-0.01697211153805256,
-0.0754179134964943,
0.11155229061841965,
0.014137409627437592,
0.08304614573717117,
0.040536537766456604,
0.0583961084485054,
-0.16089443862438202,
0.009298233315348625,
0.04374762997031212,
-0.002312535885721445,
0.10604111105203629,
0.0407242476940155,
-0.007620744872838259,
0.057940516620874405,
-0.08549042046070099,
0.06261585652828217,
0.006781073287129402,
-0.1305193156003952,
-0.21063831448554993,
-0.13146811723709106,
0.05710286647081375,
0.06602934002876282,
0.08380179107189178,
-0.008823464624583721,
0.16600516438484192,
-0.01446946058422327,
0.09229938685894012,
0.21378383040428162,
-0.30755314230918884,
-0.0542309433221817,
0.009897303767502308,
0.03152760490775108,
0.08480007946491241,
-0.07357100397348404,
-0.030419763177633286,
0.03421986103057861,
0.0415286123752594,
0.14770999550819397,
-0.0025857798755168915,
-0.037504274398088455,
-0.020606940612196922,
-0.13010621070861816,
-0.08261148631572723,
0.20411913096904755,
0.05615066736936569,
-0.040810711681842804,
-0.07453866302967072,
-0.05899837985634804,
-0.19721145927906036,
-0.048025861382484436,
0.01519867219030857,
0.03906635195016861,
-0.037306107580661774,
-0.09622557461261749,
-0.01903585158288479,
-0.07752009481191635,
-0.028800049796700478,
-0.05283476784825325,
0.09864930808544159,
0.03424776345491409,
0.016004998236894608,
-0.047526583075523376,
0.07777277380228043,
-0.011996867135167122,
-0.16059158742427826,
-0.012143947184085846,
0.012507778592407703,
0.018541082739830017,
-0.04240557178854942,
-0.039244819432497025,
-0.09084971994161606,
0.020266462117433548,
0.15961426496505737,
-0.05983416736125946,
0.05921725183725357,
-0.026357918977737427,
0.0287290271371603,
-0.054827604442834854,
0.17034220695495605,
-0.025257829576730728,
-0.018290724605321884,
0.021307216957211494,
0.09015931189060211,
0.051421333104372025,
-0.037209007889032364,
-0.112730473279953,
0.04540298134088516,
0.12496480345726013,
0.014892401173710823,
-0.02125430293381214,
0.0670197531580925,
-0.04743601754307747,
-0.02983849309384823,
0.07112300395965576,
-0.10045753419399261,
0.026398342102766037,
-0.011716093868017197,
-0.06121200695633888,
-0.02951699309051037,
0.0003443487221375108,
0.01705632545053959,
-0.030234090983867645,
0.06719549000263214,
-0.10129953175783157,
0.004708214197307825,
-0.0637688934803009,
-0.12420700490474701,
0.03672874718904495,
-0.10487930476665497,
-0.009556137025356293,
-0.0877525731921196,
-0.17219455540180206,
-0.024535756558179855,
0.038544170558452606,
-0.040899842977523804,
-0.07480792701244354,
-0.07266947627067566,
-0.08488232642412186,
0.043380558490753174,
-0.023599538952112198,
0.08269261568784714,
-0.08016973733901978,
0.0815458670258522,
0.019782764837145805,
0.06520206481218338,
-0.05003030598163605,
0.04164068400859833,
-0.08002278953790665,
0.03857201337814331,
-0.1597026139497757,
0.07334817945957184,
-0.04835032299160957,
0.07538674771785736,
-0.10918696969747543,
-0.08778981119394302,
0.0346650704741478,
-0.03789934515953064,
0.08633408695459366,
0.10665455460548401,
-0.1795768290758133,
-0.058139633387327194,
0.19750063121318817,
-0.0713803619146347,
-0.15801650285720825,
0.12013483047485352,
-0.05729527398943901,
0.019687823951244354,
0.06641208380460739,
0.21341954171657562,
0.03943977132439613,
-0.08578473329544067,
-0.037520796060562134,
-0.059893898665905,
0.0778859332203865,
-0.07525872439146042,
0.07691292464733124,
0.012824521400034428,
0.053170379251241684,
0.013690885156393051,
-0.010340056382119656,
0.031135616824030876,
-0.08949179202318192,
-0.08529199659824371,
-0.04437672346830368,
-0.07915843278169632,
0.025096353143453598,
0.02012849599123001,
0.0655457153916359,
-0.11404314637184143,
-0.08726100623607635,
0.03494914248585701,
0.07463540881872177,
-0.08207320421934128,
0.03471297398209572,
-0.1026543602347374,
0.10878947377204895,
-0.07740262150764465,
-0.0036403462290763855,
-0.1711685061454773,
-0.0635727271437645,
0.03443426266312599,
0.01187897752970457,
0.02426891215145588,
-0.0668000653386116,
0.060956742614507675,
0.055322762578725815,
-0.028939856216311455,
-0.029121115803718567,
-0.02879245951771736,
0.004818924702703953,
-0.11392581462860107,
-0.1872650384902954,
-0.0450621135532856,
-0.038018759340047836,
0.14774392545223236,
-0.16705536842346191,
0.037637241184711456,
0.044388577342033386,
0.11247032135725021,
0.03823528811335564,
-0.03367344290018082,
-0.000488856399897486,
0.06831653416156769,
-0.048632800579071045,
-0.07419323921203613,
0.052154846489429474,
0.034273140132427216,
-0.09872543811798096,
0.008720430545508862,
-0.1437739133834839,
0.13697221875190735,
0.13621792197227478,
0.021127568557858467,
-0.05049740523099899,
-0.017512090504169464,
-0.053166139870882034,
-0.026457717642188072,
-0.0339173823595047,
0.015977265313267708,
0.13525617122650146,
0.028325246647000313,
0.1473197638988495,
-0.09259843826293945,
-0.04922645539045334,
0.04887329414486885,
-0.025438105687499046,
-0.011487524025142193,
0.09119941294193268,
0.021015502512454987,
-0.11449803411960602,
0.13995590806007385,
0.12314486503601074,
-0.027794821187853813,
0.11791082471609116,
-0.05860777571797371,
-0.07067380845546722,
-0.042739301919937134,
-0.02426496148109436,
0.02237490378320217,
0.09774847328662872,
-0.08860012888908386,
-0.019630135968327522,
0.04845321550965309,
0.03656105324625969,
0.004086434841156006,
-0.18032225966453552,
-0.007536029443144798,
0.030939793214201927,
-0.05929424986243248,
-0.05718276649713516,
-0.013016610406339169,
0.008448971435427666,
0.09923264384269714,
0.022850053384900093,
-0.058243490755558014,
0.04006758704781532,
0.012094066478312016,
-0.07232853770256042,
0.18110494315624237,
-0.09720626473426819,
-0.17938579618930817,
-0.11828003078699112,
-0.11855415999889374,
-0.06530559062957764,
-0.010855739004909992,
0.07085415720939636,
-0.06632477790117264,
-0.04886530712246895,
-0.11958009004592896,
-0.04590699076652527,
0.01960894651710987,
0.0243074968457222,
0.05271216481924057,
-0.02277718111872673,
0.05079687759280205,
-0.10012373328208923,
-0.02164280414581299,
-0.013791920617222786,
0.018999869003891945,
0.06530515849590302,
-0.0007598971715196967,
0.1153751090168953,
0.12032697349786758,
-0.017943380400538445,
0.04478779435157776,
-0.030191397294402122,
0.26432934403419495,
-0.07575763016939163,
-0.017433954402804375,
0.10761502385139465,
-0.007336202077567577,
0.08104129880666733,
0.12717340886592865,
0.0481967069208622,
-0.0961490273475647,
-0.001282037585042417,
-0.00016854227578733116,
-0.0437312051653862,
-0.22374482452869415,
-0.03835110366344452,
-0.051289595663547516,
-0.006268874742090702,
0.11700696498155594,
0.03466162458062172,
0.04792063683271408,
0.05446890741586685,
0.016267593950033188,
0.06992651522159576,
-0.019865509122610092,
0.10296367108821869,
0.1264241337776184,
0.07098057866096497,
0.1255970001220703,
-0.05187421292066574,
-0.028607187792658806,
0.05370103940367699,
0.02982991561293602,
0.21756714582443237,
-0.00288459868170321,
0.21387962996959686,
0.03563673049211502,
0.16639384627342224,
0.02354838326573372,
0.06846189498901367,
-0.007990803569555283,
0.00499740382656455,
-0.01659196801483631,
-0.04102513566613197,
-0.03836864233016968,
0.01736065186560154,
-0.04559830203652382,
0.007396929431706667,
-0.09510132670402527,
0.0073037841357290745,
0.04891238734126091,
0.30987972021102905,
0.056738559156656265,
-0.4015936255455017,
-0.0962114930152893,
-0.001661537797190249,
-0.02204401046037674,
-0.04368509352207184,
-0.009230237454175949,
0.07739012688398361,
-0.07455050945281982,
0.0801292434334755,
-0.0707513689994812,
0.10698294639587402,
-0.08471332490444183,
0.025255804881453514,
0.05023985728621483,
0.061289507895708084,
-0.012397846207022667,
0.048827312886714935,
-0.2570571303367615,
0.2660518288612366,
0.02000957913696766,
0.07053554803133011,
-0.07806585729122162,
-0.005720428191125393,
0.010444979183375835,
0.02994106337428093,
0.06086883321404457,
-0.0021806161385029554,
-0.08095615357160568,
-0.1521044224500656,
-0.1367589235305786,
0.01977919600903988,
0.06296249479055405,
-0.012569781392812729,
0.12416334450244904,
-0.0055411867797374725,
-0.0055417269468307495,
0.02843012847006321,
-0.026247236877679825,
-0.04176119714975357,
-0.11563087999820709,
0.03355250880122185,
0.07173944264650345,
0.016325756907463074,
-0.057162873446941376,
-0.10925692319869995,
-0.06967692077159882,
0.1615837961435318,
0.03907627984881401,
-0.065209299325943,
-0.12363579869270325,
0.046568743884563446,
0.09290079772472382,
-0.07628918439149857,
0.03128461912274361,
0.003389668883755803,
0.13206398487091064,
0.011455298401415348,
-0.06499066948890686,
0.09054216742515564,
-0.03892529010772705,
-0.1722431629896164,
-0.04718002304434776,
0.11062155663967133,
0.02264365367591381,
0.05335015058517456,
0.004314476624131203,
0.04690264165401459,
-0.0418989434838295,
-0.06883320957422256,
0.031799498945474625,
0.0015833303332328796,
0.09739583730697632,
-0.027222003787755966,
0.0037529319524765015,
0.021906832233071327,
-0.07455043494701385,
-0.027335412800312042,
0.17696069180965424,
0.26253223419189453,
-0.08174049109220505,
0.05520094931125641,
0.04281088337302208,
-0.046506982296705246,
-0.1417430341243744,
-0.00713921245187521,
0.05287934094667435,
0.012437167577445507,
-0.005506881512701511,
-0.168514683842659,
0.03589954972267151,
0.09993956983089447,
-0.018700912594795227,
0.08360565453767776,
-0.30814340710639954,
-0.12145081907510757,
0.07949548959732056,
0.11231395602226257,
0.09471471607685089,
-0.1396377831697464,
-0.056363075971603394,
-0.021690640598535538,
-0.16815614700317383,
0.15283308923244476,
-0.10219616442918777,
0.11483431607484818,
-0.024803008884191513,
0.11435360461473465,
0.00267747207544744,
-0.05667790025472641,
0.12943977117538452,
0.032950807362794876,
0.06442468613386154,
-0.0625431165099144,
0.011114888824522495,
0.10854903608560562,
-0.09318356961011887,
0.05582645907998085,
-0.08553668111562729,
0.04492093250155449,
-0.1118226870894432,
-0.01780524291098118,
-0.05862851068377495,
-0.006143258884549141,
-0.027118129655718803,
-0.02388007566332817,
-0.037528373301029205,
0.01996147260069847,
0.07731407880783081,
-0.015382854267954826,
0.18894048035144806,
0.032392702996730804,
0.128435879945755,
0.13267944753170013,
0.08900202810764313,
-0.10943204164505005,
-0.04864279925823212,
-0.014445362612605095,
-0.04410001263022423,
0.030144961550831795,
-0.14828605949878693,
0.029964210465550423,
0.1354343742132187,
0.009327363222837448,
0.1303921937942505,
0.05853778123855591,
-0.05143611878156662,
0.03253452852368355,
0.056002382189035416,
-0.17536504566669464,
-0.10780089348554611,
0.01807844638824463,
0.020107582211494446,
-0.1436547487974167,
0.03859620541334152,
0.13916413486003876,
-0.04969548434019089,
-0.030251188203692436,
-0.0006308492156676948,
0.021884959191083908,
-0.006651301868259907,
0.17351461946964264,
0.02878839150071144,
0.05741249397397041,
-0.12239962071180344,
0.0744531899690628,
0.06980067491531372,
-0.09333875775337219,
0.04966055974364281,
0.08726933598518372,
-0.12046397477388382,
-0.0268976129591465,
0.040257878601551056,
0.16471293568611145,
-0.06412478536367416,
-0.05330212414264679,
-0.14057345688343048,
-0.10851588845252991,
0.0969269722700119,
0.15462495386600494,
0.06131814047694206,
0.03120969608426094,
-0.025850538164377213,
-0.03159176930785179,
-0.1091914102435112,
0.08785074949264526,
0.06224829703569412,
0.08033913373947144,
-0.11399433016777039,
0.09227149188518524,
-0.01630762591958046,
0.051287692040205,
-0.012741451151669025,
0.008164230734109879,
-0.09318731725215912,
-0.003226746106520295,
-0.12226243317127228,
0.02392493188381195,
-0.053492043167352676,
0.0002784091921057552,
-0.023644983768463135,
-0.04995965212583542,
-0.06635809689760208,
0.006101225037127733,
-0.10769214481115341,
-0.04026518017053604,
0.011944664642214775,
0.041956957429647446,
-0.12496455758810043,
-0.01792377419769764,
0.010151386260986328,
-0.08724948018789291,
0.0942194014787674,
0.049655482172966,
-0.01297521311789751,
0.015934595838189125,
-0.02332698367536068,
-0.00008662882464705035,
0.04407655447721481,
0.008175616152584553,
0.08624826371669769,
-0.11951437592506409,
-0.019607283174991608,
0.017696086317300797,
0.009405798278748989,
0.03539254888892174,
0.10744783282279968,
-0.11407625675201416,
-0.008743527345359325,
-0.01200837641954422,
-0.03558974340558052,
-0.06787145882844925,
0.057368699461221695,
0.11003969609737396,
0.025564488023519516,
0.18498903512954712,
-0.0632505714893341,
0.014229659922420979,
-0.1947270631790161,
-0.003128238022327423,
0.001267773099243641,
-0.13830675184726715,
-0.08376999944448471,
-0.04118771106004715,
0.07222841680049896,
-0.06537700444459915,
0.14263416826725006,
-0.012955233454704285,
0.01655644364655018,
0.03254728764295578,
-0.008580266498029232,
-0.05103859305381775,
0.01863461174070835,
0.18307416141033173,
0.027445711195468903,
-0.04627933353185654,
0.08673495054244995,
0.015858126804232597,
0.07175832241773605,
0.12042336910963058,
0.18886645138263702,
0.10810545831918716,
0.09399989992380142,
0.09738428145647049,
0.03431286662817001,
-0.03579475358128548,
-0.16124901175498962,
0.055823855102062225,
-0.0469585545361042,
0.14447146654129028,
0.0022043318022042513,
0.20240424573421478,
0.09282108396291733,
-0.1595563292503357,
0.03458517789840698,
-0.032834310084581375,
-0.08546353131532669,
-0.07898779958486557,
-0.09435854852199554,
-0.10234483331441879,
-0.13474556803703308,
-0.0026453768368810415,
-0.13002336025238037,
0.036319661885499954,
0.0914398729801178,
0.017614029347896576,
-0.0015876444522291422,
0.09613640606403351,
0.029499690979719162,
0.012984476052224636,
0.06308400630950928,
-0.009124417789280415,
-0.04003028944134712,
-0.06771554797887802,
-0.08140894025564194,
0.013070023618638515,
0.017283517867326736,
0.0632736012339592,
-0.003202913561835885,
0.017849620431661606,
0.050210993736982346,
-0.030194994062185287,
-0.1258803755044937,
0.009952899068593979,
0.022109782323241234,
0.06720993667840958,
0.02614748291671276,
0.022470617666840553,
-0.0009805166628211737,
-0.0038765312638133764,
0.18837279081344604,
-0.05181984603404999,
-0.05326556786894798,
-0.13402141630649567,
0.1716351956129074,
0.008942045271396637,
-0.05652571842074394,
0.04301602020859718,
-0.0698210746049881,
-0.0018756968202069402,
0.18347235023975372,
0.18599820137023926,
-0.046965084969997406,
-0.0213483739644289,
-0.01961270347237587,
-0.010096260346472263,
-0.010853263549506664,
0.09563582390546799,
0.10659278184175491,
0.011942550539970398,
-0.07294026762247086,
-0.028951819986104965,
-0.06938063353300095,
-0.012009663507342339,
-0.03371736779808998,
0.0621040016412735,
0.011241946369409561,
-0.008531945757567883,
-0.04493586719036102,
0.06699218600988388,
-0.07206486165523529,
-0.057618480175733566,
0.014698265120387077,
-0.20619451999664307,
-0.1824122965335846,
0.005795297212898731,
0.060375288128852844,
-0.010966106317937374,
0.05134572833776474,
-0.0027359912637621164,
0.009979589842259884,
0.09077063947916031,
-0.033507268875837326,
-0.06349395960569382,
-0.06921332329511642,
0.0953727439045906,
-0.12896309792995453,
0.20442935824394226,
-0.026976924389600754,
0.045951347798109055,
0.13508708775043488,
0.04471168667078018,
-0.12431176751852036,
0.050236426293849945,
0.04425736144185066,
-0.003670318750664592,
0.027770472690463066,
0.10675777494907379,
-0.03206145390868187,
0.06135672703385353,
0.05730683356523514,
-0.10129200667142868,
-0.01714003086090088,
-0.06759439408779144,
-0.0031468835659325123,
-0.03233349323272705,
-0.05415179580450058,
-0.030612660571932793,
0.1354931741952896,
0.17643773555755615,
-0.05471676588058472,
-0.006306130439043045,
-0.0415441133081913,
0.014767038635909557,
0.058528389781713486,
-0.005081397946923971,
-0.055670879781246185,
-0.25447186827659607,
-0.010551881976425648,
0.07841343432664871,
0.0006384059088304639,
-0.25957444310188293,
-0.08878124505281448,
-0.017277011647820473,
-0.04556695744395256,
-0.08364951610565186,
0.10609880089759827,
0.08758647739887238,
0.03688118979334831,
-0.07112393528223038,
0.03370418772101402,
-0.06601783633232117,
0.1569395214319229,
-0.13981939852237701,
-0.07727614045143127
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# t5-small-finetuned-xsum-wei0
This model is a fine-tuned version of [t5-small](https://huggingface.co/t5-small) on the xsum dataset.
It achieves the following results on the evaluation set:
- Loss: 2.6289
- Rouge1: 25.7398
- Rouge2: 6.1361
- Rougel: 19.8262
- Rougelsum: 19.8284
- Gen Len: 18.7984
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 2e-05
- train_batch_size: 12
- eval_batch_size: 12
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 1
- mixed_precision_training: Native AMP
### Training results
| Training Loss | Epoch | Step | Validation Loss | Rouge1 | Rouge2 | Rougel | Rougelsum | Gen Len |
|:-------------:|:-----:|:----:|:---------------:|:-------:|:------:|:-------:|:---------:|:-------:|
| 2.858 | 1.0 | 1701 | 2.6289 | 25.7398 | 6.1361 | 19.8262 | 19.8284 | 18.7984 |
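
ROUGE figures like those in the table above are typically produced by generating summaries for the validation split and scoring them against the references, roughly as in the sketch below. The placeholder texts and the use of the `evaluate` package are assumptions; the original run, on Datasets 1.14.0, would have used `datasets.load_metric("rouge")` instead.

```python
import evaluate
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# Checkpoint name comes from this card; the placeholder document/reference are illustrative only.
model_name = "bochaowei/t5-small-finetuned-xsum-wei0"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)

document = "..."   # an article from the XSum validation split
reference = "..."  # its single-sentence reference summary

inputs = tokenizer("summarize: " + document, return_tensors="pt", truncation=True, max_length=512)
summary_ids = model.generate(**inputs, max_length=64, num_beams=4)
prediction = tokenizer.decode(summary_ids[0], skip_special_tokens=True)

rouge = evaluate.load("rouge")
scores = rouge.compute(predictions=[prediction], references=[reference])
print({k: round(v * 100, 4) for k, v in scores.items()})  # the table reports ROUGE scores scaled by 100
```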
### Framework versions
- Transformers 4.11.3
- Pytorch 1.9.0+cu111
- Datasets 1.14.0
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "datasets": ["xsum"], "metrics": ["rouge"], "model-index": [{"name": "t5-small-finetuned-xsum-wei0", "results": [{"task": {"type": "text2text-generation", "name": "Sequence-to-sequence Language Modeling"}, "dataset": {"name": "xsum", "type": "xsum", "args": "default"}, "metrics": [{"type": "rouge", "value": 25.7398, "name": "Rouge1"}]}]}]}
|
text2text-generation
|
bochaowei/t5-small-finetuned-xsum-wei0
|
[
"transformers",
"pytorch",
"tensorboard",
"t5",
"text2text-generation",
"generated_from_trainer",
"dataset:xsum",
"license:apache-2.0",
"model-index",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #t5 #text2text-generation #generated_from_trainer #dataset-xsum #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
t5-small-finetuned-xsum-wei0
============================
This model is a fine-tuned version of t5-small on the xsum dataset.
It achieves the following results on the evaluation set:
* Loss: 2.6289
* Rouge1: 25.7398
* Rouge2: 6.1361
* Rougel: 19.8262
* Rougelsum: 19.8284
* Gen Len: 18.7984
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 2e-05
* train\_batch\_size: 12
* eval\_batch\_size: 12
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 1
* mixed\_precision\_training: Native AMP
### Training results
### Framework versions
* Transformers 4.11.3
* Pytorch 1.9.0+cu111
* Datasets 1.14.0
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 12\n* eval\\_batch\\_size: 12\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 1\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.9.0+cu111\n* Datasets 1.14.0\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #t5 #text2text-generation #generated_from_trainer #dataset-xsum #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 12\n* eval\\_batch\\_size: 12\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 1\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.9.0+cu111\n* Datasets 1.14.0\n* Tokenizers 0.10.3"
] |
[
77,
113,
4,
34
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #t5 #text2text-generation #generated_from_trainer #dataset-xsum #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 12\n* eval\\_batch\\_size: 12\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 1\n* mixed\\_precision\\_training: Native AMP### Training results### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.9.0+cu111\n* Datasets 1.14.0\n* Tokenizers 0.10.3"
] |
[
-0.1074589341878891,
0.12799325585365295,
-0.0028053256683051586,
0.09292007237672806,
0.11187656968832016,
-0.001165493275038898,
0.15507888793945312,
0.1627812385559082,
-0.11289947479963303,
0.05975770950317383,
0.13899968564510345,
0.1276674121618271,
0.05297937989234924,
0.17660482227802277,
-0.06536241620779037,
-0.25123515725135803,
0.03965150937438011,
0.05438624322414398,
0.0003992293495684862,
0.12949953973293304,
0.08762653172016144,
-0.11898940801620483,
0.08919588476419449,
0.02995019592344761,
-0.18163250386714935,
-0.02100364677608013,
0.000671680667437613,
-0.0807400718331337,
0.11095846444368362,
0.02929452434182167,
0.09405191987752914,
0.033244539052248,
0.04617895558476448,
-0.15886113047599792,
0.010430463589727879,
0.061024267226457596,
0.007088577374815941,
0.10549861192703247,
0.060472987592220306,
-0.010193069465458393,
0.10860417038202286,
-0.07006484270095825,
0.07079833000898361,
0.019244780763983727,
-0.12690727412700653,
-0.25989943742752075,
-0.11564197391271591,
0.053539931774139404,
0.07715735584497452,
0.08503822982311249,
-0.008392168208956718,
0.17918646335601807,
-0.022380735725164413,
0.10955184698104858,
0.23616862297058105,
-0.301705926656723,
-0.05937875807285309,
-0.020769614726305008,
0.05130687728524208,
0.07701262831687927,
-0.07616385072469711,
-0.03546581417322159,
0.026109034195542336,
0.05051794648170471,
0.147442027926445,
-0.015395681373775005,
-0.03122602216899395,
-0.017877185717225075,
-0.13752481341362,
-0.07252857834100723,
0.1608085185289383,
0.031876206398010254,
-0.04473784938454628,
-0.07809458673000336,
-0.07601255923509598,
-0.18535494804382324,
-0.04892900213599205,
0.006733782589435577,
0.035040512681007385,
-0.03897592052817345,
-0.08331018686294556,
-0.019632747396826744,
-0.08114973455667496,
-0.033306628465652466,
-0.044854044914245605,
0.11354673653841019,
0.04301145672798157,
0.01587650738656521,
-0.06349728256464005,
0.0767139345407486,
-0.02394190803170204,
-0.1624210625886917,
-0.005595758091658354,
0.012482375837862492,
0.010538674890995026,
-0.04093165695667267,
-0.037800803780555725,
-0.12423653900623322,
0.003815684700384736,
0.15565642714500427,
-0.08884792029857635,
0.07464113086462021,
-0.0349605530500412,
0.03651741147041321,
-0.0717720165848732,
0.19259658455848694,
-0.023553958162665367,
0.002291490091010928,
0.02199062518775463,
0.08207884430885315,
0.05897872522473335,
-0.036200571805238724,
-0.11605463176965714,
0.04169273003935814,
0.11632619053125381,
0.02510383166372776,
-0.027848001569509506,
0.053826916962862015,
-0.045525144785642624,
-0.03022787906229496,
0.05816280096769333,
-0.102436363697052,
0.027932357043027878,
-0.0148517657071352,
-0.058545712381601334,
-0.010440553538501263,
0.017882956191897392,
0.003906575031578541,
-0.04384322091937065,
0.08423375338315964,
-0.09321478009223938,
0.013192111626267433,
-0.07821741700172424,
-0.13760995864868164,
0.0371415838599205,
-0.1024445965886116,
-0.002148308791220188,
-0.09123585373163223,
-0.1433057188987732,
-0.01317310519516468,
0.054876986891031265,
-0.04342179745435715,
-0.062170207500457764,
-0.046888966113328934,
-0.08754590898752213,
0.045874595642089844,
-0.01866729184985161,
0.0876149907708168,
-0.0707462728023529,
0.08943195641040802,
0.0360662080347538,
0.0685805082321167,
-0.039930809289216995,
0.04765506833791733,
-0.08860956877470016,
0.045223552733659744,
-0.20962946116924286,
0.05933675169944763,
-0.04754907265305519,
0.07903632521629333,
-0.10793068259954453,
-0.09932300448417664,
0.042689286172389984,
-0.02735999971628189,
0.10327746719121933,
0.09618362784385681,
-0.17529267072677612,
-0.07057563215494156,
0.1949135661125183,
-0.08731336891651154,
-0.1435607522726059,
0.1325874775648117,
-0.04466872662305832,
0.011136210523545742,
0.05493377521634102,
0.223547101020813,
0.056181829422712326,
-0.0990501120686531,
-0.01740361750125885,
-0.045389290899038315,
0.07099833339452744,
-0.07109340280294418,
0.07751309871673584,
0.0023585143499076366,
0.07052071392536163,
0.005243603605777025,
0.012689231894910336,
0.034162167459726334,
-0.08039317280054092,
-0.08164911717176437,
-0.0484875850379467,
-0.07058443129062653,
0.012399296276271343,
0.038198381662368774,
0.06086745485663414,
-0.12674261629581451,
-0.10854771733283997,
0.04867049679160118,
0.07887593656778336,
-0.08262902498245239,
0.05539381504058838,
-0.09694216400384903,
0.1214841827750206,
-0.07226520031690598,
-0.00691940076649189,
-0.18165959417819977,
-0.03392307087779045,
0.033785365521907806,
0.0058737220242619514,
0.013214774429798126,
-0.05422939732670784,
0.06232493743300438,
0.0739889144897461,
-0.03461097553372383,
-0.03428993001580238,
-0.019396115094423294,
0.0011325159575790167,
-0.1204695999622345,
-0.1931132972240448,
-0.04384288191795349,
-0.03676958009600639,
0.1044856384396553,
-0.15663689374923706,
0.04005139321088791,
0.057807594537734985,
0.11165279895067215,
0.042884472757577896,
-0.030708545818924904,
-0.0011161795118823647,
0.07482491433620453,
-0.048982080072164536,
-0.07331216335296631,
0.06254008412361145,
0.030186453834176064,
-0.0928896889090538,
0.01122109405696392,
-0.16290874779224396,
0.15493647754192352,
0.13445644080638885,
0.008384795859456062,
-0.06004597991704941,
-0.015904994681477547,
-0.05383134260773659,
-0.026301277801394463,
-0.022042520344257355,
0.0223483145236969,
0.16008402407169342,
0.028361763805150986,
0.15970748662948608,
-0.09906795620918274,
-0.054864510893821716,
0.04943578317761421,
-0.02791105955839157,
-0.00944508146494627,
0.1116301491856575,
0.03973492980003357,
-0.124892957508564,
0.14337001740932465,
0.13232871890068054,
-0.04808254912495613,
0.13333527743816376,
-0.06445107609033585,
-0.07428757846355438,
-0.0362730473279953,
-0.010166139341890812,
0.03168868273496628,
0.10714894533157349,
-0.11242473125457764,
-0.01936175674200058,
0.041330911219120026,
0.02666156180202961,
0.00620002206414938,
-0.18825815618038177,
-0.0032459604553878307,
0.04023367911577225,
-0.05006387457251549,
-0.05198267474770546,
-0.005271914880722761,
0.009712575934827328,
0.10050874948501587,
0.01559632457792759,
-0.050944551825523376,
0.030316568911075592,
0.01233154907822609,
-0.06770113110542297,
0.1864948719739914,
-0.10354278236627579,
-0.1740916222333908,
-0.12162180244922638,
-0.10684869438409805,
-0.055330790579319,
-0.005369632039219141,
0.07807274907827377,
-0.07682619243860245,
-0.0461110845208168,
-0.10497938096523285,
-0.03543839231133461,
-0.004979209508746862,
0.02320186235010624,
0.030437223613262177,
-0.02313845045864582,
0.06799005717039108,
-0.11041723191738129,
-0.030624937266111374,
-0.018625497817993164,
0.015406639315187931,
0.06238273158669472,
0.013703938573598862,
0.11482845991849899,
0.13010869920253754,
-0.02098087966442108,
0.03838309273123741,
-0.046375397592782974,
0.23811791837215424,
-0.07431567460298538,
-0.01367127988487482,
0.13553880155086517,
-0.02098599076271057,
0.09147195518016815,
0.1214863732457161,
0.04666636884212494,
-0.08709597587585449,
-0.005266087129712105,
0.00734054995700717,
-0.044817373156547546,
-0.2206629514694214,
-0.017371106892824173,
-0.055650725960731506,
0.008142379112541676,
0.10467244684696198,
0.025216568261384964,
0.02740880288183689,
0.05058613792061806,
0.010027381591498852,
0.060303691774606705,
-0.025090988725423813,
0.10917414724826813,
0.12934482097625732,
0.05378665775060654,
0.14276812970638275,
-0.053639814257621765,
-0.02497894875705242,
0.05140956863760948,
0.018832596018910408,
0.21368880569934845,
-0.010257545858621597,
0.2037346512079239,
0.04338008537888527,
0.15966066718101501,
0.02662285976111889,
0.06757418811321259,
-0.023831939324736595,
-0.0044116610661149025,
-0.016006769612431526,
-0.04827504605054855,
-0.04620913788676262,
0.01733585074543953,
-0.05632296949625015,
0.03153601288795471,
-0.1198943629860878,
0.015761857852339745,
0.048076361417770386,
0.2991895377635956,
0.042026814073324203,
-0.37676817178726196,
-0.11046711355447769,
0.006773718167096376,
-0.045008234679698944,
-0.043527714908123016,
0.003575146198272705,
0.09723667800426483,
-0.08129101991653442,
0.07165884226560593,
-0.08591920882463455,
0.10842549055814743,
-0.06402673572301865,
0.03300289809703827,
0.053046807646751404,
0.08488380908966064,
-0.014022774994373322,
0.051350660622119904,
-0.2803294062614441,
0.26872000098228455,
0.024721721187233925,
0.06638751924037933,
-0.07116779685020447,
0.01629297062754631,
0.013687439262866974,
0.04160438850522041,
0.06102291867136955,
-0.010054823011159897,
-0.10896262526512146,
-0.16375850141048431,
-0.10585761815309525,
0.015582427382469177,
0.07624634355306625,
0.009430247358977795,
0.12492696195840836,
-0.016691848635673523,
-0.0024821017868816853,
0.04299458488821983,
-0.020089533179998398,
-0.03005647286772728,
-0.11201586574316025,
0.027911242097616196,
0.04303687810897827,
-0.030745558440685272,
-0.07434733957052231,
-0.10627181082963943,
-0.04800717160105705,
0.16498732566833496,
0.022521579638123512,
-0.07422991096973419,
-0.1305168867111206,
0.03818925842642784,
0.08241311460733414,
-0.09261777997016907,
0.03267872706055641,
-0.013745195232331753,
0.12206945568323135,
-0.0006291373865678906,
-0.07408752292394638,
0.10873030126094818,
-0.05420639365911484,
-0.16428184509277344,
-0.052152860909700394,
0.12326045334339142,
0.009150196798145771,
0.06031205505132675,
-0.01074814423918724,
0.04230894893407822,
-0.03358158469200134,
-0.06787849217653275,
0.02769376151263714,
0.0060342769138514996,
0.10138782858848572,
-0.04542985185980797,
-0.01439259760081768,
0.026224439963698387,
-0.0716777965426445,
-0.024496007710695267,
0.18285579979419708,
0.25262823700904846,
-0.0831933543086052,
0.07062230259180069,
0.03659762069582939,
-0.05696260556578636,
-0.14688648283481598,
0.011284909211099148,
0.061825186014175415,
0.008426899090409279,
0.003306770231574774,
-0.17859293520450592,
0.028194045647978783,
0.08815208077430725,
-0.014133213087916374,
0.08526738733053207,
-0.3151911795139313,
-0.12428281456232071,
0.08845964819192886,
0.12659390270709991,
0.07703401893377304,
-0.1558118760585785,
-0.047864045947790146,
-0.02309456095099449,
-0.13384981453418732,
0.13757829368114471,
-0.10254886746406555,
0.11499965190887451,
-0.027163051068782806,
0.10781486332416534,
0.01309670228511095,
-0.05944589897990227,
0.11277931183576584,
-0.013463224284350872,
0.06879418343305588,
-0.06467466056346893,
0.02039303444325924,
0.09348852187395096,
-0.08543779700994492,
0.04378002509474754,
-0.10007110983133316,
0.03445431590080261,
-0.12611499428749084,
-0.014140802435576916,
-0.06663600355386734,
0.0035395510494709015,
-0.03497513011097908,
-0.03596718981862068,
-0.03799371421337128,
0.01062516588717699,
0.07275927066802979,
-0.024036558344960213,
0.18279287219047546,
0.015979859977960587,
0.15007543563842773,
0.14720019698143005,
0.09360025823116302,
-0.11703650653362274,
-0.06449494510889053,
0.0005951116909272969,
-0.03377130627632141,
0.04243915155529976,
-0.1583450436592102,
0.028240717947483063,
0.13700921833515167,
0.005573910661041737,
0.12456339597702026,
0.06348744034767151,
-0.06396618485450745,
0.0275848601013422,
0.05241449549794197,
-0.16979505121707916,
-0.09977670758962631,
-0.00014900513633619994,
0.04158719256520271,
-0.13206593692302704,
0.03576512634754181,
0.1307540386915207,
-0.05730408802628517,
-0.02568771131336689,
0.0053456565365195274,
0.019397448748350143,
-0.01271937508136034,
0.1769995093345642,
0.0308542400598526,
0.06430942565202713,
-0.10688863694667816,
0.07775326073169708,
0.0614481195807457,
-0.11417698860168457,
0.057837795466184616,
0.11324169486761093,
-0.09705883264541626,
-0.026858391240239143,
0.036494169384241104,
0.16921600699424744,
-0.06063899025321007,
-0.048185113817453384,
-0.1538301259279251,
-0.11517678201198578,
0.09703414887189865,
0.1840856671333313,
0.06467374414205551,
0.009808756411075592,
-0.04313131421804428,
-0.008685389533638954,
-0.12479282170534134,
0.10302001982927322,
0.05070367082953453,
0.07747862488031387,
-0.12268511205911636,
0.12256182730197906,
-0.009745701216161251,
0.042307667434215546,
-0.009218765422701836,
0.015988776460289955,
-0.10960526764392853,
0.0033797803334891796,
-0.1423777937889099,
0.00832214206457138,
-0.04494244605302811,
-0.0011973814107477665,
-0.023193443194031715,
-0.03375978022813797,
-0.05914922058582306,
0.01675104722380638,
-0.11260288953781128,
-0.036319516599178314,
0.010412446223199368,
0.029330682009458542,
-0.12630459666252136,
-0.01920931786298752,
0.010201842524111271,
-0.0862070694565773,
0.08357042819261551,
0.044234272092580795,
-0.004748845472931862,
0.020747167989611626,
-0.02305678278207779,
0.00012171916023362428,
0.04829733818769455,
0.00537034822627902,
0.07995195686817169,
-0.11924608796834946,
-0.013915679417550564,
0.008945612236857414,
0.01340132113546133,
0.028972944244742393,
0.115643709897995,
-0.11617381870746613,
-0.007972880266606808,
0.007970787584781647,
-0.057109441608190536,
-0.06905458867549896,
0.068453848361969,
0.09392105042934418,
0.022841304540634155,
0.18228091299533844,
-0.07420357316732407,
0.035178784281015396,
-0.20087119936943054,
-0.0038895888719707727,
0.0049581220373511314,
-0.14096827805042267,
-0.06500066071748734,
-0.03850414976477623,
0.06734596937894821,
-0.07149671763181686,
0.11092301458120346,
0.004950069356709719,
0.03931140899658203,
0.04493129253387451,
-0.03126824274659157,
-0.02071281522512436,
0.015212364494800568,
0.17608197033405304,
0.02058245986700058,
-0.04343518614768982,
0.08155103027820587,
0.020666198804974556,
0.081175297498703,
0.12803125381469727,
0.19913002848625183,
0.11667686700820923,
0.06320013105869293,
0.0961175486445427,
0.023615676909685135,
-0.030358344316482544,
-0.18801501393318176,
0.03964875265955925,
-0.032345857471227646,
0.14836564660072327,
-0.0047457581385970116,
0.20506168901920319,
0.12052969634532928,
-0.1612660139799118,
0.048087891191244125,
-0.04112517461180687,
-0.08629781007766724,
-0.10536807030439377,
-0.09831316769123077,
-0.08669181168079376,
-0.13120131194591522,
-0.010870601050555706,
-0.12808246910572052,
0.04765982925891876,
0.061899054795503616,
0.015682265162467957,
-0.006216309033334255,
0.12053301930427551,
0.028537537902593613,
0.008887127041816711,
0.06321465969085693,
0.007875645533204079,
-0.034891605377197266,
-0.05035153776407242,
-0.07208861410617828,
0.017868410795927048,
0.00036957956035621464,
0.05477017164230347,
-0.008419430814683437,
-0.006293799262493849,
0.04942861199378967,
-0.0280731450766325,
-0.11799081414937973,
0.012490789406001568,
0.030224161222577095,
0.07340351492166519,
0.0450759083032608,
0.013756993226706982,
0.004066175781190395,
-0.014913016930222511,
0.20068150758743286,
-0.07401707768440247,
-0.05277926102280617,
-0.11658910661935806,
0.2477595955133438,
0.010925473645329475,
-0.0564635768532753,
0.034320931881666183,
-0.06638534367084503,
-0.00799079891294241,
0.2009539008140564,
0.17506442964076996,
-0.04073476046323776,
-0.01607629284262657,
-0.02119165100157261,
-0.00929268542677164,
-0.025587672367691994,
0.11024604737758636,
0.1273358315229416,
0.02946407161653042,
-0.07323799282312393,
-0.02998521365225315,
-0.06204081326723099,
-0.01574384793639183,
-0.04924844577908516,
0.07520291209220886,
0.023194268345832825,
-0.0025199258234351873,
-0.03083890676498413,
0.054563362151384354,
-0.054510220885276794,
-0.06129785627126694,
0.004910604562610388,
-0.21439644694328308,
-0.17130769789218903,
0.004245540127158165,
0.0803946778178215,
-0.01175619475543499,
0.058988574892282486,
-0.0029474766924977303,
0.00823167059570551,
0.10027970373630524,
-0.018093015998601913,
-0.07131160795688629,
-0.07739321142435074,
0.10197465121746063,
-0.15690012276172638,
0.19021447002887726,
-0.0312860831618309,
0.030104180797934532,
0.140971839427948,
0.05221068859100342,
-0.11158500611782074,
0.05537766218185425,
0.04982685670256615,
-0.04700254648923874,
0.014416840858757496,
0.1290515959262848,
-0.03030751459300518,
0.07255911082029343,
0.0453176386654377,
-0.1120244711637497,
-0.012037028558552265,
-0.09178739041090012,
-0.020289523527026176,
-0.01986098103225231,
-0.045981504023075104,
-0.04989011958241463,
0.13199079036712646,
0.19831021130084991,
-0.04609641432762146,
-0.007404172793030739,
-0.06241052970290184,
0.009049183689057827,
0.06719040870666504,
-0.010307771153748035,
-0.053266700357198715,
-0.2554090619087219,
0.0003185949753969908,
0.07628002762794495,
-0.007989364676177502,
-0.27227717638015747,
-0.08573779463768005,
-0.0004909297567792237,
-0.043751854449510574,
-0.10848870128393173,
0.0937419906258583,
0.07124421000480652,
0.04228309541940689,
-0.06897798180580139,
0.005559871438890696,
-0.06642989069223404,
0.1607312709093094,
-0.134971484541893,
-0.06209824979305267
] |
null | null |
transformers
|
20% of the training data
---
license: apache-2.0
tags:
- generated_from_trainer
datasets:
- xsum
metrics:
- rouge
model-index:
- name: t5-small-finetuned-xsum-wei1
results:
- task:
name: Sequence-to-sequence Language Modeling
type: text2text-generation
dataset:
name: xsum
type: xsum
args: default
metrics:
- name: Rouge1
type: rouge
value: 27.5875
---
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# t5-small-finetuned-xsum-wei1
This model is a fine-tuned version of [t5-small](https://huggingface.co/t5-small) on the xsum dataset.
It achieves the following results on the evaluation set:
- Loss: 2.5287
- Rouge1: 27.5875
- Rouge2: 7.4083
- Rougel: 21.5654
- Rougelsum: 21.5716
- Gen Len: 18.8205
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
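
The note at the top of this card indicates the model was trained on 20% of the XSum training data. How that subset was drawn is not documented; one plausible way to reproduce a subset of that size is sketched below (the shuffle seed and selection method are assumptions).

```python
from datasets import load_dataset

# Roughly 20% of the XSum training split; the shuffle seed and selection method are assumptions.
raw = load_dataset("xsum")
train_subset = raw["train"].shuffle(seed=42).select(range(len(raw["train"]) // 5))
print(len(train_subset))  # ~40k examples, consistent with ~3401 steps per epoch at batch size 12
```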
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 2e-05
- train_batch_size: 12
- eval_batch_size: 12
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 2
- mixed_precision_training: Native AMP
### Training results
| Training Loss | Epoch | Step | Validation Loss | Rouge1 | Rouge2 | Rougel | Rougelsum | Gen Len |
|:-------------:|:-----:|:----:|:---------------:|:-------:|:------:|:-------:|:---------:|:-------:|
| 2.7677 | 1.0 | 3401 | 2.5441 | 27.4235 | 7.2208 | 21.3535 | 21.3636 | 18.8311 |
| 2.735 | 2.0 | 6802 | 2.5287 | 27.5875 | 7.4083 | 21.5654 | 21.5716 | 18.8205 |
### Framework versions
- Transformers 4.11.3
- Pytorch 1.9.0+cu111
- Datasets 1.14.0
- Tokenizers 0.10.3
|
{}
|
text2text-generation
|
bochaowei/t5-small-finetuned-xsum-wei1
|
[
"transformers",
"pytorch",
"tensorboard",
"t5",
"text2text-generation",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #t5 #text2text-generation #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
20% of the training data
------------------------
license: apache-2.0
tags:
* generated\_from\_trainer
datasets:
* xsum
metrics:
* rouge
model-index:
* name: t5-small-finetuned-xsum-wei1
results:
+ task:
name: Sequence-to-sequence Language Modeling
type: text2text-generation
dataset:
name: xsum
type: xsum
args: default
metrics:
- name: Rouge1
type: rouge
value: 27.5875
---
t5-small-finetuned-xsum-wei1
============================
This model is a fine-tuned version of t5-small on the xsum dataset.
It achieves the following results on the evaluation set:
* Loss: 2.5287
* Rouge1: 27.5875
* Rouge2: 7.4083
* Rougel: 21.5654
* Rougelsum: 21.5716
* Gen Len: 18.8205
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 2e-05
* train\_batch\_size: 12
* eval\_batch\_size: 12
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 2
* mixed\_precision\_training: Native AMP
### Training results
### Framework versions
* Transformers 4.11.3
* Pytorch 1.9.0+cu111
* Datasets 1.14.0
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 12\n* eval\\_batch\\_size: 12\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 2\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.9.0+cu111\n* Datasets 1.14.0\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #t5 #text2text-generation #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 12\n* eval\\_batch\\_size: 12\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 2\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.9.0+cu111\n* Datasets 1.14.0\n* Tokenizers 0.10.3"
] |
[
52,
113,
4,
34
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #t5 #text2text-generation #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 12\n* eval\\_batch\\_size: 12\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 2\n* mixed\\_precision\\_training: Native AMP### Training results### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.9.0+cu111\n* Datasets 1.14.0\n* Tokenizers 0.10.3"
] |
[
-0.092025525867939,
0.021059297025203705,
-0.0022535722237080336,
0.09244439005851746,
0.1668420433998108,
0.026458540931344032,
0.12460797280073166,
0.1251605749130249,
-0.10517226159572601,
0.03372708335518837,
0.12329793721437454,
0.15999822318553925,
0.018496645614504814,
0.1343013197183609,
-0.07194299250841141,
-0.2830175459384918,
-0.01125409547239542,
0.04781951382756233,
-0.03557444363832474,
0.13735195994377136,
0.09087809175252914,
-0.1400161236524582,
0.06606448441743851,
-0.0043764435686171055,
-0.20233136415481567,
0.01031877938657999,
0.016777049750089645,
-0.06155518814921379,
0.16337159276008606,
0.028639081865549088,
0.13614076375961304,
0.017535753548145294,
0.08414473384618759,
-0.19114534556865692,
0.014580664224922657,
0.0500929020345211,
0.01650644652545452,
0.08592522144317627,
0.06348997354507446,
-0.014809894375503063,
0.10882236808538437,
-0.10078564286231995,
0.06245925650000572,
0.0011973925866186619,
-0.126919224858284,
-0.18573695421218872,
-0.06831873953342438,
0.0033647301606833935,
0.06960761547088623,
0.09743557125329971,
-0.021046597510576248,
0.1528109759092331,
-0.0862959772348404,
0.11345328390598297,
0.22132714092731476,
-0.28774794936180115,
-0.07076041400432587,
-0.0011932477355003357,
0.03536026552319527,
0.10096988826990128,
-0.10184838622808456,
-0.013602633960545063,
0.04816541075706482,
0.05576743185520172,
0.1310669183731079,
-0.03213924169540405,
-0.11305341124534607,
0.012459848076105118,
-0.14722749590873718,
-0.03252565488219261,
0.11112377047538757,
0.02361993119120598,
-0.025115063413977623,
-0.04531358554959297,
-0.0702560767531395,
-0.16333749890327454,
-0.046838123351335526,
-0.022296568378806114,
0.03347437456250191,
-0.03808383643627167,
-0.09475499391555786,
-0.023190684616565704,
-0.1064673513174057,
-0.05278218537569046,
-0.06984369456768036,
0.1416577249765396,
0.03769155219197273,
-0.008879192173480988,
-0.04052996635437012,
0.10702786594629288,
-0.01754605770111084,
-0.13094504177570343,
0.03272531181573868,
0.028171103447675705,
-0.02204703353345394,
-0.06124139577150345,
-0.07723898440599442,
-0.10838014632463455,
-0.005235990509390831,
0.10018433630466461,
-0.0613664910197258,
0.06450363993644714,
-0.0005218714941293001,
0.030912332236766815,
-0.08553101867437363,
0.19482018053531647,
-0.023987876251339912,
-0.014803381636738777,
0.0038948047440499067,
0.056591469794511795,
-0.0028504030779004097,
-0.02597948908805847,
-0.1036439761519432,
0.007077035028487444,
0.1401921659708023,
0.010452629998326302,
-0.07427085191011429,
0.0711764320731163,
-0.045921456068754196,
-0.01898754946887493,
-0.049376651644706726,
-0.10104150325059891,
0.04149148240685463,
-0.0104909036308527,
-0.065595842897892,
0.008435413241386414,
0.010865379124879837,
0.029642825946211815,
-0.0392947793006897,
0.1347988098859787,
-0.08124557882547379,
0.04319373890757561,
-0.10496651381254196,
-0.1304864138364792,
0.007983183488249779,
-0.05801042169332504,
0.007859393022954464,
-0.09918010234832764,
-0.15255790948867798,
-0.023240936920046806,
0.04676961526274681,
-0.032474543899297714,
-0.04754624143242836,
-0.06819213926792145,
-0.06106838211417198,
0.02290693111717701,
-0.025289831683039665,
0.17530952394008636,
-0.0572882816195488,
0.11715541034936905,
0.04335159808397293,
0.0673619955778122,
-0.025613956153392792,
0.050294164568185806,
-0.08148317784070969,
0.0026760969776660204,
-0.18371723592281342,
0.07412105798721313,
-0.03128466382622719,
0.06603270024061203,
-0.0809682086110115,
-0.10598922520875931,
0.000760735827498138,
0.008855953812599182,
0.09803316742181778,
0.09953254461288452,
-0.1682170033454895,
-0.08077292889356613,
0.18211662769317627,
-0.05458930507302284,
-0.09405971318483353,
0.12715759873390198,
-0.07287168502807617,
0.04900234937667847,
0.08071480691432953,
0.19444724917411804,
0.04150964692234993,
-0.07977578788995743,
0.03304613381624222,
-0.03382832184433937,
0.06349063664674759,
-0.028206199407577515,
0.041368477046489716,
0.01788194105029106,
0.0004392007540445775,
0.019963907077908516,
-0.0008234884589910507,
0.05685722455382347,
-0.10925289243459702,
-0.07837113738059998,
-0.03933345898985863,
-0.08431770652532578,
0.057552456855773926,
0.059577714651823044,
0.08608349412679672,
-0.11394163966178894,
-0.08301511406898499,
0.06929881870746613,
0.06264390796422958,
-0.07695256173610687,
0.044026657938957214,
-0.05890992283821106,
0.06069926172494888,
-0.03917206451296806,
-0.019109662622213364,
-0.20783039927482605,
-0.028365695849061012,
0.0077142296358942986,
0.05855676159262657,
0.03650779649615288,
0.01113954745233059,
0.07687180489301682,
0.06152243912220001,
-0.05412401258945465,
-0.020554523915052414,
-0.014155172742903233,
-0.003942030016332865,
-0.1468653380870819,
-0.1830207258462906,
-0.011340474709868431,
-0.02528906986117363,
0.12749548256397247,
-0.2145901620388031,
0.025474512949585915,
-0.011169265024363995,
0.08077973127365112,
0.017023857682943344,
-0.0054121180437505245,
-0.04412149637937546,
0.09499531239271164,
-0.03543385863304138,
-0.04316540062427521,
0.07784264534711838,
0.005227356217801571,
-0.08639626950025558,
-0.03184831142425537,
-0.13696575164794922,
0.1498376578092575,
0.1280374825000763,
-0.13647256791591644,
-0.09393759816884995,
-0.024266105145215988,
-0.05343254283070564,
-0.02876891940832138,
-0.05071311444044113,
0.02062765508890152,
0.21501938998699188,
0.002428176812827587,
0.15516653656959534,
-0.07058294117450714,
-0.046879842877388,
0.013227827847003937,
-0.036099281162023544,
0.035037681460380554,
0.12084318697452545,
0.09309584647417068,
-0.09346884489059448,
0.11416143923997879,
0.12106036394834518,
-0.09430857747793198,
0.14432711899280548,
-0.03325532749295235,
-0.08406824618577957,
-0.010870602913200855,
-0.017343666404485703,
0.00210847076959908,
0.06986092031002045,
-0.13484559953212738,
-0.01524506788700819,
0.015634950250387192,
0.027218185365200043,
0.025641141459345818,
-0.23102769255638123,
-0.02600725181400776,
0.03881128877401352,
-0.03390401974320412,
-0.0006280704401433468,
-0.01694413460791111,
0.025726476684212685,
0.11976936459541321,
0.002205608645454049,
-0.06963998079299927,
0.020615849643945694,
0.0019466778030619025,
-0.08387196063995361,
0.20984558761119843,
-0.07966714352369308,
-0.17050659656524658,
-0.09834893047809601,
-0.09839694947004318,
-0.024746285751461983,
0.003094971412792802,
0.05751783773303032,
-0.09471980482339859,
-0.027490222826600075,
-0.061489950865507126,
0.024462567642331123,
-0.0003645686083473265,
0.035761017352342606,
-0.005213277414441109,
-0.012144397012889385,
0.04946232587099075,
-0.09520415961742401,
-0.01588771864771843,
-0.05416698381304741,
-0.04787849634885788,
0.07823240011930466,
0.028834398835897446,
0.1078295186161995,
0.17022468149662018,
-0.03068634867668152,
0.017976347357034683,
-0.04340779036283493,
0.22536729276180267,
-0.07857342809438705,
-0.019949331879615784,
0.10655944794416428,
-0.02743368223309517,
0.058614376932382584,
0.11029733717441559,
0.05235392227768898,
-0.09561873972415924,
0.032460346817970276,
0.03178059682250023,
-0.02861090935766697,
-0.22254517674446106,
-0.03316653519868851,
-0.05960281193256378,
-0.03019964136183262,
0.09098315238952637,
0.009961235336959362,
0.04697376862168312,
0.057280439883470535,
0.0527242049574852,
0.07789503037929535,
-0.029469329863786697,
0.061497051268815994,
0.14768335223197937,
0.040869712829589844,
0.13876178860664368,
-0.036519333720207214,
-0.09385374933481216,
0.03500243276357651,
-0.024165958166122437,
0.23187963664531708,
0.0026515221688896418,
0.111570343375206,
0.03779631480574608,
0.14925864338874817,
0.015026269480586052,
0.08860704302787781,
-0.0026492648757994175,
-0.04526486620306969,
-0.017332956194877625,
-0.03310352563858032,
-0.04028918966650963,
0.01654653809964657,
-0.032784007489681244,
0.02545916847884655,
-0.12902051210403442,
-0.01986360177397728,
0.05147236958146095,
0.24757300317287445,
0.04724104329943657,
-0.3220520615577698,
-0.07700857520103455,
0.006620192900300026,
-0.06436477601528168,
-0.02555685304105282,
0.01178478542715311,
0.10067669302225113,
-0.10964551568031311,
0.0346783809363842,
-0.08600110560655594,
0.1002446860074997,
-0.05650441721081734,
0.057325854897499084,
0.03226643428206444,
0.10043131560087204,
-0.010662063956260681,
0.06775511801242828,
-0.3315392732620239,
0.28193730115890503,
0.009839099831879139,
0.06735062599182129,
-0.07995632290840149,
-0.014660867862403393,
0.03764106333255768,
0.03376587852835655,
0.032269589602947235,
-0.02203722856938839,
-0.061933089047670364,
-0.18477727472782135,
-0.056730419397354126,
0.03856237977743149,
0.12157558649778366,
-0.015380606055259705,
0.12300518155097961,
-0.0302848219871521,
0.009446079842746258,
0.07055404782295227,
-0.026195406913757324,
-0.07019644230604172,
-0.09015387296676636,
-0.00775063456967473,
0.01723175309598446,
-0.010914744809269905,
-0.05925939604640007,
-0.12306902557611465,
-0.10003092885017395,
0.16047054529190063,
0.030696813017129898,
-0.025550948455929756,
-0.12312433868646622,
0.10309860110282898,
0.07201053947210312,
-0.07966116815805435,
0.02559397555887699,
0.02440391480922699,
0.07916247844696045,
0.017970208078622818,
-0.06122075393795967,
0.11792541295289993,
-0.05002429708838463,
-0.15481352806091309,
-0.05659255012869835,
0.11264026165008545,
0.025540996342897415,
0.07042457908391953,
-0.011251077055931091,
0.016425225883722305,
-0.02874705195426941,
-0.080205537378788,
0.020358072593808174,
-0.030636345967650414,
0.06817694753408432,
0.027703335508704185,
-0.06062128394842148,
0.015455798245966434,
-0.07543361186981201,
-0.036697667092084885,
0.2303551584482193,
0.23259976506233215,
-0.07166770100593567,
0.014088793657720089,
0.024778973311185837,
-0.07305024564266205,
-0.17965741455554962,
0.05183253064751625,
0.07672648131847382,
0.024556273594498634,
0.04414432495832443,
-0.17043574154376984,
0.08163157105445862,
0.0772782638669014,
0.005478948354721069,
0.0984271690249443,
-0.32121965289115906,
-0.13730724155902863,
0.10422317683696747,
0.15946830809116364,
0.12346746027469635,
-0.1365012228488922,
-0.018522681668400764,
-0.017714690417051315,
-0.09379307180643082,
0.11270186305046082,
-0.08477352559566498,
0.13491138815879822,
-0.006719463039189577,
0.13032294809818268,
0.01842990331351757,
-0.05282037332653999,
0.09729833900928497,
-0.008500222116708755,
0.08541064709424973,
-0.06361405551433563,
-0.016371380537748337,
0.023705577477812767,
-0.04112975299358368,
-0.0035312441177666187,
-0.06525061279535294,
0.017940690740942955,
-0.0983627587556839,
-0.03460584208369255,
-0.07625269889831543,
0.022398212924599648,
-0.03374018520116806,
-0.062045078724622726,
-0.020796455442905426,
0.019189637154340744,
0.046896807849407196,
-0.012875028885900974,
0.10778190940618515,
-0.01848190650343895,
0.15995660424232483,
0.09210173785686493,
0.10840097069740295,
-0.06856945157051086,
-0.02531464770436287,
-0.005104046314954758,
-0.015437884256243706,
0.032050155103206635,
-0.13231779634952545,
0.024867868050932884,
0.1584782898426056,
0.01006227731704712,
0.1503368318080902,
0.08575227111577988,
-0.03047570399940014,
0.021475493907928467,
0.06470626592636108,
-0.16767719388008118,
-0.08558396250009537,
-0.00952786672860384,
-0.05433247238397598,
-0.0982886403799057,
0.0229713786393404,
0.11960815638303757,
-0.06175750121474266,
-0.01625063084065914,
-0.011786764487624168,
-0.005633166525512934,
-0.053837236016988754,
0.2028617262840271,
0.04952593520283699,
0.05123584717512131,
-0.0911550372838974,
0.04880587384104729,
0.051984287798404694,
-0.09904883801937103,
0.025026580318808556,
0.11636702716350555,
-0.07424892485141754,
-0.03871932253241539,
0.09378433972597122,
0.20663967728614807,
-0.052333079278469086,
-0.030161604285240173,
-0.14334343373775482,
-0.12608815729618073,
0.0837700366973877,
0.1796724945306778,
0.08785030245780945,
0.001843577716499567,
-0.07146760076284409,
0.015612290240824223,
-0.1331697553396225,
0.08845842629671097,
0.07160135358572006,
0.0676351860165596,
-0.11866692453622818,
0.19845840334892273,
0.0003898621944244951,
0.04634016379714012,
-0.030752871185541153,
0.014021688140928745,
-0.1090482696890831,
0.025157762691378593,
-0.13957567512989044,
-0.030659331008791924,
-0.0027201997581869364,
-0.0025905545335263014,
-0.004428676795214415,
-0.06250907480716705,
-0.05745221674442291,
-0.007325334474444389,
-0.11844877898693085,
-0.01964188553392887,
0.020883118733763695,
0.03334129974246025,
-0.10573096573352814,
-0.039952658116817474,
0.02941562980413437,
-0.06722743809223175,
0.06969194859266281,
0.04418341815471649,
-0.0006818489055149257,
0.0681208148598671,
-0.1337125599384308,
0.0028897160664200783,
0.06300130486488342,
0.007741584442555904,
0.06892858445644379,
-0.08273165673017502,
-0.0013083178782835603,
-0.0003288834705017507,
0.09637451171875,
0.034630224108695984,
0.08293324708938599,
-0.12452515214681625,
0.009311501868069172,
-0.0375128872692585,
-0.09594202786684036,
-0.06652292609214783,
0.036279402673244476,
0.04102693498134613,
0.02297755889594555,
0.1768387258052826,
-0.09515709429979324,
0.052667468786239624,
-0.21802163124084473,
0.0018081627786159515,
-0.01255337055772543,
-0.11494580656290054,
-0.07456149905920029,
-0.08618541061878204,
0.08041272312402725,
-0.05877958983182907,
0.1178673803806305,
0.026780756190419197,
0.07764855027198792,
0.03253767266869545,
-0.041088614612817764,
-0.0072923568077385426,
0.032408326864242554,
0.19849994778633118,
0.03714016079902649,
-0.04398658871650696,
0.047501858323812485,
0.059757571667432785,
0.10602764785289764,
0.13242457807064056,
0.2314358949661255,
0.13475918769836426,
0.0068961093202233315,
0.0948338583111763,
0.012636538594961166,
-0.03879483416676521,
-0.1478165090084076,
0.03759685531258583,
-0.050930608063936234,
0.09924396127462387,
-0.02991855889558792,
0.22110030055046082,
0.07489733397960663,
-0.16194480657577515,
0.04347073286771774,
-0.0581439845263958,
-0.10369649529457092,
-0.1087266057729721,
-0.031675197184085846,
-0.09430057555437088,
-0.139177143573761,
-0.0033139511942863464,
-0.12390652298927307,
0.035777587443590164,
0.0968184843659401,
0.025875402614474297,
-0.02284424938261509,
0.16384702920913696,
0.04591560363769531,
0.012283088639378548,
0.09009437263011932,
0.002051302930340171,
-0.0010125241242349148,
-0.09787994623184204,
-0.07388018071651459,
-0.02132752165198326,
-0.0007146730786189437,
0.040746647864580154,
-0.04704097658395767,
-0.07308444380760193,
0.028159625828266144,
-0.03042304329574108,
-0.10795371979475021,
0.012835506349802017,
0.027358023449778557,
0.07500146329402924,
0.04468410462141037,
0.012569937855005264,
-0.006242480594664812,
-0.02040589414536953,
0.23652617633342743,
-0.08027207106351852,
-0.10106991976499557,
-0.10460428148508072,
0.25859004259109497,
0.02818569354712963,
-0.003931014332920313,
0.026596227660775185,
-0.061472684144973755,
0.006759725045412779,
0.26033100485801697,
0.17302045226097107,
-0.11728822439908981,
-0.00934702716767788,
0.008371646516025066,
-0.0050290548242628574,
-0.01157099287956953,
0.12873998284339905,
0.1391865611076355,
0.056407056748867035,
-0.10824872553348541,
-0.04186071455478668,
-0.05002450570464134,
-0.012280497699975967,
-0.04071858152747154,
0.0674251839518547,
0.05531313270330429,
0.01301455870270729,
-0.04055153578519821,
0.07224944233894348,
-0.07835642993450165,
-0.08321834355592728,
0.0037451342213898897,
-0.2194160521030426,
-0.16694699227809906,
-0.00574052007868886,
0.09855350106954575,
-0.011169101111590862,
0.06562936305999756,
-0.025889327749609947,
-0.0032429404091089964,
0.054155535995960236,
-0.022990861907601357,
-0.053018588572740555,
-0.051072828471660614,
0.0916966125369072,
-0.13831274211406708,
0.1501818299293518,
-0.042186494916677475,
0.06093381717801094,
0.13090910017490387,
0.06352400034666061,
-0.04955007880926132,
0.07102903723716736,
0.042010825127363205,
-0.07518307119607925,
0.04063045606017113,
0.11901048570871353,
-0.034647777676582336,
0.03420576453208923,
0.057315926998853683,
-0.14982925355434418,
0.035091038793325424,
-0.09043341130018234,
-0.035139959305524826,
-0.027195237576961517,
-0.05052787438035011,
-0.057192277163267136,
0.12365052103996277,
0.22135190665721893,
-0.018655642867088318,
0.031236562877893448,
-0.07537055015563965,
0.0008685264619998634,
0.035131946206092834,
0.06307349354028702,
-0.08515630662441254,
-0.2615763545036316,
-0.008068710565567017,
0.08607905358076096,
-0.02717544324696064,
-0.2761549651622772,
-0.09772715717554092,
-0.004333134740591049,
-0.05943251773715019,
-0.11649973690509796,
0.11193397641181946,
0.09229950606822968,
0.04053911939263344,
-0.049011386930942535,
-0.09392859041690826,
-0.06899212300777435,
0.17839789390563965,
-0.14602363109588623,
-0.07377665489912033
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# t5-small-finetuned-xsum-wei2
This model is a fine-tuned version of [t5-small](https://huggingface.co/t5-small) on the xsum dataset.
It achieves the following results on the evaluation set:
- Loss: 2.4131
- Rouge1: 29.2287
- Rouge2: 8.4073
- Rougel: 23.0934
- Rougelsum: 23.0954
- Gen Len: 18.8236
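As a minimal illustration (not part of the original card), a checkpoint like this can be loaded through the standard summarization pipeline; the article string below is a placeholder.

```python
# Minimal usage sketch (illustrative only; the article string is a placeholder).
from transformers import pipeline

summarizer = pipeline("summarization", model="bochaowei/t5-small-finetuned-xsum-wei2")
article = "..."  # any English news article, as in the XSum dataset
print(summarizer(article, max_length=60, min_length=10)[0]["summary_text"])
```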
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 4e-05
- train_batch_size: 12
- eval_batch_size: 12
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 1
- mixed_precision_training: Native AMP
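Expressed as `Seq2SeqTrainingArguments`, these settings correspond roughly to the sketch below. This is an illustration, not the author's actual training script; the Adam betas and epsilon listed above are the optimizer defaults and therefore need no explicit arguments.

```python
# Hedged configuration sketch mirroring the hyperparameters listed above.
from transformers import Seq2SeqTrainingArguments

training_args = Seq2SeqTrainingArguments(
    output_dir="t5-small-finetuned-xsum-wei2",
    learning_rate=4e-5,
    per_device_train_batch_size=12,
    per_device_eval_batch_size=12,
    seed=42,
    lr_scheduler_type="linear",
    num_train_epochs=1,
    fp16=True,                   # "Native AMP" mixed-precision training
    predict_with_generate=True,  # generate summaries during evaluation for ROUGE
)
```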
### Training results
| Training Loss | Epoch | Step | Validation Loss | Rouge1 | Rouge2 | Rougel | Rougelsum | Gen Len |
|:-------------:|:-----:|:-----:|:---------------:|:-------:|:------:|:-------:|:---------:|:-------:|
| 2.633 | 1.0 | 17004 | 2.4131 | 29.2287 | 8.4073 | 23.0934 | 23.0954 | 18.8236 |
### Framework versions
- Transformers 4.11.3
- Pytorch 1.9.0+cu111
- Datasets 1.14.0
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "datasets": ["xsum"], "metrics": ["rouge"], "model-index": [{"name": "t5-small-finetuned-xsum-wei2", "results": [{"task": {"type": "text2text-generation", "name": "Sequence-to-sequence Language Modeling"}, "dataset": {"name": "xsum", "type": "xsum", "args": "default"}, "metrics": [{"type": "rouge", "value": 29.2287, "name": "Rouge1"}]}]}]}
|
text2text-generation
|
bochaowei/t5-small-finetuned-xsum-wei2
|
[
"transformers",
"pytorch",
"tensorboard",
"t5",
"text2text-generation",
"generated_from_trainer",
"dataset:xsum",
"license:apache-2.0",
"model-index",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #t5 #text2text-generation #generated_from_trainer #dataset-xsum #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
t5-small-finetuned-xsum-wei2
============================
This model is a fine-tuned version of t5-small on the xsum dataset.
It achieves the following results on the evaluation set:
* Loss: 2.4131
* Rouge1: 29.2287
* Rouge2: 8.4073
* Rougel: 23.0934
* Rougelsum: 23.0954
* Gen Len: 18.8236
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 4e-05
* train\_batch\_size: 12
* eval\_batch\_size: 12
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 1
* mixed\_precision\_training: Native AMP
### Training results
### Framework versions
* Transformers 4.11.3
* Pytorch 1.9.0+cu111
* Datasets 1.14.0
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 4e-05\n* train\\_batch\\_size: 12\n* eval\\_batch\\_size: 12\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 1\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.9.0+cu111\n* Datasets 1.14.0\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #t5 #text2text-generation #generated_from_trainer #dataset-xsum #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 4e-05\n* train\\_batch\\_size: 12\n* eval\\_batch\\_size: 12\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 1\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.9.0+cu111\n* Datasets 1.14.0\n* Tokenizers 0.10.3"
] |
[
77,
113,
4,
34
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #t5 #text2text-generation #generated_from_trainer #dataset-xsum #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 4e-05\n* train\\_batch\\_size: 12\n* eval\\_batch\\_size: 12\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 1\n* mixed\\_precision\\_training: Native AMP### Training results### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.9.0+cu111\n* Datasets 1.14.0\n* Tokenizers 0.10.3"
] |
[
-0.10806673765182495,
0.1272156685590744,
-0.002769108396023512,
0.09340924769639969,
0.11236200481653214,
-0.000779868452809751,
0.15493476390838623,
0.16228511929512024,
-0.11336699873209,
0.05980030447244644,
0.1389257311820984,
0.12752880156040192,
0.05289485305547714,
0.17613038420677185,
-0.06515885144472122,
-0.25074005126953125,
0.03940366581082344,
0.05467585101723671,
0.0006518431473523378,
0.12919838726520538,
0.08731631934642792,
-0.11940330266952515,
0.08954981714487076,
0.029798777773976326,
-0.18178357183933258,
-0.020794842392206192,
0.0001980540546355769,
-0.08070296049118042,
0.11095311492681503,
0.029295850545167923,
0.09412375837564468,
0.03280030936002731,
0.04588469862937927,
-0.1577461063861847,
0.01057723630219698,
0.060812268406152725,
0.007023714017122984,
0.10547517985105515,
0.06024850904941559,
-0.010164976119995117,
0.10765302926301956,
-0.07034451514482498,
0.07093959301710129,
0.01926666498184204,
-0.12700697779655457,
-0.26116442680358887,
-0.11583392322063446,
0.052815694361925125,
0.07677634060382843,
0.08581406623125076,
-0.008652674965560436,
0.17886030673980713,
-0.02232525497674942,
0.10985555499792099,
0.23514127731323242,
-0.30252599716186523,
-0.05944613739848137,
-0.020525678992271423,
0.0513831228017807,
0.07724981009960175,
-0.07671713829040527,
-0.03509686142206192,
0.026310695335268974,
0.05101160705089569,
0.1474340409040451,
-0.015213852748274803,
-0.030688736587762833,
-0.017528031021356583,
-0.1375274956226349,
-0.071743443608284,
0.16234451532363892,
0.032077014446258545,
-0.04460039734840393,
-0.07861107587814331,
-0.07540878653526306,
-0.18637129664421082,
-0.04930505156517029,
0.007429591380059719,
0.035268038511276245,
-0.03970080614089966,
-0.08341949433088303,
-0.018598996102809906,
-0.08084549754858017,
-0.03256026282906532,
-0.044787611812353134,
0.112843818962574,
0.043036460876464844,
0.015595157630741596,
-0.062925323843956,
0.07648226618766785,
-0.023862969130277634,
-0.16247715055942535,
-0.005123621318489313,
0.012410723604261875,
0.010846764780580997,
-0.04106120765209198,
-0.03795686364173889,
-0.12315893918275833,
0.004018017556518316,
0.1562771499156952,
-0.08884023874998093,
0.07499609887599945,
-0.034599192440509796,
0.035822510719299316,
-0.0720534473657608,
0.1924165040254593,
-0.02393670566380024,
0.001460531260818243,
0.022272450849413872,
0.08232830464839935,
0.05858449637889862,
-0.03615611419081688,
-0.11591625958681107,
0.04212285205721855,
0.11592387408018112,
0.02513233758509159,
-0.02837332710623741,
0.05487271025776863,
-0.044807299971580505,
-0.029935762286186218,
0.058770276606082916,
-0.10262396931648254,
0.02770381234586239,
-0.014872772619128227,
-0.05833863839507103,
-0.010100997053086758,
0.017684005200862885,
0.0034124436788260937,
-0.0439397394657135,
0.08414358645677567,
-0.09361060708761215,
0.012675190344452858,
-0.07829287648200989,
-0.13802871108055115,
0.03661433607339859,
-0.10171109437942505,
-0.001674234983511269,
-0.09123551845550537,
-0.14280490577220917,
-0.014010732062160969,
0.05471273511648178,
-0.043461479246616364,
-0.06203373521566391,
-0.046662621200084686,
-0.08782164007425308,
0.04597129672765732,
-0.018538856878876686,
0.08851996809244156,
-0.07092557847499847,
0.08911911398172379,
0.037130385637283325,
0.06870157271623611,
-0.03978344425559044,
0.047794025391340256,
-0.08806469291448593,
0.04492177069187164,
-0.21043598651885986,
0.05879460647702217,
-0.04792514443397522,
0.08031735569238663,
-0.10741744935512543,
-0.09973092377185822,
0.04369574785232544,
-0.027014387771487236,
0.10316262394189835,
0.0960228368639946,
-0.17428003251552582,
-0.07046952098608017,
0.19386141002178192,
-0.08666457235813141,
-0.14403752982616425,
0.13253676891326904,
-0.044721465557813644,
0.010372938588261604,
0.05514078959822655,
0.22357703745365143,
0.056102197617292404,
-0.0994696170091629,
-0.017735162749886513,
-0.0457284040749073,
0.07113511115312576,
-0.07076487690210342,
0.07673688977956772,
0.002799807582050562,
0.06984900683164597,
0.005679272580891848,
0.01336890272796154,
0.034139588475227356,
-0.08014532923698425,
-0.08146868646144867,
-0.04843692481517792,
-0.07067849487066269,
0.011895820498466492,
0.03844423219561577,
0.06086860969662666,
-0.12744052708148956,
-0.10850217193365097,
0.04717998951673508,
0.07891901582479477,
-0.08274804800748825,
0.05463411659002304,
-0.09695715457201004,
0.12197192013263702,
-0.07239512354135513,
-0.006709856912493706,
-0.18144187331199646,
-0.03455400466918945,
0.03351642191410065,
0.00684636365622282,
0.012928382493555546,
-0.054273590445518494,
0.06259769201278687,
0.07423576712608337,
-0.03490450978279114,
-0.03445369005203247,
-0.018286531791090965,
0.0012385204900056124,
-0.12065791338682175,
-0.19337698817253113,
-0.04402840510010719,
-0.036518391221761703,
0.10430080443620682,
-0.15741609036922455,
0.039980895817279816,
0.05774307996034622,
0.11135683953762054,
0.04238174110651016,
-0.030247123911976814,
-0.000780009082518518,
0.07511235028505325,
-0.04858120158314705,
-0.07327744364738464,
0.06274375319480896,
0.030264616012573242,
-0.09221655875444412,
0.010551582090556622,
-0.16315357387065887,
0.15572230517864227,
0.13500210642814636,
0.0078717777505517,
-0.06052997335791588,
-0.016080183908343315,
-0.05373125895857811,
-0.026403699070215225,
-0.02117941901087761,
0.02203402854502201,
0.16066038608551025,
0.027451166883111,
0.15983328223228455,
-0.09867219626903534,
-0.05490129813551903,
0.049624957144260406,
-0.027155492454767227,
-0.008850015699863434,
0.11207132041454315,
0.038598839193582535,
-0.12332328408956528,
0.14337441325187683,
0.13165153563022614,
-0.04766819626092911,
0.13306136429309845,
-0.06500759720802307,
-0.0739394798874855,
-0.036055125296115875,
-0.010644365102052689,
0.031535934656858444,
0.10758914798498154,
-0.1129222959280014,
-0.019848579540848732,
0.041459597647190094,
0.0264621339738369,
0.0062029557302594185,
-0.1876697838306427,
-0.003102195216342807,
0.04010656476020813,
-0.04968416318297386,
-0.05204157531261444,
-0.00515448534861207,
0.00945199467241764,
0.10026911646127701,
0.015829887241125107,
-0.05115795135498047,
0.030287500470876694,
0.012436331249773502,
-0.0672282800078392,
0.1864163726568222,
-0.10383100807666779,
-0.17352637648582458,
-0.12164200842380524,
-0.10825062543153763,
-0.05577792227268219,
-0.005546000320464373,
0.07802946865558624,
-0.07775543630123138,
-0.04668805003166199,
-0.10461027920246124,
-0.035890690982341766,
-0.005177074111998081,
0.023223944008350372,
0.030312834307551384,
-0.023705653846263885,
0.06803102791309357,
-0.11019199341535568,
-0.030554143711924553,
-0.018798815086483955,
0.015669727697968483,
0.06260823458433151,
0.014073322527110577,
0.11529991775751114,
0.12932512164115906,
-0.020437611266970634,
0.0383351631462574,
-0.046210575848817825,
0.23929138481616974,
-0.07475080341100693,
-0.013567866757512093,
0.13549277186393738,
-0.020660458132624626,
0.0909508764743805,
0.12126694619655609,
0.04719601571559906,
-0.08753620088100433,
-0.005355204921215773,
0.007312592584639788,
-0.04450710490345955,
-0.22053126990795135,
-0.01706893928349018,
-0.05580752342939377,
0.007801666855812073,
0.10462246835231781,
0.025224559009075165,
0.02805069461464882,
0.050760667771101,
0.01029700692743063,
0.06107451394200325,
-0.025600949302315712,
0.10894440114498138,
0.13068945705890656,
0.05348851531744003,
0.14300081133842468,
-0.05401460826396942,
-0.025072559714317322,
0.051164016127586365,
0.018698642030358315,
0.21432167291641235,
-0.010227746330201626,
0.2042551338672638,
0.043395522981882095,
0.15987344086170197,
0.026793647557497025,
0.06797816604375839,
-0.024428579956293106,
-0.0049511450342834,
-0.015577166341245174,
-0.04827996715903282,
-0.045103784650564194,
0.01678411290049553,
-0.05700746178627014,
0.03157772496342659,
-0.12003035098314285,
0.014357639476656914,
0.04797663167119026,
0.299159973859787,
0.04144086688756943,
-0.3776948153972626,
-0.11087673902511597,
0.006177952047437429,
-0.04472586140036583,
-0.043678201735019684,
0.0037484888453036547,
0.09632738679647446,
-0.08089277148246765,
0.0720115602016449,
-0.08583610504865646,
0.10838653147220612,
-0.06433896720409393,
0.03340590372681618,
0.05409996211528778,
0.08573310077190399,
-0.014599191024899483,
0.051166292279958725,
-0.2807866930961609,
0.2679580748081207,
0.025039006024599075,
0.06681621074676514,
-0.07114255428314209,
0.01630435883998871,
0.014066597446799278,
0.042439114302396774,
0.060819391161203384,
-0.01036276388913393,
-0.10864529758691788,
-0.16400104761123657,
-0.10553479939699173,
0.015969427302479744,
0.07591921091079712,
0.00966903567314148,
0.12531308829784393,
-0.016301970928907394,
-0.0029416976030915976,
0.042854830622673035,
-0.020891617983579636,
-0.029982570558786392,
-0.1125204861164093,
0.02774382010102272,
0.043642308562994,
-0.030198752880096436,
-0.07394064962863922,
-0.10631854087114334,
-0.04979146644473076,
0.16446025669574738,
0.023252636194229126,
-0.07442664355039597,
-0.1307775229215622,
0.03738464042544365,
0.08263901621103287,
-0.09193933755159378,
0.03290571644902229,
-0.013782770372927189,
0.12130400538444519,
-0.00009269014844903722,
-0.07391821593046188,
0.10885076224803925,
-0.05445827171206474,
-0.16403107345104218,
-0.052112795412540436,
0.12311423569917679,
0.00881531834602356,
0.059913069009780884,
-0.011081688106060028,
0.04268891364336014,
-0.03438640385866165,
-0.06794685870409012,
0.028041990473866463,
0.004480825737118721,
0.10227550566196442,
-0.045713432133197784,
-0.014915907755494118,
0.026432566344738007,
-0.07159793376922607,
-0.024492042139172554,
0.18174681067466736,
0.25156864523887634,
-0.08276168256998062,
0.06962382048368454,
0.03671548515558243,
-0.05670325085520744,
-0.14632442593574524,
0.011418895795941353,
0.061054106801748276,
0.008517286740243435,
0.0023317437153309584,
-0.17905159294605255,
0.029046446084976196,
0.08763567358255386,
-0.013726015575230122,
0.08725593984127045,
-0.31313860416412354,
-0.12423218786716461,
0.08858410269021988,
0.12671491503715515,
0.07777392864227295,
-0.1563982516527176,
-0.04769564047455788,
-0.022925468161702156,
-0.133877694606781,
0.136244997382164,
-0.10336453467607498,
0.1152215451002121,
-0.027404814958572388,
0.1077456921339035,
0.012816970236599445,
-0.05972573161125183,
0.11270749568939209,
-0.012912275269627571,
0.0694686695933342,
-0.0650266483426094,
0.020899290218949318,
0.09316243976354599,
-0.08524783700704575,
0.043841149657964706,
-0.10023648291826248,
0.03429066017270088,
-0.1265847384929657,
-0.014073589816689491,
-0.06680306047201157,
0.0033790564630180597,
-0.03513189032673836,
-0.03560264781117439,
-0.03789295256137848,
0.010665098205208778,
0.07209315150976181,
-0.024235811084508896,
0.1820783019065857,
0.015735678374767303,
0.15038561820983887,
0.14606772363185883,
0.09344150125980377,
-0.11731575429439545,
-0.06566804647445679,
0.0011922763660550117,
-0.03396710380911827,
0.042572785168886185,
-0.15826593339443207,
0.027515564113855362,
0.13729356229305267,
0.005047835409641266,
0.12435897439718246,
0.0636097714304924,
-0.0635753720998764,
0.02713608182966709,
0.05264715105295181,
-0.17008285224437714,
-0.09868409484624863,
-0.0002854498743545264,
0.039311159402132034,
-0.1319298893213272,
0.035922929644584656,
0.13049203157424927,
-0.05699620395898819,
-0.025407765060663223,
0.005300797056406736,
0.01906978338956833,
-0.012350017204880714,
0.17731201648712158,
0.031124861910939217,
0.06443610042333603,
-0.10762083530426025,
0.07748591899871826,
0.061627477407455444,
-0.1146135926246643,
0.05751054361462593,
0.11374953389167786,
-0.09790290892124176,
-0.02720329910516739,
0.035941869020462036,
0.17035025358200073,
-0.059712737798690796,
-0.04845026507973671,
-0.1544320434331894,
-0.11566036194562912,
0.09719152748584747,
0.18319499492645264,
0.06441406160593033,
0.00939029548317194,
-0.04293776676058769,
-0.00862220861017704,
-0.12533394992351532,
0.10333096235990524,
0.049853816628456116,
0.07786890119314194,
-0.12264146655797958,
0.12176846712827682,
-0.009681985713541508,
0.042030785232782364,
-0.009242122992873192,
0.016223523765802383,
-0.10978978872299194,
0.003116859821602702,
-0.14232484996318817,
0.007755265571177006,
-0.044638849794864655,
-0.0009508638177067041,
-0.02344403974711895,
-0.0333043672144413,
-0.05905460938811302,
0.017022782936692238,
-0.11254969239234924,
-0.036216218024492264,
0.01149867381900549,
0.02970404364168644,
-0.1255163997411728,
-0.019084878265857697,
0.009608756750822067,
-0.08636956661939621,
0.08361071348190308,
0.04431821405887604,
-0.004796444904059172,
0.02054213359951973,
-0.02182593382894993,
0.000058771733165485784,
0.0476713627576828,
0.005084346979856491,
0.0801718607544899,
-0.11909998953342438,
-0.013991895131766796,
0.008614384569227695,
0.013514135964214802,
0.029152655974030495,
0.11520028114318848,
-0.11624829471111298,
-0.007918957620859146,
0.008709955960512161,
-0.056244876235723495,
-0.06920377165079117,
0.06837181746959686,
0.09441853314638138,
0.022316843271255493,
0.18204966187477112,
-0.07444651424884796,
0.03483397513628006,
-0.20082291960716248,
-0.004089004825800657,
0.00465488713234663,
-0.14213742315769196,
-0.0645986795425415,
-0.03864581137895584,
0.06780657172203064,
-0.07179949432611465,
0.11218146234750748,
0.004794209264218807,
0.03933865576982498,
0.04481394216418266,
-0.03069021739065647,
-0.02123802714049816,
0.015401962213218212,
0.17586402595043182,
0.020361270755529404,
-0.04355262592434883,
0.08138252794742584,
0.021439464762806892,
0.0808105394244194,
0.12857182323932648,
0.19780637323856354,
0.11624474078416824,
0.06378824263811111,
0.09596213698387146,
0.023681871592998505,
-0.03093722090125084,
-0.18742243945598602,
0.04022693634033203,
-0.032624270766973495,
0.14933063089847565,
-0.0050853886641561985,
0.2038184106349945,
0.12016794085502625,
-0.1604968160390854,
0.04834777116775513,
-0.041838254779577255,
-0.0863909125328064,
-0.10448557883501053,
-0.09707632660865784,
-0.0863661840558052,
-0.13143549859523773,
-0.010860299691557884,
-0.12819665670394897,
0.047365203499794006,
0.06252896040678024,
0.01592523418366909,
-0.006563671864569187,
0.12181217223405838,
0.029357051476836205,
0.009312068112194538,
0.06347658485174179,
0.007714451756328344,
-0.03544095903635025,
-0.050072766840457916,
-0.07194484770298004,
0.017617393285036087,
-0.000623556028585881,
0.054248444736003876,
-0.00869728997349739,
-0.006664905231446028,
0.049803227186203,
-0.02736487425863743,
-0.11800934374332428,
0.012526542879641056,
0.030497819185256958,
0.07358354330062866,
0.04570655897259712,
0.013132987543940544,
0.003940423019230366,
-0.015194343402981758,
0.20005030930042267,
-0.07378960400819778,
-0.05214204266667366,
-0.11663410812616348,
0.24711956083774567,
0.010534247383475304,
-0.057144470512866974,
0.033808931708335876,
-0.06602586060762405,
-0.007225691340863705,
0.2023860067129135,
0.17565765976905823,
-0.0403040386736393,
-0.016322005540132523,
-0.020967012271285057,
-0.009319585748016834,
-0.025482483208179474,
0.11053445190191269,
0.12763944268226624,
0.028689829632639885,
-0.07315870374441147,
-0.030219754204154015,
-0.06225419417023659,
-0.015889449045062065,
-0.04926137626171112,
0.07498694956302643,
0.024034976959228516,
-0.0025520692579448223,
-0.03035755455493927,
0.05449787154793739,
-0.05408729240298271,
-0.06149299815297127,
0.005658331327140331,
-0.21372979879379272,
-0.17141400277614594,
0.00367767084389925,
0.08013617247343063,
-0.011379540897905827,
0.05877966806292534,
-0.002591683529317379,
0.008241729810833931,
0.10111300647258759,
-0.01848042570054531,
-0.07073456794023514,
-0.07793236523866653,
0.1022053211927414,
-0.15752635896205902,
0.18991687893867493,
-0.03143029659986496,
0.029999982565641403,
0.14130471646785736,
0.05197294428944588,
-0.1117434948682785,
0.0555666908621788,
0.049492888152599335,
-0.047728948295116425,
0.014969092793762684,
0.12879742681980133,
-0.029966851696372032,
0.07209462672472,
0.04446517676115036,
-0.11130565404891968,
-0.011629589833319187,
-0.09171950817108154,
-0.02042531594634056,
-0.019828850403428078,
-0.04591736942529678,
-0.04982694983482361,
0.1323474645614624,
0.199149489402771,
-0.04610713943839073,
-0.007495453581213951,
-0.06228210777044296,
0.009032033383846283,
0.06748209148645401,
-0.011310548521578312,
-0.05355502665042877,
-0.25565195083618164,
0.0004417637246660888,
0.07678067684173584,
-0.00829776469618082,
-0.27308762073516846,
-0.0853227898478508,
-0.0006328764138743281,
-0.04363834485411644,
-0.10859435051679611,
0.09345760196447372,
0.07175242900848389,
0.042708102613687515,
-0.06907473504543304,
0.00746297650039196,
-0.06651908904314041,
0.16050885617733002,
-0.1353461891412735,
-0.06236465275287628
] |
null | null |
transformers
|
# GPT2-Persian
bolbolzaban/gpt2-persian is a GPT-2 language model trained with hyperparameters similar to the standard gpt2-medium configuration, with the following differences:
1. The context size is reduced from 1024 to 256 subwords to make the training affordable.
2. Instead of BPE, the Google SentencePiece tokenizer is used for tokenization.
3. The training dataset only includes Persian text. All non-Persian characters are replaced with special tokens (e.g. [LAT], [URL], [NUM]).
Please refer to this [blog post](https://medium.com/@khashei/a-not-so-dangerous-ai-in-the-persian-language-39172a641c84) for further detail.
Also try the model [here](https://huggingface.co/bolbolzaban/gpt2-persian?text=%D8%AF%D8%B1+%DB%8C%DA%A9+%D8%A7%D8%AA%D9%81%D8%A7%D9%82+%D8%B4%DA%AF%D9%81%D8%AA+%D8%A7%D9%86%DA%AF%DB%8C%D8%B2%D8%8C+%D9%BE%DA%98%D9%88%D9%87%D8%B4%DA%AF%D8%B1%D8%A7%D9%86) or on [Bolbolzaban.com](http://www.bolbolzaban.com/text).
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline, AutoTokenizer, GPT2LMHeadModel
tokenizer = AutoTokenizer.from_pretrained('bolbolzaban/gpt2-persian')
model = GPT2LMHeadModel.from_pretrained('bolbolzaban/gpt2-persian')
generator = pipeline('text-generation', model, tokenizer=tokenizer, config={'max_length':256})
sample = generator('در یک اتفاق شگفت انگیز، پژوهشگران')
```
If you are using TensorFlow, import TFGPT2LMHeadModel instead of GPT2LMHeadModel.
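For example, a minimal TensorFlow variant of the snippet above (a sketch only; the repository is tagged `tf`, so TensorFlow weights should be available, otherwise pass `from_pt=True` to `from_pretrained`):

```python
# TensorFlow sketch of the pipeline example above.
from transformers import pipeline, AutoTokenizer, TFGPT2LMHeadModel

tokenizer = AutoTokenizer.from_pretrained('bolbolzaban/gpt2-persian')
model = TFGPT2LMHeadModel.from_pretrained('bolbolzaban/gpt2-persian')
generator = pipeline('text-generation', model=model, tokenizer=tokenizer)
sample = generator('در یک اتفاق شگفت انگیز، پژوهشگران')
```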
## Fine-tuning
Find a basic fine-tuning example on this [Github Repo](https://github.com/khashei/bolbolzaban-gpt2-persian).
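For orientation, a minimal fine-tuning sketch with the Trainer API is shown below. It is an illustration only, not the script from the linked repository; `persian_corpus.txt` is a hypothetical plain-text file, the hyperparameters are placeholders, and the input is assumed to be already normalized as described in the next section.

```python
# Minimal continued-training sketch (illustrative only; not the linked repo's script).
from datasets import load_dataset
from transformers import (AutoTokenizer, GPT2LMHeadModel, Trainer,
                          TrainingArguments, DataCollatorForLanguageModeling)

tokenizer = AutoTokenizer.from_pretrained('bolbolzaban/gpt2-persian')
model = GPT2LMHeadModel.from_pretrained('bolbolzaban/gpt2-persian')

# GPT-2-style tokenizers often ship without a padding token; fall back to EOS/UNK if present.
if tokenizer.pad_token is None:
    tokenizer.pad_token = tokenizer.eos_token or tokenizer.unk_token

dataset = load_dataset('text', data_files={'train': 'persian_corpus.txt'})
dataset = dataset.map(
    lambda batch: tokenizer(batch['text'], truncation=True, max_length=256),
    batched=True, remove_columns=['text'])

collator = DataCollatorForLanguageModeling(tokenizer=tokenizer, mlm=False)
args = TrainingArguments(output_dir='gpt2-persian-finetuned',
                         num_train_epochs=1,
                         per_device_train_batch_size=8)
trainer = Trainer(model=model, args=args,
                  train_dataset=dataset['train'], data_collator=collator)
trainer.train()
```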
## Special Tokens
gpt2-persian is trained for the purpose of research on Persian poetry. Because of that, all English words and numbers are replaced with special tokens, and only the standard Persian alphabet is used as part of the input text. Here is one example:
Original text: اگر آیفون یا آیپد شما دارای سیستم عامل iOS 14.3 یا iPadOS 14.3 یا نسخههای جدیدتر باشد
Text used in training: اگر آیفون یا آیپد شما دارای سیستم عامل [LAT] [NUM] یا [LAT] [NUM] یا نسخههای جدیدتر باشد
Please consider normalizing your input text using [Hazm](https://github.com/sobhe/hazm) or similar libraries and ensure only Persian characters are provided as input.
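A rough preprocessing sketch is shown below. It assumes the `hazm` package is installed; the regular expressions only approximate the replacement rules described above (the exact rules used during training are not published here), so adjust them to your data.

```python
# Approximate preprocessing: normalize with Hazm, then replace Latin-script words and
# numbers with the special tokens described above. The exact handling of URLs and
# decimals in the original training pipeline may differ.
import re
from hazm import Normalizer

normalizer = Normalizer()

def preprocess(text):
    text = normalizer.normalize(text)
    text = re.sub(r'[A-Za-z]+', '[LAT]', text)  # Latin-script words -> [LAT]
    text = re.sub(r'\d+', '[NUM]', text)        # digits -> [NUM]
    return text

print(preprocess('اگر آیفون یا آیپد شما دارای سیستم عامل iOS 14.3 یا iPadOS 14.3 باشد'))
```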
If you want to use classical Persian poetry as input, use [BOM] (beginning of mesra) at the beginning of each verse (مصرع), followed by [EOS] (end of statement) at the end of each couplet (بیت).
See the following links for examples (a code version of the last prompt follows the list):
[[BOM] توانا بود](https://huggingface.co/bolbolzaban/gpt2-persian?text=%5BBOM%5D+%D8%AA%D9%88%D8%A7%D9%86%D8%A7+%D8%A8%D9%88%D8%AF)
[[BOM] توانا بود هر که دانا بود [BOM]](https://huggingface.co/bolbolzaban/gpt2-persian?text=%5BBOM%5D+%D8%AA%D9%88%D8%A7%D9%86%D8%A7+%D8%A8%D9%88%D8%AF+%D9%87%D8%B1+%DA%A9%D9%87+%D8%AF%D8%A7%D9%86%D8%A7+%D8%A8%D9%88%D8%AF+%5BBOM%5D)
[[BOM] توانا بود هر که دانا بود [BOM] ز دانش دل پیر](https://huggingface.co/bolbolzaban/gpt2-persian?text=%5BBOM%5D+%D8%AA%D9%88%D8%A7%D9%86%D8%A7+%D8%A8%D9%88%D8%AF+%D9%87%D8%B1+%DA%A9%D9%87+%D8%AF%D8%A7%D9%86%D8%A7+%D8%A8%D9%88%D8%AF+%5BBOM%5D+%D8%B2+%D8%AF%D8%A7%D9%86%D8%B4+%D8%AF%D9%84+%D9%BE%DB%8C%D8%B1)
[[BOM] توانا بود هر که دانا بود [BOM] ز دانش دل پیربرنا بود [EOS]](https://huggingface.co/bolbolzaban/gpt2-persian?text=%5BBOM%5D+%D8%AA%D9%88%D8%A7%D9%86%D8%A7+%D8%A8%D9%88%D8%AF+%D9%87%D8%B1+%DA%A9%D9%87+%D8%AF%D8%A7%D9%86%D8%A7+%D8%A8%D9%88%D8%AF+%5BBOM%5D+%D8%B2+%D8%AF%D8%A7%D9%86%D8%B4+%D8%AF%D9%84+%D9%BE%DB%8C%D8%B1%D8%A8%D8%B1%D9%86%D8%A7+%D8%A8%D9%88%D8%AF++%5BEOS%5D)
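The same kind of prompt can be passed directly to the text-generation pipeline built in the "How to use" section above; this is simply the last linked example expressed as code.

```python
# Reuses the `generator` pipeline constructed in the "How to use" section above.
sample = generator('[BOM] توانا بود هر که دانا بود [BOM] ز دانش دل پیربرنا بود [EOS]')
```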
If you would like to know more about the structure of classical Persian poetry, refer to these [blog posts](https://medium.com/@khashei).
## Acknowledgment
This project is supported by Cloud TPUs from Google’s TensorFlow Research Cloud (TFRC).
## Citation and Reference
Please reference "bolbolzaban.com" website if you are using gpt2-persian in your research or commertial application.
## Contacts
Please reach out on [LinkedIn](https://www.linkedin.com/in/khashei/) or [Telegram](https://t.me/khasheia) if you have any questions or need any help using the model.
Follow [Bolbolzaban](http://bolbolzaban.com/about) on [Twitter](https://twitter.com/bolbol_zaban), [Telegram](https://t.me/bolbol_zaban) or [Instagram](https://www.instagram.com/bolbolzaban/)
|
{"language": "fa", "license": "apache-2.0", "tags": ["farsi", "persian"]}
|
text-generation
|
bolbolzaban/gpt2-persian
|
[
"transformers",
"pytorch",
"tf",
"jax",
"gpt2",
"text-generation",
"farsi",
"persian",
"fa",
"doi:10.57967/hf/1207",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"has_space",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"fa"
] |
TAGS
#transformers #pytorch #tf #jax #gpt2 #text-generation #farsi #persian #fa #doi-10.57967/hf/1207 #license-apache-2.0 #autotrain_compatible #endpoints_compatible #has_space #text-generation-inference #region-us
|
# GPT2-Persian
bolbolzaban/gpt2-persian is a GPT-2 language model trained with hyperparameters similar to the standard gpt2-medium configuration, with the following differences:
1. The context size is reduced from 1024 to 256 subwords to make the training affordable.
2. Instead of BPE, the Google SentencePiece tokenizer is used for tokenization.
3. The training dataset only includes Persian text. All non-Persian characters are replaced with special tokens (e.g. [LAT], [URL], [NUM]).
Please refer to this blog post for further detail.
Also try the model here or on URL.
## How to use
You can use this model directly with a pipeline for text generation:
If you are using TensorFlow, import TFGPT2LMHeadModel instead of GPT2LMHeadModel.
## Fine-tuning
Find a basic fine-tuning example on this Github Repo.
## Special Tokens
gpt2-persian is trained for the purpose of research on Persian poetry. Because of that, all English words and numbers are replaced with special tokens, and only the standard Persian alphabet is used as part of the input text. Here is one example:
Original text: اگر آیفون یا آیپد شما دارای سیستم عامل iOS 14.3 یا iPadOS 14.3 یا نسخههای جدیدتر باشد
Text used in training: اگر آیفون یا آیپد شما دارای سیستم عامل [LAT] [NUM] یا [LAT] [NUM] یا نسخههای جدیدتر باشد
Please consider normalizing your input text using Hazm or similar libraries and ensure only Persian characters are provided as input.
If you want to use classical Persian poetry as input, use [BOM] (beginning of mesra) at the beginning of each verse (مصرع), followed by [EOS] (end of statement) at the end of each couplet (بیت).
See the following links for examples:
[[BOM] توانا بود](URL
[[BOM] توانا بود هر که دانا بود [BOM]](URL
[[BOM] توانا بود هر که دانا بود [BOM] ز دانش دل پیر](URL
[[BOM] توانا بود هر که دانا بود [BOM] ز دانش دل پیربرنا بود [EOS]](URL
If you would like to know more about the structure of classical Persian poetry, refer to these blog posts.
## Acknowledgment
This project is supported by Cloud TPUs from Google’s TensorFlow Research Cloud (TFRC).
## Citation and Reference
Please reference the "URL" website if you are using gpt2-persian in your research or commercial application.
## Contacts
Please reach out on LinkedIn or Telegram if you have any questions or need any help using the model.
Follow Bolbolzaban on Twitter, Telegram or Instagram
|
[
"# GPT2-Persian\nbolbolzaban/gpt2-persian is gpt2 language model that is trained with hyper parameters similar to standard gpt2-medium with following differences:\n1. The context size is reduced from 1024 to 256 sub words in order to make the training affordable \n2. Instead of BPE, google sentence piece tokenizor is used for tokenization.\n3. The training dataset only include Persian text. All non-persian characters are replaced with especial tokens (e.g [LAT], [URL], [NUM])\n\nPlease refer to this blog post for further detail. \nAlso try the model here or on URL.",
"## How to use\nYou can use this model directly with a pipeline for text generation:\n\n\nIf you are using Tensorflow import TFGPT2LMHeadModel instead of GPT2LMHeadModel.",
"## Fine-tuning\nFind a basic fine-tuning example on this Github Repo.",
"## Special Tokens\ngpt-persian is trained for the purpose of research on Persian poetry. Because of that all english words and numbers are replaced with special tokens and only standard Persian alphabet is used as part of input text. Here is one example:\n\nOriginal text: اگر آیفون یا آیپد شما دارای سیستم عامل iOS 14.3 یا iPadOS 14.3 یا نسخههای جدیدتر باشد\n\nText used in training: اگر آیفون یا آیپد شما دارای سیستم عامل [LAT] [NUM] یا [LAT] [NUM] یا نسخههای جدیدتر باشد\n\nPlease consider normalizing your input text using Hazm or similar libraries and ensure only Persian characters are provided as input.\n\nIf you want to use classical Persian poetry as input use [BOM] (begining of mesra) at the beginning of each verse (مصرع) followed by [EOS] (end of statement) at the end of each couplet (بیت). \n\nSee following links for example:\n\n[[BOM] توانا بود](URL\n\n[[BOM] توانا بود هر که دانا بود [BOM]](URL\n\n[[BOM] توانا بود هر که دانا بود [BOM] ز دانش دل پیر](URL\n\n[[BOM] توانا بود هر که دانا بود [BOM] ز دانش دل پیربرنا بود [EOS]](URL\n\nIf you like to know about structure of classical Persian poetry refer to these blog posts.",
"## Acknowledgment\nThis project is supported by Cloud TPUs from Google’s TensorFlow Research Cloud (TFRC).\nand Reference\nPlease reference \"URL\" website if you are using gpt2-persian in your research or commertial application.",
"## Contacts\nPlease reachout on Linkedin or Telegram if you have any question or need any help to use the model.\n\nFollow Bolbolzaban on Twitter, Telegram or Instagram"
] |
[
"TAGS\n#transformers #pytorch #tf #jax #gpt2 #text-generation #farsi #persian #fa #doi-10.57967/hf/1207 #license-apache-2.0 #autotrain_compatible #endpoints_compatible #has_space #text-generation-inference #region-us \n",
"# GPT2-Persian\nbolbolzaban/gpt2-persian is gpt2 language model that is trained with hyper parameters similar to standard gpt2-medium with following differences:\n1. The context size is reduced from 1024 to 256 sub words in order to make the training affordable \n2. Instead of BPE, google sentence piece tokenizor is used for tokenization.\n3. The training dataset only include Persian text. All non-persian characters are replaced with especial tokens (e.g [LAT], [URL], [NUM])\n\nPlease refer to this blog post for further detail. \nAlso try the model here or on URL.",
"## How to use\nYou can use this model directly with a pipeline for text generation:\n\n\nIf you are using Tensorflow import TFGPT2LMHeadModel instead of GPT2LMHeadModel.",
"## Fine-tuning\nFind a basic fine-tuning example on this Github Repo.",
"## Special Tokens\ngpt-persian is trained for the purpose of research on Persian poetry. Because of that all english words and numbers are replaced with special tokens and only standard Persian alphabet is used as part of input text. Here is one example:\n\nOriginal text: اگر آیفون یا آیپد شما دارای سیستم عامل iOS 14.3 یا iPadOS 14.3 یا نسخههای جدیدتر باشد\n\nText used in training: اگر آیفون یا آیپد شما دارای سیستم عامل [LAT] [NUM] یا [LAT] [NUM] یا نسخههای جدیدتر باشد\n\nPlease consider normalizing your input text using Hazm or similar libraries and ensure only Persian characters are provided as input.\n\nIf you want to use classical Persian poetry as input use [BOM] (begining of mesra) at the beginning of each verse (مصرع) followed by [EOS] (end of statement) at the end of each couplet (بیت). \n\nSee following links for example:\n\n[[BOM] توانا بود](URL\n\n[[BOM] توانا بود هر که دانا بود [BOM]](URL\n\n[[BOM] توانا بود هر که دانا بود [BOM] ز دانش دل پیر](URL\n\n[[BOM] توانا بود هر که دانا بود [BOM] ز دانش دل پیربرنا بود [EOS]](URL\n\nIf you like to know about structure of classical Persian poetry refer to these blog posts.",
"## Acknowledgment\nThis project is supported by Cloud TPUs from Google’s TensorFlow Research Cloud (TFRC).\nand Reference\nPlease reference \"URL\" website if you are using gpt2-persian in your research or commertial application.",
"## Contacts\nPlease reachout on Linkedin or Telegram if you have any question or need any help to use the model.\n\nFollow Bolbolzaban on Twitter, Telegram or Instagram"
] |
[
85,
142,
44,
21,
307,
56,
37
] |
[
"passage: TAGS\n#transformers #pytorch #tf #jax #gpt2 #text-generation #farsi #persian #fa #doi-10.57967/hf/1207 #license-apache-2.0 #autotrain_compatible #endpoints_compatible #has_space #text-generation-inference #region-us \n# GPT2-Persian\nbolbolzaban/gpt2-persian is gpt2 language model that is trained with hyper parameters similar to standard gpt2-medium with following differences:\n1. The context size is reduced from 1024 to 256 sub words in order to make the training affordable \n2. Instead of BPE, google sentence piece tokenizor is used for tokenization.\n3. The training dataset only include Persian text. All non-persian characters are replaced with especial tokens (e.g [LAT], [URL], [NUM])\n\nPlease refer to this blog post for further detail. \nAlso try the model here or on URL.## How to use\nYou can use this model directly with a pipeline for text generation:\n\n\nIf you are using Tensorflow import TFGPT2LMHeadModel instead of GPT2LMHeadModel.## Fine-tuning\nFind a basic fine-tuning example on this Github Repo."
] |
[
-0.05905081704258919,
0.11201489716768265,
-0.004987296648323536,
0.09325363487005234,
0.11019580066204071,
0.023312734439969063,
0.17264798283576965,
0.11999918520450592,
-0.0393003411591053,
0.003335883840918541,
0.05602089688181877,
0.005405556410551071,
0.08170146495103836,
0.17169234156608582,
0.10026366263628006,
-0.34750914573669434,
-0.039610810577869415,
-0.01775183528661728,
0.09239861369132996,
0.09273594617843628,
0.15189886093139648,
-0.017464369535446167,
0.04818513244390488,
0.05790185555815697,
-0.15376533567905426,
0.07197139412164688,
0.0020969195757061243,
-0.08205223083496094,
0.08131382614374161,
0.05202040076255798,
0.11045541614294052,
-0.009891816414892673,
0.050442151725292206,
-0.10601215064525604,
0.04228649660944939,
0.07573254406452179,
-0.048319872468709946,
0.05992404744029045,
0.1052292212843895,
-0.08766695857048035,
0.17855392396450043,
-0.02851993776857853,
-0.032506849616765976,
0.05476823076605797,
-0.07869002223014832,
-0.11000804603099823,
-0.046227216720581055,
0.17946359515190125,
0.04024266451597214,
0.08007647842168808,
-0.015695981681346893,
0.012656543403863907,
-0.061675287783145905,
0.0673520416021347,
0.21049916744232178,
-0.2876555323600769,
-0.060545772314071655,
0.09236504137516022,
-0.05416232720017433,
0.06744246929883957,
-0.06654782593250275,
0.01790805719792843,
-0.01732676848769188,
0.019976040348410606,
0.04037012159824371,
-0.034211330115795135,
-0.06819391995668411,
-0.028380589559674263,
-0.08817336708307266,
-0.0032203146256506443,
0.07502608001232147,
0.01908140629529953,
-0.05369565263390541,
-0.0978340432047844,
-0.05620810016989708,
-0.00434458814561367,
-0.027594031766057014,
-0.025324445217847824,
0.010952146723866463,
0.0724632665514946,
0.03151848912239075,
-0.20256349444389343,
-0.1386283040046692,
-0.06282749772071838,
-0.05319158732891083,
0.08696051687002182,
0.06754401326179504,
0.02112623117864132,
-0.002038655336946249,
0.11135969310998917,
-0.14967131614685059,
-0.07397417724132538,
-0.008974915370345116,
-0.07507427036762238,
-0.042303599417209625,
-0.016070470213890076,
-0.02267596498131752,
-0.11857278645038605,
-0.019584400579333305,
0.10110729932785034,
0.042114801704883575,
0.0753743052482605,
0.07205197215080261,
0.0337260477244854,
-0.016573546454310417,
0.07491906732320786,
-0.0691588819026947,
0.01879739761352539,
0.09973428398370743,
0.016995983198285103,
-0.016545137390494347,
0.024691712111234665,
-0.08200618624687195,
-0.07242850214242935,
0.018505612388253212,
0.030307048931717873,
-0.029168961569666862,
0.04875602200627327,
-0.024074234068393707,
-0.02770143561065197,
0.12331065535545349,
-0.1335843801498413,
-0.012869237922132015,
0.0012165294028818607,
-0.0698453038930893,
0.004153542220592499,
0.07480500638484955,
-0.006705994252115488,
-0.1213926300406456,
-0.02850310690701008,
-0.03485749661922455,
0.048010800033807755,
-0.04641667380928993,
-0.04979148507118225,
-0.01569424942135811,
-0.05443456396460533,
-0.012768101878464222,
-0.181371808052063,
-0.191594198346138,
0.023725589737296104,
0.06281544268131256,
-0.04495355859398842,
0.0008259735768660903,
0.010753452777862549,
0.013857644982635975,
-0.06426489353179932,
-0.0008966723689809442,
0.0028782039880752563,
-0.03071797452867031,
0.033498287200927734,
-0.019405532628297806,
0.04303431510925293,
0.017529023811221123,
0.011188543401658535,
-0.033973731100559235,
-0.04599231109023094,
-0.21945379674434662,
0.14524780213832855,
-0.04664258658885956,
0.011202151887118816,
-0.12886063754558563,
-0.06018504127860069,
-0.05685856193304062,
-0.003869256004691124,
0.017039088532328606,
0.1085578203201294,
-0.07682714611291885,
-0.045501671731472015,
0.2177104353904724,
-0.03215298429131508,
0.0488194115459919,
0.11859973520040512,
-0.0426262691617012,
0.12043248862028122,
0.14926236867904663,
0.17749027907848358,
0.1778707504272461,
-0.15466445684432983,
0.010557751171290874,
0.07618705183267593,
-0.0743875801563263,
0.023379512131214142,
0.04735080897808075,
-0.007721263449639082,
0.04567261040210724,
0.034568917006254196,
0.0009814436780288815,
0.08109093457460403,
-0.018640803173184395,
-0.0421999916434288,
0.013034210540354252,
-0.09183481335639954,
-0.04331352934241295,
-0.005571033339947462,
0.09685759991407394,
-0.027580663561820984,
-0.13278569281101227,
0.0012892850209027529,
0.10642734169960022,
-0.04051228240132332,
-0.0008346394752152264,
-0.09402268379926682,
0.04234880581498146,
-0.005937139503657818,
0.00619788933545351,
-0.12684571743011475,
-0.026487715542316437,
0.018588725477457047,
0.0295176450163126,
0.1283048540353775,
0.03785901889204979,
0.068602055311203,
0.04539445415139198,
-0.08402072638273239,
0.009331205859780312,
-0.013210032135248184,
-0.026965275406837463,
-0.06257776916027069,
-0.03824587166309357,
-0.03785314783453941,
0.0004495542962104082,
0.11461815983057022,
-0.06535786390304565,
0.06730286777019501,
0.04432450234889984,
0.11148342490196228,
-0.0027689493726938963,
-0.039518941193819046,
0.04291442781686783,
-0.06408913433551788,
0.050973355770111084,
-0.10710185021162033,
0.028678685426712036,
0.04777528718113899,
0.006687863264232874,
0.03077428974211216,
-0.029660332947969437,
-0.041212115436792374,
0.11576977372169495,
0.051779843866825104,
-0.1171262115240097,
-0.014629559591412544,
-0.035338107496500015,
0.023497939109802246,
-0.03376538306474686,
-0.009835036471486092,
0.229334756731987,
-0.006304347887635231,
0.1310819387435913,
-0.11144315451383591,
-0.01445219200104475,
-0.02075572870671749,
-0.05511566251516342,
0.021562423557043076,
0.04994921013712883,
-0.026358695700764656,
-0.1874971240758896,
0.03699573501944542,
-0.07671117782592773,
-0.04832293465733528,
0.17763514816761017,
0.05459904670715332,
-0.06257493048906326,
0.0011374884052202106,
0.08364164084196091,
0.00593179278075695,
0.053816065192222595,
0.06116629019379616,
-0.010573424398899078,
0.02472713030874729,
0.029695585370063782,
0.09797120839357376,
-0.12991507351398468,
0.002256052801385522,
0.02601613849401474,
-0.09934699535369873,
-0.008107234723865986,
0.006426734384149313,
-0.03969663754105568,
0.05873655155301094,
-0.004339308012276888,
0.07093469053506851,
0.006451781373471022,
0.013929472304880619,
-0.07682038098573685,
0.13628168404102325,
-0.1123109832406044,
-0.1954696774482727,
-0.1241081953048706,
-0.035141751170158386,
-0.023348815739154816,
0.030556639656424522,
0.049117423593997955,
-0.12935470044612885,
-0.04252264276146889,
-0.04875355213880539,
0.0750531479716301,
-0.009396959096193314,
-0.021847430616617203,
-0.058131564408540726,
0.02293432131409645,
0.015357443131506443,
-0.14372512698173523,
-0.008383051492273808,
0.005716609302908182,
-0.1207926869392395,
0.050549548119306564,
-0.10258837789297104,
-0.0017892596079036593,
0.04178915545344353,
-0.043567195534706116,
0.036203570663928986,
-0.02371828816831112,
0.2658750116825104,
-0.0628732219338417,
0.13151198625564575,
0.21688446402549744,
0.10568986088037491,
0.05736343935132027,
0.0370529368519783,
0.022406594827771187,
-0.044827111065387726,
0.023458357900381088,
0.06293891370296478,
-0.039352573454380035,
-0.21654805541038513,
-0.004089859779924154,
-0.054786164313554764,
-0.09192422777414322,
0.07895942032337189,
0.09608405083417892,
-0.009420969523489475,
0.11701776832342148,
-0.056580204516649246,
0.0714123323559761,
0.06656862795352936,
0.07948444783687592,
0.046619631350040436,
0.01635972410440445,
0.06231413409113884,
-0.0748000293970108,
0.012029173783957958,
0.1236448884010315,
0.11201093345880508,
0.14528167247772217,
-0.08073558658361435,
0.14840348064899445,
0.03850476071238518,
0.11870628595352173,
0.04367269203066826,
0.06540295481681824,
-0.014377373270690441,
-0.0038054361939430237,
-0.017894331365823746,
-0.05430057644844055,
-0.0385642908513546,
0.03295370936393738,
-0.010772660374641418,
-0.052043505012989044,
-0.013331078924238682,
0.018635855987668037,
0.07967375218868256,
0.16787593066692352,
-0.020193912088871002,
-0.1937359869480133,
-0.0812583863735199,
-0.024788113310933113,
-0.10376518964767456,
-0.059261925518512726,
-0.014271245338022709,
0.03198593109846115,
-0.13767844438552856,
0.04115569591522217,
-0.005612880922853947,
0.10006403177976608,
-0.020206525921821594,
-0.006023060530424118,
0.018056562170386314,
0.14554843306541443,
-0.023840509355068207,
0.10553040355443954,
-0.18726076185703278,
0.07368481904268265,
0.0519116036593914,
0.13613992929458618,
-0.07702319324016571,
0.032810576260089874,
0.06932001560926437,
0.034284353256225586,
0.0835423544049263,
0.000484724179841578,
0.05597025528550148,
-0.07919757068157196,
-0.08724498748779297,
0.03542286530137062,
0.07453885674476624,
-0.033017124980688095,
0.023653950542211533,
-0.003971952944993973,
0.012057095766067505,
-0.02871572971343994,
-0.05428623408079147,
-0.20581848919391632,
-0.17687660455703735,
0.031814973801374435,
-0.012927104718983173,
0.011392150074243546,
-0.06560724973678589,
-0.028741750866174698,
-0.01953970640897751,
0.23256821930408478,
-0.0013863632921129465,
-0.16433179378509521,
-0.12542898952960968,
0.06939465552568436,
0.0676620602607727,
-0.10233237594366074,
0.04197121784090996,
-0.01765184849500656,
0.07969836890697479,
-0.04294423758983612,
-0.10993833094835281,
-0.005873450543731451,
-0.06171310320496559,
-0.01973281428217888,
0.04051187261939049,
0.12331783026456833,
0.08016780763864517,
0.027105875313282013,
0.036664433777332306,
-0.0673808753490448,
-0.05804862082004547,
-0.11610959470272064,
-0.042099032551050186,
0.027656083926558495,
0.07848551124334335,
0.015765978023409843,
-0.05957484245300293,
-0.007625618018209934,
-0.08695158362388611,
0.021664347499608994,
0.10919834673404694,
0.17173612117767334,
-0.06927862763404846,
0.08874895423650742,
0.11927761882543564,
-0.02659505233168602,
-0.13576748967170715,
-0.028741488233208656,
0.08237718045711517,
0.03802015632390976,
-0.03433002158999443,
-0.27151188254356384,
0.04019038379192352,
0.07404109835624695,
0.010578607209026814,
0.015820913016796112,
-0.23549441993236542,
-0.11111073940992355,
0.09392553567886353,
0.041082266718149185,
-0.011591717600822449,
-0.13690324127674103,
-0.019133659079670906,
-0.02751617319881916,
-0.16313287615776062,
0.08753857761621475,
-0.1561567783355713,
0.14582893252372742,
0.05032762140035629,
0.11264508962631226,
0.00907190889120102,
0.005498180631548166,
0.12195070087909698,
-0.01998436264693737,
0.028081471100449562,
-0.048560794442892075,
0.08846380561590195,
0.0844569131731987,
-0.01340063288807869,
0.10426171123981476,
-0.05523144081234932,
0.056952252984046936,
-0.09871622174978256,
-0.09663991630077362,
-0.09262530505657196,
0.04451866075396538,
-0.003551783272996545,
-0.10182312875986099,
-0.0803212895989418,
0.044764552265405655,
0.06043514236807823,
-0.015475266613066196,
-0.08336718380451202,
-0.0645696297287941,
0.02188541181385517,
0.06984240561723709,
0.048046041280031204,
-0.11593560129404068,
-0.0913088470697403,
-0.04578634351491928,
-0.038317203521728516,
0.07600899040699005,
-0.17734277248382568,
-0.01458835881203413,
0.03238610923290253,
-0.01698830910027027,
0.09703796356916428,
0.0029430100694298744,
-0.13216149806976318,
-0.0019306524191051722,
0.07888323068618774,
-0.11429601162672043,
-0.10904492437839508,
-0.0572461299598217,
0.00769458943977952,
-0.03028230555355549,
-0.029465805739164352,
0.12822437286376953,
-0.08220706880092621,
-0.07284964621067047,
0.026011278852820396,
-0.016644377261400223,
-0.07291580736637115,
0.11742233484983444,
0.09539157152175903,
0.003707602620124817,
-0.0694541409611702,
0.1107008084654808,
0.07153759151697159,
-0.0050714691169559956,
0.00856584683060646,
0.14049193263053894,
-0.11564240604639053,
-0.06775347888469696,
-0.034062664955854416,
0.003731564385816455,
-0.03375060483813286,
-0.021638598293066025,
-0.06057816743850708,
-0.013463323935866356,
-0.002613985212519765,
-0.06599240750074387,
0.02932090498507023,
-0.02799079567193985,
-0.05120912939310074,
0.07520468533039093,
-0.08096428215503693,
0.014690535143017769,
0.08349820971488953,
0.0052115097641944885,
-0.11578540503978729,
0.18624937534332275,
-0.004329390358179808,
0.0661645159125328,
-0.028236165642738342,
0.03649849817156792,
-0.06421907991170883,
-0.0030109903309494257,
-0.07414738088846207,
0.019250167533755302,
-0.10811354219913483,
0.007584477309137583,
-0.028733985498547554,
-0.0327664352953434,
-0.01418673899024725,
0.06328863650560379,
-0.06686290353536606,
-0.042243391275405884,
-0.03893708065152168,
0.029022568836808205,
-0.09963827580213547,
0.03537836670875549,
0.015402217395603657,
-0.03253559768199921,
0.13373634219169617,
0.07328949123620987,
-0.04237078130245209,
0.06998361647129059,
-0.09809602051973343,
-0.03137248754501343,
0.015084940008819103,
-0.003355287481099367,
0.001946200500242412,
-0.021046053618192673,
0.06783293932676315,
-0.010997210629284382,
0.007700799498707056,
-0.003385051852092147,
0.10499197989702225,
-0.09778869152069092,
-0.041409313678741455,
-0.044313620775938034,
0.045574720948934555,
-0.1282760500907898,
0.07302042096853256,
0.10846802592277527,
0.041881803423166275,
0.04664839431643486,
-0.0773477703332901,
0.022630605846643448,
-0.15641021728515625,
-0.007015202660113573,
-0.024711867794394493,
-0.05267803743481636,
-0.01960303820669651,
-0.054443877190351486,
0.057455189526081085,
-0.011392035521566868,
0.1165992021560669,
0.03976515308022499,
-0.06278959661722183,
0.012407025322318077,
-0.011031187139451504,
0.10236357152462006,
-0.007586417719721794,
0.11965890973806381,
0.022009599953889847,
-0.006798350717872381,
-0.026136865839362144,
0.007307562977075577,
0.021235013380646706,
-0.06331777572631836,
0.09496959298849106,
0.14367014169692993,
-0.011475830338895321,
0.052929747849702835,
-0.04509134590625763,
-0.08008509874343872,
-0.06477541476488113,
0.019868595525622368,
-0.01623765006661415,
-0.024518631398677826,
-0.12563496828079224,
-0.07220581918954849,
0.22801803052425385,
-0.10826991498470306,
0.09363315999507904,
0.03937722370028496,
-0.09028397500514984,
-0.11333376914262772,
-0.17822419106960297,
-0.026843184605240822,
-0.07025608420372009,
0.007502035703510046,
-0.07691085338592529,
0.018926376476883888,
0.016278866678476334,
0.023709319531917572,
-0.02926654927432537,
0.19829650223255157,
-0.037470944225788116,
-0.13672125339508057,
0.0684373751282692,
-0.025949710980057716,
0.01044231653213501,
-0.006690889596939087,
-0.039643120020627975,
0.031445376574993134,
0.025843899697065353,
0.05658901855349541,
0.021344348788261414,
0.06571298837661743,
0.023320550099015236,
-0.08559965342283249,
-0.0007935867179185152,
-0.027150528505444527,
0.030095821246504784,
0.025540625676512718,
0.13124209642410278,
0.005530869122594595,
-0.08818931132555008,
-0.0027622331399470568,
0.21540763974189758,
0.020165391266345978,
-0.143284872174263,
-0.15815772116184235,
0.14788134396076202,
0.005793801974505186,
-0.01848878711462021,
-0.007997607812285423,
-0.07608343660831451,
-0.009412378072738647,
0.21801462769508362,
0.24368564784526825,
-0.07504227012395859,
0.017142046242952347,
0.0022704738657921553,
-0.0058998423628509045,
0.03501124680042267,
0.13166235387325287,
0.0546940416097641,
0.15224067866802216,
-0.05958855524659157,
0.08855339884757996,
0.027177361771464348,
-0.008607186377048492,
-0.1522897481918335,
0.12278717756271362,
0.016729945316910744,
0.015467153862118721,
-0.02413707971572876,
0.08226164430379868,
-0.009329608641564846,
-0.09623882919549942,
-0.05476958677172661,
0.002202110830694437,
-0.10411487519741058,
0.00530819920822978,
-0.04287504032254219,
0.023819398134946823,
0.07430554926395416,
-0.008152767084538937,
0.05574222654104233,
0.11474642902612686,
0.010890288278460503,
-0.10619111359119415,
-0.052251897752285004,
0.11782538890838623,
-0.001974075101315975,
0.11693501472473145,
-0.025545503944158554,
0.04348398372530937,
0.08135499060153961,
-0.02587878704071045,
-0.10740009695291519,
0.0813773050904274,
0.034496087580919266,
-0.0453030951321125,
0.02933793142437935,
0.07577119022607803,
-0.01808221824467182,
-0.08310279250144958,
0.018463606014847755,
-0.05098677799105644,
0.009710931219160557,
-0.025937067344784737,
0.05495424196124077,
-0.1175369918346405,
0.0686437264084816,
-0.1348923295736313,
0.10060559958219528,
0.1839836984872818,
-0.05272902920842171,
0.009832511655986309,
-0.07334159314632416,
0.042443402111530304,
0.08842790871858597,
0.031585246324539185,
-0.032519709318876266,
-0.12333408743143082,
-0.03853694722056389,
0.009459913708269596,
0.030008770525455475,
-0.15783119201660156,
-0.04345085099339485,
-0.03712410852313042,
-0.025769082829356194,
-0.06081000715494156,
0.14438778162002563,
0.05646326765418053,
0.047329824417829514,
0.012278860434889793,
-0.10915710031986237,
-0.044594258069992065,
0.04440414905548096,
-0.1539291888475418,
-0.06053895875811577
] |
null | null |
transformers
|
# Personal DialoGPT Model
|
{"tags": ["conversational"]}
|
text-generation
|
bonebambi/DialoGPT-small-ThakirClone
|
[
"transformers",
"pytorch",
"gpt2",
"text-generation",
"conversational",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
# Personal DialoGPT Model
|
[
"# Personal DialoGPT Model"
] |
[
"TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"# Personal DialoGPT Model"
] |
[
51,
7
] |
[
"passage: TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n# Personal DialoGPT Model"
] |
[
-0.013048808090388775,
0.06021147221326828,
-0.005783938802778721,
0.013958388939499855,
0.14544299244880676,
-0.01806739903986454,
0.1659075766801834,
0.13064159452915192,
-0.004324611742049456,
-0.03570028394460678,
0.0889839380979538,
0.1748768836259842,
0.0022326563484966755,
0.05585828050971031,
-0.054340802133083344,
-0.29537978768348694,
0.04832177236676216,
0.04510147124528885,
0.07056012004613876,
0.11485455930233002,
0.09371862560510635,
-0.03807985037565231,
0.09028392285108566,
0.024512887001037598,
-0.12602056562900543,
0.009868393652141094,
0.03162707760930061,
-0.11769705265760422,
0.13774262368679047,
0.06948722898960114,
0.024774452671408653,
0.010170499794185162,
-0.03788784146308899,
-0.15852703154087067,
0.031279515475034714,
-0.009807943366467953,
-0.03295792266726494,
0.02695859782397747,
0.021787768229842186,
-0.07130707055330276,
0.14297670125961304,
0.13217221200466156,
0.01153564453125,
0.049784861505031586,
-0.15286162495613098,
-0.01863701082766056,
0.01150808110833168,
0.052107520401477814,
0.08008676767349243,
0.12553317844867706,
-0.05658089369535446,
0.11134108901023865,
-0.08624481409788132,
0.1009792685508728,
0.08065293729305267,
-0.28938156366348267,
-0.012571386992931366,
0.11251706629991531,
0.023401957005262375,
0.05789438635110855,
-0.03909793868660927,
0.06880117207765579,
0.006520743481814861,
0.0164069514721632,
-0.02915431372821331,
-0.07124263048171997,
-0.11539819091558456,
0.01408698596060276,
-0.09151915460824966,
-0.01628687232732773,
0.2377876490354538,
-0.04440774768590927,
0.06288298964500427,
-0.11140765249729156,
-0.07932751625776291,
-0.025691192597150803,
-0.05408379063010216,
-0.038010984659194946,
-0.09273432195186615,
0.08850739151239395,
-0.008905977942049503,
-0.08443761616945267,
-0.13105671107769012,
-0.03211786597967148,
-0.16858159005641937,
0.12765398621559143,
0.03953579068183899,
0.035142261534929276,
-0.21151317656040192,
0.09634167701005936,
0.011654864065349102,
-0.06431891024112701,
0.04385466128587723,
-0.103164903819561,
0.02457885630428791,
0.013512092642486095,
-0.02903372421860695,
-0.03711957857012749,
0.08016254007816315,
0.093216173350811,
0.024390308186411858,
0.01938934251666069,
-0.029608145356178284,
0.04090144485235214,
0.06566376984119415,
0.08283527195453644,
0.0002742937358561903,
-0.05252537131309509,
0.015255886130034924,
-0.10345696657896042,
-0.018682176247239113,
-0.04612671956419945,
-0.1839587241411209,
-0.005747679620981216,
0.030406609177589417,
0.06609699875116348,
0.04364343732595444,
0.13313931226730347,
-0.0019470381084829569,
-0.041563309729099274,
0.05461965128779411,
-0.014835025183856487,
-0.018414612859487534,
0.014959082938730717,
-0.01301512774080038,
0.10896787792444229,
-0.014380373060703278,
0.04491670802235603,
-0.14261314272880554,
-0.010789449326694012,
-0.032647278159856796,
-0.010131141170859337,
-0.026291752234101295,
-0.04117988795042038,
-0.00723618920892477,
-0.039525315165519714,
0.004036822821944952,
-0.1620883345603943,
-0.15378792583942413,
-0.02163652703166008,
-0.021849170327186584,
-0.05586634948849678,
-0.11913582682609558,
-0.10146109014749527,
0.015401155687868595,
0.0322013758122921,
-0.077916719019413,
-0.06148559972643852,
-0.07515516877174377,
0.07353732734918594,
-0.025777263566851616,
0.07544145733118057,
-0.09390159696340561,
0.08487721532583237,
-0.08585382252931595,
-0.044515542685985565,
-0.026626188308000565,
0.12181751430034637,
0.013893509283661842,
0.07869051396846771,
-0.0027613884303718805,
-0.020436106249690056,
-0.10517703741788864,
0.07599592208862305,
-0.05812564119696617,
0.22568035125732422,
-0.08567668497562408,
-0.0980721265077591,
0.28289660811424255,
-0.05314332991838455,
-0.1059209480881691,
0.14318571984767914,
0.002906656125560403,
0.09663143008947372,
0.15070278942584991,
0.20353716611862183,
0.03006395511329174,
0.01416553370654583,
0.07701633125543594,
0.1289450079202652,
-0.10009632259607315,
-0.0030616815201938152,
0.010725573636591434,
-0.02592826448380947,
-0.0604068785905838,
0.03271862119436264,
0.10795938223600388,
0.07055113464593887,
-0.049670230597257614,
-0.03428072854876518,
0.00553296459838748,
-0.008308573625981808,
0.058838482946157455,
-0.02537301741540432,
0.10961899161338806,
-0.039928995072841644,
-0.02555195800960064,
0.008439665660262108,
0.023045402020215988,
-0.05313680320978165,
0.02925197407603264,
-0.0900467038154602,
0.06632155179977417,
0.024615800008177757,
0.058245521038770676,
-0.13616056740283966,
-0.040003519505262375,
-0.028494488447904587,
0.13558417558670044,
0.060535334050655365,
0.1311691552400589,
0.053594157099723816,
-0.050857383757829666,
-0.01239355094730854,
0.03876267001032829,
0.1696697175502777,
-0.02983245439827442,
-0.06342148035764694,
-0.0826948955655098,
0.10013411194086075,
-0.055215589702129364,
0.11497454345226288,
-0.048066046088933945,
0.01644733175635338,
-0.006047913338989019,
0.07493539154529572,
-0.010914694517850876,
0.03176223114132881,
0.010345591232180595,
-0.007774231024086475,
-0.028008950874209404,
0.008546470664441586,
0.10522263497114182,
0.0009730160236358643,
-0.06128334254026413,
0.23445728421211243,
-0.18253660202026367,
0.1505470871925354,
0.1660557985305786,
-0.24733324348926544,
0.026825904846191406,
-0.11716578900814056,
-0.0435449443757534,
0.012892944738268852,
0.01922835409641266,
-0.04049820452928543,
0.2704106569290161,
-0.02395317144691944,
0.16761253774166107,
-0.04479500651359558,
-0.034993331879377365,
-0.0358320027589798,
-0.060178011655807495,
0.008640069514513016,
0.07240668684244156,
0.07311566919088364,
-0.1354125738143921,
0.16243304312229156,
0.04871803894639015,
0.07190600782632828,
0.20464764535427094,
0.036811426281929016,
0.01478339172899723,
0.087142713367939,
0.003976028878241777,
-0.07578884810209274,
-0.08189460635185242,
-0.3158131539821625,
-0.040340155363082886,
0.0735374167561531,
0.04172460362315178,
0.1239105835556984,
-0.09969688206911087,
-0.025614671409130096,
-0.006510540377348661,
-0.037974800914525986,
0.03313430771231651,
0.09992553293704987,
0.030281847342848778,
0.1359136700630188,
-0.009749316610395908,
-0.04757368192076683,
0.049048710614442825,
0.007132282014936209,
-0.10059182345867157,
0.1725512444972992,
-0.13726384937763214,
-0.37808865308761597,
-0.1093190386891365,
-0.20556947588920593,
-0.06351924687623978,
0.06697945296764374,
0.10668784379959106,
-0.13249732553958893,
-0.020117441192269325,
-0.009943981654942036,
0.12000758200883865,
-0.06644231081008911,
-0.01857275329530239,
-0.056737449020147324,
0.015114144422113895,
-0.12058846652507782,
-0.11653778702020645,
-0.04674852266907692,
-0.03363294526934624,
-0.07554136961698532,
0.11225232481956482,
-0.12450052797794342,
0.001685097930021584,
0.23853909969329834,
0.0787336453795433,
0.045236680656671524,
-0.05323633924126625,
0.21380159258842468,
-0.1053624078631401,
-0.0001334144180873409,
0.2019454538822174,
-0.017541000619530678,
0.04567978158593178,
0.1491367518901825,
-0.001966604497283697,
-0.0782284289598465,
0.04346178472042084,
-0.009344751015305519,
-0.05556032434105873,
-0.20438753068447113,
-0.1632833182811737,
-0.09543877840042114,
0.0967807024717331,
0.02169620618224144,
0.06668179482221603,
0.1741151362657547,
0.05568603426218033,
-0.04016494378447533,
0.004428705666214228,
0.043000977486371994,
0.07813548296689987,
0.277063250541687,
-0.09344352781772614,
0.13472282886505127,
-0.01364726573228836,
-0.17311078310012817,
0.07893466204404831,
0.05166492611169815,
0.05819439888000488,
0.058898478746414185,
0.07323353737592697,
0.010582010261714458,
0.04828035831451416,
0.1172395870089531,
0.04843248054385185,
0.02743608132004738,
-0.03815118595957756,
-0.0363205187022686,
-0.03840228170156479,
-0.04708850756287575,
0.026366114616394043,
0.07948638498783112,
-0.1746499389410019,
-0.014266891404986382,
-0.055706217885017395,
0.07163646072149277,
0.09723170846700668,
0.09793645888566971,
-0.162073016166687,
-0.028623538091778755,
0.06568117439746857,
-0.06517746299505234,
-0.1443445384502411,
0.08555512130260468,
0.041392311453819275,
-0.15051966905593872,
0.018377669155597687,
0.014869865030050278,
0.11428263038396835,
-0.102552130818367,
0.09188266098499298,
-0.10806574672460556,
-0.08712338656187057,
0.011279277503490448,
0.10716796666383743,
-0.2737039625644684,
0.19385500252246857,
-0.01473439671099186,
-0.059737678617239,
-0.1154552549123764,
-0.014638958498835564,
0.015488949604332447,
0.098875030875206,
0.0708303228020668,
-0.00685368524864316,
0.043913278728723526,
0.010030361823737621,
-0.0526653416454792,
0.038540031760931015,
0.09321767836809158,
-0.02738896571099758,
-0.03483074530959129,
-0.050584692507982254,
-0.0063171167857944965,
-0.019451068714261055,
-0.11690042912960052,
-0.008422785438597202,
-0.17635031044483185,
0.07646184414625168,
0.0742020532488823,
0.08932094275951385,
0.03870343416929245,
-0.01013091579079628,
-0.10320451855659485,
0.21811673045158386,
0.016857728362083435,
-0.08027174323797226,
-0.08071383833885193,
-0.029856469482183456,
0.029379503801465034,
-0.05008665472269058,
0.01891850307583809,
-0.04686275124549866,
0.034748051315546036,
-0.04674576222896576,
-0.1681484580039978,
0.09092425554990768,
-0.10125921666622162,
-0.04367338493466377,
-0.01202328596264124,
0.21669839322566986,
-0.014605987817049026,
0.020537810400128365,
0.047421231865882874,
-0.02938479371368885,
-0.10625775903463364,
-0.08851082623004913,
-0.013782781548798084,
0.05056115984916687,
-0.014138239435851574,
0.05074210464954376,
-0.038341790437698364,
-0.07724093645811081,
-0.07613073289394379,
-0.03447191044688225,
0.3281380534172058,
0.11583354324102402,
-0.03606148064136505,
0.17881076037883759,
0.13734303414821625,
-0.07013867795467377,
-0.2830623686313629,
-0.11347241699695587,
-0.0870685800909996,
-0.05094730481505394,
-0.07145868241786957,
-0.1813488006591797,
0.09149875491857529,
-0.03868449851870537,
-0.01582617312669754,
0.06410331279039383,
-0.30721721053123474,
-0.09287510812282562,
0.1873684525489807,
-0.02296457812190056,
0.37324216961860657,
-0.09648281335830688,
-0.08723518997430801,
-0.058371320366859436,
-0.16646692156791687,
0.147234246134758,
-0.012940637767314911,
0.10620693862438202,
-0.00008388470450881869,
0.1661759912967682,
0.06132911890745163,
0.011935651302337646,
0.09237989038228989,
0.018879013136029243,
-0.05405472218990326,
-0.11344414949417114,
-0.040612928569316864,
-0.05192381888628006,
0.028259895741939545,
0.04297501966357231,
-0.029330048710107803,
0.015450065024197102,
-0.13350261747837067,
-0.05927055701613426,
-0.08917073160409927,
0.03609996661543846,
0.04253779724240303,
-0.07261993736028671,
-0.037377141416072845,
-0.06154102459549904,
0.0015143761411309242,
0.02196049876511097,
0.10993031412363052,
-0.12983255088329315,
0.1395028680562973,
0.052669957280159,
0.1589258313179016,
-0.12566779553890228,
-0.008995439857244492,
-0.06884440034627914,
-0.05308595672249794,
0.04460509866476059,
-0.06857971101999283,
0.030744053423404694,
0.09162923693656921,
-0.048752423375844955,
0.10357396304607391,
0.07599959522485733,
0.009036150760948658,
0.01399153284728527,
0.09329665452241898,
-0.22266153991222382,
-0.07595246285200119,
-0.08116845041513443,
0.028919508680701256,
0.07100872695446014,
0.08580632507801056,
0.1923755556344986,
-0.000650464033242315,
-0.039354871958494186,
-0.004161592572927475,
0.02264438569545746,
-0.042962778359651566,
0.0755743607878685,
-0.037482693791389465,
0.012392773292958736,
-0.15042747557163239,
0.0634896531701088,
-0.00014593951345887035,
-0.09030564874410629,
0.02578072063624859,
0.1392775923013687,
-0.09065494686365128,
-0.13924424350261688,
-0.047558631747961044,
0.09728831797838211,
-0.0916738510131836,
-0.03064759261906147,
-0.030578818172216415,
-0.15591910481452942,
0.06252940744161606,
0.10732295364141464,
0.05400609225034714,
0.08109672367572784,
-0.08981561660766602,
-0.00786274392157793,
-0.03250380977988243,
-0.01224524062126875,
0.028861558064818382,
-0.038189712911844254,
-0.05989081412553787,
0.08202801644802094,
-0.026967395097017288,
0.11575552076101303,
-0.09376074373722076,
-0.12685097754001617,
-0.1506149023771286,
0.04230370745062828,
-0.10070512443780899,
-0.09076070785522461,
-0.10770498961210251,
-0.028384488075971603,
0.003165848320350051,
-0.014603939838707447,
-0.039859380573034286,
-0.058517150580883026,
-0.1224692091345787,
0.040933091193437576,
-0.03598189353942871,
0.022906014695763588,
-0.06108416989445686,
0.05299099162220955,
0.05466795712709427,
-0.004800828639417887,
0.17593684792518616,
0.1462915688753128,
-0.11868622899055481,
0.10045221447944641,
-0.16565142571926117,
-0.05212767422199249,
0.10689632594585419,
0.026352791115641594,
0.03815722465515137,
0.06421361863613129,
0.01866389811038971,
0.0582803413271904,
0.05049237236380577,
0.05559266731142998,
0.05059659108519554,
-0.09226836264133453,
0.07238586246967316,
-0.0565694235265255,
-0.13987931609153748,
-0.04371263459324837,
-0.04617593437433243,
0.009163083508610725,
0.028221068903803825,
0.08087790757417679,
-0.07525266706943512,
0.06543876230716705,
-0.046101897954940796,
0.04258731007575989,
0.018062012270092964,
-0.1415063440799713,
0.009533987380564213,
-0.09387307614088058,
0.04318857565522194,
0.02452300302684307,
0.2410578578710556,
0.05529094114899635,
-0.03249230235815048,
0.016892312094569206,
0.06933804601430893,
0.048738881945610046,
-0.019089164212346077,
0.18176041543483734,
0.10038843750953674,
-0.06153136119246483,
-0.0846366360783577,
0.07657323032617569,
0.013247720897197723,
0.042750850319862366,
0.09699449688196182,
-0.028333652764558792,
-0.02339785173535347,
0.09141333401203156,
0.026003288105130196,
0.0006060494342818856,
-0.09768451750278473,
-0.15117760002613068,
-0.04700921103358269,
0.051797740161418915,
-0.09248658269643784,
0.14976099133491516,
0.11805494129657745,
-0.004117221105843782,
0.04413290694355965,
-0.002516311127692461,
-0.06346102058887482,
-0.1848774552345276,
-0.18269412219524384,
-0.06644494831562042,
-0.14888879656791687,
-0.0035811830312013626,
-0.12472209334373474,
0.040573518723249435,
0.007984466850757599,
0.08251039683818817,
-0.082066111266613,
0.08595458418130875,
0.0018150806427001953,
-0.12489297240972519,
0.08033236861228943,
-0.038614992052316666,
0.09350708872079849,
-0.0683898776769638,
0.0022889557294547558,
-0.08606124669313431,
0.046502694487571716,
0.02076529711484909,
0.036586202681064606,
-0.06233896687626839,
0.003134238999336958,
-0.11637882888317108,
-0.06205221265554428,
-0.05513457953929901,
0.0479884147644043,
-0.024248315021395683,
0.17208774387836456,
0.022425444796681404,
-0.03772715851664543,
0.02221817709505558,
0.2699437737464905,
-0.08261463791131973,
-0.09518903493881226,
-0.07998912036418915,
0.22098860144615173,
0.0039255921728909016,
0.09391161799430847,
-0.010534289292991161,
0.012009406462311745,
-0.1007990837097168,
0.3557683229446411,
0.3215644061565399,
-0.06953628361225128,
0.01487854402512312,
0.0044026607647538185,
0.0448882058262825,
0.08740605413913727,
0.12359047681093216,
0.12762489914894104,
0.3079226613044739,
-0.05740072950720787,
-0.020222283899784088,
0.0014620802830904722,
-0.014279074035584927,
-0.08152655512094498,
0.03688944876194,
0.06075572595000267,
-0.05807368829846382,
-0.016145383939146996,
0.1146506816148758,
-0.2588891386985779,
0.12205447256565094,
-0.17010313272476196,
-0.14370304346084595,
-0.08409092575311661,
0.018266117200255394,
0.06969809532165527,
0.0450453944504261,
0.10705507546663284,
0.012435901910066605,
-0.06925185024738312,
0.10506851971149445,
0.021182402968406677,
-0.210024893283844,
-0.00796404480934143,
0.09047217667102814,
-0.05919202044606209,
-0.026582147926092148,
-0.0284173134714365,
0.07426580041646957,
0.07192851603031158,
0.05150933563709259,
0.005397267173975706,
0.08480636775493622,
-0.013765847310423851,
-0.07269999384880066,
0.045175567269325256,
0.05988011136651039,
0.02713518589735031,
-0.061134885996580124,
0.06659010797739029,
-0.13579052686691284,
0.05589422211050987,
0.007353511638939381,
-0.0018502046586945653,
-0.03490952029824257,
0.04098643735051155,
-0.0915704220533371,
0.06733576208353043,
0.07956817746162415,
-0.0008822708623483777,
-0.013363691978156567,
-0.026576368138194084,
-0.016639577224850655,
-0.0461883582174778,
-0.06891319155693054,
-0.08970851451158524,
-0.17903664708137512,
-0.11168204247951508,
0.03446175158023834,
-0.008509651757776737,
-0.16223573684692383,
0.01683996059000492,
-0.09630490839481354,
0.0472976453602314,
-0.11818549036979675,
0.101368248462677,
0.05146665871143341,
0.022344207391142845,
0.011776317842304707,
-0.03980530798435211,
0.05653753876686096,
0.11240746825933456,
-0.13578109443187714,
-0.08306589722633362
] |
null | null |
transformers
|
# DistilWav2Vec2 Adult/Child Speech Classifier 37M
DistilWav2Vec2 Adult/Child Speech Classifier is an audio classification model based on the [wav2vec 2.0](https://arxiv.org/abs/2006.11477) architecture. This model is a distilled version of [wav2vec2-adult-child-cls](https://huggingface.co/bookbot/wav2vec2-adult-child-cls) on a private adult/child speech classification dataset.
This model was trained using HuggingFace's PyTorch framework. All training was done on a Tesla P100, provided by Kaggle. Training metrics were logged via Tensorboard.
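As a minimal usage sketch (not an official snippet from this repository), the model can be loaded through the `audio-classification` pipeline; the file path below is a placeholder for any mono 16 kHz recording:
```python
from transformers import pipeline

# Load the distilled adult/child classifier through the audio-classification pipeline.
classifier = pipeline(
    "audio-classification",
    model="bookbot/distil-wav2vec2-adult-child-cls-37m",
)

# "speech.wav" is a placeholder path; decoding most formats requires ffmpeg.
predictions = classifier("speech.wav")
print(predictions)  # a list of {"label": ..., "score": ...} dictionaries
```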
## Model
| Model | #params | Arch. | Training/Validation data (audio) |
| ------------------------------------- | ------- | ----------- | ----------------------------------------- |
| `distil-wav2vec2-adult-child-cls-37m` | 37M | wav2vec 2.0 | Adult/Child Speech Classification Dataset |
## Evaluation Results
The model achieves the following results on evaluation:
| Dataset | Loss | Accuracy | F1 |
| --------------------------------- | ------ | -------- | ------ |
| Adult/Child Speech Classification | 0.1431 | 95.89% | 0.9624 |
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- `learning_rate`: 3e-05
- `train_batch_size`: 32
- `eval_batch_size`: 32
- `seed`: 42
- `gradient_accumulation_steps`: 4
- `total_train_batch_size`: 128
- `optimizer`: Adam with `betas=(0.9,0.999)` and `epsilon=1e-08`
- `lr_scheduler_type`: linear
- `lr_scheduler_warmup_ratio`: 0.1
- `num_epochs`: 5
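For illustration, the list above corresponds roughly to the following `TrainingArguments` sketch; this is a reconstruction rather than the original training script, and `output_dir` is a placeholder:
```python
from transformers import TrainingArguments

# Hedged reconstruction of the configuration listed above.
training_args = TrainingArguments(
    output_dir="distil-wav2vec2-adult-child-cls-37m",  # placeholder
    learning_rate=3e-5,
    per_device_train_batch_size=32,
    per_device_eval_batch_size=32,
    gradient_accumulation_steps=4,  # 32 x 4 = 128 effective train batch size on one GPU
    seed=42,
    lr_scheduler_type="linear",
    warmup_ratio=0.1,
    num_train_epochs=5,
    # Adam with betas=(0.9, 0.999) and epsilon=1e-08 matches the optimizer defaults.
)
```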
### Training results
| Training Loss | Epoch | Step | Validation Loss | Accuracy | F1 |
| :-----------: | :---: | :--: | :-------------: | :------: | :----: |
| 0.2586 | 1.0 | 96 | 0.2257 | 0.9298 | 0.9363 |
| 0.1917 | 2.0 | 192 | 0.1743 | 0.9460 | 0.9500 |
| 0.1568 | 3.0 | 288 | 0.1701 | 0.9511 | 0.9545 |
| 0.0965 | 4.0 | 384 | 0.1501 | 0.9548 | 0.9584 |
| 0.1179 | 5.0 | 480 | 0.1431 | 0.9589 | 0.9624 |
## Disclaimer
Do consider the biases which came from pre-training datasets that may be carried over into the results of this model.
## Authors
DistilWav2Vec2 Adult/Child Speech Classifier was trained and evaluated by [Ananto Joyoadikusumo](https://anantoj.github.io/). All computation and development were done on Kaggle.
### Framework versions
- Transformers 4.16.2
- Pytorch 1.10.2+cu102
- Datasets 1.18.3
- Tokenizers 0.10.3
|
{"language": "en", "license": "apache-2.0", "tags": ["audio-classification", "generated_from_trainer"], "metrics": ["accuracy", "f1"], "model-index": [{"name": "distil-wav2vec2-adult-child-cls-37m", "results": []}]}
|
audio-classification
|
bookbot/distil-wav2vec2-adult-child-cls-37m
|
[
"transformers",
"pytorch",
"tensorboard",
"wav2vec2",
"audio-classification",
"generated_from_trainer",
"en",
"arxiv:2006.11477",
"license:apache-2.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"2006.11477"
] |
[
"en"
] |
TAGS
#transformers #pytorch #tensorboard #wav2vec2 #audio-classification #generated_from_trainer #en #arxiv-2006.11477 #license-apache-2.0 #endpoints_compatible #region-us
|
DistilWav2Vec2 Adult/Child Speech Classifier 37M
================================================
DistilWav2Vec2 Adult/Child Speech Classifier is an audio classification model based on the wav2vec 2.0 architecture. This model is a distilled version of wav2vec2-adult-child-cls on a private adult/child speech classification dataset.
This model was trained using HuggingFace's PyTorch framework. All training was done on a Tesla P100, provided by Kaggle. Training metrics were logged via Tensorboard.
Model
-----
Evaluation Results
------------------
The model achieves the following results on evaluation:
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* 'learning\_rate': 3e-05
* 'train\_batch\_size': 32
* 'eval\_batch\_size': 32
* 'seed': 42
* 'gradient\_accumulation\_steps': 4
* 'total\_train\_batch\_size': 128
* 'optimizer': Adam with 'betas=(0.9,0.999)' and 'epsilon=1e-08'
* 'lr\_scheduler\_type': linear
* 'lr\_scheduler\_warmup\_ratio': 0.1
* 'num\_epochs': 5
### Training results
Disclaimer
----------
Do consider the biases which came from pre-training datasets that may be carried over into the results of this model.
Authors
-------
DistilWav2Vec2 Adult/Child Speech Classifier was trained and evaluated by Ananto Joyoadikusumo. All computation and development are done on Kaggle.
### Framework versions
* Transformers 4.16.2
* Pytorch 1.10.2+cu102
* Datasets 1.18.3
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* 'learning\\_rate': 3e-05\n* 'train\\_batch\\_size': 32\n* 'eval\\_batch\\_size': 32\n* 'seed': 42\n* 'gradient\\_accumulation\\_steps': 4\n* 'total\\_train\\_batch\\_size': 128\n* 'optimizer': Adam with 'betas=(0.9,0.999)' and 'epsilon=1e-08'\n* 'lr\\_scheduler\\_type': linear\n* 'lr\\_scheduler\\_warmup\\_ratio': 0.1\n* 'num\\_epochs': 5",
"### Training results\n\n\n\nDisclaimer\n----------\n\n\nDo consider the biases which came from pre-training datasets that may be carried over into the results of this model.\n\n\nAuthors\n-------\n\n\nDistilWav2Vec2 Adult/Child Speech Classifier was trained and evaluated by Ananto Joyoadikusumo. All computation and development are done on Kaggle.",
"### Framework versions\n\n\n* Transformers 4.16.2\n* Pytorch 1.10.2+cu102\n* Datasets 1.18.3\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #audio-classification #generated_from_trainer #en #arxiv-2006.11477 #license-apache-2.0 #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* 'learning\\_rate': 3e-05\n* 'train\\_batch\\_size': 32\n* 'eval\\_batch\\_size': 32\n* 'seed': 42\n* 'gradient\\_accumulation\\_steps': 4\n* 'total\\_train\\_batch\\_size': 128\n* 'optimizer': Adam with 'betas=(0.9,0.999)' and 'epsilon=1e-08'\n* 'lr\\_scheduler\\_type': linear\n* 'lr\\_scheduler\\_warmup\\_ratio': 0.1\n* 'num\\_epochs': 5",
"### Training results\n\n\n\nDisclaimer\n----------\n\n\nDo consider the biases which came from pre-training datasets that may be carried over into the results of this model.\n\n\nAuthors\n-------\n\n\nDistilWav2Vec2 Adult/Child Speech Classifier was trained and evaluated by Ananto Joyoadikusumo. All computation and development are done on Kaggle.",
"### Framework versions\n\n\n* Transformers 4.16.2\n* Pytorch 1.10.2+cu102\n* Datasets 1.18.3\n* Tokenizers 0.10.3"
] |
[
63,
170,
79,
35
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #audio-classification #generated_from_trainer #en #arxiv-2006.11477 #license-apache-2.0 #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* 'learning\\_rate': 3e-05\n* 'train\\_batch\\_size': 32\n* 'eval\\_batch\\_size': 32\n* 'seed': 42\n* 'gradient\\_accumulation\\_steps': 4\n* 'total\\_train\\_batch\\_size': 128\n* 'optimizer': Adam with 'betas=(0.9,0.999)' and 'epsilon=1e-08'\n* 'lr\\_scheduler\\_type': linear\n* 'lr\\_scheduler\\_warmup\\_ratio': 0.1\n* 'num\\_epochs': 5### Training results\n\n\n\nDisclaimer\n----------\n\n\nDo consider the biases which came from pre-training datasets that may be carried over into the results of this model.\n\n\nAuthors\n-------\n\n\nDistilWav2Vec2 Adult/Child Speech Classifier was trained and evaluated by Ananto Joyoadikusumo. All computation and development are done on Kaggle.### Framework versions\n\n\n* Transformers 4.16.2\n* Pytorch 1.10.2+cu102\n* Datasets 1.18.3\n* Tokenizers 0.10.3"
] |
[
-0.07341974973678589,
0.16885074973106384,
-0.003847966669127345,
0.05822892114520073,
0.07861470431089401,
-0.004116253927350044,
0.14707869291305542,
0.107424795627594,
-0.02463751658797264,
0.12783679366111755,
0.04930301010608673,
0.07326442003250122,
0.08419784158468246,
0.07653339952230453,
-0.059100598096847534,
-0.23475533723831177,
0.032690126448869705,
-0.07081862539052963,
-0.11169906705617905,
0.13235828280448914,
0.07712090015411377,
-0.10119420289993286,
0.03923111781477928,
-0.0043706693686544895,
-0.08067482709884644,
-0.030571065843105316,
-0.029091350734233856,
-0.04765314236283302,
0.06812094897031784,
0.04525856301188469,
0.08490551263093948,
0.05197741463780403,
0.048957470804452896,
-0.2818554639816284,
0.004889331292361021,
0.06351612508296967,
0.046497710049152374,
0.05393262207508087,
0.09910960495471954,
-0.03248085826635361,
0.18563047051429749,
-0.0576392337679863,
0.04580647498369217,
0.04297329857945442,
-0.11375408619642258,
-0.19825787842273712,
-0.08790487051010132,
0.05080043897032738,
0.13399983942508698,
0.0652850791811943,
-0.06816186010837555,
0.10934312641620636,
-0.043246474117040634,
0.09072574973106384,
0.18054886162281036,
-0.23999838531017303,
-0.04901966452598572,
-0.008094457909464836,
0.029631223529577255,
0.06608414649963379,
-0.11363320052623749,
-0.00384578388184309,
0.054186057299375534,
-0.026019500568509102,
0.03966042771935463,
-0.02690996415913105,
0.07928919047117233,
-0.02426949515938759,
-0.16752293705940247,
-0.0863383561372757,
0.13406091928482056,
0.08503945916891098,
-0.05724675580859184,
-0.12772798538208008,
-0.005520500708371401,
-0.12720659375190735,
0.009390534833073616,
-0.02112695947289467,
0.004725134931504726,
-0.04997027665376663,
0.0012567240046337247,
-0.020730646327137947,
-0.10310070961713791,
-0.03352852910757065,
0.04495938867330551,
0.20801009237766266,
0.021931225433945656,
-0.00865207053720951,
0.03911855071783066,
0.09441889822483063,
0.09767845273017883,
-0.1631910353899002,
0.010809216648340225,
-0.005049547180533409,
-0.08705832064151764,
-0.038769692182540894,
-0.01995932310819626,
0.008553704246878624,
0.01477512065321207,
0.13155771791934967,
-0.038036689162254333,
0.051021941006183624,
-0.026733754202723503,
-0.0030797843355685472,
0.025599094107747078,
0.12495899200439453,
-0.05224670469760895,
-0.048207662999629974,
-0.05245637148618698,
0.09537744522094727,
-0.0007471522549167275,
-0.03707697615027428,
-0.042409125715494156,
0.026174286380410194,
0.07378096133470535,
0.08752351999282837,
0.00501353619620204,
0.010099831968545914,
-0.08875632286071777,
-0.06419194489717484,
0.025815367698669434,
-0.12275225669145584,
0.025953738018870354,
0.06978090852499008,
-0.052408184856176376,
0.019460853189229965,
-0.006116658914834261,
0.05861187353730202,
-0.06064280495047569,
0.0754159614443779,
-0.0630006343126297,
-0.005525680724531412,
-0.03163667395710945,
-0.08477833867073059,
0.021920405328273773,
-0.06221497431397438,
-0.005647339392453432,
-0.039315637201070786,
-0.02366386540234089,
-0.08336605131626129,
0.045802388340234756,
-0.064961738884449,
-0.05215635523200035,
-0.07298796623945236,
-0.09495691955089569,
0.03422069922089577,
-0.005066830199211836,
0.1010686531662941,
-0.04291790351271629,
0.09826615452766418,
0.01252171490341425,
0.030017508193850517,
0.11587262898683548,
0.05026297643780708,
-0.07536657154560089,
0.06446685642004013,
-0.11822853237390518,
0.1073947325348854,
-0.0833040326833725,
0.0014258896699175239,
-0.16535532474517822,
-0.1067686453461647,
0.010412991978228092,
-0.008676784113049507,
0.048781611025333405,
0.13510683178901672,
-0.13062430918216705,
-0.08052989840507507,
0.10554125159978867,
-0.07987280189990997,
-0.11804842203855515,
0.1308765858411789,
-0.03651518374681473,
0.04559175297617912,
0.0461001843214035,
0.16766978800296783,
0.036667559295892715,
-0.10901027917861938,
-0.06382329761981964,
-0.11732326447963715,
0.12027262896299362,
0.13482239842414856,
0.10452689975500107,
-0.038227926939725876,
0.05283990874886513,
-0.03952529653906822,
-0.08186078071594238,
-0.010772290639579296,
-0.028864825144410133,
-0.090317003428936,
-0.0008520354167558253,
-0.04814158380031586,
0.03925366327166557,
0.023292534053325653,
-0.02119768038392067,
-0.05460088700056076,
-0.1386159211397171,
-0.028344424441456795,
0.07139772921800613,
-0.09485755115747452,
0.015217158943414688,
-0.08119171112775803,
0.052640244364738464,
-0.019423682242631912,
-0.02372615784406662,
-0.17030073702335358,
-0.015890929847955704,
0.04472482204437256,
-0.10150843113660812,
0.03974612429738045,
-0.011746219359338284,
0.027409851551055908,
0.03300762549042702,
-0.013043064624071121,
-0.04831434413790703,
-0.045900240540504456,
0.005056430585682392,
-0.03240278735756874,
-0.22718173265457153,
-0.035818006843328476,
-0.027132263407111168,
0.20377129316329956,
-0.2428278625011444,
-0.010732158087193966,
0.10320350527763367,
0.12867677211761475,
0.035564880818128586,
-0.05056995898485184,
0.03149108588695526,
0.025196917355060577,
-0.030914178118109703,
-0.04595484957098961,
0.008586213923990726,
-0.005664951168000698,
-0.11928689479827881,
0.0360209122300148,
-0.19638954102993011,
-0.08218982815742493,
0.08628049492835999,
0.0003557232557795942,
-0.08600953966379166,
-0.058249928057193756,
-0.0490940660238266,
-0.04955286160111427,
-0.01357776578515768,
0.007881478406488895,
0.18075139820575714,
0.05645492300391197,
0.0918499007821083,
-0.09093333035707474,
-0.07872878015041351,
0.02899160422384739,
-0.018842626363039017,
-0.024789979681372643,
0.1330554336309433,
0.03453954681754112,
-0.1343604177236557,
0.0941319540143013,
0.12003763765096664,
-0.020498165860772133,
0.1297055333852768,
-0.033637698739767075,
-0.11041377484798431,
-0.08225614577531815,
0.034535203129053116,
0.016372453421354294,
0.04660365730524063,
-0.10777617245912552,
0.021932853385806084,
0.02956360951066017,
0.0324961394071579,
-0.0120500847697258,
-0.16030320525169373,
0.028879135847091675,
0.04288666322827339,
-0.062151726335287094,
-0.021236592903733253,
0.005214817821979523,
-0.00890637282282114,
0.07909011840820312,
0.009938790462911129,
0.017553964629769325,
-0.023843076080083847,
-0.06647709757089615,
-0.11233551055192947,
0.1649516075849533,
-0.0607139877974987,
-0.13793841004371643,
-0.1002328023314476,
-0.0345379039645195,
-0.046653322875499725,
-0.006115737371146679,
0.03644070029258728,
-0.047406405210494995,
-0.05147029086947441,
-0.07874642312526703,
0.03450668975710869,
-0.005622323602437973,
-0.015709418803453445,
-0.006769545841962099,
0.021953104063868523,
0.03018670156598091,
-0.08452744781970978,
0.01119308453053236,
0.026922592893242836,
-0.029760349541902542,
0.0007951799198053777,
0.05502823367714882,
0.06663725525140762,
0.1666686087846756,
0.049563776701688766,
-0.012175768613815308,
-0.018408162519335747,
0.2240825742483139,
-0.15706728398799896,
0.0061766356229782104,
0.0933748260140419,
-0.0892036110162735,
0.03952842205762863,
0.18104662001132965,
0.012691052630543709,
-0.10087278485298157,
0.05729493871331215,
0.06764504313468933,
-0.023304253816604614,
-0.29022136330604553,
-0.03817116096615791,
-0.04450048878788948,
-0.01415756344795227,
0.08798661082983017,
0.025591164827346802,
-0.014885167591273785,
0.036073654890060425,
-0.06095802038908005,
-0.020182112231850624,
0.033338624984025955,
0.0649522915482521,
0.1047234833240509,
0.0350891649723053,
0.08182195574045181,
-0.018391387537121773,
-0.033670615404844284,
0.05289458855986595,
0.02528994530439377,
0.1808650642633438,
0.0004301570006646216,
0.21576671302318573,
0.06259778887033463,
0.08421777188777924,
-0.029638979583978653,
0.012498674914240837,
0.03555141016840935,
0.02992170862853527,
0.017593348398804665,
-0.08133372664451599,
-0.04573020339012146,
0.09800182282924652,
0.09949968755245209,
-0.01664532721042633,
-0.05486675724387169,
-0.0024019822012633085,
0.035749953240156174,
0.329944372177124,
0.09726744890213013,
-0.21032723784446716,
-0.06611839681863785,
0.0549657866358757,
-0.06110738590359688,
-0.04152863100171089,
-0.007897323928773403,
0.11362946778535843,
-0.09694650024175644,
0.05902091786265373,
-0.05149003118276596,
0.0705944150686264,
-0.11182697862386703,
-0.017956282943487167,
0.02940460667014122,
-0.0007064028177410364,
-0.013410807587206364,
0.06211409345269203,
-0.19231845438480377,
0.25589194893836975,
0.0012578521855175495,
0.06134193390607834,
-0.05696018040180206,
0.032850779592990875,
0.004739335272461176,
-0.07433871924877167,
0.14452728629112244,
-0.0068653663620352745,
-0.03931274265050888,
-0.14141732454299927,
-0.09960684180259705,
0.00856787245720625,
0.15530548989772797,
-0.09899413585662842,
0.13378378748893738,
-0.045884497463703156,
0.004433813970535994,
0.004609881434589624,
-0.06737291812896729,
-0.07117041200399399,
-0.0889270007610321,
0.07462102919816971,
-0.009209330193698406,
0.026676423847675323,
-0.04201675206422806,
-0.08368347585201263,
-0.07799260318279266,
0.15012148022651672,
-0.14862675964832306,
-0.06683581322431564,
-0.10738003253936768,
0.019877316430211067,
0.15827825665473938,
-0.060115426778793335,
0.025012735277414322,
0.004591183736920357,
0.14203666150569916,
0.026369329541921616,
0.01610700599849224,
0.09829941391944885,
-0.045127879828214645,
-0.23892386257648468,
-0.024843571707606316,
0.17442955076694489,
0.05090678483247757,
0.07155971229076385,
-0.013815267942845821,
0.05694654956459999,
-0.0036486145108938217,
-0.08681700378656387,
0.05842803418636322,
-0.012972907163202763,
-0.00602169893682003,
0.07597008347511292,
-0.006149249151349068,
-0.03095880337059498,
-0.1569337397813797,
-0.0645139291882515,
0.09410873800516129,
0.359358012676239,
-0.057509101927280426,
0.05655863508582115,
0.09406421333551407,
-0.08789906650781631,
-0.1538453847169876,
-0.009286491200327873,
0.1386396884918213,
0.045306846499443054,
0.04832790791988373,
-0.1762830913066864,
0.05470096692442894,
0.06767293065786362,
-0.028131108731031418,
0.10864941775798798,
-0.28442853689193726,
-0.13253220915794373,
0.06988067179918289,
0.06072073429822922,
-0.10426856577396393,
-0.15655680000782013,
-0.07029896229505539,
-0.011572131887078285,
-0.0680335983633995,
0.08669833838939667,
0.0038657095283269882,
0.11149126291275024,
0.04784286022186279,
0.07845652848482132,
0.040532197803258896,
-0.04124786704778671,
0.16221186518669128,
0.0314381942152977,
0.0345439612865448,
-0.053146276623010635,
-0.043227486312389374,
-0.019973088055849075,
-0.0674654021859169,
0.03314584493637085,
-0.046208176761865616,
0.022591494023799896,
-0.10981472581624985,
-0.05455108731985092,
-0.060028158128261566,
0.016309602186083794,
-0.061094071716070175,
-0.06429014354944229,
-0.03989693522453308,
0.06836719810962677,
0.0947556346654892,
-0.0005734630976803601,
0.06577086448669434,
-0.0609346367418766,
0.04073312506079674,
0.17754830420017242,
0.15416547656059265,
0.1150272861123085,
-0.06724285334348679,
-0.02080162800848484,
0.014270121231675148,
0.04151130095124245,
-0.14464588463306427,
0.06719164550304413,
0.14780758321285248,
0.0456976518034935,
0.17077523469924927,
0.002538911532610655,
-0.10709605365991592,
0.007161268964409828,
0.03271349519491196,
-0.10885652899742126,
-0.11776662617921829,
0.0020875982008874416,
0.011320723220705986,
-0.15887132287025452,
-0.07228536158800125,
0.14100554585456848,
-0.021372966468334198,
-0.017216404899954796,
0.017143281176686287,
0.03981148824095726,
-0.015435528010129929,
0.1557096242904663,
0.01609610579907894,
0.07233130931854248,
-0.0590585358440876,
0.08736459910869598,
0.09141109138727188,
-0.12768490612506866,
0.07265061885118484,
0.05198652669787407,
-0.037807706743478775,
-0.02002309262752533,
-0.02457234263420105,
0.020316872745752335,
0.007672795094549656,
-0.02316739782691002,
-0.06471564620733261,
-0.09453976154327393,
0.039356641471385956,
0.09401548653841019,
0.036887072026729584,
0.059898845851421356,
-0.025662364438176155,
0.0037477193400263786,
-0.12206071615219116,
0.14567646384239197,
0.042566508054733276,
0.02371411770582199,
-0.11577804386615753,
0.08744155615568161,
0.004080680664628744,
-0.01889868453145027,
-0.0025620809756219387,
-0.012367131188511848,
-0.0807759165763855,
0.028066910803318024,
-0.07487431168556213,
0.024269070476293564,
-0.04719199612736702,
-0.0034731898922473192,
0.017194675281643867,
-0.07848415523767471,
-0.056150540709495544,
0.017285728827118874,
-0.10540253669023514,
-0.03316478058695793,
-0.018842831254005432,
0.11903666704893112,
-0.1277827024459839,
-0.046765461564064026,
0.07084055244922638,
-0.11695415526628494,
0.09665621072053909,
0.003621331648901105,
-0.019648516550660133,
0.035778772085905075,
-0.11654376983642578,
0.052994441241025925,
0.032935068011283875,
0.025864647701382637,
0.013913379982113838,
-0.2440413385629654,
-0.014116308651864529,
-0.023952456191182137,
0.012935515493154526,
-0.0018541846657171845,
0.030193602666258812,
-0.1254940629005432,
-0.04537232220172882,
-0.02925356663763523,
-0.0695287361741066,
-0.040155574679374695,
0.03816714882850647,
0.027580687776207924,
0.035823773592710495,
0.21062541007995605,
-0.04016534984111786,
0.09820231050252914,
-0.1633230596780777,
-0.006662354338914156,
-0.006853544153273106,
-0.008449327200651169,
-0.04852669686079025,
-0.04027898237109184,
0.06217436119914055,
-0.08721834421157837,
0.07586601376533508,
-0.044210515916347504,
0.03677903860807419,
0.045700810849666595,
-0.09011674672365189,
-0.012553899548947811,
0.05229847878217697,
0.1655612289905548,
0.05847221612930298,
-0.024891117587685585,
0.05727168545126915,
-0.03443312644958496,
0.008821052499115467,
0.11136246472597122,
0.13386230170726776,
0.13020946085453033,
0.05884398892521858,
0.05554698780179024,
0.08636444061994553,
-0.09837877750396729,
-0.13200107216835022,
0.11834321171045303,
-0.049884382635354996,
0.11404348909854889,
-0.031116796657443047,
0.20840585231781006,
0.10055422782897949,
-0.21032260358333588,
0.05900062248110771,
-0.04240986332297325,
-0.0961025282740593,
-0.10885994136333466,
-0.12254565209150314,
-0.07786644995212555,
-0.06479897350072861,
0.023039158433675766,
-0.11306744068861008,
0.05838160961866379,
0.03290753439068794,
0.048087429255247116,
-0.007385616190731525,
0.10434938967227936,
-0.02625722438097,
-0.036527182906866074,
0.10581567138433456,
0.02034732513129711,
-0.01277783140540123,
-0.00130862754303962,
-0.03948226198554039,
0.04811883345246315,
0.011186708696186543,
0.07490135729312897,
-0.004720646422356367,
-0.06660550087690353,
0.028446031734347343,
-0.05591036006808281,
-0.1108817309141159,
0.019797932356595993,
0.0004159442614763975,
0.07670611888170242,
0.13789404928684235,
0.050107382237911224,
0.006904827430844307,
-0.009809623472392559,
0.17744217813014984,
-0.08263765275478363,
-0.008814236149191856,
-0.16165494918823242,
0.15405559539794922,
-0.027653511613607407,
0.0057900696992874146,
0.020107343792915344,
-0.11534774303436279,
0.00789670366793871,
0.11952663958072662,
0.10256225615739822,
-0.013994324952363968,
-0.0050791846588253975,
0.0033340617083013058,
0.025113608688116074,
-0.008185382932424545,
0.039475079625844955,
0.08598899096250534,
0.07209966331720352,
-0.04793643206357956,
-0.012101702392101288,
-0.05345753952860832,
-0.07005077600479126,
0.028936095535755157,
0.08515941351652145,
0.010914844460785389,
-0.022363727912306786,
-0.04256041347980499,
0.14588457345962524,
-0.07483641803264618,
-0.20197954773902893,
0.04617483541369438,
-0.13346143066883087,
-0.1743156760931015,
0.004126796964555979,
0.021105527877807617,
0.05022828280925751,
0.04621544107794762,
-0.0033009157050400972,
-0.07992219179868698,
0.13605691492557526,
0.014348913915455341,
-0.013735036365687847,
-0.033869873732328415,
0.06636744737625122,
-0.06299775838851929,
0.18606360256671906,
0.0035714376717805862,
0.10872502624988556,
0.09851137548685074,
0.03363829478621483,
-0.07729940861463547,
0.053710468113422394,
0.09552864730358124,
-0.12006272375583649,
0.030504705384373665,
0.20521558821201324,
-0.04544898122549057,
0.14080283045768738,
0.09215985983610153,
-0.05877376347780228,
0.03196452185511589,
-0.06365945190191269,
-0.044427450746297836,
-0.10010722279548645,
0.02247227169573307,
-0.07866107672452927,
0.1439661681652069,
0.21100081503391266,
-0.06808396428823471,
-0.011816482059657574,
-0.03626459836959839,
0.00913473591208458,
0.030237693339586258,
0.11523406952619553,
-0.024475840851664543,
-0.23075269162654877,
0.03926609829068184,
0.02097664773464203,
0.05955592915415764,
-0.2166491448879242,
-0.05463949218392372,
0.03928564488887787,
-0.024340342730283737,
-0.0463411919772625,
0.11243379861116409,
-0.002473023720085621,
0.03206070140004158,
-0.05457917973399162,
-0.11313773691654205,
-0.026335885748267174,
0.16219204664230347,
-0.16444915533065796,
-0.07067527621984482
] |
null | null |
transformers
|
# DistilWav2Vec2 Adult/Child Speech Classifier 52M
DistilWav2Vec2 Adult/Child Speech Classifier is an audio classification model based on the [wav2vec 2.0](https://arxiv.org/abs/2006.11477) architecture. This model is a distilled version of [wav2vec2-adult-child-cls](https://huggingface.co/bookbot/wav2vec2-adult-child-cls) on a private adult/child speech classification dataset.
This model was trained using HuggingFace's PyTorch framework. All training was done on a Tesla P100, provided by Kaggle. Training metrics were logged via Tensorboard.
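As a minimal usage sketch (not an official snippet from this repository), inference can be run with the feature extractor and model classes directly; the audio path is a placeholder and 16 kHz mono input is assumed:
```python
import torch
import torchaudio
from transformers import AutoFeatureExtractor, AutoModelForAudioClassification

model_id = "bookbot/distil-wav2vec2-adult-child-cls-52m"
feature_extractor = AutoFeatureExtractor.from_pretrained(model_id)
model = AutoModelForAudioClassification.from_pretrained(model_id)

# "speech.wav" is a placeholder; resample to the 16 kHz rate expected by wav2vec 2.0.
waveform, sample_rate = torchaudio.load("speech.wav")
waveform = torchaudio.functional.resample(waveform, sample_rate, 16_000).mean(dim=0)

inputs = feature_extractor(waveform.numpy(), sampling_rate=16_000, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits
predicted_label = model.config.id2label[int(logits.argmax(dim=-1))]
print(predicted_label)
```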
## Model
| Model | #params | Arch. | Training/Validation data (audio) |
| ------------------------------------- | ------- | ----------- | ----------------------------------------- |
| `distil-wav2vec2-adult-child-cls-52m` | 52M | wav2vec 2.0 | Adult/Child Speech Classification Dataset |
## Evaluation Results
The model achieves the following results on evaluation:
| Dataset | Loss | Accuracy | F1 |
| --------------------------------- | ------ | -------- | ------ |
| Adult/Child Speech Classification | 0.1301 | 96.03% | 0.9639 |
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- `learning_rate`: 3e-05
- `train_batch_size`: 32
- `eval_batch_size`: 32
- `seed`: 42
- `gradient_accumulation_steps`: 4
- `total_train_batch_size`: 128
- `optimizer`: Adam with `betas=(0.9,0.999)` and `epsilon=1e-08`
- `lr_scheduler_type`: linear
- `lr_scheduler_warmup_ratio`: 0.1
- `num_epochs`: 3
### Training results
| Training Loss | Epoch | Step | Validation Loss | Accuracy | F1 |
| :-----------: | :---: | :--: | :-------------: | :------: | :----: |
| 0.212 | 1.0 | 96 | 0.1561 | 0.9561 | 0.9596 |
| 0.1523 | 2.0 | 192 | 0.1408 | 0.9575 | 0.9616 |
| 0.0844 | 3.0 | 288 | 0.1301 | 0.9603 | 0.9639 |
## Disclaimer
Do consider the biases which came from pre-training datasets that may be carried over into the results of this model.
## Authors
DistilWav2Vec2 Adult/Child Speech Classifier was trained and evaluated by [Wilson Wongso](https://w11wo.github.io/). All computation and development were done on Kaggle.
## Framework versions
- Transformers 4.16.2
- Pytorch 1.10.2+cu102
- Datasets 1.18.3
- Tokenizers 0.10.3
|
{"language": "en", "license": "apache-2.0", "tags": ["audio-classification", "generated_from_trainer"], "metrics": ["accuracy", "f1"], "model-index": [{"name": "distil-wav2vec2-adult-child-cls-52m", "results": []}]}
|
audio-classification
|
bookbot/distil-wav2vec2-adult-child-cls-52m
|
[
"transformers",
"pytorch",
"tensorboard",
"safetensors",
"wav2vec2",
"audio-classification",
"generated_from_trainer",
"en",
"arxiv:2006.11477",
"license:apache-2.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"2006.11477"
] |
[
"en"
] |
TAGS
#transformers #pytorch #tensorboard #safetensors #wav2vec2 #audio-classification #generated_from_trainer #en #arxiv-2006.11477 #license-apache-2.0 #endpoints_compatible #region-us
|
DistilWav2Vec2 Adult/Child Speech Classifier 52M
================================================
DistilWav2Vec2 Adult/Child Speech Classifier is an audio classification model based on the wav2vec 2.0 architecture. This model is a distilled version of wav2vec2-adult-child-cls on a private adult/child speech classification dataset.
This model was trained using HuggingFace's PyTorch framework. All training was done on a Tesla P100, provided by Kaggle. Training metrics were logged via Tensorboard.
Model
-----
Evaluation Results
------------------
The model achieves the following results on evaluation:
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* 'learning\_rate': 3e-05
* 'train\_batch\_size': 32
* 'eval\_batch\_size': 32
* 'seed': 42
* 'gradient\_accumulation\_steps': 4
* 'total\_train\_batch\_size': 128
* 'optimizer': Adam with 'betas=(0.9,0.999)' and 'epsilon=1e-08'
* 'lr\_scheduler\_type': linear
* 'lr\_scheduler\_warmup\_ratio': 0.1
* 'num\_epochs': 3
### Training results
Disclaimer
----------
Do consider the biases which came from pre-training datasets that may be carried over into the results of this model.
Authors
-------
DistilWav2Vec2 Adult/Child Speech Classifier was trained and evaluated by Wilson Wongso. All computation and development are done on Kaggle.
Framework versions
------------------
* Transformers 4.16.2
* Pytorch 1.10.2+cu102
* Datasets 1.18.3
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* 'learning\\_rate': 3e-05\n* 'train\\_batch\\_size': 32\n* 'eval\\_batch\\_size': 32\n* 'seed': 42\n* 'gradient\\_accumulation\\_steps': 4\n* 'total\\_train\\_batch\\_size': 128\n* 'optimizer': Adam with 'betas=(0.9,0.999)' and 'epsilon=1e-08'\n* 'lr\\_scheduler\\_type': linear\n* 'lr\\_scheduler\\_warmup\\_ratio': 0.1\n* 'num\\_epochs': 3",
"### Training results\n\n\n\nDisclaimer\n----------\n\n\nDo consider the biases which came from pre-training datasets that may be carried over into the results of this model.\n\n\nAuthors\n-------\n\n\nDistilWav2Vec2 Adult/Child Speech Classifier was trained and evaluated by Wilson Wongso. All computation and development are done on Kaggle.\n\n\nFramework versions\n------------------\n\n\n* Transformers 4.16.2\n* Pytorch 1.10.2+cu102\n* Datasets 1.18.3\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #safetensors #wav2vec2 #audio-classification #generated_from_trainer #en #arxiv-2006.11477 #license-apache-2.0 #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* 'learning\\_rate': 3e-05\n* 'train\\_batch\\_size': 32\n* 'eval\\_batch\\_size': 32\n* 'seed': 42\n* 'gradient\\_accumulation\\_steps': 4\n* 'total\\_train\\_batch\\_size': 128\n* 'optimizer': Adam with 'betas=(0.9,0.999)' and 'epsilon=1e-08'\n* 'lr\\_scheduler\\_type': linear\n* 'lr\\_scheduler\\_warmup\\_ratio': 0.1\n* 'num\\_epochs': 3",
"### Training results\n\n\n\nDisclaimer\n----------\n\n\nDo consider the biases which came from pre-training datasets that may be carried over into the results of this model.\n\n\nAuthors\n-------\n\n\nDistilWav2Vec2 Adult/Child Speech Classifier was trained and evaluated by Wilson Wongso. All computation and development are done on Kaggle.\n\n\nFramework versions\n------------------\n\n\n* Transformers 4.16.2\n* Pytorch 1.10.2+cu102\n* Datasets 1.18.3\n* Tokenizers 0.10.3"
] |
[
68,
170,
110
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #safetensors #wav2vec2 #audio-classification #generated_from_trainer #en #arxiv-2006.11477 #license-apache-2.0 #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* 'learning\\_rate': 3e-05\n* 'train\\_batch\\_size': 32\n* 'eval\\_batch\\_size': 32\n* 'seed': 42\n* 'gradient\\_accumulation\\_steps': 4\n* 'total\\_train\\_batch\\_size': 128\n* 'optimizer': Adam with 'betas=(0.9,0.999)' and 'epsilon=1e-08'\n* 'lr\\_scheduler\\_type': linear\n* 'lr\\_scheduler\\_warmup\\_ratio': 0.1\n* 'num\\_epochs': 3### Training results\n\n\n\nDisclaimer\n----------\n\n\nDo consider the biases which came from pre-training datasets that may be carried over into the results of this model.\n\n\nAuthors\n-------\n\n\nDistilWav2Vec2 Adult/Child Speech Classifier was trained and evaluated by Wilson Wongso. All computation and development are done on Kaggle.\n\n\nFramework versions\n------------------\n\n\n* Transformers 4.16.2\n* Pytorch 1.10.2+cu102\n* Datasets 1.18.3\n* Tokenizers 0.10.3"
] |
[
-0.0806550532579422,
0.1737031489610672,
-0.003713031532242894,
0.06088895723223686,
0.08642083406448364,
0.005234900861978531,
0.16034150123596191,
0.10433167964220047,
-0.029819602146744728,
0.1078934594988823,
0.08112217485904694,
0.07895489782094955,
0.07917075604200363,
0.08513465523719788,
-0.05367639288306236,
-0.24874427914619446,
0.04577365145087242,
-0.0691298097372055,
-0.10547030717134476,
0.14427225291728973,
0.0776120126247406,
-0.10344163328409195,
0.05868759751319885,
0.0021711676381528378,
-0.06650450080633163,
-0.05363401770591736,
-0.021514268592000008,
-0.06348605453968048,
0.08281716704368591,
0.04888931289315224,
0.09510161727666855,
0.08225775510072708,
0.07494194805622101,
-0.2649666368961334,
0.010861605405807495,
0.07799451053142548,
0.03592199459671974,
0.0685214027762413,
0.10213477164506912,
-0.050997719168663025,
0.15639877319335938,
-0.058608025312423706,
0.059161681681871414,
0.045494768768548965,
-0.11228476464748383,
-0.22108934819698334,
-0.08042488992214203,
0.04874838516116142,
0.11559014022350311,
0.06281828880310059,
-0.04845666512846947,
0.0932445377111435,
-0.037901390343904495,
0.09088863432407379,
0.1935037523508072,
-0.25512200593948364,
-0.0404752679169178,
0.017866378650069237,
0.030497580766677856,
0.0489104762673378,
-0.1080736592411995,
0.012012181803584099,
0.03659743815660477,
-0.021974695846438408,
0.08200688660144806,
-0.02099180407822132,
0.054406750947237015,
-0.0042405277490615845,
-0.16206303238868713,
-0.07509734481573105,
0.13603824377059937,
0.06717715412378311,
-0.06767665594816208,
-0.14107127487659454,
-0.02599080093204975,
-0.10852804780006409,
0.019736649468541145,
-0.01484699733555317,
0.009467086754739285,
-0.05328455939888954,
-0.00995550025254488,
-0.022793391719460487,
-0.0803358405828476,
-0.05831734091043472,
0.012224425561726093,
0.2223760038614273,
0.021740635856986046,
0.003363923169672489,
0.034610774368047714,
0.10494814813137054,
0.0797765776515007,
-0.14921978116035461,
-0.009409024380147457,
-0.022617487236857414,
-0.10705053061246872,
-0.02269393391907215,
-0.015184237621724606,
0.008470938540995121,
0.008550550788640976,
0.13228148221969604,
-0.04810033738613129,
0.06792528182268143,
0.0005186125636100769,
-0.004420677665621042,
0.01542386133223772,
0.14614354074001312,
-0.0706409215927124,
-0.01967167668044567,
-0.04877175763249397,
0.09779670089483261,
0.004231536295264959,
-0.04883139207959175,
-0.05397931858897209,
0.03230579197406769,
0.08140552788972855,
0.0696796327829361,
0.0009038380230776966,
0.049241624772548676,
-0.06173926591873169,
-0.06781214475631714,
0.02123316191136837,
-0.13752567768096924,
0.01728222705423832,
0.08049057424068451,
-0.06682491302490234,
0.00982038863003254,
0.0214517954736948,
0.034372273832559586,
-0.06069738790392876,
0.10377862304449081,
-0.06231817230582237,
-0.0076023731380701065,
-0.03485346585512161,
-0.10670285671949387,
0.00843377411365509,
-0.08442416042089462,
-0.022503957152366638,
-0.04654897004365921,
-0.06043111905455589,
-0.06797432154417038,
0.06495915353298187,
-0.05067861080169678,
-0.058666978031396866,
-0.07071630656719208,
-0.08463065326213837,
0.045279841870069504,
-0.019065560773015022,
0.08881518244743347,
-0.050490766763687134,
0.08304096013307571,
-0.004331687930971384,
0.0404849536716938,
0.10021447390317917,
0.05178622156381607,
-0.06305134296417236,
0.07054832577705383,
-0.1360703855752945,
0.11819889396429062,
-0.076106496155262,
-0.014185753650963306,
-0.16845622658729553,
-0.11192937195301056,
0.028658386319875717,
0.004004738759249449,
0.048790525645017624,
0.15878437459468842,
-0.12774965167045593,
-0.07741864025592804,
0.13800671696662903,
-0.08686285465955734,
-0.11366330832242966,
0.11563263833522797,
-0.041839152574539185,
0.011949723586440086,
0.039454031735658646,
0.16147932410240173,
0.05061644688248634,
-0.11566891521215439,
-0.030386753380298615,
-0.07390834391117096,
0.09504459798336029,
0.1347837597131729,
0.11512927711009979,
-0.0450759083032608,
0.011503063142299652,
-0.03015361726284027,
-0.09125228971242905,
-0.0395602323114872,
-0.039191052317619324,
-0.08358016610145569,
0.008978202939033508,
-0.03975991532206535,
0.06288957595825195,
0.00942027848213911,
-0.01018106285482645,
-0.042725928127765656,
-0.14901506900787354,
0.006336426828056574,
0.07364930212497711,
-0.1132262796163559,
0.017320845276117325,
-0.08665570616722107,
0.03625312075018883,
0.0013829694362357259,
-0.017127923667430878,
-0.16025930643081665,
-0.04612087085843086,
0.031198957934975624,
-0.11989166587591171,
0.040225185453891754,
-0.013217923231422901,
0.03733737766742706,
0.05911584571003914,
-0.02327708713710308,
-0.044668592512607574,
-0.03201134502887726,
-0.0007887725369073451,
-0.018513726070523262,
-0.2256048023700714,
-0.0544368177652359,
-0.027594199404120445,
0.19171833992004395,
-0.25206825137138367,
0.0050142742693424225,
0.07517524808645248,
0.0882827639579773,
0.017804088070988655,
-0.04791763052344322,
0.03720271214842796,
0.019510744139552116,
-0.03350408375263214,
-0.05219781771302223,
0.029470471665263176,
-0.013925096020102501,
-0.13966022431850433,
0.057607054710388184,
-0.21936282515525818,
-0.0458257831633091,
0.10072885453701019,
0.004545269999653101,
-0.07937326282262802,
-0.08265512436628342,
-0.04518511891365051,
-0.04492860659956932,
-0.02672838233411312,
0.01536369975656271,
0.19769176840782166,
0.05127199739217758,
0.10596711933612823,
-0.10147068649530411,
-0.07213590294122696,
0.025698572397232056,
-0.004621728323400021,
-0.0286614540964365,
0.13274522125720978,
0.035958629101514816,
-0.10275235772132874,
0.0893130972981453,
0.10327821224927902,
-0.015060247853398323,
0.1108551025390625,
-0.032528650015592575,
-0.0799781084060669,
-0.07067181915044785,
0.026611043140292168,
0.009077845141291618,
0.057860687375068665,
-0.08324220031499863,
0.018386563286185265,
0.03349456191062927,
0.03679845109581947,
-0.014815721660852432,
-0.1607004851102829,
0.018243148922920227,
0.04637570306658745,
-0.07365293800830841,
-0.05525122582912445,
-0.012317219749093056,
0.00004785972851095721,
0.08223897218704224,
0.009202925488352776,
0.006625263020396233,
-0.006309172138571739,
-0.05951528251171112,
-0.1170540302991867,
0.19616325199604034,
-0.07003418356180191,
-0.15985631942749023,
-0.1147938072681427,
-0.02741960622370243,
-0.06491166353225708,
0.0007870185654610395,
0.017587019130587578,
-0.03887008875608444,
-0.05199965834617615,
-0.09783667325973511,
0.05430253595113754,
-0.02099471539258957,
-0.00955809187144041,
-0.0030934056267142296,
0.0034373532980680466,
0.046528831124305725,
-0.09903569519519806,
0.008334992453455925,
0.00843788031488657,
-0.05355975776910782,
0.007637984585016966,
0.06910310685634613,
0.04630783945322037,
0.1684933751821518,
0.03525436297059059,
0.00730146374553442,
-0.02155284211039543,
0.20162230730056763,
-0.12514011561870575,
0.0035875553730875254,
0.0995071530342102,
-0.09046019613742828,
0.03965179622173309,
0.18152716755867004,
0.0218705665320158,
-0.09568343311548233,
0.03690118342638016,
0.05679057911038399,
-0.0357091948390007,
-0.29481083154678345,
-0.05096146836876869,
-0.039132121950387955,
0.004573724698275328,
0.10369686037302017,
0.015253537334501743,
-0.03169538080692291,
0.04353313520550728,
-0.05490153655409813,
-0.04229212924838066,
0.06422903388738632,
0.06877053529024124,
0.10045371949672699,
0.042193327099084854,
0.10629808157682419,
-0.014254368841648102,
-0.019790751859545708,
0.03934810683131218,
0.0035023074597120285,
0.18522168695926666,
-0.01992291584610939,
0.18866194784641266,
0.07428614050149918,
0.06829963624477386,
-0.0016250303015112877,
0.033806223422288895,
0.028238385915756226,
0.03038763627409935,
0.008964607492089272,
-0.07945910841226578,
-0.061577633023262024,
0.0827345997095108,
0.06718067824840546,
-0.010912803933024406,
-0.0832929015159607,
0.019008178263902664,
0.030051685869693756,
0.3001108765602112,
0.0870666429400444,
-0.24586167931556702,
-0.07179446518421173,
0.03767517954111099,
-0.04906019568443298,
-0.03863830864429474,
0.0030714760068804026,
0.12916795909404755,
-0.11372558027505875,
0.09113715589046478,
-0.06594423949718475,
0.07522977888584137,
-0.11825189739465714,
-0.008955427445471287,
0.044670045375823975,
0.037035439163446426,
-0.021354379132390022,
0.061895549297332764,
-0.20385494828224182,
0.2758801579475403,
-0.0028973128646612167,
0.042522914707660675,
-0.05422177165746689,
0.029069041833281517,
0.00335022178478539,
-0.040381819009780884,
0.14061321318149567,
-0.006507826037704945,
-0.0694706067442894,
-0.10498913377523422,
-0.11391731351613998,
0.022584332153201103,
0.1444689929485321,
-0.10251247882843018,
0.12251172959804535,
-0.036670465022325516,
0.0028452619444578886,
0.006299708504229784,
-0.023530013859272003,
-0.053351324051618576,
-0.1004924476146698,
0.05491693690419197,
-0.04486624523997307,
0.040659647434949875,
-0.050213560461997986,
-0.083786241710186,
-0.11228475719690323,
0.14682510495185852,
-0.15566396713256836,
-0.06539592146873474,
-0.11124200373888016,
0.007272339425981045,
0.13492728769779205,
-0.07085850089788437,
0.024898599833250046,
0.0021417897660285234,
0.11693732440471649,
0.01987808756530285,
-0.01492884662002325,
0.10278825461864471,
-0.04893817380070686,
-0.2345326691865921,
-0.024271314963698387,
0.1779795140028,
0.05142467841506004,
0.06845029443502426,
-0.020576026290655136,
0.053670305758714676,
0.015381457284092903,
-0.09643013775348663,
0.054899804294109344,
0.042991288006305695,
0.0076616317965090275,
0.07813531160354614,
0.003026113845407963,
-0.03385284170508385,
-0.12740249931812286,
-0.044504426419734955,
0.1108294427394867,
0.31512272357940674,
-0.07959938794374466,
0.08619381487369537,
0.11179232597351074,
-0.08086617290973663,
-0.18615922331809998,
-0.005053736735135317,
0.1181669756770134,
0.02668727934360504,
0.020040925592184067,
-0.19606630504131317,
0.06362554430961609,
0.06198734790086746,
-0.022206539288163185,
0.059646666049957275,
-0.2980292737483978,
-0.1476368010044098,
0.09309422969818115,
0.06762758642435074,
-0.08073762059211731,
-0.1494472473859787,
-0.06379583477973938,
-0.0173043180257082,
-0.08369199186563492,
0.1373259425163269,
-0.022283174097537994,
0.11059041321277618,
0.03883552923798561,
0.0607028529047966,
0.03833030164241791,
-0.04420386254787445,
0.1594618409872055,
0.029954073950648308,
0.0404304638504982,
-0.06147655099630356,
-0.023607954382896423,
-0.03016749583184719,
-0.06449415534734726,
0.045205846428871155,
-0.0604056715965271,
0.026678038761019707,
-0.09645189344882965,
-0.05545885115861893,
-0.058389246463775635,
0.03152212128043175,
-0.06028493866324425,
-0.06450643390417099,
-0.025988668203353882,
0.06155143305659294,
0.09468227624893188,
-0.0064848982729017735,
0.07019002735614777,
-0.06500747054815292,
0.058288607746362686,
0.19280532002449036,
0.1697414219379425,
0.0850849524140358,
-0.07220032811164856,
-0.011560937389731407,
0.017625605687499046,
0.058694835752248764,
-0.15188592672348022,
0.06199358403682709,
0.14252842962741852,
0.051478542387485504,
0.16742049157619476,
0.020227262750267982,
-0.09399648755788803,
-0.01168240699917078,
0.023192256689071655,
-0.13556915521621704,
-0.13769713044166565,
0.0008824659744277596,
-0.001943678711540997,
-0.153935506939888,
-0.033011119812726974,
0.13478927314281464,
-0.022085178643465042,
-0.003045222256332636,
0.016934527084231377,
0.05115741118788719,
-0.006677263416349888,
0.15423646569252014,
0.013394868932664394,
0.09346934407949448,
-0.07998642325401306,
0.11176630109548569,
0.08644108474254608,
-0.1402725726366043,
0.06506714224815369,
0.06358400732278824,
-0.04860934987664223,
-0.02068711444735527,
-0.035254042595624924,
0.005152927711606026,
0.018218664452433586,
-0.029119368642568588,
-0.0481947660446167,
-0.13003890216350555,
0.04397345334291458,
0.11091535538434982,
0.03805958107113838,
0.06903711706399918,
-0.014633961021900177,
-0.01043570414185524,
-0.11994799226522446,
0.12906259298324585,
0.039136920124292374,
0.024041227996349335,
-0.12136723101139069,
0.1058277040719986,
0.016428515315055847,
0.002851372119039297,
-0.006833583116531372,
-0.020287709310650826,
-0.11005368828773499,
0.019472766667604446,
-0.08070560544729233,
0.03498246893286705,
-0.055616460740566254,
0.007391359191387892,
0.009181836619973183,
-0.06704656779766083,
-0.0500064492225647,
0.01563323847949505,
-0.10040826350450516,
-0.02297542802989483,
-0.02078418619930744,
0.09599930047988892,
-0.1294729858636856,
-0.03538461774587631,
0.060036152601242065,
-0.11530283093452454,
0.11011983454227448,
0.015323713421821594,
-0.03147514909505844,
0.0207279734313488,
-0.12303270399570465,
0.03524124622344971,
0.0055482229217886925,
0.023248199373483658,
0.009223096072673798,
-0.24348071217536926,
-0.0034435915295034647,
-0.029825754463672638,
-0.009656769223511219,
0.004127766937017441,
0.05252645164728165,
-0.11810865998268127,
-0.0271700881421566,
-0.00683523528277874,
-0.04346758872270584,
-0.04773125797510147,
0.04204925522208214,
0.03480278328061104,
0.01708747260272503,
0.19306538999080658,
-0.05126814916729927,
0.10243885964155197,
-0.17868372797966003,
-0.008769881911575794,
-0.001124794245697558,
-0.028341932222247124,
-0.0464133620262146,
-0.034929633140563965,
0.07654763758182526,
-0.08733129501342773,
0.10741831362247467,
-0.023648137226700783,
0.023687390610575676,
0.04444222152233124,
-0.05193430557847023,
-0.024347025901079178,
0.0649985522031784,
0.1491224318742752,
0.038540419191122055,
-0.02395378053188324,
0.07564717531204224,
-0.020462986081838608,
0.01041620783507824,
0.11527795344591141,
0.17342600226402283,
0.14437709748744965,
0.028679070994257927,
0.04992376267910004,
0.07492871582508087,
-0.1067175343632698,
-0.12588746845722198,
0.1287803053855896,
-0.05165513604879379,
0.12541934847831726,
-0.034580666571855545,
0.16497661173343658,
0.08508063852787018,
-0.20202185213565826,
0.07053691148757935,
-0.030337892472743988,
-0.10691120475530624,
-0.14098340272903442,
-0.14837025105953217,
-0.08011195063591003,
-0.07669349014759064,
0.023026343435049057,
-0.10710189491510391,
0.049758315086364746,
0.06047920882701874,
0.05238209292292595,
0.0023826907854527235,
0.1102454662322998,
-0.0450654998421669,
-0.040913164615631104,
0.08276291191577911,
0.030461471527814865,
-0.012005160562694073,
0.0076252794824540615,
-0.05242945998907089,
0.057726021856069565,
0.01747450791299343,
0.06854892522096634,
-0.004048215691000223,
-0.024030065163969994,
0.04131375998258591,
-0.04555830359458923,
-0.10163179785013199,
0.012087803333997726,
0.010621253401041031,
0.07940702140331268,
0.134518563747406,
0.04450994357466698,
-0.007871494628489017,
-0.018404215574264526,
0.20344354212284088,
-0.07886644452810287,
-0.030893543735146523,
-0.15393003821372986,
0.2168567180633545,
-0.026249095797538757,
0.007765636779367924,
0.032528992742300034,
-0.08202441036701202,
-0.0009730304009281099,
0.15350334346294403,
0.17454160749912262,
-0.02798043005168438,
-0.0032348379027098417,
-0.0015060961013659835,
0.020280689001083374,
0.021307632327079773,
0.06424366682767868,
0.0844099223613739,
0.07536891102790833,
-0.04785708338022232,
-0.012648362666368484,
-0.0446021631360054,
-0.056336015462875366,
0.028415365144610405,
0.11784954369068146,
0.01530008390545845,
-0.02739541605114937,
-0.035890039056539536,
0.10871313512325287,
-0.09718827158212662,
-0.18916499614715576,
0.01622816175222397,
-0.15022307634353638,
-0.1582411229610443,
-0.008302855305373669,
0.0276335496455431,
0.048589762300252914,
0.036897625774145126,
0.005182834342122078,
-0.07007983326911926,
0.10865897685289383,
0.016466202214360237,
-0.04060078039765358,
-0.03348328918218613,
0.06981125473976135,
-0.08749114722013474,
0.20880644023418427,
0.0015112264081835747,
0.09437426179647446,
0.09649710357189178,
0.028111634775996208,
-0.0786236897110939,
0.05488317459821701,
0.08252059668302536,
-0.11647073924541473,
0.0335315503180027,
0.1906864196062088,
-0.039443597197532654,
0.14283011853694916,
0.07471652328968048,
-0.07260674983263016,
0.026690620929002762,
-0.05626089498400688,
-0.06956756860017776,
-0.07207424938678741,
0.01854770816862583,
-0.07540525496006012,
0.1482294648885727,
0.22017523646354675,
-0.06210766360163689,
-0.0160202719271183,
-0.04530080035328865,
0.0113079734146595,
0.004546872805804014,
0.08621694892644882,
-0.005245210602879524,
-0.23545020818710327,
0.031924277544021606,
-0.029998524114489555,
0.0581880621612072,
-0.24199052155017853,
-0.05146500840783119,
0.039705757051706314,
-0.0299210287630558,
-0.044762201607227325,
0.10647276788949966,
0.013134490698575974,
0.046204824000597,
-0.06302427500486374,
-0.07208351790904999,
-0.021394383162260056,
0.15952295064926147,
-0.16831287741661072,
-0.06721130013465881
] |
null | null |
transformers
|
# DistilWav2Vec2 XLS-R Adult/Child Speech Classifier 64M
DistilWav2Vec2 XLS-R Adult/Child Speech Classifier is an audio classification model based on the [XLS-R](https://arxiv.org/abs/2111.09296) architecture. This model is a distilled version of [wav2vec2-xls-r-adult-child-cls](https://huggingface.co/bookbot/wav2vec2-xls-r-adult-child-cls) on a private adult/child speech classification dataset.
This model was trained using HuggingFace's PyTorch framework. All training was done on a Tesla P100, provided by Kaggle. Training metrics were logged via Tensorboard.
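A minimal inference sketch is shown below, assuming the `audio-classification` pipeline from `transformers`; the audio path is a placeholder and this snippet is not part of the original training code.
```python
from transformers import pipeline

# Load the distilled adult/child speech classifier from the Hugging Face Hub.
classifier = pipeline(
    "audio-classification",
    model="bookbot/distil-wav2vec2-xls-r-adult-child-cls-64m",
)

# Classify a local speech recording (placeholder path, 16 kHz mono audio).
predictions = classifier("speech.wav")
print(predictions)  # list of {"label": ..., "score": ...} dicts
```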
## Model
| Model | #params | Arch. | Training/Validation data (text) |
| ------------------------------------------- | ------- | ----- | ----------------------------------------- |
| `distil-wav2vec2-xls-r-adult-child-cls-64m` | 64M | XLS-R | Adult/Child Speech Classification Dataset |
## Evaluation Results
The model achieves the following results on evaluation:
| Dataset | Loss | Accuracy | F1 |
| --------------------------------- | ------ | -------- | ------ |
| Adult/Child Speech Classification | 0.2571 | 93.86% | 0.9425 |
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training (a hedged `TrainingArguments` sketch follows the list):
- `learning_rate`: 3e-05
- `train_batch_size`: 16
- `eval_batch_size`: 16
- `seed`: 42
- `gradient_accumulation_steps`: 4
- `total_train_batch_size`: 64
- `optimizer`: Adam with `betas=(0.9,0.999)` and `epsilon=1e-08`
- `lr_scheduler_type`: linear
- `lr_scheduler_warmup_ratio`: 0.1
- `num_epochs`: 5
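These values map onto HuggingFace's `TrainingArguments` roughly as sketched below. This is an assumed reconstruction for illustration only, not the authors' actual training script; the output directory is a placeholder, and the Adam `betas`/`epsilon` values are the `TrainingArguments` defaults.
```python
from transformers import TrainingArguments

# Assumed reconstruction of the reported hyperparameters (not the original script).
training_args = TrainingArguments(
    output_dir="distil-wav2vec2-xls-r-adult-child-cls-64m",  # placeholder
    learning_rate=3e-5,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=16,
    seed=42,
    gradient_accumulation_steps=4,  # 16 x 4 = 64 effective train batch size
    lr_scheduler_type="linear",
    warmup_ratio=0.1,
    num_train_epochs=5,
    # Adam betas=(0.9, 0.999) and epsilon=1e-08 are the defaults.
)
```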
### Training results
| Training Loss | Epoch | Step | Validation Loss | Accuracy | F1 |
| :-----------: | :---: | :--: | :-------------: | :------: | :----: |
| 0.5509 | 1.0 | 191 | 0.3685 | 0.9086 | 0.9131 |
| 0.4543 | 2.0 | 382 | 0.3113 | 0.9247 | 0.9285 |
| 0.409 | 3.0 | 573 | 0.2723 | 0.9372 | 0.9418 |
| 0.3024 | 4.0 | 764 | 0.2786 | 0.9381 | 0.9417 |
| 0.3103 | 5.0 | 955 | 0.2571 | 0.9386 | 0.9425 |
## Disclaimer
Do consider the biases from the pre-training datasets, which may be carried over into the results of this model.
## Authors
DistilWav2Vec2 XLS-R Adult/Child Speech Classifier was trained and evaluated by [Ananto Joyoadikusumo](https://anantoj.github.io/). All computation and development are done on Kaggle.
## Framework versions
- Transformers 4.17.0.dev0
- Pytorch 1.10.2+cu102
- Datasets 1.18.3
- Tokenizers 0.11.0
|
{"language": "en", "license": "apache-2.0", "tags": ["audio-classification", "generated_from_trainer"], "metrics": ["accuracy", "f1"], "model-index": [{"name": "distil-wav2vec2-xls-r-adult-child-cls-64m", "results": []}]}
|
audio-classification
|
bookbot/distil-wav2vec2-xls-r-adult-child-cls-64m
|
[
"transformers",
"pytorch",
"tensorboard",
"wav2vec2",
"audio-classification",
"generated_from_trainer",
"en",
"arxiv:2111.09296",
"license:apache-2.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"2111.09296"
] |
[
"en"
] |
TAGS
#transformers #pytorch #tensorboard #wav2vec2 #audio-classification #generated_from_trainer #en #arxiv-2111.09296 #license-apache-2.0 #endpoints_compatible #region-us
|
DistilWav2Vec2 XLS-R Adult/Child Speech Classifier 64M
======================================================
DistilWav2Vec2 XLS-R Adult/Child Speech Classifier is an audio classification model based on the XLS-R architecture. This model is a distilled version of wav2vec2-xls-r-adult-child-cls on a private adult/child speech classification dataset.
This model was trained using HuggingFace's PyTorch framework. All training was done on a Tesla P100, provided by Kaggle. Training metrics were logged via Tensorboard.
Model
-----
Evaluation Results
------------------
The model achieves the following results on evaluation:
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* 'learning\_rate': 3e-05
* 'train\_batch\_size': 16
* 'eval\_batch\_size': 16
* 'seed': 42
* 'gradient\_accumulation\_steps': 4
* 'total\_train\_batch\_size': 64
* 'optimizer': Adam with 'betas=(0.9,0.999)' and 'epsilon=1e-08'
* 'lr\_scheduler\_type': linear
* 'lr\_scheduler\_warmup\_ratio': 0.1
* 'num\_epochs': 5
### Training results
Disclaimer
----------
Do consider the biases which came from pre-training datasets that may be carried over into the results of this model.
Authors
-------
DistilWav2Vec2 XLS-R Adult/Child Speech Classifier was trained and evaluated by Ananto Joyoadikusumo. All computation and development are done on Kaggle.
Framework versions
------------------
* Transformers 4.17.0.dev0
* Pytorch 1.10.2+cu102
* Datasets 1.18.3
* Tokenizers 0.11.0
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* 'learning\\_rate': 3e-05\n* 'train\\_batch\\_size': 16\n* 'eval\\_batch\\_size': 16\n* 'seed': 42\n* 'gradient\\_accumulation\\_steps': 4\n* 'total\\_train\\_batch\\_size': 64\n* 'optimizer': Adam with 'betas=(0.9,0.999)' and 'epsilon=1e-08'\n* 'lr\\_scheduler\\_type': linear\n* 'lr\\_scheduler\\_warmup\\_ratio': 0.1\n* 'num\\_epochs': 5",
"### Training results\n\n\n\nDisclaimer\n----------\n\n\nDo consider the biases which came from pre-training datasets that may be carried over into the results of this model.\n\n\nAuthors\n-------\n\n\nDistilWav2Vec2 XLS-R Adult/Child Speech Classifier was trained and evaluated by Ananto Joyoadikusumo. All computation and development are done on Kaggle.\n\n\nFramework versions\n------------------\n\n\n* Transformers 4.17.0.dev0\n* Pytorch 1.10.2+cu102\n* Datasets 1.18.3\n* Tokenizers 0.11.0"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #audio-classification #generated_from_trainer #en #arxiv-2111.09296 #license-apache-2.0 #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* 'learning\\_rate': 3e-05\n* 'train\\_batch\\_size': 16\n* 'eval\\_batch\\_size': 16\n* 'seed': 42\n* 'gradient\\_accumulation\\_steps': 4\n* 'total\\_train\\_batch\\_size': 64\n* 'optimizer': Adam with 'betas=(0.9,0.999)' and 'epsilon=1e-08'\n* 'lr\\_scheduler\\_type': linear\n* 'lr\\_scheduler\\_warmup\\_ratio': 0.1\n* 'num\\_epochs': 5",
"### Training results\n\n\n\nDisclaimer\n----------\n\n\nDo consider the biases which came from pre-training datasets that may be carried over into the results of this model.\n\n\nAuthors\n-------\n\n\nDistilWav2Vec2 XLS-R Adult/Child Speech Classifier was trained and evaluated by Ananto Joyoadikusumo. All computation and development are done on Kaggle.\n\n\nFramework versions\n------------------\n\n\n* Transformers 4.17.0.dev0\n* Pytorch 1.10.2+cu102\n* Datasets 1.18.3\n* Tokenizers 0.11.0"
] |
[
63,
170,
121
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #audio-classification #generated_from_trainer #en #arxiv-2111.09296 #license-apache-2.0 #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* 'learning\\_rate': 3e-05\n* 'train\\_batch\\_size': 16\n* 'eval\\_batch\\_size': 16\n* 'seed': 42\n* 'gradient\\_accumulation\\_steps': 4\n* 'total\\_train\\_batch\\_size': 64\n* 'optimizer': Adam with 'betas=(0.9,0.999)' and 'epsilon=1e-08'\n* 'lr\\_scheduler\\_type': linear\n* 'lr\\_scheduler\\_warmup\\_ratio': 0.1\n* 'num\\_epochs': 5### Training results\n\n\n\nDisclaimer\n----------\n\n\nDo consider the biases which came from pre-training datasets that may be carried over into the results of this model.\n\n\nAuthors\n-------\n\n\nDistilWav2Vec2 XLS-R Adult/Child Speech Classifier was trained and evaluated by Ananto Joyoadikusumo. All computation and development are done on Kaggle.\n\n\nFramework versions\n------------------\n\n\n* Transformers 4.17.0.dev0\n* Pytorch 1.10.2+cu102\n* Datasets 1.18.3\n* Tokenizers 0.11.0"
] |
[
-0.06653735041618347,
0.166920468211174,
-0.003993975929915905,
0.061406608670949936,
0.08919188380241394,
0.00007299699063878506,
0.13304543495178223,
0.11407528072595596,
-0.02771356701850891,
0.13155515491962433,
0.07630451768636703,
0.08751344680786133,
0.07813594490289688,
0.08478686213493347,
-0.05159049481153488,
-0.23204679787158966,
0.03707781434059143,
-0.07235920429229736,
-0.11321716010570526,
0.12833303213119507,
0.08636089414358139,
-0.10234231501817703,
0.05462406203150749,
-0.01463368721306324,
-0.07384414225816727,
-0.024338526651263237,
-0.018852630630135536,
-0.04805956408381462,
0.07244491577148438,
0.05542859435081482,
0.08656530827283859,
0.050486888736486435,
0.059215471148490906,
-0.2939605414867401,
0.00639547361060977,
0.06718384474515915,
0.037054143846035004,
0.0602068230509758,
0.07793860137462616,
-0.0349019430577755,
0.16613994538784027,
-0.06262048333883286,
0.059985943138599396,
0.037934042513370514,
-0.11479319632053375,
-0.20181891322135925,
-0.09089355170726776,
0.06277292221784592,
0.1171153336763382,
0.07265374064445496,
-0.054657645523548126,
0.0800870954990387,
-0.040334660559892654,
0.09604258090257645,
0.18296700716018677,
-0.23457252979278564,
-0.04663046449422836,
-0.009878803975880146,
0.027603261172771454,
0.0677638128399849,
-0.11526846140623093,
-0.004555905237793922,
0.041086189448833466,
-0.012102646753191948,
0.05730438232421875,
-0.0285336896777153,
0.05953891947865486,
-0.012827018275856972,
-0.15501469373703003,
-0.09538320451974869,
0.14380061626434326,
0.07115913927555084,
-0.055628083646297455,
-0.12015451490879059,
-0.013581914827227592,
-0.1504090428352356,
0.009030385874211788,
-0.023598596453666687,
0.0026672326494008303,
-0.04244207218289375,
0.004452661145478487,
-0.016980333253741264,
-0.09690927714109421,
-0.04242408275604248,
0.03686422482132912,
0.19658607244491577,
0.015678932890295982,
-0.0034798262640833855,
0.044033877551555634,
0.09647512435913086,
0.08178455382585526,
-0.15766045451164246,
0.0012380188563838601,
0.0016731597715988755,
-0.08613908290863037,
-0.03623319789767265,
-0.012378385290503502,
0.02237081155180931,
0.02138134278357029,
0.1456059217453003,
-0.044756930321455,
0.06644242256879807,
-0.012483304366469383,
0.0014481598045676947,
0.00889077689498663,
0.13345444202423096,
-0.053568363189697266,
-0.03225893899798393,
-0.039429932832717896,
0.0978340357542038,
0.0034572980366647243,
-0.047788262367248535,
-0.05337672308087349,
0.03580017387866974,
0.08259635418653488,
0.07991151511669159,
-0.00364925479516387,
0.021454384550452232,
-0.0789351761341095,
-0.055431973189115524,
0.01655544899404049,
-0.1350860297679901,
0.02813517116010189,
0.07162884622812271,
-0.05216340348124504,
0.004312931094318628,
0.007691604550927877,
0.04124048352241516,
-0.05780034139752388,
0.07782765477895737,
-0.06937222182750702,
-0.007921198382973671,
-0.040466148406267166,
-0.09239677339792252,
0.02092762291431427,
-0.06444605439901352,
-0.012152976356446743,
-0.048582691699266434,
-0.04406524449586868,
-0.07982928305864334,
0.05690572410821915,
-0.05986425653100014,
-0.06121028959751129,
-0.07463433593511581,
-0.09689988940954208,
0.05147461220622063,
-0.005629647057503462,
0.09672924876213074,
-0.04839612916111946,
0.10322998464107513,
-0.001432794495485723,
0.03238065540790558,
0.0935266762971878,
0.059571653604507446,
-0.0647505670785904,
0.07342281192541122,
-0.1462109088897705,
0.11218126863241196,
-0.08840997517108917,
-0.0032174072694033384,
-0.16545075178146362,
-0.1062559261918068,
0.029849335551261902,
-0.00805540569126606,
0.052430715411901474,
0.13203652203083038,
-0.14924055337905884,
-0.07474790513515472,
0.10695919394493103,
-0.07974997907876968,
-0.10370921343564987,
0.12544357776641846,
-0.04232297092676163,
0.037357721477746964,
0.04229861870408058,
0.1742798537015915,
0.05836314335465431,
-0.10766448825597763,
-0.059831470251083374,
-0.09611281752586365,
0.1035456508398056,
0.13976676762104034,
0.10258486121892929,
-0.03642561286687851,
0.0357784666121006,
-0.03123169019818306,
-0.08002719283103943,
-0.012748822569847107,
-0.028161868453025818,
-0.08851306140422821,
0.00540460180491209,
-0.0339898057281971,
0.036728635430336,
0.0191974937915802,
-0.02149542048573494,
-0.04619220271706581,
-0.1346958726644516,
-0.008003977127373219,
0.07777632027864456,
-0.10282662510871887,
0.012014733627438545,
-0.08172329515218735,
0.06016117334365845,
-0.005765007808804512,
-0.011919371783733368,
-0.16949786245822906,
-0.021439382806420326,
0.03592754527926445,
-0.09478193521499634,
0.043680861592292786,
-0.016791364178061485,
0.03046862594783306,
0.04088107496500015,
-0.017285030335187912,
-0.05226757004857063,
-0.05121546983718872,
0.0064763701520860195,
-0.029551714658737183,
-0.22793938219547272,
-0.041067346930503845,
-0.025609048083424568,
0.19763129949569702,
-0.2532348930835724,
-0.0029061445966362953,
0.09728149324655533,
0.11241919547319412,
0.03318434953689575,
-0.05064334347844124,
0.01728014647960663,
0.02899204194545746,
-0.033545102924108505,
-0.05216025933623314,
0.012363885529339314,
-0.00847073458135128,
-0.11985643953084946,
0.028223399072885513,
-0.20486347377300262,
-0.05381406098604202,
0.08798855543136597,
-0.0009009030181914568,
-0.0958619937300682,
-0.0683957189321518,
-0.04161626845598221,
-0.04619063436985016,
-0.021269865334033966,
0.002657216740772128,
0.1933436095714569,
0.0507064089179039,
0.09700711816549301,
-0.08300501108169556,
-0.06915558874607086,
0.028893468901515007,
-0.013561655767261982,
-0.027431882917881012,
0.13915666937828064,
0.044723350554704666,
-0.12974239885807037,
0.10591190308332443,
0.11396905034780502,
-0.025105973705649376,
0.12717333436012268,
-0.03351612389087677,
-0.10186605155467987,
-0.08568733185529709,
0.036617085337638855,
0.01585361361503601,
0.05097700282931328,
-0.10428798943758011,
0.021767951548099518,
0.025984037667512894,
0.02645891159772873,
-0.020066216588020325,
-0.1625036746263504,
0.02338898740708828,
0.039362065494060516,
-0.06931305676698685,
-0.023806707933545113,
-0.007719110231846571,
-0.009085199795663357,
0.07885301113128662,
0.02027244120836258,
0.005985662806779146,
-0.009379614144563675,
-0.056679029017686844,
-0.11370266228914261,
0.17271241545677185,
-0.08236831426620483,
-0.15191660821437836,
-0.11178677529096603,
-0.037212811410427094,
-0.04529231786727905,
0.00014763268700335175,
0.03029540367424488,
-0.04458501189947128,
-0.050345972180366516,
-0.08380462229251862,
0.032977934926748276,
-0.01093320269137621,
-0.007825165055692196,
-0.010454309172928333,
0.01585225947201252,
0.03747529909014702,
-0.08173687011003494,
0.0037331434432417154,
0.028873998671770096,
-0.028414275497198105,
0.0017048317240551114,
0.06450382620096207,
0.06605764478445053,
0.1633317917585373,
0.039876166731119156,
-0.003074360778555274,
-0.015606855973601341,
0.22732791304588318,
-0.14779117703437805,
0.008409315720200539,
0.09369993954896927,
-0.08985280990600586,
0.045931555330753326,
0.19406349956989288,
0.018877966329455376,
-0.09406784921884537,
0.04488340765237808,
0.0599847137928009,
-0.022627850994467735,
-0.2711193263530731,
-0.03374413773417473,
-0.04386425018310547,
-0.006250461097806692,
0.10459258407354355,
0.030409757047891617,
-0.01618446409702301,
0.030581751838326454,
-0.054672133177518845,
-0.010920979082584381,
0.0512414388358593,
0.06824135780334473,
0.1210101917386055,
0.02772974967956543,
0.08400847017765045,
-0.012918838299810886,
-0.03243507817387581,
0.04431724175810814,
0.022839754819869995,
0.18082182109355927,
-0.00471216905862093,
0.20982207357883453,
0.06072208285331726,
0.0819285660982132,
-0.01933603733778,
0.019536087289452553,
0.029896777123212814,
0.025886140763759613,
0.012451180256903172,
-0.07999929040670395,
-0.061684973537921906,
0.09380365163087845,
0.09486803412437439,
-0.014655808918178082,
-0.056419335305690765,
0.013024047948420048,
0.028470437973737717,
0.3236086964607239,
0.0899469256401062,
-0.22144988179206848,
-0.07662244141101837,
0.04905056208372116,
-0.05336697772145271,
-0.05080275982618332,
0.00017160060815513134,
0.13027706742286682,
-0.09644770622253418,
0.06003836169838905,
-0.05112961679697037,
0.06751830130815506,
-0.11276557296514511,
-0.014065487310290337,
0.043215326964855194,
0.020495329052209854,
-0.011394165456295013,
0.06686066091060638,
-0.21585610508918762,
0.24859067797660828,
0.00496110413223505,
0.05264148488640785,
-0.05533219501376152,
0.037350475788116455,
0.006049973424524069,
-0.05958286672830582,
0.13956403732299805,
-0.004965723492205143,
-0.0616769939661026,
-0.15033479034900665,
-0.10544411838054657,
0.0053105601109564304,
0.14307670295238495,
-0.10000020265579224,
0.1281166821718216,
-0.04941998049616814,
0.0003895260451827198,
0.006092623807489872,
-0.05924547463655472,
-0.05537324398756027,
-0.11114376038312912,
0.0652683898806572,
-0.010740608908236027,
0.040302399545907974,
-0.04704530909657478,
-0.07571389526128769,
-0.06897228956222534,
0.1474541425704956,
-0.16072632372379303,
-0.055873069912195206,
-0.10596287250518799,
0.027701305225491524,
0.14646755158901215,
-0.06892456114292145,
0.029258968308568,
0.004010613076388836,
0.1354590654373169,
0.02315702848136425,
0.005327186081558466,
0.0980302095413208,
-0.04236279055476189,
-0.23186038434505463,
-0.02467219904065132,
0.1706106960773468,
0.042342543601989746,
0.07624086737632751,
-0.02102070115506649,
0.061932988464832306,
0.003608990227803588,
-0.09509189426898956,
0.056700967252254486,
0.01488825585693121,
-0.009147847071290016,
0.07407410442829132,
-0.013527635484933853,
-0.03412195295095444,
-0.14258475601673126,
-0.0596252903342247,
0.11737233400344849,
0.3330390453338623,
-0.07110467553138733,
0.07141724228858948,
0.08470010757446289,
-0.08488564938306808,
-0.17548765242099762,
-0.017630508169531822,
0.13545651733875275,
0.03847417235374451,
0.027126768603920937,
-0.19206804037094116,
0.05582990497350693,
0.06999096274375916,
-0.019133886322379112,
0.11751800775527954,
-0.28663888573646545,
-0.1351652890443802,
0.07449985295534134,
0.0686865821480751,
-0.1034289225935936,
-0.14761100709438324,
-0.06801622360944748,
-0.01953684724867344,
-0.05210806429386139,
0.10312802344560623,
-0.0013220078544691205,
0.1099790558218956,
0.0437210313975811,
0.07724855840206146,
0.03754998371005058,
-0.04104297608137131,
0.16131944954395294,
0.015142984688282013,
0.04383116587996483,
-0.057823095470666885,
-0.05593414604663849,
-0.024788735434412956,
-0.06366736441850662,
0.03459545224905014,
-0.05592872574925423,
0.015207603573799133,
-0.10322718322277069,
-0.049026042222976685,
-0.05902915075421333,
0.017201997339725494,
-0.06485624611377716,
-0.059652697294950485,
-0.0367698036134243,
0.06514255702495575,
0.09565555304288864,
-0.0006399102858267725,
0.0747389942407608,
-0.06193190813064575,
0.04522072896361351,
0.18440783023834229,
0.13908985257148743,
0.10774481296539307,
-0.07556404173374176,
-0.007957417517900467,
0.013031044974923134,
0.042763836681842804,
-0.14556975662708282,
0.06406243145465851,
0.1497606784105301,
0.042767178267240524,
0.19040875136852264,
0.015584902837872505,
-0.09251143783330917,
0.00237546069547534,
0.03282824158668518,
-0.11966564506292343,
-0.1248188391327858,
-0.0021329496521502733,
0.018116462975740433,
-0.1607327163219452,
-0.07862789183855057,
0.137920543551445,
-0.028280213475227356,
-0.01150988694280386,
0.017167532816529274,
0.0497046522796154,
-0.024702029302716255,
0.15449294447898865,
0.018532639369368553,
0.08385657519102097,
-0.0670967623591423,
0.08702198415994644,
0.09825621545314789,
-0.1408943384885788,
0.07256002724170685,
0.06107645481824875,
-0.0407213494181633,
-0.022253993898630142,
-0.03158259391784668,
0.03408857434988022,
0.008817470632493496,
-0.027378259226679802,
-0.06209975481033325,
-0.10759397596120834,
0.054555829614400864,
0.106890007853508,
0.03339302912354469,
0.06437283009290695,
-0.03104383870959282,
0.001095289597287774,
-0.11881792545318604,
0.14393550157546997,
0.02628026343882084,
0.023237479850649834,
-0.12725435197353363,
0.0963861346244812,
0.010841241106390953,
-0.010428247042000294,
-0.0017002529930323362,
-0.018108664080500603,
-0.09694895893335342,
0.02439243532717228,
-0.06516554206609726,
0.03477972745895386,
-0.0490078367292881,
0.003799243364483118,
0.0216122604906559,
-0.061259325593709946,
-0.047110289335250854,
0.013333483599126339,
-0.10388056188821793,
-0.03717026486992836,
-0.020404472947120667,
0.10768398642539978,
-0.1291845142841339,
-0.05148964747786522,
0.06582818180322647,
-0.1190515011548996,
0.10773248970508575,
0.015226440504193306,
-0.015425401739776134,
0.02356468141078949,
-0.10978816449642181,
0.04744664207100868,
0.028428684920072556,
0.026613593101501465,
0.00857701525092125,
-0.2446950078010559,
-0.012018145062029362,
-0.035331256687641144,
-0.005208970978856087,
0.002076943637803197,
0.03673931583762169,
-0.12522391974925995,
-0.0359373539686203,
-0.02274143323302269,
-0.06731312721967697,
-0.05272599309682846,
0.049349937587976456,
0.03300520405173302,
0.028674593195319176,
0.20083341002464294,
-0.05373735353350639,
0.10120370239019394,
-0.16415700316429138,
-0.005984109826385975,
-0.00473820511251688,
-0.012618368491530418,
-0.058825790882110596,
-0.04175499826669693,
0.06516364961862564,
-0.08938711136579514,
0.07293311506509781,
-0.038634043186903,
0.026378938928246498,
0.045481543987989426,
-0.08992813527584076,
-0.027369359508156776,
0.05562308430671692,
0.1619402915239334,
0.05138583853840828,
-0.031461093574762344,
0.06166451796889305,
-0.027023732662200928,
0.015516545623540878,
0.1307251900434494,
0.13366223871707916,
0.1311478316783905,
0.05536789447069168,
0.049941983073949814,
0.07747635245323181,
-0.10590724647045135,
-0.12130971252918243,
0.13110733032226562,
-0.05328158289194107,
0.11981070786714554,
-0.03466062620282173,
0.20048768818378448,
0.09089849889278412,
-0.2095297873020172,
0.059333909302949905,
-0.046936094760894775,
-0.10715555399656296,
-0.11363847553730011,
-0.12885501980781555,
-0.08164357393980026,
-0.07028264552354813,
0.01851789467036724,
-0.11494288593530655,
0.06767302006483078,
0.027096638455986977,
0.04386860132217407,
-0.0019937006291002035,
0.10064768046140671,
-0.04197680577635765,
-0.03647300601005554,
0.08766527473926544,
0.025728266686201096,
-0.013698900118470192,
-0.008158022537827492,
-0.04021083191037178,
0.05128601938486099,
-0.0042587523348629475,
0.07262974232435226,
0.002970554633066058,
-0.03574120253324509,
0.03304888680577278,
-0.05047370120882988,
-0.10348685085773468,
0.021672820672392845,
0.004756633657962084,
0.07421544194221497,
0.13545049726963043,
0.045895904302597046,
-0.0030295902397483587,
-0.00969022698700428,
0.186097651720047,
-0.08181652426719666,
-0.01767531782388687,
-0.1601773500442505,
0.16859093308448792,
-0.021333614364266396,
0.011760277673602104,
0.016011349856853485,
-0.10209552198648453,
0.0035193050280213356,
0.1248120665550232,
0.11548313498497009,
-0.022055232897400856,
-0.003132237121462822,
0.0032852059230208397,
0.025130044668912888,
-0.005607132334262133,
0.04198120906949043,
0.09586093574762344,
0.0716080442070961,
-0.05903754383325577,
-0.01929120533168316,
-0.057859841734170914,
-0.06281262636184692,
0.014148542657494545,
0.08300764858722687,
0.010744224302470684,
-0.0157407745718956,
-0.042626190930604935,
0.11643342673778534,
-0.08513768017292023,
-0.19524163007736206,
0.04603464528918266,
-0.15082482993602753,
-0.1640893667936325,
-0.0005162977031432092,
0.02840033546090126,
0.04387291893362999,
0.03641040623188019,
0.0028063515201210976,
-0.07319912314414978,
0.1357114315032959,
0.015273425728082657,
-0.0326983816921711,
-0.03628434240818024,
0.06972533464431763,
-0.04194948449730873,
0.1824132204055786,
-0.009943063370883465,
0.10266478359699249,
0.10347607731819153,
0.0391588918864727,
-0.06880529224872589,
0.05276218801736832,
0.09532371908426285,
-0.11783052980899811,
0.022207314148545265,
0.19788426160812378,
-0.04257624223828316,
0.14991395175457,
0.08092888444662094,
-0.06719877570867538,
0.028100119903683662,
-0.05327857285737991,
-0.05936338007450104,
-0.0823366641998291,
0.010548616759479046,
-0.07296253740787506,
0.14665250480175018,
0.21328771114349365,
-0.05886813998222351,
-0.016304301097989082,
-0.043391723185777664,
0.006611524615436792,
0.026807229965925217,
0.10816840082406998,
-0.024897217750549316,
-0.22751390933990479,
0.03827674686908722,
0.002207244513556361,
0.05543256923556328,
-0.216522216796875,
-0.06249191239476204,
0.04366740956902504,
-0.02309458516538143,
-0.04757710173726082,
0.11637241393327713,
0.00123480090405792,
0.04314316809177399,
-0.0598565973341465,
-0.1034020408987999,
-0.02773991972208023,
0.16288475692272186,
-0.15718597173690796,
-0.06744987517595291
] |
null | null |
transformers
|
# DistilWav2Vec2 XLS-R Adult/Child Speech Classifier 89M
DistilWav2Vec2 XLS-R Adult/Child Speech Classifier is an audio classification model based on the [XLS-R](https://arxiv.org/abs/2111.09296) architecture. This model is a distilled version of [wav2vec2-xls-r-adult-child-cls](https://huggingface.co/bookbot/wav2vec2-xls-r-adult-child-cls) on a private adult/child speech classification dataset.
This model was trained using HuggingFace's PyTorch framework. All training was done on a Tesla P100, provided by Kaggle. Training metrics were logged via Tensorboard.
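As a hedged illustration (not taken from the authors' scripts), the model can also be loaded without the pipeline API; the zero-filled waveform below is a stand-in for real 16 kHz audio samples.
```python
import torch
from transformers import AutoFeatureExtractor, AutoModelForAudioClassification

model_id = "bookbot/distil-wav2vec2-xls-r-adult-child-cls-89m"
feature_extractor = AutoFeatureExtractor.from_pretrained(model_id)
model = AutoModelForAudioClassification.from_pretrained(model_id)

# Stand-in waveform: one second of silence at 16 kHz; replace with real audio.
waveform = torch.zeros(16000)

inputs = feature_extractor(waveform.numpy(), sampling_rate=16000, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits

predicted_id = logits.argmax(dim=-1).item()
print(model.config.id2label[predicted_id])
```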
## Model
| Model | #params | Arch. | Training/Validation data (text) |
| ------------------------------------------- | ------- | ----- | ----------------------------------------- |
| `distil-wav2vec2-xls-r-adult-child-cls-89m` | 89M | XLS-R | Adult/Child Speech Classification Dataset |
## Evaluation Results
The model achieves the following results on evaluation:
| Dataset | Loss | Accuracy | F1 |
| --------------------------------- | ------ | -------- | ------ |
| Adult/Child Speech Classification | 0.3048 | 93.54% | 0.9420 |
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- `learning_rate`: 3e-05
- `train_batch_size`: 32
- `eval_batch_size`: 32
- `seed`: 42
- `gradient_accumulation_steps`: 4
- `total_train_batch_size`: 128
- `optimizer`: Adam with `betas=(0.9,0.999)` and `epsilon=1e-08`
- `lr_scheduler_type`: linear
- `lr_scheduler_warmup_ratio`: 0.1
- `num_epochs`: 5
### Training results
| Training Loss | Epoch | Step | Validation Loss | Accuracy | F1 |
| :-----------: | :---: | :--: | :-------------: | :------: | :----: |
| 0.7711 | 1.0 | 96 | 0.5413 | 0.9017 | 0.9156 |
| 0.5551 | 2.0 | 192 | 0.4627 | 0.9164 | 0.9272 |
| 0.4166 | 3.0 | 288 | 0.3832 | 0.9261 | 0.9352 |
| 0.3928 | 4.0 | 384 | 0.3242 | 0.9331 | 0.9406 |
| 0.3622 | 5.0 | 480 | 0.3048 | 0.9354 | 0.9420 |
## Disclaimer
Do consider the biases from the pre-training datasets, which may be carried over into the results of this model.
## Authors
DistilWav2Vec2 XLS-R Adult/Child Speech Classifier was trained and evaluated by [Wilson Wongso](https://w11wo.github.io/). All computation and development are done on Kaggle.
## Framework versions
- Transformers 4.17.0.dev0
- Pytorch 1.10.2+cu102
- Datasets 1.18.3
- Tokenizers 0.11.0
|
{"language": "en", "license": "apache-2.0", "tags": ["audio-classification", "generated_from_trainer"], "metrics": ["accuracy", "f1"], "model-index": [{"name": "distil-wav2vec2-xls-r-adult-child-cls-89m", "results": []}]}
|
audio-classification
|
bookbot/distil-wav2vec2-xls-r-adult-child-cls-89m
|
[
"transformers",
"pytorch",
"tensorboard",
"wav2vec2",
"audio-classification",
"generated_from_trainer",
"en",
"arxiv:2111.09296",
"license:apache-2.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"2111.09296"
] |
[
"en"
] |
TAGS
#transformers #pytorch #tensorboard #wav2vec2 #audio-classification #generated_from_trainer #en #arxiv-2111.09296 #license-apache-2.0 #endpoints_compatible #region-us
|
DistilWav2Vec2 XLS-R Adult/Child Speech Classifier 89M
======================================================
DistilWav2Vec2 XLS-R Adult/Child Speech Classifier is an audio classification model based on the XLS-R architecture. This model is a distilled version of wav2vec2-xls-r-adult-child-cls on a private adult/child speech classification dataset.
This model was trained using HuggingFace's PyTorch framework. All training was done on a Tesla P100, provided by Kaggle. Training metrics were logged via Tensorboard.
Model
-----
Evaluation Results
------------------
The model achieves the following results on evaluation:
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* 'learning\_rate': 3e-05
* 'train\_batch\_size': 32
* 'eval\_batch\_size': 32
* 'seed': 42
* 'gradient\_accumulation\_steps': 4
* 'total\_train\_batch\_size': 128
* 'optimizer': Adam with 'betas=(0.9,0.999)' and 'epsilon=1e-08'
* 'lr\_scheduler\_type': linear
* 'lr\_scheduler\_warmup\_ratio': 0.1
* 'num\_epochs': 5
### Training results
Disclaimer
----------
Do consider the biases which came from pre-training datasets that may be carried over into the results of this model.
Authors
-------
DistilWav2Vec2 XLS-R Adult/Child Speech Classifier was trained and evaluated by Wilson Wongso. All computation and development are done on Kaggle.
Framework versions
------------------
* Transformers 4.17.0.dev0
* Pytorch 1.10.2+cu102
* Datasets 1.18.3
* Tokenizers 0.11.0
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* 'learning\\_rate': 3e-05\n* 'train\\_batch\\_size': 32\n* 'eval\\_batch\\_size': 32\n* 'seed': 42\n* 'gradient\\_accumulation\\_steps': 4\n* 'total\\_train\\_batch\\_size': 128\n* 'optimizer': Adam with 'betas=(0.9,0.999)' and 'epsilon=1e-08'\n* 'lr\\_scheduler\\_type': linear\n* 'lr\\_scheduler\\_warmup\\_ratio': 0.1\n* 'num\\_epochs': 5",
"### Training results\n\n\n\nDisclaimer\n----------\n\n\nDo consider the biases which came from pre-training datasets that may be carried over into the results of this model.\n\n\nAuthors\n-------\n\n\nDistilWav2Vec2 XLS-R Adult/Child Speech Classifier was trained and evaluated by Wilson Wongso. All computation and development are done on Kaggle.\n\n\nFramework versions\n------------------\n\n\n* Transformers 4.17.0.dev0\n* Pytorch 1.10.2+cu102\n* Datasets 1.18.3\n* Tokenizers 0.11.0"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #audio-classification #generated_from_trainer #en #arxiv-2111.09296 #license-apache-2.0 #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* 'learning\\_rate': 3e-05\n* 'train\\_batch\\_size': 32\n* 'eval\\_batch\\_size': 32\n* 'seed': 42\n* 'gradient\\_accumulation\\_steps': 4\n* 'total\\_train\\_batch\\_size': 128\n* 'optimizer': Adam with 'betas=(0.9,0.999)' and 'epsilon=1e-08'\n* 'lr\\_scheduler\\_type': linear\n* 'lr\\_scheduler\\_warmup\\_ratio': 0.1\n* 'num\\_epochs': 5",
"### Training results\n\n\n\nDisclaimer\n----------\n\n\nDo consider the biases which came from pre-training datasets that may be carried over into the results of this model.\n\n\nAuthors\n-------\n\n\nDistilWav2Vec2 XLS-R Adult/Child Speech Classifier was trained and evaluated by Wilson Wongso. All computation and development are done on Kaggle.\n\n\nFramework versions\n------------------\n\n\n* Transformers 4.17.0.dev0\n* Pytorch 1.10.2+cu102\n* Datasets 1.18.3\n* Tokenizers 0.11.0"
] |
[
63,
170,
117
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #audio-classification #generated_from_trainer #en #arxiv-2111.09296 #license-apache-2.0 #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* 'learning\\_rate': 3e-05\n* 'train\\_batch\\_size': 32\n* 'eval\\_batch\\_size': 32\n* 'seed': 42\n* 'gradient\\_accumulation\\_steps': 4\n* 'total\\_train\\_batch\\_size': 128\n* 'optimizer': Adam with 'betas=(0.9,0.999)' and 'epsilon=1e-08'\n* 'lr\\_scheduler\\_type': linear\n* 'lr\\_scheduler\\_warmup\\_ratio': 0.1\n* 'num\\_epochs': 5### Training results\n\n\n\nDisclaimer\n----------\n\n\nDo consider the biases which came from pre-training datasets that may be carried over into the results of this model.\n\n\nAuthors\n-------\n\n\nDistilWav2Vec2 XLS-R Adult/Child Speech Classifier was trained and evaluated by Wilson Wongso. All computation and development are done on Kaggle.\n\n\nFramework versions\n------------------\n\n\n* Transformers 4.17.0.dev0\n* Pytorch 1.10.2+cu102\n* Datasets 1.18.3\n* Tokenizers 0.11.0"
] |
[
-0.08456330001354218,
0.17305520176887512,
-0.003799437778070569,
0.06385703384876251,
0.09282472729682922,
0.0005753411678597331,
0.1446942239999771,
0.11928503215312958,
-0.03855795040726662,
0.11360159516334534,
0.0829833447933197,
0.09783901274204254,
0.08589745312929153,
0.09148696810007095,
-0.04608555883169174,
-0.2337580770254135,
0.027803707867860794,
-0.0678468719124794,
-0.10354258865118027,
0.13236446678638458,
0.07481946796178818,
-0.10892105102539062,
0.0631752610206604,
-0.00017358054174110293,
-0.07187177985906601,
-0.03818954527378082,
-0.014588086865842342,
-0.06089626997709274,
0.07796496152877808,
0.05509454756975174,
0.08252938836812973,
0.06486227363348007,
0.07007555663585663,
-0.2778798043727875,
0.00796290673315525,
0.07549048215150833,
0.04435401409864426,
0.07272190600633621,
0.08951554447412491,
-0.03117210604250431,
0.13213717937469482,
-0.0769745409488678,
0.05036188289523125,
0.04067791625857353,
-0.11146599799394608,
-0.2276109904050827,
-0.08838027715682983,
0.05045095458626747,
0.12067598849534988,
0.06895579397678375,
-0.04869745671749115,
0.08223064243793488,
-0.02506411261856556,
0.09210266917943954,
0.19579488039016724,
-0.2405673712491989,
-0.038625627756118774,
-0.006027836352586746,
0.021024322137236595,
0.06471416354179382,
-0.10919824987649918,
-0.0018308294238522649,
0.03926446661353111,
-0.014923998154699802,
0.06335072964429855,
-0.03300345689058304,
0.043794918805360794,
-0.0038917523343116045,
-0.14464691281318665,
-0.08533279597759247,
0.15926049649715424,
0.06653967499732971,
-0.06418755650520325,
-0.12486321479082108,
-0.012447712011635303,
-0.13532832264900208,
0.016208311542868614,
-0.013549085706472397,
0.005885518621653318,
-0.052139632403850555,
-0.016961675137281418,
-0.03176859766244888,
-0.0898783728480339,
-0.04526953771710396,
0.020261505618691444,
0.19154952466487885,
0.022753717377781868,
-0.000533729384187609,
0.03304886072874069,
0.10076140612363815,
0.09160271286964417,
-0.15109655261039734,
-0.008291647769510746,
-0.009570603258907795,
-0.10162641108036041,
-0.024825364351272583,
-0.009983517229557037,
0.0369856022298336,
0.013359304517507553,
0.13481645286083221,
-0.04509567469358444,
0.08027180284261703,
-0.0038746721111238003,
0.0006213809829205275,
0.007989239878952503,
0.14521846175193787,
-0.06211106479167938,
-0.02406994253396988,
-0.03703509271144867,
0.09014609456062317,
0.0002959095872938633,
-0.04813073202967644,
-0.060769472271203995,
0.04023134708404541,
0.07983069121837616,
0.06516678631305695,
0.008067323826253414,
0.03620878607034683,
-0.06866864114999771,
-0.06697109341621399,
0.015260552056133747,
-0.13616599142551422,
0.029297156259417534,
0.07577348500490189,
-0.0638957992196083,
0.006462860386818647,
0.013044539839029312,
0.03419041261076927,
-0.05953574180603027,
0.0804811343550682,
-0.06262998282909393,
-0.0019933697767555714,
-0.03386682644486427,
-0.10477104038000107,
0.013526203110814095,
-0.07868167012929916,
-0.013285307213664055,
-0.04993961751461029,
-0.050555381923913956,
-0.07463464140892029,
0.062387607991695404,
-0.054129716008901596,
-0.06832397729158401,
-0.07408982515335083,
-0.08829249441623688,
0.04364239424467087,
-0.0198125671595335,
0.10190252959728241,
-0.04408064857125282,
0.09558320790529251,
0.006603606976568699,
0.0362735316157341,
0.095737986266613,
0.056023161858320236,
-0.062341511249542236,
0.06980463117361069,
-0.13895036280155182,
0.11513804644346237,
-0.08073954284191132,
-0.013767923228442669,
-0.16766828298568726,
-0.10712272673845291,
0.02213209494948387,
-0.0032345782965421677,
0.05298520624637604,
0.14522773027420044,
-0.14835964143276215,
-0.07565955817699432,
0.11606494337320328,
-0.07013827562332153,
-0.10539240390062332,
0.12199503183364868,
-0.04152342677116394,
0.014343450777232647,
0.03884252905845642,
0.1792990267276764,
0.06197319179773331,
-0.1157359704375267,
-0.05179663002490997,
-0.07177051156759262,
0.10617492347955704,
0.1278148889541626,
0.10990452766418457,
-0.04308564215898514,
0.025512101128697395,
-0.026878058910369873,
-0.08338232338428497,
-0.02549339272081852,
-0.03627685829997063,
-0.08284320682287216,
0.004705994389951229,
-0.036682166159152985,
0.05340355634689331,
0.02110368013381958,
-0.008869978599250317,
-0.04345571994781494,
-0.13917623460292816,
0.01313543226569891,
0.07722557336091995,
-0.10937421023845673,
0.019913997501134872,
-0.09251639246940613,
0.04826487973332405,
-0.012878580018877983,
-0.018999256193637848,
-0.16465505957603455,
-0.024818718433380127,
0.029160894453525543,
-0.09672309458255768,
0.04872800409793854,
-0.005620729643851519,
0.03395787999033928,
0.04766490310430527,
-0.017139004543423653,
-0.049316346645355225,
-0.03291993960738182,
0.004293015692383051,
-0.030204106122255325,
-0.22189536690711975,
-0.05367707088589668,
-0.024948569014668465,
0.1967557668685913,
-0.2575473189353943,
0.0012741321697831154,
0.08256763964891434,
0.08903820812702179,
0.026089170947670937,
-0.05211414024233818,
0.027924783527851105,
0.030302220955491066,
-0.03645370528101921,
-0.054497722536325455,
0.019999327138066292,
-0.009535613469779491,
-0.1229800283908844,
0.03379707783460617,
-0.20463839173316956,
-0.055474262684583664,
0.1007760688662529,
-0.009687969461083412,
-0.07985900342464447,
-0.07366007566452026,
-0.041717659682035446,
-0.04660860449075699,
-0.030935531482100487,
0.005913423839956522,
0.20996905863285065,
0.04970061406493187,
0.104604572057724,
-0.08337768912315369,
-0.07593697309494019,
0.03438635542988777,
-0.013727403245866299,
-0.02698720246553421,
0.1360425055027008,
0.03369557112455368,
-0.08653136342763901,
0.10010804980993271,
0.09364853799343109,
-0.014183741062879562,
0.11021414399147034,
-0.03512953966856003,
-0.090815469622612,
-0.07725592702627182,
0.029781773686408997,
0.013347422704100609,
0.0568680576980114,
-0.09146922081708908,
0.009064785204827785,
0.030116256326436996,
0.03617246821522713,
-0.017294665798544884,
-0.17021490633487701,
0.02042110450565815,
0.04502984881401062,
-0.0682338997721672,
-0.04696333408355713,
-0.020353257656097412,
-0.00023545502335764468,
0.07428184896707535,
0.014344565570354462,
0.022368790581822395,
-0.003788919420912862,
-0.05407252162694931,
-0.11792927235364914,
0.17960572242736816,
-0.07947428524494171,
-0.16899743676185608,
-0.11593277752399445,
-0.042796336114406586,
-0.04928768798708916,
-0.002442065393552184,
0.02265617810189724,
-0.052691757678985596,
-0.039794862270355225,
-0.08794009685516357,
0.031133584678173065,
-0.027090532705187798,
-0.0071675581857562065,
-0.0015769538003951311,
0.006702912971377373,
0.05034752935171127,
-0.09031304717063904,
0.008736097253859043,
0.022976823151111603,
-0.037167150527238846,
0.005289893597364426,
0.06929884850978851,
0.051625728607177734,
0.1587255746126175,
0.03440564125776291,
0.009117528796195984,
-0.02050648257136345,
0.21432256698608398,
-0.13127021491527557,
0.0033090016804635525,
0.09642942994832993,
-0.08542618155479431,
0.04198905825614929,
0.17746978998184204,
0.015233072452247143,
-0.09196003526449203,
0.03516966849565506,
0.06091558188199997,
-0.030126182362437248,
-0.2848634719848633,
-0.03776455298066139,
-0.046359430998563766,
0.005627167411148548,
0.10820522904396057,
0.022941675037145615,
-0.03404602035880089,
0.03516669571399689,
-0.04672557860612869,
-0.0062946476973593235,
0.04723574221134186,
0.06478487700223923,
0.11139597743749619,
0.0356893464922905,
0.09133805334568024,
-0.015732908621430397,
-0.022458551451563835,
0.042573291808366776,
0.024853060021996498,
0.18658792972564697,
-0.010906870476901531,
0.19019725918769836,
0.06330519169569016,
0.08456780016422272,
-0.016894903033971786,
0.01900753378868103,
0.02615164965391159,
0.024961724877357483,
0.013345166109502316,
-0.07335156947374344,
-0.060790497809648514,
0.08709442615509033,
0.08575987070798874,
-0.012605017051100731,
-0.06713643670082092,
0.02081427536904812,
0.026978900656104088,
0.2994651794433594,
0.09700001031160355,
-0.22629795968532562,
-0.07366074621677399,
0.048003606498241425,
-0.04874560236930847,
-0.04408172145485878,
0.004099526908248663,
0.13327260315418243,
-0.10350651293992996,
0.07095245271921158,
-0.06289572268724442,
0.07111522555351257,
-0.1254258155822754,
-0.013514228165149689,
0.0469348207116127,
0.028971394523978233,
-0.012855185195803642,
0.0636887475848198,
-0.22496892511844635,
0.25631198287010193,
0.0063399518840014935,
0.0487937368452549,
-0.0587659515440464,
0.030393049120903015,
-0.0038277688436210155,
-0.06709067523479462,
0.14344938099384308,
0.00030192083795554936,
-0.08051735907793045,
-0.14797376096248627,
-0.11373037844896317,
0.018424611538648605,
0.140546053647995,
-0.08819299936294556,
0.11970449984073639,
-0.03992227837443352,
0.0038150933105498552,
0.003018399467691779,
-0.03324851393699646,
-0.051400646567344666,
-0.10722193121910095,
0.055931027978658676,
-0.027796007692813873,
0.03222128748893738,
-0.04235691577196121,
-0.07557898759841919,
-0.10299427807331085,
0.15351535379886627,
-0.17224134504795074,
-0.055852312594652176,
-0.10833154618740082,
0.0241079218685627,
0.13565462827682495,
-0.07153864949941635,
0.024884140118956566,
0.013146303594112396,
0.11762840300798416,
0.0221872515976429,
-0.009972499683499336,
0.09731189161539078,
-0.042042188346385956,
-0.22726717591285706,
-0.023343848064541817,
0.17204737663269043,
0.04631396010518074,
0.07902900874614716,
-0.025099730119109154,
0.057764098048210144,
0.007091930136084557,
-0.09218708425760269,
0.06453520804643631,
0.038962140679359436,
0.016017818823456764,
0.05920397862792015,
-0.000046928442316129804,
-0.02357177436351776,
-0.13677974045276642,
-0.05629533529281616,
0.11889250576496124,
0.3239019513130188,
-0.08003277331590652,
0.07529181987047195,
0.08180759102106094,
-0.08512014150619507,
-0.1750451773405075,
-0.0029453944880515337,
0.12466748058795929,
0.033340517431497574,
0.03064161166548729,
-0.2058088630437851,
0.05621861293911934,
0.06758499890565872,
-0.019818466156721115,
0.08943235129117966,
-0.29209354519844055,
-0.144850954413414,
0.08551313728094101,
0.06638064980506897,
-0.09143614023923874,
-0.15065662562847137,
-0.0706571564078331,
-0.012914454564452171,
-0.05888105556368828,
0.11743821948766708,
-0.0076939999125897884,
0.11075129359960556,
0.04543498903512955,
0.07184319198131561,
0.03809043765068054,
-0.04351019114255905,
0.1608673334121704,
0.023402011021971703,
0.03843966871500015,
-0.05218534544110298,
-0.05243338644504547,
-0.025889486074447632,
-0.0608542300760746,
0.03778444603085518,
-0.04980125278234482,
0.019829178228974342,
-0.11901500076055527,
-0.04602712765336037,
-0.06309938430786133,
0.022010264918208122,
-0.05877317115664482,
-0.06142016500234604,
-0.030462872236967087,
0.06447257846593857,
0.09004292637109756,
-0.001071406528353691,
0.08857188373804092,
-0.061213016510009766,
0.04588965326547623,
0.18913424015045166,
0.15396727621555328,
0.10593196749687195,
-0.06827813386917114,
-0.010334071703255177,
0.012260746210813522,
0.04421001672744751,
-0.15397197008132935,
0.06119725853204727,
0.14633671939373016,
0.050200603902339935,
0.17731106281280518,
0.015905708074569702,
-0.08909470587968826,
0.008634979836642742,
0.022530164569616318,
-0.12305154651403427,
-0.13120052218437195,
-0.004400998819619417,
0.005600367672741413,
-0.15702274441719055,
-0.048317234963178635,
0.12998846173286438,
-0.022735558450222015,
-0.013913209550082684,
0.014711661264300346,
0.04931194707751274,
-0.01953846588730812,
0.16768185794353485,
0.0246047955006361,
0.09040762484073639,
-0.07535168528556824,
0.10243546962738037,
0.09126932173967361,
-0.14193281531333923,
0.07337519526481628,
0.05556035414338112,
-0.04148722067475319,
-0.01816793903708458,
-0.040404364466667175,
0.0012043663300573826,
0.03790071979165077,
-0.02824225462973118,
-0.05825762450695038,
-0.10896812379360199,
0.04884243384003639,
0.07907848805189133,
0.028644580394029617,
0.06406310945749283,
-0.023387357592582703,
-0.004995113238692284,
-0.12938761711120605,
0.1340354084968567,
0.038150761276483536,
0.026472702622413635,
-0.13325060904026031,
0.12036879360675812,
0.01772705279290676,
0.011347443796694279,
0.0005091805942356586,
-0.020019855350255966,
-0.10636457800865173,
0.023415282368659973,
-0.07446236163377762,
0.03474510461091995,
-0.056319136172533035,
0.0005240278551355004,
0.011223808862268925,
-0.06451903283596039,
-0.04996354132890701,
0.020832030102610588,
-0.09959691762924194,
-0.028131213039159775,
-0.01930842734873295,
0.09795158356428146,
-0.12301594763994217,
-0.04403543099761009,
0.0606345534324646,
-0.11434636265039444,
0.10532422363758087,
0.017366191372275352,
-0.023976579308509827,
0.013602805323898792,
-0.10032370686531067,
0.037322673946619034,
0.01780867949128151,
0.03063584677875042,
0.01751217059791088,
-0.2500859200954437,
-0.005618638824671507,
-0.030323738232254982,
-0.0019144287798553705,
-0.0019778981804847717,
0.06080783158540726,
-0.12237438559532166,
-0.030737686902284622,
-0.027701906859874725,
-0.05191757157444954,
-0.05559102073311806,
0.05006315931677818,
0.041354939341545105,
0.02068289928138256,
0.18881437182426453,
-0.053622134029865265,
0.10148707032203674,
-0.18055234849452972,
-0.010126437060534954,
-0.006772034335881472,
-0.02331141009926796,
-0.03559057042002678,
-0.04422930255532265,
0.0776919275522232,
-0.08633480221033096,
0.09180273115634918,
-0.03389458358287811,
0.021819114685058594,
0.04300655052065849,
-0.07510169595479965,
-0.03522821515798569,
0.058634329587221146,
0.16059660911560059,
0.0322161428630352,
-0.02837633341550827,
0.0731656476855278,
-0.014450631104409695,
0.01676623523235321,
0.16224195063114166,
0.14828334748744965,
0.1367989182472229,
0.02609468251466751,
0.04651767015457153,
0.07389824092388153,
-0.11828272044658661,
-0.13110457360744476,
0.1429249793291092,
-0.05916142091155052,
0.12363775074481964,
-0.03975921869277954,
0.1795196682214737,
0.09031984210014343,
-0.2089710682630539,
0.062021974474191666,
-0.040962010622024536,
-0.11411300301551819,
-0.13146249949932098,
-0.1555560678243637,
-0.08006031811237335,
-0.0727747306227684,
0.017944125458598137,
-0.11157411336898804,
0.06042538210749626,
0.04841672256588936,
0.04563094303011894,
0.006276649422943592,
0.108639195561409,
-0.031834833323955536,
-0.04141494259238243,
0.07761399447917938,
0.02257528156042099,
-0.013660985976457596,
-0.009598523378372192,
-0.04737038165330887,
0.06492844223976135,
-0.002626965055242181,
0.07692143321037292,
-0.0027229960542172194,
-0.026182660833001137,
0.04177902266383171,
-0.046105317771434784,
-0.10116097331047058,
0.007528861053287983,
0.00586920278146863,
0.08572905510663986,
0.14362142980098724,
0.051849909126758575,
-0.005794403608888388,
-0.01551622711122036,
0.19663819670677185,
-0.08817970752716064,
-0.016612404957413673,
-0.15227024257183075,
0.1965598464012146,
-0.020192397758364677,
0.0014588052872568369,
0.027341308072209358,
-0.09401988983154297,
0.00982023123651743,
0.1548910140991211,
0.14947190880775452,
-0.017242297530174255,
-0.006288455333560705,
0.002361351391300559,
0.022886646911501884,
-0.0006665565306320786,
0.060712780803442,
0.08963277190923691,
0.0661078691482544,
-0.05513026565313339,
-0.031095173209905624,
-0.04840410500764847,
-0.05676031857728958,
0.017313718795776367,
0.11147415637969971,
0.008212685585021973,
-0.02051311917603016,
-0.031753938645124435,
0.10642892122268677,
-0.09223359823226929,
-0.19114281237125397,
0.03430894389748573,
-0.15437258780002594,
-0.15853598713874817,
0.001800380414351821,
0.028148353099822998,
0.04736095294356346,
0.036012906581163406,
-0.0010432926937937737,
-0.056395310908555984,
0.12205337733030319,
0.014565803110599518,
-0.04179368168115616,
-0.025866977870464325,
0.06851564347743988,
-0.08257449418306351,
0.18800224363803864,
-0.005737920757383108,
0.10257349163293839,
0.10112296789884567,
0.03860921040177345,
-0.0770917609333992,
0.05182592198252678,
0.08420061320066452,
-0.11562870442867279,
0.02848202735185623,
0.19333729147911072,
-0.03727751597762108,
0.15372860431671143,
0.07810429483652115,
-0.06050899997353554,
0.02527714893221855,
-0.04578518122434616,
-0.055366430431604385,
-0.07909300178289413,
0.004428527317941189,
-0.07643173635005951,
0.14984656870365143,
0.20180949568748474,
-0.06343908607959747,
-0.017376059666275978,
-0.047150831669569016,
0.0008274809806607664,
0.019240260124206543,
0.09867177903652191,
-0.021259721368551254,
-0.2382456511259079,
0.030272575095295906,
-0.019080961123108864,
0.05480271205306053,
-0.221751868724823,
-0.05109642073512077,
0.045250341296195984,
-0.040172070264816284,
-0.042853694409132004,
0.10779131948947906,
0.0030171810649335384,
0.04714381694793701,
-0.06760722398757935,
-0.07362236082553864,
-0.023085175082087517,
0.16688939929008484,
-0.16677828133106232,
-0.0643390640616417
] |
null | null |
transformers
|
## GPT-2 Indonesian Medium Kids Stories
GPT-2 Indonesian Medium Kids Stories is a causal language model based on the [OpenAI GPT-2](https://cdn.openai.com/better-language-models/language_models_are_unsupervised_multitask_learners.pdf) model. The model was originally the pre-trained [GPT2 Medium Indonesian](https://huggingface.co/flax-community/gpt2-medium-indonesian) model, which was then fine-tuned on Indonesian kids' stories from [Room To Read](https://literacycloud.org/) and [Let's Read](https://reader.letsreadasia.org/).
10% of the dataset was kept for evaluation purposes. The pre-trained model was fine-tuned and achieved an evaluation loss of 3.579 and an evaluation perplexity of 35.84.
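The reported perplexity follows directly from the evaluation loss, since perplexity is the exponential of the mean cross-entropy loss; a quick check (not part of the original training code):
```python
import math

eval_loss = 3.579
print(math.exp(eval_loss))  # ~35.84, matching the reported evaluation perplexity
```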
Hugging Face's `Trainer` class from the [Transformers](https://huggingface.co/transformers) library was used to train the model. PyTorch was used as the backend framework during training, but the model remains compatible with other frameworks nonetheless.
## Model
| Model | #params | Arch. | Training/Validation data (text) |
| ------------------------------- | ------- | ----------- | --------------------------------- |
| `gpt2-indo-medium-kids-stories` | 345M | GPT2 Medium | Indonesian Kids' Stories (860 KB) |
## Evaluation Results
The model was fine-tuned for 3 epochs.
| Epoch | Training Loss | Validation Loss |
| ----- | ------------- | --------------- |
| 1 | 3.909100 | 3.627678 |
| 2 | 3.375300 | 3.562854 |
| 3 | 3.113300 | 3.578999 |
## How to Use (PyTorch)
### As Causal Language Model
```python
from transformers import pipeline
pretrained_name = "bookbot/gpt2-indo-medium-kids-stories"
nlp = pipeline(
"text-generation",
model=pretrained_name,
tokenizer=pretrained_name
)
nlp("Archie sedang mengendarai roket ke planet Mars.")
```
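The pipeline forwards the usual generation arguments to `generate`; the values below are purely illustrative and were not specified by the author:
```python
nlp(
    "Archie sedang mengendarai roket ke planet Mars.",
    max_length=60,            # illustrative value
    do_sample=True,           # sample instead of greedy decoding
    top_k=50,                 # illustrative value
    num_return_sequences=2,   # return two alternative continuations
)
```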
### Feature Extraction in PyTorch
```python
from transformers import GPT2LMHeadModel, GPT2TokenizerFast
pretrained_name = "bookbot/gpt2-indo-medium-kids-stories"
model = GPT2LMHeadModel.from_pretrained(pretrained_name)
tokenizer = GPT2TokenizerFast.from_pretrained(pretrained_name)
prompt = "Archie sedang mengendarai roket ke planet Mars."
encoded_input = tokenizer(prompt, return_tensors='pt')
output = model(**encoded_input)
```
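`output.logits` holds the next-token scores with shape `(batch, sequence_length, vocab_size)`. To obtain hidden states for feature extraction, request them explicitly (a minimal sketch reusing the objects defined above):
```python
output = model(**encoded_input, output_hidden_states=True)
features = output.hidden_states[-1]  # last layer, shape (batch, seq_len, hidden_size)
```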
## Disclaimer
Do consider the biases which come from both the pre-trained GPT-2 model and the Indonesian Kids' Stories dataset that may be carried over into the results of this model.
## Author
GPT-2 Indonesian Medium Kids Stories was trained and evaluated by [Wilson Wongso](https://w11wo.github.io/). All computation and development are done on Google Colaboratory using their free GPU access.
|
{"language": "id", "license": "mit", "tags": ["gpt2-indo-medium-kids-stories"], "widget": [{"text": "Archie sedang mengendarai roket ke planet Mars."}]}
|
text-generation
|
bookbot/gpt2-indo-medium-kids-stories
|
[
"transformers",
"pytorch",
"safetensors",
"gpt2",
"text-generation",
"gpt2-indo-medium-kids-stories",
"id",
"license:mit",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"id"
] |
TAGS
#transformers #pytorch #safetensors #gpt2 #text-generation #gpt2-indo-medium-kids-stories #id #license-mit #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
GPT-2 Indonesian Medium Kids Stories
------------------------------------
GPT-2 Indonesian Medium Kids Stories is a causal language model based on the OpenAI GPT-2 model. The model was originally the pre-trained GPT2 Medium Indonesian model, which was then fine-tuned on Indonesian kids' stories from Room To Read and Let's Read.
10% of the dataset was kept for evaluation purposes. The pre-trained model was fine-tuned and achieved an evaluation loss of 3.579 and an evaluation perplexity of 35.84.
Hugging Face's 'Trainer' class from the Transformers library was used to train the model. PyTorch was used as the backend framework during training, but the model remains compatible with other frameworks nonetheless.
Model
-----
Evaluation Results
------------------
The model was fine-tuned for 3 epochs.
Epoch: 1, Training Loss: 3.909100, Validation Loss: 3.627678
Epoch: 2, Training Loss: 3.375300, Validation Loss: 3.562854
Epoch: 3, Training Loss: 3.113300, Validation Loss: 3.578999
How to Use (PyTorch)
--------------------
### As Causal Language Model
### Feature Extraction in PyTorch
Disclaimer
----------
Do consider the biases which come from both the pre-trained GPT-2 model and the Indonesian Kids' Stories dataset that may be carried over into the results of this model.
Author
------
GPT-2 Indonesian Medium Kids Stories was trained and evaluated by Wilson Wongso. All computation and development are done on Google Colaboratory using their free GPU access.
|
[
"### As Causal Language Model",
"### Feature Extraction in PyTorch\n\n\nDisclaimer\n----------\n\n\nDo consider the biases which come from both the pre-trained GPT-2 model and the Indonesian Kids' Stories dataset that may be carried over into the results of this model.\n\n\nAuthor\n------\n\n\nGPT-2 Indonesian Medium Kids Stories was trained and evaluated by Wilson Wongso. All computation and development are done on Google Colaboratory using their free GPU access."
] |
[
"TAGS\n#transformers #pytorch #safetensors #gpt2 #text-generation #gpt2-indo-medium-kids-stories #id #license-mit #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"### As Causal Language Model",
"### Feature Extraction in PyTorch\n\n\nDisclaimer\n----------\n\n\nDo consider the biases which come from both the pre-trained GPT-2 model and the Indonesian Kids' Stories dataset that may be carried over into the results of this model.\n\n\nAuthor\n------\n\n\nGPT-2 Indonesian Medium Kids Stories was trained and evaluated by Wilson Wongso. All computation and development are done on Google Colaboratory using their free GPU access."
] |
[
72,
7,
92
] |
[
"passage: TAGS\n#transformers #pytorch #safetensors #gpt2 #text-generation #gpt2-indo-medium-kids-stories #id #license-mit #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n### As Causal Language Model### Feature Extraction in PyTorch\n\n\nDisclaimer\n----------\n\n\nDo consider the biases which come from both the pre-trained GPT-2 model and the Indonesian Kids' Stories dataset that may be carried over into the results of this model.\n\n\nAuthor\n------\n\n\nGPT-2 Indonesian Medium Kids Stories was trained and evaluated by Wilson Wongso. All computation and development are done on Google Colaboratory using their free GPU access."
] |
[
-0.04255077987909317,
0.016273507848381996,
-0.0015324557898566127,
0.10324247926473618,
0.07839230448007584,
0.017594443634152412,
0.21767303347587585,
0.03609243407845497,
-0.058402761816978455,
-0.1459159106016159,
0.1243625059723854,
-0.00325419707223773,
0.10648161917924881,
0.08396798372268677,
-0.021217091009020805,
-0.31242844462394714,
0.04196273162961006,
-0.007953178137540817,
0.18049855530261993,
0.20730024576187134,
0.002641407074406743,
-0.013668673112988472,
0.059848714619874954,
0.08663931488990784,
-0.10616541653871536,
-0.12421143054962158,
0.02721254900097847,
-0.08292927592992783,
0.08262081444263458,
-0.04796740785241127,
0.06395279616117477,
0.06775732338428497,
-0.005191930569708347,
0.020517295226454735,
0.03384215757250786,
0.009676650166511536,
-0.017342139035463333,
0.012992346659302711,
0.07104367017745972,
0.03908415511250496,
0.25228458642959595,
0.08891450613737106,
-0.033291105180978775,
-0.0002911034389398992,
-0.1081334725022316,
-0.204086571931839,
0.0340370237827301,
0.007832693867385387,
0.05684829130768776,
0.0834074541926384,
-0.055761709809303284,
0.07460612803697586,
-0.19380272924900055,
0.06230738013982773,
0.17242613434791565,
-0.19273531436920166,
-0.09286273270845413,
0.08327999711036682,
0.027810318395495415,
-0.005495582707226276,
-0.06652957946062088,
0.015415317378938198,
0.02656514383852482,
0.008410555310547352,
0.002675438765436411,
-0.08514063060283661,
-0.054905056953430176,
-0.09348996728658676,
-0.07591108232736588,
-0.049223922193050385,
0.14419852197170258,
-0.09433218091726303,
-0.06454569101333618,
-0.1741526871919632,
-0.03767669200897217,
-0.008551446720957756,
0.01131280604749918,
0.03323161229491234,
-0.020838754251599312,
0.05761820077896118,
0.07471689581871033,
-0.05872134491801262,
-0.11893754452466965,
-0.06879033893346786,
0.013540448620915413,
0.138309046626091,
0.007783219218254089,
0.06525714695453644,
-0.1297610104084015,
0.1889466941356659,
0.14150123298168182,
-0.12329662591218948,
-0.005619936157017946,
-0.07283121347427368,
0.0875127762556076,
0.08969619125127792,
-0.010887051932513714,
0.07298872619867325,
0.02208525687456131,
-0.005597155541181564,
-0.09336119890213013,
-0.016526633873581886,
0.060308922082185745,
0.04491440951824188,
0.04753761366009712,
0.07712440192699432,
-0.0662267729640007,
0.04832601547241211,
0.09233564883470535,
-0.002458408707752824,
0.0916142389178276,
-0.020510537549853325,
-0.06649342179298401,
-0.023519301787018776,
0.05463779345154762,
0.06658004969358444,
-0.0011861331295222044,
0.14865294098854065,
-0.033208757638931274,
-0.035505931824445724,
0.0342942476272583,
-0.018568046391010284,
-0.06792709976434708,
-0.009532228112220764,
-0.04204985499382019,
0.1676349639892578,
0.011181098408997059,
0.023201599717140198,
-0.09026768058538437,
0.07048489898443222,
-0.015441986732184887,
0.008860558271408081,
0.0028779238928109407,
0.048023976385593414,
0.059631697833538055,
-0.08766596019268036,
0.0220070518553257,
-0.09516318887472153,
-0.27274906635284424,
-0.039785634726285934,
0.04367351159453392,
-0.06869541853666306,
-0.025016294792294502,
-0.09213719516992569,
-0.06313426792621613,
0.0023201650474220514,
-0.034848373383283615,
0.03155761584639549,
-0.04634658247232437,
0.11583948880434036,
-0.03558497875928879,
0.04561854526400566,
0.03553331270813942,
-0.01732785999774933,
-0.2383558601140976,
0.07937059551477432,
-0.05838688090443611,
0.08676005154848099,
0.16219116747379303,
0.06446099281311035,
0.04956779256463051,
-0.10673383623361588,
-0.055481746792793274,
-0.006000747438520193,
-0.052110809832811356,
0.2281036525964737,
-0.004132073372602463,
-0.0707746222615242,
0.16391482949256897,
-0.1396513730287552,
-0.1594599336385727,
0.16332314908504486,
-0.010637572035193443,
0.24847184121608734,
0.10750512778759003,
0.19531957805156708,
-0.09892910718917847,
0.030507665127515793,
0.024518311023712158,
-0.05360681563615799,
0.006283512804657221,
0.06985747814178467,
0.10923255980014801,
0.09938474744558334,
0.002043312881141901,
0.08274515718221664,
-0.0782575011253357,
0.10993900150060654,
0.001750826952047646,
-0.021268516778945923,
-0.006031339056789875,
-0.024365754798054695,
0.15179972350597382,
0.06904170662164688,
0.045093074440956116,
-0.05266252160072327,
-0.06553087383508682,
-0.15830303728580475,
0.01081801950931549,
-0.0940796509385109,
0.017725666984915733,
-0.1030886098742485,
0.1384778618812561,
-0.044116754084825516,
0.054824259132146835,
-0.027793077751994133,
0.011466724798083305,
-0.032335780560970306,
-0.07894281297922134,
-0.04454676806926727,
-0.09146635234355927,
0.011767983436584473,
0.0303003191947937,
-0.041124217212200165,
-0.037857770919799805,
0.04534048214554787,
-0.05729163438081741,
0.05080605670809746,
-0.043568193912506104,
0.06020217016339302,
0.020032377913594246,
0.20334313809871674,
-0.17167648673057556,
0.04565372318029404,
-0.007698597386479378,
0.028764061629772186,
-0.03860979899764061,
-0.009615660645067692,
0.08013831824064255,
-0.013084870763123035,
-0.020373499020934105,
-0.055759280920028687,
0.0920778140425682,
0.0744757354259491,
-0.09149685502052307,
0.18299362063407898,
-0.19238406419754028,
0.026364609599113464,
0.131917804479599,
-0.2778882086277008,
0.019999127835035324,
-0.08212260901927948,
-0.005022662691771984,
-0.003967169672250748,
0.004577957559376955,
0.05581594258546829,
0.2129644900560379,
0.025254547595977783,
0.141086608171463,
-0.1601223200559616,
-0.08028803020715714,
0.043996576219797134,
-0.06241970881819725,
0.08093611150979996,
0.08817916363477707,
-0.03832448273897171,
-0.0783541202545166,
0.11188112199306488,
0.12924042344093323,
0.07512854784727097,
0.20670340955257416,
0.012150822207331657,
0.01061788760125637,
-0.028500668704509735,
-0.03573145717382431,
-0.07624104619026184,
-0.005642366595566273,
-0.2927808165550232,
0.004837587475776672,
0.0722697526216507,
0.010639269836246967,
0.056662846356630325,
-0.186619371175766,
-0.09586358070373535,
-0.014556574635207653,
-0.011904551647603512,
-0.1066422313451767,
0.09568887203931808,
-0.03715885803103447,
0.07677606493234634,
-0.049931176006793976,
0.09639451652765274,
0.051895350217819214,
-0.013092616572976112,
-0.11862924695014954,
0.21014556288719177,
0.048118263483047485,
-0.3881472945213318,
-0.0678175613284111,
0.007914554327726364,
-0.09160421788692474,
0.026139119639992714,
0.07256880402565002,
-0.06921258568763733,
0.04520789906382561,
-0.059068549424409866,
0.09416815638542175,
0.06548762321472168,
-0.03538123518228531,
-0.06673523783683777,
0.026499852538108826,
-0.1108902245759964,
-0.021644221618771553,
-0.05583404004573822,
0.022204814478754997,
-0.16076944768428802,
0.18634763360023499,
-0.12369479238986969,
-0.027150293812155724,
0.17181530594825745,
0.062402285635471344,
-0.02943306602537632,
-0.016106177121400833,
0.15554514527320862,
-0.1506403386592865,
0.02123270556330681,
0.2315102219581604,
-0.14373192191123962,
0.0006427172338590026,
0.09611592441797256,
-0.019016429781913757,
-0.13935771584510803,
0.07420177757740021,
-0.07422567158937454,
-0.12408759444952011,
-0.24337010085582733,
-0.06475882977247238,
-0.041220467537641525,
0.10039640963077545,
-0.056065574288368225,
0.03656627982854843,
0.08792077004909515,
0.11815030872821808,
-0.09251311421394348,
0.04050251096487045,
0.11911632120609283,
0.033477749675512314,
0.09057310968637466,
-0.03141488879919052,
0.039864469319581985,
-0.012402872554957867,
-0.05891016870737076,
0.025772159919142723,
-0.08176222443580627,
0.1725066900253296,
-0.04277759790420532,
0.095846988260746,
0.04797679930925369,
0.05097265914082527,
0.0655137449502945,
0.16532930731773376,
0.0076263356022536755,
-0.03578883782029152,
-0.01183684915304184,
-0.05091783404350281,
-0.11621317267417908,
0.03544285148382187,
-0.16773481667041779,
0.0241752490401268,
-0.03807392343878746,
-0.04980041831731796,
0.10800310224294662,
0.12632329761981964,
-0.014599042013287544,
-0.119159996509552,
-0.07564909011125565,
0.04600844904780388,
0.025888295844197273,
-0.06922194361686707,
0.08997709304094315,
0.12569935619831085,
-0.21360917389392853,
0.017681699246168137,
-0.04547867923974991,
0.09026162326335907,
-0.1504315435886383,
0.012482357211411,
-0.03865005448460579,
-0.11630693078041077,
-0.010998348705470562,
0.1128266230225563,
-0.19644489884376526,
0.21118870377540588,
-0.024404939264059067,
0.0705917477607727,
-0.1651470810174942,
-0.08646690845489502,
0.003990004304796457,
0.07063215970993042,
0.18992841243743896,
0.024121001362800598,
0.12824155390262604,
-0.13162089884281158,
-0.07996263355016708,
0.05238454043865204,
0.07506121695041656,
0.028579995036125183,
0.008130403235554695,
-0.06828667223453522,
0.07672349363565445,
-0.08766490966081619,
0.023937581107020378,
-0.014362158253788948,
-0.023087499663233757,
0.025365713983774185,
-0.010279756970703602,
0.08497754484415054,
-0.05665098875761032,
-0.1031176820397377,
-0.012710888870060444,
-0.0024628934916108847,
0.0438360795378685,
-0.21432626247406006,
-0.03128724917769432,
-0.05379501357674599,
-0.020395006984472275,
-0.0487796887755394,
-0.01748146302998066,
-0.0037066389340907335,
0.04185586795210838,
0.02807224728167057,
-0.04997257888317108,
0.08792980015277863,
-0.018038416281342506,
-0.3067166209220886,
0.02610337920486927,
0.138636976480484,
0.0698782429099083,
0.07179062813520432,
0.032310787588357925,
-0.00648601446300745,
-0.041946034878492355,
-0.13840484619140625,
-0.06268039345741272,
0.09344086796045303,
0.007246151566505432,
0.0048647415824234486,
0.12399572879076004,
0.13638167083263397,
-0.07147365063428879,
-0.13450361788272858,
0.17542463541030884,
0.12280810624361038,
0.0165286622941494,
0.08165545016527176,
0.18856649100780487,
-0.08866085112094879,
-0.25288328528404236,
-0.023395899683237076,
-0.04954301938414574,
-0.03232719749212265,
-0.11480606347322464,
-0.061792295426130295,
0.051565103232860565,
0.09864699840545654,
0.0007492959266528487,
0.03296380862593651,
-0.3106105923652649,
-0.1328423023223877,
0.0603543259203434,
0.0637844055891037,
0.25094857811927795,
-0.16895727813243866,
0.0315154492855072,
-0.04477942734956741,
-0.12261130660772324,
0.08993519842624664,
-0.1330101191997528,
0.08314011245965958,
-0.06520397961139679,
0.019172323867678642,
0.004801888018846512,
-0.048441607505083084,
0.13199079036712646,
-0.06816070526838303,
0.025380272418260574,
-0.1332247406244278,
-0.0375332310795784,
0.0799955353140831,
0.0523492805659771,
0.043088797479867935,
-0.006053577177226543,
-0.04532068595290184,
-0.1108722984790802,
-0.14140430092811584,
-0.13924334943294525,
-0.041441649198532104,
-0.004057150334119797,
-0.10956726223230362,
-0.03516127169132233,
0.0900668129324913,
0.013706029392778873,
0.03952586650848389,
-0.03614667430520058,
-0.0971447080373764,
-0.025620965287089348,
-0.03400652855634689,
0.2362818419933319,
-0.15937037765979767,
0.09705417603254318,
-0.09034978598356247,
0.03511960431933403,
0.016396034508943558,
-0.17956580221652985,
-0.043929196894168854,
0.09661812335252762,
-0.006956353317946196,
0.08464685827493668,
0.07215024530887604,
-0.02086581103503704,
0.09028303623199463,
0.03982904553413391,
-0.14714384078979492,
-0.1401476114988327,
-0.11336679756641388,
-0.12496325373649597,
0.04089390113949776,
-0.01452871598303318,
0.06877730041742325,
-0.11506444215774536,
-0.03845740482211113,
-0.04457524046301842,
0.03404160216450691,
-0.03254251554608345,
0.07793959975242615,
-0.07492068409919739,
-0.0353623703122139,
-0.14327967166900635,
0.1680595874786377,
0.0562317781150341,
-0.021276716142892838,
-0.007500651758164167,
0.08036691695451736,
-0.08047164231538773,
-0.028960350900888443,
-0.05007975175976753,
-0.0005265638465061784,
0.08178283274173737,
-0.10616904497146606,
-0.07094936072826385,
-0.11458055675029755,
-0.04095619171857834,
-0.1323215514421463,
0.030915703624486923,
0.05439252033829689,
-0.03546755388379097,
-0.05012040212750435,
-0.0623980276286602,
0.07738018780946732,
0.05492505431175232,
-0.03286106511950493,
-0.06662853062152863,
0.12311972677707672,
0.11834647506475449,
0.07970932871103287,
-0.08938595652580261,
-0.06423227488994598,
-0.07235153764486313,
0.07843082398176193,
-0.08643491566181183,
0.07549374550580978,
-0.16216880083084106,
-0.015956269577145576,
-0.03485357016324997,
-0.07987730205059052,
-0.052653588354587555,
-0.009793112985789776,
-0.06395720690488815,
0.05831305310130119,
0.016147667542099953,
-0.00022231449838727713,
-0.011515899561345577,
-0.033325694501399994,
0.09727074950933456,
-0.03919777646660805,
0.12564674019813538,
0.07688488066196442,
-0.11916237324476242,
0.1496688723564148,
-0.2887841463088989,
0.08437638729810715,
0.044323842972517014,
0.005301788914948702,
-0.047750797122716904,
-0.014619913883507252,
0.023647872731089592,
0.103417307138443,
-0.04319831728935242,
0.06046050786972046,
0.02685677260160446,
-0.11062216758728027,
-0.014458365738391876,
-0.00575158791616559,
-0.030784688889980316,
-0.028222501277923584,
-0.06350560486316681,
0.03681819885969162,
-0.017581967636942863,
0.14254865050315857,
-0.0514712780714035,
0.14530658721923828,
-0.13451646268367767,
-0.015245397575199604,
0.03152110055088997,
-0.06564249843358994,
0.0024439385160803795,
-0.15363098680973053,
-0.0034709128085523844,
0.04367700219154358,
0.2471730262041092,
0.08117684721946716,
-0.009224212728440762,
-0.042133063077926636,
0.17775261402130127,
0.10636390745639801,
0.015048852190375328,
0.15125878155231476,
0.05627050995826721,
-0.03593050315976143,
-0.052599694579839706,
0.05407450720667839,
0.030374934896826744,
0.027336258441209793,
0.12280932068824768,
-0.02279040962457657,
0.07107573747634888,
0.07172635197639465,
-0.041941966861486435,
0.11356397718191147,
-0.14191757142543793,
-0.11829166114330292,
0.03630302473902702,
0.01535883266478777,
-0.05752160772681236,
0.17326082289218903,
0.16631890833377838,
-0.08678017556667328,
-0.010020308196544647,
-0.032416798174381256,
-0.08230302482843399,
-0.10791055113077164,
-0.30329546332359314,
-0.0313383974134922,
-0.08650524169206619,
0.010210735723376274,
-0.1191079169511795,
-0.060839176177978516,
0.14440786838531494,
0.05174409970641136,
-0.13024412095546722,
0.19183358550071716,
0.09845448285341263,
-0.05462606996297836,
0.14033111929893494,
0.004336369223892689,
0.07375480234622955,
-0.07328872382640839,
-0.0008695082506164908,
-0.006432763300836086,
0.05076258257031441,
0.08664638549089432,
0.0462309755384922,
-0.1018986776471138,
-0.047658707946538925,
-0.09980189800262451,
-0.04790894687175751,
-0.04900456964969635,
0.03214290738105774,
0.05979785695672035,
0.08116745948791504,
0.0180965643376112,
0.007192638237029314,
0.017380572855472565,
0.1490243673324585,
0.0286631528288126,
-0.0062999422661960125,
-0.07010010629892349,
0.03078303299844265,
-0.11786776781082153,
-0.09338690340518951,
0.10829425603151321,
-0.03845895826816559,
0.004766686819493771,
0.34794530272483826,
0.25524741411209106,
0.004079603590071201,
0.0006774360663257539,
-0.05231764167547226,
0.022349001839756966,
-0.030208827927708626,
0.18750561773777008,
0.07989820092916489,
0.21254199743270874,
-0.06596070528030396,
0.08024148643016815,
-0.056092169135808945,
-0.08863367140293121,
0.08888561278581619,
0.11430531740188599,
0.04762773960828781,
-0.01766432635486126,
-0.09386397898197174,
0.1490551233291626,
-0.23818960785865784,
0.04907524213194847,
-0.018364042043685913,
-0.07039809226989746,
-0.06279783695936203,
-0.007212233263999224,
-0.008139957673847675,
0.09587075561285019,
0.056866735219955444,
0.012667961418628693,
0.06546130031347275,
-0.0005299899494275451,
0.04599663242697716,
-0.106425441801548,
0.016400812193751335,
0.09553097933530807,
-0.06327524781227112,
0.23224927484989166,
-0.0013599192025139928,
0.04847744107246399,
0.043373435735702515,
0.03321819379925728,
-0.10106952488422394,
0.060324933379888535,
0.008009977638721466,
0.010489050298929214,
-0.005516367964446545,
0.07982607185840607,
0.04314465820789337,
-0.19953644275665283,
0.09083996713161469,
0.03764496371150017,
0.1115429550409317,
0.09162420779466629,
0.06714179366827011,
-0.10537946969270706,
0.03522692993283272,
-0.0680152103304863,
0.06652956455945969,
0.13077783584594727,
-0.07559561729431152,
-0.015892062336206436,
-0.0720801055431366,
0.06694301217794418,
-0.07597322762012482,
-0.04294803366065025,
0.039549730718135834,
-0.15288281440734863,
-0.026425648480653763,
0.08384658396244049,
0.00012121503823436797,
-0.12836050987243652,
0.07836499065160751,
-0.09380936622619629,
-0.07655257731676102,
-0.0769549086689949,
0.019512534141540527,
-0.07831765711307526,
0.05217881500720978,
0.021321158856153488,
-0.01743224821984768,
-0.04150928929448128,
0.10301994532346725,
-0.0736495703458786,
-0.13091176748275757
] |
null | null |
transformers
|
## GPT-2 Indonesian Small Kids Stories
GPT-2 Indonesian Small Kids Stories is a causal language model based on the [OpenAI GPT-2](https://cdn.openai.com/better-language-models/language_models_are_unsupervised_multitask_learners.pdf) model. The model was originally the pre-trained [GPT2 Small Indonesian](https://huggingface.co/flax-community/gpt2-small-indonesian) model, which was then fine-tuned on Indonesian kids' stories from [Room To Read](https://literacycloud.org/) and [Let's Read](https://reader.letsreadasia.org/).
10% of the dataset was kept for evaluation purposes. The pre-trained model was fine-tuned and achieved an evaluation loss of 3.777 and an evaluation perplexity of 43.68.
Hugging Face's `Trainer` class from the [Transformers](https://huggingface.co/transformers) library was used to train the model. PyTorch was used as the backend framework during training, but the model remains compatible with other frameworks nonetheless.
## Model
| Model | #params | Arch. | Training/Validation data (text) |
| ------------------------------ | ------- | ---------- | --------------------------------- |
| `gpt2-indo-small-kids-stories` | 124M | GPT2 Small | Indonesian Kids' Stories (860 KB) |
## Evaluation Results
The model was fine-tuned for 10 epochs.
| Epoch | Training Loss | Validation Loss |
| ----- | ------------- | --------------- |
| 1 | 4.259600 | 4.020201 |
| 2 | 3.979100 | 3.911295 |
| 3 | 3.818300 | 3.849313 |
| 4 | 3.691600 | 3.809931 |
| 5 | 3.589300 | 3.789201 |
| 6 | 3.506200 | 3.778665 |
| 7 | 3.439200 | 3.774871 |
| 8 | 3.387600 | 3.774859 |
| 9 | 3.351300 | 3.776672 |
| 10 | 3.330100 | 3.776935 |
## How to Use (PyTorch)
### As Causal Language Model
```python
from transformers import pipeline
pretrained_name = "bookbot/gpt2-indo-small-kids-stories"
nlp = pipeline(
"text-generation",
model=pretrained_name,
tokenizer=pretrained_name
)
nlp("Archie sedang mengendarai roket ke planet Mars.")
```
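Sampling-based generation is stochastic; for reproducible outputs the seed can be fixed before calling the pipeline (the seed and sampling settings below are illustrative, not values from the original card):
```python
from transformers import set_seed

set_seed(42)  # illustrative seed
nlp(
    "Archie sedang mengendarai roket ke planet Mars.",
    do_sample=True,
    num_return_sequences=3,
)
```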
### Feature Extraction in PyTorch
```python
from transformers import GPT2LMHeadModel, GPT2TokenizerFast
pretrained_name = "bookbot/gpt2-indo-small-kids-stories"
model = GPT2LMHeadModel.from_pretrained(pretrained_name)
tokenizer = GPT2TokenizerFast.from_pretrained(pretrained_name)
prompt = "Archie sedang mengendarai roket ke planet Mars."
encoded_input = tokenizer(prompt, return_tensors='pt')
output = model(**encoded_input)
```
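Because the checkpoint includes the LM head, the same objects can also score text: passing the input ids as labels returns the mean cross-entropy loss, and its exponential is the perplexity of the prompt under the model (a small sketch reusing the objects above):
```python
import torch

with torch.no_grad():
    scored = model(**encoded_input, labels=encoded_input["input_ids"])
print(torch.exp(scored.loss))  # perplexity of the prompt under this model
```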
## Disclaimer
Do consider the biases which come from both the pre-trained GPT-2 model and the Indonesian Kids' Stories dataset that may be carried over into the results of this model.
## Author
GPT-2 Indonesian Small Kids Stories was trained and evaluated by [Wilson Wongso](https://w11wo.github.io/). All computation and development are done on Google Colaboratory using their free GPU access.
|
{"language": "id", "license": "mit", "tags": ["gpt2-indo-small-kids-stories"], "widget": [{"text": "Archie sedang mengendarai roket ke planet Mars."}]}
|
text-generation
|
bookbot/gpt2-indo-small-kids-stories
|
[
"transformers",
"pytorch",
"safetensors",
"gpt2",
"text-generation",
"gpt2-indo-small-kids-stories",
"id",
"license:mit",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"id"
] |
TAGS
#transformers #pytorch #safetensors #gpt2 #text-generation #gpt2-indo-small-kids-stories #id #license-mit #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
GPT-2 Indonesian Small Kids Stories
-----------------------------------
GPT-2 Indonesian Small Kids Stories is a causal language model based on the OpenAI GPT-2 model. The model was originally the pre-trained GPT2 Small Indonesian model, which was then fine-tuned on Indonesian kids' stories from Room To Read and Let's Read.
10% of the dataset was kept for evaluation purposes. The pre-trained model was fine-tuned and achieved an evaluation loss of 3.777 and an evaluation perplexity of 43.68.
Hugging Face's 'Trainer' class from the Transformers library was used to train the model. PyTorch was used as the backend framework during training, but the model remains compatible with other frameworks nonetheless.
Model
-----
Evaluation Results
------------------
The model was fine-tuned for 10 epochs.
Epoch: 1, Training Loss: 4.259600, Validation Loss: 4.020201
Epoch: 2, Training Loss: 3.979100, Validation Loss: 3.911295
Epoch: 3, Training Loss: 3.818300, Validation Loss: 3.849313
Epoch: 4, Training Loss: 3.691600, Validation Loss: 3.809931
Epoch: 5, Training Loss: 3.589300, Validation Loss: 3.789201
Epoch: 6, Training Loss: 3.506200, Validation Loss: 3.778665
Epoch: 7, Training Loss: 3.439200, Validation Loss: 3.774871
Epoch: 8, Training Loss: 3.387600, Validation Loss: 3.774859
Epoch: 9, Training Loss: 3.351300, Validation Loss: 3.776672
Epoch: 10, Training Loss: 3.330100, Validation Loss: 3.776935
How to Use (PyTorch)
--------------------
### As Causal Language Model
### Feature Extraction in PyTorch
Disclaimer
----------
Do consider the biases which come from both the pre-trained GPT-2 model and the Indonesian Kids' Stories dataset that may be carried over into the results of this model.
Author
------
GPT-2 Indonesian Small Kids Stories was trained and evaluated by Wilson Wongso. All computation and development are done on Google Colaboratory using their free GPU access.
|
[
"### As Causal Language Model",
"### Feature Extraction in PyTorch\n\n\nDisclaimer\n----------\n\n\nDo consider the biases which come from both the pre-trained GPT-2 model and the Indonesian Kids' Stories dataset that may be carried over into the results of this model.\n\n\nAuthor\n------\n\n\nGPT-2 Indonesian Small Kids Stories was trained and evaluated by Wilson Wongso. All computation and development are done on Google Colaboratory using their free GPU access."
] |
[
"TAGS\n#transformers #pytorch #safetensors #gpt2 #text-generation #gpt2-indo-small-kids-stories #id #license-mit #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"### As Causal Language Model",
"### Feature Extraction in PyTorch\n\n\nDisclaimer\n----------\n\n\nDo consider the biases which come from both the pre-trained GPT-2 model and the Indonesian Kids' Stories dataset that may be carried over into the results of this model.\n\n\nAuthor\n------\n\n\nGPT-2 Indonesian Small Kids Stories was trained and evaluated by Wilson Wongso. All computation and development are done on Google Colaboratory using their free GPU access."
] |
[
72,
7,
92
] |
[
"passage: TAGS\n#transformers #pytorch #safetensors #gpt2 #text-generation #gpt2-indo-small-kids-stories #id #license-mit #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n### As Causal Language Model### Feature Extraction in PyTorch\n\n\nDisclaimer\n----------\n\n\nDo consider the biases which come from both the pre-trained GPT-2 model and the Indonesian Kids' Stories dataset that may be carried over into the results of this model.\n\n\nAuthor\n------\n\n\nGPT-2 Indonesian Small Kids Stories was trained and evaluated by Wilson Wongso. All computation and development are done on Google Colaboratory using their free GPU access."
] |
[
-0.031073274090886116,
-0.014923895709216595,
-0.0014417374040931463,
0.12117864936590195,
0.04505523666739464,
0.01016995869576931,
0.2353733777999878,
0.06776216626167297,
-0.04512723907828331,
-0.1364143043756485,
0.11829527467489243,
-0.01995142176747322,
0.10874899476766586,
0.06178665533661842,
-0.02114093489944935,
-0.30297407507896423,
0.03775130212306976,
-0.0005753511213697493,
0.1803186982870102,
0.188484787940979,
0.01354310568422079,
-0.019878791645169258,
0.06457843631505966,
0.1033138632774353,
-0.09224051982164383,
-0.1402328908443451,
0.021603452041745186,
-0.09641202539205551,
0.06265147775411606,
-0.06018393114209175,
0.04271302372217178,
0.03902324289083481,
0.0001546154817333445,
0.07852575927972794,
0.027198821306228638,
-0.003134478349238634,
0.006669221445918083,
0.010167768225073814,
0.07155423611402512,
0.05593502148985863,
0.2390194833278656,
0.06196984648704529,
-0.04121289402246475,
0.0006073300610296428,
-0.08152136951684952,
-0.24563515186309814,
0.03769203647971153,
0.023203587159514427,
0.036487676203250885,
0.0830387994647026,
-0.045041512697935104,
0.0949966087937355,
-0.25558972358703613,
0.03096850775182247,
0.2000230997800827,
-0.15778180956840515,
-0.08135909587144852,
0.07212219387292862,
0.004827162250876427,
-0.024303307756781578,
-0.0637560710310936,
-0.0006835456588305533,
0.027754148468375206,
0.000597182719502598,
-0.008820926770567894,
-0.07185440510511398,
-0.07217346876859665,
-0.08991623669862747,
-0.08087708055973053,
-0.060829732567071915,
0.1298973560333252,
-0.10093548893928528,
-0.056794945150613785,
-0.20160552859306335,
-0.014821791090071201,
-0.03188396617770195,
-0.02490353398025036,
0.02680096961557865,
-0.006192139349877834,
0.04657734930515289,
0.0899399146437645,
-0.0350784994661808,
-0.13305658102035522,
-0.05791269615292549,
0.01746894046664238,
0.10757171362638474,
0.010910701006650925,
0.055244117975234985,
-0.1850050538778305,
0.14070695638656616,
0.12804153561592102,
-0.10561475902795792,
-0.03489319607615471,
-0.06564659625291824,
0.15062637627124786,
0.09063373506069183,
-0.01563943363726139,
0.14240163564682007,
0.03796452656388283,
0.010002285242080688,
-0.06611140072345734,
-0.04716988652944565,
0.08363540470600128,
0.047992970794439316,
0.04143974557518959,
0.07340195775032043,
-0.05419968441128731,
0.04078622907400131,
0.12225624918937683,
0.0065574017353355885,
0.1069590225815773,
-0.003588179824873805,
-0.07934384047985077,
-0.028869688510894775,
0.05378922075033188,
0.0768924430012703,
0.01947665400803089,
0.1353815197944641,
-0.031298648566007614,
-0.028681404888629913,
0.03893430903553963,
-0.014978620223701,
-0.05013732239603996,
0.016731007024645805,
-0.04497455433011055,
0.17410142719745636,
-0.0016327082412317395,
0.012996851466596127,
-0.10365530848503113,
0.062034107744693756,
-0.02154827117919922,
0.005804972257465124,
0.02415754646062851,
0.06489045917987823,
0.05976549908518791,
-0.05178707465529442,
0.04262833669781685,
-0.10334180295467377,
-0.23624972999095917,
-0.022976383566856384,
0.0019156377529725432,
-0.07629194855690002,
-0.028607739135622978,
-0.08562283217906952,
-0.08222946524620056,
-0.006645337678492069,
-0.022772813215851784,
0.002282939152792096,
-0.057868048548698425,
0.10358890146017075,
-0.04262501373887062,
0.054011039435863495,
0.050668638199567795,
-0.016256073489785194,
-0.26732000708580017,
0.05079697072505951,
-0.03714977949857712,
0.09132157266139984,
0.17287373542785645,
0.0515555776655674,
0.050556302070617676,
-0.11896514892578125,
-0.08302972465753555,
-0.008202805183827877,
-0.07511434704065323,
0.23371009528636932,
0.00956177618354559,
-0.09696457535028458,
0.15048930048942566,
-0.14074844121932983,
-0.16150107979774475,
0.17636923491954803,
0.007215072400867939,
0.2822771966457367,
0.10151207447052002,
0.22633612155914307,
-0.07697994261980057,
0.07031840831041336,
0.00839161779731512,
-0.0324736014008522,
0.020560385659337044,
0.04422153905034065,
0.12115925550460815,
0.08274423331022263,
0.029547272250056267,
0.07289096713066101,
-0.0747300311923027,
0.09406327456235886,
0.018255552276968956,
-0.027640588581562042,
-0.009169911965727806,
-0.030376436188817024,
0.14075890183448792,
0.06419342011213303,
0.07198182493448257,
-0.054303597658872604,
-0.051032353192567825,
-0.15583810210227966,
0.028852304443717003,
-0.08643820136785507,
-0.00009522304753772914,
-0.1230490431189537,
0.16698554158210754,
-0.04849991947412491,
0.047895096242427826,
0.0032541935797780752,
0.04069938510656357,
-0.01570243388414383,
-0.10647296160459518,
-0.06365867704153061,
-0.08244486898183823,
0.021847957745194435,
-0.0011791146825999022,
-0.011152077466249466,
-0.005963482894003391,
0.041035305708646774,
-0.06152983754873276,
0.05091313645243645,
-0.029391974210739136,
0.034343499690294266,
0.03010816127061844,
0.22301846742630005,
-0.1885160654783249,
0.024941613897681236,
0.00963549967855215,
-0.0017983767902478576,
-0.025785163044929504,
-0.0031853807158768177,
0.07324826717376709,
-0.029558274894952774,
-0.025718050077557564,
-0.06194178760051727,
0.09532684087753296,
0.07255138456821442,
-0.05161408334970474,
0.17163905501365662,
-0.17187140882015228,
-0.017082901671528816,
0.13589122891426086,
-0.2384602129459381,
0.02178681641817093,
-0.06263775378465652,
-0.023457886651158333,
0.007232003379613161,
0.011737346649169922,
0.04172559455037117,
0.18033945560455322,
0.004532638005912304,
0.13379061222076416,
-0.18062041699886322,
-0.10537681728601456,
0.04555843397974968,
-0.06542793661355972,
0.10603567957878113,
0.09213540703058243,
-0.03519180417060852,
-0.047066714614629745,
0.12146888673305511,
0.12126468867063522,
0.0800996944308281,
0.199056476354599,
0.012834783643484116,
-0.007910947315394878,
-0.007573164068162441,
-0.02570808120071888,
-0.05204913765192032,
-0.023552894592285156,
-0.28737810254096985,
0.016471609473228455,
0.09635283797979355,
0.007585272658616304,
0.060796573758125305,
-0.16751520335674286,
-0.07814943790435791,
0.004757456481456757,
-0.0157532449811697,
-0.10222811996936798,
0.08255105465650558,
-0.0254970695823431,
0.08199802786111832,
-0.03471822664141655,
0.1000116690993309,
0.05275048315525055,
0.0002748947881627828,
-0.1133299320936203,
0.2074097990989685,
0.09178245067596436,
-0.36713090538978577,
-0.06284559518098831,
0.05631249025464058,
-0.057040341198444366,
0.03377886489033699,
0.08366237580776215,
-0.0713382437825203,
0.04855913296341896,
-0.07063036412000656,
0.09812238067388535,
0.0840146541595459,
-0.052069224417209625,
-0.06639154255390167,
0.04955068230628967,
-0.1310432106256485,
-0.039113640785217285,
-0.058100394904613495,
0.02965848706662655,
-0.14828486740589142,
0.1738409548997879,
-0.10462125390768051,
-0.07591722160577774,
0.17404529452323914,
0.0548967607319355,
-0.04203784465789795,
-0.02255723439157009,
0.14502066373825073,
-0.13666430115699768,
0.025971848517656326,
0.2534944713115692,
-0.1537407785654068,
-0.012869643978774548,
0.06155269593000412,
-0.028086762875318527,
-0.14088088274002075,
0.06584639102220535,
-0.06807305663824081,
-0.11548416316509247,
-0.23351171612739563,
-0.0709865540266037,
-0.04787833243608475,
0.14882014691829681,
-0.031830720603466034,
0.039773520082235336,
0.08843064308166504,
0.13639502227306366,
-0.09753541648387909,
0.05910979583859444,
0.07748113572597504,
0.03184840455651283,
0.0994599312543869,
-0.0015869704075157642,
0.024573251605033875,
-0.027871781960129738,
-0.06289428472518921,
0.03419911861419678,
-0.13819174468517303,
0.11612498760223389,
-0.07076173275709152,
0.1292896568775177,
0.027312690392136574,
0.07135321199893951,
0.0868806540966034,
0.16973531246185303,
-0.0048707653768360615,
-0.053259219974279404,
-0.009805938228964806,
-0.06682344526052475,
-0.1369805634021759,
0.031974948942661285,
-0.1545625627040863,
0.041977543383836746,
-0.04462689533829689,
-0.039073459804058075,
0.08707907795906067,
0.1540568768978119,
0.03014284186065197,
-0.14233876764774323,
-0.10687423497438431,
0.035809408873319626,
0.02444419637322426,
-0.047803718596696854,
0.08571132272481918,
0.1221526563167572,
-0.21814514696598053,
-0.037222106009721756,
-0.017788143828511238,
0.07035399973392487,
-0.16310791671276093,
0.017515137791633606,
-0.06145169958472252,
-0.11056007444858551,
0.014759477227926254,
0.10763589292764664,
-0.1662207543849945,
0.154862180352211,
-0.03716549277305603,
0.08017648756504059,
-0.20429131388664246,
-0.06831682473421097,
0.010871104896068573,
-0.0002739015035331249,
0.18595261871814728,
0.026912253350019455,
0.12647809088230133,
-0.13904038071632385,
-0.09733907878398895,
0.06032891571521759,
0.07093583792448044,
0.012408222071826458,
0.015820292755961418,
-0.1139678880572319,
0.08421322703361511,
-0.08341380208730698,
-0.015005896799266338,
-0.0014862807001918554,
-0.022121552377939224,
0.03502530977129936,
0.006322699133306742,
0.08602793514728546,
-0.05650901794433594,
-0.1029285416007042,
-0.011789755895733833,
-0.011506453156471252,
0.05154823511838913,
-0.2211001217365265,
-0.014319205656647682,
-0.056533314287662506,
-0.009588128887116909,
-0.044975485652685165,
0.006386656314134598,
0.007805491331964731,
0.04604402184486389,
0.049616653472185135,
-0.04638873040676117,
0.06896419078111649,
-0.03576110303401947,
-0.32946711778640747,
0.05623314902186394,
0.11851880699396133,
0.06002555042505264,
0.07984903454780579,
0.05972994491457939,
-0.021105481311678886,
-0.03148453310132027,
-0.11935622245073318,
-0.07999377697706223,
0.09929854422807693,
-0.004522408824414015,
0.008773036301136017,
0.11116939038038254,
0.15501615405082703,
-0.054130345582962036,
-0.12387751787900925,
0.20942646265029907,
0.17469747364521027,
0.006875636056065559,
0.08514780551195145,
0.17969633638858795,
-0.06298957020044327,
-0.2871188819408417,
-0.02640116587281227,
-0.05240299552679062,
-0.04856586456298828,
-0.08623774349689484,
-0.04121919721364975,
0.08776707202196121,
0.10825040191411972,
-0.007048983126878738,
0.03358739987015724,
-0.35735583305358887,
-0.11284320801496506,
0.050221025943756104,
0.03719943016767502,
0.2693791091442108,
-0.16000233590602875,
0.036625027656555176,
-0.03414740413427353,
-0.10978875309228897,
0.10142996907234192,
-0.09414850175380707,
0.08791855722665787,
-0.07980078458786011,
0.007544770836830139,
0.018760761246085167,
-0.04204507917165756,
0.1342802345752716,
-0.08831986039876938,
0.009473933838307858,
-0.14787600934505463,
-0.0478646382689476,
0.08687517046928406,
0.054118409752845764,
0.03847100958228111,
0.016857996582984924,
-0.052493978291749954,
-0.11743336170911789,
-0.14076073467731476,
-0.13994646072387695,
-0.01919148676097393,
0.0017645101761445403,
-0.11774725466966629,
-0.06914551556110382,
0.0912468284368515,
0.009335813112556934,
0.04194158688187599,
-0.025222765281796455,
-0.09202268719673157,
-0.04841816425323486,
-0.07583119720220566,
0.2492549568414688,
-0.16427794098854065,
0.09635122865438461,
-0.08569278568029404,
0.04165719076991081,
0.015036693774163723,
-0.16520772874355316,
-0.02624874748289585,
0.07593360543251038,
0.009532286785542965,
0.04432795196771622,
0.07968977093696594,
-0.021547356620430946,
0.0993696004152298,
0.03207624703645706,
-0.10752508789300919,
-0.16916082799434662,
-0.09141123294830322,
-0.14185118675231934,
0.01199128944426775,
-0.032385677099227905,
0.07275217771530151,
-0.1262732446193695,
-0.061470311135053635,
-0.05459784343838692,
0.031903769820928574,
-0.03897261247038841,
0.08470752090215683,
-0.0738576352596283,
-0.04163512587547302,
-0.14585058391094208,
0.17803558707237244,
0.061664141714572906,
-0.03864511474967003,
0.01825675182044506,
0.07399357855319977,
-0.06522674858570099,
-0.03115471825003624,
-0.009215163066983223,
-0.020123936235904694,
0.09727154672145844,
-0.10009437799453735,
-0.0781605914235115,
-0.11036519706249237,
-0.028252365067601204,
-0.169704869389534,
0.036822110414505005,
0.055290766060352325,
-0.028722135350108147,
-0.0374976322054863,
-0.10164421051740646,
0.06946218758821487,
0.04379461333155632,
-0.036870528012514114,
-0.05845832824707031,
0.10661406815052032,
0.07512064278125763,
0.07802902907133102,
-0.08577897399663925,
-0.044915471225976944,
-0.08419012278318405,
0.06001395359635353,
-0.10021523386240005,
0.11067122966051102,
-0.1658690720796585,
-0.02237585559487343,
-0.027900608256459236,
-0.048233021050691605,
-0.053712937980890274,
-0.006018875632435083,
-0.07357034832239151,
0.05815853923559189,
0.014197494834661484,
0.026601988822221756,
0.011707774363458157,
-0.01012690830975771,
0.09510219097137451,
-0.03374391794204712,
0.1463909149169922,
0.07064434885978699,
-0.12101085484027863,
0.11773385852575302,
-0.2635757625102997,
0.05589086189866066,
0.035405226051807404,
-0.0007631068001501262,
-0.029149159789085388,
-0.017401859164237976,
0.039660077542066574,
0.10711218416690826,
-0.05487615242600441,
0.05900502949953079,
0.06019066646695137,
-0.11525354534387589,
0.010786149650812149,
-0.01454258058220148,
-0.01390992384403944,
-0.017446691170334816,
-0.08860385417938232,
-0.0000493234874738846,
-0.04366712644696236,
0.14457279443740845,
-0.04323223978281021,
0.13115528225898743,
-0.11947315186262131,
-0.024957727640867233,
0.0020791932474821806,
-0.06151428446173668,
0.00891210325062275,
-0.135800302028656,
0.00010441902850288898,
0.07403683662414551,
0.21680264174938202,
0.09486622363328934,
-0.02943788468837738,
-0.04529225453734398,
0.14556242525577545,
0.14951561391353607,
0.01639385148882866,
0.19911248981952667,
0.07343480736017227,
-0.05679451674222946,
-0.046670425683259964,
0.052369795739650726,
0.05563652142882347,
0.05874559283256531,
0.16209624707698822,
0.02018037810921669,
0.051281556487083435,
0.08169592171907425,
-0.05006442591547966,
0.0871395543217659,
-0.14601017534732819,
-0.11400675773620605,
0.06742649525403976,
0.007035769056528807,
-0.07586467266082764,
0.13102351129055023,
0.18432500958442688,
-0.05474727600812912,
-0.009228748269379139,
-0.07011596858501434,
-0.054078273475170135,
-0.11869478970766068,
-0.2859203815460205,
-0.03513818234205246,
-0.08624254167079926,
-0.00990728847682476,
-0.10628223419189453,
-0.0801803469657898,
0.17135149240493774,
0.0565229132771492,
-0.14149747788906097,
0.18390212953090668,
0.06302950531244278,
-0.04956825450062752,
0.1342313438653946,
-0.0145427156239748,
0.05888081714510918,
-0.08887838572263718,
-0.025767961516976357,
-0.020441878587007523,
0.06644981354475021,
0.09889914095401764,
0.052223917096853256,
-0.0956120565533638,
-0.053895074874162674,
-0.10316592454910278,
-0.04970768094062805,
-0.04496321827173233,
0.019132673740386963,
0.043756917119026184,
0.05370185524225235,
0.013503964059054852,
-0.002775294939056039,
0.02269638516008854,
0.14856386184692383,
0.019786834716796875,
-0.04295746982097626,
-0.05973679572343826,
0.035939086228609085,
-0.12447599321603775,
-0.0857953131198883,
0.10288409888744354,
-0.04870161414146423,
-0.002086507622152567,
0.3909335136413574,
0.256299763917923,
0.030277473852038383,
0.016681253910064697,
-0.04184320196509361,
0.017327386885881424,
-0.07892484217882156,
0.20687805116176605,
0.07857902348041534,
0.21119791269302368,
-0.06592351198196411,
0.08932846784591675,
-0.033084675669670105,
-0.0721677765250206,
0.033095747232437134,
0.13330498337745667,
0.0368545837700367,
-0.001978076295927167,
-0.07396964728832245,
0.10942051559686661,
-0.23777586221694946,
0.05990869179368019,
-0.008913666941225529,
-0.05799345672130585,
-0.07097726315259933,
0.0022139842621982098,
-0.018380338326096535,
0.10416731238365173,
0.07735398411750793,
0.00734822079539299,
0.05271096155047417,
-0.019901610910892487,
0.032327547669410706,
-0.1051185354590416,
0.03506717085838318,
0.10196837037801743,
-0.07374364137649536,
0.2748984694480896,
-0.003775358898565173,
0.02924491837620735,
0.05602780729532242,
0.0269350316375494,
-0.15327633917331696,
0.028434863314032555,
0.01110384427011013,
0.038116779178380966,
0.0026668577920645475,
0.08897217363119125,
0.03184615448117256,
-0.19335976243019104,
0.10215754806995392,
0.009702567011117935,
0.10407477617263794,
0.11083898693323135,
0.10850532352924347,
-0.11253559589385986,
-0.0019420088501647115,
-0.057883042842149734,
0.05543942376971245,
0.10320842266082764,
-0.09239450097084045,
-0.03716161474585533,
-0.07172446697950363,
0.09140525758266449,
-0.04645344242453575,
-0.03108232282102108,
0.027121132239699364,
-0.1604679971933365,
-0.02068127505481243,
0.07497672736644745,
0.013145345263183117,
-0.10919270664453506,
0.09323962777853012,
-0.07954226434230804,
-0.07789785414934158,
-0.08493942767381668,
-0.006022095214575529,
-0.06104876473546028,
0.051203370094299316,
0.022662587463855743,
-0.03561093658208847,
-0.05639420822262764,
0.11073629558086395,
-0.06946979463100433,
-0.13279809057712555
] |
null | null |
transformers
|
# Wav2Vec2 Adult/Child Speech Classifier
Wav2Vec2 Adult/Child Speech Classifier is an audio classification model based on the [wav2vec 2.0](https://arxiv.org/abs/2006.11477) architecture. This model is a fine-tuned version of [wav2vec2-base](https://huggingface.co/facebook/wav2vec2-base) on a private adult/child speech classification dataset.
This model was trained using Hugging Face's Transformers framework with a PyTorch backend. All training was done on a Tesla P100 GPU provided by Kaggle, and training metrics were logged via Tensorboard.
## Model
| Model | #params | Arch. | Training/Validation data (text) |
| -------------------------- | ------- | ----------- | ----------------------------------------- |
| `wav2vec2-adult-child-cls` | 91M | wav2vec 2.0 | Adult/Child Speech Classification Dataset |
## Evaluation Results
The model achieves the following results on evaluation:
| Dataset | Loss | Accuracy | F1 |
| --------------------------------- | ------ | -------- | ------ |
| Adult/Child Speech Classification | 0.1682 | 95.80% | 0.9618 |
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- `learning_rate`: 3e-05
- `train_batch_size`: 32
- `eval_batch_size`: 32
- `seed`: 42
- `optimizer`: Adam with `betas=(0.9,0.999)` and `epsilon=1e-08`
- `lr_scheduler_type`: linear
- `lr_scheduler_warmup_ratio`: 0.1
- `num_epochs`: 5
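These settings map one-to-one onto Hugging Face `TrainingArguments` fields; a minimal sketch of how they might be expressed (hyperparameter values are taken from the list above, `output_dir` is a placeholder):
```python
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="wav2vec2-adult-child-cls",  # placeholder
    learning_rate=3e-5,
    per_device_train_batch_size=32,
    per_device_eval_batch_size=32,
    seed=42,
    lr_scheduler_type="linear",
    warmup_ratio=0.1,
    num_train_epochs=5,
    # Adam betas=(0.9, 0.999) and epsilon=1e-08 are the TrainingArguments defaults.
)
```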
### Training results
| Training Loss | Epoch | Step | Validation Loss | Accuracy | F1 |
| :-----------: | :---: | :--: | :-------------: | :------: | :----: |
| 0.2709 | 1.0 | 384 | 0.2616 | 0.9104 | 0.9142 |
| 0.2112 | 2.0 | 768 | 0.1826 | 0.9386 | 0.9421 |
| 0.1755 | 3.0 | 1152 | 0.1898 | 0.9354 | 0.9428 |
| 0.0915 | 4.0 | 1536 | 0.1682 | 0.9580 | 0.9618 |
| 0.1042 | 5.0 | 1920 | 0.1717 | 0.9511 | 0.9554 |
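For inference, a minimal sketch using the `audio-classification` pipeline (the audio path is a placeholder, not part of the original card):
```python
from transformers import pipeline

classifier = pipeline(
    "audio-classification",
    model="bookbot/wav2vec2-adult-child-cls"
)
# "speech.wav" is a placeholder path to a local recording.
print(classifier("speech.wav"))
```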
## Disclaimer
Do consider the biases which came from pre-training datasets that may be carried over into the results of this model.
## Authors
Wav2Vec2 Adult/Child Speech Classifier was trained and evaluated by [Wilson Wongso](https://w11wo.github.io/). All computation and development are done on Kaggle.
## Framework versions
- Transformers 4.16.2
- Pytorch 1.10.2+cu102
- Datasets 1.18.3
- Tokenizers 0.10.3
|
{"language": "en", "license": "apache-2.0", "tags": ["audio-classification", "generated_from_trainer"], "metrics": ["accuracy", "f1"], "base_model": "wav2vec2-base", "model-index": [{"name": "wav2vec2-adult-child-cls", "results": []}]}
|
audio-classification
|
bookbot/wav2vec2-adult-child-cls
|
[
"transformers",
"pytorch",
"tensorboard",
"safetensors",
"wav2vec2",
"audio-classification",
"generated_from_trainer",
"en",
"arxiv:2006.11477",
"base_model:wav2vec2-base",
"license:apache-2.0",
"endpoints_compatible",
"has_space",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"2006.11477"
] |
[
"en"
] |
TAGS
#transformers #pytorch #tensorboard #safetensors #wav2vec2 #audio-classification #generated_from_trainer #en #arxiv-2006.11477 #base_model-wav2vec2-base #license-apache-2.0 #endpoints_compatible #has_space #region-us
|
Wav2Vec2 Adult/Child Speech Classifier
======================================
Wav2Vec2 Adult/Child Speech Classifier is an audio classification model based on the wav2vec 2.0 architecture. This model is a fine-tuned version of wav2vec2-base on a private adult/child speech classification dataset.
This model was trained using HuggingFace's PyTorch framework. All training was done on a Tesla P100, provided by Kaggle. Training metrics were logged via Tensorboard.
Model
-----
Evaluation Results
------------------
The model achieves the following results on evaluation:
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* 'learning\_rate': 3e-05
* 'train\_batch\_size': 32
* 'eval\_batch\_size': 32
* 'seed': 42
* 'optimizer': Adam with 'betas=(0.9,0.999)' and 'epsilon=1e-08'
* 'lr\_scheduler\_type': linear
* 'lr\_scheduler\_warmup\_ratio': 0.1
* 'num\_epochs': 5
### Training results
Disclaimer
----------
Do consider the biases that come from the pre-training datasets, as they may carry over into this model's results.
Authors
-------
Wav2Vec2 Adult/Child Speech Classifier was trained and evaluated by Wilson Wongso. All computation and development were done on Kaggle.
Framework versions
------------------
* Transformers 4.16.2
* Pytorch 1.10.2+cu102
* Datasets 1.18.3
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* 'learning\\_rate': 3e-05\n* 'train\\_batch\\_size': 32\n* 'eval\\_batch\\_size': 32\n* 'seed': 42\n* 'optimizer': Adam with 'betas=(0.9,0.999)' and 'epsilon=1e-08'\n* 'lr\\_scheduler\\_type': linear\n* 'lr\\_scheduler\\_warmup\\_ratio': 0.1\n* 'num\\_epochs': 5",
"### Training results\n\n\n\nDisclaimer\n----------\n\n\nDo consider the biases which came from pre-training datasets that may be carried over into the results of this model.\n\n\nAuthors\n-------\n\n\nWav2Vec2 Adult/Child Speech Classifier was trained and evaluated by Wilson Wongso. All computation and development are done on Kaggle.\n\n\nFramework versions\n------------------\n\n\n* Transformers 4.16.2\n* Pytorch 1.10.2+cu102\n* Datasets 1.18.3\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #safetensors #wav2vec2 #audio-classification #generated_from_trainer #en #arxiv-2006.11477 #base_model-wav2vec2-base #license-apache-2.0 #endpoints_compatible #has_space #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* 'learning\\_rate': 3e-05\n* 'train\\_batch\\_size': 32\n* 'eval\\_batch\\_size': 32\n* 'seed': 42\n* 'optimizer': Adam with 'betas=(0.9,0.999)' and 'epsilon=1e-08'\n* 'lr\\_scheduler\\_type': linear\n* 'lr\\_scheduler\\_warmup\\_ratio': 0.1\n* 'num\\_epochs': 5",
"### Training results\n\n\n\nDisclaimer\n----------\n\n\nDo consider the biases which came from pre-training datasets that may be carried over into the results of this model.\n\n\nAuthors\n-------\n\n\nWav2Vec2 Adult/Child Speech Classifier was trained and evaluated by Wilson Wongso. All computation and development are done on Kaggle.\n\n\nFramework versions\n------------------\n\n\n* Transformers 4.16.2\n* Pytorch 1.10.2+cu102\n* Datasets 1.18.3\n* Tokenizers 0.10.3"
] |
[
83,
137,
108
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #safetensors #wav2vec2 #audio-classification #generated_from_trainer #en #arxiv-2006.11477 #base_model-wav2vec2-base #license-apache-2.0 #endpoints_compatible #has_space #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* 'learning\\_rate': 3e-05\n* 'train\\_batch\\_size': 32\n* 'eval\\_batch\\_size': 32\n* 'seed': 42\n* 'optimizer': Adam with 'betas=(0.9,0.999)' and 'epsilon=1e-08'\n* 'lr\\_scheduler\\_type': linear\n* 'lr\\_scheduler\\_warmup\\_ratio': 0.1\n* 'num\\_epochs': 5### Training results\n\n\n\nDisclaimer\n----------\n\n\nDo consider the biases which came from pre-training datasets that may be carried over into the results of this model.\n\n\nAuthors\n-------\n\n\nWav2Vec2 Adult/Child Speech Classifier was trained and evaluated by Wilson Wongso. All computation and development are done on Kaggle.\n\n\nFramework versions\n------------------\n\n\n* Transformers 4.16.2\n* Pytorch 1.10.2+cu102\n* Datasets 1.18.3\n* Tokenizers 0.10.3"
] |
[
-0.10256730765104294,
0.12612445652484894,
-0.0025595847982913256,
0.046955425292253494,
0.09244760125875473,
-0.01987423188984394,
0.17422333359718323,
0.0846189633011818,
-0.04161715507507324,
0.0914444848895073,
0.06851416081190109,
0.12268886715173721,
0.09227952361106873,
0.0877198725938797,
-0.0439714640378952,
-0.18458454310894012,
0.03491001948714256,
-0.01715395785868168,
-0.06277330964803696,
0.13359138369560242,
0.06519074738025665,
-0.1204070895910263,
0.04387551546096802,
0.04705045744776726,
-0.11213085800409317,
-0.07429926097393036,
-0.012885138392448425,
-0.05081479996442795,
0.07674571871757507,
0.054211582988500595,
0.10290556401014328,
0.06960168480873108,
0.026314014568924904,
-0.28899890184402466,
0.016495242714881897,
0.06429583579301834,
0.0470980666577816,
0.06670383363962173,
0.07054989039897919,
-0.009359659627079964,
0.07806238532066345,
-0.06324765086174011,
0.0382935032248497,
0.05476551875472069,
-0.10856004059314728,
-0.2384025901556015,
-0.10934241116046906,
0.05345277115702629,
0.15174280107021332,
0.04896922782063484,
-0.06128142774105072,
0.09247143566608429,
-0.03950662538409233,
0.10927581787109375,
0.22162707149982452,
-0.27499300241470337,
-0.04375169053673744,
0.019622119143605232,
0.03721392899751663,
0.015936294570565224,
-0.12135817110538483,
-0.00010802716133184731,
0.06832586973905563,
-0.03849608451128006,
0.06090777367353439,
-0.03958873823285103,
0.023499147966504097,
-0.05248783901333809,
-0.14845675230026245,
-0.06784040480852127,
0.19740313291549683,
0.08598585426807404,
-0.09137168526649475,
-0.1503024846315384,
-0.004174457862973213,
-0.12565384805202484,
-0.0012316361535340548,
-0.021771231666207314,
0.009812318719923496,
-0.06823962926864624,
-0.009679204784333706,
-0.06817557662725449,
-0.11495649069547653,
-0.054395779967308044,
-0.0139542818069458,
0.2811111807823181,
0.035032957792282104,
-0.00132196513004601,
0.01707952842116356,
0.11641918122768402,
0.11139029264450073,
-0.14140066504478455,
-0.03708275035023689,
-0.0002351307775825262,
-0.10572253912687302,
-0.045011889189481735,
-0.019143592566251755,
0.03325652331113815,
0.01118694432079792,
0.14429333806037903,
-0.04100916162133217,
0.05384407192468643,
-0.014699414372444153,
0.012535634450614452,
0.006880161818116903,
0.146381676197052,
-0.04073493182659149,
-0.011057896539568901,
-0.004347620997577906,
0.06986802071332932,
0.02234520949423313,
-0.04716784507036209,
-0.036572474986314774,
0.018822381272912025,
0.07952413707971573,
0.05939110368490219,
0.031418781727552414,
0.06787186861038208,
-0.07885157316923141,
-0.06175965070724487,
0.022364282980561256,
-0.1256089210510254,
-0.004739483818411827,
0.09468098729848862,
-0.05919778719544411,
0.01997707411646843,
0.011076927185058594,
0.04823881760239601,
-0.06957001984119415,
0.05866702273488045,
-0.08918353170156479,
0.007515464909374714,
-0.030856948345899582,
-0.09042094647884369,
0.017877941951155663,
-0.026740584522485733,
-0.009588061831891537,
-0.05574535205960274,
-0.0801013931632042,
-0.0622815378010273,
0.010677790269255638,
-0.002410844201222062,
-0.06648910790681839,
-0.08365240693092346,
-0.07939739525318146,
0.014994065277278423,
-0.034941621124744415,
0.10510658472776413,
-0.04519855976104736,
0.0932757556438446,
0.03592879697680473,
0.019817933440208435,
0.09990007430315018,
0.0415496900677681,
-0.05470529943704605,
0.046656131744384766,
-0.09178005158901215,
0.13379183411598206,
-0.10310047119855881,
0.00937125738710165,
-0.14156511425971985,
-0.11266451328992844,
0.000004348798029241152,
0.010625912807881832,
0.03770601749420166,
0.17797458171844482,
-0.07207076251506805,
-0.08051285147666931,
0.17143502831459045,
-0.10614797472953796,
-0.1318918913602829,
0.10069454461336136,
-0.0432325042784214,
0.04163355007767677,
0.07980091124773026,
0.12938383221626282,
0.10334881395101547,
-0.130568265914917,
-0.05866037309169769,
-0.05960553139448166,
0.10767391324043274,
0.08611058443784714,
0.1117214784026146,
-0.049321498721838,
0.020741865038871765,
0.020447511225938797,
-0.1069614514708519,
-0.024244127795100212,
-0.058677490800619125,
-0.07336683571338654,
0.0073281326331198215,
-0.06320164352655411,
0.07557330280542374,
0.024513939395546913,
0.03809143975377083,
-0.03691092133522034,
-0.12627191841602325,
0.02749473974108696,
0.11067520081996918,
-0.1155969426035881,
0.021683219820261,
-0.11163580417633057,
0.061379577964544296,
-0.06337179243564606,
-0.007030065171420574,
-0.152706116437912,
-0.08491966128349304,
0.020585285499691963,
-0.09367852658033371,
0.018176501616835594,
0.023585595190525055,
0.029295574873685837,
0.03584869205951691,
-0.023057954385876656,
-0.051119524985551834,
-0.04557128995656967,
-0.00204028794541955,
-0.02489565871655941,
-0.1964607536792755,
-0.05789438635110855,
-0.028024321421980858,
0.2332436442375183,
-0.25947460532188416,
0.026922205463051796,
0.10687007009983063,
0.09571649134159088,
0.03582156449556351,
-0.0696529895067215,
0.027111900970339775,
0.026098674163222313,
-0.03302507475018501,
-0.0314566045999527,
0.030007030814886093,
0.010082068853080273,
-0.11217653006315231,
0.02988230437040329,
-0.19452887773513794,
-0.020631449297070503,
0.1265680491924286,
-0.023061927407979965,
-0.06873994320631027,
-0.07667016237974167,
-0.054593805223703384,
-0.04429461061954498,
-0.042586687952280045,
-0.005694595165550709,
0.16949650645256042,
0.045331694185733795,
0.10967390984296799,
-0.09485183656215668,
-0.08682636171579361,
0.030159849673509598,
-0.029833992943167686,
-0.005991969723254442,
0.12396559119224548,
-0.023841911926865578,
-0.07990359514951706,
0.10353910177946091,
0.04044986888766289,
-0.02576524019241333,
0.11743531376123428,
-0.04178956151008606,
-0.0714285597205162,
-0.08222811669111252,
0.06722016632556915,
0.00703297508880496,
0.09011094272136688,
-0.14119784533977509,
-0.0039223600178956985,
0.025846421718597412,
0.004529052879661322,
0.0032363873906433582,
-0.1898934692144394,
0.01919916458427906,
0.015459418296813965,
-0.06702793389558792,
-0.03910161554813385,
-0.008696488104760647,
-0.0009640554198995233,
0.08210041373968124,
-0.0215999074280262,
0.06707930564880371,
0.008354405872523785,
-0.06011471524834633,
-0.12012136727571487,
0.2058577984571457,
-0.04599375277757645,
-0.15548159182071686,
-0.10812154412269592,
-0.04677033796906471,
-0.09442365169525146,
-0.009274334646761417,
0.030230900272727013,
-0.04440392553806305,
-0.024859711527824402,
-0.07924004644155502,
0.052695754915475845,
0.006918410304933786,
-0.026749642565846443,
0.023153692483901978,
-0.010865854099392891,
0.047877416014671326,
-0.11541534960269928,
0.019872117787599564,
-0.01944817043840885,
-0.05897736921906471,
0.01800335757434368,
0.06840743869543076,
0.030108092352747917,
0.15903311967849731,
0.06439658999443054,
-0.00016720300482120365,
-0.045182108879089355,
0.1963219940662384,
-0.14254693686962128,
-0.016464602202177048,
0.105270154774189,
-0.0636715367436409,
0.029332561418414116,
0.13768495619297028,
0.009364825673401356,
-0.094000905752182,
0.04312843829393387,
0.06685148924589157,
-0.03850330784916878,
-0.30596429109573364,
-0.04300546273589134,
-0.07505901157855988,
0.0070661394856870174,
0.0673372745513916,
0.007369250524789095,
-0.03574168682098389,
0.07444299757480621,
-0.07715293020009995,
0.00020793233125004917,
0.06699977070093155,
0.07201603800058365,
0.13691842555999756,
0.021395966410636902,
0.13060200214385986,
-0.030582742765545845,
-0.02931600622832775,
0.02091301418840885,
0.03698084503412247,
0.1866144835948944,
-0.03218834474682808,
0.17649365961551666,
0.08966542780399323,
0.12528052926063538,
-0.01364259235560894,
0.024033810943365097,
0.038462135940790176,
0.011587179265916348,
0.027514319866895676,
-0.07229737937450409,
-0.08663589507341385,
0.08167412132024765,
0.0791136771440506,
-0.0010757959680631757,
-0.08349434286355972,
-0.037158314138650894,
0.01759151555597782,
0.3143061697483063,
0.10896151512861252,
-0.2219981700181961,
-0.11010520905256271,
0.05176178738474846,
-0.03992561995983124,
-0.019284699112176895,
0.0015166731318458915,
0.11036652326583862,
-0.1313788890838623,
0.09825077652931213,
-0.05661093816161156,
0.07451726496219635,
-0.1425221711397171,
-0.029830673709511757,
-0.0043510072864592075,
0.0025346095208078623,
-0.023049211129546165,
0.05748333781957626,
-0.232129767537117,
0.22336722910404205,
-0.0112858135253191,
0.06614001840353012,
-0.04841648414731026,
0.03219706937670708,
-0.0006431450019590557,
-0.04531874880194664,
0.1450960338115692,
-0.001660734647884965,
-0.027908945456147194,
-0.17043739557266235,
-0.10113793611526489,
0.022102897986769676,
0.11230898648500443,
-0.09893923252820969,
0.11364695429801941,
-0.02632642723619938,
0.040991928428411484,
0.0071486057713627815,
-0.01023892592638731,
-0.058843307197093964,
-0.07745876163244247,
0.07014652341604233,
-0.03632808104157448,
0.032759811729192734,
-0.06003411114215851,
-0.11239191889762878,
-0.15453962981700897,
0.17575861513614655,
-0.15836010873317719,
-0.07338494807481766,
-0.090779609978199,
0.0033147342037409544,
0.0816507488489151,
-0.08675271272659302,
0.031564947217702866,
0.011367898434400558,
0.13416121900081635,
0.026609985157847404,
0.006173992995172739,
0.062377531081438065,
-0.04572240263223648,
-0.24918892979621887,
-0.004396818112581968,
0.1904693990945816,
0.045594580471515656,
0.033107683062553406,
0.006205699872225523,
0.0028854317497462034,
0.02810506708920002,
-0.10716640204191208,
0.05652390047907829,
0.06717060506343842,
0.029398569837212563,
0.10053640604019165,
0.00004128456566832028,
-0.11884880810976028,
-0.14771050214767456,
-0.04547916352748871,
0.10423318296670914,
0.3866780698299408,
-0.062190115451812744,
0.03682425990700722,
0.08653419464826584,
-0.09421762824058533,
-0.1819004863500595,
0.013390245847404003,
0.11125542968511581,
0.01816634088754654,
0.05157972127199173,
-0.16222161054611206,
0.0825527086853981,
0.060107793658971786,
-0.0506141223013401,
0.10940807312726974,
-0.2543907165527344,
-0.14908228814601898,
0.13412615656852722,
0.11804548650979996,
0.03375351428985596,
-0.15832829475402832,
-0.0870489627122879,
0.017658038064837456,
-0.12921665608882904,
0.1642836183309555,
-0.052531298249959946,
0.1163310706615448,
0.04268719255924225,
0.05124366283416748,
0.04721268266439438,
-0.05881933495402336,
0.146375373005867,
0.03391899913549423,
0.04264133796095848,
-0.053134746849536896,
-0.05616634711623192,
0.024687688797712326,
-0.060400526970624924,
0.03362274914979935,
-0.012951523996889591,
0.04244706407189369,
-0.10804550349712372,
-0.04767666012048721,
-0.09447648376226425,
0.05643882602453232,
-0.07515893876552582,
-0.05073348060250282,
-0.057447340339422226,
0.0726250633597374,
0.07251361757516861,
0.022970443591475487,
0.09095128625631332,
-0.056000884622335434,
0.0659237876534462,
0.1876031458377838,
0.1293499916791916,
0.11198078095912933,
-0.06873541325330734,
-0.04873191565275192,
-0.007637541741132736,
0.06035769730806351,
-0.10320263355970383,
0.06506678462028503,
0.12192492932081223,
0.08034414052963257,
0.16659536957740784,
0.01659701019525528,
-0.10533036291599274,
0.0328628346323967,
0.013195743784308434,
-0.11383625864982605,
-0.16943158209323883,
0.0020185799803584814,
-0.08015692979097366,
-0.13571681082248688,
0.04002171382308006,
0.15683627128601074,
-0.005055128131061792,
-0.0043105860240757465,
0.008208065293729305,
0.034926071763038635,
-0.004083090927451849,
0.14569491147994995,
0.024804282933473587,
0.08774562925100327,
-0.07788094878196716,
0.06322193145751953,
0.08111602067947388,
-0.09782422333955765,
0.05145852640271187,
-0.04253283143043518,
-0.05008789151906967,
-0.03147986903786659,
-0.09108776599168777,
-0.03769436851143837,
0.05076323077082634,
-0.027184048667550087,
-0.0668930783867836,
-0.1265757530927658,
0.01365066971629858,
0.06098494306206703,
0.0400475449860096,
0.08365274965763092,
0.015496999025344849,
0.03263210132718086,
-0.11574918776750565,
0.1411147564649582,
0.03357551991939545,
0.02767782099545002,
-0.13482581079006195,
0.13099370896816254,
0.03500288724899292,
0.030627120286226273,
-0.00305554224178195,
-0.039994314312934875,
-0.11460170894861221,
0.029519876465201378,
-0.059305496513843536,
0.037456683814525604,
-0.08647570013999939,
0.008475151844322681,
-0.013792441226541996,
-0.09198993444442749,
-0.07791220396757126,
0.03870600834488869,
-0.08664921671152115,
0.0008698035380803049,
0.01579134911298752,
0.09954353421926498,
-0.14403200149536133,
-0.04256027191877365,
0.08338085561990738,
-0.1196303591132164,
0.09905592352151871,
0.029481908306479454,
-0.024427276104688644,
0.04020317271351814,
-0.07833294570446014,
0.08506496250629425,
0.032116275280714035,
0.03165267035365105,
-0.002062525600194931,
-0.2315429002046585,
-0.008751428686082363,
-0.006113909650593996,
0.03394364193081856,
0.005307129118591547,
0.06611303985118866,
-0.1321057379245758,
-0.08485777676105499,
-0.011508231051266193,
-0.03391479328274727,
-0.05492403358221054,
0.04037941247224808,
0.033710628747940063,
0.028791485354304314,
0.21207064390182495,
-0.028696808964014053,
0.09141315519809723,
-0.19263006746768951,
-0.006857334170490503,
0.00864188652485609,
-0.0406126044690609,
-0.013868873938918114,
-0.02586195059120655,
0.06737782061100006,
-0.0831909254193306,
0.13385581970214844,
-0.09231546521186829,
0.010054746642708778,
0.03730791062116623,
-0.06817210465669632,
-0.0123495664447546,
0.06404270231723785,
0.19443723559379578,
0.006183135788887739,
-0.012192979454994202,
0.06282516568899155,
-0.006659545004367828,
0.0061797043308615685,
0.08529030531644821,
0.16662272810935974,
0.11231172829866409,
-0.02871333807706833,
0.07229115813970566,
0.04298650473356247,
-0.07842344790697098,
-0.11292842775583267,
0.10401222854852676,
-0.09349104017019272,
0.08755761384963989,
-0.032348182052373886,
0.13537484407424927,
0.12782245874404907,
-0.19957560300827026,
0.07552390545606613,
-0.018171504139900208,
-0.10642004013061523,
-0.12240347266197205,
-0.16988280415534973,
-0.07313326001167297,
-0.08092907816171646,
0.04008864238858223,
-0.12107151001691818,
0.0011382122756913304,
0.0918528363108635,
0.026967037469148636,
-0.012789950706064701,
0.17264600098133087,
-0.004999264143407345,
-0.059667084366083145,
0.0960673913359642,
0.010422524996101856,
-0.03215312212705612,
-0.05068134143948555,
-0.059331513941287994,
0.07182420790195465,
0.03702274337410927,
0.09495317190885544,
-0.006739730015397072,
-0.051958344876766205,
0.05273720994591713,
-0.03225703537464142,
-0.09168123453855515,
0.00258440850302577,
0.010319222696125507,
0.11085158586502075,
0.1025829166173935,
0.0542980320751667,
0.012708505615592003,
-0.015597349032759666,
0.17908993363380432,
-0.06617173552513123,
-0.001605259720236063,
-0.14500939846038818,
0.15672767162322998,
-0.005627197679132223,
-0.00001381234142172616,
0.056569453328847885,
-0.08386226743459702,
0.03249921277165413,
0.18836946785449982,
0.1408955603837967,
0.0055366395972669125,
0.0054199532605707645,
0.000422115670517087,
0.006679662503302097,
0.013325836509466171,
0.07175164669752121,
0.056710004806518555,
0.0809122622013092,
-0.029441390186548233,
-0.017354147508740425,
-0.0031820745207369328,
-0.04154222458600998,
0.0580078661441803,
0.13751782476902008,
-0.011904031969606876,
-0.010814419947564602,
-0.06491237878799438,
0.09695524722337723,
-0.10842505842447281,
-0.21218623220920563,
0.037581223994493484,
-0.11407352983951569,
-0.1656060516834259,
0.003567874664440751,
0.019056744873523712,
0.028062203899025917,
0.03621642664074898,
-0.003548522014170885,
-0.042133063077926636,
0.12881705164909363,
0.0015159130562096834,
-0.006960292812436819,
-0.04442933574318886,
0.08160681277513504,
-0.0833621621131897,
0.20157909393310547,
-0.0003926361387129873,
0.1377515345811844,
0.10218606889247894,
0.017950279638171196,
-0.06502075493335724,
0.05019316449761391,
0.048730429261922836,
-0.11286665499210358,
0.0317869670689106,
0.1703268438577652,
-0.01821279153227806,
0.12452834099531174,
0.09253805875778198,
-0.06593455374240875,
0.038650333881378174,
-0.02861461415886879,
-0.04062260687351227,
-0.10840227454900742,
0.0339631550014019,
-0.10195252299308777,
0.1266680508852005,
0.17308275401592255,
-0.07614602148532867,
-0.029941821470856667,
-0.03256739675998688,
0.005339556373655796,
0.008582285605370998,
0.12713685631752014,
-0.003390190890058875,
-0.2712012529373169,
0.03615313768386841,
-0.04574672132730484,
0.0558931864798069,
-0.26136428117752075,
-0.05255329608917236,
0.05015789717435837,
-0.07384221255779266,
-0.03329133987426758,
0.09404285252094269,
0.019415661692619324,
0.040057774633169174,
-0.07846453785896301,
-0.07816729694604874,
-0.0525917150080204,
0.1631886214017868,
-0.15684737265110016,
-0.09421675652265549
] |
null | null |
transformers
|
# Wav2Vec2 XLS-R Adult/Child Speech Classifier
Wav2Vec2 XLS-R Adult/Child Speech Classifier is an audio classification model based on the [XLS-R](https://arxiv.org/abs/2111.09296) architecture. This model is a fine-tuned version of [wav2vec2-xls-r-300m](https://huggingface.co/facebook/wav2vec2-xls-r-300m) on a private adult/child speech classification dataset.
This model was trained using HuggingFace's PyTorch framework. All training was done on a Tesla P100, provided by Kaggle. Training metrics were logged via Tensorboard.
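For reference, the snippet below is a lower-level inference sketch (an illustration under stated assumptions, not code from the card): it loads the feature extractor and classification model directly, and it assumes 16 kHz mono input (the usual wav2vec 2.0 sampling rate) and that the repository ships a preprocessor config.

```python
import torch
from transformers import AutoFeatureExtractor, AutoModelForAudioClassification

model_id = "bookbot/wav2vec2-xls-r-adult-child-cls"
feature_extractor = AutoFeatureExtractor.from_pretrained(model_id)
model = AutoModelForAudioClassification.from_pretrained(model_id)

def classify(waveform, sampling_rate=16_000):
    # `waveform` is a 1-D array of raw audio samples (e.g. loaded with librosa).
    inputs = feature_extractor(waveform, sampling_rate=sampling_rate, return_tensors="pt")
    with torch.no_grad():
        logits = model(**inputs).logits
    predicted_id = int(logits.argmax(dim=-1))
    return model.config.id2label[predicted_id]
```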
## Model
| Model | #params | Arch. | Training/Validation data (audio) |
| -------------------------------- | ------- | ----- | ----------------------------------------- |
| `wav2vec2-xls-r-adult-child-cls` | 300M | XLS-R | Adult/Child Speech Classification Dataset |
## Evaluation Results
The model achieves the following results on evaluation:
| Dataset | Loss | Accuracy | F1 |
| --------------------------------- | ------ | -------- | ------ |
| Adult/Child Speech Classification | 0.1851 | 94.69% | 0.9508 |
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training (a sketch of the gradient-accumulation setup follows the list):
- `learning_rate`: 3e-05
- `train_batch_size`: 8
- `eval_batch_size`: 8
- `seed`: 42
- `gradient_accumulation_steps`: 4
- `total_train_batch_size`: 32
- `optimizer`: Adam with `betas=(0.9,0.999)` and `epsilon=1e-08`
- `lr_scheduler_type`: linear
- `lr_scheduler_warmup_ratio`: 0.1
- `num_epochs`: 5
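The sketch below illustrates how the per-device batch size of 8 and 4 gradient-accumulation steps combine into the total train batch size of 32; it assumes the standard `Trainer` API, and the output directory is a placeholder. Gradient accumulation presumably keeps the larger 300M-parameter model within the P100's memory while preserving the effective batch size.

```python
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="wav2vec2-xls-r-adult-child-cls",  # placeholder
    learning_rate=3e-5,
    per_device_train_batch_size=8,
    per_device_eval_batch_size=8,
    gradient_accumulation_steps=4,  # 8 * 4 = 32 effective train batch size
    seed=42,
    lr_scheduler_type="linear",
    warmup_ratio=0.1,
    num_train_epochs=5,
)
```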
### Training results
| Training Loss | Epoch | Step | Validation Loss | Accuracy | F1 |
| :-----------: | :---: | :--: | :-------------: | :------: | :----: |
| 0.2906 | 1.0 | 383 | 0.1856 | 0.9372 | 0.9421 |
| 0.1749 | 2.0 | 766 | 0.1925 | 0.9418 | 0.9465 |
| 0.1681 | 3.0 | 1149 | 0.1893 | 0.9414 | 0.9459 |
| 0.1295 | 4.0 | 1532 | 0.1851 | 0.9469 | 0.9508 |
| 0.2031 | 5.0 | 1915 | 0.1944 | 0.9423 | 0.9460 |
## Disclaimer
Do consider the biases that come from the pre-training datasets, as they may carry over into this model's results.
## Authors
Wav2Vec2 XLS-R Adult/Child Speech Classifier was trained and evaluated by [Wilson Wongso](https://w11wo.github.io/). All computation and development were done on Kaggle.
## Framework versions
- Transformers 4.17.0.dev0
- Pytorch 1.10.2+cu102
- Datasets 1.18.3
- Tokenizers 0.11.0
|
{"language": "en", "license": "apache-2.0", "tags": ["audio-classification", "generated_from_trainer"], "metrics": ["accuracy", "f1"], "model-index": [{"name": "wav2vec2-xls-r-adult-child-cls", "results": []}]}
|
audio-classification
|
bookbot/wav2vec2-xls-r-adult-child-cls
|
[
"transformers",
"pytorch",
"tensorboard",
"wav2vec2",
"audio-classification",
"generated_from_trainer",
"en",
"arxiv:2111.09296",
"license:apache-2.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"2111.09296"
] |
[
"en"
] |
TAGS
#transformers #pytorch #tensorboard #wav2vec2 #audio-classification #generated_from_trainer #en #arxiv-2111.09296 #license-apache-2.0 #endpoints_compatible #region-us
|
Wav2Vec2 XLS-R Adult/Child Speech Classifier
============================================
Wav2Vec2 XLS-R Adult/Child Speech Classifier is an audio classification model based on the XLS-R architecture. This model is a fine-tuned version of wav2vec2-xls-r-300m on a private adult/child speech classification dataset.
This model was trained using HuggingFace's PyTorch framework. All training was done on a Tesla P100, provided by Kaggle. Training metrics were logged via Tensorboard.
Model
-----
Evaluation Results
------------------
The model achieves the following results on evaluation:
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* 'learning\_rate': 3e-05
* 'train\_batch\_size': 8
* 'eval\_batch\_size': 8
* 'seed': 42
* 'gradient\_accumulation\_steps': 4
* 'total\_train\_batch\_size': 32
* 'optimizer': Adam with 'betas=(0.9,0.999)' and 'epsilon=1e-08'
* 'lr\_scheduler\_type': linear
* 'lr\_scheduler\_warmup\_ratio': 0.1
* 'num\_epochs': 5
### Training results
Disclaimer
----------
Do consider the biases that come from the pre-training datasets, as they may carry over into this model's results.
Authors
-------
Wav2Vec2 XLS-R Adult/Child Speech Classifier was trained and evaluated by Wilson Wongso. All computation and development were done on Kaggle.
Framework versions
------------------
* Transformers 4.17.0.dev0
* Pytorch 1.10.2+cu102
* Datasets 1.18.3
* Tokenizers 0.11.0
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* 'learning\\_rate': 3e-05\n* 'train\\_batch\\_size': 8\n* 'eval\\_batch\\_size': 8\n* 'seed': 42\n* 'gradient\\_accumulation\\_steps': 4\n* 'total\\_train\\_batch\\_size': 32\n* 'optimizer': Adam with 'betas=(0.9,0.999)' and 'epsilon=1e-08'\n* 'lr\\_scheduler\\_type': linear\n* 'lr\\_scheduler\\_warmup\\_ratio': 0.1\n* 'num\\_epochs': 5",
"### Training results\n\n\n\nDisclaimer\n----------\n\n\nDo consider the biases which came from pre-training datasets that may be carried over into the results of this model.\n\n\nAuthors\n-------\n\n\nWav2Vec2 XLS-R Adult/Child Speech Classifier was trained and evaluated by Wilson Wongso. All computation and development are done on Kaggle.\n\n\nFramework versions\n------------------\n\n\n* Transformers 4.17.0.dev0\n* Pytorch 1.10.2+cu102\n* Datasets 1.18.3\n* Tokenizers 0.11.0"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #audio-classification #generated_from_trainer #en #arxiv-2111.09296 #license-apache-2.0 #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* 'learning\\_rate': 3e-05\n* 'train\\_batch\\_size': 8\n* 'eval\\_batch\\_size': 8\n* 'seed': 42\n* 'gradient\\_accumulation\\_steps': 4\n* 'total\\_train\\_batch\\_size': 32\n* 'optimizer': Adam with 'betas=(0.9,0.999)' and 'epsilon=1e-08'\n* 'lr\\_scheduler\\_type': linear\n* 'lr\\_scheduler\\_warmup\\_ratio': 0.1\n* 'num\\_epochs': 5",
"### Training results\n\n\n\nDisclaimer\n----------\n\n\nDo consider the biases which came from pre-training datasets that may be carried over into the results of this model.\n\n\nAuthors\n-------\n\n\nWav2Vec2 XLS-R Adult/Child Speech Classifier was trained and evaluated by Wilson Wongso. All computation and development are done on Kaggle.\n\n\nFramework versions\n------------------\n\n\n* Transformers 4.17.0.dev0\n* Pytorch 1.10.2+cu102\n* Datasets 1.18.3\n* Tokenizers 0.11.0"
] |
[
63,
170,
115
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #audio-classification #generated_from_trainer #en #arxiv-2111.09296 #license-apache-2.0 #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* 'learning\\_rate': 3e-05\n* 'train\\_batch\\_size': 8\n* 'eval\\_batch\\_size': 8\n* 'seed': 42\n* 'gradient\\_accumulation\\_steps': 4\n* 'total\\_train\\_batch\\_size': 32\n* 'optimizer': Adam with 'betas=(0.9,0.999)' and 'epsilon=1e-08'\n* 'lr\\_scheduler\\_type': linear\n* 'lr\\_scheduler\\_warmup\\_ratio': 0.1\n* 'num\\_epochs': 5### Training results\n\n\n\nDisclaimer\n----------\n\n\nDo consider the biases which came from pre-training datasets that may be carried over into the results of this model.\n\n\nAuthors\n-------\n\n\nWav2Vec2 XLS-R Adult/Child Speech Classifier was trained and evaluated by Wilson Wongso. All computation and development are done on Kaggle.\n\n\nFramework versions\n------------------\n\n\n* Transformers 4.17.0.dev0\n* Pytorch 1.10.2+cu102\n* Datasets 1.18.3\n* Tokenizers 0.11.0"
] |
[
-0.08235130459070206,
0.14090412855148315,
-0.0036267207469791174,
0.0541808120906353,
0.08239728957414627,
0.0066785127855837345,
0.1466386318206787,
0.11140517145395279,
-0.0338878259062767,
0.10909376293420792,
0.07632751017808914,
0.08317271620035172,
0.09659299999475479,
0.07553993910551071,
-0.04756038635969162,
-0.23142214119434357,
0.022985687479376793,
-0.06842981278896332,
-0.09411927312612534,
0.13604912161827087,
0.06407155841588974,
-0.1033235713839531,
0.05923659726977348,
0.019754748791456223,
-0.0761750340461731,
-0.05259869247674942,
-0.025305364280939102,
-0.05134870111942291,
0.07665814459323883,
0.05240713804960251,
0.08588821440935135,
0.0814996063709259,
0.07600495219230652,
-0.28348031640052795,
0.009981825016438961,
0.07435769587755203,
0.04097025841474533,
0.07897534966468811,
0.11057931184768677,
-0.03975750878453255,
0.13765735924243927,
-0.06849897652864456,
0.04767577722668648,
0.04857376962900162,
-0.10821092873811722,
-0.21998435258865356,
-0.07873626798391342,
0.047200728207826614,
0.12527497112751007,
0.05758966505527496,
-0.05411055311560631,
0.08619922399520874,
-0.05107469484210014,
0.08643028140068054,
0.2031511664390564,
-0.23828276991844177,
-0.03503832593560219,
0.006769285537302494,
0.030436474829912186,
0.05789542198181152,
-0.11490083485841751,
-0.009996389970183372,
0.043863892555236816,
-0.024299906566739082,
0.05074241757392883,
-0.025346489623188972,
0.059587717056274414,
-0.003286520019173622,
-0.1550978720188141,
-0.07526657730340958,
0.14216174185276031,
0.07912754267454147,
-0.07169549912214279,
-0.1292780041694641,
-0.0131700299680233,
-0.1553114354610443,
0.016082296147942543,
-0.0051947301253676414,
0.007290554232895374,
-0.05456992983818054,
-0.02369863912463188,
-0.027475498616695404,
-0.08624354004859924,
-0.06476307660341263,
0.020376425236463547,
0.2211349755525589,
0.030645916238427162,
0.00284675695002079,
0.028086338192224503,
0.11700000613927841,
0.11310544610023499,
-0.14755387604236603,
-0.008903698064386845,
-0.012244479730725288,
-0.12527719140052795,
-0.021439088508486748,
-0.022213377058506012,
0.04628116264939308,
0.010880467481911182,
0.1362985521554947,
-0.028862101957201958,
0.07221316546201706,
0.002319890074431896,
0.0025105660315603018,
0.010561354458332062,
0.14620816707611084,
-0.06877192109823227,
-0.01572735235095024,
-0.04381026700139046,
0.08047430962324142,
-0.010963523760437965,
-0.04606986418366432,
-0.053047508001327515,
0.024082200601696968,
0.06088710576295853,
0.062027737498283386,
0.00863152276724577,
0.04604047164320946,
-0.07114481180906296,
-0.07315792888402939,
0.02283838950097561,
-0.1304006278514862,
0.02776431292295456,
0.08937957137823105,
-0.06580516695976257,
0.02100394479930401,
0.005875744856894016,
0.04166572540998459,
-0.06454804539680481,
0.09290599822998047,
-0.06198824197053909,
-0.00003852718509733677,
-0.032132674008607864,
-0.09272415190935135,
0.01080300286412239,
-0.06744586676359177,
-0.005184444133192301,
-0.0432979054749012,
-0.04809420183300972,
-0.07419005036354065,
0.05187392607331276,
-0.05674874037504196,
-0.05969275161623955,
-0.07455918937921524,
-0.08353597670793533,
0.03767591714859009,
-0.02120175026357174,
0.14078348875045776,
-0.04306771606206894,
0.0904129147529602,
-0.0012152383569628,
0.03005925565958023,
0.11968395113945007,
0.05078840255737305,
-0.05587445944547653,
0.06817614287137985,
-0.10096610337495804,
0.1196349710226059,
-0.08264929801225662,
-0.01993352174758911,
-0.1661815196275711,
-0.10762201249599457,
0.004859509877860546,
0.0009806181769818068,
0.04104265943169594,
0.1530323028564453,
-0.11086268723011017,
-0.07943086326122284,
0.1273939609527588,
-0.07137826830148697,
-0.10336891561746597,
0.11771299690008163,
-0.04108200594782829,
0.004645223263651133,
0.030768929049372673,
0.15954884886741638,
0.05276620760560036,
-0.12951886653900146,
-0.039046261459589005,
-0.06793281435966492,
0.10190828889608383,
0.135456845164299,
0.1261543482542038,
-0.04071749001741409,
0.017795661464333534,
-0.024775179103016853,
-0.08539856970310211,
-0.029399404302239418,
-0.044909652322530746,
-0.07789982110261917,
0.0019216833170503378,
-0.04019748792052269,
0.053407926112413406,
0.009928261861205101,
-0.0036675601731985807,
-0.03403206542134285,
-0.1488690823316574,
-0.00885203666985035,
0.08931461721658707,
-0.0983089953660965,
0.0192111749202013,
-0.10379751771688461,
0.03301175311207771,
-0.024476809427142143,
-0.014883795753121376,
-0.1595757156610489,
-0.020738471299409866,
0.03067641519010067,
-0.10518424212932587,
0.044666457921266556,
0.017243029549717903,
0.04001902788877487,
0.0503198616206646,
-0.017186669632792473,
-0.05260414257645607,
-0.0357486866414547,
0.0004635172081179917,
-0.02085692062973976,
-0.2228103131055832,
-0.06330056488513947,
-0.026809480041265488,
0.20714369416236877,
-0.2591398358345032,
0.002942387480288744,
0.08621184527873993,
0.0856008380651474,
0.029788346961140633,
-0.054026637226343155,
0.027959199622273445,
0.025332722812891006,
-0.03335407003760338,
-0.04261146858334541,
0.01653306558728218,
-0.00861648190766573,
-0.1384323686361313,
0.03947298973798752,
-0.19646647572517395,
-0.04552141949534416,
0.10105763375759125,
-0.013403729535639286,
-0.07737501710653305,
-0.07973314821720123,
-0.05164657160639763,
-0.046249836683273315,
-0.035069093108177185,
0.007303539663553238,
0.2110675424337387,
0.056003548204898834,
0.09899744391441345,
-0.08998151868581772,
-0.08406936377286911,
0.0341239832341671,
-0.014477063901722431,
-0.010341357439756393,
0.14410802721977234,
0.03446032479405403,
-0.07062609493732452,
0.08005722612142563,
0.08231499046087265,
-0.010003764182329178,
0.09089615195989609,
-0.03719569370150566,
-0.10094728320837021,
-0.0787770003080368,
0.029354780912399292,
0.009673364460468292,
0.048847001045942307,
-0.09507803618907928,
0.020159127190709114,
0.03519057855010033,
0.044916242361068726,
-0.00601187814027071,
-0.18081441521644592,
0.01591963693499565,
0.04436509683728218,
-0.06685853749513626,
-0.07707763463258743,
-0.01927771605551243,
0.005513099022209644,
0.07556412369012833,
0.0016051065176725388,
0.031564515084028244,
-0.005125655326992273,
-0.06591170281171799,
-0.12207309156656265,
0.1806872934103012,
-0.0528862290084362,
-0.14280365407466888,
-0.125144362449646,
-0.0656820759177208,
-0.05271698534488678,
-0.005397992208600044,
0.02115912176668644,
-0.04922560229897499,
-0.036812931299209595,
-0.08306839317083359,
0.05946529284119606,
-0.025892915204167366,
-0.0067738196812570095,
0.003516792319715023,
0.013027791865170002,
0.05175003781914711,
-0.08599244058132172,
0.020359082147479057,
0.012517611496150494,
-0.052489474415779114,
0.008590102195739746,
0.07266668975353241,
0.05121377855539322,
0.16172218322753906,
0.04294460266828537,
0.005065049976110458,
-0.01779910922050476,
0.20244991779327393,
-0.13403423130512238,
-0.008199215866625309,
0.0950726866722107,
-0.09876926988363266,
0.03344932571053505,
0.16289646923542023,
0.01908862590789795,
-0.09304255247116089,
0.0356319285929203,
0.057412486523389816,
-0.03474375605583191,
-0.3089374601840973,
-0.0356251485645771,
-0.04011841118335724,
0.0029829912818968296,
0.10060840100049973,
0.024851791560649872,
-0.05239130184054375,
0.043754514306783676,
-0.049563948065042496,
-0.013177787885069847,
0.04703645035624504,
0.059376686811447144,
0.06936442852020264,
0.03849656507372856,
0.08935264497995377,
-0.018328484147787094,
-0.02589714340865612,
0.040565118193626404,
0.033395834267139435,
0.17897485196590424,
-0.010052002035081387,
0.19859609007835388,
0.0646158829331398,
0.08181111514568329,
-0.004621780011802912,
0.02316409908235073,
0.030181661248207092,
0.020502790808677673,
0.017035270109772682,
-0.06704666465520859,
-0.05783085897564888,
0.09184571355581284,
0.09031461179256439,
-0.013394334353506565,
-0.07201813906431198,
-0.004000670742243528,
0.02134084887802601,
0.292237251996994,
0.08621244877576828,
-0.22244110703468323,
-0.06431343406438828,
0.04390157759189606,
-0.05839616805315018,
-0.03398476168513298,
0.006351276766508818,
0.12734076380729675,
-0.11223895847797394,
0.07692263275384903,
-0.05680207163095474,
0.07831522077322006,
-0.12570902705192566,
-0.013665647245943546,
0.03962469473481178,
0.031035447493195534,
-0.017032938078045845,
0.06728879362344742,
-0.20942769944667816,
0.256731241941452,
-0.0009763921843841672,
0.04341385141015053,
-0.06316467374563217,
0.022454340010881424,
-0.010007674805819988,
-0.06755075603723526,
0.1490236222743988,
-0.004202347714453936,
-0.07159021496772766,
-0.1472751498222351,
-0.11595466732978821,
0.026964720338582993,
0.1500069946050644,
-0.09932085871696472,
0.1232091560959816,
-0.038220737129449844,
0.011831060983240604,
0.003282905323430896,
-0.02265205606818199,
-0.038375016301870346,
-0.09609798341989517,
0.054196953773498535,
-0.048920176923274994,
0.04182693362236023,
-0.042367059737443924,
-0.07489608973264694,
-0.1330547034740448,
0.15047413110733032,
-0.16661973297595978,
-0.04817434400320053,
-0.11266829073429108,
0.017298700287938118,
0.13026699423789978,
-0.07652796059846878,
0.010460074059665203,
0.02224688045680523,
0.1284351646900177,
0.02996763214468956,
-0.007537463679909706,
0.09604005515575409,
-0.03822082653641701,
-0.24864451587200165,
-0.02004201151430607,
0.17392556369304657,
0.05261070653796196,
0.079777292907238,
-0.02668065018951893,
0.04065520316362381,
-0.005539650097489357,
-0.09775891900062561,
0.07975932955741882,
0.03978139907121658,
0.005710052326321602,
0.0690952017903328,
0.006588509771972895,
-0.007566561456769705,
-0.13349124789237976,
-0.05754817649722099,
0.10599915683269501,
0.3612864315509796,
-0.07502792030572891,
0.07604102045297623,
0.07788609713315964,
-0.08254507929086685,
-0.17410670220851898,
-0.006874600891023874,
0.12469212710857391,
0.03723302111029625,
0.0384015329182148,
-0.19296877086162567,
0.05520007759332657,
0.07014045864343643,
-0.025644201785326004,
0.08550452440977097,
-0.2743183970451355,
-0.14528577029705048,
0.09190517663955688,
0.06735368818044662,
-0.07536299526691437,
-0.1520911157131195,
-0.07535359263420105,
-0.006670551840215921,
-0.08386479318141937,
0.1184106394648552,
-0.01512106228619814,
0.11692454665899277,
0.04039657115936279,
0.07129168510437012,
0.03943241015076637,
-0.0440034382045269,
0.16667351126670837,
0.0394073985517025,
0.03459141030907631,
-0.04542822390794754,
-0.03429741412401199,
-0.035874564200639725,
-0.05249116197228432,
0.026881035417318344,
-0.046903062611818314,
0.02334413304924965,
-0.11631367355585098,
-0.05537080764770508,
-0.06466116011142731,
0.019223319366574287,
-0.05841716006398201,
-0.060518160462379456,
-0.035348791629076004,
0.05352535843849182,
0.09367334842681885,
-0.0034685928840190172,
0.07503197342157364,
-0.06616394966840744,
0.03399524465203285,
0.19326967000961304,
0.15143579244613647,
0.11131895333528519,
-0.06302307546138763,
-0.024832943454384804,
0.012699402868747711,
0.0456371046602726,
-0.14199510216712952,
0.06019198149442673,
0.13904307782649994,
0.05796322599053383,
0.17197535932064056,
0.01605142280459404,
-0.096254363656044,
0.006927185226231813,
0.02101156674325466,
-0.11011959612369537,
-0.14000330865383148,
-0.00640525110065937,
-0.011442296206951141,
-0.15884049236774445,
-0.04235680401325226,
0.12539659440517426,
-0.009788790717720985,
-0.013772399164736271,
0.015458913519978523,
0.049123890697956085,
-0.022767160087823868,
0.16270795464515686,
0.02402052842080593,
0.09398704022169113,
-0.08188091218471527,
0.09489814937114716,
0.08745543658733368,
-0.13015154004096985,
0.057894933968782425,
0.03300917521119118,
-0.04164620116353035,
-0.01161526795476675,
-0.04075617715716362,
-0.004152642097324133,
0.0344737246632576,
-0.028443830087780952,
-0.056342270225286484,
-0.11930325627326965,
0.047882817685604095,
0.057200878858566284,
0.031246766448020935,
0.0664033591747284,
-0.009257188998162746,
0.0034727901220321655,
-0.12986651062965393,
0.12932483851909637,
0.03586140275001526,
0.027123935520648956,
-0.12453247606754303,
0.11515435576438904,
0.02129439078271389,
0.01785043068230152,
-0.003221771214157343,
-0.016030579805374146,
-0.10240112245082855,
0.03002914972603321,
-0.0817444920539856,
0.03653505817055702,
-0.05243479460477829,
0.0048135556280612946,
0.010279431007802486,
-0.07700316607952118,
-0.05718572437763214,
0.016920993104577065,
-0.10665393620729446,
-0.023009097203612328,
-0.01899896189570427,
0.09596911072731018,
-0.11548449099063873,
-0.031226567924022675,
0.0734427347779274,
-0.12110966444015503,
0.10154309868812561,
0.025796491652727127,
-0.027861949056386948,
0.023326611146330833,
-0.1108095794916153,
0.04972090572118759,
0.009719659574329853,
0.029886305332183838,
0.01604347489774227,
-0.25271397829055786,
-0.001599494251422584,
-0.03348303958773613,
-0.01789851114153862,
-0.0069585335440933704,
0.04972441494464874,
-0.1291445940732956,
-0.02851223386824131,
-0.029404349625110626,
-0.04890558868646622,
-0.05883922055363655,
0.035932302474975586,
0.02667640894651413,
0.0278178583830595,
0.18864397704601288,
-0.03501874580979347,
0.11251816898584366,
-0.1894008219242096,
-0.011137712746858597,
0.00004168079613009468,
-0.011870686896145344,
-0.027086084708571434,
-0.0490776002407074,
0.07948561012744904,
-0.08048731088638306,
0.1297140270471573,
-0.054058145731687546,
0.005804624408483505,
0.03373132273554802,
-0.06259123980998993,
-0.02875036746263504,
0.05760515481233597,
0.1698249727487564,
0.03617095947265625,
-0.01809529960155487,
0.06767068803310394,
-0.02763252519071102,
0.01562964916229248,
0.13493750989437103,
0.1570243239402771,
0.12896417081356049,
0.021958980709314346,
0.04701101407408714,
0.0742064118385315,
-0.11953022330999374,
-0.1292944699525833,
0.14858350157737732,
-0.0585603192448616,
0.10444764047861099,
-0.0411146879196167,
0.17362026870250702,
0.09550127387046814,
-0.20697462558746338,
0.06252989917993546,
-0.029287829995155334,
-0.10260850191116333,
-0.13931232690811157,
-0.1494808942079544,
-0.08138123899698257,
-0.08648665249347687,
0.028487402945756912,
-0.10740569233894348,
0.04786250740289688,
0.058892976492643356,
0.04282163456082344,
0.013107690960168839,
0.11552959680557251,
-0.030362989753484726,
-0.04171266406774521,
0.06619575619697571,
0.01931690238416195,
-0.019527992233633995,
-0.014045200310647488,
-0.055185768753290176,
0.07761162519454956,
0.0019921930506825447,
0.08354440331459045,
-0.012138071469962597,
-0.040492746978998184,
0.04146316275000572,
-0.0482412688434124,
-0.09858627617359161,
0.008605276234447956,
-0.0012097450671717525,
0.09128326177597046,
0.1404881775379181,
0.05041823536157608,
0.0029313769191503525,
-0.02243739739060402,
0.19304263591766357,
-0.08262065052986145,
-0.008251930586993694,
-0.1537715494632721,
0.19069135189056396,
-0.02038475126028061,
0.0014659017324447632,
0.0430438369512558,
-0.09309902042150497,
0.0044680689461529255,
0.16874255239963531,
0.1480247974395752,
-0.02261340618133545,
-0.005655236542224884,
0.014627451077103615,
0.022442294284701347,
0.008706020191311836,
0.06259790807962418,
0.08057740330696106,
0.05776146054267883,
-0.04880450293421745,
-0.026890605688095093,
-0.0343642495572567,
-0.053163886070251465,
0.048467718064785004,
0.1294611245393753,
0.008386073634028435,
-0.024914879351854324,
-0.0410321019589901,
0.10527314990758896,
-0.11058131605386734,
-0.2094099223613739,
0.03527340665459633,
-0.1319710612297058,
-0.1597173511981964,
-0.0038201555144041777,
0.020970739424228668,
0.051954448223114014,
0.04291100800037384,
-0.0049829911440610886,
-0.06647486239671707,
0.11606176942586899,
0.014728306792676449,
-0.030612122267484665,
-0.016184668987989426,
0.06579546630382538,
-0.07788129895925522,
0.1856178492307663,
0.0038749955128878355,
0.1066792830824852,
0.09466174244880676,
0.03522074967622757,
-0.0838133841753006,
0.05735253915190697,
0.0724962055683136,
-0.12804503738880157,
0.03232826665043831,
0.20904603600502014,
-0.03625722602009773,
0.14393505454063416,
0.07973481714725494,
-0.062249451875686646,
0.03147033229470253,
-0.04771610349416733,
-0.053193073719739914,
-0.07858379930257797,
0.006012077908962965,
-0.07479032129049301,
0.14670568704605103,
0.2065213918685913,
-0.06618502736091614,
-0.01198861375451088,
-0.04439690709114075,
0.01256024930626154,
0.012637905776500702,
0.10028320550918579,
-0.008557197637856007,
-0.23459777235984802,
0.0306432843208313,
-0.0018665776588022709,
0.058146730065345764,
-0.22930695116519928,
-0.049460723996162415,
0.048603255301713943,
-0.048111047595739365,
-0.042063094675540924,
0.10365224629640579,
0.0062867458909749985,
0.048995569348335266,
-0.06546006351709366,
-0.07329484820365906,
-0.013114377856254578,
0.16147559881210327,
-0.17522044479846954,
-0.07028068602085114
] |
null | null |
transformers
|
# Harry Potter DialoGPT Model
|
{"tags": ["conversational"]}
|
text-generation
|
bookemdan/DialoGPT-small-harrypotter
|
[
"transformers",
"pytorch",
"conversational",
"endpoints_compatible",
"has_space",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #conversational #endpoints_compatible #has_space #region-us
|
# Harry Potter DialoGPT Model
|
[
"# Harry Potter DialoGPT Model"
] |
[
"TAGS\n#transformers #pytorch #conversational #endpoints_compatible #has_space #region-us \n",
"# Harry Potter DialoGPT Model"
] |
[
29,
8
] |
[
"passage: TAGS\n#transformers #pytorch #conversational #endpoints_compatible #has_space #region-us \n# Harry Potter DialoGPT Model"
] |
[
0.027774613350629807,
0.07819665968418121,
-0.006726363208144903,
0.05588759481906891,
0.05026226490736008,
0.045019377022981644,
0.1074274554848671,
0.09222754836082458,
0.07472135126590729,
-0.012895351275801659,
0.08417999744415283,
0.16887575387954712,
-0.04191965237259865,
-0.002325695473700762,
-0.03443197160959244,
-0.32217708230018616,
0.05152413249015808,
0.01634996198117733,
-0.1279572695493698,
0.10082270205020905,
0.06441010534763336,
-0.09510402381420135,
0.03764994442462921,
0.0098145492374897,
-0.09057946503162384,
-0.021413378417491913,
-0.02341240458190441,
-0.06465595215559006,
0.18513379991054535,
-0.004524844232946634,
0.11043669283390045,
-0.014120045118033886,
-0.11232348531484604,
-0.1523865908384323,
0.038718730211257935,
-0.030590476468205452,
0.0029944530688226223,
0.04376734420657158,
-0.03563152998685837,
-0.048376958817243576,
0.1481495201587677,
0.13342833518981934,
0.12277691811323166,
0.008380627259612083,
-0.16134564578533173,
-0.1552656888961792,
-0.021297793835401535,
0.07031774520874023,
-0.0798819437623024,
0.06815095245838165,
-0.012355724349617958,
0.17052042484283447,
-0.12865832448005676,
0.04806150496006012,
0.18540167808532715,
-0.43114909529685974,
-0.014118412509560585,
0.17535452544689178,
0.1243940070271492,
0.1227414458990097,
-0.12177295237779617,
0.023983128368854523,
-0.016376890242099762,
0.0008478513336740434,
-0.004057391546666622,
-0.08218664675951004,
-0.06653837114572525,
0.0826532319188118,
-0.1533845067024231,
-0.005606825929135084,
0.23450440168380737,
-0.09575598686933517,
0.049066852778196335,
-0.07598047703504562,
-0.07175251096487045,
0.014066291972994804,
-0.03298497572541237,
-0.09986431151628494,
0.011843855492770672,
0.04499613866209984,
-0.01231619343161583,
-0.027304943650960922,
-0.09148336946964264,
0.08199360221624374,
-0.16192527115345,
0.22354121506214142,
0.04394252970814705,
0.08335372805595398,
-0.23394638299942017,
0.05484708771109581,
-0.008222956210374832,
-0.03789849206805229,
0.02013322152197361,
-0.12932038307189941,
0.012891926802694798,
0.014907591044902802,
-0.06095968186855316,
0.07404199242591858,
0.018849680200219154,
0.11937849968671799,
-0.061802033334970474,
0.01469886302947998,
0.09417299181222916,
0.08737218379974365,
0.11848143488168716,
0.017536994069814682,
0.00636063190177083,
-0.06993516534566879,
-0.028978463262319565,
-0.07223009318113327,
0.010886083357036114,
-0.03153480216860771,
-0.13713723421096802,
-0.0938437283039093,
0.018243025988340378,
-0.0067737954668700695,
0.07197406142950058,
0.030591031536459923,
-0.014931484125554562,
-0.01658678613603115,
0.00823475793004036,
0.041782863438129425,
-0.0027287506964057684,
-0.014024063013494015,
-0.011585632339119911,
0.22883033752441406,
-0.0019978575874119997,
-0.008591179735958576,
-0.017468251287937164,
0.10322582721710205,
-0.09691889584064484,
0.013705002143979073,
0.01325986161828041,
-0.04244853928685188,
0.03272189572453499,
0.04075109213590622,
0.06476756930351257,
-0.13679641485214233,
0.042522087693214417,
-0.00988989882171154,
0.028456982225179672,
-0.000573660887312144,
-0.0677102729678154,
-0.0006832036888226867,
-0.04279410094022751,
0.03821942210197449,
-0.0031965477392077446,
-0.03110668994486332,
-0.06414807587862015,
0.08032146841287613,
-0.07718924432992935,
0.1387067288160324,
-0.09563037008047104,
0.05809210613369942,
-0.04674440994858742,
-0.02405610680580139,
-0.14146995544433594,
-0.04269304871559143,
-0.013238531537353992,
0.08696377277374268,
0.06688947975635529,
-0.06519497185945511,
-0.07559424638748169,
0.051933836191892624,
-0.08939360082149506,
0.1288667768239975,
-0.05709463357925415,
-0.06895691901445389,
0.18444132804870605,
-0.08278973400592804,
-0.19889166951179504,
0.09238807111978531,
-0.050962552428245544,
0.04985383525490761,
0.05183160677552223,
0.28541237115859985,
-0.00116819073446095,
-0.0899522677063942,
0.015833813697099686,
0.10910948365926743,
-0.11994528025388718,
0.03888317570090294,
0.05013427510857582,
0.013550740666687489,
-0.00585784250870347,
0.03288138285279274,
0.024665186181664467,
-0.012695450335741043,
-0.05299700051546097,
0.009265575557947159,
-0.03184868395328522,
-0.04598037898540497,
0.17730802297592163,
0.01791832409799099,
0.11900990456342697,
-0.10033910721540451,
-0.021109359338879585,
0.06800779700279236,
0.06678656488656998,
0.03782081604003906,
0.10196889191865921,
-0.008130200207233429,
0.11413612961769104,
0.09277722984552383,
-0.0071927281096577644,
-0.14886021614074707,
-0.015714209526777267,
-0.057631805539131165,
0.12939618527889252,
0.0800369381904602,
0.22795802354812622,
0.07835299521684647,
-0.07824350148439407,
-0.06184469908475876,
0.03563063591718674,
0.07337638735771179,
0.01995348557829857,
-0.09659198671579361,
-0.15138205885887146,
-0.011455049738287926,
-0.07844936102628708,
0.06172255799174309,
-0.06578095257282257,
0.02992217056453228,
-0.034780099987983704,
0.10795369744300842,
-0.02525625377893448,
0.024128206074237823,
0.012389709241688251,
0.014139746315777302,
-0.0601586252450943,
0.008510259911417961,
0.0615667849779129,
-0.025404630228877068,
-0.03992875665426254,
0.1605750173330307,
-0.13640448451042175,
0.2707882821559906,
0.2418462038040161,
-0.29299092292785645,
-0.0006947524379938841,
-0.08924761414527893,
-0.013906504958868027,
0.05655893310904503,
0.10746360570192337,
0.03534911200404167,
0.14344573020935059,
-0.015708288177847862,
0.09986519068479538,
-0.021645016968250275,
-0.07141393423080444,
-0.10798497498035431,
-0.06263259798288345,
-0.03974180668592453,
0.08228461444377899,
-0.03802239149808884,
-0.10010918229818344,
0.14695248007774353,
0.21552011370658875,
0.05590643361210823,
0.09728338569402695,
0.10170456022024155,
0.013690108433365822,
0.006801505107432604,
-0.052061643451452255,
-0.07976483553647995,
-0.00678562605753541,
-0.37372156977653503,
-0.08649976551532745,
0.09625657647848129,
-0.011676524765789509,
0.09271816164255142,
-0.10486295819282532,
-0.06273718923330307,
0.059603121131658554,
0.0614517480134964,
-0.033741679042577744,
0.13514243066310883,
0.01874231919646263,
0.12416581064462662,
0.029158834367990494,
-0.07810904085636139,
0.03968186303973198,
0.018531380221247673,
-0.02964092418551445,
0.11955255270004272,
-0.17586998641490936,
-0.21702882647514343,
0.029069432988762856,
-0.15306590497493744,
0.04827791824936867,
0.00411080801859498,
0.07015323638916016,
-0.09373927116394043,
0.01375383697450161,
0.017886627465486526,
-0.01953967660665512,
-0.3247586786746979,
0.006946707610040903,
-0.1468464732170105,
0.047958649694919586,
-0.21875077486038208,
-0.10695996135473251,
-0.03891026973724365,
-0.043995313346385956,
-0.03299574926495552,
0.100960873067379,
-0.10235030204057693,
0.008765925653278828,
0.2199341207742691,
0.014586200006306171,
0.02758179046213627,
-0.027804430574178696,
0.20033472776412964,
-0.08889971673488617,
-0.053776126354932785,
0.09596876800060272,
0.0017573658842593431,
0.04864317923784256,
0.10222332924604416,
0.022953633219003677,
-0.02230977453291416,
-0.023469187319278717,
-0.09474880993366241,
-0.039521925151348114,
-0.21148602664470673,
-0.08718303591012955,
-0.1144704595208168,
0.038555700331926346,
-0.03977472707629204,
0.06281625479459763,
0.11135465651750565,
0.0008129114285111427,
0.05015866830945015,
-0.16246770322322845,
0.0420643575489521,
0.11279187351465225,
0.22159452736377716,
-0.09964630007743835,
0.08019237965345383,
-0.00691848574206233,
-0.04778851941227913,
0.03376710042357445,
0.08166453242301941,
0.06707315146923065,
0.046807657927274704,
-0.04213947430253029,
0.025572093203663826,
0.2135585993528366,
0.14536665380001068,
-0.0012315592030063272,
0.05164925754070282,
-0.02184741012752056,
-0.047600023448467255,
0.018284637480974197,
-0.05306361988186836,
0.08542200177907944,
0.18898028135299683,
-0.12764179706573486,
-0.07234203815460205,
-0.11024858802556992,
0.08726874738931656,
-0.10371189564466476,
0.08065234869718552,
-0.05045384168624878,
-0.036817267537117004,
0.08586135506629944,
-0.07063250243663788,
-0.08756707608699799,
0.12460485100746155,
0.05406981706619263,
-0.18028347194194794,
0.009364346973598003,
0.07402385026216507,
0.07322944700717926,
0.017108112573623657,
0.05193952098488808,
-0.1730530560016632,
-0.21418078243732452,
-0.001643385156057775,
0.024889439344406128,
-0.2934213876724243,
0.18210120499134064,
-0.04027281329035759,
-0.08105713129043579,
0.00004148047082708217,
-0.06865637749433517,
0.019030528143048286,
0.189646914601326,
0.05815809592604637,
0.04614458605647087,
0.039243124425411224,
0.03775306046009064,
0.14118508994579315,
-0.04144483059644699,
0.08660281449556351,
0.05576588585972786,
-0.026804743334650993,
-0.01970488764345646,
0.004243059083819389,
-0.03820714354515076,
0.0891643762588501,
0.1399916559457779,
-0.18100900948047638,
0.07509788870811462,
-0.019803069531917572,
-0.012266809120774269,
0.024929450824856758,
-0.021091949194669724,
-0.12809263169765472,
0.09227269142866135,
0.09592460840940475,
-0.07181575149297714,
-0.05973535776138306,
-0.10450852662324905,
0.14795993268489838,
-0.004072131589055061,
0.04120422527194023,
-0.028956575319170952,
-0.02114599198102951,
-0.17102117836475372,
-0.12175964564085007,
0.08312391489744186,
-0.05219464749097824,
-0.037202440202236176,
-0.019314829260110855,
0.17549313604831696,
-0.04421965032815933,
0.11182807385921478,
-0.0035289025399833918,
0.04849518835544586,
-0.24153274297714233,
-0.03966590389609337,
0.008358861319720745,
0.00023264165793079883,
0.03985995426774025,
0.05825114995241165,
0.08325114846229553,
0.07490609586238861,
-0.01828753761947155,
0.03977976366877556,
0.26632216572761536,
0.161591038107872,
-0.06491727381944656,
0.11571460962295532,
0.0704057365655899,
-0.025601740926504135,
-0.13095709681510925,
-0.08220639824867249,
-0.16852205991744995,
-0.08385034650564194,
-0.0371854230761528,
-0.03200877085328102,
0.07823365926742554,
-0.03818982094526291,
-0.011658204719424248,
0.10464145988225937,
-0.29870426654815674,
-0.09054083377122879,
0.11519636958837509,
-0.029051903635263443,
0.4250166714191437,
-0.11794041097164154,
0.007987911812961102,
0.008718843571841717,
-0.23556090891361237,
0.040003251284360886,
-0.07000134885311127,
0.09387020021677017,
-0.08081938326358795,
0.19245168566703796,
0.020414993166923523,
-0.02001778967678547,
0.1291610449552536,
0.02884700521826744,
-0.047383371740579605,
-0.07230624556541443,
-0.23196646571159363,
0.032363031059503555,
-0.007933443412184715,
-0.007031820714473724,
0.06268789619207382,
0.005889345426112413,
-0.17255431413650513,
0.0056787896901369095,
-0.12124445289373398,
-0.009413342922925949,
-0.02930266596376896,
-0.014808782376348972,
-0.12346406280994415,
0.030898310244083405,
0.008405259810388088,
0.02924003265798092,
0.1891438513994217,
-0.08064785599708557,
0.23248012363910675,
0.010633745230734348,
0.08381155133247375,
-0.15435999631881714,
-0.17238542437553406,
0.023056747391819954,
-0.05095130205154419,
0.05793885514140129,
-0.11778194457292557,
-0.07442063838243484,
0.1339738816022873,
-0.00829667691141367,
-0.008868396282196045,
0.13918541371822357,
0.02739204466342926,
0.009587321430444717,
0.053706541657447815,
-0.21552322804927826,
-0.16730792820453644,
-0.009770460426807404,
0.0027389281895011663,
0.08537933975458145,
0.05016147345304489,
0.1402658075094223,
-0.03298971801996231,
-0.08136902749538422,
0.011047359555959702,
0.00915340706706047,
-0.06696780771017075,
0.02044769376516342,
0.09638369828462601,
0.028316520154476166,
-0.13466542959213257,
0.0405954010784626,
-0.003948488272726536,
-0.17066608369350433,
-0.009776587598025799,
0.16627605259418488,
-0.101860910654068,
-0.12210845947265625,
-0.11643511801958084,
-0.0072049787268042564,
-0.06714734435081482,
0.02164088934659958,
0.0577990896999836,
-0.12836800515651703,
0.02286921627819538,
0.11464766412973404,
0.0639641135931015,
0.05871565639972687,
-0.0847061350941658,
-0.008614490740001202,
0.03677574172616005,
-0.03146661818027496,
-0.006555580534040928,
0.030103899538517,
-0.07438834011554718,
0.18147709965705872,
-0.07874222844839096,
0.09759703278541565,
-0.07724198698997498,
-0.0932595282793045,
-0.15714550018310547,
0.045033812522888184,
-0.033401377499103546,
-0.12859635055065155,
-0.17312702536582947,
-0.07791779935359955,
0.013628257438540459,
-0.11165116727352142,
-0.00031303887953981757,
-0.04670773819088936,
-0.13680149614810944,
0.045394811779260635,
0.020371217280626297,
-0.010461660102009773,
-0.04571801424026489,
0.00013232558558229357,
0.09330207854509354,
-0.03630592301487923,
0.12327173352241516,
0.2332662045955658,
-0.1384984254837036,
0.09911872446537018,
-0.03425626829266548,
-0.18183660507202148,
0.029670778661966324,
0.006037923973053694,
0.07900139689445496,
0.03228913992643356,
-0.0678965151309967,
-0.011962364427745342,
0.057950351387262344,
0.07930979132652283,
0.06993205100297928,
-0.04431932047009468,
-0.010531296953558922,
0.0025278127286583185,
-0.16972845792770386,
0.0019748839549720287,
-0.07974141091108322,
0.16932708024978638,
-0.0087129520252347,
0.07934553176164627,
0.008572318591177464,
0.061738356947898865,
-0.06274648755788803,
0.02785220928490162,
-0.009907523170113564,
-0.14167934656143188,
0.1009531244635582,
-0.026650339365005493,
0.0268679428845644,
-0.021965395659208298,
0.24107833206653595,
-0.044958360493183136,
-0.03535833582282066,
0.0417187437415123,
0.049631018191576004,
-0.03445810079574585,
0.01948210410773754,
0.19507889449596405,
0.10923396050930023,
-0.08900231122970581,
-0.00659166369587183,
0.07374591380357742,
0.04702368751168251,
0.03869997337460518,
0.0862637609243393,
0.08434724062681198,
0.09124627709388733,
0.09286174178123474,
-0.03667662292718887,
0.081471748650074,
-0.09765747934579849,
-0.23275892436504364,
0.061239950358867645,
-0.012688545510172844,
-0.11428123712539673,
0.16220706701278687,
0.2136278599500656,
-0.022422203794121742,
0.07400020956993103,
-0.03375358134508133,
-0.06189844757318497,
-0.11276834458112717,
0.02645299583673477,
-0.040942225605249405,
-0.1492508053779602,
-0.022409241646528244,
-0.15089429914951324,
0.019319996237754822,
0.09190671890974045,
0.07094496488571167,
-0.04095487669110298,
-0.01902339793741703,
0.1452229619026184,
-0.060185763984918594,
0.03149319440126419,
-0.001415541279129684,
0.0835563987493515,
-0.034803614020347595,
-0.0047445837408304214,
-0.1484406739473343,
-0.05843394994735718,
-0.036266472190618515,
0.054548878222703934,
-0.1628149300813675,
-0.01194692775607109,
-0.10809408873319626,
-0.07877231389284134,
-0.03839778155088425,
0.08815232664346695,
-0.02440144121646881,
0.16661910712718964,
0.028720306232571602,
-0.04473939910531044,
-0.03934907168149948,
0.29690679907798767,
-0.12562644481658936,
-0.04893624037504196,
-0.0005521043785847723,
0.16151244938373566,
0.06729694455862045,
0.042595066130161285,
0.009602988138794899,
0.04194917157292366,
-0.11422820389270782,
0.2049546092748642,
0.39154815673828125,
-0.12409725785255432,
0.03294599801301956,
0.05327652767300606,
0.04204108938574791,
0.1153881698846817,
0.04851754009723663,
0.143133282661438,
0.3146371841430664,
-0.10363972932100296,
0.034652259200811386,
-0.05111488327383995,
0.01328846625983715,
-0.05905776470899582,
-0.00012679507199209183,
0.0789966955780983,
-0.12290950119495392,
-0.044670529663562775,
0.07138871401548386,
-0.25894275307655334,
0.09776082634925842,
-0.05391804873943329,
-0.2491578608751297,
-0.041521258652210236,
-0.03209102898836136,
0.11199569702148438,
-0.007958832196891308,
0.14683400094509125,
0.011499172076582909,
-0.08897311985492706,
0.06369822472333908,
0.01725103333592415,
-0.23099331557750702,
-0.05828435346484184,
0.23333479464054108,
-0.011941695585846901,
-0.04084990546107292,
-0.025270814076066017,
0.008841685019433498,
0.0877557024359703,
0.11126399785280228,
-0.04564668983221054,
-0.08340580761432648,
0.03222863748669624,
-0.0825548768043518,
-0.10214412212371826,
0.020743045955896378,
0.06451241672039032,
-0.10108673572540283,
0.1297539323568344,
-0.15191301703453064,
0.06914466619491577,
0.07559143751859665,
0.02468777634203434,
-0.011237548664212227,
0.006943122483789921,
-0.05255123972892761,
0.04636058956384659,
0.110951729118824,
-0.03536757454276085,
-0.021273771300911903,
0.023725377395749092,
-0.11527075618505478,
0.020801547914743423,
-0.02879597432911396,
-0.15307334065437317,
-0.0889091044664383,
-0.10294891893863678,
-0.012653311714529991,
-0.0528058297932148,
-0.0812055841088295,
-0.036387767642736435,
-0.04028214514255524,
0.04403757303953171,
-0.056872375309467316,
0.05472375079989433,
0.06480881571769714,
0.016251474618911743,
0.011864803731441498,
0.07357238233089447,
0.05970631539821625,
0.13385404646396637,
-0.1755414605140686,
-0.02171403169631958
] |
null | null |
transformers
|
#berk
|
{"tags": ["conversational"]}
|
text-generation
|
boran/berkbot
|
[
"transformers",
"pytorch",
"gpt2",
"text-generation",
"conversational",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
#berk
|
[] |
[
"TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n"
] |
[
51
] |
[
"passage: TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n"
] |
[
-0.009697278961539268,
0.03208012506365776,
-0.007204889785498381,
0.004809224978089333,
0.16726240515708923,
0.014898733235895634,
0.09765533357858658,
0.13672804832458496,
-0.007841327227652073,
-0.031050153076648712,
0.14490588009357452,
0.20411323010921478,
-0.006439372431486845,
0.0661218985915184,
-0.07572533935308456,
-0.2683109939098358,
0.05759621039032936,
0.046649303287267685,
0.016515716910362244,
0.1200079694390297,
0.08573378622531891,
-0.05473608896136284,
0.08714032918214798,
-0.014583407901227474,
-0.150366872549057,
0.017733458429574966,
0.043394338339567184,
-0.12260226160287857,
0.11910516023635864,
0.05462685227394104,
0.07063519209623337,
0.014929565601050854,
-0.07541623711585999,
-0.1631229966878891,
0.03031250834465027,
0.01425902172923088,
-0.0594632662832737,
0.04757995903491974,
0.059961482882499695,
-0.10165371745824814,
0.10819483548402786,
0.09530027210712433,
-0.013078106567263603,
0.06798283755779266,
-0.16849711537361145,
-0.020869607105851173,
-0.01446688175201416,
0.009899779222905636,
0.05550243332982063,
0.09964893013238907,
-0.03413357585668564,
0.10497362166643143,
-0.09214533120393753,
0.11017382889986038,
0.10932035744190216,
-0.32057443261146545,
-0.005767723545432091,
0.09167823940515518,
0.039358653128147125,
0.07352814823389053,
-0.04467793554067612,
0.06258884817361832,
0.018015462905168533,
0.017986174672842026,
-0.014015024527907372,
-0.07283061742782593,
-0.11612214148044586,
0.04717336222529411,
-0.08668071031570435,
-0.059868961572647095,
0.2244078367948532,
-0.05464440956711769,
0.06881742179393768,
-0.05281897634267807,
-0.10522868484258652,
-0.04308144748210907,
-0.029833965003490448,
0.00475557055324316,
-0.07660607248544693,
0.08692064881324768,
0.00869679357856512,
-0.09547875821590424,
-0.1376667022705078,
-0.02496783249080181,
-0.1776352822780609,
0.16140350699424744,
0.02465328387916088,
0.05232657864689827,
-0.2027255892753601,
0.09623090922832489,
0.017906051129102707,
-0.08045592904090881,
0.022091427817940712,
-0.10046248883008957,
0.029131146147847176,
0.013760408386588097,
-0.04754498973488808,
-0.061387211084365845,
0.0843690037727356,
0.11199145019054413,
-0.01731434464454651,
0.025486016646027565,
-0.039331406354904175,
0.08100687712430954,
0.03553595021367073,
0.09077847748994827,
0.007288969587534666,
-0.028338588774204254,
0.025842782109975815,
-0.13719046115875244,
-0.003647835226729512,
-0.07116208970546722,
-0.16572439670562744,
-0.021088803187012672,
0.02994808368384838,
0.08289173990488052,
0.015449047088623047,
0.11682453751564026,
-0.03272046521306038,
-0.025152435526251793,
0.03602350503206253,
-0.047656361013650894,
-0.012649794109165668,
0.016648368909955025,
0.013163427822291851,
0.12399329990148544,
-0.0022096503525972366,
0.03235051408410072,
-0.13653022050857544,
0.031423524022102356,
-0.06793295592069626,
-0.003740974934771657,
-0.03486552834510803,
-0.040637075901031494,
0.009043924510478973,
-0.06862333416938782,
0.003486064961180091,
-0.15030112862586975,
-0.15063877403736115,
0.007587034720927477,
-0.007836631499230862,
-0.04107699543237686,
-0.06370922178030014,
-0.06952770054340363,
-0.013550350442528725,
0.04251532256603241,
-0.07093454152345657,
-0.011352915316820145,
-0.06403283774852753,
0.11004766076803207,
-0.03197755664587021,
0.07921615242958069,
-0.11953279376029968,
0.08390819281339645,
-0.11260783672332764,
-0.02386913076043129,
-0.060801517218351364,
0.09317506104707718,
-0.0006014376995153725,
0.09549830108880997,
-0.006563255097717047,
-0.017931854352355003,
-0.07981178909540176,
0.06445012241601944,
-0.042872510850429535,
0.21701598167419434,
-0.0615808479487896,
-0.11181682348251343,
0.28781595826148987,
-0.052628401666879654,
-0.1370542049407959,
0.11647392809391022,
0.008682746440172195,
0.05777018144726753,
0.10703510791063309,
0.19733482599258423,
-0.015276194550096989,
0.004040541127324104,
0.09471915662288666,
0.11263324320316315,
-0.11276852339506149,
-0.033160366117954254,
0.013019153848290443,
-0.04081077128648758,
-0.10867965966463089,
0.04689536616206169,
0.09810488671064377,
0.07090286910533905,
-0.04786505550146103,
-0.03377414867281914,
-0.01366397924721241,
0.0052589005790650845,
0.08885077387094498,
-0.007157256826758385,
0.10962837189435959,
-0.05819983780384064,
-0.03796621412038803,
-0.029282379895448685,
-0.012126247398555279,
-0.03951939567923546,
0.03137664496898651,
-0.043376367539167404,
0.10821941494941711,
-0.011204327456653118,
0.06364280730485916,
-0.16185984015464783,
-0.07691477984189987,
-0.017002692446112633,
0.1581239402294159,
0.024538565427064896,
0.09859629720449448,
0.0552486926317215,
-0.040398042649030685,
-0.0012767292791977525,
0.012792680412530899,
0.15581141412258148,
-0.022091681137681007,
-0.065607450902462,
-0.052166227251291275,
0.08642971515655518,
-0.05641226842999458,
0.04504093527793884,
-0.05937713757157326,
0.012367865070700645,
0.05064384639263153,
0.10342344641685486,
-0.00018274025933351368,
0.03323284164071083,
-0.008164864964783192,
0.002145637758076191,
-0.058205123990774155,
0.007405933458358049,
0.10799351334571838,
0.00036868182360194623,
-0.07365862280130386,
0.22074243426322937,
-0.17796069383621216,
0.1765957772731781,
0.1893044263124466,
-0.299345999956131,
0.017949223518371582,
-0.10759581625461578,
-0.04561871662735939,
0.014407722279429436,
0.05567655712366104,
-0.0454222597181797,
0.1703362911939621,
-0.009871348738670349,
0.18874616920948029,
-0.04946064203977585,
-0.04464937001466751,
-0.0200483538210392,
-0.05118836089968681,
-0.0024189651012420654,
0.07781197130680084,
0.10685696452856064,
-0.13992026448249817,
0.1964332014322281,
0.1621224284172058,
0.048237916082143784,
0.19945049285888672,
0.015346456319093704,
-0.011589210480451584,
0.0909530371427536,
0.005220826715230942,
-0.058739423751831055,
-0.07409929484128952,
-0.2594851851463318,
-0.030033592134714127,
0.07992640137672424,
0.0422382652759552,
0.1212305948138237,
-0.11349532753229141,
-0.038956157863140106,
-0.01763172075152397,
-0.023146281018853188,
0.021672505885362625,
0.0914369598031044,
0.06075398623943329,
0.13201528787612915,
-0.001710098935291171,
-0.007300339173525572,
0.10524573177099228,
0.01783694699406624,
-0.09354141354560852,
0.18308524787425995,
-0.13652534782886505,
-0.37097251415252686,
-0.13911493122577667,
-0.18057456612586975,
-0.05449081212282181,
0.05712554603815079,
0.11679314076900482,
-0.12011238187551498,
-0.018752124160528183,
0.01578843593597412,
0.10931742936372757,
-0.08449502289295197,
0.0021454424131661654,
-0.06880278885364532,
0.0321490578353405,
-0.10310184955596924,
-0.09194442629814148,
-0.055416494607925415,
-0.031392451375722885,
-0.08001253753900528,
0.1423761546611786,
-0.10777941346168518,
0.04476889222860336,
0.20262959599494934,
0.04653622955083847,
0.05625178664922714,
-0.044105201959609985,
0.19377262890338898,
-0.11264272034168243,
-0.01661740615963936,
0.19215328991413116,
-0.048360925167798996,
0.07476246356964111,
0.1232115849852562,
-0.006348740309476852,
-0.08765771239995956,
0.03011748194694519,
-0.02085109055042267,
-0.07988511025905609,
-0.23219464719295502,
-0.13938382267951965,
-0.12429051846265793,
0.09477275609970093,
0.028005298227071762,
0.056365787982940674,
0.17219258844852448,
0.06577219814062119,
-0.038416244089603424,
0.006410336587578058,
0.02959546446800232,
0.08237514644861221,
0.23417828977108002,
-0.06035616248846054,
0.1364797055721283,
-0.03420931473374367,
-0.14982740581035614,
0.08169995993375778,
0.0713929831981659,
0.10213395953178406,
0.06678459793329239,
0.0804823637008667,
0.0149586396291852,
0.06188136339187622,
0.1311223804950714,
0.08191446959972382,
0.019586285576224327,
-0.02480296604335308,
-0.03388110175728798,
-0.025523077696561813,
-0.05937909707427025,
0.040128443390131,
0.06589099019765854,
-0.16763372719287872,
-0.039227183908224106,
-0.09338314831256866,
0.09657008945941925,
0.0873042419552803,
0.06609832495450974,
-0.1842060089111328,
-0.008006223477423191,
0.08488986641168594,
-0.03854905813932419,
-0.13727426528930664,
0.09535189718008041,
0.01523482333868742,
-0.15144726634025574,
0.03139317408204079,
-0.04061909019947052,
0.12188644707202911,
-0.07804752141237259,
0.09809603542089462,
-0.08108244836330414,
-0.07448557764291763,
0.02123199962079525,
0.1261177361011505,
-0.30527687072753906,
0.20240111649036407,
-0.0024993624538183212,
-0.06486981362104416,
-0.1243603527545929,
-0.0032166161108762026,
0.002410882618278265,
0.07357452809810638,
0.10519039630889893,
-0.007196315098553896,
0.001897757756523788,
-0.06300821900367737,
-0.01829923689365387,
0.032471053302288055,
0.13080233335494995,
-0.0401318334043026,
-0.021158374845981598,
-0.050194524228572845,
-0.001653497340157628,
-0.03173094615340233,
-0.06934895366430283,
0.02002747356891632,
-0.19509181380271912,
0.08751901984214783,
0.04166261479258537,
0.09648149460554123,
0.029994789510965347,
0.004265148192644119,
-0.09651939570903778,
0.24698667228221893,
-0.07148019969463348,
-0.10072879493236542,
-0.10919588059186935,
-0.046813901513814926,
0.03569883480668068,
-0.05628936365246773,
0.04309194162487984,
-0.0788632407784462,
0.028997479006648064,
-0.06352769583463669,
-0.19235502183437347,
0.12410202622413635,
-0.09027006477117538,
-0.04412810131907463,
-0.02371402643620968,
0.2110891044139862,
-0.05598580464720726,
0.010335659608244896,
0.02930437959730625,
0.01208863127976656,
-0.11645778268575668,
-0.09678568691015244,
0.031018631532788277,
-0.007351789623498917,
0.050603240728378296,
0.041841957718133926,
-0.05915454775094986,
-0.017138581722974777,
-0.052199993282556534,
-0.022926922887563705,
0.3496883809566498,
0.14231905341148376,
-0.043836336582899094,
0.19347235560417175,
0.12347975373268127,
-0.07452994585037231,
-0.3159443140029907,
-0.1066238060593605,
-0.10937739163637161,
-0.04680149629712105,
-0.07012093812227249,
-0.2002030611038208,
0.06474938243627548,
0.00662544509395957,
-0.013415241613984108,
0.12749312818050385,
-0.2561831772327423,
-0.07571036368608475,
0.15906259417533875,
-0.017980827018618584,
0.3745945692062378,
-0.1168576180934906,
-0.10926306992769241,
-0.03950892388820648,
-0.14175476133823395,
0.16968177258968353,
-0.01989765651524067,
0.11221715062856674,
-0.009765521623194218,
0.14388824999332428,
0.05548359826207161,
-0.023479344323277473,
0.08544106781482697,
0.004999885335564613,
-0.03290518373250961,
-0.10304180532693863,
-0.05676887184381485,
0.007092386484146118,
0.02477436140179634,
0.018026655539870262,
-0.041834570467472076,
0.02227151393890381,
-0.11731979995965958,
-0.04657655209302902,
-0.08982590585947037,
0.04431166127324104,
0.03899754583835602,
-0.07325074821710587,
-0.002380647463724017,
-0.07165111601352692,
-0.012272949330508709,
0.022334342822432518,
0.20356793701648712,
-0.08029330521821976,
0.16448934376239777,
0.09239562600851059,
0.12419285625219345,
-0.14376309514045715,
-0.00019283240544609725,
-0.0762530043721199,
-0.05611240118741989,
0.07737895101308823,
-0.09433035552501678,
0.058893077075481415,
0.10901971161365509,
-0.04567738622426987,
0.08828683942556381,
0.10377411544322968,
0.008936077356338501,
0.003213887568563223,
0.10916902124881744,
-0.2667325437068939,
-0.0296600554138422,
-0.07532413303852081,
0.000883326749317348,
0.09092561900615692,
0.08562852442264557,
0.18840822577476501,
0.025361526757478714,
-0.04293036088347435,
-0.002770674182102084,
0.028597986325621605,
-0.039021048694849014,
0.051667019724845886,
0.001123449532315135,
0.01947369985282421,
-0.1530752182006836,
0.072522833943367,
0.01490565575659275,
-0.15215420722961426,
0.021316176280379295,
0.16572684049606323,
-0.11656328290700912,
-0.1283872276544571,
-0.06520111113786697,
0.08313824236392975,
-0.11755692958831787,
-0.01578943058848381,
-0.03279297426342964,
-0.13145680725574493,
0.07992171496152878,
0.12629036605358124,
0.05557859688997269,
0.0972496047616005,
-0.06061713397502899,
-0.020469192415475845,
-0.018721895292401314,
-0.014099318534135818,
-0.012384648434817791,
-0.007667020428925753,
-0.055978111922740936,
0.0590752474963665,
-0.026677248999476433,
0.1425808072090149,
-0.09221141785383224,
-0.1037059873342514,
-0.16142144799232483,
0.0374140702188015,
-0.11013076454401016,
-0.08825794607400894,
-0.08821134269237518,
-0.050188567489385605,
0.002360827289521694,
-0.019856395199894905,
-0.04037635400891304,
-0.05829505994915962,
-0.12300454825162888,
0.0338277705013752,
-0.040771447122097015,
0.024727050215005875,
-0.07512269169092178,
0.015856385231018066,
0.08507686108350754,
-0.03285100311040878,
0.15655414760112762,
0.1450488418340683,
-0.1006515845656395,
0.10741901397705078,
-0.14806775748729706,
-0.09138492494821548,
0.11116421222686768,
0.015329592861235142,
0.0449691042304039,
0.09723787009716034,
0.013362943194806576,
0.0635865181684494,
0.032776717096567154,
0.05308786407113075,
0.027619892731308937,
-0.11959987878799438,
0.06483134627342224,
-0.03626115620136261,
-0.14700546860694885,
-0.049338050186634064,
-0.05282869189977646,
0.01647452637553215,
0.013054544106125832,
0.09622690081596375,
-0.05301849544048309,
0.10698331147432327,
-0.04055701196193695,
0.0346808135509491,
0.017554637044668198,
-0.1730053424835205,
-0.03816922754049301,
-0.08538098633289337,
0.03681723028421402,
0.014741539023816586,
0.25266793370246887,
0.030072299763560295,
0.012416383251547813,
0.032671261578798294,
0.08285367488861084,
0.03899408504366875,
0.010228337720036507,
0.17482228577136993,
0.1162426546216011,
-0.06621865928173065,
-0.10445023328065872,
0.0729617029428482,
0.016332454979419708,
0.01286179106682539,
0.13617953658103943,
0.008365051820874214,
0.005795429926365614,
0.08649782836437225,
-0.016865963116288185,
0.009968153201043606,
-0.10052056610584259,
-0.13426925241947174,
-0.022176474332809448,
0.05151832848787308,
-0.04655967652797699,
0.11727844923734665,
0.1406494379043579,
-0.01806013658642769,
0.03222079202532768,
-0.021771740168333054,
-0.05699979141354561,
-0.1683429479598999,
-0.1429590880870819,
-0.06883849948644638,
-0.13416796922683716,
0.00897989235818386,
-0.11180389672517776,
0.05395037308335304,
0.06001098081469536,
0.06750501692295074,
-0.06899319589138031,
0.10220931470394135,
0.04626858979463577,
-0.11440542340278625,
0.06264589726924896,
-0.0296088308095932,
0.09430401772260666,
-0.02759445086121559,
-0.019505485892295837,
-0.09039592742919922,
0.014574515633285046,
0.011419114656746387,
0.06245238706469536,
-0.04707273095846176,
0.007463190704584122,
-0.14696238934993744,
-0.08972041308879852,
-0.0523175448179245,
0.0718572810292244,
-0.050409089773893356,
0.14282815158367157,
0.00775480642914772,
-0.0170906875282526,
0.039554283022880554,
0.22787313163280487,
-0.07476283609867096,
-0.04778539761900902,
-0.05269690603017807,
0.20717895030975342,
0.02975541539490223,
0.1171872541308403,
-0.022938819602131844,
-0.006106364540755749,
-0.0919521227478981,
0.3764844834804535,
0.30030161142349243,
-0.09031439572572708,
0.011794124729931355,
0.02137952297925949,
0.04502861574292183,
0.1316293478012085,
0.1216534823179245,
0.10318691283464432,
0.3006802201271057,
-0.07452366501092911,
-0.04653361067175865,
-0.012629742734134197,
-0.023858042433857918,
-0.09059546142816544,
0.1021224707365036,
0.04839762672781944,
-0.06382183730602264,
-0.03313443064689636,
0.0954432487487793,
-0.25862133502960205,
0.1277991235256195,
-0.12311873584985733,
-0.17578600347042084,
-0.06654827296733856,
0.009760108776390553,
0.10465722531080246,
0.015642458572983742,
0.0946015790104866,
0.007128213066607714,
-0.11252258718013763,
0.06305865943431854,
0.03397420793771744,
-0.22762253880500793,
0.0006893770187161863,
0.06642123311758041,
-0.07006710022687912,
-0.0024247700348496437,
-0.026499588042497635,
0.05657242611050606,
0.0656052976846695,
0.054629553109407425,
-0.00971333310008049,
0.03816632181406021,
0.0034184439573436975,
-0.0585215799510479,
0.016623929142951965,
0.05121519789099693,
0.02472509816288948,
-0.09763528406620026,
0.06927435845136642,
-0.1574270874261856,
0.04766253009438515,
-0.0030655991286039352,
-0.04124255105853081,
0.006064958870410919,
0.008823691867291927,
-0.06491616368293762,
0.05165379121899605,
0.07916834205389023,
-0.0016257909592241049,
-0.0062433634884655476,
-0.057178743183612823,
-0.02632102556526661,
-0.027755750343203545,
-0.09291748702526093,
-0.10495562851428986,
-0.14682936668395996,
-0.11640441417694092,
0.09368976950645447,
-0.01011267676949501,
-0.1848134547472,
0.022154374048113823,
-0.08606051653623581,
0.08319322764873505,
-0.1670055389404297,
0.08040720224380493,
0.07041648775339127,
0.013038921169936657,
-0.0031511052511632442,
-0.02002427540719509,
0.054132770746946335,
0.086809903383255,
-0.10407156497240067,
-0.07400695979595184
] |
null | null | null |
Tokenizer based on `facebook/bart-large-cnn` and trained on captions normalized by [dalle-mini](https://github.com/borisdayma/dalle-mini).
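
A minimal usage sketch, assuming the repository ships standard Hugging Face tokenizer files compatible with `AutoTokenizer`:

```python
# Hedged sketch: assumes boris/dalle-mini-tokenizer exposes standard tokenizer
# files loadable with AutoTokenizer; the caption below is just an example input.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("boris/dalle-mini-tokenizer")

# Encode a normalized caption into token ids and decode it back.
ids = tokenizer("a painting of a fox sitting in a field at sunrise").input_ids
print(ids)
print(tokenizer.decode(ids, skip_special_tokens=True))
```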
|
{}
| null |
boris/dalle-mini-tokenizer
|
[
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#region-us
|
Tokenizer based on 'facebook/bart-large-cnn' and trained on captions normalized by dalle-mini.
|
[] |
[
"TAGS\n#region-us \n"
] |
[
6
] |
[
"passage: TAGS\n#region-us \n"
] |
[
0.024608636274933815,
-0.026205500587821007,
-0.009666500613093376,
-0.10395516455173492,
0.08638657629489899,
0.059816278517246246,
0.01882290467619896,
0.020661840215325356,
0.23975107073783875,
-0.005599027033895254,
0.1219947561621666,
0.0015615287702530622,
-0.037353623658418655,
0.03733762726187706,
-0.0035912662278860807,
-0.17583473026752472,
0.03876631706953049,
-0.018274923786520958,
0.01843859627842903,
0.026470553129911423,
-0.07776834815740585,
-0.07564429938793182,
0.015296397730708122,
-0.10247814655303955,
-0.083692267537117,
0.11002834886312485,
0.031466204673051834,
-0.019670886918902397,
0.10779199749231339,
-0.04243955761194229,
0.18699054419994354,
-0.011512263678014278,
-0.11213519424200058,
-0.2536850869655609,
0.021806683391332626,
-0.01765260472893715,
-0.08747660368680954,
0.01506110467016697,
0.0665089413523674,
-0.09014441072940826,
-0.0588928684592247,
0.0795099288225174,
-0.01132340170443058,
0.04246443510055542,
-0.27593839168548584,
-0.12684126198291779,
-0.05297930911183357,
-0.1421966552734375,
0.08651168644428253,
0.04035491496324539,
0.008764253929257393,
0.15506891906261444,
-0.20897391438484192,
0.004104613792151213,
0.08255259692668915,
-0.2538507878780365,
0.05591634660959244,
0.17671173810958862,
0.03623908758163452,
0.18037272989749908,
0.0060391901060938835,
0.11029672622680664,
0.0716743916273117,
-0.024263937026262283,
-0.17590197920799255,
-0.08127854019403458,
-0.04696211963891983,
0.16642488539218903,
-0.06727185100317001,
-0.14248386025428772,
0.34701237082481384,
0.00015008423360995948,
0.009657775051891804,
0.16921205818653107,
-0.059524230659008026,
-0.09972117841243744,
0.07259953022003174,
0.016484731808304787,
0.018492350354790688,
0.1471305936574936,
0.16307872533798218,
-0.0458691343665123,
-0.13837823271751404,
-0.018630273640155792,
-0.22798998653888702,
0.17510560154914856,
-0.03248048573732376,
0.13137903809547424,
-0.27447956800460815,
0.01684025302529335,
-0.2570667266845703,
0.0032130838371813297,
0.04178816080093384,
-0.06004921346902847,
-0.0226522795855999,
-0.013265985064208508,
-0.08018817007541656,
0.004899587947875261,
0.06192673370242119,
0.1266920566558838,
-0.06128726154565811,
0.06128238886594772,
-0.09319206327199936,
0.141696035861969,
0.07166698575019836,
0.07868369668722153,
0.13037432730197906,
0.041205424815416336,
-0.07187089323997498,
-0.21872246265411377,
-0.0026476888451725245,
-0.06275863200426102,
-0.09502086788415909,
-0.0020165652967989445,
-0.11606067419052124,
0.17244569957256317,
-0.030802514404058456,
-0.09825427830219269,
-0.11208184063434601,
0.09148659557104111,
-0.032992321997880936,
-0.03437839448451996,
-0.03552987426519394,
-0.020977836102247238,
0.019381176680326462,
0.04704452306032181,
-0.1548958420753479,
-0.005131472367793322,
0.07039852440357208,
0.11502562463283539,
-0.1346137970685959,
-0.003783059772104025,
-0.07908964157104492,
0.03039063885807991,
0.07654735445976257,
-0.16510222852230072,
0.03158547356724739,
-0.1124754324555397,
-0.07531405985355377,
0.002912673633545637,
-0.015710093080997467,
-0.016202643513679504,
0.166526660323143,
-0.0020451415330171585,
0.0714716836810112,
-0.026345307007431984,
-0.05890209600329399,
-0.11243434250354767,
-0.08489254862070084,
0.05390460044145584,
0.03670717030763626,
0.03266148269176483,
-0.2193479984998703,
0.014805203303694725,
-0.12762966752052307,
0.1360815018415451,
-0.10566820204257965,
-0.04705966264009476,
-0.022842247039079666,
0.20562705397605896,
0.037286072969436646,
0.08762791007757187,
-0.22171171009540558,
0.039756543934345245,
-0.05404696613550186,
0.18480908870697021,
-0.1502426266670227,
-0.0799463614821434,
0.20813211798667908,
-0.07964949309825897,
-0.10115210711956024,
0.021235812455415726,
0.020391687750816345,
0.026287272572517395,
0.0766737088561058,
0.4564172327518463,
-0.09766800701618195,
-0.09146861732006073,
0.10178250074386597,
0.17055274546146393,
-0.12427149713039398,
-0.1827561855316162,
0.06446871906518936,
-0.16666454076766968,
-0.1973118633031845,
0.0018917324487119913,
0.09222044050693512,
0.038269978016614914,
-0.07875611633062363,
-0.020746968686580658,
0.06325206160545349,
-0.0007678253459744155,
0.09095914661884308,
0.03755716234445572,
0.09034032374620438,
-0.08716782182455063,
0.11115926504135132,
-0.05017651244997978,
0.004037132486701012,
0.1343354731798172,
0.027325427159667015,
-0.03223329409956932,
0.08694463223218918,
-0.0485352948307991,
0.05295134335756302,
-0.1662379503250122,
-0.15068690478801727,
0.03398871049284935,
0.06283251196146011,
0.03186952322721481,
0.1280253529548645,
0.08141885697841644,
-0.10732853412628174,
0.022690722718834877,
-0.004228927195072174,
0.058398615568876266,
0.03891623765230179,
0.006107209715992212,
0.008764320984482765,
0.0961301177740097,
-0.10607069730758667,
-0.13589619100093842,
-0.07336436957120895,
-0.014715781435370445,
0.14371353387832642,
-0.0302802175283432,
0.07690227776765823,
-0.004240254405885935,
0.00013200697139836848,
0.06930823624134064,
0.08137880265712738,
0.016412746161222458,
0.08971183747053146,
-0.05237193778157234,
-0.05160155147314072,
0.10863113403320312,
-0.13533565402030945,
0.17837053537368774,
0.14053137600421906,
-0.20532016456127167,
0.029453208670020103,
-0.06838275492191315,
0.03670361638069153,
-0.008162540383636951,
0.0975119024515152,
-0.08272241055965424,
-0.02106042578816414,
0.013134466484189034,
0.0052274600602686405,
-0.013007243163883686,
0.017682146281003952,
-0.07295988500118256,
-0.07787393033504486,
-0.10233919322490692,
0.08436838537454605,
0.11562882363796234,
-0.10282530635595322,
0.14214380085468292,
0.4384984076023102,
0.11495281755924225,
0.21582984924316406,
-0.09581480920314789,
-0.0412987545132637,
0.007486371789127588,
0.0001535322517156601,
-0.04476691037416458,
0.08031861484050751,
-0.15973517298698425,
-0.038901735097169876,
0.027348900213837624,
0.07128690183162689,
0.11475157737731934,
-0.14959022402763367,
-0.09639324247837067,
-0.00793045200407505,
0.0022841424215584993,
-0.1249532699584961,
0.023905446752905846,
-0.03974650055170059,
0.04015624523162842,
0.07232289016246796,
-0.021535737439990044,
0.13939237594604492,
-0.04166141897439957,
-0.0639561116695404,
0.07585346698760986,
-0.2017085999250412,
-0.23179671168327332,
-0.12309670448303223,
-0.14680525660514832,
0.04366797208786011,
0.05154111236333847,
0.01726446859538555,
-0.17635835707187653,
-0.015074856579303741,
0.07706750929355621,
0.07820965349674225,
-0.20886357128620148,
-0.022814949974417686,
-0.004290030337870121,
0.0895976573228836,
-0.10227091610431671,
-0.0017130117630586028,
-0.04419664293527603,
-0.10150232166051865,
0.0017003051470965147,
0.07279510796070099,
-0.137485533952713,
0.13807645440101624,
0.21589438617229462,
0.07225540280342102,
0.07359948754310608,
-0.019093448296189308,
0.09936179965734482,
-0.10856141895055771,
-0.16549113392829895,
0.08348225057125092,
-0.06234746053814888,
0.047262318432331085,
0.17534415423870087,
0.03307317942380905,
-0.13904969394207,
-0.015682822093367577,
-0.0402069091796875,
-0.15603256225585938,
-0.238995760679245,
-0.09178274869918823,
-0.1182505264878273,
0.16442428529262543,
0.0009358620154671371,
0.06651917099952698,
0.08258313685655594,
-0.022042419761419296,
0.16447891294956207,
-0.07379321753978729,
-0.07578866183757782,
-0.006978808436542749,
0.12375060468912125,
-0.056660156697034836,
-0.03080669604241848,
-0.10566964000463486,
-0.008295975625514984,
0.1151021271944046,
0.15304014086723328,
0.12214863300323486,
0.2957419455051422,
0.08268889784812927,
0.026645636186003685,
0.08958091586828232,
0.17622539401054382,
0.09495089203119278,
0.07838419824838638,
-0.045413073152303696,
-0.014814783819019794,
0.014317171648144722,
-0.04022889584302902,
0.010141594335436821,
0.14683100581169128,
-0.2679629921913147,
-0.006678564939647913,
-0.2710230350494385,
0.0965198427438736,
-0.10913380235433578,
0.11837165057659149,
-0.01015760749578476,
0.10194015502929688,
0.11082887649536133,
0.03233652561903,
-0.03858073800802231,
0.16613617539405823,
0.08450309932231903,
-0.11277695000171661,
0.001758623169735074,
0.03737903758883476,
0.09715615212917328,
-0.02818971499800682,
0.12721189856529236,
-0.11048974841833115,
-0.1464834064245224,
0.013753619976341724,
0.07152791321277618,
-0.15373679995536804,
0.3138748109340668,
0.012069208547472954,
-0.13481520116329193,
-0.01481647603213787,
-0.09957809001207352,
-0.006440147757530212,
0.1254177987575531,
0.09333524852991104,
0.07935678958892822,
-0.2185502052307129,
-0.13339371979236603,
0.05872276425361633,
-0.00575496768578887,
0.22408108413219452,
-0.034034017473459244,
-0.11356475204229355,
-0.027013886719942093,
0.04241163283586502,
-0.06043251231312752,
0.08524788916110992,
0.023536119610071182,
-0.08113526552915573,
-0.032957352697849274,
0.05323701351881027,
0.012368366122245789,
0.00524376705288887,
0.09360801428556442,
0.020107939839363098,
-0.0009265501867048442,
0.01785753294825554,
0.047885000705718994,
-0.0675911232829094,
-0.1984109878540039,
0.09357594698667526,
-0.05215044692158699,
0.0015536568826064467,
-0.08013670891523361,
-0.15122665464878082,
-0.08837161958217621,
-0.16009655594825745,
0.12540200352668762,
-0.034406669437885284,
0.12700119614601135,
-0.06619787961244583,
0.17341409623622894,
-0.07871770113706589,
0.04481020197272301,
-0.047349292784929276,
0.050332702696323395,
-0.007268077693879604,
-0.07756082713603973,
0.16585899889469147,
-0.15564003586769104,
0.01809087023139,
0.19572502374649048,
-0.018915493041276932,
0.07177707552909851,
0.021322092041373253,
-0.0636206790804863,
0.23147478699684143,
0.3014698624610901,
0.008138049393892288,
0.1665448248386383,
0.3018903136253357,
-0.07466315478086472,
-0.2642788887023926,
-0.05505012720823288,
-0.2841376066207886,
-0.05371501296758652,
0.10716094076633453,
-0.22523896396160126,
0.06986407935619354,
0.14383509755134583,
-0.06471995264291763,
0.30228954553604126,
-0.21825523674488068,
0.012589273042976856,
0.15434536337852478,
-0.08868814259767532,
0.5515313148498535,
-0.1133413165807724,
-0.17677772045135498,
-0.008122089318931103,
-0.08741296827793121,
0.10602109134197235,
-0.0340677872300148,
0.06877441704273224,
0.013465235009789467,
0.04797380417585373,
0.048932258039712906,
-0.03111894056200981,
0.22701001167297363,
0.008710170164704323,
0.09015397727489471,
-0.07378865778446198,
-0.18624304234981537,
0.11639340221881866,
-0.04359482601284981,
-0.08891059458255768,
0.0849778801202774,
-0.05942516401410103,
-0.11078983545303345,
0.04663389176130295,
-0.07950539886951447,
-0.024862350896000862,
0.08423490077257156,
-0.04678233340382576,
-0.042606171220541,
-0.008054176345467567,
-0.1618063747882843,
-0.0002289071271661669,
0.31360217928886414,
-0.07096036523580551,
0.16695955395698547,
0.03677211329340935,
0.00038613268407061696,
-0.11027684062719345,
0.030288029462099075,
-0.05203165486454964,
-0.021576624363660812,
0.09578979015350342,
-0.11096979677677155,
0.03204701095819473,
0.14160704612731934,
-0.04864364117383957,
0.05846960097551346,
0.09256096184253693,
-0.0849417969584465,
0.007583672646433115,
0.17753590643405914,
-0.17537221312522888,
-0.1273445188999176,
-0.006135711446404457,
-0.09862716495990753,
0.14055661857128143,
0.04394126310944557,
0.05191568285226822,
0.16669964790344238,
0.03967129811644554,
-0.029474308714270592,
-0.02817419543862343,
-0.1153380498290062,
-0.0201893113553524,
0.040153320878744125,
0.00045633706031367183,
-0.08791285753250122,
0.2262638509273529,
0.06409153342247009,
-0.1328488290309906,
-0.051157206296920776,
0.2161225974559784,
-0.06805316358804703,
-0.04911920800805092,
-0.223562553524971,
0.10752306133508682,
-0.07112517952919006,
-0.0965060144662857,
0.05453834682703018,
-0.02270081453025341,
0.005106312222778797,
0.181985542178154,
0.03941008821129799,
0.11070270836353302,
0.03738937899470329,
-0.02448922023177147,
0.15798696875572205,
-0.142850860953331,
-0.14191335439682007,
-0.025354057550430298,
-0.08757315576076508,
-0.13844476640224457,
-0.026804137974977493,
0.1617041826248169,
-0.09177309274673462,
-0.14772607386112213,
-0.2621181011199951,
0.10968475043773651,
-0.16432365775108337,
-0.10192688554525375,
-0.03469514101743698,
-0.08968492597341537,
0.0696166530251503,
0.030301768332719803,
-0.03093348816037178,
-0.06706760823726654,
-0.18593791127204895,
0.0816768929362297,
0.06349513679742813,
0.045533183962106705,
-0.017847947776317596,
0.0067379772663116455,
0.1720137596130371,
0.025955144315958023,
0.10040043294429779,
0.16762186586856842,
0.011397695168852806,
0.2246655523777008,
-0.1671202927827835,
-0.11496317386627197,
0.1336962729692459,
-0.026543032377958298,
0.06762003898620605,
0.16792191565036774,
-0.0772583931684494,
0.015526676550507545,
-0.028136352077126503,
0.07066910713911057,
-0.11003983020782471,
-0.105624258518219,
0.007937257178127766,
0.02567129209637642,
-0.2755882740020752,
-0.005599735304713249,
-0.19717298448085785,
0.14788752794265747,
0.02579621411859989,
0.03297143429517746,
0.10257530212402344,
0.10404334217309952,
0.08312062919139862,
-0.0017710148822516203,
0.03226327523589134,
-0.1176818460226059,
0.02753005363047123,
-0.059239376336336136,
-0.020663779228925705,
0.017624232918024063,
0.36952024698257446,
-0.03603357449173927,
-0.046802736818790436,
0.003710439894348383,
0.1307835876941681,
-0.02139742486178875,
0.017395347356796265,
0.13209912180900574,
0.12607666850090027,
-0.08595693111419678,
-0.1504845917224884,
0.04888554662466049,
-0.04565655067563057,
-0.02836887165904045,
0.1464131623506546,
0.05905961990356445,
0.1050296202301979,
0.0908031314611435,
-0.014463032595813274,
-0.00318976235575974,
0.012856799177825451,
-0.15486004948616028,
0.06223496049642563,
-0.010558074340224266,
0.012565906159579754,
0.017934376373887062,
0.15238402783870697,
-0.005540105979889631,
0.07739730179309845,
-0.09889880567789078,
0.004208535887300968,
-0.13498884439468384,
-0.07913459837436676,
0.03617347031831741,
-0.13393273949623108,
0.04141177982091904,
-0.01871878281235695,
0.029611799865961075,
0.30386561155319214,
0.02558239921927452,
-0.020639164373278618,
0.12512871623039246,
-0.1214587539434433,
-0.12050267308950424,
-0.001594188273884356,
-0.029960084706544876,
0.0791488066315651,
-0.02633434161543846,
-0.0997740775346756,
-0.1001306027173996,
-0.15166029334068298,
-0.09759195148944855,
0.05182836204767227,
-0.04993441700935364,
-0.059362251311540604,
-0.17634081840515137,
-0.05707859992980957,
-0.05147340148687363,
0.14025864005088806,
-0.12263951450586319,
0.15159130096435547,
-0.014490418136119843,
0.004084470681846142,
0.04405883327126503,
0.1950942426919937,
-0.03644494712352753,
0.08714226633310318,
0.0154351145029068,
0.1522706001996994,
-0.05119588226079941,
0.14720745384693146,
-0.10931728035211563,
-0.04014137014746666,
-0.06710435450077057,
0.21513493359088898,
0.25630924105644226,
-0.06136954948306084,
-0.008937356993556023,
-0.012760217301547527,
0.058654606342315674,
0.1073930487036705,
0.16049085557460785,
0.002326392102986574,
0.2802925705909729,
-0.03133585304021835,
0.04815128445625305,
0.02901598811149597,
0.013607407920062542,
-0.06336209923028946,
0.03397751972079277,
0.07539387792348862,
-0.035039983689785004,
-0.1412304788827896,
0.15837742388248444,
-0.21980468928813934,
0.18157227337360382,
0.11640069633722305,
-0.19996967911720276,
-0.013728445395827293,
-0.04882071167230606,
0.1689416468143463,
-0.0856364443898201,
0.1637246012687683,
-0.0903693437576294,
-0.2108195722103119,
-0.2056000679731369,
0.03867346793413162,
-0.34623071551322937,
-0.254462867975235,
0.10422009229660034,
0.1488201916217804,
0.04015883058309555,
-0.018507536500692368,
-0.019967829808592796,
-0.018367022275924683,
0.04877542704343796,
-0.0067357709631323814,
0.06014643982052803,
0.031397558748722076,
-0.02988368645310402,
-0.24127542972564697,
-0.029804671183228493,
0.023964406922459602,
-0.07093082368373871,
0.07464958727359772,
-0.06874357163906097,
-0.022495782002806664,
0.08059766888618469,
-0.03066304884850979,
0.03298592567443848,
-0.035373736172914505,
-0.16326889395713806,
0.027529051527380943,
0.03900543600320816,
0.036012712866067886,
0.00634160777553916,
0.0008072225609794259,
-0.03455270454287529,
0.0644603744149208,
-0.16716794669628143,
-0.16015739738941193,
0.14140215516090393,
-0.06745140254497528,
0.2779497504234314,
-0.05812826007604599,
-0.0809100940823555,
0.04766704887151718,
-0.03426874056458473,
0.1807648241519928,
-0.07756473124027252,
0.047254521399736404,
0.12766779959201813,
0.011127962730824947,
0.03121316432952881,
-0.3092964291572571,
0.11082969605922699,
-0.000795336440205574,
-0.006093299947679043,
-0.07581598311662674
] |
null | null | null |
## VQGAN-f16-16384
### Model Description
This is a PyTorch Lightning checkpoint of VQGAN, which learns a codebook of context-rich visual parts by combining convolutional methods and transformers. It was introduced in [Taming Transformers for High-Resolution Image Synthesis](https://compvis.github.io/taming-transformers/) ([CVPR paper](https://openaccess.thecvf.com/content/CVPR2021/html/Esser_Taming_Transformers_for_High-Resolution_Image_Synthesis_CVPR_2021_paper.html)).
The model allows the encoding of images as a fixed-length sequence of tokens taken from the codebook.
This version of the model uses a reduction factor `f=16` and a vocabulary of `16,384` tokens.
As an example of how the reduction factor works, images of size `256x256` are encoded to sequences of `256` tokens: `256/16 * 256/16`. Images of `512x512` would result in sequences of `1024` tokens.
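The same arithmetic as a small sketch:

```python
# Sketch of the reduction-factor arithmetic: each f x f patch of the input
# image corresponds to one codebook token in the encoded sequence.
def num_tokens(height: int, width: int, f: int = 16) -> int:
    return (height // f) * (width // f)

assert num_tokens(256, 256) == 256   # 16 * 16
assert num_tokens(512, 512) == 1024  # 32 * 32
```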
### Datasets Used for Training
* ImageNet. We didn't train this model from scratch. Instead, we started from [a checkpoint pre-trained on ImageNet](https://heibox.uni-heidelberg.de/d/a7530b09fed84f80a887/).
* [Conceptual Captions 3M](https://ai.google.com/research/ConceptualCaptions/) (CC3M).
* [OpenAI subset of YFCC100M](https://github.com/openai/CLIP/blob/main/data/yfcc100m.md).
We fine-tuned on CC3M and YFCC100M to improve the encoding quality of people and faces, which are not very well represented in ImageNet. We used a subset of 2,268,720 images from CC3M and YFCC100M for this purpose.
### Training Process
Finetuning was performed in PyTorch using [taming-transformers](https://github.com/CompVis/taming-transformers). The full training process and model preparation includes these steps:
* Pre-training on ImageNet. Previously performed. We used [this checkpoint](https://heibox.uni-heidelberg.de/d/a7530b09fed84f80a887).
* Fine-tuning, [Part 1](https://wandb.ai/wandb/hf-flax-dalle-mini/runs/2021-07-09T15-33-11_dalle_vqgan?workspace=user-borisd13).
* Fine-tuning, [Part 2](https://wandb.ai/wandb/hf-flax-dalle-mini/runs/2021-07-09T21-42-07_dalle_vqgan?workspace=user-borisd13) – continuation from Part 1. The final checkpoint has been logged as an artifact in the training run and is the model present in this card.
* Conversion to JAX as [`flax-community/vqgan_f16_16384`](https://huggingface.co/flax-community/vqgan_f16_16384).
### How to Use
The checkpoint can be loaded using PyTorch Lightning.
Note: `omegaconf==2.0.0` is required for loading the checkpoint.
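A minimal loading sketch, assuming [taming-transformers](https://github.com/CompVis/taming-transformers) is installed and that the configuration and checkpoint files of this model have been downloaded locally (the file names below are assumptions):

```python
# Hedged sketch: "config.yaml" / "model.ckpt" are placeholder file names, and
# VQModel comes from the taming-transformers codebase; adapt paths as needed.
import torch
from omegaconf import OmegaConf          # pin omegaconf==2.0.0 as noted above
from taming.models.vqgan import VQModel

config = OmegaConf.load("config.yaml")
model = VQModel(**config.model.params)

state_dict = torch.load("model.ckpt", map_location="cpu")["state_dict"]
model.load_state_dict(state_dict, strict=False)  # strict=False: ignore loss weights
model.eval()
```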
### Related Models in the Hub
* JAX version of VQGAN, trained on the same datasets described here: [`flax-community/vqgan_f16_16384`](https://huggingface.co/flax-community/vqgan_f16_16384).
* [DALL·E mini](https://huggingface.co/flax-community/dalle-mini), a Flax/JAX simplified implementation of OpenAI's DALL·E.
### Other
This model was successfully used as part of the implementation of [DALL·E mini](https://github.com/borisdayma/dalle-mini). Our [report](https://wandb.ai/dalle-mini/dalle-mini/reports/DALL-E-mini--Vmlldzo4NjIxODA) contains more details on how to leverage it in an image encoding / generation pipeline.
|
{}
| null |
boris/vqgan_f16_16384
|
[
"has_space",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#has_space #region-us
|
## VQGAN-f16-16384
### Model Description
This is a Pytorch Lightning checkpoint of VQGAN, which learns a codebook of context-rich visual parts by leveraging both the use of convolutional methods and transformers. It was introduced in Taming Transformers for High-Resolution Image Synthesis (CVPR paper).
The model allows the encoding of images as a fixed-length sequence of tokens taken from the codebook.
This version of the model uses a reduction factor 'f=16' and a vocabulary of '16,384' tokens.
As an example of how the reduction factor works, images of size '256x256' are encoded to sequences of '256' tokens: '256/16 * 256/16'. Images of '512x512' would result in sequences of '1024' tokens.
### Datasets Used for Training
* ImageNet. We didn't train this model from scratch. Instead, we started from a checkpoint pre-trained on ImageNet.
* Conceptual Captions 3M (CC3M).
* OpenAI subset of YFCC100M.
We fine-tuned on CC3M and YFCC100M to improve the encoding quality of people and faces, which are not very well represented in ImageNet. We used a subset of 2,268,720 images from CC3M and YFCC100M for this purpose.
### Training Process
Finetuning was performed in PyTorch using taming-transformers. The full training process and model preparation includes these steps:
* Pre-training on ImageNet. Previously performed. We used this checkpoint.
* Fine-tuning, Part 1.
* Fine-tuning, Part 2 – continuation from Part 1. The final checkpoint has been logged as an artifact in the training run and is the model present in this card.
* Conversion to JAX as 'flax-community/vqgan_f16_16384'.
### How to Use
The checkpoint can be loaded using Pytorch-Lightning.
Note: 'omegaconf==2.0.0' is required for loading the checkpoint.
### Related Models in the Hub
* JAX version of VQGAN, trained on the same datasets described here: 'flax-community/vqgan_f16_16384'.
* DALL·E mini, a Flax/JAX simplified implementation of OpenAI's DALL·E.
### Other
This model was successfully used as part of the implementation of DALL·E mini. Our report contains more details on how to leverage it in an image encoding / generation pipeline.
|
[
"## VQGAN-f16-16384",
"### Model Description\n\nThis is a Pytorch Lightning checkpoint of VQGAN, which learns a codebook of context-rich visual parts by leveraging both the use of convolutional methods and transformers. It was introduced in Taming Transformers for High-Resolution Image Synthesis (CVPR paper).\n\nThe model allows the encoding of images as a fixed-length sequence of tokens taken from the codebook.\n\nThis version of the model uses a reduction factor 'f=16' and a vocabulary of '13,384' tokens.\n\nAs an example of how the reduction factor works, images of size '256x256' are encoded to sequences of '256' tokens: '256/16 * 256/16'. Images of '512x512' would result in sequences of '1024' tokens.",
"### Datasets Used for Training\n\n* ImageNet. We didn't train this model from scratch. Instead, we started from a checkpoint pre-trained on ImageNet.\n* Conceptual Captions 3M (CC3M).\n* OpenAI subset of YFCC100M.\n\nWe fine-tuned on CC3M and YFCC100M to improve the encoding quality of people and faces, which are not very well represented in ImageNet. We used a subset of 2,268,720 images from CC3M and YFCC100M for this purpose.",
"### Training Process\n\nFinetuning was performed in PyTorch using taming-transformers. The full training process and model preparation includes these steps:\n\n* Pre-training on ImageNet. Previously performed. We used this checkpoint.\n* Fine-tuning, Part 1.\n* Fine-tuning, Part 2 – continuation from Part 1. The final checkpoint has been logged as an artifact in the training run and is the model present in this card.\n* Conversion to JAX as 'flax-community/vqgan_f16_16384'.",
"### How to Use\n\nThe checkpoint can be loaded using Pytorch-Lightning.\n\nNote: 'omegaconf==2.0.0' is required for loading the checkpoint.",
"### Related Models in the Hub\n\n* JAX version of VQGAN, trained on the same datasets described here: 'flax-community/vqgan_f16_16384'.\n* DALL·E mini, a Flax/JAX simplified implementation of OpenAI's DALL·E.",
"### Other\n\nThis model was successfully used as part of the implementation of DALL·E mini. Our report contains more details on how to leverage it in an image encoding / generation pipeline."
] |
[
"TAGS\n#has_space #region-us \n",
"## VQGAN-f16-16384",
"### Model Description\n\nThis is a Pytorch Lightning checkpoint of VQGAN, which learns a codebook of context-rich visual parts by leveraging both the use of convolutional methods and transformers. It was introduced in Taming Transformers for High-Resolution Image Synthesis (CVPR paper).\n\nThe model allows the encoding of images as a fixed-length sequence of tokens taken from the codebook.\n\nThis version of the model uses a reduction factor 'f=16' and a vocabulary of '13,384' tokens.\n\nAs an example of how the reduction factor works, images of size '256x256' are encoded to sequences of '256' tokens: '256/16 * 256/16'. Images of '512x512' would result in sequences of '1024' tokens.",
"### Datasets Used for Training\n\n* ImageNet. We didn't train this model from scratch. Instead, we started from a checkpoint pre-trained on ImageNet.\n* Conceptual Captions 3M (CC3M).\n* OpenAI subset of YFCC100M.\n\nWe fine-tuned on CC3M and YFCC100M to improve the encoding quality of people and faces, which are not very well represented in ImageNet. We used a subset of 2,268,720 images from CC3M and YFCC100M for this purpose.",
"### Training Process\n\nFinetuning was performed in PyTorch using taming-transformers. The full training process and model preparation includes these steps:\n\n* Pre-training on ImageNet. Previously performed. We used this checkpoint.\n* Fine-tuning, Part 1.\n* Fine-tuning, Part 2 – continuation from Part 1. The final checkpoint has been logged as an artifact in the training run and is the model present in this card.\n* Conversion to JAX as 'flax-community/vqgan_f16_16384'.",
"### How to Use\n\nThe checkpoint can be loaded using Pytorch-Lightning.\n\nNote: 'omegaconf==2.0.0' is required for loading the checkpoint.",
"### Related Models in the Hub\n\n* JAX version of VQGAN, trained on the same datasets described here: 'flax-community/vqgan_f16_16384'.\n* DALL·E mini, a Flax/JAX simplified implementation of OpenAI's DALL·E.",
"### Other\n\nThis model was successfully used as part of the implementation of DALL·E mini. Our report contains more details on how to leverage it in an image encoding / generation pipeline."
] |
[
10,
9,
191,
127,
124,
39,
74,
44
] |
[
"passage: TAGS\n#has_space #region-us \n## VQGAN-f16-16384### Model Description\n\nThis is a Pytorch Lightning checkpoint of VQGAN, which learns a codebook of context-rich visual parts by leveraging both the use of convolutional methods and transformers. It was introduced in Taming Transformers for High-Resolution Image Synthesis (CVPR paper).\n\nThe model allows the encoding of images as a fixed-length sequence of tokens taken from the codebook.\n\nThis version of the model uses a reduction factor 'f=16' and a vocabulary of '13,384' tokens.\n\nAs an example of how the reduction factor works, images of size '256x256' are encoded to sequences of '256' tokens: '256/16 * 256/16'. Images of '512x512' would result in sequences of '1024' tokens.### Datasets Used for Training\n\n* ImageNet. We didn't train this model from scratch. Instead, we started from a checkpoint pre-trained on ImageNet.\n* Conceptual Captions 3M (CC3M).\n* OpenAI subset of YFCC100M.\n\nWe fine-tuned on CC3M and YFCC100M to improve the encoding quality of people and faces, which are not very well represented in ImageNet. We used a subset of 2,268,720 images from CC3M and YFCC100M for this purpose.### Training Process\n\nFinetuning was performed in PyTorch using taming-transformers. The full training process and model preparation includes these steps:\n\n* Pre-training on ImageNet. Previously performed. We used this checkpoint.\n* Fine-tuning, Part 1.\n* Fine-tuning, Part 2 – continuation from Part 1. The final checkpoint has been logged as an artifact in the training run and is the model present in this card.\n* Conversion to JAX as 'flax-community/vqgan_f16_16384'.### How to Use\n\nThe checkpoint can be loaded using Pytorch-Lightning.\n\nNote: 'omegaconf==2.0.0' is required for loading the checkpoint."
] |
[
-0.05583365634083748,
0.1315830945968628,
-0.004181893542408943,
0.029261959716677666,
0.10841598361730576,
0.03488020598888397,
-0.0021499968133866787,
0.13964517414569855,
-0.08720846474170685,
0.021764392033219337,
0.021371036767959595,
-0.015912668779492378,
0.11447619646787643,
0.09244386106729507,
0.08455554395914078,
-0.1789151132106781,
0.020452357828617096,
-0.010418538935482502,
-0.05337843298912048,
0.08476968109607697,
0.10347790271043777,
-0.122037373483181,
0.04250490665435791,
-0.01524091325700283,
-0.14622049033641815,
0.02010427415370941,
-0.029470348730683327,
-0.004496536683291197,
0.12493107467889786,
0.044771708548069,
0.12683482468128204,
-0.015633471310138702,
0.03362993150949478,
-0.20753449201583862,
0.02192259207367897,
0.09069783985614777,
0.029817698523402214,
0.07542972266674042,
0.08604263514280319,
0.13942328095436096,
0.15644937753677368,
-0.07307156175374985,
0.02943997271358967,
0.03202337771654129,
-0.06513279676437378,
-0.11995753645896912,
-0.13651324808597565,
0.16706527769565582,
0.07357832044363022,
0.061456941068172455,
-0.007990649901330471,
0.04691564291715622,
-0.0138320317491889,
0.06326853483915329,
0.08234363049268723,
-0.18151602149009705,
-0.01377060730010271,
0.10602068901062012,
0.026259776204824448,
-0.002590376418083906,
-0.07648815959692001,
-0.011498977430164814,
0.009934114292263985,
0.018683550879359245,
0.11033784598112106,
-0.028843428939580917,
-0.08905552327632904,
-0.027784382924437523,
-0.10599283128976822,
-0.09799817204475403,
0.06634760648012161,
0.023680290207266808,
-0.09335354715585709,
-0.13965001702308655,
-0.045086249709129333,
-0.07734615355730057,
0.04785547032952309,
-0.09875773638486862,
-0.022599464282393456,
0.016788989305496216,
-0.0020922438707202673,
-0.14852865040302277,
-0.14540137350559235,
0.008137545548379421,
0.02587917633354664,
0.05810410529375076,
0.06423062086105347,
0.05951329693198204,
-0.022537166252732277,
0.15032601356506348,
-0.04091738909482956,
-0.022772861644625664,
-0.0499214231967926,
-0.06292615085840225,
-0.10587309300899506,
-0.041531290858983994,
-0.00878306943923235,
-0.08691435307264328,
-0.07245790958404541,
0.14230597019195557,
-0.1280490607023239,
0.045460183173418045,
0.022000666707754135,
0.03696223720908165,
-0.014725413173437119,
0.19999511539936066,
-0.027069469913840294,
-0.004469466395676136,
0.02980097383260727,
-0.0028043908532708883,
0.017679665237665176,
-0.035218652337789536,
-0.054148267954587936,
-0.0646839588880539,
0.11346664279699326,
0.06998651474714279,
-0.03312605246901512,
0.024117665365338326,
-0.023527691140770912,
-0.02887669950723648,
0.20670746266841888,
-0.0932130217552185,
0.054519783705472946,
0.012404236011207104,
-0.07483882457017899,
0.03135618939995766,
0.07237135618925095,
-0.027965771034359932,
-0.11825148016214371,
0.021493136882781982,
-0.009236510843038559,
-0.024190446361899376,
-0.11218374967575073,
-0.0692528635263443,
0.014203260652720928,
-0.07947062700986862,
-0.03898180276155472,
-0.1313336193561554,
-0.13941021263599396,
-0.028529684990644455,
0.03658529743552208,
0.010733842849731445,
-0.004243291448801756,
0.030332472175359726,
-0.07599085569381714,
-0.04074959456920624,
0.04022187367081642,
0.031608209013938904,
0.009758048690855503,
0.04560364410281181,
-0.06073075160384178,
0.03257172927260399,
-0.05341716855764389,
-0.010453696362674236,
-0.052879054099321365,
-0.000007789552000758704,
-0.15400457382202148,
0.06041404604911804,
0.02558145858347416,
-0.0589616559445858,
-0.04588256776332855,
-0.059568773955106735,
-0.05845768377184868,
-0.03258584439754486,
0.0545474998652935,
0.1143200695514679,
-0.13237741589546204,
-0.006043024826794863,
0.0695837065577507,
-0.09857048839330673,
-0.0029238355346024036,
0.09953810274600983,
-0.03608692064881325,
0.012489189393818378,
0.06318136304616928,
0.0455913282930851,
0.12588994204998016,
-0.13156738877296448,
-0.06309668719768524,
0.015350695699453354,
-0.0585348904132843,
0.0463181771337986,
0.015194490551948547,
-0.03184659779071808,
0.03492773324251175,
0.006606822833418846,
-0.07295442372560501,
-0.0018690143479034305,
-0.03281015530228615,
-0.06094885244965553,
-0.024938438087701797,
-0.04040181636810303,
0.019127173349261284,
0.007552007678896189,
-0.018487391993403435,
-0.013841071166098118,
-0.07461369782686234,
-0.12868942320346832,
0.1346055120229721,
-0.08693230152130127,
0.0745830088853836,
-0.07833195477724075,
0.08095462620258331,
-0.023374976590275764,
-0.008692000061273575,
-0.13272280991077423,
-0.03600170835852623,
0.08527398854494095,
-0.056015387177467346,
-0.024745134636759758,
-0.022645121440291405,
0.018778478726744652,
0.047140270471572876,
-0.03008691780269146,
-0.04765214025974274,
-0.13582590222358704,
-0.04824702814221382,
-0.018310831859707832,
-0.014585819095373154,
-0.1575746387243271,
-0.026866570115089417,
0.16541601717472076,
-0.1334368884563446,
0.016614452004432678,
0.06972486525774002,
0.08473196625709534,
0.05751378461718559,
-0.08206965774297714,
0.019830012694001198,
0.02349034510552883,
0.00900314375758171,
-0.09523186087608337,
0.011513336561620235,
0.06239913031458855,
-0.03497447818517685,
-0.0016148154390975833,
-0.09279502183198929,
-0.12254191935062408,
0.05506354570388794,
-0.00554047804325819,
-0.10976900905370712,
0.05066399276256561,
-0.036284275352954865,
-0.015769675374031067,
-0.10577282309532166,
-0.02461322583258152,
0.13486763834953308,
0.007335976231843233,
0.0919690877199173,
-0.04761277884244919,
-0.023760832846164703,
0.010657747276127338,
0.007763061672449112,
-0.011922918260097504,
-0.03184998407959938,
0.045927513390779495,
-0.03244561329483986,
0.00016858434537425637,
-0.004925723187625408,
-0.023598944768309593,
0.11223477125167847,
0.03461598604917526,
-0.07322365790605545,
-0.023326853290200233,
0.016472632065415382,
-0.01458954717963934,
0.14353519678115845,
0.023095428943634033,
0.00026256273849867284,
0.016539636999368668,
-0.029048731550574303,
0.06774268299341202,
-0.16208471357822418,
0.05959264189004898,
0.0652177482843399,
-0.0396699383854866,
0.09338678419589996,
0.0019976929761469364,
-0.026858456432819366,
0.025868983939290047,
0.03496261686086655,
0.05018765479326248,
0.030001966282725334,
-0.029146641492843628,
-0.1171698346734047,
0.15543889999389648,
-0.0890987366437912,
-0.3090970814228058,
-0.1754062920808792,
0.10812264680862427,
0.007885842584073544,
-0.011166015639901161,
-0.023630527779459953,
-0.03246602043509483,
-0.052388809621334076,
-0.08068151772022247,
-0.03281547129154205,
-0.04147478938102722,
-0.026501907035708427,
-0.024002136662602425,
-0.04140207916498184,
0.03995748981833458,
-0.09073933213949203,
0.026283487677574158,
0.02761898562312126,
-0.08321496844291687,
0.05739493668079376,
0.023687588050961494,
0.11403574794530869,
0.1405060589313507,
-0.09778641909360886,
0.013235792517662048,
-0.031757205724716187,
0.2072870284318924,
-0.10444310307502747,
0.13330145180225372,
0.11988278478384018,
-0.024585986509919167,
0.07089219987392426,
0.04974913224577904,
0.010256498120725155,
-0.0002680032339412719,
0.028193149715662003,
0.015962930396199226,
-0.045990440994501114,
-0.15525448322296143,
-0.022643715143203735,
-0.04325174540281296,
-0.0728616788983345,
0.05761682242155075,
0.04335479810833931,
0.09401801973581314,
0.06323549151420593,
-0.056307412683963776,
-0.021218083798885345,
0.028386684134602547,
0.09166364371776581,
-0.01341735478490591,
0.010081266053020954,
0.007269169203937054,
-0.06240180507302284,
0.010579204186797142,
0.07279550284147263,
0.09691546112298965,
0.12831640243530273,
-0.057732824236154556,
0.04114927724003792,
0.04599950462579727,
0.1728242188692093,
0.02341361530125141,
0.03923600912094116,
-0.05527272820472717,
0.006954352371394634,
0.0004583807021845132,
-0.04488265886902809,
-0.024478847160935402,
-0.006883788853883743,
0.06764562427997589,
0.023553436622023582,
-0.08351046591997147,
0.03455394133925438,
0.02455899491906166,
0.18742181360721588,
0.0299668125808239,
-0.13769225776195526,
-0.029718955978751183,
-0.01589963398873806,
0.020533068105578423,
-0.10310147702693939,
-0.01594618335366249,
0.1641172617673874,
-0.1497800350189209,
-0.0008752508438192308,
-0.04249131679534912,
0.06017838418483734,
-0.0904705673456192,
-0.031461868435144424,
0.06990034878253937,
0.04139429330825806,
0.02163405530154705,
0.05292147025465965,
-0.16242629289627075,
0.00890524685382843,
0.01065394189208746,
0.1747361123561859,
-0.04972929507493973,
0.04489269480109215,
0.016175545752048492,
0.06696885824203491,
0.10688049346208572,
0.011161676608026028,
-0.029198557138442993,
-0.11941537261009216,
0.027424003928899765,
0.023192211985588074,
0.09433088451623917,
0.03663868084549904,
0.10313285887241364,
-0.038224972784519196,
0.011661194264888763,
-0.021350566297769547,
0.05209159851074219,
-0.13475966453552246,
-0.14754489064216614,
0.007321344222873449,
0.014073995873332024,
0.03435385972261429,
-0.08374018967151642,
0.010808683931827545,
-0.06741214543581009,
0.13626806437969208,
-0.10844360291957855,
-0.06147640198469162,
-0.09710834175348282,
0.004054969642311335,
0.09541720896959305,
-0.05402228608727455,
0.06020643189549446,
0.020825756713747978,
0.1891370564699173,
-0.08654043078422546,
-0.07356452941894531,
-0.08606085181236267,
-0.06862013041973114,
-0.12982186675071716,
0.02374948002398014,
0.08983807265758514,
0.031808726489543915,
0.028139537200331688,
0.03503898158669472,
-0.009645553305745125,
-0.012056718580424786,
-0.0770266130566597,
0.0337425097823143,
0.0943404883146286,
-0.0027738246135413647,
0.07096302509307861,
-0.01736554317176342,
-0.08647435158491135,
-0.0960182473063469,
-0.029057415202260017,
0.10816842317581177,
0.12716010212898254,
-0.11726255714893341,
0.06605175882577896,
0.16492675244808197,
-0.11169270426034927,
-0.17856383323669434,
0.008895653299987316,
0.01757187210023403,
0.07252132892608643,
-0.04703284427523613,
-0.17904680967330933,
-0.008548547513782978,
0.033195484429597855,
-0.01599624752998352,
0.038204699754714966,
-0.3360172212123871,
-0.07143881916999817,
0.03318917751312256,
0.06141557916998863,
-0.028148887678980827,
-0.0855758935213089,
-0.006448888219892979,
0.012572686187922955,
-0.10365275293588638,
0.07463159412145615,
-0.024262631312012672,
0.06529365479946136,
-0.0042336247861385345,
0.0029400973580777645,
0.04783089458942413,
-0.05507596582174301,
0.09890783578157425,
-0.05242041125893593,
0.07687874138355255,
-0.018775396049022675,
-0.01387796550989151,
0.0628163293004036,
-0.06106735020875931,
0.10526460409164429,
0.10251382738351822,
0.0762304738163948,
-0.1126294881105423,
0.016894983127713203,
-0.08449193090200424,
0.0532151460647583,
-0.05994356423616409,
-0.05060037225484848,
-0.1113705039024353,
0.0739818885922432,
0.09100517630577087,
0.007577246055006981,
-0.056292206048965454,
-0.0028732926584780216,
0.07646258175373077,
0.1557755172252655,
-0.00037896132562309504,
-0.0268692709505558,
-0.14154626429080963,
0.005763320717960596,
-0.0208986084908247,
0.11975531280040741,
-0.01812349446117878,
0.026724861934781075,
0.08083230257034302,
0.01392225082963705,
0.1299208700656891,
0.02875526063144207,
-0.15556979179382324,
0.006433645263314247,
0.03715468570590019,
-0.15498937666416168,
-0.0248066708445549,
-0.026407085359096527,
0.02565903216600418,
-0.046067703515291214,
0.009657308459281921,
0.07357331365346909,
-0.08764880150556564,
-0.025047238916158676,
0.0027672438882291317,
0.0489158108830452,
-0.0342649482190609,
0.0822451263666153,
0.12285449355840683,
0.04272076487541199,
-0.03701919689774513,
0.14882199466228485,
0.08340112119913101,
-0.11723347753286362,
0.02100740373134613,
0.06791360676288605,
-0.0865895226597786,
-0.03769461810588837,
-0.016522323712706566,
0.0320616252720356,
-0.05574202910065651,
-0.1185021698474884,
0.0033868656028062105,
-0.06834512948989868,
0.021271714940667152,
0.011547678150236607,
0.028267180547118187,
0.10135969519615173,
-0.059333350509405136,
0.012369600124657154,
-0.1709476262331009,
0.05455853044986725,
-0.008401939645409584,
0.017635175958275795,
-0.1362719088792801,
0.18708844482898712,
0.04078476503491402,
0.07707677036523819,
7.705092457399587e-7,
-0.05622881278395653,
-0.02591615356504917,
0.026506971567869186,
0.03872806578874588,
0.0058425297029316425,
-0.011671517975628376,
-0.022658169269561768,
-0.007350550498813391,
0.06096380576491356,
0.027304543182253838,
0.06334786117076874,
-0.03455045074224472,
-0.0551566407084465,
-0.03602967783808708,
-0.03617479279637337,
-0.08692288398742676,
-0.01230070274323225,
0.0038808125536888838,
-0.08120410144329071,
0.07049349695444107,
-0.04936994984745979,
-0.0365777313709259,
-0.014073604717850685,
-0.037382032722234726,
-0.03293889760971069,
-0.0021818168461322784,
-0.010743686929345131,
-0.030431751161813736,
-0.0958193987607956,
-0.013520916923880577,
-0.020553523674607277,
-0.04561237990856171,
-0.020308423787355423,
0.057515766471624374,
-0.07212556153535843,
0.003467397764325142,
-0.05379828065633774,
0.03746512532234192,
-0.08558101952075958,
0.11912089586257935,
0.05446283891797066,
0.06886505335569382,
0.08175539970397949,
-0.05388691648840904,
0.04736762493848801,
-0.08980883657932281,
-0.026579612866044044,
0.0013324181782081723,
0.04504306614398956,
-0.08650165051221848,
0.001706487499177456,
0.054459672421216965,
-0.034615304321050644,
0.05008978396654129,
-0.03928423300385475,
0.0019696145318448544,
-0.009101968258619308,
-0.09500814974308014,
-0.1531822830438614,
0.003847826039418578,
0.1513514667749405,
-0.02991747297346592,
-0.06958427280187607,
0.07266230881214142,
0.0026192092336714268,
-0.025479957461357117,
0.10541006177663803,
0.14559221267700195,
0.07057502865791321,
0.07541229575872421,
0.11157765984535217,
-0.031634729355573654,
-0.04326285794377327,
0.0086650550365448,
0.01600109599530697,
-0.044796187430620193,
0.04760953411459923,
-0.05879542976617813,
0.07367870956659317,
0.16313022375106812,
-0.16635261476039886,
0.09327112883329391,
0.04257792606949806,
-0.07760482281446457,
-0.05557848513126373,
-0.12052863091230392,
-0.022426662966609,
-0.009430065751075745,
-0.006556761916726828,
-0.09467840939760208,
0.044283851981163025,
0.07837608456611633,
0.008079418912529945,
-0.05957658216357231,
0.1410638689994812,
-0.04876627400517464,
-0.0867030993103981,
0.08586375415325165,
0.03307434916496277,
0.03591787815093994,
0.02632208913564682,
0.03151167184114456,
0.05972127616405487,
0.04529934749007225,
0.12689487636089325,
0.0215518306940794,
0.10102497786283493,
0.04986604303121567,
0.03198771923780441,
-0.023512089625000954,
0.014837038703262806,
-0.020145447924733162,
0.10210798680782318,
0.12308545410633087,
0.013653997331857681,
-0.012426807545125484,
-0.029349151998758316,
0.14875543117523193,
-0.07046647369861603,
-0.09426965564489365,
-0.11825206130743027,
0.13301339745521545,
0.058055579662323,
-0.028965596109628677,
0.04516538232564926,
-0.11756101995706558,
0.011057449504733086,
0.19100841879844666,
0.11697466671466827,
0.015422500669956207,
-0.01285227108746767,
0.03748844191431999,
0.00009482264431426302,
-0.04252316430211067,
0.17000262439250946,
0.03466980904340744,
0.17735061049461365,
-0.04697525128722191,
0.10881616175174713,
-0.05178259313106537,
-0.01709379255771637,
-0.03623136132955551,
0.12263401597738266,
-0.05892591178417206,
0.005436157342046499,
-0.06482099741697311,
0.03305330500006676,
-0.01862550526857376,
-0.275473028421402,
0.10894190520048141,
-0.06175830215215683,
-0.04566096514463425,
0.0463099367916584,
0.039279550313949585,
-0.027863116934895515,
0.07299739122390747,
-0.034936923533678055,
0.03649665042757988,
0.1942155510187149,
0.036000002175569534,
-0.04457264393568039,
-0.04782926291227341,
0.033686112612485886,
-0.04656260833144188,
0.21926607191562653,
0.035520583391189575,
0.11423544585704803,
0.05263668671250343,
0.010521801188588142,
-0.13682179152965546,
0.0009396024979650974,
0.024132831022143364,
0.009064787067472935,
-0.017134493216872215,
0.15419353544712067,
0.0369221530854702,
0.034524064511060715,
0.02132810652256012,
-0.061933305114507675,
0.02339102514088154,
-0.034524425864219666,
0.011083113960921764,
-0.1054455116391182,
0.0048422375693917274,
-0.06651458889245987,
0.15064984560012817,
0.1327269971370697,
-0.005667447112500668,
0.009081456810235977,
-0.05985499918460846,
-0.01242082379758358,
0.0397460013628006,
0.10838699340820312,
0.010221918113529682,
-0.06193853169679642,
0.04542949050664902,
-0.16002202033996582,
0.03537442535161972,
-0.12262716889381409,
-0.048759013414382935,
0.049371011555194855,
-0.05661538988351822,
-0.009308185428380966,
0.11756247282028198,
-0.006495737936347723,
0.050719253718853,
-0.024303119629621506,
0.03371386602520943,
-0.030824342742562294,
0.08455602824687958,
-0.13157223165035248,
-0.046699292957782745
] |
null | null |
transformers
|
# Wav2Vec2-Large-XLSR-53-English
Fine-tuned [facebook/wav2vec2-large-xlsr-53](https://huggingface.co/facebook/wav2vec2-large-xlsr-53) on {language} using the [Common Voice](https://huggingface.co/datasets/common_voice).
When using this model, make sure that your speech input is sampled at 16kHz.
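If your own recordings are not already sampled at 16 kHz, they can be resampled first. A minimal sketch with `torchaudio` (the file name is a placeholder, not part of the original card):
```python
import torchaudio

speech_array, sampling_rate = torchaudio.load("my_recording.wav")  # placeholder path
if sampling_rate != 16_000:
    # Resample to the 16 kHz rate the model expects
    speech_array = torchaudio.transforms.Resample(sampling_rate, 16_000)(speech_array)
```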
## Usage
The model can be used directly (without a language model) as follows:
```python
import torch
import torchaudio
from datasets import load_dataset
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
test_dataset = load_dataset("common_voice", "{lang_id}", split="test[:2%]") #TODO: replace {lang_id} in your language code here. Make sure the code is one of the *ISO codes* of [this](https://huggingface.co/languages) site.
processor = Wav2Vec2Processor.from_pretrained("{model_id}") #TODO: replace {model_id} with your model id. The model id consists of {your_username}/{your_modelname}, *e.g.* `elgeish/wav2vec2-large-xlsr-53-arabic`
model = Wav2Vec2ForCTC.from_pretrained("{model_id}") #TODO: replace {model_id} with your model id. The model id consists of {your_username}/{your_modelname}, *e.g.* `elgeish/wav2vec2-large-xlsr-53-arabic`
resampler = torchaudio.transforms.Resample(48_000, 16_000)
# Preprocessing the datasets.
# We need to read the audio files as arrays
def speech_file_to_array_fn(batch):
    speech_array, sampling_rate = torchaudio.load(batch["path"])
    batch["speech"] = resampler(speech_array).squeeze().numpy()
    return batch
test_dataset = test_dataset.map(speech_file_to_array_fn)
inputs = processor(test_dataset[:2]["speech"], sampling_rate=16_000, return_tensors="pt", padding=True)
with torch.no_grad():
    logits = model(inputs.input_values, attention_mask=inputs.attention_mask).logits
predicted_ids = torch.argmax(logits, dim=-1)
print("Prediction:", processor.batch_decode(predicted_ids))
print("Reference:", test_dataset[:2]["sentence"])
```
## Evaluation
The model can be evaluated as follows on the {language} test data of Common Voice. # TODO: replace #TODO: replace language with your {language}, *e.g.* French
```python
import torch
import torchaudio
from datasets import load_dataset, load_metric
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
import re
test_dataset = load_dataset("common_voice", "{lang_id}", split="test") #TODO: replace {lang_id} in your language code here. Make sure the code is one of the *ISO codes* of [this](https://huggingface.co/languages) site.
wer = load_metric("wer")
processor = Wav2Vec2Processor.from_pretrained("{model_id}") #TODO: replace {model_id} with your model id. The model id consists of {your_username}/{your_modelname}, *e.g.* `elgeish/wav2vec2-large-xlsr-53-arabic`
model = Wav2Vec2ForCTC.from_pretrained("{model_id}") #TODO: replace {model_id} with your model id. The model id consists of {your_username}/{your_modelname}, *e.g.* `elgeish/wav2vec2-large-xlsr-53-arabic`
model.to("cuda")
chars_to_ignore_regex = '[\\,\\?\\.\\!\\-\\;\\:\\"\\“]' # TODO: adapt this list to include all special characters you removed from the data
resampler = torchaudio.transforms.Resample(48_000, 16_000)
# Preprocessing the datasets.
# We need to read the audio files as arrays and normalize the transcripts
def speech_file_to_array_fn(batch):
    batch["sentence"] = re.sub(chars_to_ignore_regex, '', batch["sentence"]).lower()
    speech_array, sampling_rate = torchaudio.load(batch["path"])
    batch["speech"] = resampler(speech_array).squeeze().numpy()
    return batch
test_dataset = test_dataset.map(speech_file_to_array_fn)
# Run batched inference and decode the predictions
def evaluate(batch):
    inputs = processor(batch["speech"], sampling_rate=16_000, return_tensors="pt", padding=True)
    with torch.no_grad():
        logits = model(inputs.input_values.to("cuda"), attention_mask=inputs.attention_mask.to("cuda")).logits
    pred_ids = torch.argmax(logits, dim=-1)
    batch["pred_strings"] = processor.batch_decode(pred_ids)
    return batch
result = test_dataset.map(evaluate, batched=True, batch_size=8)
print("WER: {:2f}".format(100 * wer.compute(predictions=result["pred_strings"], references=result["sentence"])))
```
**Test Result**: XX.XX % # TODO: write output of print here. IMPORTANT: Please remember to also replace {wer_result_on_test} at the top of with this value here. tags.
## Training
The Common Voice `train`, `validation`, and ... datasets were used for training as well as ... and ... # TODO: adapt to state all the datasets that were used for training.
The script used for training can be found [here](...) # TODO: fill in a link to your training script here. If you trained your model in a colab, simply fill in the link here. If you trained the model locally, it would be great if you could upload the training script on github and paste the link here.
|
{"language": "en", "license": "apache-2.0", "tags": ["audio", "automatic-speech-recognition", "speech"], "datasets": ["common_voice"], "metrics": ["wer"], "model-index": [{"name": "English XLSR Wav2Vec2 Large 53 with punctuation", "results": [{"task": {"type": "automatic-speech-recognition", "name": "Speech Recognition"}, "dataset": {"name": "Common Voice en", "type": "common_voice", "args": "en"}, "metrics": [{"type": "wer", "value": 1.0, "name": "Test WER"}]}]}]}
|
automatic-speech-recognition
|
boris/xlsr-en-punctuation
|
[
"transformers",
"pytorch",
"jax",
"wav2vec2",
"automatic-speech-recognition",
"audio",
"speech",
"en",
"dataset:common_voice",
"license:apache-2.0",
"model-index",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #en #dataset-common_voice #license-apache-2.0 #model-index #endpoints_compatible #region-us
|
# Wav2Vec2-Large-XLSR-53-English
Fine-tuned facebook/wav2vec2-large-xlsr-53 on {language} using the Common Voice.
When using this model, make sure that your speech input is sampled at 16kHz.
## Usage
The model can be used directly (without a language model) as follows:
## Evaluation
The model can be evaluated as follows on the {language} test data of Common Voice. # TODO: replace #TODO: replace language with your {language}, *e.g.* French
Test Result: XX.XX % # TODO: write output of print here. IMPORTANT: Please remember to also replace {wer_result_on_test} at the top of with this value here. tags.
## Training
The Common Voice 'train', 'validation', and ... datasets were used for training as well as ... and ... # TODO: adapt to state all the datasets that were used for training.
The script used for training can be found here # TODO: fill in a link to your training script here. If you trained your model in a colab, simply fill in the link here. If you trained the model locally, it would be great if you could upload the training script on github and paste the link here.
|
[
"# Wav2Vec2-Large-XLSR-53-English\n\nFine-tuned facebook/wav2vec2-large-xlsr-53 on {language} using the Common Voice.\nWhen using this model, make sure that your speech input is sampled at 16kHz.",
"## Usage\n\nThe model can be used directly (without a language model) as follows:",
"## Evaluation\n\nThe model can be evaluated as follows on the {language} test data of Common Voice. # TODO: replace #TODO: replace language with your {language}, *e.g.* French\n\n\n\n\nTest Result: XX.XX % # TODO: write output of print here. IMPORTANT: Please remember to also replace {wer_result_on_test} at the top of with this value here. tags.",
"## Training\n\nThe Common Voice 'train', 'validation', and ... datasets were used for training as well as ... and ... # TODO: adapt to state all the datasets that were used for training.\n\nThe script used for training can be found here # TODO: fill in a link to your training script here. If you trained your model in a colab, simply fill in the link here. If you trained the model locally, it would be great if you could upload the training script on github and paste the link here."
] |
[
"TAGS\n#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #en #dataset-common_voice #license-apache-2.0 #model-index #endpoints_compatible #region-us \n",
"# Wav2Vec2-Large-XLSR-53-English\n\nFine-tuned facebook/wav2vec2-large-xlsr-53 on {language} using the Common Voice.\nWhen using this model, make sure that your speech input is sampled at 16kHz.",
"## Usage\n\nThe model can be used directly (without a language model) as follows:",
"## Evaluation\n\nThe model can be evaluated as follows on the {language} test data of Common Voice. # TODO: replace #TODO: replace language with your {language}, *e.g.* French\n\n\n\n\nTest Result: XX.XX % # TODO: write output of print here. IMPORTANT: Please remember to also replace {wer_result_on_test} at the top of with this value here. tags.",
"## Training\n\nThe Common Voice 'train', 'validation', and ... datasets were used for training as well as ... and ... # TODO: adapt to state all the datasets that were used for training.\n\nThe script used for training can be found here # TODO: fill in a link to your training script here. If you trained your model in a colab, simply fill in the link here. If you trained the model locally, it would be great if you could upload the training script on github and paste the link here."
] |
[
69,
63,
20,
93,
119
] |
[
"passage: TAGS\n#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #en #dataset-common_voice #license-apache-2.0 #model-index #endpoints_compatible #region-us \n# Wav2Vec2-Large-XLSR-53-English\n\nFine-tuned facebook/wav2vec2-large-xlsr-53 on {language} using the Common Voice.\nWhen using this model, make sure that your speech input is sampled at 16kHz.## Usage\n\nThe model can be used directly (without a language model) as follows:## Evaluation\n\nThe model can be evaluated as follows on the {language} test data of Common Voice. # TODO: replace #TODO: replace language with your {language}, *e.g.* French\n\n\n\n\nTest Result: XX.XX % # TODO: write output of print here. IMPORTANT: Please remember to also replace {wer_result_on_test} at the top of with this value here. tags.## Training\n\nThe Common Voice 'train', 'validation', and ... datasets were used for training as well as ... and ... # TODO: adapt to state all the datasets that were used for training.\n\nThe script used for training can be found here # TODO: fill in a link to your training script here. If you trained your model in a colab, simply fill in the link here. If you trained the model locally, it would be great if you could upload the training script on github and paste the link here."
] |
[
-0.1338631808757782,
0.06016654521226883,
-0.003338686190545559,
0.024069542065262794,
0.1778871715068817,
0.023345284163951874,
0.14219439029693604,
0.11384625732898712,
-0.0195166002959013,
-0.007859094068408012,
0.004313408397138119,
0.056415993720293045,
0.05965455621480942,
0.1707592010498047,
0.0632992535829544,
-0.20052912831306458,
-0.00031146768014878035,
-0.005260311532765627,
0.024231666699051857,
0.0646129697561264,
0.11088370531797409,
-0.0693771168589592,
0.04800547659397125,
0.033246058970689774,
-0.10936615616083145,
0.03116638958454132,
0.02723282389342785,
-0.05599995702505112,
0.14065739512443542,
0.06504259258508682,
0.10541728883981705,
0.03139776736497879,
0.050924237817525864,
-0.20622417330741882,
0.04232455790042877,
0.0863141268491745,
-0.028392251580953598,
0.05192050337791443,
0.07660015672445297,
-0.06644506752490997,
-0.0038537452928721905,
-0.04013887047767639,
-0.03182018920779228,
0.0694456398487091,
-0.09382696449756622,
-0.1497824341058731,
-0.05896168202161789,
0.016402574256062508,
0.06826791167259216,
0.1113986223936081,
-0.024399779736995697,
0.06607631593942642,
0.014365936629474163,
0.06351297348737717,
0.15814952552318573,
-0.10894393920898438,
0.024077869951725006,
0.1253703236579895,
0.07252416014671326,
0.05689430609345436,
-0.10466364026069641,
0.01933622919023037,
-0.0018845910672098398,
-0.00855093915015459,
0.02863951586186886,
-0.04193514585494995,
-0.050401486456394196,
0.014486867003142834,
-0.10114819556474686,
-0.028384879231452942,
0.19107294082641602,
-0.026040198281407356,
-0.0671544149518013,
-0.11683129519224167,
-0.03293018043041229,
-0.013380206190049648,
-0.04468895122408867,
-0.10872162878513336,
0.04133889451622963,
0.08002875000238419,
-0.005814810749143362,
-0.18100664019584656,
-0.11973899602890015,
-0.061361163854599,
0.002567061223089695,
-0.04885511472821236,
0.020146546885371208,
-0.03541795164346695,
-0.1435215175151825,
0.14601215720176697,
-0.1317739188671112,
-0.02662571892142296,
-0.023904690518975258,
-0.04031720757484436,
-0.07963141053915024,
-0.06894347816705704,
-0.08004461228847504,
-0.1608121246099472,
0.011361440643668175,
0.02106955274939537,
0.009302270598709583,
0.004878594074398279,
-0.12957903742790222,
0.0770701915025711,
0.010292514227330685,
0.13369101285934448,
-0.08960828930139542,
-0.0408392958343029,
0.047306548804044724,
0.023093147203326225,
-0.08021840453147888,
0.0021244711242616177,
-0.057415418326854706,
0.0010726181790232658,
0.04264353960752487,
0.08063125610351562,
0.06117337942123413,
0.014254042878746986,
-0.0625755712389946,
-0.041695304214954376,
0.08363712579011917,
-0.12579141557216644,
0.02782861515879631,
0.02110472321510315,
-0.023527083918452263,
0.07885053008794785,
0.12424436211585999,
0.018269626423716545,
-0.10681728273630142,
0.0001405904913553968,
-0.027475891634821892,
0.05712166801095009,
-0.072491854429245,
-0.08131478726863861,
0.011102473363280296,
0.008165739476680756,
-0.01172218844294548,
-0.06822515279054642,
-0.2149057686328888,
-0.054738592356443405,
0.018963927403092384,
0.0021341517567634583,
0.05232236534357071,
-0.04027853161096573,
-0.05008343979716301,
-0.04243246093392372,
0.024636801332235336,
0.0053061372600495815,
-0.021361850202083588,
0.03235090523958206,
-0.03395910933613777,
0.004899789113551378,
-0.008358662948012352,
0.07296504825353622,
-0.06073234975337982,
-0.04120052605867386,
-0.09187252819538116,
0.13129112124443054,
0.012171201407909393,
-0.10575314611196518,
-0.12713930010795593,
-0.07325687259435654,
-0.11412140727043152,
0.05553077533841133,
0.0516158863902092,
0.1554364114999771,
-0.24090386927127838,
-0.052833814173936844,
0.24308809638023376,
-0.1085701733827591,
0.001266045612283051,
0.14018744230270386,
-0.057119954377412796,
0.19424401223659515,
0.11819995939731598,
0.09091473370790482,
0.09623491764068604,
-0.18154208362102509,
0.0037139009218662977,
-0.03469008952379227,
-0.11587797850370407,
0.044771723449230194,
0.0398576445877552,
-0.028394529595971107,
0.03704654425382614,
-0.006487451959401369,
-0.07983327656984329,
-0.02170480042695999,
-0.023238960653543472,
-0.045664940029382706,
-0.009478510357439518,
-0.007988997735083103,
0.0413268581032753,
0.016424309462308884,
0.004079390317201614,
0.03919088467955589,
-0.07713848352432251,
0.12804779410362244,
0.09128186851739883,
-0.09364280104637146,
0.08304857462644577,
-0.06915738433599472,
0.04950294643640518,
-0.03568794205784798,
0.024159740656614304,
-0.13530272245407104,
0.011258436366915703,
0.005610268097370863,
0.04464350640773773,
0.028073826804757118,
0.1674373745918274,
0.03864869475364685,
0.05294939503073692,
-0.07143307477235794,
-0.010153792798519135,
-0.055702030658721924,
-0.03879477083683014,
-0.05721832066774368,
-0.10657300800085068,
-0.02773991785943508,
-0.07658979296684265,
0.16912780702114105,
-0.22229284048080444,
0.053502392023801804,
0.059016965329647064,
-0.016531597822904587,
0.022526239976286888,
-0.027130667120218277,
0.023559780791401863,
0.003785589011386037,
-0.02599596418440342,
-0.04202531650662422,
0.02045268379151821,
0.02993074432015419,
-0.05845973268151283,
0.08921445906162262,
-0.17775794863700867,
-0.11175496131181717,
0.12701725959777832,
0.0005821731756441295,
-0.0681610256433487,
-0.051670949906110764,
-0.0037830376531928778,
-0.003172487486153841,
-0.0910099670290947,
-0.06286657601594925,
0.18365536630153656,
0.024829527363181114,
0.11230600625276566,
-0.05437226966023445,
0.008487370796501637,
-0.03172888234257698,
-0.014267448335886002,
0.021737461909651756,
-0.0008153491071425378,
-0.011449508368968964,
0.007402820512652397,
0.040751855820417404,
-0.05228734761476517,
-0.034912362694740295,
0.20388048887252808,
0.05299289897084236,
-0.09372302889823914,
-0.013044878840446472,
-0.0015151809202507138,
0.03379407525062561,
0.04031379893422127,
-0.05051576346158981,
0.04608529433608055,
0.049907829612493515,
0.051754727959632874,
0.06367791444063187,
-0.11717629432678223,
0.027227051556110382,
0.03995099291205406,
-0.0967426672577858,
-0.10090526193380356,
0.017213990911841393,
0.016158511862158775,
0.051279593259096146,
-0.08793371915817261,
0.12275340408086777,
0.01633390225470066,
-0.010930188931524754,
-0.1361679881811142,
0.13437433540821075,
-0.0931503027677536,
-0.1967693269252777,
-0.19232670962810516,
0.05050642043352127,
0.006329006981104612,
0.06086580827832222,
0.10337144136428833,
-0.07470918446779251,
-0.0041045029647648335,
-0.03444951772689819,
0.08906067907810211,
0.008673264645040035,
-0.08030728250741959,
-0.07499255985021591,
0.01782146841287613,
0.013617735356092453,
-0.14838702976703644,
0.02141396328806877,
0.02860405668616295,
-0.10235641896724701,
0.009339720010757446,
0.044933296740055084,
0.03972901403903961,
0.08123063296079636,
-0.019020194187760353,
-0.018457641825079918,
-0.008921360597014427,
0.1430416852235794,
-0.11248219758272171,
0.07853540033102036,
0.16067251563072205,
0.002027431270107627,
0.048267919570207596,
0.03836569935083389,
0.00018154217104893178,
-0.0510847382247448,
-0.01629774458706379,
0.04179353639483452,
-0.04840843752026558,
-0.2279376983642578,
-0.06250128895044327,
-0.05357382446527481,
-0.029226627200841904,
0.0386476069688797,
0.02053758129477501,
0.024041036143898964,
0.048544712364673615,
-0.06249028444290161,
-0.0596981979906559,
0.06025072559714317,
0.0630401074886322,
-0.026910539716482162,
-0.009950713254511356,
0.08117005228996277,
-0.029345298185944557,
0.008622136898338795,
0.08214856684207916,
0.08291712403297424,
0.0943794772028923,
-0.023515546694397926,
0.09738551080226898,
0.10350441932678223,
0.011953144334256649,
0.003853443544358015,
0.11590903252363205,
-0.014276961795985699,
0.012976286932826042,
-0.030457286164164543,
-0.021802186965942383,
0.02052035741508007,
0.04437156021595001,
0.05228333920240402,
-0.07160119712352753,
-0.06323889642953873,
0.01270415261387825,
0.07640505582094193,
0.09402821213006973,
-0.017873544245958328,
-0.23997098207473755,
-0.06614899635314941,
-0.011425902135670185,
-0.021313488483428955,
-0.03859395533800125,
-0.05654255673289299,
0.12889041006565094,
-0.17025655508041382,
0.009412553161382675,
-0.04397319257259369,
0.06269901245832443,
0.0031861152965575457,
0.01213881652802229,
0.0023545261938124895,
0.05038918927311897,
-0.015610563568770885,
0.09221730381250381,
-0.20606966316699982,
0.1487119346857071,
0.019731273874640465,
0.08778805285692215,
-0.06599058955907822,
0.02078435756266117,
0.0486668162047863,
0.08580788224935532,
0.1251797080039978,
-0.002644097898155451,
-0.024162212386727333,
-0.08467034250497818,
-0.008585182949900627,
0.035877835005521774,
0.021692954003810883,
-0.04178311675786972,
0.08098599314689636,
-0.025277040898799896,
-0.013441947288811207,
0.010911433957517147,
0.004182600881904364,
-0.13149744272232056,
-0.15063488483428955,
0.0719183012843132,
0.0009988559177145362,
0.20243975520133972,
-0.014613043516874313,
-0.0414520800113678,
-0.014681526459753513,
0.1581733524799347,
-0.07645163685083389,
-0.011219312436878681,
-0.1143665686249733,
0.0573013611137867,
0.12184199690818787,
-0.08280839025974274,
0.0007687418255954981,
0.0866277664899826,
0.14475250244140625,
-0.06949526816606522,
-0.014021211303770542,
0.012168316170573235,
-0.13399945199489594,
-0.0913185253739357,
-0.005203324370086193,
0.06521990895271301,
0.10375107824802399,
0.04361988231539726,
0.061027348041534424,
-0.01202809251844883,
-0.009638205170631409,
-0.0827861800789833,
0.013816934078931808,
0.09481784701347351,
0.054478831589221954,
0.020566843450069427,
-0.1545139104127884,
-0.09661288559436798,
-0.11874336004257202,
-0.013266417197883129,
0.18505914509296417,
0.14895819127559662,
-0.047400400042533875,
0.09978479146957397,
0.1277059018611908,
-0.15119217336177826,
-0.1625906080007553,
0.024561535567045212,
-0.01443941704928875,
0.02167254500091076,
0.052549347281455994,
-0.2378310263156891,
0.06828123331069946,
0.04697439819574356,
0.023950524628162384,
-0.050399307161569595,
-0.27891039848327637,
-0.12430107593536377,
0.03978189080953598,
0.0033856022637337446,
0.10270655900239944,
-0.12794393301010132,
0.007409384939819574,
-0.05292045325040817,
0.02252473682165146,
0.07123269885778427,
-0.19605369865894318,
0.11595801264047623,
0.04303085058927536,
0.020106999203562737,
0.026949292048811913,
-0.04464811831712723,
0.08591443300247192,
0.04050339758396149,
0.04607880115509033,
0.008820803835988045,
0.08208492398262024,
0.0726969763636589,
-0.037172287702560425,
0.1501074731349945,
0.033682722598314285,
0.016673047095537186,
-0.0639597550034523,
-0.06431731581687927,
-0.06440386176109314,
0.10094340145587921,
-0.0020269707310944796,
-0.030884098261594772,
0.033684082329273224,
0.042423129081726074,
0.10020305961370468,
-0.02265622466802597,
-0.18736644089221954,
-0.08530241996049881,
0.020601266995072365,
0.10801143944263458,
0.1009288877248764,
0.021147333085536957,
-0.11653867363929749,
0.029394567012786865,
0.01602611504495144,
0.07250571250915527,
-0.05449914187192917,
0.09820421040058136,
0.03099890798330307,
-0.013120683841407299,
0.1630643606185913,
0.038536831736564636,
-0.07925141602754593,
0.04202538728713989,
0.04020298272371292,
-0.06793771684169769,
-0.16756942868232727,
-0.036314599215984344,
-0.008634600788354874,
-0.08560992777347565,
-0.0400984026491642,
0.1324322521686554,
-0.03370567038655281,
-0.043741375207901,
-0.0380915105342865,
0.08364058285951614,
-0.07788879424333572,
0.13265284895896912,
-0.00896652415394783,
0.03383179381489754,
-0.07638701051473618,
0.05685504898428917,
0.029990190640091896,
-0.1418118178844452,
0.07772932201623917,
-0.0004807582008652389,
-0.045677997171878815,
-0.08217120915651321,
0.005922195501625538,
0.09147140383720398,
-0.005543099716305733,
-0.0852401852607727,
-0.06116332486271858,
0.0385073646903038,
-0.01547374576330185,
0.04098917171359062,
0.00044752354733645916,
0.0013932998990640044,
-0.03137525916099548,
-0.024197667837142944,
-0.18297743797302246,
0.03186758607625961,
0.17088663578033447,
0.0029142487328499556,
-0.026410361751914024,
0.17849218845367432,
0.02459174580872059,
0.03149208426475525,
-0.014911429025232792,
-0.01930125802755356,
-0.04910185933113098,
0.008624833077192307,
-0.006197817623615265,
0.007233588024973869,
-0.05272241681814194,
-0.026114199310541153,
-0.009808000177145004,
-0.022292740643024445,
0.018841762095689774,
0.05154813453555107,
-0.08769726008176804,
-0.035828281193971634,
-0.034945882856845856,
0.09989697486162186,
-0.1378116011619568,
0.040543295443058014,
0.08672110736370087,
-0.060878776013851166,
0.08428099751472473,
0.0825236365199089,
-0.09023720026016235,
0.061285536736249924,
-0.1415514498949051,
-0.05874408781528473,
0.010601007379591465,
-0.00769947562366724,
-0.017717571929097176,
-0.1632838249206543,
0.05135205760598183,
0.009642530232667923,
0.04655810818076134,
-0.02110428363084793,
0.1606317013502121,
-0.09904837608337402,
0.0073149376548826694,
-0.011935683898627758,
-0.0015112035907804966,
-0.04743015766143799,
0.0672493726015091,
0.07394743710756302,
0.03344935178756714,
0.14753399789333344,
-0.08262959867715836,
0.07963617891073227,
-0.1372608244419098,
0.012927140109241009,
-0.03148727864027023,
-0.0213474128395319,
0.014863848686218262,
-0.10255121439695358,
0.04537466540932655,
-0.02487580105662346,
0.09167969226837158,
-0.029916763305664062,
0.04608329012989998,
0.0164029523730278,
-0.07074712961912155,
-0.06058637425303459,
-0.0171675905585289,
0.0975080206990242,
-0.002317846519872546,
0.007068508304655552,
-0.036534398794174194,
0.09745366871356964,
0.07130157947540283,
0.11321769654750824,
0.14221154153347015,
0.21524637937545776,
0.005459978245198727,
0.08116958290338516,
-0.033626969903707504,
-0.08734460175037384,
-0.03536398336291313,
-0.011622059158980846,
-0.0793786495923996,
-0.0209693294018507,
-0.06943774968385696,
0.015988361090421677,
0.14073501527309418,
-0.13785195350646973,
0.15312807261943817,
0.04217715188860893,
-0.08631858229637146,
-0.10978008061647415,
-0.06561753153800964,
-0.02973385527729988,
-0.11099354177713394,
-0.00881189201027155,
-0.1053566113114357,
0.05868492275476456,
0.0858745351433754,
0.030828090384602547,
-0.040288206189870834,
0.16870424151420593,
-0.041075289249420166,
-0.11548977345228195,
-0.023042596876621246,
-0.06442530453205109,
0.02732781693339348,
0.00032345144427381456,
-0.005685584153980017,
0.075642891228199,
0.04433080181479454,
0.10125147551298141,
0.04265119880437851,
0.10957904905080795,
0.07321252673864365,
-0.1501196026802063,
-0.045370396226644516,
-0.026021700352430344,
0.011546440422534943,
0.047087162733078,
0.13031575083732605,
0.07477152347564697,
-0.0909721776843071,
0.02524719387292862,
0.19752991199493408,
-0.01547201257199049,
-0.16307754814624786,
-0.19833415746688843,
0.16243848204612732,
0.0989021435379982,
-0.0394020602107048,
-0.07107426971197128,
-0.09660009294748306,
0.03802824392914772,
0.22986136376857758,
0.21141989529132843,
0.02664818987250328,
0.009155355393886566,
0.02817314863204956,
0.007583439350128174,
0.0535021536052227,
-0.009963222779333591,
0.06358681619167328,
0.03837740048766136,
-0.05862803012132645,
0.09959188848733902,
-0.01817356050014496,
-0.038289617747068405,
-0.0240163654088974,
0.14738814532756805,
-0.0711435005068779,
-0.05088487267494202,
0.03348477929830551,
0.11183715611696243,
-0.0322895273566246,
-0.1825907826423645,
-0.09796784073114395,
-0.034215591847896576,
-0.05881005525588989,
-0.018515707924962044,
-0.033805884420871735,
0.013252169825136662,
0.08288242667913437,
0.005165105685591698,
-0.011498752050101757,
0.1811266392469406,
0.015175214037299156,
-0.05493874475359917,
-0.03991100192070007,
0.03813289478421211,
-0.11849336326122284,
0.1547258347272873,
-0.031320832669734955,
0.017123233526945114,
0.0634557455778122,
0.000024420231056865305,
-0.05858651548624039,
0.039900343865156174,
-0.010334877297282219,
0.028987616300582886,
0.05216626450419426,
0.12028598040342331,
-0.049844611436128616,
-0.012342077679932117,
0.003402137430384755,
-0.14604778587818146,
0.03978433459997177,
-0.1488829404115677,
-0.03983060270547867,
-0.059531886130571365,
0.041097693145275116,
-0.08697590231895447,
0.11933261156082153,
0.1482078731060028,
-0.026451097801327705,
0.03168901801109314,
-0.08520840108394623,
0.06347429752349854,
0.027875298634171486,
0.03444083407521248,
-0.05340278148651123,
-0.20470668375492096,
0.004526898730546236,
-0.007638168521225452,
0.04749992489814758,
-0.21223866939544678,
-0.03745405375957489,
0.1175929456949234,
-0.06393164396286011,
-0.018194595351815224,
0.10036322474479675,
0.09743377566337585,
0.012697900645434856,
-0.02489517629146576,
-0.05161362513899803,
0.01857619360089302,
0.10787727683782578,
-0.13868729770183563,
-0.06568900495767593
] |
null | null |
transformers
|
For studying only
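A minimal usage sketch with the `transformers` pipeline API (the example sentence and the label names are illustrative; they are not documented in this card):
```python
from transformers import pipeline

# Load the fine-tuned DistilBERT text-classification model
classifier = pipeline("text-classification", model="bowipawan/bert-sentimental")

print(classifier("I really enjoyed studying with this model!"))
# -> a list like [{'label': ..., 'score': ...}]
```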
|
{}
|
text-classification
|
bowipawan/bert-sentimental
|
[
"transformers",
"pytorch",
"tensorboard",
"distilbert",
"text-classification",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #distilbert #text-classification #autotrain_compatible #endpoints_compatible #region-us
|
For studying only
|
[] |
[
"TAGS\n#transformers #pytorch #tensorboard #distilbert #text-classification #autotrain_compatible #endpoints_compatible #region-us \n"
] |
[
42
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #distilbert #text-classification #autotrain_compatible #endpoints_compatible #region-us \n"
] |
[
-0.04603995382785797,
0.07697968930006027,
-0.007587090600281954,
0.040334295481443405,
0.1894233375787735,
0.03266342729330063,
0.10487726330757141,
0.12383108586072922,
0.07114987820386887,
-0.00525201391428709,
0.10253765434026718,
0.25097358226776123,
-0.02987755462527275,
0.053772926330566406,
-0.1199457049369812,
-0.2987544536590576,
0.02529042959213257,
0.08571648597717285,
-0.030483629554510117,
0.09768591076135635,
0.06876937299966812,
-0.10465067625045776,
0.0528610460460186,
-0.02778947539627552,
-0.1437736451625824,
0.03635691478848457,
0.03931419178843498,
-0.1251041740179062,
0.0920763611793518,
0.054787419736385345,
0.1859414130449295,
0.04978036880493164,
-0.05764761567115784,
-0.11654829233884811,
0.04489264637231827,
0.01485765352845192,
-0.09254759550094604,
0.06704971194267273,
0.08834836632013321,
-0.12994645535945892,
0.022403404116630554,
0.0009764776332303882,
0.02570222318172455,
0.03161700814962387,
-0.1174834743142128,
-0.043175891041755676,
-0.005370291881263256,
0.022237000986933708,
0.05965010076761246,
0.041221290826797485,
-0.0005730330594815314,
0.13452528417110443,
-0.09795165807008743,
0.13178953528404236,
0.07975141704082489,
-0.3102419078350067,
-0.0316278412938118,
0.12225354462862015,
0.027336133643984795,
0.044738732278347015,
-0.05966753512620926,
0.04469624161720276,
0.019826875999569893,
0.011544160544872284,
0.03513888269662857,
-0.07321186363697052,
-0.10471564531326294,
0.02097994275391102,
-0.09265647083520889,
0.0033277259208261967,
0.14798091351985931,
-0.0682767704129219,
0.08570198714733124,
-0.057520680129528046,
-0.10869370400905609,
-0.043445881456136703,
-0.03323303535580635,
0.015165060758590698,
-0.05078959837555885,
0.057163260877132416,
0.026306524872779846,
-0.03208002820611,
-0.12777327001094818,
0.044014688581228256,
-0.22997713088989258,
0.19326399266719818,
0.010304112918674946,
0.031071512028574944,
-0.16978037357330322,
0.04729142412543297,
0.05112743750214577,
-0.10878967493772507,
0.07722025364637375,
-0.07739537209272385,
0.005033629946410656,
-0.04434605687856674,
-0.05033571645617485,
-0.1646299511194229,
0.09229962527751923,
0.0792732834815979,
0.02271258644759655,
0.07589870691299438,
-0.02943352423608303,
0.10195162892341614,
0.03502525016665459,
0.13418622314929962,
0.01367510948330164,
-0.015007480047643185,
0.03770652413368225,
-0.10332653671503067,
0.02381240203976631,
-0.08328819274902344,
-0.17091533541679382,
-0.00980136077851057,
0.07454106956720352,
0.05742347612977028,
0.010912112891674042,
0.09158407896757126,
-0.0660814717411995,
-0.0429697148501873,
0.06734535843133926,
-0.09236454963684082,
0.0387350432574749,
0.023506851866841316,
0.02882397174835205,
0.1148211658000946,
-0.0014555156230926514,
-0.013479024171829224,
-0.0804947167634964,
0.1312321424484253,
-0.05420195683836937,
0.018140261992812157,
-0.040765196084976196,
-0.09668736159801483,
0.033556222915649414,
-0.1942308247089386,
0.012832476757466793,
-0.14952345192432404,
-0.09085899591445923,
0.0031279376707971096,
0.028533341363072395,
-0.01735086180269718,
-0.0026152029167860746,
-0.02203414961695671,
-0.0102163627743721,
0.05283864587545395,
-0.03304903954267502,
-0.0676739290356636,
-0.05507177487015724,
0.08837705850601196,
-0.052004240453243256,
0.08552996814250946,
-0.12362656742334366,
0.06806599348783493,
-0.07196059077978134,
-0.0067867920733988285,
-0.15163005888462067,
0.058627646416425705,
-0.06155307963490486,
0.1464628130197525,
-0.001813486567698419,
-0.045914359390735626,
-0.08455463498830795,
0.0498102530837059,
-0.03738473728299141,
0.13493332266807556,
-0.13282020390033722,
-0.11931726336479187,
0.18522660434246063,
-0.0935104712843895,
-0.10909486562013626,
0.10003595799207687,
-0.019867058843374252,
0.002699471078813076,
0.08324342221021652,
0.21328146755695343,
0.12464241683483124,
0.0028257453814148903,
0.05430715158581734,
0.13919903337955475,
-0.08823887258768082,
-0.11714000254869461,
-0.03825186565518379,
0.0018066264456138015,
-0.05023888871073723,
0.052827876061201096,
0.11148220300674438,
0.07636700570583344,
-0.042702414095401764,
-0.046748045831918716,
-0.016987726092338562,
-0.007444145157933235,
0.14808255434036255,
0.08780794590711594,
0.1379544585943222,
-0.06564826518297195,
-0.006117431912571192,
0.0587441623210907,
-0.02647368237376213,
-0.026576140895485878,
0.023803580552339554,
-0.07718691974878311,
0.1339673101902008,
-0.013797886669635773,
0.014891697093844414,
-0.24588166177272797,
-0.09897125512361526,
-0.015116585418581963,
0.10073293745517731,
-0.005431900266557932,
0.13364331424236298,
0.07663562893867493,
-0.045877762138843536,
-0.014807133935391903,
-0.0037892337422817945,
0.19071373343467712,
0.02251225896179676,
-0.08754529803991318,
-0.10135912895202637,
0.07217859476804733,
-0.09146692603826523,
-0.0014686144422739744,
-0.10566286742687225,
0.036679767072200775,
0.10934353619813919,
0.11581508070230484,
0.0322575680911541,
0.059172991663217545,
-0.009365053847432137,
0.06432660669088364,
-0.07833180576562881,
0.009577843360602856,
0.1176786720752716,
-0.01058810856193304,
-0.10429687052965164,
0.1302568018436432,
-0.1527077704668045,
0.25838297605514526,
0.20033486187458038,
-0.2754853367805481,
0.0020080499816685915,
-0.04659823700785637,
-0.005882642697542906,
0.018667304888367653,
0.0350269190967083,
0.025228438898921013,
0.09880781173706055,
0.0010991080198436975,
0.190837100148201,
-0.021153589710593224,
-0.04234985634684563,
-0.00043466492206789553,
-0.0410488024353981,
-0.05431409552693367,
0.09751458466053009,
0.09040956944227219,
-0.1704697161912918,
0.18294359743595123,
0.20096078515052795,
-0.031497180461883545,
0.19861248135566711,
-0.019592585042119026,
0.032567303627729416,
0.0825900211930275,
-0.017592541873455048,
-0.006372673902660608,
-0.04826289042830467,
-0.2143700271844864,
-0.04401913285255432,
0.06353070586919785,
-0.007641746196895838,
0.07189679145812988,
-0.13307592272758484,
-0.015508023090660572,
-0.016467437148094177,
0.019635312259197235,
0.031465865671634674,
0.08101508021354675,
0.06822001188993454,
0.1023208275437355,
-0.031828563660383224,
-0.08330725878477097,
0.09931344538927078,
-0.0012868402991443872,
-0.06186167150735855,
0.19159837067127228,
-0.1355380415916443,
-0.33784791827201843,
-0.14171135425567627,
-0.15382839739322662,
-0.03614667057991028,
0.042498957365751266,
0.0781187042593956,
-0.10310425609350204,
-0.032773856073617935,
0.01770966500043869,
-0.009754807688295841,
-0.022896232083439827,
0.05171317607164383,
-0.05437587574124336,
0.06398747116327286,
-0.05511936545372009,
-0.07351033389568329,
-0.0546439103782177,
-0.05728188902139664,
-0.004270676523447037,
0.1619870811700821,
-0.09769860655069351,
0.06409615278244019,
0.2201099991798401,
-0.007712316233664751,
0.05556498467922211,
-0.04209320247173309,
0.12120041996240616,
-0.08795541524887085,
0.02059486322104931,
0.15421971678733826,
-0.07923649996519089,
0.08098173141479492,
0.13726887106895447,
0.04435084015130997,
-0.06202536076307297,
0.00956772267818451,
-0.010630836710333824,
-0.107969731092453,
-0.2322322577238083,
-0.1380920112133026,
-0.12242541462182999,
0.05669332668185234,
0.05641760677099228,
0.0779140442609787,
0.12269847095012665,
0.08977797627449036,
0.040568213909864426,
0.013465355150401592,
0.01096833311021328,
0.04571719095110893,
0.2334052324295044,
0.0031826819758862257,
0.15645180642604828,
-0.06712072342634201,
-0.14530925452709198,
0.0757266953587532,
0.038224246352910995,
0.13289396464824677,
0.06512083858251572,
0.07363896071910858,
0.012872171588242054,
0.06836868077516556,
0.15091189742088318,
0.10524623095989227,
0.010815367102622986,
-0.025864502415060997,
-0.005371313542127609,
-0.010978900827467442,
-0.0350341759622097,
0.002318707061931491,
0.10046376287937164,
-0.13667164742946625,
-0.03537127375602722,
-0.07865475863218307,
0.09840776771306992,
0.08403969556093216,
0.03882572427392006,
-0.21059665083885193,
0.010571003891527653,
0.08295454829931259,
-0.007700029294937849,
-0.07526291906833649,
0.05221300944685936,
0.009621120989322662,
-0.09180407226085663,
0.09416726231575012,
-0.07643487304449081,
0.10554104298353195,
-0.11090786755084991,
0.06416766345500946,
-0.03688103333115578,
-0.07657108455896378,
0.026301974430680275,
0.08470986783504486,
-0.2604331076145172,
0.205475315451622,
0.016697736456990242,
-0.048316534608602524,
-0.08488874137401581,
-0.00650798762217164,
0.0435372032225132,
0.181710347533226,
0.07844818383455276,
-0.015856770798563957,
-0.023163115605711937,
-0.13565456867218018,
-0.03051353059709072,
-0.003760268446058035,
0.11238259822130203,
-0.01428043283522129,
-0.028583984822034836,
-0.016433127224445343,
-0.028177322819828987,
-0.00570681830868125,
-0.09235639125108719,
0.03589004650712013,
-0.19368532299995422,
0.06611783802509308,
0.03342404589056969,
-0.06428017467260361,
0.02925647422671318,
-0.08004195988178253,
-0.16517730057239532,
0.2313268929719925,
-0.13491466641426086,
-0.09082598984241486,
-0.11070311814546585,
-0.0496392622590065,
0.02026003785431385,
-0.08725357800722122,
0.04926200583577156,
-0.07609765976667404,
0.026254817843437195,
-0.07180263847112656,
-0.2214146852493286,
0.1389671266078949,
-0.08847879618406296,
-0.025432957336306572,
-0.06931548565626144,
0.14377693831920624,
-0.06066567450761795,
0.017467113211750984,
0.021506069228053093,
0.014022707007825375,
-0.06208350881934166,
-0.08238857239484787,
0.004993600770831108,
0.023671936243772507,
0.05603156238794327,
0.035453055053949356,
-0.08977041393518448,
-0.08057613670825958,
-0.011197812855243683,
0.044776834547519684,
0.2891542613506317,
0.12209925800561905,
-0.08025669306516647,
0.14290151000022888,
0.10878642648458481,
-0.07144255936145782,
-0.3543127179145813,
-0.04164673388004303,
-0.07900619506835938,
-0.03392558917403221,
-0.020990785211324692,
-0.1692517101764679,
0.14245586097240448,
-0.0000615489188930951,
-0.01895914226770401,
0.1114436611533165,
-0.21510793268680573,
-0.1049436405301094,
0.1900956928730011,
0.02373528480529785,
0.31844425201416016,
-0.1309761106967926,
-0.10668004304170609,
-0.02388131059706211,
-0.06360536813735962,
0.1516016721725464,
-0.05354338139295578,
0.09137018769979477,
-0.009240174666047096,
0.033372484147548676,
0.04592502489686012,
-0.055624183267354965,
0.086124949157238,
0.015534227713942528,
0.019867250695824623,
-0.10128539800643921,
-0.10478194057941437,
0.05314861610531807,
-0.01335966307669878,
-0.019335314631462097,
-0.030992336571216583,
-0.0025771053042262793,
-0.13294681906700134,
-0.03887597471475601,
-0.05905978009104729,
0.08181441575288773,
0.04122334346175194,
-0.04715103656053543,
-0.002014993457123637,
-0.016191530972719193,
-0.012539757415652275,
0.00776049867272377,
0.2978050112724304,
-0.025419797748327255,
0.15354785323143005,
0.1475812941789627,
0.14917775988578796,
-0.1373811811208725,
0.0012880538124591112,
-0.069809190928936,
-0.04924085736274719,
0.0654655247926712,
-0.08163250237703323,
0.06121481955051422,
0.1405683159828186,
-0.04176536947488785,
0.06739624589681625,
0.11167377233505249,
0.039750371128320694,
-0.015671327710151672,
0.1537807136774063,
-0.22622083127498627,
-0.03459077328443527,
-0.028011951595544815,
-0.07797582447528839,
0.05419991910457611,
0.09948994964361191,
0.14474579691886902,
0.058124326169490814,
-0.02056090347468853,
0.027291104197502136,
-0.002854485297575593,
0.02246136963367462,
0.08833222091197968,
0.07260780036449432,
0.03273298218846321,
-0.1296292394399643,
0.05610274150967598,
0.057791970670223236,
-0.1971728354692459,
-0.007592620328068733,
0.12753580510616302,
-0.13765358924865723,
-0.1395956575870514,
-0.00368200964294374,
0.15480469167232513,
-0.0719805359840393,
-0.040744781494140625,
-0.08620481938123703,
-0.12317974865436554,
0.05295936390757561,
0.22048485279083252,
0.11812905967235565,
0.07950117439031601,
-0.07506711035966873,
-0.03918588161468506,
-0.015959450975060463,
0.026107730343937874,
0.009056173264980316,
0.03585343435406685,
-0.12321169674396515,
0.03902330994606018,
-0.013275094330310822,
0.13959747552871704,
-0.10191387683153152,
-0.06404422968626022,
-0.16382648050785065,
0.02242553047835827,
-0.09004931151866913,
-0.005799910984933376,
-0.08917959779500961,
-0.02318430133163929,
0.0034408504143357277,
-0.033649854362010956,
-0.03768184408545494,
-0.06568079441785812,
-0.10806655883789062,
0.04703846201300621,
-0.017591189593076706,
0.03405676782131195,
-0.08813277631998062,
-0.05076688155531883,
0.05857892706990242,
-0.030288219451904297,
0.11159545183181763,
0.07041904330253601,
-0.0770534873008728,
0.08952080458402634,
-0.15135589241981506,
-0.09286046773195267,
0.14862670004367828,
0.03597563877701759,
0.07924872636795044,
0.09267039597034454,
0.029397539794445038,
0.059773869812488556,
0.024467892944812775,
0.07071136683225632,
0.0755324587225914,
-0.10952910035848618,
0.08470501005649567,
-0.08085161447525024,
-0.15890535712242126,
-0.06223023310303688,
0.0013675987720489502,
0.08564738929271698,
0.018606536090373993,
0.15478956699371338,
-0.07301109284162521,
0.08197207748889923,
-0.06174205616116524,
0.01433352567255497,
-0.0041921003721654415,
-0.19598278403282166,
-0.017797021195292473,
-0.057849686592817307,
0.02677738107740879,
-0.026435470208525658,
0.21437565982341766,
0.07230960577726364,
0.007385936565697193,
0.040313705801963806,
0.06414308398962021,
-0.014836613088846207,
0.03390687331557274,
0.16330793499946594,
0.07705213129520416,
-0.05681150406599045,
-0.07325239479541779,
0.06300541758537292,
0.037690501660108566,
0.04070398584008217,
0.15186378359794617,
0.06068604812026024,
-0.09053897857666016,
0.09453897923231125,
0.0013086919207125902,
0.0515473410487175,
-0.1233317106962204,
-0.06034238263964653,
-0.07271169871091843,
0.09952261298894882,
0.022567152976989746,
0.05051688477396965,
0.0887046679854393,
-0.023983240127563477,
0.04772427678108215,
-0.037624310702085495,
-0.08153237402439117,
-0.17393338680267334,
-0.14927510917186737,
-0.0936640277504921,
-0.06960950046777725,
0.0035707163624465466,
-0.07159373164176941,
-0.0395137183368206,
0.04780955985188484,
0.059553198516368866,
-0.05583415925502777,
0.10025706142187119,
0.03625905141234398,
-0.022445766255259514,
0.08314794301986694,
-0.007469092961400747,
0.014862235635519028,
-0.021018408238887787,
-0.019656401127576828,
-0.14086902141571045,
0.01936039887368679,
-0.05302619934082031,
0.03316029906272888,
-0.03159722685813904,
0.011576766148209572,
-0.13239584863185883,
-0.12850627303123474,
-0.021544622257351875,
0.054541267454624176,
-0.037967029958963394,
0.10874872654676437,
0.019593678414821625,
-0.00928666815161705,
0.034212660044431686,
0.16547484695911407,
-0.05578825622797012,
-0.019702430814504623,
-0.042297352105379105,
0.1864418089389801,
0.056279901415109634,
0.1041661724448204,
0.0004502236843109131,
-0.02590130642056465,
-0.07041119039058685,
0.2917492985725403,
0.3067082166671753,
-0.07154642790555954,
0.06031757965683937,
0.023385006934404373,
0.02764515019953251,
0.11660000681877136,
0.13777925074100494,
0.07038599252700806,
0.24802185595035553,
-0.06222294643521309,
-0.08442019671201706,
-0.028010902926325798,
-0.025048203766345978,
-0.12891323864459991,
0.07728605717420578,
0.07329729199409485,
-0.033493250608444214,
-0.062004923820495605,
0.10077860951423645,
-0.20854933559894562,
0.08674542605876923,
0.027777856215834618,
-0.2377578467130661,
-0.09061086922883987,
-0.026773691177368164,
0.15642909705638885,
-0.02893924154341221,
0.07409939914941788,
-0.01334049366414547,
-0.10492754727602005,
-0.006130533292889595,
0.01244515459984541,
-0.21407517790794373,
0.061376385390758514,
0.033911217004060745,
-0.07640945166349411,
0.007194093894213438,
-0.03237824887037277,
0.027679091319441795,
0.08744186162948608,
0.08077127486467361,
0.003640656592324376,
0.028805220499634743,
-0.001016925903968513,
-0.03201787918806076,
0.025657953694462776,
0.02615967206656933,
0.00916578434407711,
-0.11191057413816452,
0.08908887207508087,
-0.11946779489517212,
0.05242010951042175,
-0.1150560975074768,
-0.06555624306201935,
-0.020165501162409782,
0.043921004980802536,
-0.055444177240133286,
0.04351620748639107,
0.09324859827756882,
0.03184846043586731,
-0.02103528380393982,
-0.04087841138243675,
-0.051752422004938126,
0.00030713106389157474,
-0.09480879455804825,
-0.14768947660923004,
-0.10398419201374054,
-0.0784272626042366,
0.04616319760680199,
0.007821858860552311,
-0.17432796955108643,
-0.01485002413392067,
-0.10754987597465515,
0.04383577033877373,
-0.18084506690502167,
0.07133903354406357,
0.07244544476270676,
0.021627036854624748,
-0.020868869498372078,
-0.0057632508687675,
0.035843897610902786,
0.05677977204322815,
-0.14626255631446838,
-0.07018790394067764
] |
null | null |
transformers
|
# Gollum DialoGPT Model
|
{"tags": ["conversational"]}
|
text-generation
|
boydster/DialoGPT-small-gollum
|
[
"transformers",
"pytorch",
"gpt2",
"text-generation",
"conversational",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
# Gollum DialoGPT Model
|
[
"# Gollum DialoGPT Model"
] |
[
"TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"# Gollum DialoGPT Model"
] |
[
51,
8
] |
[
"passage: TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n# Gollum DialoGPT Model"
] |
[
-0.012526074424386024,
0.1149032935500145,
-0.008385661989450455,
0.041397809982299805,
0.1502750962972641,
0.0036365611013025045,
0.14726245403289795,
0.09611695259809494,
-0.014671461656689644,
-0.05605223774909973,
0.10034424066543579,
0.17997969686985016,
-0.01177162490785122,
0.02499980293214321,
-0.02713111788034439,
-0.3573357164859772,
0.023700471967458725,
0.02858486957848072,
-0.028103269636631012,
0.11602627485990524,
0.06460091471672058,
-0.029133403673768044,
0.07186181843280792,
-0.018184583634138107,
-0.11743087321519852,
-0.0567978173494339,
-0.02806934528052807,
-0.09019920229911804,
0.11231956630945206,
0.04756329581141472,
-0.012174496427178383,
0.0010775570990517735,
-0.08146855235099792,
-0.09631888568401337,
0.05401870980858803,
-0.011720811016857624,
-0.027887776494026184,
0.04680857062339783,
0.00601454870775342,
-0.10514496266841888,
0.1808440089225769,
0.11369244754314423,
-0.008160966448485851,
0.0342753529548645,
-0.15039630234241486,
-0.008506599813699722,
0.01260096114128828,
0.029430653899908066,
0.007344071753323078,
0.08682063221931458,
-0.03829862177371979,
0.061501212418079376,
-0.08140166103839874,
0.10509943217039108,
0.11109578609466553,
-0.34288933873176575,
-0.032735198736190796,
0.10289376229047775,
0.06900815665721893,
0.1137971431016922,
-0.0702221542596817,
0.05687800422310829,
0.012268596328794956,
0.0017406316474080086,
-0.03063780628144741,
-0.07589078694581985,
-0.13735848665237427,
0.018290258944034576,
-0.13601364195346832,
0.001993943238630891,
0.21720634400844574,
-0.07366344332695007,
0.030456187203526497,
-0.07870537787675858,
-0.09233268350362778,
-0.02423950657248497,
-0.018401626497507095,
-0.043175894767045975,
-0.07074711471796036,
0.0708921030163765,
0.026672162115573883,
-0.15132775902748108,
-0.12912121415138245,
-0.013075310736894608,
-0.19157545268535614,
0.16599862277507782,
0.046842921525239944,
0.03638753294944763,
-0.22140713036060333,
0.09841940551996231,
-0.034176528453826904,
-0.08764811605215073,
0.0006483443430624902,
-0.07400181889533997,
0.004508524667471647,
0.010100207291543484,
-0.05628484860062599,
-0.019152909517288208,
0.09436662495136261,
0.12810656428337097,
0.013537411577999592,
0.020855337381362915,
-0.05744402855634689,
0.07136359065771103,
0.07832860946655273,
0.062888965010643,
-0.022549638524651527,
-0.09184227138757706,
0.040393613278865814,
-0.10187225043773651,
0.038810547441244125,
-0.0796651840209961,
-0.1549370288848877,
-0.021886751055717468,
0.07152114063501358,
0.03743433207273483,
0.051369283348321915,
0.11123844981193542,
0.01903221569955349,
-0.07747279107570648,
0.005103451665490866,
-0.0033922947477549314,
-0.03154252469539642,
0.016092222183942795,
-0.014921065419912338,
0.11049431562423706,
0.0213482603430748,
0.025139939039945602,
-0.16141203045845032,
0.039749860763549805,
-0.03532103821635246,
0.047297898679971695,
0.002593305893242359,
-0.02969660796225071,
0.018650460988283157,
-0.05479593947529793,
0.006562295835465193,
-0.13682369887828827,
-0.17663198709487915,
0.030754024162888527,
-0.0039349705912172794,
-0.0604507215321064,
-0.11396448314189911,
-0.07198906689882278,
-0.009272114373743534,
0.010533655993640423,
-0.07776249945163727,
-0.047332197427749634,
-0.05759071186184883,
0.09326374530792236,
-0.044959764927625656,
0.09479708224534988,
-0.05760639160871506,
0.07437922805547714,
-0.10181121528148651,
-0.04339086264371872,
-0.06821111589670181,
0.08783742785453796,
0.02137349732220173,
0.05016754940152168,
-0.017203379422426224,
-0.02956545166671276,
-0.08418558537960052,
0.035193294286727905,
-0.032419007271528244,
0.2031196653842926,
-0.06681371480226517,
-0.12608911097049713,
0.26194480061531067,
-0.056664034724235535,
-0.07873779535293579,
0.16049019992351532,
0.001072790939360857,
0.051233746111392975,
0.1260775923728943,
0.18061856925487518,
0.051721904426813126,
-0.0021575158461928368,
0.1068023070693016,
0.04726388305425644,
-0.10147377103567123,
-0.006031163968145847,
0.0036510915961116552,
-0.006563856266438961,
-0.0425071120262146,
0.053112298250198364,
0.11619414389133453,
0.07100798934698105,
-0.04686084762215614,
0.0049198102205991745,
0.0006474658730439842,
-0.003399080131202936,
0.11174888163805008,
-0.04025135561823845,
0.13175147771835327,
-0.052371785044670105,
-0.053749509155750275,
-0.03516053408384323,
-0.0043165553361177444,
0.010662289336323738,
0.05189640447497368,
-0.0837373435497284,
0.1175876185297966,
-0.0718197152018547,
0.08653147518634796,
-0.14808854460716248,
0.005813865456730127,
-0.031179985031485558,
0.18812628090381622,
0.13454782962799072,
0.07041478902101517,
0.07915826141834259,
-0.025164347141981125,
-0.03891594335436821,
0.07708228379487991,
0.15476340055465698,
-0.03455730527639389,
-0.070279560983181,
-0.11231943964958191,
0.11198274046182632,
-0.06586869806051254,
0.10359392315149307,
-0.030542626976966858,
0.018514618277549744,
-0.005242443177849054,
0.12824222445487976,
-0.028587762266397476,
0.007652474567294121,
0.0300135537981987,
-0.02071155607700348,
-0.05347413197159767,
0.0168618131428957,
0.09770280867815018,
0.021234693005681038,
-0.09019161015748978,
0.22669459879398346,
-0.16548869013786316,
0.1625361293554306,
0.20450687408447266,
-0.22581656277179718,
0.004007627721875906,
-0.12264052033424377,
-0.006811207626014948,
0.016698051244020462,
0.10515855997800827,
0.015709906816482544,
0.2604137063026428,
-0.004135191906243563,
0.16484905779361725,
-0.01693212054669857,
-0.05218900367617607,
-0.02962682396173477,
-0.07423342764377594,
0.006316685117781162,
0.09958911687135696,
0.11842884868383408,
-0.16122427582740784,
0.1298534870147705,
0.0991363525390625,
0.003963806200772524,
0.24037478864192963,
0.07165631651878357,
0.020756714046001434,
0.002515098312869668,
0.016919009387493134,
-0.05258788913488388,
-0.09167802333831787,
-0.3149415850639343,
0.004427328240126371,
0.06544245034456253,
0.06643474102020264,
0.12148471176624298,
-0.12295231968164444,
-0.03134412318468094,
-0.012978731654584408,
-0.0006204381352290511,
0.056868355721235275,
0.1247185692191124,
0.020012037828564644,
0.12894690036773682,
-0.04108737036585808,
-0.02377849631011486,
0.06214747577905655,
0.03044438362121582,
-0.06893236935138702,
0.18003037571907043,
-0.14438341557979584,
-0.28476256132125854,
-0.08656033873558044,
-0.21740971505641937,
-0.10958965867757797,
0.04718048498034477,
0.10811489075422287,
-0.13888727128505707,
0.013043084181845188,
0.007133237086236477,
0.09333908557891846,
-0.11257372796535492,
0.01782073825597763,
-0.03079824149608612,
-0.04046687111258507,
-0.13597652316093445,
-0.07320493459701538,
-0.03737140819430351,
-0.07183123379945755,
-0.07671615481376648,
0.1630122810602188,
-0.11167141795158386,
-0.027345526963472366,
0.23053120076656342,
0.07581395655870438,
0.038339145481586456,
-0.02287594974040985,
0.2112147957086563,
-0.07792548835277557,
-0.011173450388014317,
0.23128071427345276,
-0.009268357418477535,
0.08223949372768402,
0.11031761020421982,
-0.006986065302044153,
-0.07948926836252213,
0.02365201525390148,
0.007061803247779608,
-0.08866259455680847,
-0.20205439627170563,
-0.09827244281768799,
-0.14193783700466156,
0.08196613937616348,
0.0020609856583178043,
0.07210517674684525,
0.1814550906419754,
0.061112985014915466,
-0.03244529291987419,
-0.02489723637700081,
0.09381521493196487,
0.08152150362730026,
0.257086843252182,
-0.10750318318605423,
0.12279250472784042,
0.009579562582075596,
-0.1547715812921524,
0.05982604995369911,
0.10176900774240494,
0.10341839492321014,
0.04952362924814224,
0.036154113709926605,
-0.00549432123079896,
0.009435906074941158,
0.11518837511539459,
0.03325992822647095,
0.03364500775933266,
-0.018451837822794914,
-0.05078878998756409,
-0.03635456785559654,
-0.028511079028248787,
0.04757564142346382,
0.10036135464906693,
-0.15874101221561432,
-0.008092115633189678,
-0.019175058230757713,
0.05764807015657425,
0.03875856101512909,
0.03343456611037254,
-0.16028955578804016,
-0.009965331293642521,
0.08801189064979553,
-0.01399786677211523,
-0.11848177015781403,
0.09011945128440857,
0.006227628793567419,
-0.11017905920743942,
0.054157279431819916,
-0.016451461240649223,
0.10192891955375671,
-0.08764835447072983,
0.07827781140804291,
-0.14784786105155945,
-0.07400034368038177,
-0.007213719189167023,
0.08921432495117188,
-0.280111700296402,
0.14175638556480408,
-0.024545438587665558,
-0.04409385100007057,
-0.07706955820322037,
-0.007006516680121422,
0.04732021316885948,
0.04106786847114563,
0.08973963558673859,
0.0019645406864583492,
0.05132684111595154,
0.019647542387247086,
-0.04623014107346535,
-0.005927620455622673,
0.11985255777835846,
-0.024085182696580887,
-0.00518929585814476,
-0.03548610955476761,
0.0072584436275064945,
-0.04242294654250145,
-0.09809786081314087,
0.041737817227840424,
-0.14618606865406036,
0.12118081003427505,
0.10228990018367767,
0.06630176305770874,
0.029621729627251625,
-0.04365745931863785,
-0.06638775020837784,
0.2466716766357422,
-0.0830271989107132,
-0.09517135471105576,
-0.07809153199195862,
-0.03718850016593933,
0.06161457300186157,
-0.04691050201654434,
0.0013337703421711922,
-0.06742243468761444,
0.006027680356055498,
-0.0789857730269432,
-0.14890190958976746,
0.10771635174751282,
-0.0500759482383728,
-0.09794842451810837,
-0.04525819048285484,
0.21758903563022614,
-0.014263919554650784,
0.04086361080408096,
0.0011878316290676594,
-0.0009550839895382524,
-0.10774419456720352,
-0.0814218744635582,
0.00666575413197279,
0.055117156356573105,
-0.035860851407051086,
0.05757710710167885,
0.01827933080494404,
-0.0621759407222271,
-0.0402364656329155,
-0.03303776681423187,
0.300059974193573,
0.10600546002388,
0.000043084695789730176,
0.19706077873706818,
0.1354675143957138,
-0.09176360815763474,
-0.23543323576450348,
-0.13548579812049866,
-0.07598140835762024,
-0.028977174311876297,
-0.12726376950740814,
-0.16687218844890594,
0.0800260677933693,
-0.028867430984973907,
-0.011081026867032051,
0.133534237742424,
-0.2719114124774933,
-0.13033893704414368,
0.16285106539726257,
-0.00995247345417738,
0.41550517082214355,
-0.0741441398859024,
-0.07809535413980484,
-0.023685364052653313,
-0.11991097033023834,
0.12459124624729156,
-0.00641629146412015,
0.14912313222885132,
-0.013586948625743389,
0.1574503481388092,
0.061369962990283966,
-0.01573881320655346,
0.12750549614429474,
-0.008679362013936043,
-0.06180473044514656,
-0.12392771989107132,
-0.0536196306347847,
0.0007465841481462121,
0.03007889911532402,
0.009635395370423794,
-0.07413322478532791,
-0.020370593294501305,
-0.09339108318090439,
-0.06266079843044281,
-0.0924263447523117,
0.0028464130591601133,
0.028490493074059486,
-0.044617291539907455,
0.02396109700202942,
-0.018001386895775795,
-0.009029112756252289,
0.009913970716297626,
0.1276092678308487,
-0.11307551711797714,
0.145237997174263,
0.11046234518289566,
0.12908238172531128,
-0.12367425113916397,
-0.03345627710223198,
-0.08252353966236115,
-0.03409406170248985,
0.0826505571603775,
-0.08313270658254623,
-0.008298195898532867,
0.13862231373786926,
-0.008264528587460518,
0.07982931286096573,
0.0839274600148201,
-0.004068491980433464,
0.045473597943782806,
0.08198153972625732,
-0.2319772094488144,
-0.09233172237873077,
-0.035605814307928085,
-0.02690831571817398,
0.07979476451873779,
0.12412068247795105,
0.25754082202911377,
0.01611809991300106,
-0.045176777988672256,
0.012171907350420952,
0.0656544417142868,
-0.027220582589507103,
0.08352231234312057,
-0.03474564105272293,
0.020898012444376945,
-0.13300716876983643,
0.08167348802089691,
0.0493973009288311,
-0.0809035524725914,
0.049643274396657944,
0.15636011958122253,
-0.0912690982222557,
-0.10524559766054153,
-0.10030915588140488,
0.02629479207098484,
-0.10317938774824142,
0.0004416732699610293,
-0.02862958237528801,
-0.14696770906448364,
0.06340236216783524,
0.14182202517986298,
0.03296438232064247,
0.039714496582746506,
-0.14293518662452698,
-0.0064039891585707664,
-0.0180794857442379,
0.024409079924225807,
0.08565348386764526,
-0.039107806980609894,
-0.04686525836586952,
0.06732772290706635,
-0.03678727522492409,
0.1272479146718979,
-0.09270549565553665,
-0.15438583493232727,
-0.12452174723148346,
0.05268438905477524,
-0.12813253700733185,
-0.06398501992225647,
-0.10554288327693939,
-0.03578323870897293,
-0.014310142025351524,
-0.04921392351388931,
-0.041177354753017426,
-0.041854020208120346,
-0.09498182684183121,
0.02664557285606861,
-0.052901361137628555,
0.02807917259633541,
-0.07928083837032318,
0.0028577842749655247,
0.07554290443658829,
-0.025400176644325256,
0.1676025390625,
0.16573333740234375,
-0.14119786024093628,
0.10756904631853104,
-0.10785123705863953,
-0.04066963121294975,
0.10241146385669708,
0.02219577319920063,
0.04004281759262085,
0.04418942332267761,
0.009021230041980743,
0.023604873567819595,
0.031476184725761414,
0.05300808325409889,
0.06270597130060196,
-0.10635235160589218,
-0.0225425586104393,
-0.02927268296480179,
-0.1489422470331192,
-0.03729325905442238,
-0.03620532155036926,
-0.006876700557768345,
0.039075057953596115,
0.028539951890707016,
-0.03577345982193947,
0.07584130018949509,
-0.07460013777017593,
0.04018483683466911,
0.024558398872613907,
-0.18312151730060577,
0.0008955635712482035,
-0.06184305623173714,
0.05182606726884842,
0.01032202783972025,
0.17887748777866364,
0.0016571037704125047,
-0.014212841168045998,
0.0015303997788578272,
0.08949276804924011,
0.029028140008449554,
-0.0031951467972248793,
0.12062075734138489,
0.07706853747367859,
-0.061581846326589584,
-0.09649334102869034,
0.09306371957063675,
0.047130074352025986,
-0.025220878422260284,
0.0674135833978653,
-0.044122159481048584,
0.04203465208411217,
0.07339389622211456,
-0.0494920052587986,
0.029500454664230347,
-0.13390056788921356,
-0.11595336347818375,
0.021924873813986778,
0.060044288635253906,
-0.07370426505804062,
0.13071654736995697,
0.18552419543266296,
0.016520092263817787,
0.019255051389336586,
-0.011087724007666111,
-0.04860265552997589,
-0.11798442155122757,
-0.22415310144424438,
-0.053884249180555344,
-0.13309834897518158,
0.0019044671207666397,
-0.1389738768339157,
0.00899586733430624,
0.01632644049823284,
0.10271605104207993,
-0.06945207715034485,
0.059178728610277176,
0.10736023634672165,
-0.12000951915979385,
0.06994777172803879,
-0.05133843421936035,
0.1102123111486435,
-0.00020970332843717188,
-0.011006810702383518,
-0.06624165177345276,
0.06222444772720337,
0.03110015206038952,
0.04934979975223541,
-0.018479622900485992,
0.03974052146077156,
-0.1275940090417862,
-0.08248455077409744,
-0.04341398924589157,
0.06205982714891434,
-0.0020947852171957493,
0.1537812054157257,
0.01997319795191288,
-0.05088172107934952,
0.01741802506148815,
0.22798430919647217,
-0.06095077842473984,
-0.09404516965150833,
-0.037550292909145355,
0.24376532435417175,
0.03933113068342209,
0.07840923219919205,
-0.007253365591168404,
0.005974192637950182,
-0.13023455440998077,
0.27848386764526367,
0.3012647330760956,
-0.13032026588916779,
-0.009670091792941093,
0.014708367176353931,
0.038996241986751556,
0.13154368102550507,
0.04427937790751457,
0.13337883353233337,
0.3768857717514038,
-0.05945013836026192,
0.014019637368619442,
-0.012247311882674694,
-0.016219306737184525,
-0.057887330651283264,
0.0038175941444933414,
0.05427518114447594,
-0.024932684376835823,
-0.04695068299770355,
0.08267506211996078,
-0.28418096899986267,
0.07648047059774399,
-0.16737468540668488,
-0.16859863698482513,
-0.0990125760436058,
-0.0062715234234929085,
0.07555416971445084,
0.029893115162849426,
0.08143114298582077,
0.014954779297113419,
-0.10283960402011871,
0.0678190290927887,
0.010308369062840939,
-0.16522042453289032,
0.019483711570501328,
0.07465485483407974,
-0.06201077625155449,
-0.031247606500983238,
-0.04404463618993759,
0.0770372524857521,
0.08217412233352661,
0.05932769924402237,
-0.0013119668001309037,
0.026965651661157608,
-0.016715001314878464,
-0.06334984302520752,
0.019583728164434433,
0.0614105649292469,
0.02886313386261463,
-0.05073408782482147,
0.12021895498037338,
-0.05161922052502632,
0.028373433277010918,
-0.010531889274716377,
-0.02593674138188362,
-0.01020712498575449,
0.06951162219047546,
-0.08218005299568176,
0.029198896139860153,
0.08856258541345596,
-0.029062319546937943,
-0.02254311554133892,
-0.0468757264316082,
-0.05572770535945892,
-0.04270976781845093,
-0.06541071087121964,
-0.08891409635543823,
-0.14474771916866302,
-0.11766381561756134,
0.10511843115091324,
0.028738275170326233,
-0.18788452446460724,
-0.03398032486438751,
-0.07810499519109726,
0.07117491960525513,
-0.09276355057954788,
0.08469760417938232,
0.07638561725616455,
0.016199812293052673,
-0.025627262890338898,
-0.0038012985605746508,
0.010414130054414272,
0.08141913264989853,
-0.16467627882957458,
-0.09566494077444077
] |
null | null |
transformers
|
# Model Trained Using AutoNLP
- Problem type: Binary Classification
- Model ID: 33199029
- CO2 Emissions (in grams): 3.667033499762825
## Validation Metrics
- Loss: 0.32653310894966125
- Accuracy: 0.9133333333333333
- Precision: 0.9005847953216374
- Recall: 0.9447852760736196
- AUC: 0.9532488468944517
- F1: 0.9221556886227544
## Usage
You can use cURL to access this model:
```
$ curl -X POST -H "Authorization: Bearer YOUR_API_KEY" -H "Content-Type: application/json" -d '{"inputs": "I love AutoNLP"}' https://api-inference.huggingface.co/models/bozelosp/autonlp-sci-relevance-33199029
```
Or Python API:
```
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Load the fine-tuned classifier and its tokenizer (use_auth_token is needed for private repos)
model = AutoModelForSequenceClassification.from_pretrained("bozelosp/autonlp-sci-relevance-33199029", use_auth_token=True)
tokenizer = AutoTokenizer.from_pretrained("bozelosp/autonlp-sci-relevance-33199029", use_auth_token=True)

# Tokenize the example text and run a forward pass to obtain classification logits
inputs = tokenizer("I love AutoNLP", return_tensors="pt")
outputs = model(**inputs)
```
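The forward pass above returns raw logits. A minimal follow-up sketch (assuming the default `id2label` mapping that AutoNLP writes into the model config) for turning them into a predicted class and probability:
```
import torch

# Convert the raw logits from the forward pass above into class probabilities
probs = torch.softmax(outputs.logits, dim=-1)

# Pick the most likely class and look up its human-readable name in the model config
pred_id = int(probs.argmax(dim=-1))
print(model.config.id2label[pred_id], probs[0, pred_id].item())
```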
|
{"language": "en", "tags": "autonlp", "datasets": ["bozelosp/autonlp-data-sci-relevance"], "widget": [{"text": "I love AutoNLP \ud83e\udd17"}], "co2_eq_emissions": 3.667033499762825}
|
text-classification
|
world-wide/sent-sci-irrelevance
|
[
"transformers",
"pytorch",
"bert",
"text-classification",
"autonlp",
"en",
"dataset:bozelosp/autonlp-data-sci-relevance",
"co2_eq_emissions",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #bert #text-classification #autonlp #en #dataset-bozelosp/autonlp-data-sci-relevance #co2_eq_emissions #autotrain_compatible #endpoints_compatible #region-us
|
# Model Trained Using AutoNLP
- Problem type: Binary Classification
- Model ID: 33199029
- CO2 Emissions (in grams): 3.667033499762825
## Validation Metrics
- Loss: 0.32653310894966125
- Accuracy: 0.9133333333333333
- Precision: 0.9005847953216374
- Recall: 0.9447852760736196
- AUC: 0.9532488468944517
- F1: 0.9221556886227544
## Usage
You can use cURL to access this model:
Or Python API:
|
[
"# Model Trained Using AutoNLP\n\n- Problem type: Binary Classification\n- Model ID: 33199029\n- CO2 Emissions (in grams): 3.667033499762825",
"## Validation Metrics\n\n- Loss: 0.32653310894966125\n- Accuracy: 0.9133333333333333\n- Precision: 0.9005847953216374\n- Recall: 0.9447852760736196\n- AUC: 0.9532488468944517\n- F1: 0.9221556886227544",
"## Usage\n\nYou can use cURL to access this model:\n\n\n\nOr Python API:"
] |
[
"TAGS\n#transformers #pytorch #bert #text-classification #autonlp #en #dataset-bozelosp/autonlp-data-sci-relevance #co2_eq_emissions #autotrain_compatible #endpoints_compatible #region-us \n",
"# Model Trained Using AutoNLP\n\n- Problem type: Binary Classification\n- Model ID: 33199029\n- CO2 Emissions (in grams): 3.667033499762825",
"## Validation Metrics\n\n- Loss: 0.32653310894966125\n- Accuracy: 0.9133333333333333\n- Precision: 0.9005847953216374\n- Recall: 0.9447852760736196\n- AUC: 0.9532488468944517\n- F1: 0.9221556886227544",
"## Usage\n\nYou can use cURL to access this model:\n\n\n\nOr Python API:"
] |
[
70,
42,
78,
17
] |
[
"passage: TAGS\n#transformers #pytorch #bert #text-classification #autonlp #en #dataset-bozelosp/autonlp-data-sci-relevance #co2_eq_emissions #autotrain_compatible #endpoints_compatible #region-us \n# Model Trained Using AutoNLP\n\n- Problem type: Binary Classification\n- Model ID: 33199029\n- CO2 Emissions (in grams): 3.667033499762825## Validation Metrics\n\n- Loss: 0.32653310894966125\n- Accuracy: 0.9133333333333333\n- Precision: 0.9005847953216374\n- Recall: 0.9447852760736196\n- AUC: 0.9532488468944517\n- F1: 0.9221556886227544## Usage\n\nYou can use cURL to access this model:\n\n\n\nOr Python API:"
] |
[
-0.15625762939453125,
0.15650425851345062,
-0.0010397292207926512,
0.0637698620557785,
0.11966010183095932,
0.02177584171295166,
0.06832657754421234,
0.09205380827188492,
0.030979614704847336,
0.06454794108867645,
0.15464065968990326,
0.19884465634822845,
0.03235034644603729,
0.15718717873096466,
-0.13032403588294983,
-0.1494659185409546,
0.06700994819402695,
0.06581932306289673,
0.10972430557012558,
0.12493498623371124,
0.09267222136259079,
-0.09706196933984756,
0.1279207468032837,
0.04516701027750969,
-0.1689375787973404,
-0.011062158271670341,
0.0861489474773407,
-0.1266709268093109,
0.10476385802030563,
0.09550493955612183,
0.16093434393405914,
0.025578845292329788,
0.10219471156597137,
-0.09531611949205399,
-0.028165536001324654,
-0.0015471335500478745,
-0.015809746459126472,
0.10277103632688522,
0.04804512858390808,
-0.045135702937841415,
-0.01705808751285076,
0.0018222114304080606,
0.08877381682395935,
0.04193457216024399,
-0.09542261809110641,
-0.054577723145484924,
-0.05938473716378212,
0.036433685570955276,
0.12646740674972534,
0.10356123000383377,
-0.001089817495085299,
0.27701565623283386,
-0.08882225304841995,
0.09410042315721512,
0.050845883786678314,
-0.2543811500072479,
-0.019466714933514595,
0.10500402003526688,
-0.02871549129486084,
-0.09091639518737793,
-0.024667182937264442,
0.02165193110704422,
0.09144411981105804,
0.01584128476679325,
0.060304418206214905,
-0.05934387817978859,
-0.06167413294315338,
0.007569725159555674,
-0.10696588456630707,
-0.07365898042917252,
0.22156985104084015,
0.022277742624282837,
-0.09602291136980057,
-0.01916961930692196,
-0.09636832773685455,
-0.12695561349391937,
-0.06387881934642792,
-0.04506655037403107,
-0.022915130481123924,
-0.043761689215898514,
-0.052519913762807846,
0.08806434273719788,
-0.10747059434652328,
-0.06657136231660843,
-0.17253413796424866,
0.13226547837257385,
-0.0014942009001970291,
0.057356640696525574,
-0.028400562703609467,
0.10527167469263077,
-0.07467354834079742,
-0.08311448246240616,
-0.010682170279324055,
-0.025965366512537003,
-0.06925605982542038,
-0.05033478885889053,
-0.029388606548309326,
0.047466427087783813,
-0.008475566282868385,
0.20902912318706512,
0.06717456877231598,
0.031971827149391174,
0.046891938894987106,
-0.0010503133526071906,
0.003595976158976555,
0.21413898468017578,
-0.09106224030256271,
-0.03179824352264404,
0.064228355884552,
-0.054702091962099075,
0.03036600351333618,
-0.03793534263968468,
-0.08447059988975525,
-0.1123179942369461,
0.15285493433475494,
0.03316453844308853,
0.01704328879714012,
0.057275570929050446,
-0.08959802240133286,
-0.03160857781767845,
0.08317921310663223,
-0.06063259392976761,
0.026691151782870293,
-0.02635711431503296,
-0.07456008344888687,
0.06215827167034149,
0.10635404288768768,
0.023521821945905685,
-0.074612095952034,
0.08398059010505676,
-0.1220608800649643,
0.010598075576126575,
-0.04279697686433792,
-0.11767597496509552,
0.04306051880121231,
-0.07638777047395706,
0.033844392746686935,
-0.20258751511573792,
-0.17576934397220612,
-0.00734681636095047,
-0.005544432904571295,
-0.049307774752378464,
-0.032511766999959946,
-0.017930958420038223,
-0.026454569771885872,
0.04229982942342758,
-0.026530707255005836,
-0.03486288711428642,
-0.0414469949901104,
0.044255342334508896,
0.06420502066612244,
0.043561533093452454,
-0.13488547503948212,
0.03257957473397255,
-0.10109934210777283,
0.0041220164857804775,
-0.1052008718252182,
0.0326165109872818,
-0.02842334657907486,
0.023987984284758568,
-0.14081473648548126,
-0.06717736274003983,
0.10408774763345718,
-0.014825514517724514,
0.08200503140687943,
0.14631305634975433,
-0.06555669754743576,
-0.014435596764087677,
0.05757240206003189,
-0.060621120035648346,
-0.11095044016838074,
0.10107190907001495,
-0.040895093232393265,
0.013147511519491673,
0.06711584329605103,
-0.011231375858187675,
0.12785105407238007,
-0.10218456387519836,
-0.05024129897356033,
0.0322304405272007,
-0.037902381271123886,
-0.11944673955440521,
0.055841341614723206,
0.023137860000133514,
-0.17637021839618683,
0.04120917618274689,
0.052200984209775925,
0.03816400095820427,
-0.061987072229385376,
-0.0916355773806572,
-0.05761611834168434,
-0.02935502864420414,
0.03793663531541824,
-0.011167463846504688,
0.06386449187994003,
-0.03337983787059784,
-0.07255972921848297,
0.010639830492436886,
0.12846732139587402,
-0.014048892073333263,
-0.021777641028165817,
-0.15430240333080292,
0.11443380266427994,
-0.18856871128082275,
-0.052142031490802765,
-0.1979668140411377,
-0.022169923409819603,
-0.015196967869997025,
0.03832679241895676,
-0.022847043350338936,
-0.03568517044186592,
0.044215716421604156,
0.03119550831615925,
0.022104529663920403,
-0.02681758999824524,
0.10429675877094269,
0.0070860073901712894,
-0.1365872472524643,
-0.04825979843735695,
-0.026863442733883858,
-0.00902074109762907,
0.21830891072750092,
-0.11131338775157928,
-0.011858130805194378,
-0.010736932046711445,
0.08321432769298553,
-0.018308453261852264,
0.01964755170047283,
-0.019014371559023857,
0.05391134321689606,
-0.06274113804101944,
-0.0032197197433561087,
0.028922609984874725,
-0.02668234333395958,
-0.10301826894283295,
0.021505728363990784,
-0.1849856972694397,
0.21564653515815735,
0.1640915870666504,
-0.07029750943183899,
-0.07969430088996887,
0.014155350625514984,
0.029623927548527718,
-0.0113827558234334,
-0.030907487496733665,
-0.0024211748968809843,
0.09133978933095932,
0.012839016504585743,
0.12734557688236237,
-0.07199250161647797,
-0.019086429849267006,
0.06810475140810013,
-0.08679293841123581,
-0.02635100670158863,
0.1380312442779541,
0.07004918158054352,
-0.20071080327033997,
0.10161227732896805,
0.046855032444000244,
-0.11357776820659637,
0.006529622711241245,
0.028179358690977097,
-0.05106177181005478,
-0.03875969350337982,
-0.0644739642739296,
0.023089183494448662,
0.08368508517742157,
-0.0338708870112896,
0.04043450206518173,
0.09279680997133255,
-0.021477311849594116,
0.0022677977103739977,
-0.1321900337934494,
0.011785242706537247,
0.02579346112906933,
0.0165417343378067,
-0.07154835015535355,
0.013408779166638851,
0.04325563460588455,
0.13192182779312134,
0.022246429696679115,
-0.125250905752182,
0.046633463352918625,
0.035539690405130386,
-0.13865835964679718,
0.2458537518978119,
-0.0960310623049736,
-0.22001320123672485,
-0.18141093850135803,
-0.11600665748119354,
-0.04958471283316612,
0.009872878901660442,
0.021829864010214806,
-0.02899204008281231,
-0.11979955434799194,
-0.0326724611222744,
-0.06974266469478607,
-0.016046058386564255,
0.016655834391713142,
-0.03198515623807907,
-0.04217086359858513,
0.04838841035962105,
-0.08568825572729111,
-0.05234524607658386,
-0.024611227214336395,
-0.021416086703538895,
0.1370030641555786,
-0.0592191144824028,
0.12230207771062851,
0.17027565836906433,
-0.053607236593961716,
0.00495241628959775,
0.025342373177409172,
0.21949566900730133,
-0.018950214609503746,
-0.009325358085334301,
0.1746189445257187,
0.008485367521643639,
0.03138774260878563,
0.12127506732940674,
0.016535663977265358,
-0.07318014651536942,
-0.011620568111538887,
-0.01814322918653488,
-0.04356495290994644,
-0.18555355072021484,
-0.17510610818862915,
-0.0032829823903739452,
0.006432428956031799,
0.12935307621955872,
0.004342739470303059,
0.11087673902511597,
0.16196194291114807,
0.00859582144767046,
0.07708568871021271,
-0.08079706877470016,
0.11028441786766052,
0.1820855438709259,
0.02260172925889492,
0.16628575325012207,
-0.0587148442864418,
-0.07246141135692596,
0.06824194639921188,
-0.02469155564904213,
0.07399528473615646,
0.0439794659614563,
-0.03168554604053497,
-0.026299649849534035,
0.1320946216583252,
0.06873785704374313,
0.15861810743808746,
0.07860831916332245,
-0.03550197184085846,
0.007833708077669144,
-0.038987286388874054,
-0.1341591626405716,
0.03867697715759277,
0.04892444238066673,
0.024980485439300537,
-0.11558323353528976,
-0.02600504644215107,
0.00006770835898350924,
0.07011531293392181,
0.16316477954387665,
-0.49551692605018616,
-0.09676388651132584,
0.015202888287603855,
-0.028461167588829994,
-0.13717330992221832,
-0.013948041945695877,
-0.09244208037853241,
-0.16142107546329498,
0.02801693044602871,
-0.033748410642147064,
0.10964406281709671,
-0.05026929825544357,
-0.0001805491920094937,
-0.10098517686128616,
0.018585212528705597,
-0.02437303401529789,
0.08501541614532471,
-0.23438483476638794,
0.2347581833600998,
0.05666884034872055,
0.024812690913677216,
-0.09078345447778702,
0.011410295963287354,
0.005334964022040367,
0.09283813834190369,
0.11958428472280502,
0.003279173281043768,
0.042252689599990845,
-0.2891899347305298,
-0.15173129737377167,
0.047879576683044434,
-0.029533585533499718,
0.0054779513739049435,
0.09020527452230453,
0.0028202186804264784,
-0.02509790100157261,
0.00972234271466732,
-0.04824850708246231,
-0.07379026710987091,
-0.05091099068522453,
0.04231095686554909,
0.11076555401086807,
-0.022148549556732178,
0.0035989994648844004,
-0.06871499121189117,
-0.011833438649773598,
0.14797343313694,
-0.04093077778816223,
-0.07695312052965164,
-0.1400877833366394,
0.019431432709097862,
0.12340465188026428,
-0.11108206957578659,
0.06376125663518906,
-0.04961363971233368,
0.05084342136979103,
0.007721226196736097,
-0.12168379873037338,
0.11316266655921936,
-0.0901700109243393,
-0.04896281659603119,
0.0027510549407452345,
0.08261365443468094,
0.009468578733503819,
0.03591348975896835,
0.07035773247480392,
0.024796035140752792,
-0.08079580962657928,
-0.11527255922555923,
0.00176225695759058,
0.0621812529861927,
0.13780726492404938,
0.0838141068816185,
0.029620621353387833,
-0.1430005133152008,
-0.05651165172457695,
0.07787489891052246,
0.15880019962787628,
0.18160568177700043,
-0.07921325415372849,
-0.01059208158403635,
0.12542447447776794,
0.0012115968856960535,
-0.2123166173696518,
-0.017230944707989693,
-0.01591193489730358,
0.06154796481132507,
-0.11884630471467972,
-0.04198838770389557,
0.11004670709371567,
0.08713891357183456,
-0.04486703500151634,
-0.023113131523132324,
-0.19073635339736938,
-0.12085197865962982,
0.2927778959274292,
0.05845791473984718,
0.18695741891860962,
-0.06356417387723923,
-0.02351238951086998,
-0.11766308546066284,
-0.27155160903930664,
0.1492091417312622,
0.02215447649359703,
0.076823391020298,
-0.051406171172857285,
0.13590730726718903,
0.0559968426823616,
-0.06493531167507172,
0.1560700237751007,
0.000051587681809905916,
0.02985682524740696,
-0.031210456043481827,
-0.07304833084344864,
-0.04207862541079521,
-0.06565205752849579,
0.15107452869415283,
0.034302640706300735,
0.07454074919223785,
-0.19134573638439178,
-0.04335296154022217,
-0.03010355308651924,
0.10336575657129288,
-0.015522320754826069,
-0.06382090598344803,
-0.022248869761824608,
-0.019765479490160942,
-0.013837610371410847,
-0.06518322974443436,
0.01567930355668068,
-0.0019388850778341293,
0.04033294692635536,
0.15788494050502777,
0.13318197429180145,
-0.06466535478830338,
-0.029996097087860107,
0.029436131939291954,
-0.08589811623096466,
0.10709064453840256,
-0.13018782436847687,
0.0809033140540123,
0.12231355160474777,
-0.009960800409317017,
0.0972820520401001,
0.046424828469753265,
-0.05016138777136803,
-0.029636213555932045,
0.054681118577718735,
-0.15800774097442627,
0.095915287733078,
-0.0032564192079007626,
0.024409165605902672,
-0.041694026440382004,
0.06613592058420181,
0.14726266264915466,
-0.05854854732751846,
-0.040146276354789734,
0.011147466488182545,
-0.0043139285407960415,
-0.0228599701076746,
0.22567982971668243,
0.034324031323194504,
0.07200323045253754,
-0.13101987540721893,
0.038999781012535095,
0.034122318029403687,
-0.043870385736227036,
0.027808165177702904,
-0.040697526186704636,
-0.1268782913684845,
-0.08397209644317627,
-0.030138259753584862,
0.12023735046386719,
-0.2733217775821686,
-0.07317081838846207,
-0.03509977459907532,
-0.09034286439418793,
0.059807490557432175,
0.21978749334812164,
0.11102034151554108,
0.05010131001472473,
-0.0401831790804863,
-0.09825977683067322,
-0.12721867859363556,
0.0035216554533690214,
0.11783697456121445,
0.055589187890291214,
-0.13823403418064117,
0.14389994740486145,
-0.03176780045032501,
0.0681210458278656,
-0.04710392653942108,
-0.009080763906240463,
-0.1575930416584015,
0.01629243604838848,
-0.1569179892539978,
0.03265709802508354,
-0.07925929874181747,
0.019974157214164734,
0.011603730730712414,
-0.03227163478732109,
-0.06970027834177017,
0.016985546797513962,
-0.07283243536949158,
-0.012900729663670063,
0.03078271821141243,
0.020714212208986282,
-0.07371488958597183,
-0.050480108708143234,
0.06689957529306412,
-0.02920530177652836,
0.06180242821574211,
0.1564989537000656,
0.03916258364915848,
0.05826329439878464,
-0.0928996354341507,
-0.016364404931664467,
0.12297780811786652,
0.0357414111495018,
0.10433554649353027,
-0.1440240442752838,
0.0670151337981224,
0.06336446106433868,
0.02046922594308853,
0.05092476308345795,
0.1259923279285431,
-0.1139368861913681,
-0.008108977228403091,
-0.06780257821083069,
-0.08047091960906982,
-0.12744228541851044,
0.013683810830116272,
0.12124049663543701,
0.05665479600429535,
0.09350461512804031,
-0.05013526603579521,
0.035312116146087646,
-0.12529690563678741,
0.009608017280697823,
-0.07791309058666229,
-0.07268257439136505,
-0.059308674186468124,
-0.05641238018870354,
0.061760153621435165,
-0.011850888840854168,
0.10258740186691284,
-0.03744325414299965,
0.08733870834112167,
-0.002142687328159809,
0.08349093794822693,
0.04370039328932762,
-0.013231567107141018,
0.15619535744190216,
0.1063583716750145,
-0.017574531957507133,
0.0437898151576519,
0.11281793564558029,
0.08234226703643799,
-0.02008981816470623,
0.019191231578588486,
0.009473910555243492,
-0.03560711443424225,
0.15132471919059753,
0.006580517161637545,
-0.0713030993938446,
-0.041976794600486755,
-0.09005294740200043,
-0.12798947095870972,
0.03721370920538902,
0.023013895377516747,
0.06307493150234222,
0.09779616445302963,
-0.06712163239717484,
-0.02735796757042408,
-0.033641621470451355,
-0.07552387565374374,
-0.19382058084011078,
-0.06211567670106888,
-0.13565771281719208,
-0.05410141870379448,
-0.013140475377440453,
-0.08824122697114944,
-0.04778934270143509,
0.0963364914059639,
0.029478557407855988,
-0.034606438130140305,
0.07009148597717285,
-0.10133317857980728,
-0.01740172877907753,
-0.009211636148393154,
0.012877696193754673,
0.005792110227048397,
-0.02317121811211109,
-0.011041662655770779,
-0.0034145466051995754,
0.02901732549071312,
0.05913528800010681,
-0.0019292902434244752,
0.0382450707256794,
0.10443466901779175,
-0.0034105961676687002,
-0.09734810143709183,
-0.042434316128492355,
0.042275894433259964,
0.06551799178123474,
0.06900110840797424,
0.02338949777185917,
0.038332805037498474,
-0.015826981514692307,
0.2158644199371338,
-0.10115575045347214,
0.010657229460775852,
-0.1381593942642212,
0.32578036189079285,
-0.0076760840602219105,
0.05091337114572525,
0.034550607204437256,
-0.046887703239917755,
0.011839279904961586,
0.180965393781662,
0.11107345670461655,
-0.017275068908929825,
0.005238865036517382,
-0.01837794855237007,
-0.01521399524062872,
-0.018810050562024117,
0.034135378897190094,
0.06255064159631729,
0.190010666847229,
-0.09939222037792206,
0.012474214658141136,
-0.013734497129917145,
-0.008146947249770164,
-0.0196042712777853,
0.03204111382365227,
-0.015190791338682175,
-0.03259716182947159,
-0.06255035102367401,
0.07670863717794418,
-0.07796598225831985,
0.06643225997686386,
0.0737563893198967,
-0.10342159122228622,
-0.12975914776325226,
0.02941548451781273,
-0.0684162825345993,
-0.020577706396579742,
0.11430004239082336,
-0.09499584883451462,
-0.02276250533759594,
0.04643240198493004,
0.014284806326031685,
-0.17744047939777374,
-0.06328969448804855,
0.042438067495822906,
0.18009400367736816,
0.17871910333633423,
0.029062140733003616,
0.17597071826457977,
0.1470515877008438,
0.04881785437464714,
-0.11257424205541611,
0.10061828792095184,
0.015147094614803791,
-0.09966907650232315,
0.12962402403354645,
0.013284318149089813,
-0.003829485969617963,
0.056408144533634186,
0.039141904562711716,
-0.18283335864543915,
-0.001990930875763297,
-0.08258926123380661,
0.04878490790724754,
-0.08041790127754211,
-0.009412659332156181,
-0.08102838695049286,
0.11214946955442429,
0.10837185382843018,
-0.06323902308940887,
-0.03789249435067177,
-0.03596414253115654,
0.08006645739078522,
0.031155714765191078,
-0.1125481054186821,
-0.019208187237381935,
-0.11507323384284973,
0.06831950694322586,
-0.04512351378798485,
0.011519606225192547,
-0.2186739444732666,
-0.021675080060958862,
-0.030375724658370018,
-0.06936678290367126,
-0.04215630516409874,
0.08074510842561722,
0.0042944857850670815,
0.036461472511291504,
-0.046229876577854156,
-0.06967760622501373,
-0.0020018143113702536,
0.11376786231994629,
-0.08241501450538635,
-0.15073329210281372
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# bert-finetuned-ner
This model is a fine-tuned version of [allenai/longformer-base-4096](https://huggingface.co/allenai/longformer-base-4096) on an unspecified dataset.
It achieves the following results on the evaluation set:
- Loss: 0.6434
- Precision: 0.8589
- Recall: 0.8686
- F1: 0.8637
- Accuracy: 0.8324
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 1
- eval_batch_size: 1
- seed: 42
- gradient_accumulation_steps: 8
- total_train_batch_size: 8
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 5
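
For reference, here is a minimal sketch of how the hyperparameters listed above map onto the Hugging Face `TrainingArguments` API. The `output_dir` value is a placeholder and the model/dataset setup is omitted; this is not the original training script.

```
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="bert-finetuned-ner",        # placeholder output path
    learning_rate=5e-5,
    per_device_train_batch_size=1,
    per_device_eval_batch_size=1,
    seed=42,
    gradient_accumulation_steps=8,          # effective train batch size: 1 x 8 = 8
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    lr_scheduler_type="linear",
    warmup_ratio=0.1,
    num_train_epochs=5,
)
```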
### Training results
| Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:---------:|:------:|:------:|:--------:|
| 0.615 | 1.0 | 1741 | 0.6111 | 0.8200 | 0.8652 | 0.8420 | 0.8046 |
| 0.4795 | 2.0 | 3482 | 0.5366 | 0.8456 | 0.8803 | 0.8626 | 0.8301 |
| 0.3705 | 3.0 | 5223 | 0.5412 | 0.8527 | 0.8786 | 0.8655 | 0.8339 |
| 0.2749 | 4.0 | 6964 | 0.5906 | 0.8559 | 0.8711 | 0.8634 | 0.8316 |
| 0.2049 | 5.0 | 8705 | 0.6434 | 0.8589 | 0.8686 | 0.8637 | 0.8324 |
### Framework versions
- Transformers 4.17.0
- Pytorch 1.10.0+cu111
- Datasets 1.18.4
- Tokenizers 0.11.6
|
{"tags": ["generated_from_trainer"], "metrics": ["precision", "recall", "f1", "accuracy"], "model-index": [{"name": "bert-finetuned-ner", "results": []}]}
|
token-classification
|
brad1141/bert-finetuned-ner
|
[
"transformers",
"pytorch",
"tensorboard",
"longformer",
"token-classification",
"generated_from_trainer",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #longformer #token-classification #generated_from_trainer #autotrain_compatible #endpoints_compatible #region-us
|
bert-finetuned-ner
==================
This model is a fine-tuned version of allenai/longformer-base-4096 on an unspecified dataset.
It achieves the following results on the evaluation set:
* Loss: 0.6434
* Precision: 0.8589
* Recall: 0.8686
* F1: 0.8637
* Accuracy: 0.8324
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 5e-05
* train\_batch\_size: 1
* eval\_batch\_size: 1
* seed: 42
* gradient\_accumulation\_steps: 8
* total\_train\_batch\_size: 8
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* lr\_scheduler\_warmup\_ratio: 0.1
* num\_epochs: 5
### Training results
### Framework versions
* Transformers 4.17.0
* Pytorch 1.10.0+cu111
* Datasets 1.18.4
* Tokenizers 0.11.6
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5e-05\n* train\\_batch\\_size: 1\n* eval\\_batch\\_size: 1\n* seed: 42\n* gradient\\_accumulation\\_steps: 8\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_ratio: 0.1\n* num\\_epochs: 5",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.17.0\n* Pytorch 1.10.0+cu111\n* Datasets 1.18.4\n* Tokenizers 0.11.6"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #longformer #token-classification #generated_from_trainer #autotrain_compatible #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5e-05\n* train\\_batch\\_size: 1\n* eval\\_batch\\_size: 1\n* seed: 42\n* gradient\\_accumulation\\_steps: 8\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_ratio: 0.1\n* num\\_epochs: 5",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.17.0\n* Pytorch 1.10.0+cu111\n* Datasets 1.18.4\n* Tokenizers 0.11.6"
] |
[
49,
144,
4,
33
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #longformer #token-classification #generated_from_trainer #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5e-05\n* train\\_batch\\_size: 1\n* eval\\_batch\\_size: 1\n* seed: 42\n* gradient\\_accumulation\\_steps: 8\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_ratio: 0.1\n* num\\_epochs: 5### Training results### Framework versions\n\n\n* Transformers 4.17.0\n* Pytorch 1.10.0+cu111\n* Datasets 1.18.4\n* Tokenizers 0.11.6"
] |
[
-0.11685390025377274,
0.1104368343949318,
-0.0022971536964178085,
0.1229650229215622,
0.16037093102931976,
0.023266511037945747,
0.12552408874034882,
0.14697562158107758,
-0.09987867623567581,
0.07714617997407913,
0.1278911530971527,
0.13333363831043243,
0.02327711507678032,
0.14669294655323029,
-0.04665848985314369,
-0.28741219639778137,
-0.003035656176507473,
0.02455892786383629,
-0.0930812656879425,
0.11890413612127304,
0.0756031796336174,
-0.14470940828323364,
0.08442700654268265,
0.003148707328364253,
-0.19583627581596375,
-0.025160331279039383,
-0.0021953945979475975,
-0.056732725352048874,
0.12313497811555862,
0.009549986571073532,
0.1160089373588562,
0.022945040836930275,
0.08813945204019547,
-0.16228725016117096,
0.0022816306445747614,
0.05283728986978531,
0.0188521146774292,
0.10111529380083084,
0.05914151296019554,
-0.011791383847594261,
0.059079062193632126,
-0.10917622596025467,
0.06380809843540192,
0.009537125006318092,
-0.131207674741745,
-0.2541915774345398,
-0.09655436873435974,
0.023639481514692307,
0.06809943169355392,
0.07190345227718353,
-0.0074425553902983665,
0.13595692813396454,
-0.07999183237552643,
0.0914255827665329,
0.28100258111953735,
-0.28434523940086365,
-0.06910032778978348,
0.039542198181152344,
0.007635194808244705,
0.05587765574455261,
-0.1275932639837265,
-0.023578282445669174,
0.044244568794965744,
0.031261008232831955,
0.1417277455329895,
-0.011815140955150127,
-0.04749680310487747,
0.01548225712031126,
-0.1422957479953766,
-0.020488565787672997,
0.08818771690130234,
0.03359917551279068,
-0.02690718322992325,
-0.06039419025182724,
-0.0624222457408905,
-0.24037934839725494,
-0.043718576431274414,
-0.0018835040973499417,
0.046081189066171646,
-0.06821385025978088,
-0.09758210927248001,
0.03218100592494011,
-0.07523810118436813,
-0.0597064383327961,
-0.03154512494802475,
0.1438627690076828,
0.04334323853254318,
0.006578699219971895,
-0.020609714090824127,
0.10148070752620697,
-0.01586819253861904,
-0.16531240940093994,
0.014126865193247795,
0.03217487037181854,
-0.047586485743522644,
-0.05524837225675583,
-0.04831919074058533,
-0.04224634915590286,
0.0021005445159971714,
0.1386704444885254,
-0.0543023906648159,
0.062448352575302124,
0.02334008738398552,
0.01954766921699047,
-0.07841487228870392,
0.19244922697544098,
-0.0696304515004158,
-0.03994528949260712,
-0.01303449459373951,
0.07920695096254349,
0.009890444576740265,
-0.024647315964102745,
-0.09785756468772888,
0.01958312839269638,
0.11813941597938538,
0.023972027003765106,
-0.049966875463724136,
0.05449247732758522,
-0.042394332587718964,
-0.030943643301725388,
0.03476623073220253,
-0.10179386287927628,
0.05578545480966568,
0.003609304316341877,
-0.10619950294494629,
-0.020301422104239464,
-0.01041656918823719,
-0.004419225268065929,
-0.005156232975423336,
0.15734463930130005,
-0.10894574970006943,
0.03295621648430824,
-0.08727938681840897,
-0.12933121621608734,
0.019545046612620354,
-0.10773198306560516,
0.014535469003021717,
-0.0620218962430954,
-0.12529854476451874,
-0.023367352783679962,
0.04585355147719383,
-0.07180672138929367,
-0.0337311327457428,
-0.05580863729119301,
-0.08595525473356247,
0.039334557950496674,
-0.010191954672336578,
0.12275856733322144,
-0.07021233439445496,
0.0985456109046936,
0.04209912195801735,
0.09247083216905594,
0.0005384027026593685,
0.05085507035255432,
-0.08378802984952927,
0.029421094805002213,
-0.23618286848068237,
0.04636650159955025,
-0.06818973273038864,
0.06827805191278458,
-0.1024780198931694,
-0.11406553536653519,
0.04425057768821716,
-0.02337237447500229,
0.09120824187994003,
0.10226122289896011,
-0.17653609812259674,
-0.07802696526050568,
0.17381460964679718,
-0.07034143805503845,
-0.10913790762424469,
0.1251661479473114,
-0.04815918952226639,
0.0042050909250974655,
0.04841908812522888,
0.1649884283542633,
0.08198542892932892,
-0.0845983624458313,
-0.03133604675531387,
-0.04996245354413986,
0.08002576977014542,
-0.04395872727036476,
0.06628578901290894,
0.013721035793423653,
0.07642435282468796,
0.009505926631391048,
-0.005800759885460138,
0.0434691496193409,
-0.11088886111974716,
-0.08448003977537155,
-0.02698235958814621,
-0.08485449105501175,
0.06824597716331482,
0.06607260555028915,
0.0829695612192154,
-0.1001189798116684,
-0.0917503833770752,
0.06716365367174149,
0.081114262342453,
-0.06549303233623505,
0.02571260929107666,
-0.06473083049058914,
0.0890515074133873,
-0.07626495510339737,
-0.02710714563727379,
-0.19229310750961304,
-0.06693674623966217,
0.025757906958460808,
-0.0011084784055128694,
0.010889706201851368,
-0.01329004392027855,
0.0881114974617958,
0.07857566326856613,
-0.056249335408210754,
-0.023242995142936707,
-0.018580453470349312,
-0.006045692600309849,
-0.13459797203540802,
-0.22270041704177856,
-0.057233426719903946,
-0.027622878551483154,
0.1049494668841362,
-0.20123246312141418,
0.020860282704234123,
0.0340028777718544,
0.10550655424594879,
0.038493040949106216,
-0.025810278952121735,
-0.02045522816479206,
0.07671363651752472,
-0.03323766961693764,
-0.06813671439886093,
0.051605433225631714,
-0.014412982389330864,
-0.074762262403965,
-0.05136709660291672,
-0.13288311660289764,
0.15636993944644928,
0.12644514441490173,
-0.028203384950757027,
-0.1025189459323883,
-0.01706552691757679,
-0.05691103637218475,
-0.04268069565296173,
-0.045082058757543564,
0.04091895371675491,
0.1399679183959961,
0.019294189289212227,
0.1465049684047699,
-0.06626036763191223,
-0.051128942519426346,
0.045388396829366684,
-0.015668045729398727,
0.015934821218252182,
0.12013306468725204,
0.08393359929323196,
-0.0870659276843071,
0.14528334140777588,
0.11805317550897598,
-0.07498352229595184,
0.13903704285621643,
-0.042695656418800354,
-0.09046132117509842,
-0.040631331503391266,
-0.02312062680721283,
0.02615540102124214,
0.11894576251506805,
-0.10213818401098251,
-0.025269605219364166,
0.022696880623698235,
0.03721661493182182,
-0.0001487788395024836,
-0.19330300390720367,
-0.021610399708151817,
0.036262672394514084,
-0.037719130516052246,
-0.00441049225628376,
-0.028409361839294434,
0.01911361888051033,
0.11258728057146072,
0.021300876513123512,
-0.07738538086414337,
0.012448742054402828,
0.0038933255709707737,
-0.05322539433836937,
0.20229770243167877,
-0.07603248953819275,
-0.11902854591608047,
-0.10590559244155884,
-0.09556740522384644,
-0.05718725174665451,
-0.0008508993778377771,
0.04653269797563553,
-0.10749442875385284,
-0.0240947213023901,
-0.0606745108962059,
0.025487396866083145,
0.012169627472758293,
0.06210756674408913,
0.0031934166327118874,
-0.0051305354572832584,
0.05350712686777115,
-0.10008091479539871,
-0.002178957685828209,
-0.052146170288324356,
-0.027657393366098404,
0.050238244235515594,
0.063178151845932,
0.10211953520774841,
0.15296535193920135,
-0.015997759997844696,
0.028012653812766075,
-0.030572576448321342,
0.21441268920898438,
-0.07976911216974258,
-0.01662059873342514,
0.08924929797649384,
-0.01825229451060295,
0.06650899350643158,
0.13265086710453033,
0.07388681918382645,
-0.09670280665159225,
0.006611022632569075,
0.04445343837141991,
-0.03970674052834511,
-0.19235017895698547,
-0.029495161026716232,
-0.04392477869987488,
0.01828726753592491,
0.1234361082315445,
0.018833013251423836,
0.01747298613190651,
0.05814094468951225,
0.04301447793841362,
0.055446114391088486,
-0.05592475086450577,
0.06904582679271698,
0.07099278271198273,
0.05927732214331627,
0.12908704578876495,
-0.03226690739393234,
-0.06191873177886009,
0.033101655542850494,
-0.014247296378016472,
0.22873622179031372,
-0.03232303261756897,
0.1738801896572113,
0.024940861389040947,
0.1743060201406479,
0.012735799886286259,
0.09155739098787308,
-0.0068103778176009655,
-0.04885132610797882,
0.0005894143250770867,
-0.046677395701408386,
-0.025961022824048996,
0.016474561765789986,
-0.014190221205353737,
0.05333273857831955,
-0.13172544538974762,
0.02675352804362774,
0.05386324226856232,
0.2863484025001526,
0.08150189369916916,
-0.35124075412750244,
-0.09179403632879257,
-0.006651381962001324,
-0.022599877789616585,
-0.013585694134235382,
-0.0026284067425876856,
0.12275093793869019,
-0.08508047461509705,
0.04506596177816391,
-0.08220399171113968,
0.07871711999177933,
-0.07700912654399872,
0.023973898962140083,
0.08743347972631454,
0.0985272154211998,
-0.017201479524374008,
0.04951580986380577,
-0.25378361344337463,
0.2864161729812622,
0.023003041744232178,
0.05666625127196312,
-0.0821986272931099,
0.005590895656496286,
0.02410198003053665,
0.010252890177071095,
0.05436623841524124,
-0.018374500796198845,
-0.07368026673793793,
-0.21560989320278168,
-0.11271709203720093,
0.001275321701541543,
0.11670666933059692,
-0.043677378445863724,
0.12630896270275116,
-0.015155627392232418,
-0.023672375828027725,
0.05558401718735695,
-0.05256589502096176,
-0.0051282476633787155,
-0.08133203536272049,
0.015275957062840462,
0.011898854747414589,
-0.04004755988717079,
-0.04957939311861992,
-0.12639859318733215,
-0.08488338440656662,
0.16985490918159485,
0.010109899565577507,
-0.03293249011039734,
-0.134600430727005,
0.10678630322217941,
0.11929435282945633,
-0.09600137174129486,
0.04332031309604645,
0.016549432650208473,
0.0825340747833252,
0.036732740700244904,
-0.05327001214027405,
0.13909542560577393,
-0.052555255591869354,
-0.1916748583316803,
-0.05969487875699997,
0.10996825993061066,
0.035490863025188446,
0.06756645441055298,
-0.03371315076947212,
0.0444055013358593,
-0.005678171757608652,
-0.0867549329996109,
0.04332292452454567,
-0.037213899195194244,
0.07259302586317062,
0.021739458665251732,
-0.02144930139183998,
0.04213184863328934,
-0.05065305158495903,
-0.01485846471041441,
0.17879529297351837,
0.2733917534351349,
-0.1062849834561348,
0.02113531157374382,
0.037904612720012665,
-0.04028071463108063,
-0.1735517531633377,
0.05450504645705223,
0.07821379601955414,
0.004856183659285307,
0.026917915791273117,
-0.17508602142333984,
0.11881206184625626,
0.10741092264652252,
-0.007177873980253935,
0.10954997688531876,
-0.30759814381599426,
-0.12971645593643188,
0.0886145532131195,
0.12200373411178589,
0.0626298114657402,
-0.1514144390821457,
-0.03472986817359924,
0.00878357607871294,
-0.12413803488016129,
0.10643204301595688,
-0.06444834917783737,
0.12989948689937592,
-0.028278646990656853,
0.0656622052192688,
0.020357562229037285,
-0.06932920962572098,
0.11268310993909836,
0.02636902406811714,
0.09772464632987976,
-0.05539444461464882,
-0.03934084624052048,
0.05707301199436188,
-0.0665736272931099,
0.021350087597966194,
-0.052132535725831985,
0.01429534237831831,
-0.09306123107671738,
-0.016912799328565598,
-0.0862785279750824,
0.008749639615416527,
-0.036905352026224136,
-0.055599600076675415,
-0.04034309461712837,
0.043808069080114365,
0.049344662576913834,
-0.024217141792178154,
0.1678793877363205,
0.02828509360551834,
0.1442204713821411,
0.1219736635684967,
0.061129216104745865,
-0.0752081573009491,
-0.07746078073978424,
0.006316626910120249,
-0.0025524755474179983,
0.044998329132795334,
-0.1387494057416916,
0.036846987903118134,
0.15966768562793732,
0.03399357944726944,
0.12334675341844559,
0.08075721561908722,
-0.02637980692088604,
0.017271999269723892,
0.06813623011112213,
-0.15006114542484283,
-0.08571235090494156,
0.016706690192222595,
-0.07176823914051056,
-0.1429063230752945,
0.04744051396846771,
0.10697463154792786,
-0.05613863468170166,
-0.009586872532963753,
0.00036182248732075095,
0.0195937417447567,
-0.03074522688984871,
0.2378915548324585,
0.04840075224637985,
0.07158277928829193,
-0.11372166126966476,
0.06855716556310654,
0.06815754622220993,
-0.10006263852119446,
0.021395904943346977,
0.10611056536436081,
-0.08002206683158875,
-0.018476027995347977,
0.06817614287137985,
0.14837469160556793,
-0.05761835724115372,
-0.00813490990549326,
-0.1678057312965393,
-0.10431777685880661,
0.08223364502191544,
0.1712951362133026,
0.08604313433170319,
0.019042236730456352,
-0.036924805492162704,
0.018412107601761818,
-0.14455965161323547,
0.10419599711894989,
0.05950551480054855,
0.09285464882850647,
-0.15061526000499725,
0.16401396691799164,
-0.015046617016196251,
0.04628485068678856,
-0.017775310203433037,
0.025986913591623306,
-0.12892265617847443,
0.0031700441613793373,
-0.09881370514631271,
-0.02628890983760357,
-0.04742627963423729,
-0.0024813043419271708,
-0.013888995163142681,
-0.053281139582395554,
-0.05048234015703201,
-0.0024068800266832113,
-0.11550119519233704,
-0.037733543664216995,
0.021592119708657265,
0.029453080147504807,
-0.13034120202064514,
-0.03145480155944824,
0.018128015100955963,
-0.08029986917972565,
0.09125842899084091,
0.0432625375688076,
0.034070130437612534,
0.03145857900381088,
-0.06776212155818939,
-0.0023108776658773422,
0.051704712212085724,
-0.021153250709176064,
0.1047038808465004,
-0.114498570561409,
-0.00641576386988163,
-0.03870747238397598,
0.05598817020654678,
0.028122495859861374,
0.07567135244607925,
-0.1388235241174698,
0.016095755621790886,
-0.04095178097486496,
-0.057641346007585526,
-0.06518425792455673,
0.042081210762262344,
0.08370576798915863,
0.0008931304328143597,
0.16196390986442566,
-0.07103294879198074,
0.055264126509428024,
-0.21620947122573853,
-0.016188044100999832,
-0.01652383990585804,
-0.11790089309215546,
-0.061503831297159195,
-0.03436970338225365,
0.0878443717956543,
-0.06836311519145966,
0.10225629806518555,
0.00709443399682641,
0.043757691979408264,
0.0381476953625679,
-0.036640800535678864,
-0.017605949193239212,
0.04651851952075958,
0.14575213193893433,
0.038009531795978546,
-0.052763327956199646,
0.08461746573448181,
0.07000283896923065,
0.09748588502407074,
0.14307621121406555,
0.23057803511619568,
0.11149199306964874,
0.020454086363315582,
0.08280311524868011,
0.034974656999111176,
-0.09027320146560669,
-0.1997087150812149,
0.07793926447629929,
-0.06832960247993469,
0.12345129251480103,
-0.01572217047214508,
0.16114933788776398,
0.07003538310527802,
-0.16754867136478424,
0.04525730386376381,
-0.05578632652759552,
-0.10308211296796799,
-0.10752956569194794,
-0.02658454142510891,
-0.09018830955028534,
-0.1433696150779724,
-0.0014483585255220532,
-0.11705341935157776,
0.02972467988729477,
0.11441750824451447,
0.028155775740742683,
0.013471715152263641,
0.16907744109630585,
0.04051310196518898,
0.05109061300754547,
0.06168700009584427,
0.028802737593650818,
-0.018779529258608818,
-0.053804002702236176,
-0.060452889651060104,
-0.023273760452866554,
-0.013478648848831654,
0.04065142944455147,
-0.058065906167030334,
-0.056633442640304565,
0.05013978108763695,
-0.002231177408248186,
-0.1225808784365654,
0.019544025883078575,
0.011819027364253998,
0.07230300456285477,
0.03056969679892063,
0.007384825497865677,
0.01394625473767519,
-0.03824608400464058,
0.22984717786312103,
-0.08958908915519714,
-0.033377505838871,
-0.11875645816326141,
0.24067892134189606,
0.03107283264398575,
-0.016490664333105087,
0.02357189543545246,
-0.08330612629652023,
-0.010747628286480904,
0.18001727759838104,
0.18410764634609222,
-0.07095882296562195,
-0.0035748600494116545,
0.009760675020515919,
-0.016638124361634254,
-0.018745487555861473,
0.09698410332202911,
0.1174701526761055,
0.05088459327816963,
-0.09256977587938309,
-0.04947831854224205,
-0.04626207426190376,
-0.04358384758234024,
-0.042880162596702576,
0.05591105669736862,
0.0432172566652298,
0.03910258039832115,
-0.050047505646944046,
0.05078638717532158,
-0.05629884824156761,
-0.10851293057203293,
0.07694392651319504,
-0.2503175735473633,
-0.19703100621700287,
-0.013288039714097977,
0.07284727692604065,
0.009341620840132236,
0.07344871759414673,
-0.0007714264211244881,
-0.02690843492746353,
0.059814997017383575,
-0.0144476518034935,
-0.06640730798244476,
-0.10747106373310089,
0.0892874225974083,
-0.10031343251466751,
0.20163045823574066,
-0.04781235009431839,
0.024716956540942192,
0.1366533488035202,
0.04945402964949608,
-0.10920862853527069,
0.014929820783436298,
0.06393200904130936,
-0.10079633444547653,
0.016329476609826088,
0.14657172560691833,
-0.04169675335288048,
0.08417756110429764,
0.04569156467914581,
-0.14121118187904358,
0.0018760489765554667,
-0.0756891593337059,
-0.021516401320695877,
-0.03624223917722702,
-0.05485100299119949,
-0.029698235914111137,
0.14092853665351868,
0.23926453292369843,
-0.03994458168745041,
0.006426344625651836,
-0.05997280403971672,
0.0029234776739031076,
0.05430585518479347,
0.049271054565906525,
-0.0805923193693161,
-0.26973190903663635,
0.029267551377415657,
0.07546886801719666,
-0.004898872226476669,
-0.22864262759685516,
-0.0953429788351059,
0.043304141610860825,
-0.06439782679080963,
-0.09312216937541962,
0.09800391644239426,
0.05921388790011406,
0.06554768979549408,
-0.06368079036474228,
-0.058140769600868225,
-0.07944784313440323,
0.1566217690706253,
-0.1804206669330597,
-0.0930945947766304
] |
null | null | null |
This is a test model
|
{}
| null |
bradyll/bert_finetuning_test_20220210
|
[
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#region-us
|
This is a test model
|
[] |
[
"TAGS\n#region-us \n"
] |
[
6
] |
[
"passage: TAGS\n#region-us \n"
] |
[
0.024608636274933815,
-0.026205500587821007,
-0.009666500613093376,
-0.10395516455173492,
0.08638657629489899,
0.059816278517246246,
0.01882290467619896,
0.020661840215325356,
0.23975107073783875,
-0.005599027033895254,
0.1219947561621666,
0.0015615287702530622,
-0.037353623658418655,
0.03733762726187706,
-0.0035912662278860807,
-0.17583473026752472,
0.03876631706953049,
-0.018274923786520958,
0.01843859627842903,
0.026470553129911423,
-0.07776834815740585,
-0.07564429938793182,
0.015296397730708122,
-0.10247814655303955,
-0.083692267537117,
0.11002834886312485,
0.031466204673051834,
-0.019670886918902397,
0.10779199749231339,
-0.04243955761194229,
0.18699054419994354,
-0.011512263678014278,
-0.11213519424200058,
-0.2536850869655609,
0.021806683391332626,
-0.01765260472893715,
-0.08747660368680954,
0.01506110467016697,
0.0665089413523674,
-0.09014441072940826,
-0.0588928684592247,
0.0795099288225174,
-0.01132340170443058,
0.04246443510055542,
-0.27593839168548584,
-0.12684126198291779,
-0.05297930911183357,
-0.1421966552734375,
0.08651168644428253,
0.04035491496324539,
0.008764253929257393,
0.15506891906261444,
-0.20897391438484192,
0.004104613792151213,
0.08255259692668915,
-0.2538507878780365,
0.05591634660959244,
0.17671173810958862,
0.03623908758163452,
0.18037272989749908,
0.0060391901060938835,
0.11029672622680664,
0.0716743916273117,
-0.024263937026262283,
-0.17590197920799255,
-0.08127854019403458,
-0.04696211963891983,
0.16642488539218903,
-0.06727185100317001,
-0.14248386025428772,
0.34701237082481384,
0.00015008423360995948,
0.009657775051891804,
0.16921205818653107,
-0.059524230659008026,
-0.09972117841243744,
0.07259953022003174,
0.016484731808304787,
0.018492350354790688,
0.1471305936574936,
0.16307872533798218,
-0.0458691343665123,
-0.13837823271751404,
-0.018630273640155792,
-0.22798998653888702,
0.17510560154914856,
-0.03248048573732376,
0.13137903809547424,
-0.27447956800460815,
0.01684025302529335,
-0.2570667266845703,
0.0032130838371813297,
0.04178816080093384,
-0.06004921346902847,
-0.0226522795855999,
-0.013265985064208508,
-0.08018817007541656,
0.004899587947875261,
0.06192673370242119,
0.1266920566558838,
-0.06128726154565811,
0.06128238886594772,
-0.09319206327199936,
0.141696035861969,
0.07166698575019836,
0.07868369668722153,
0.13037432730197906,
0.041205424815416336,
-0.07187089323997498,
-0.21872246265411377,
-0.0026476888451725245,
-0.06275863200426102,
-0.09502086788415909,
-0.0020165652967989445,
-0.11606067419052124,
0.17244569957256317,
-0.030802514404058456,
-0.09825427830219269,
-0.11208184063434601,
0.09148659557104111,
-0.032992321997880936,
-0.03437839448451996,
-0.03552987426519394,
-0.020977836102247238,
0.019381176680326462,
0.04704452306032181,
-0.1548958420753479,
-0.005131472367793322,
0.07039852440357208,
0.11502562463283539,
-0.1346137970685959,
-0.003783059772104025,
-0.07908964157104492,
0.03039063885807991,
0.07654735445976257,
-0.16510222852230072,
0.03158547356724739,
-0.1124754324555397,
-0.07531405985355377,
0.002912673633545637,
-0.015710093080997467,
-0.016202643513679504,
0.166526660323143,
-0.0020451415330171585,
0.0714716836810112,
-0.026345307007431984,
-0.05890209600329399,
-0.11243434250354767,
-0.08489254862070084,
0.05390460044145584,
0.03670717030763626,
0.03266148269176483,
-0.2193479984998703,
0.014805203303694725,
-0.12762966752052307,
0.1360815018415451,
-0.10566820204257965,
-0.04705966264009476,
-0.022842247039079666,
0.20562705397605896,
0.037286072969436646,
0.08762791007757187,
-0.22171171009540558,
0.039756543934345245,
-0.05404696613550186,
0.18480908870697021,
-0.1502426266670227,
-0.0799463614821434,
0.20813211798667908,
-0.07964949309825897,
-0.10115210711956024,
0.021235812455415726,
0.020391687750816345,
0.026287272572517395,
0.0766737088561058,
0.4564172327518463,
-0.09766800701618195,
-0.09146861732006073,
0.10178250074386597,
0.17055274546146393,
-0.12427149713039398,
-0.1827561855316162,
0.06446871906518936,
-0.16666454076766968,
-0.1973118633031845,
0.0018917324487119913,
0.09222044050693512,
0.038269978016614914,
-0.07875611633062363,
-0.020746968686580658,
0.06325206160545349,
-0.0007678253459744155,
0.09095914661884308,
0.03755716234445572,
0.09034032374620438,
-0.08716782182455063,
0.11115926504135132,
-0.05017651244997978,
0.004037132486701012,
0.1343354731798172,
0.027325427159667015,
-0.03223329409956932,
0.08694463223218918,
-0.0485352948307991,
0.05295134335756302,
-0.1662379503250122,
-0.15068690478801727,
0.03398871049284935,
0.06283251196146011,
0.03186952322721481,
0.1280253529548645,
0.08141885697841644,
-0.10732853412628174,
0.022690722718834877,
-0.004228927195072174,
0.058398615568876266,
0.03891623765230179,
0.006107209715992212,
0.008764320984482765,
0.0961301177740097,
-0.10607069730758667,
-0.13589619100093842,
-0.07336436957120895,
-0.014715781435370445,
0.14371353387832642,
-0.0302802175283432,
0.07690227776765823,
-0.004240254405885935,
0.00013200697139836848,
0.06930823624134064,
0.08137880265712738,
0.016412746161222458,
0.08971183747053146,
-0.05237193778157234,
-0.05160155147314072,
0.10863113403320312,
-0.13533565402030945,
0.17837053537368774,
0.14053137600421906,
-0.20532016456127167,
0.029453208670020103,
-0.06838275492191315,
0.03670361638069153,
-0.008162540383636951,
0.0975119024515152,
-0.08272241055965424,
-0.02106042578816414,
0.013134466484189034,
0.0052274600602686405,
-0.013007243163883686,
0.017682146281003952,
-0.07295988500118256,
-0.07787393033504486,
-0.10233919322490692,
0.08436838537454605,
0.11562882363796234,
-0.10282530635595322,
0.14214380085468292,
0.4384984076023102,
0.11495281755924225,
0.21582984924316406,
-0.09581480920314789,
-0.0412987545132637,
0.007486371789127588,
0.0001535322517156601,
-0.04476691037416458,
0.08031861484050751,
-0.15973517298698425,
-0.038901735097169876,
0.027348900213837624,
0.07128690183162689,
0.11475157737731934,
-0.14959022402763367,
-0.09639324247837067,
-0.00793045200407505,
0.0022841424215584993,
-0.1249532699584961,
0.023905446752905846,
-0.03974650055170059,
0.04015624523162842,
0.07232289016246796,
-0.021535737439990044,
0.13939237594604492,
-0.04166141897439957,
-0.0639561116695404,
0.07585346698760986,
-0.2017085999250412,
-0.23179671168327332,
-0.12309670448303223,
-0.14680525660514832,
0.04366797208786011,
0.05154111236333847,
0.01726446859538555,
-0.17635835707187653,
-0.015074856579303741,
0.07706750929355621,
0.07820965349674225,
-0.20886357128620148,
-0.022814949974417686,
-0.004290030337870121,
0.0895976573228836,
-0.10227091610431671,
-0.0017130117630586028,
-0.04419664293527603,
-0.10150232166051865,
0.0017003051470965147,
0.07279510796070099,
-0.137485533952713,
0.13807645440101624,
0.21589438617229462,
0.07225540280342102,
0.07359948754310608,
-0.019093448296189308,
0.09936179965734482,
-0.10856141895055771,
-0.16549113392829895,
0.08348225057125092,
-0.06234746053814888,
0.047262318432331085,
0.17534415423870087,
0.03307317942380905,
-0.13904969394207,
-0.015682822093367577,
-0.0402069091796875,
-0.15603256225585938,
-0.238995760679245,
-0.09178274869918823,
-0.1182505264878273,
0.16442428529262543,
0.0009358620154671371,
0.06651917099952698,
0.08258313685655594,
-0.022042419761419296,
0.16447891294956207,
-0.07379321753978729,
-0.07578866183757782,
-0.006978808436542749,
0.12375060468912125,
-0.056660156697034836,
-0.03080669604241848,
-0.10566964000463486,
-0.008295975625514984,
0.1151021271944046,
0.15304014086723328,
0.12214863300323486,
0.2957419455051422,
0.08268889784812927,
0.026645636186003685,
0.08958091586828232,
0.17622539401054382,
0.09495089203119278,
0.07838419824838638,
-0.045413073152303696,
-0.014814783819019794,
0.014317171648144722,
-0.04022889584302902,
0.010141594335436821,
0.14683100581169128,
-0.2679629921913147,
-0.006678564939647913,
-0.2710230350494385,
0.0965198427438736,
-0.10913380235433578,
0.11837165057659149,
-0.01015760749578476,
0.10194015502929688,
0.11082887649536133,
0.03233652561903,
-0.03858073800802231,
0.16613617539405823,
0.08450309932231903,
-0.11277695000171661,
0.001758623169735074,
0.03737903758883476,
0.09715615212917328,
-0.02818971499800682,
0.12721189856529236,
-0.11048974841833115,
-0.1464834064245224,
0.013753619976341724,
0.07152791321277618,
-0.15373679995536804,
0.3138748109340668,
0.012069208547472954,
-0.13481520116329193,
-0.01481647603213787,
-0.09957809001207352,
-0.006440147757530212,
0.1254177987575531,
0.09333524852991104,
0.07935678958892822,
-0.2185502052307129,
-0.13339371979236603,
0.05872276425361633,
-0.00575496768578887,
0.22408108413219452,
-0.034034017473459244,
-0.11356475204229355,
-0.027013886719942093,
0.04241163283586502,
-0.06043251231312752,
0.08524788916110992,
0.023536119610071182,
-0.08113526552915573,
-0.032957352697849274,
0.05323701351881027,
0.012368366122245789,
0.00524376705288887,
0.09360801428556442,
0.020107939839363098,
-0.0009265501867048442,
0.01785753294825554,
0.047885000705718994,
-0.0675911232829094,
-0.1984109878540039,
0.09357594698667526,
-0.05215044692158699,
0.0015536568826064467,
-0.08013670891523361,
-0.15122665464878082,
-0.08837161958217621,
-0.16009655594825745,
0.12540200352668762,
-0.034406669437885284,
0.12700119614601135,
-0.06619787961244583,
0.17341409623622894,
-0.07871770113706589,
0.04481020197272301,
-0.047349292784929276,
0.050332702696323395,
-0.007268077693879604,
-0.07756082713603973,
0.16585899889469147,
-0.15564003586769104,
0.01809087023139,
0.19572502374649048,
-0.018915493041276932,
0.07177707552909851,
0.021322092041373253,
-0.0636206790804863,
0.23147478699684143,
0.3014698624610901,
0.008138049393892288,
0.1665448248386383,
0.3018903136253357,
-0.07466315478086472,
-0.2642788887023926,
-0.05505012720823288,
-0.2841376066207886,
-0.05371501296758652,
0.10716094076633453,
-0.22523896396160126,
0.06986407935619354,
0.14383509755134583,
-0.06471995264291763,
0.30228954553604126,
-0.21825523674488068,
0.012589273042976856,
0.15434536337852478,
-0.08868814259767532,
0.5515313148498535,
-0.1133413165807724,
-0.17677772045135498,
-0.008122089318931103,
-0.08741296827793121,
0.10602109134197235,
-0.0340677872300148,
0.06877441704273224,
0.013465235009789467,
0.04797380417585373,
0.048932258039712906,
-0.03111894056200981,
0.22701001167297363,
0.008710170164704323,
0.09015397727489471,
-0.07378865778446198,
-0.18624304234981537,
0.11639340221881866,
-0.04359482601284981,
-0.08891059458255768,
0.0849778801202774,
-0.05942516401410103,
-0.11078983545303345,
0.04663389176130295,
-0.07950539886951447,
-0.024862350896000862,
0.08423490077257156,
-0.04678233340382576,
-0.042606171220541,
-0.008054176345467567,
-0.1618063747882843,
-0.0002289071271661669,
0.31360217928886414,
-0.07096036523580551,
0.16695955395698547,
0.03677211329340935,
0.00038613268407061696,
-0.11027684062719345,
0.030288029462099075,
-0.05203165486454964,
-0.021576624363660812,
0.09578979015350342,
-0.11096979677677155,
0.03204701095819473,
0.14160704612731934,
-0.04864364117383957,
0.05846960097551346,
0.09256096184253693,
-0.0849417969584465,
0.007583672646433115,
0.17753590643405914,
-0.17537221312522888,
-0.1273445188999176,
-0.006135711446404457,
-0.09862716495990753,
0.14055661857128143,
0.04394126310944557,
0.05191568285226822,
0.16669964790344238,
0.03967129811644554,
-0.029474308714270592,
-0.02817419543862343,
-0.1153380498290062,
-0.0201893113553524,
0.040153320878744125,
0.00045633706031367183,
-0.08791285753250122,
0.2262638509273529,
0.06409153342247009,
-0.1328488290309906,
-0.051157206296920776,
0.2161225974559784,
-0.06805316358804703,
-0.04911920800805092,
-0.223562553524971,
0.10752306133508682,
-0.07112517952919006,
-0.0965060144662857,
0.05453834682703018,
-0.02270081453025341,
0.005106312222778797,
0.181985542178154,
0.03941008821129799,
0.11070270836353302,
0.03738937899470329,
-0.02448922023177147,
0.15798696875572205,
-0.142850860953331,
-0.14191335439682007,
-0.025354057550430298,
-0.08757315576076508,
-0.13844476640224457,
-0.026804137974977493,
0.1617041826248169,
-0.09177309274673462,
-0.14772607386112213,
-0.2621181011199951,
0.10968475043773651,
-0.16432365775108337,
-0.10192688554525375,
-0.03469514101743698,
-0.08968492597341537,
0.0696166530251503,
0.030301768332719803,
-0.03093348816037178,
-0.06706760823726654,
-0.18593791127204895,
0.0816768929362297,
0.06349513679742813,
0.045533183962106705,
-0.017847947776317596,
0.0067379772663116455,
0.1720137596130371,
0.025955144315958023,
0.10040043294429779,
0.16762186586856842,
0.011397695168852806,
0.2246655523777008,
-0.1671202927827835,
-0.11496317386627197,
0.1336962729692459,
-0.026543032377958298,
0.06762003898620605,
0.16792191565036774,
-0.0772583931684494,
0.015526676550507545,
-0.028136352077126503,
0.07066910713911057,
-0.11003983020782471,
-0.105624258518219,
0.007937257178127766,
0.02567129209637642,
-0.2755882740020752,
-0.005599735304713249,
-0.19717298448085785,
0.14788752794265747,
0.02579621411859989,
0.03297143429517746,
0.10257530212402344,
0.10404334217309952,
0.08312062919139862,
-0.0017710148822516203,
0.03226327523589134,
-0.1176818460226059,
0.02753005363047123,
-0.059239376336336136,
-0.020663779228925705,
0.017624232918024063,
0.36952024698257446,
-0.03603357449173927,
-0.046802736818790436,
0.003710439894348383,
0.1307835876941681,
-0.02139742486178875,
0.017395347356796265,
0.13209912180900574,
0.12607666850090027,
-0.08595693111419678,
-0.1504845917224884,
0.04888554662466049,
-0.04565655067563057,
-0.02836887165904045,
0.1464131623506546,
0.05905961990356445,
0.1050296202301979,
0.0908031314611435,
-0.014463032595813274,
-0.00318976235575974,
0.012856799177825451,
-0.15486004948616028,
0.06223496049642563,
-0.010558074340224266,
0.012565906159579754,
0.017934376373887062,
0.15238402783870697,
-0.005540105979889631,
0.07739730179309845,
-0.09889880567789078,
0.004208535887300968,
-0.13498884439468384,
-0.07913459837436676,
0.03617347031831741,
-0.13393273949623108,
0.04141177982091904,
-0.01871878281235695,
0.029611799865961075,
0.30386561155319214,
0.02558239921927452,
-0.020639164373278618,
0.12512871623039246,
-0.1214587539434433,
-0.12050267308950424,
-0.001594188273884356,
-0.029960084706544876,
0.0791488066315651,
-0.02633434161543846,
-0.0997740775346756,
-0.1001306027173996,
-0.15166029334068298,
-0.09759195148944855,
0.05182836204767227,
-0.04993441700935364,
-0.059362251311540604,
-0.17634081840515137,
-0.05707859992980957,
-0.05147340148687363,
0.14025864005088806,
-0.12263951450586319,
0.15159130096435547,
-0.014490418136119843,
0.004084470681846142,
0.04405883327126503,
0.1950942426919937,
-0.03644494712352753,
0.08714226633310318,
0.0154351145029068,
0.1522706001996994,
-0.05119588226079941,
0.14720745384693146,
-0.10931728035211563,
-0.04014137014746666,
-0.06710435450077057,
0.21513493359088898,
0.25630924105644226,
-0.06136954948306084,
-0.008937356993556023,
-0.012760217301547527,
0.058654606342315674,
0.1073930487036705,
0.16049085557460785,
0.002326392102986574,
0.2802925705909729,
-0.03133585304021835,
0.04815128445625305,
0.02901598811149597,
0.013607407920062542,
-0.06336209923028946,
0.03397751972079277,
0.07539387792348862,
-0.035039983689785004,
-0.1412304788827896,
0.15837742388248444,
-0.21980468928813934,
0.18157227337360382,
0.11640069633722305,
-0.19996967911720276,
-0.013728445395827293,
-0.04882071167230606,
0.1689416468143463,
-0.0856364443898201,
0.1637246012687683,
-0.0903693437576294,
-0.2108195722103119,
-0.2056000679731369,
0.03867346793413162,
-0.34623071551322937,
-0.254462867975235,
0.10422009229660034,
0.1488201916217804,
0.04015883058309555,
-0.018507536500692368,
-0.019967829808592796,
-0.018367022275924683,
0.04877542704343796,
-0.0067357709631323814,
0.06014643982052803,
0.031397558748722076,
-0.02988368645310402,
-0.24127542972564697,
-0.029804671183228493,
0.023964406922459602,
-0.07093082368373871,
0.07464958727359772,
-0.06874357163906097,
-0.022495782002806664,
0.08059766888618469,
-0.03066304884850979,
0.03298592567443848,
-0.035373736172914505,
-0.16326889395713806,
0.027529051527380943,
0.03900543600320816,
0.036012712866067886,
0.00634160777553916,
0.0008072225609794259,
-0.03455270454287529,
0.0644603744149208,
-0.16716794669628143,
-0.16015739738941193,
0.14140215516090393,
-0.06745140254497528,
0.2779497504234314,
-0.05812826007604599,
-0.0809100940823555,
0.04766704887151718,
-0.03426874056458473,
0.1807648241519928,
-0.07756473124027252,
0.047254521399736404,
0.12766779959201813,
0.011127962730824947,
0.03121316432952881,
-0.3092964291572571,
0.11082969605922699,
-0.000795336440205574,
-0.006093299947679043,
-0.07581598311662674
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# deberta-base-finetuned-ner
This model is a fine-tuned version of [microsoft/deberta-base](https://huggingface.co/microsoft/deberta-base) on the conll2003 dataset.
It achieves the following results on the evaluation set:
- Loss: 0.0501
- Precision: 0.9563
- Recall: 0.9652
- F1: 0.9608
- Accuracy: 0.9899
## Model description
More information needed
## Intended uses & limitations
More information needed
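Pending a fuller description, a minimal inference sketch with the `transformers` token-classification pipeline is given below. The example sentence and printed entities are purely illustrative; the snippet assumes the checkpoint is available under the repository id `geckos/deberta-base-fine-tuned-ner`.

```python
from transformers import pipeline

# Token-classification pipeline; "simple" aggregation merges word pieces
# back into whole entities (e.g. B-PER/I-PER spans become a single PER hit).
ner = pipeline(
    "token-classification",
    model="geckos/deberta-base-fine-tuned-ner",
    aggregation_strategy="simple",
)

print(ner("My name is Clara and I live in Berkeley, California."))
# Output is a list of dicts with entity_group, score, word, start and end.
```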
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 5
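For reference, the configuration above corresponds roughly to the `TrainingArguments` below. This is a reconstruction, not the original training script; `output_dir` and the per-epoch evaluation cadence are assumptions.

```python
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="deberta-base-finetuned-ner",  # assumed name, not stated in this card
    learning_rate=5e-5,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=16,
    num_train_epochs=5,
    seed=42,
    lr_scheduler_type="linear",
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    evaluation_strategy="epoch",  # assumed from the per-epoch validation rows below
)
```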
### Training results
| Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:---------:|:------:|:------:|:--------:|
| 0.1419 | 1.0 | 878 | 0.0628 | 0.9290 | 0.9288 | 0.9289 | 0.9835 |
| 0.0379 | 2.0 | 1756 | 0.0466 | 0.9456 | 0.9567 | 0.9511 | 0.9878 |
| 0.0176 | 3.0 | 2634 | 0.0473 | 0.9539 | 0.9575 | 0.9557 | 0.9890 |
| 0.0098 | 4.0 | 3512 | 0.0468 | 0.9570 | 0.9635 | 0.9603 | 0.9896 |
| 0.0043 | 5.0 | 4390 | 0.0501 | 0.9563 | 0.9652 | 0.9608 | 0.9899 |
### Framework versions
- Transformers 4.11.3
- Pytorch 1.9.0+cu111
- Datasets 1.12.1
- Tokenizers 0.10.3
|
{"license": "mit", "tags": ["generated_from_trainer"], "datasets": ["conll2003"], "metrics": ["precision", "recall", "f1", "accuracy"], "model-index": [{"name": "deberta-base-finetuned-ner", "results": [{"task": {"type": "token-classification", "name": "Token Classification"}, "dataset": {"name": "conll2003", "type": "conll2003", "args": "conll2003"}, "metrics": [{"type": "precision", "value": 0.9563020492186769, "name": "Precision"}, {"type": "recall", "value": 0.9652436720816018, "name": "Recall"}, {"type": "f1", "value": 0.9607520564042303, "name": "F1"}, {"type": "accuracy", "value": 0.9899205302077261, "name": "Accuracy"}]}]}]}
|
token-classification
|
geckos/deberta-base-fine-tuned-ner
|
[
"transformers",
"pytorch",
"tensorboard",
"deberta",
"token-classification",
"generated_from_trainer",
"dataset:conll2003",
"license:mit",
"model-index",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #deberta #token-classification #generated_from_trainer #dataset-conll2003 #license-mit #model-index #autotrain_compatible #endpoints_compatible #region-us
|
deberta-base-finetuned-ner
==========================
This model is a fine-tuned version of microsoft/deberta-base on the conll2003 dataset.
It achieves the following results on the evaluation set:
* Loss: 0.0501
* Precision: 0.9563
* Recall: 0.9652
* F1: 0.9608
* Accuracy: 0.9899
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 5e-05
* train\_batch\_size: 16
* eval\_batch\_size: 16
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 5
### Training results
### Framework versions
* Transformers 4.11.3
* Pytorch 1.9.0+cu111
* Datasets 1.12.1
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5e-05\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 5",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.9.0+cu111\n* Datasets 1.12.1\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #deberta #token-classification #generated_from_trainer #dataset-conll2003 #license-mit #model-index #autotrain_compatible #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5e-05\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 5",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.9.0+cu111\n* Datasets 1.12.1\n* Tokenizers 0.10.3"
] |
[
66,
98,
4,
34
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #deberta #token-classification #generated_from_trainer #dataset-conll2003 #license-mit #model-index #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5e-05\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 5### Training results### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.9.0+cu111\n* Datasets 1.12.1\n* Tokenizers 0.10.3"
] |
[
-0.10576985776424408,
0.09251377731561661,
-0.002313703764230013,
0.12035571783781052,
0.17572036385536194,
0.04090975224971771,
0.12844710052013397,
0.10590467602014542,
-0.09937417507171631,
0.027518562972545624,
0.13234947621822357,
0.17888541519641876,
0.0061524054035544395,
0.10288866609334946,
-0.048383962363004684,
-0.2740142345428467,
-0.01167710404843092,
0.0550568662583828,
-0.08107689023017883,
0.11973235011100769,
0.10141434520483017,
-0.14026708900928497,
0.095783531665802,
0.01346813328564167,
-0.2144881933927536,
0.0167938694357872,
0.011588718742132187,
-0.05222030729055405,
0.14573639631271362,
0.034221697598695755,
0.15206801891326904,
0.00030658190371468663,
0.0985461100935936,
-0.16606909036636353,
0.009736324660480022,
0.052569400519132614,
0.002145515289157629,
0.08458123356103897,
0.04619261249899864,
0.011645462363958359,
0.14511768519878387,
-0.08119358122348785,
0.07136267423629761,
0.011649085208773613,
-0.12686830759048462,
-0.2034590095281601,
-0.07838571071624756,
0.028418157249689102,
0.07969549298286438,
0.10089628398418427,
-0.0033097220584750175,
0.13090220093727112,
-0.09574951231479645,
0.08673245459794998,
0.1830054670572281,
-0.2726147770881653,
-0.07091643661260605,
0.04836897924542427,
0.007211450487375259,
0.031428854912519455,
-0.10126792639493942,
-0.03266134485602379,
0.06010891869664192,
0.04384966567158699,
0.12008097022771835,
-0.034504469484090805,
-0.08391817659139633,
0.022287268191576004,
-0.1407899409532547,
-0.023007703945040703,
0.14751660823822021,
0.03776942193508148,
-0.034068960696458817,
-0.0480012521147728,
-0.048565275967121124,
-0.1509631723165512,
-0.023396654054522514,
-0.03525322675704956,
0.04409933090209961,
-0.03558292239904404,
-0.07514526695013046,
0.004064633045345545,
-0.11068044602870941,
-0.07286319136619568,
-0.08306602388620377,
0.14460040628910065,
0.03636500984430313,
0.004797830246388912,
-0.037129905074834824,
0.11340685188770294,
0.013917303644120693,
-0.1201840415596962,
0.018396230414509773,
0.024736668914556503,
-0.006289680954068899,
-0.06586867570877075,
-0.0517692007124424,
-0.06943830102682114,
0.0037325823213905096,
0.1241094172000885,
-0.03234892711043358,
0.030812943354249,
0.054974514991045,
0.03194550797343254,
-0.0716853216290474,
0.18361896276474,
-0.055974021553993225,
-0.023729106411337852,
0.002393872942775488,
0.03894168511033058,
0.012111951597034931,
-0.006422626785933971,
-0.12453702837228775,
-0.00747452350333333,
0.09523823857307434,
0.008333887904882431,
-0.07323390990495682,
0.07077047228813171,
-0.05512723699212074,
-0.031334612518548965,
0.018383491784334183,
-0.07985479384660721,
0.03297613933682442,
-0.00468507269397378,
-0.08185412734746933,
-0.012181016616523266,
0.005816989112645388,
0.022093459963798523,
0.011432009749114513,
0.1299637109041214,
-0.10734175890684128,
0.009167804382741451,
-0.10167407244443893,
-0.12258701026439667,
0.01458565890789032,
-0.1128089502453804,
0.050788514316082,
-0.10002702474594116,
-0.17955148220062256,
-0.01358987670391798,
0.0464336983859539,
-0.02368977665901184,
-0.0525449775159359,
-0.0597420334815979,
-0.07204066216945648,
0.007686664815992117,
-0.0063501945696771145,
0.11659588664770126,
-0.05766918510198593,
0.09317654371261597,
0.029763035476207733,
0.05220092087984085,
-0.04992394894361496,
0.05250980332493782,
-0.09797070175409317,
0.014497744850814342,
-0.15424378216266632,
0.03223815932869911,
-0.04196784645318985,
0.06555289775133133,
-0.08367970585823059,
-0.10186763107776642,
0.014864059165120125,
0.008850879967212677,
0.07105431705713272,
0.08650103956460953,
-0.1849832385778427,
-0.08155325055122375,
0.13423210382461548,
-0.072153739631176,
-0.10830218344926834,
0.11330680549144745,
-0.07928957045078278,
0.05617528408765793,
0.06970107555389404,
0.14031217992305756,
0.0858219712972641,
-0.08837127685546875,
0.0017606117762625217,
-0.0023183964658528566,
0.03490244597196579,
-0.055366840213537216,
0.060096461325883865,
0.004577591549605131,
0.026074877008795738,
0.019457176327705383,
-0.02863280475139618,
0.052106648683547974,
-0.09664442390203476,
-0.09093577414751053,
-0.02137315459549427,
-0.09216829389333725,
0.043558280915021896,
0.07958633452653885,
0.07076157629489899,
-0.0942881777882576,
-0.08128592371940613,
0.09756815433502197,
0.09218087792396545,
-0.05594273656606674,
0.008481108583509922,
-0.06639140099287033,
0.06008927896618843,
-0.05411691591143608,
-0.038264647126197815,
-0.16061444580554962,
-0.05130784958600998,
-0.0007334031397476792,
0.009715333580970764,
0.010610060766339302,
0.05813714489340782,
0.07120504230260849,
0.06893270462751389,
-0.04603540152311325,
-0.01121821440756321,
-0.04415135830640793,
0.006810788530856371,
-0.12720701098442078,
-0.2085256576538086,
-0.04110829159617424,
-0.027890125289559364,
0.14037346839904785,
-0.2248314470052719,
0.03659527748823166,
-0.005627315491437912,
0.08641179651021957,
0.022573597729206085,
-0.013282821513712406,
-0.048940449953079224,
0.0796971246600151,
-0.04973210021853447,
-0.05441910773515701,
0.05736725032329559,
0.0016966164112091064,
-0.0886814072728157,
-0.06981994956731796,
-0.11079028993844986,
0.16780120134353638,
0.12723016738891602,
-0.11251785606145859,
-0.09480377286672592,
-0.015180664137005806,
-0.05689019709825516,
-0.033808473497629166,
-0.042465098202228546,
0.02085558883845806,
0.18324118852615356,
-0.01948859915137291,
0.13983336091041565,
-0.061233557760715485,
-0.04669613763689995,
0.02426602877676487,
-0.022900978103280067,
0.004859229549765587,
0.11646976321935654,
0.13781434297561646,
-0.09852299839258194,
0.15394893288612366,
0.12968015670776367,
-0.09234385192394257,
0.14930720627307892,
-0.028564151376485825,
-0.0779673233628273,
-0.02872610092163086,
-0.03440582752227783,
-0.0027568084187805653,
0.12673595547676086,
-0.15698285400867462,
-0.008967756293714046,
0.01535777747631073,
0.013777613639831543,
0.018379170447587967,
-0.2147412896156311,
-0.0494559220969677,
0.04580194875597954,
-0.02546674571931362,
-0.015133297070860863,
-0.005107154604047537,
0.001500295358709991,
0.10155346989631653,
0.009618177078664303,
-0.09534231573343277,
0.04172118008136749,
0.00764209870249033,
-0.07857774943113327,
0.21909114718437195,
-0.073478102684021,
-0.1217157319188118,
-0.11360155045986176,
-0.06627433747053146,
-0.045831747353076935,
0.01858026161789894,
0.05405004695057869,
-0.08665025979280472,
-0.03157038614153862,
-0.056856244802474976,
0.029652351513504982,
-0.021221432834863663,
0.030267948284745216,
-0.0028297603130340576,
-0.0014759391779080033,
0.047313399612903595,
-0.11596738547086716,
-0.005017489194869995,
-0.06322667747735977,
-0.066499263048172,
0.03327522054314613,
0.03754094988107681,
0.12658323347568512,
0.15178649127483368,
-0.021500933915376663,
0.0034636682830750942,
-0.03379734978079796,
0.2371780425310135,
-0.07649146765470505,
-0.03287869319319725,
0.1250295639038086,
-0.015773087739944458,
0.03221079707145691,
0.11725789308547974,
0.07504421472549438,
-0.08661673963069916,
0.003135599661618471,
0.049293484538793564,
-0.03428950905799866,
-0.2103152573108673,
-0.041153810918331146,
-0.052068859338760376,
-0.009312699548900127,
0.08873643726110458,
0.019428370520472527,
0.05298677086830139,
0.07743727415800095,
0.037542179226875305,
0.09175693988800049,
-0.04453372582793236,
0.0629471018910408,
0.12062659114599228,
0.04058384522795677,
0.12564995884895325,
-0.04106627777218819,
-0.08078443259000778,
0.03520559147000313,
-0.0056609646417200565,
0.22125142812728882,
0.017105165868997574,
0.12442943453788757,
0.060989268124103546,
0.14513027667999268,
-0.01020267978310585,
0.06869905441999435,
-0.0016150091541931033,
-0.04638930410146713,
-0.013459421694278717,
-0.03333979845046997,
-0.025715285912156105,
0.03153149411082268,
-0.04941246658563614,
0.06046068295836449,
-0.1315319836139679,
0.01861712522804737,
0.04433000832796097,
0.2218640297651291,
0.0497988685965538,
-0.3405657410621643,
-0.09381808340549469,
0.004205042961984873,
-0.021799616515636444,
-0.024774080142378807,
0.02098318561911583,
0.09474216401576996,
-0.08443659543991089,
0.022260332480072975,
-0.06515074521303177,
0.08194663375616074,
-0.07190848886966705,
0.04353732988238335,
0.09028195589780807,
0.10446076095104218,
-0.004862201400101185,
0.08016464859247208,
-0.29444777965545654,
0.26793697476387024,
0.0076173474080860615,
0.07987909018993378,
-0.07463791966438293,
0.0006129710236564279,
0.02436845749616623,
0.065752312541008,
0.07254309207201004,
-0.020988408476114273,
-0.028688525781035423,
-0.2092166244983673,
-0.038218334317207336,
0.027792923152446747,
0.0726998820900917,
-0.03814822807908058,
0.09549932181835175,
-0.023144187405705452,
0.013305706903338432,
0.08615560084581375,
0.0019351061200723052,
-0.055392686277627945,
-0.10072235763072968,
-0.010180719196796417,
0.014290742576122284,
-0.056562576442956924,
-0.05833921581506729,
-0.11217314749956131,
-0.11618032306432724,
0.14105302095413208,
-0.04618510603904724,
-0.03488817438483238,
-0.10965754091739655,
0.09230614453554153,
0.06526953727006912,
-0.08370465785264969,
0.04686681553721428,
0.01621164195239544,
0.06767086684703827,
0.034920524805784225,
-0.05846535041928291,
0.10908717662096024,
-0.06739408522844315,
-0.15669478476047516,
-0.07811245322227478,
0.08147313445806503,
0.050646763294935226,
0.05492076277732849,
-0.0021240212954580784,
0.011727754026651382,
-0.031009646132588387,
-0.08321472257375717,
0.029447097331285477,
0.000010599327652016655,
0.08381492644548416,
0.0005285179940983653,
-0.05413660779595375,
0.037022173404693604,
-0.0517776720225811,
-0.018651364371180534,
0.1789723038673401,
0.23457585275173187,
-0.09974949806928635,
-0.0031443173065781593,
0.019027553498744965,
-0.07206673920154572,
-0.18725071847438812,
0.06824596971273422,
0.05144613981246948,
0.02048989199101925,
0.03264773637056351,
-0.1905742734670639,
0.12777835130691528,
0.12269989401102066,
-0.005533766467124224,
0.1181650385260582,
-0.3201606571674347,
-0.12591104209423065,
0.13868103921413422,
0.1428884118795395,
0.11301995068788528,
-0.12997832894325256,
-0.009198565967381,
-0.01615786924958229,
-0.13108114898204803,
0.11053886264562607,
-0.08187147229909897,
0.10775447636842728,
-0.027647580951452255,
0.08937373012304306,
0.011560672894120216,
-0.05835004895925522,
0.11483762413263321,
0.029832372441887856,
0.11373622715473175,
-0.05568486079573631,
-0.05695364996790886,
0.0389927476644516,
-0.03187099099159241,
0.015285803005099297,
-0.05917096510529518,
0.023239808157086372,
-0.10269373655319214,
-0.03404666855931282,
-0.06840866059064865,
0.0535486601293087,
-0.0412679947912693,
-0.08694679290056229,
-0.04131271317601204,
0.030506940558552742,
0.036972884088754654,
-0.020105969160795212,
0.1350242644548416,
0.027025820687413216,
0.13812141120433807,
0.06506260484457016,
0.08811341226100922,
-0.04323728755116463,
-0.06005062162876129,
-0.010681028477847576,
-0.014030780643224716,
0.06658477336168289,
-0.12112085521221161,
0.020087124779820442,
0.14343573153018951,
0.028968144208192825,
0.13933983445167542,
0.0884619951248169,
-0.027404915541410446,
0.014703857712447643,
0.057791199535131454,
-0.14883534610271454,
-0.0772852972149849,
-0.0069203064776957035,
-0.08341304212808609,
-0.11044050008058548,
0.06679936498403549,
0.09534890949726105,
-0.07255826890468597,
-0.009287567809224129,
-0.006894249003380537,
-0.006461325101554394,
-0.05777214467525482,
0.19003745913505554,
0.062220677733421326,
0.03733396902680397,
-0.09268452227115631,
0.06134023889899254,
0.04395239055156708,
-0.06784512847661972,
0.008326959796249866,
0.03436460345983505,
-0.07276154309511185,
-0.042204685509204865,
0.0442056842148304,
0.20230548083782196,
-0.08069397509098053,
-0.037723202258348465,
-0.13609765470027924,
-0.11682228744029999,
0.07282952964305878,
0.15843014419078827,
0.11031165719032288,
0.02661808766424656,
-0.06254762411117554,
0.024614468216896057,
-0.14170673489570618,
0.0890093594789505,
0.0347934328019619,
0.07814830541610718,
-0.16086825728416443,
0.16770868003368378,
-0.004176356829702854,
0.02645389921963215,
-0.019560115411877632,
0.032185524702072144,
-0.11330627650022507,
0.0026849478017538786,
-0.11036138981580734,
-0.039500195533037186,
-0.04354605823755264,
0.009447156451642513,
0.005887444131076336,
-0.05413568392395973,
-0.056446731090545654,
0.00734232272952795,
-0.1012902483344078,
-0.01353178545832634,
0.05128636956214905,
0.0629344955086708,
-0.10874579846858978,
-0.03823751583695412,
0.01089943666011095,
-0.06161528453230858,
0.06888701766729355,
0.038292936980724335,
0.04493672773241997,
0.04812377691268921,
-0.12524199485778809,
0.03609552979469299,
0.07089147716760635,
0.011292306706309319,
0.07941312342882156,
-0.08786523342132568,
-0.008630193769931793,
-0.01157909631729126,
0.04698677733540535,
0.013170742429792881,
0.054646845906972885,
-0.13074065744876862,
0.0016745754983276129,
-0.009344362653791904,
-0.08583032339811325,
-0.0694475844502449,
0.03073842078447342,
0.09576971083879471,
0.0119257103651762,
0.20226521790027618,
-0.07106568664312363,
0.04541498422622681,
-0.20309977233409882,
-0.0027408574242144823,
-0.005437910556793213,
-0.09180805832147598,
-0.10881876200437546,
-0.05997638776898384,
0.06330984085798264,
-0.06509219855070114,
0.1435692310333252,
0.0366782620549202,
0.03528699278831482,
0.026064030826091766,
-0.028063734993338585,
0.038046374917030334,
0.025055179372429848,
0.21265625953674316,
0.03315499797463417,
-0.038738593459129333,
0.03825518488883972,
0.05879884958267212,
0.10644180327653885,
0.1188628226518631,
0.18528638780117035,
0.14313514530658722,
-0.03978057578206062,
0.08807065337896347,
0.05149129778146744,
-0.07257960736751556,
-0.16476382315158844,
0.02652357891201973,
-0.02652399055659771,
0.0860569179058075,
-0.016311179846525192,
0.20237727463245392,
0.07903122156858444,
-0.167165607213974,
0.02579472027719021,
-0.04739713668823242,
-0.08745788782835007,
-0.11436120420694351,
-0.049245089292526245,
-0.07896435260772705,
-0.11640507727861404,
0.01098690740764141,
-0.11955995112657547,
0.0009404269512742758,
0.10129866749048233,
0.004261395428329706,
-0.03278457000851631,
0.1623128205537796,
0.029612349346280098,
0.04216331988573074,
0.046151820570230484,
0.023165330290794373,
-0.026460831984877586,
-0.11837486177682877,
-0.05794220045208931,
-0.02974756248295307,
-0.027947811409831047,
0.022973360493779182,
-0.08503628522157669,
-0.04827816039323807,
0.02844052202999592,
-0.004172362387180328,
-0.09389572590589523,
0.010623134672641754,
0.022765405476093292,
0.0509369857609272,
0.034731876105070114,
0.008548128418624401,
0.019080379977822304,
-0.013795513659715652,
0.18819542229175568,
-0.06830497831106186,
-0.044474583119153976,
-0.10296014696359634,
0.25722241401672363,
0.047111183404922485,
0.005946931894868612,
0.03850320354104042,
-0.07832030951976776,
0.020811473950743675,
0.22329285740852356,
0.21104690432548523,
-0.08136337995529175,
-0.004681107588112354,
0.010050706565380096,
-0.011678141541779041,
-0.03090522065758705,
0.10076334327459335,
0.10810870677232742,
0.06414123624563217,
-0.08268051594495773,
-0.04676781967282295,
-0.056826282292604446,
-0.0017750608967617154,
-0.016561472788453102,
0.06735166907310486,
0.06270025670528412,
0.0035677324049174786,
-0.03726149722933769,
0.054662059992551804,
-0.063030906021595,
-0.09525678306818008,
0.08873341232538223,
-0.19793473184108734,
-0.1575440615415573,
-0.009993781335651875,
0.08401069790124893,
-0.0029573317151516676,
0.07954457402229309,
-0.03715681657195091,
-0.009852918796241283,
0.08300072699785233,
-0.012706431560218334,
-0.10492322593927383,
-0.07599911093711853,
0.09673092514276505,
-0.05575471743941307,
0.21710696816444397,
-0.05548761412501335,
0.07278916984796524,
0.1296776533126831,
0.07304586470127106,
-0.07215254753828049,
0.053084488958120346,
0.05225659906864166,
-0.06634305417537689,
0.027394304051995277,
0.06898266822099686,
-0.03634381294250488,
0.07980683445930481,
0.04611680656671524,
-0.15591961145401,
0.028900928795337677,
-0.07406063377857208,
-0.06715510040521622,
-0.054154373705387115,
-0.01646398939192295,
-0.054419029504060745,
0.13641256093978882,
0.21693351864814758,
-0.02129628323018551,
-0.0058499169535934925,
-0.06411485373973846,
0.019979899749159813,
0.07470609992742538,
0.044666681438684464,
-0.0653337761759758,
-0.2128547579050064,
0.01305390801280737,
0.046738553792238235,
-0.025384463369846344,
-0.23752404749393463,
-0.08923634141683578,
0.008308700285851955,
-0.08055099099874496,
-0.08049432933330536,
0.0666925236582756,
0.08156424760818481,
0.05915014445781708,
-0.064488485455513,
-0.027353394776582718,
-0.08050832897424698,
0.1456223577260971,
-0.13938216865062714,
-0.0991014689207077
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# distilbert-base-uncased-finetuned-ner
This model is a fine-tuned version of [distilbert-base-uncased](https://huggingface.co/distilbert-base-uncased) on the conll2003 dataset.
It achieves the following results on the evaluation set:
- Loss: 0.0606
- Precision: 0.9303
- Recall: 0.9380
- F1: 0.9342
- Accuracy: 0.9842
## Model description
More information needed
## Intended uses & limitations
More information needed
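Pending a fuller description, the checkpoint can also be queried directly with the auto classes. The sketch below is illustrative only; the example sentence is arbitrary and the repository id `geckos/distilbert-base-uncased-fine-tuned-ner` is assumed to be where this checkpoint is published.

```python
import torch
from transformers import AutoTokenizer, AutoModelForTokenClassification

model_id = "geckos/distilbert-base-uncased-fine-tuned-ner"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForTokenClassification.from_pretrained(model_id)

inputs = tokenizer("Hugging Face is based in New York City", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits

# Map each sub-word token to its highest-scoring label.
predicted_ids = logits.argmax(dim=-1)[0]
tokens = tokenizer.convert_ids_to_tokens(inputs["input_ids"][0])
for token, label_id in zip(tokens, predicted_ids):
    print(f"{token:15s} {model.config.id2label[label_id.item()]}")
```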
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 2e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 3
### Training results
| Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:---------:|:------:|:------:|:--------:|
| 0.2459 | 1.0 | 878 | 0.0696 | 0.9117 | 0.9195 | 0.9156 | 0.9808 |
| 0.0513 | 2.0 | 1756 | 0.0602 | 0.9223 | 0.9376 | 0.9299 | 0.9835 |
| 0.0304 | 3.0 | 2634 | 0.0606 | 0.9303 | 0.9380 | 0.9342 | 0.9842 |
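The precision, recall and F1 figures above are entity-level scores of the kind produced by the `seqeval` package (an assumption; the card does not name the metric implementation). A toy sketch of how such scores are computed from IOB2 tag sequences:

```python
from seqeval.metrics import accuracy_score, f1_score, precision_score, recall_score

# Gold and predicted tag sequences for two short sentences (toy data).
y_true = [["B-PER", "I-PER", "O"], ["B-LOC", "O"]]
y_pred = [["B-PER", "I-PER", "O"], ["B-ORG", "O"]]

print("precision", precision_score(y_true, y_pred))
print("recall   ", recall_score(y_true, y_pred))
print("f1       ", f1_score(y_true, y_pred))
print("accuracy ", accuracy_score(y_true, y_pred))
```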
### Framework versions
- Transformers 4.11.3
- Pytorch 1.9.0+cu111
- Datasets 1.12.1
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "datasets": ["conll2003"], "metrics": ["precision", "recall", "f1", "accuracy"], "model-index": [{"name": "distilbert-base-uncased-finetuned-ner", "results": [{"task": {"type": "token-classification", "name": "Token Classification"}, "dataset": {"name": "conll2003", "type": "conll2003", "args": "conll2003"}, "metrics": [{"type": "precision", "value": 0.9303228669699323, "name": "Precision"}, {"type": "recall", "value": 0.9380243875153821, "name": "Recall"}, {"type": "f1", "value": 0.9341577540106952, "name": "F1"}, {"type": "accuracy", "value": 0.9842407104389407, "name": "Accuracy"}]}]}]}
|
token-classification
|
geckos/distilbert-base-uncased-fine-tuned-ner
|
[
"transformers",
"pytorch",
"tensorboard",
"distilbert",
"token-classification",
"generated_from_trainer",
"dataset:conll2003",
"license:apache-2.0",
"model-index",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #distilbert #token-classification #generated_from_trainer #dataset-conll2003 #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #region-us
|
distilbert-base-uncased-finetuned-ner
=====================================
This model is a fine-tuned version of distilbert-base-uncased on the conll2003 dataset.
It achieves the following results on the evaluation set:
* Loss: 0.0606
* Precision: 0.9303
* Recall: 0.9380
* F1: 0.9342
* Accuracy: 0.9842
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 2e-05
* train\_batch\_size: 16
* eval\_batch\_size: 16
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 3
### Training results
### Framework versions
* Transformers 4.11.3
* Pytorch 1.9.0+cu111
* Datasets 1.12.1
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 3",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.9.0+cu111\n* Datasets 1.12.1\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #distilbert #token-classification #generated_from_trainer #dataset-conll2003 #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 3",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.9.0+cu111\n* Datasets 1.12.1\n* Tokenizers 0.10.3"
] |
[
69,
98,
4,
34
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #distilbert #token-classification #generated_from_trainer #dataset-conll2003 #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 3### Training results### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.9.0+cu111\n* Datasets 1.12.1\n* Tokenizers 0.10.3"
] |
[
-0.10733510553836823,
0.11062104254961014,
-0.0024228524416685104,
0.1325540989637375,
0.1540447324514389,
0.03060363605618477,
0.12244931608438492,
0.11245027184486389,
-0.08881912380456924,
0.026323307305574417,
0.13199160993099213,
0.16142213344573975,
0.014326708391308784,
0.11677442491054535,
-0.051285602152347565,
-0.24676363170146942,
-0.002616961020976305,
0.04186413809657097,
-0.04823612794280052,
0.13163872063159943,
0.09798945486545563,
-0.1322900354862213,
0.09396342933177948,
0.014917727559804916,
-0.1936975121498108,
-0.008667378686368465,
0.0004885847447440028,
-0.05061890557408333,
0.1428566873073578,
0.01612226478755474,
0.12667229771614075,
-0.01101977564394474,
0.09467042982578278,
-0.17530158162117004,
0.0054322415962815285,
0.043885283172130585,
0.011817450635135174,
0.09517684578895569,
0.04061314836144447,
0.012719141319394112,
0.09476454555988312,
-0.061192747205495834,
0.05955372750759125,
0.01284539420157671,
-0.11939184367656708,
-0.20379577577114105,
-0.09321519732475281,
0.048268284648656845,
0.08315853029489517,
0.09797071665525436,
0.0037821088917553425,
0.13787192106246948,
-0.09031642973423004,
0.08555306494235992,
0.20739908516407013,
-0.2841174900531769,
-0.067635677754879,
0.04888610541820526,
0.01076198648661375,
0.04146585986018181,
-0.09858483076095581,
-0.0451296791434288,
0.04323715716600418,
0.050263479351997375,
0.13029727339744568,
-0.02722832001745701,
-0.1111602783203125,
0.012869314290583134,
-0.14384932816028595,
-0.04294050857424736,
0.16539320349693298,
0.04955935850739479,
-0.03458687290549278,
-0.04281463846564293,
-0.06295205652713776,
-0.166645348072052,
-0.028259368613362312,
-0.012791664339601994,
0.04420492798089981,
-0.026973655447363853,
-0.05449998378753662,
0.003101232461631298,
-0.09972875565290451,
-0.06765291094779968,
-0.08109655976295471,
0.1326194405555725,
0.03553297743201256,
0.01452646404504776,
-0.02066841907799244,
0.11491471529006958,
0.0015989457024261355,
-0.12109275162220001,
0.0210199486464262,
0.02240518294274807,
0.0028563886880874634,
-0.04626097530126572,
-0.05133408308029175,
-0.04205423220992088,
0.007943714037537575,
0.1462206095457077,
-0.03148413076996803,
0.032751552760601044,
0.05536580830812454,
0.04488501697778702,
-0.08707085251808167,
0.18346360325813293,
-0.04460016265511513,
-0.032944537699222565,
0.0071002584882080555,
0.054712045937776566,
0.025078335776925087,
-0.004087681416422129,
-0.12424585968255997,
0.011819878593087196,
0.09765861928462982,
0.009443329647183418,
-0.06554635614156723,
0.06497504562139511,
-0.06327164173126221,
-0.02802507020533085,
0.02093411423265934,
-0.08538620173931122,
0.027607813477516174,
-0.01002279482781887,
-0.07986030727624893,
-0.022588465362787247,
0.01715715229511261,
0.023761484771966934,
-0.003910721279680729,
0.11077068001031876,
-0.09729758650064468,
0.0200115405023098,
-0.0891166552901268,
-0.09994592517614365,
0.017933888360857964,
-0.10969336330890656,
0.03169896453619003,
-0.0952916368842125,
-0.19543574750423431,
-0.004506475757807493,
0.06340950727462769,
-0.022701917216181755,
-0.07032747566699982,
-0.045640766620635986,
-0.06749041378498077,
0.007868600077927113,
-0.009675129316747189,
0.11952773481607437,
-0.06584175676107407,
0.08975138515233994,
0.020175427198410034,
0.05778524652123451,
-0.05191502720117569,
0.05246754363179207,
-0.10893373936414719,
0.0250481516122818,
-0.15356679260730743,
0.030609672889113426,
-0.04942935332655907,
0.06505002081394196,
-0.08843687921762466,
-0.10045374929904938,
0.01839684508740902,
-0.018529105931520462,
0.06620105355978012,
0.08561114221811295,
-0.18387435376644135,
-0.06270121783018112,
0.1353021115064621,
-0.06175214424729347,
-0.12223131954669952,
0.1241571232676506,
-0.06641345471143723,
0.03996734693646431,
0.056543201208114624,
0.1534069925546646,
0.06896998733282089,
-0.07612133771181107,
0.0033212739508599043,
0.010368691757321358,
0.0510115772485733,
-0.06238113343715668,
0.07716860622167587,
0.006329122465103865,
0.017370929941534996,
0.02924727089703083,
-0.03569210320711136,
0.05612797662615776,
-0.08925527334213257,
-0.10121774673461914,
-0.039063382893800735,
-0.09784126281738281,
0.045979876071214676,
0.06310673803091049,
0.06353359669446945,
-0.08862420171499252,
-0.0751408040523529,
0.05118813365697861,
0.09116843342781067,
-0.04437239095568657,
0.02001636102795601,
-0.06434120237827301,
0.0804983377456665,
-0.044701676815748215,
-0.03129509463906288,
-0.17348822951316833,
-0.03480542451143265,
0.012780058197677135,
0.0016857037553563714,
0.014420964755117893,
0.025209812447428703,
0.06128649041056633,
0.07350592315196991,
-0.04164291173219681,
-0.018568921834230423,
-0.035305507481098175,
0.005344398785382509,
-0.13060611486434937,
-0.19191069900989532,
-0.04496103525161743,
-0.020163051784038544,
0.15463747084140778,
-0.20235012471675873,
0.03473862633109093,
-0.026673298329114914,
0.08622824400663376,
0.01485454197973013,
-0.015002688392996788,
-0.0415758453309536,
0.06998494267463684,
-0.049204304814338684,
-0.05402422696352005,
0.0654163658618927,
0.011186287738382816,
-0.09080980718135834,
-0.06611473113298416,
-0.08704543113708496,
0.16438627243041992,
0.12520354986190796,
-0.10004241019487381,
-0.07507447898387909,
-0.01724710315465927,
-0.06491655856370926,
-0.034848324954509735,
-0.04989911988377571,
0.029403071850538254,
0.17465335130691528,
-0.004288308788090944,
0.1416277289390564,
-0.06996812671422958,
-0.04497629776597023,
0.02134234644472599,
-0.033759646117687225,
0.020049748942255974,
0.11603114753961563,
0.13525508344173431,
-0.08187294006347656,
0.15324042737483978,
0.15256443619728088,
-0.0917566642165184,
0.1156785860657692,
-0.03989134356379509,
-0.06247096508741379,
-0.026374582201242447,
-0.027938034385442734,
-0.007788300048559904,
0.11578648537397385,
-0.1400456577539444,
0.00737172644585371,
0.03520200029015541,
0.021789472550153732,
0.009731143712997437,
-0.2204858362674713,
-0.042862821370363235,
0.0374443419277668,
-0.03374067321419716,
-0.008755642920732498,
-0.010858030058443546,
0.005916506052017212,
0.09897598624229431,
0.004599158186465502,
-0.10663099586963654,
0.0462723970413208,
0.009002890437841415,
-0.07239291071891785,
0.20523515343666077,
-0.08611434698104858,
-0.14105728268623352,
-0.12465430796146393,
-0.08609562367200851,
-0.05775069817900658,
0.010313918814063072,
0.05369056016206741,
-0.0725647583603859,
-0.03530162572860718,
-0.07302775979042053,
0.002309374278411269,
0.00178046606015414,
0.028186585754156113,
0.01608148030936718,
-0.008914751932024956,
0.06574694067239761,
-0.106041319668293,
-0.011991112492978573,
-0.05171400308609009,
-0.04932662099599838,
0.0369059182703495,
0.04052566736936569,
0.11560428142547607,
0.1514223963022232,
-0.011835269629955292,
0.006473606918007135,
-0.02173341065645218,
0.2542969286441803,
-0.0585908479988575,
-0.02041681669652462,
0.13530290126800537,
-0.01903873309493065,
0.05353762209415436,
0.12191333621740341,
0.07611624151468277,
-0.08316593617200851,
-0.0021904667373746634,
0.031534165143966675,
-0.039452239871025085,
-0.21165022253990173,
-0.05376604199409485,
-0.05561450496315956,
-0.0052559892646968365,
0.09755057841539383,
0.024165164679288864,
0.037410710006952286,
0.08104529976844788,
0.038230329751968384,
0.09457467496395111,
-0.052230559289455414,
0.0630328431725502,
0.12097108364105225,
0.045043282210826874,
0.12345694750547409,
-0.03168271481990814,
-0.061695292592048645,
0.0459028035402298,
0.007177123334258795,
0.2240591049194336,
0.012445276603102684,
0.12797774374485016,
0.06119520217180252,
0.17993967235088348,
-0.010229526087641716,
0.07671069353818893,
-0.009555589407682419,
-0.03226267173886299,
-0.021954011172056198,
-0.03700375556945801,
-0.039255641400814056,
0.026567893102765083,
-0.05684264749288559,
0.07331237196922302,
-0.10435587912797928,
0.021754831075668335,
0.05250055342912674,
0.25490692257881165,
0.037913817912340164,
-0.3385380804538727,
-0.09866002947092056,
0.00039725733222439885,
-0.035920966416597366,
-0.022115852683782578,
0.029983041808009148,
0.08164095878601074,
-0.09612184762954712,
0.020052947103977203,
-0.06478719413280487,
0.09166376292705536,
-0.051739394664764404,
0.04124302789568901,
0.08205269277095795,
0.09056467562913895,
0.012361492030322552,
0.0856441929936409,
-0.2705115079879761,
0.2709103524684906,
0.0011124404845759273,
0.05848463997244835,
-0.07775081694126129,
0.006703331135213375,
0.03555436432361603,
0.06323380023241043,
0.07337834686040878,
-0.004679467529058456,
-0.018162380903959274,
-0.19774138927459717,
-0.06294222176074982,
0.022419258952140808,
0.058072563260793686,
-0.04060147702693939,
0.08857964724302292,
-0.030562501400709152,
0.00889674574136734,
0.0679696574807167,
0.00940337311476469,
-0.04613585025072098,
-0.10001584887504578,
-0.006098082289099693,
0.036120664328336716,
-0.046233292669057846,
-0.06219656392931938,
-0.10925440490245819,
-0.12306603789329529,
0.14112649857997894,
-0.0319681242108345,
-0.03690791875123978,
-0.10618462413549423,
0.07423456013202667,
0.0820893943309784,
-0.08278810232877731,
0.05062349885702133,
-0.005233006086200476,
0.0736616924405098,
0.030725495889782906,
-0.058779675513505936,
0.09896454960107803,
-0.08048470318317413,
-0.16822980344295502,
-0.07307478040456772,
0.10317803919315338,
0.03763363137841225,
0.06608660519123077,
-0.006598783656954765,
0.017738111317157745,
-0.05011477321386337,
-0.0892515629529953,
0.02287985198199749,
-0.000017353482689941302,
0.08720899373292923,
0.015477229841053486,
-0.050616465508937836,
0.026952208951115608,
-0.054224852472543716,
-0.032860446721315384,
0.18973253667354584,
0.2337537556886673,
-0.10170110315084457,
0.01936899498105049,
0.0298044141381979,
-0.0616346076130867,
-0.17643193900585175,
0.02528606727719307,
0.05346114933490753,
0.004031751304864883,
0.039863403886556625,
-0.17719666659832,
0.14480650424957275,
0.11955806612968445,
-0.018453503027558327,
0.10619935393333435,
-0.32651394605636597,
-0.1191754937171936,
0.1327512115240097,
0.1330595314502716,
0.10782822221517563,
-0.12380607426166534,
-0.019780028611421585,
-0.015969395637512207,
-0.1487499475479126,
0.11417325586080551,
-0.07129546999931335,
0.11434805393218994,
-0.03298850730061531,
0.09874825179576874,
0.0023825312964618206,
-0.05758729577064514,
0.1255401074886322,
0.03689766675233841,
0.09922156482934952,
-0.056995492428541183,
-0.043178658932447433,
0.03287943825125694,
-0.04210522770881653,
0.023589130491018295,
-0.08058171719312668,
0.03715981915593147,
-0.10783560574054718,
-0.01999804377555847,
-0.06527992337942123,
0.040899261832237244,
-0.035012904554605484,
-0.07360541820526123,
-0.04365942254662514,
0.03051353618502617,
0.054351598024368286,
-0.011413555592298508,
0.1301603615283966,
0.04844836890697479,
0.1328841745853424,
0.09691385924816132,
0.06445418298244476,
-0.077752985060215,
-0.086395762860775,
-0.030282387509942055,
-0.01589536853134632,
0.05820314958691597,
-0.11751117557287216,
0.024163154885172844,
0.14540234208106995,
0.025365661829710007,
0.13743604719638824,
0.08161676675081253,
-0.016070231795310974,
0.006424179766327143,
0.050535500049591064,
-0.16826054453849792,
-0.0711364671587944,
-0.0019020880572497845,
-0.03659061715006828,
-0.11981002241373062,
0.05177446827292442,
0.0932018905878067,
-0.07093540579080582,
-0.009278872050344944,
-0.0033621620386838913,
0.013357915915548801,
-0.049699824303388596,
0.19043180346488953,
0.05690053105354309,
0.047820884734392166,
-0.102084219455719,
0.07104768604040146,
0.055389489978551865,
-0.05410180240869522,
-0.004401098936796188,
0.04851002246141434,
-0.0898294523358345,
-0.042811427265405655,
0.04856842756271362,
0.16897179186344147,
-0.07035963982343674,
-0.04266275092959404,
-0.13119660317897797,
-0.11532279849052429,
0.08006303757429123,
0.1382775902748108,
0.11728738993406296,
0.016536174342036247,
-0.06686230003833771,
0.0006611951976083219,
-0.10944919288158417,
0.09770160168409348,
0.04708116874098778,
0.0734066441655159,
-0.15702109038829803,
0.1359664350748062,
0.003941343631595373,
0.04041372612118721,
-0.015619616955518723,
0.027891412377357483,
-0.09419288486242294,
0.007938643917441368,
-0.11390265822410583,
-0.02122381702065468,
-0.039233069866895676,
0.014458067715168,
-0.0046383109875023365,
-0.05716574937105179,
-0.05601923167705536,
0.014524370431900024,
-0.10699468106031418,
-0.01928335428237915,
0.03899373114109039,
0.06291910260915756,
-0.1127442866563797,
-0.0373651459813118,
0.029830560088157654,
-0.060435932129621506,
0.07610075175762177,
0.04539432004094124,
0.025760788470506668,
0.041388947516679764,
-0.12090925872325897,
0.011362165212631226,
0.06587745994329453,
0.029135525226593018,
0.0779394805431366,
-0.10033273696899414,
-0.013809912838041782,
-0.0036235337611287832,
0.03675238415598869,
0.014088255353271961,
0.07652497291564941,
-0.13853393495082855,
-0.009831419214606285,
-0.009862758219242096,
-0.07842090725898743,
-0.06479731947183609,
0.017448456957936287,
0.1048879325389862,
0.01632278598845005,
0.21058489382266998,
-0.060283590108156204,
0.04383789002895355,
-0.20603863894939423,
0.00220497022382915,
-0.009706608019769192,
-0.10710105299949646,
-0.13172513246536255,
-0.0606943778693676,
0.050109315663576126,
-0.05686523765325546,
0.15516485273838043,
0.025668196380138397,
0.02424066886305809,
0.02158510498702526,
0.004501462448388338,
0.02044464461505413,
0.01022712979465723,
0.19369563460350037,
0.04143132269382477,
-0.035854727029800415,
0.05643327534198761,
0.039416369050741196,
0.10394105315208435,
0.10309775173664093,
0.18762512505054474,
0.13835537433624268,
0.0006479793810285628,
0.08627559244632721,
0.03793670982122421,
-0.0659962147474289,
-0.17356030642986298,
0.0321008674800396,
-0.036479923874139786,
0.10581496357917786,
-0.014568694867193699,
0.22775574028491974,
0.05537606030702591,
-0.1675533652305603,
0.0337410531938076,
-0.05109969899058342,
-0.08056384325027466,
-0.10062191635370255,
-0.06422203779220581,
-0.07625630497932434,
-0.1254475712776184,
-0.0006655273027718067,
-0.11110815405845642,
0.0066540297120809555,
0.12899866700172424,
0.005776833277195692,
-0.024805990979075432,
0.14498600363731384,
0.0033682985231280327,
0.03868979215621948,
0.03787379339337349,
0.013384872116148472,
-0.03578371927142143,
-0.1115308403968811,
-0.07284858077764511,
-0.0241189356893301,
-0.01707078330218792,
0.03741392493247986,
-0.07137462496757507,
-0.036267925053834915,
0.02739904075860977,
-0.011365998536348343,
-0.09159814566373825,
0.006796678993850946,
0.005448846612125635,
0.04997970163822174,
0.03590784966945648,
0.005951589904725552,
0.03615834563970566,
-0.008003095164895058,
0.19558702409267426,
-0.07219093292951584,
-0.06427428871393204,
-0.10781139135360718,
0.22916629910469055,
0.027566205710172653,
-0.02230616845190525,
0.04086305573582649,
-0.06521167606115341,
0.005916034337133169,
0.2317272126674652,
0.2008689045906067,
-0.09690136462450027,
-0.013936447910964489,
0.01001159567385912,
-0.013797705993056297,
-0.03325854241847992,
0.09343154728412628,
0.13060081005096436,
0.046068813651800156,
-0.09066332876682281,
-0.03996893763542175,
-0.07088183611631393,
-0.01335997972637415,
-0.033476538956165314,
0.05620923265814781,
0.042216911911964417,
0.005870217457413673,
-0.04412734881043434,
0.04800984635949135,
-0.07213880866765976,
-0.08872917294502258,
0.06165574863553047,
-0.19982871413230896,
-0.1694580614566803,
-0.011273977346718311,
0.0979483500123024,
0.004911693278700113,
0.058558445423841476,
-0.03344385325908661,
-0.001048139063641429,
0.0842779353260994,
-0.01971886120736599,
-0.09345351904630661,
-0.08213888853788376,
0.10545605421066284,
-0.06964312493801117,
0.23047016561031342,
-0.045413050800561905,
0.07239377498626709,
0.12237795442342758,
0.06775360554456711,
-0.08226149529218674,
0.05658791586756706,
0.05512154474854469,
-0.05442260578274727,
0.02102680504322052,
0.06780051440000534,
-0.026829630136489868,
0.08261296153068542,
0.04455665871500969,
-0.1320836842060089,
0.011871784925460815,
-0.04297323152422905,
-0.05374489352107048,
-0.04603271558880806,
-0.03260395675897598,
-0.054986078292131424,
0.1392628699541092,
0.2082338035106659,
-0.0348820723593235,
-0.014887052588164806,
-0.06879091262817383,
0.022490499541163445,
0.06120410934090614,
0.00923272967338562,
-0.06272074580192566,
-0.21558567881584167,
0.016742533072829247,
0.04254124313592911,
-0.019728971645236015,
-0.21277059614658356,
-0.10292027145624161,
0.0008055662619881332,
-0.07509142905473709,
-0.08626852184534073,
0.07076068222522736,
0.08129408955574036,
0.05211088806390762,
-0.05952313914895058,
-0.025473440065979958,
-0.08385016024112701,
0.13497555255889893,
-0.1352596879005432,
-0.0900496393442154
] |
null | null | null |
# [models/cnstd](models/cnstd)
Stores the models used by [cnstd](https://github.com/breezedeus/cnstd).
# [models/cnocr](models/cnocr)
Stores the models used by [cnocr](https://github.com/breezedeus/cnocr).
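A minimal usage sketch, assuming the public APIs described in the cnstd/cnocr READMEs (`CnStd.detect`, `CnOcr.ocr_for_single_line`); the exact return structure and model-download paths vary between versions, so treat this as an illustration of how the stored models are typically consumed rather than an exact recipe.

```python
# Illustrative only: detect text boxes with cnstd, then recognize each line with cnocr.
from cnstd import CnStd
from cnocr import CnOcr

std = CnStd()   # loads a detection model (the kind stored under models/cnstd)
ocr = CnOcr()   # loads a recognition model (the kind stored under models/cnocr)

box_infos = std.detect('example.jpg')                 # 'example.jpg' is a placeholder image path
for box_info in box_infos['detected_texts']:          # key name may differ by version
    cropped_img = box_info['cropped_img']             # image crop of one detected text line
    print(ocr.ocr_for_single_line(cropped_img))       # recognized characters for that line
```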
|
{}
| null |
breezedeus/cnstd-cnocr-models
|
[
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#region-us
|
# models/cnstd
Stores the models used by cnstd.
# models/cnocr
Stores the models used by cnocr.
|
[
"# models/cnstd\n存放 cnstd 中使用的模型。",
"# models/cnocr\n存放 cnocr 中使用的模型。"
] |
[
"TAGS\n#region-us \n",
"# models/cnstd\n存放 cnstd 中使用的模型。",
"# models/cnocr\n存放 cnocr 中使用的模型。"
] |
[
6,
18,
17
] |
[
"passage: TAGS\n#region-us \n# models/cnstd\n存放 cnstd 中使用的模型。# models/cnocr\n存放 cnocr 中使用的模型。"
] |
[
-0.03866061940789223,
0.1054900735616684,
-0.00796516239643097,
-0.03662026301026344,
0.05135824531316757,
0.07346411049365997,
0.08674917370080948,
-0.024086331948637962,
0.07582598924636841,
0.025585364550352097,
0.143981471657753,
-0.05313132703304291,
-0.039602190256118774,
0.03348648548126221,
-0.03051111474633217,
-0.15540097653865814,
0.12413202226161957,
-0.03298325464129448,
-0.10650225728750229,
0.07793113589286804,
0.014013601467013359,
-0.012368845753371716,
0.049723539501428604,
-0.002152462024241686,
-0.14080525934696198,
-0.06765767931938171,
0.0229153074324131,
-0.026741597801446915,
0.04550359398126602,
0.00043887447100132704,
0.2428770661354065,
0.0714302584528923,
0.08703769743442535,
-0.12393329292535782,
0.03685152158141136,
-0.02015962265431881,
-0.07785186916589737,
0.09411168843507767,
-0.06562427431344986,
0.01828545518219471,
0.18692293763160706,
0.0921994224190712,
0.09042036533355713,
0.024286162108182907,
-0.1461043804883957,
-0.011306269094347954,
-0.0019017196027562022,
0.013108081184327602,
-0.028724756091833115,
0.04830992594361305,
0.02417300082743168,
0.16414561867713928,
-0.2434474378824234,
-0.014362510293722153,
-0.06699159741401672,
-0.10508446395397186,
0.03806004300713539,
0.3986075520515442,
0.10280942171812057,
0.08911824971437454,
0.04205761104822159,
0.07808046042919159,
0.07172270119190216,
-0.060911741107702255,
-0.03919287770986557,
0.0029525537975132465,
0.06672201305627823,
0.06283099204301834,
-0.1096721738576889,
-0.07391051948070526,
0.25121134519577026,
-0.012842495925724506,
0.08511929959058762,
0.005388883873820305,
-0.07864461839199066,
-0.020966948941349983,
-0.03753238171339035,
-0.06742944568395615,
-0.030450018122792244,
0.09142682701349258,
0.10443522036075592,
-0.02627251110970974,
-0.057735376060009,
-0.04880187660455704,
-0.08240792900323868,
0.036798954010009766,
-0.01852608472108841,
0.09382905811071396,
-0.1977005898952484,
0.027002157643437386,
-0.09537436068058014,
-0.028731871396303177,
0.035261642187833786,
-0.1354391872882843,
-0.10572773963212967,
-0.04711468890309334,
-0.020456651225686073,
0.15393532812595367,
0.0900578573346138,
0.13932132720947266,
0.03588680177927017,
-0.006458973977714777,
0.002962352940812707,
0.10620706528425217,
0.033518463373184204,
0.04427438974380493,
-0.13088229298591614,
0.11114566773176193,
0.01476820558309555,
-0.13733145594596863,
-0.07815627008676529,
-0.1264306753873825,
-0.14804811775684357,
0.00628954591229558,
-0.10352841764688492,
0.07151061296463013,
-0.012502660043537617,
0.007078404072672129,
0.0017033338081091642,
-0.04129905626177788,
0.2558854818344116,
0.0857345387339592,
0.033581946045160294,
0.07376375794410706,
0.013808952644467354,
0.13202373683452606,
0.016112150624394417,
0.07957181334495544,
0.08815552294254303,
0.08076954632997513,
-0.17771051824092865,
-0.04864863306283951,
-0.04469830170273781,
-0.05077893286943436,
0.028230445459485054,
0.10222918540239334,
0.020688224583864212,
-0.11540932208299637,
-0.05813554301857948,
-0.00904981978237629,
0.016132336109876633,
-0.003940207418054342,
0.03400745987892151,
-0.02534010447561741,
-0.049131691455841064,
0.011534555815160275,
0.03482107073068619,
-0.1502424031496048,
-0.051022328436374664,
-0.025424305349588394,
-0.13059145212173462,
-0.013175458647310734,
-0.18072988092899323,
-0.030927788466215134,
-0.08731978386640549,
0.06679363548755646,
-0.17829710245132446,
-0.041993603110313416,
-0.06224721297621727,
0.0938936248421669,
-0.030765732750296593,
-0.03259453922510147,
-0.14968723058700562,
-0.00467525701969862,
-0.04123177379369736,
0.14921283721923828,
-0.17897318303585052,
-0.0004664830048568547,
0.16107138991355896,
-0.08666262030601501,
-0.084199920296669,
0.006031141150742769,
-0.05749634653329849,
-0.020763881504535675,
0.007937643676996231,
0.1560882329940796,
0.0855475589632988,
-0.13401295244693756,
0.08549090474843979,
0.043921153992414474,
-0.10016901046037674,
-0.09601887315511703,
0.04995905980467796,
0.014038375578820705,
-0.2143019288778305,
-0.027723494917154312,
-0.11823903024196625,
0.025661416351795197,
-0.09903255850076675,
-0.04497474059462547,
-0.021336426958441734,
0.010187565349042416,
0.04486991465091705,
-0.01617126353085041,
0.033602286130189896,
0.015460657887160778,
0.043013401329517365,
-0.02015528455376625,
0.09495473653078079,
0.013414335437119007,
0.016377529129385948,
-0.06194644421339035,
0.0959186777472496,
-0.0445554219186306,
-0.028968658298254013,
-0.0803530290722847,
-0.33542776107788086,
0.047958649694919586,
0.013739032670855522,
0.05658461153507233,
0.1032179743051529,
0.030469713732600212,
0.023225057870149612,
0.05243315175175667,
0.06962485611438751,
0.030228296294808388,
0.050747599452733994,
0.02619817480444908,
-0.07741228491067886,
0.07715322077274323,
-0.07671303302049637,
0.14536194503307343,
-0.27577894926071167,
-0.015486817806959152,
0.004145542625337839,
0.06826731562614441,
-0.014542539604008198,
-0.0002117971598636359,
-0.025461345911026,
-0.0050183129496872425,
0.0001286983460886404,
0.004472098313271999,
0.017935119569301605,
-0.00022966811957303435,
-0.198801651597023,
0.16200962662696838,
-0.16200251877307892,
0.08168736845254898,
0.09700217843055725,
-0.061932966113090515,
-0.03446495160460472,
-0.1382218301296234,
0.015701834112405777,
0.012045996263623238,
0.08792395889759064,
-0.02710193581879139,
0.072227343916893,
-0.012050226330757141,
0.047278471291065216,
-0.02334788627922535,
0.04820193722844124,
0.0370565690100193,
-0.029256269335746765,
-0.02704128436744213,
0.011428926140069962,
0.3225078880786896,
-0.03672702610492706,
0.062321726232767105,
0.10218246281147003,
0.06930740922689438,
0.05203952640295029,
0.0024937239941209555,
-0.06180403009057045,
-0.051580291241407394,
-0.050769444555044174,
0.011045663617551327,
0.07084064930677414,
0.026638556271791458,
0.0407535545527935,
0.046676456928253174,
0.040768999606370926,
0.08598557859659195,
-0.09312190860509872,
-0.09233233332633972,
0.03556020185351372,
0.003139668609946966,
-0.14335133135318756,
0.07906454801559448,
-0.03625883907079697,
0.03980028256773949,
-0.01909051649272442,
-0.23830559849739075,
0.007910234853625298,
-0.014032211154699326,
-0.08399147540330887,
0.13144172728061676,
-0.12233482301235199,
-0.17054453492164612,
-0.09685880690813065,
0.030885770916938782,
0.04831402748823166,
0.013027013279497623,
-0.061642419546842575,
-0.0700833722949028,
-0.07070209830999374,
0.01988941617310047,
-0.09683380275964737,
-0.07305248826742172,
-0.09013494104146957,
0.024557817727327347,
-0.015375059098005295,
-0.11591890454292297,
-0.09924490004777908,
-0.024029355496168137,
-0.04656992480158806,
-0.017538724467158318,
0.04764682054519653,
-0.12985651195049286,
0.11494161933660507,
0.3399501442909241,
0.04708670452237129,
0.011799190193414688,
0.04708366468548775,
0.20100252330303192,
-0.09878669679164886,
-0.03905373066663742,
0.006083291955292225,
-0.0377158522605896,
0.03481478989124298,
0.14352969825267792,
0.11899375170469284,
-0.07874607294797897,
-0.005993970204144716,
-0.10043540596961975,
-0.13636146485805511,
-0.14069576561450958,
-0.24412813782691956,
-0.0778193548321724,
-0.062386102974414825,
-0.025961071252822876,
0.04048362374305725,
0.2297181487083435,
0.03956456482410431,
0.04628950357437134,
-0.06221636012196541,
0.00690564326941967,
0.06345225870609283,
-0.05352059379220009,
0.009958472102880478,
-0.0002286998787894845,
-0.013482505455613136,
-0.07937491685152054,
0.0911930724978447,
0.010166563093662262,
0.1994011402130127,
0.23659385740756989,
0.15570160746574402,
0.029774855822324753,
0.10401269793510437,
0.20997324585914612,
0.05613018572330475,
0.04064873233437538,
-0.03635860234498978,
-0.01834784634411335,
-0.029832839965820312,
0.07963381707668304,
0.05604960024356842,
0.09242714941501617,
-0.19447265565395355,
0.02722104638814926,
-0.05355290323495865,
0.03253908082842827,
-0.11323943734169006,
0.08253564685583115,
-0.26065096259117126,
0.0563616119325161,
0.00701400451362133,
0.1401776224374771,
-0.05789720267057419,
0.0626797080039978,
0.023828361183404922,
-0.072551429271698,
-0.09664539992809296,
0.034207914024591446,
0.08502861857414246,
0.05430043488740921,
0.01450961921364069,
-0.03191565349698067,
-0.020357146859169006,
0.010288333520293236,
0.015003175474703312,
-0.040780387818813324,
0.23430155217647552,
-0.0031571299768984318,
-0.13722403347492218,
-0.02154766395688057,
-0.10100763291120529,
-0.0007502477965317667,
0.2663742005825043,
0.11147557199001312,
0.0692099928855896,
-0.18401551246643066,
-0.03576265648007393,
-0.18497395515441895,
0.01835402473807335,
0.08298604935407639,
-0.1264456808567047,
-0.05197121948003769,
0.0027838200330734253,
-0.0064859879203140736,
0.0667543113231659,
0.0001813246199162677,
-0.07070745527744293,
-0.09878722578287125,
0.03743719309568405,
0.043992601335048676,
-0.02244928479194641,
0.01448757667094469,
-0.005572846159338951,
-0.05145835876464844,
0.11977848410606384,
0.054782167077064514,
0.04704328626394272,
-0.06847121566534042,
-0.18473434448242188,
0.1885465681552887,
-0.05873863399028778,
0.055672284215688705,
-0.027071544900536537,
0.03347171097993851,
-0.02604920044541359,
-0.12394443154335022,
0.1314476579427719,
-0.03972989320755005,
0.0004147778090555221,
-0.08307559043169022,
0.11350942403078079,
-0.016418779268860817,
0.06481077522039413,
0.06377676129341125,
0.08246790617704391,
-0.004387226887047291,
-0.14902037382125854,
-0.0857202559709549,
0.07353128492832184,
0.020667944103479385,
0.10626719892024994,
0.03331432864069939,
0.09748867899179459,
0.02931346371769905,
0.06949525326490402,
0.1642434448003769,
0.14116719365119934,
-0.11478476971387863,
0.07187007367610931,
0.22034209966659546,
-0.04995521903038025,
-0.21139825880527496,
-0.0071163540706038475,
-0.13772979378700256,
-0.06018821895122528,
-0.029244566336274147,
-0.06056251376867294,
0.1436123102903366,
0.10472720116376877,
-0.03322272747755051,
0.09475132822990417,
-0.17720361053943634,
-0.09246786683797836,
0.11678586900234222,
-0.08372016251087189,
0.30582329630851746,
-0.04872344434261322,
-0.0679413452744484,
-0.019237341359257698,
-0.10941614210605621,
0.2287832498550415,
-0.041891954839229584,
-0.02249392867088318,
0.025582531467080116,
0.03508169203996658,
0.058338992297649384,
-0.06895075738430023,
0.19647127389907837,
0.05644352734088898,
0.0571054182946682,
-0.06722289323806763,
-0.12473038583993912,
0.11493586748838425,
-0.0069686537608504295,
0.038991186767816544,
0.17651477456092834,
0.07278390973806381,
0.0037642254028469324,
-0.015313678421080112,
-0.0061524854972958565,
-0.016085583716630936,
-0.026652701199054718,
-0.04614124447107315,
-0.09295646846294403,
0.044942669570446014,
-0.0024729478172957897,
-0.006943768821656704,
0.13549357652664185,
-0.023291777819395065,
0.044292040169239044,
-0.009498805738985538,
0.03575563803315163,
0.018912775442004204,
0.03856633976101875,
0.002746972721070051,
-0.059465933591127396,
0.11800174415111542,
-0.11467589437961578,
0.025565173476934433,
0.16462738811969757,
-0.0291375033557415,
0.054954465478658676,
0.08263891935348511,
-0.028971651569008827,
-0.050506316125392914,
0.11457701772451401,
-0.1405654102563858,
-0.12988661229610443,
-0.035947803407907486,
-0.1270236372947693,
0.10174653679132462,
-0.0003366968594491482,
0.07779905200004578,
-0.0367162860929966,
0.023121964186429977,
0.03172720968723297,
-0.004860405344516039,
-0.046180009841918945,
-0.03192530572414398,
0.07204903662204742,
0.017641637474298477,
-0.12101955711841583,
-0.037788525223731995,
0.10563354194164276,
0.049272727221250534,
-0.09890913218259811,
0.044069256633520126,
-0.10896140336990356,
-0.06629721075296402,
-0.09268800914287567,
0.17828789353370667,
-0.17273277044296265,
-0.035229530185461044,
0.07104836404323578,
-0.028966087847948074,
-0.03287694603204727,
0.16238795220851898,
0.03238417208194733,
0.04896451160311699,
0.0078672394156456,
-0.031522538512945175,
-0.07001019269227982,
-0.07857616990804672,
-0.06799822300672531,
0.03495306149125099,
-0.0638345405459404,
0.015740590170025826,
-0.016368433833122253,
0.04656195268034935,
-0.08787074685096741,
-0.06239710748195648,
-0.14712859690189362,
0.023345010355114937,
-0.1163472905755043,
0.00029437540797516704,
-0.0351313091814518,
-0.019758660346269608,
0.02341720275580883,
-0.034735195338726044,
-0.0027063777670264244,
-0.03459774702787399,
-0.07864879816770554,
0.10283765941858292,
0.0015185819938778877,
0.05976630374789238,
-0.031432174146175385,
0.014874791726469994,
0.08206366002559662,
0.002978757256641984,
0.06758679449558258,
0.0959935262799263,
-0.004919720813632011,
0.16904260218143463,
-0.23319345712661743,
0.02368753030896187,
0.07591662555932999,
0.08796588331460953,
0.0660552978515625,
0.047666676342487335,
-0.022845443338155746,
0.022009041160345078,
-0.02753307856619358,
0.008070330135524273,
-0.13651050627231598,
-0.04054572805762291,
-0.07988793402910233,
-0.021833738312125206,
-0.1611824929714203,
0.01490329671651125,
-0.12643763422966003,
0.07712000608444214,
0.09991243481636047,
0.026281971484422684,
0.09845884144306183,
0.08473769575357437,
-0.005305230151861906,
-0.033548504114151,
-0.0029470741283148527,
0.022466866299510002,
0.06477325409650803,
-0.02186226099729538,
-0.0014151553623378277,
-0.04449906945228577,
0.3314724266529083,
-0.0584133043885231,
-0.028689373284578323,
-0.029028264805674553,
0.18340089917182922,
-0.004705279599875212,
0.021942604333162308,
0.1189844161272049,
0.19979327917099,
-0.009442096576094627,
-0.06732048094272614,
0.06339018791913986,
-0.016454551368951797,
-0.05632513016462326,
0.10368771106004715,
-0.037513695657253265,
0.030192645266652107,
-0.02328094094991684,
0.08685296773910522,
-0.00045266241068020463,
0.10325498878955841,
-0.05400051549077034,
-0.06138744205236435,
-0.03912803903222084,
-0.0015833679353818297,
-0.030379390344023705,
0.1544005572795868,
0.00007214926881715655,
0.015401909127831459,
-0.01359484437853098,
-0.08340705931186676,
-0.17425791919231415,
-0.2261875718832016,
-0.07848179340362549,
-0.08914682269096375,
0.02017536386847496,
-0.09562908858060837,
-0.020353930070996284,
0.15227733552455902,
0.047256968915462494,
-0.009234312921762466,
0.028415849432349205,
0.004610441625118256,
0.030078638345003128,
-0.04928956553339958,
-0.020052626729011536,
0.03260550647974014,
-0.025425752624869347,
-0.04527680575847626,
-0.05862021818757057,
-0.002471072832122445,
-0.013768932782113552,
0.005409577861428261,
0.04054351896047592,
0.01613166369497776,
-0.05960244685411453,
-0.060470808297395706,
-0.0878283828496933,
0.010936632752418518,
-0.1328853815793991,
0.05763395503163338,
0.02329370751976967,
0.017739448696374893,
0.019778748974204063,
0.08132559806108475,
0.011736160144209862,
-0.007620375137776136,
-0.13398899137973785,
0.11299213021993637,
-0.06007882207632065,
0.1006985530257225,
0.0010915572056546807,
0.012312115170061588,
-0.07349805533885956,
0.16623878479003906,
0.3005504906177521,
-0.15129627287387848,
0.009374154731631279,
0.012068691663444042,
0.026453908532857895,
0.0005698743625544012,
0.17221742868423462,
-0.0019630829337984324,
0.223097562789917,
0.008398463949561119,
-0.10313446074724197,
-0.09129054099321365,
0.006515521556138992,
-0.02092389017343521,
-0.05403713136911392,
0.058871395885944366,
-0.07238137722015381,
-0.07024656981229782,
0.1410972774028778,
-0.2718396782875061,
0.06787250190973282,
-0.039492812007665634,
-0.032326120883226395,
-0.022192824631929398,
-0.037753261625766754,
-0.07239103317260742,
-0.033922191709280014,
0.07483869045972824,
-0.09334420412778854,
-0.173011913895607,
-0.042430296540260315,
-0.0024020506534725428,
-0.20820507407188416,
-0.046906210482120514,
0.07162901759147644,
0.11986017972230911,
0.20248202979564667,
0.05360860377550125,
0.06962002068758011,
0.010111391544342041,
0.061155516654253006,
-0.02384156361222267,
0.04609064385294914,
0.04898510128259659,
-0.04355510696768761,
-0.09766530990600586,
-0.03017272613942623,
-0.06922177225351334,
-0.04881894960999489,
0.052879698574543,
-0.28110823035240173,
0.04578712210059166,
0.04505123198032379,
-0.027741026133298874,
-0.04003261402249336,
-0.018811343237757683,
-0.05358272045850754,
0.13283702731132507,
0.0696551650762558,
-0.0017368955304846168,
0.014912278391420841,
0.010599424131214619,
0.02059359848499298,
0.006921648513525724,
-0.009843969717621803,
-0.07230661809444427,
-0.009493399411439896,
-0.056863583624362946,
0.06685749441385269,
0.04024048522114754,
0.03053980879485607,
0.00618083355948329,
-0.08470991998910904,
0.07713890075683594,
0.03868569806218147,
0.08193765580654144,
0.12304296344518661,
-0.0006123206112533808,
-0.00299189449287951,
-0.003398333676159382,
0.03924175724387169,
0.00444909930229187,
-0.09281162917613983,
-0.11531247198581696
] |
null | null |
transformers
|
# RickBot built for [Chai](https://chai.ml/)
Make your own [here](https://colab.research.google.com/drive/1o5LxBspm-C28HQvXN-PRQavapDbm5WjG?usp=sharing)
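A minimal usage sketch, assuming the checkpoint follows the standard GPT-2 layout and can be loaded with the `transformers` text-generation pipeline; the prompt and generation arguments below are illustrative, not part of the original card.

```python
# Hedged example: run the checkpoint as a plain text-generation pipeline.
from transformers import pipeline

chat = pipeline("text-generation", model="brimeggi/testbot2")
out = chat("Morty: Hey Rick, what are we doing today?\nRick:",
           max_new_tokens=40, do_sample=True, top_p=0.9)
print(out[0]["generated_text"])
```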
|
{"tags": ["conversational"]}
|
text-generation
|
brimeggi/testbot2
|
[
"transformers",
"pytorch",
"gpt2",
"text-generation",
"conversational",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
# RickBot built for Chai
Make your own here
|
[
"# RickBot built for Chai\nMake your own here"
] |
[
"TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"# RickBot built for Chai\nMake your own here"
] |
[
51,
11
] |
[
"passage: TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n# RickBot built for Chai\nMake your own here"
] |
[
-0.011150825768709183,
0.04145511984825134,
-0.004678801167756319,
0.0336763896048069,
0.12031115591526031,
-0.014753859490156174,
0.22364185750484467,
0.107642263174057,
0.07693589478731155,
-0.004552277270704508,
0.12704381346702576,
0.29545801877975464,
0.01173529215157032,
0.10052672773599625,
-0.06006697565317154,
-0.23993316292762756,
0.03390978276729584,
0.06282949447631836,
-0.0436544194817543,
0.10080074518918991,
0.09193950891494751,
-0.055171772837638855,
0.13131460547447205,
-0.0022037639282643795,
-0.19263046979904175,
0.02514885924756527,
-0.0019302413566038013,
-0.10980048030614853,
0.07051794230937958,
0.032110463827848434,
0.10150935500860214,
0.055141348391771317,
-0.07856587320566177,
-0.03893684223294258,
0.06976044923067093,
-0.025181258097290993,
-0.062095027416944504,
0.043997447937726974,
0.024688854813575745,
-0.0770193412899971,
0.13691559433937073,
0.12698420882225037,
-0.027915870770812035,
0.07780664414167404,
-0.13704533874988556,
0.07647588849067688,
-0.06611359119415283,
0.07881827652454376,
0.06463135778903961,
0.10425921529531479,
-0.02596363238990307,
0.08331593871116638,
-0.07413201779127121,
0.09951689839363098,
0.18445415794849396,
-0.23128338158130646,
-0.0723869726061821,
0.15064449608325958,
0.05655790492892265,
0.04300830885767937,
-0.02898770570755005,
0.04599650576710701,
0.001124891801737249,
-0.0024933130480349064,
-0.0740271508693695,
-0.1045415922999382,
-0.18575459718704224,
0.003921935334801674,
-0.05921151116490364,
0.0027330040466040373,
0.20704099535942078,
-0.07176541537046432,
0.055981338024139404,
-0.028761442750692368,
-0.08862290531396866,
-0.011062022298574448,
-0.039405759423971176,
0.00685728807002306,
-0.09016578644514084,
0.07460368424654007,
-0.017557596787810326,
-0.09335976839065552,
-0.1395408809185028,
-0.0661342442035675,
-0.09819558262825012,
0.18369238078594208,
0.0613970048725605,
0.028007537126541138,
-0.24504470825195312,
0.029439760372042656,
0.08148614317178726,
-0.11067121475934982,
-0.009260275401175022,
-0.08271189779043198,
0.06354579329490662,
-0.01859266497194767,
-0.02253034897148609,
-0.12807348370552063,
0.1749914437532425,
0.17171591520309448,
0.022881148383021355,
0.06281881779432297,
-0.04948471486568451,
0.04479441046714783,
0.032528504729270935,
0.05031536519527435,
-0.0015241607325151563,
0.05272556096315384,
0.021466167643666267,
-0.12812519073486328,
0.009883233346045017,
-0.07998165488243103,
-0.17180003225803375,
0.03479284048080444,
-0.07810257375240326,
0.07381381839513779,
0.007816702127456665,
0.12310265004634857,
0.025779662653803825,
-0.03711586445569992,
0.13444221019744873,
-0.04278641194105148,
-0.0383361279964447,
0.01390223577618599,
0.03555385395884514,
0.08330917358398438,
-0.0268411748111248,
0.054944224655628204,
-0.11321454495191574,
-0.02689424902200699,
0.012567145749926567,
0.029751433059573174,
-0.051087986677885056,
0.007765055634081364,
-0.01394432969391346,
0.006440861616283655,
0.014999439008533955,
-0.16489700973033905,
-0.10017409175634384,
-0.025622084736824036,
0.03442199528217316,
-0.03650323301553726,
-0.0979449450969696,
-0.04939094930887222,
0.002047834452241659,
0.022807663306593895,
-0.038137588649988174,
-0.017916161566972733,
-0.025819536298513412,
0.07114581763744354,
0.0029115523211658,
0.08987545222043991,
-0.13433027267456055,
0.06878834217786789,
-0.025110384449362755,
-0.07848629355430603,
-0.10036339610815048,
0.0835830420255661,
0.0003210864379070699,
0.12125350534915924,
-0.016314852982759476,
0.08317133039236069,
-0.044009018689394,
0.07932858169078827,
-0.06336653232574463,
0.18989576399326324,
-0.11168016493320465,
-0.06718592345714569,
0.2800406217575073,
-0.11450672149658203,
-0.23892457783222198,
0.09331201761960983,
0.008506255224347115,
0.09211769700050354,
0.11177093535661697,
0.13636447489261627,
0.04972507059574127,
0.033792681992053986,
0.009489930234849453,
0.025595908984541893,
-0.12059012800455093,
-0.0903896614909172,
0.023431167006492615,
0.038586851209402084,
-0.0440131239593029,
-0.004476291127502918,
0.10482443869113922,
0.061741214245557785,
-0.027594411745667458,
-0.01595555804669857,
0.011509356088936329,
-0.05410397797822952,
-0.031401317566633224,
-0.013392975553870201,
0.11586759984493256,
-0.025759808719158173,
0.00210106885060668,
-0.15279918909072876,
0.077735036611557,
-0.03957529366016388,
0.007007088512182236,
-0.12549236416816711,
0.07694654166698456,
0.011993481777608395,
0.06650689989328384,
-0.17508769035339355,
-0.02817954495549202,
-0.025211086496710777,
0.17221038043498993,
0.14804713428020477,
0.07574493438005447,
0.05816996842622757,
-0.05344897508621216,
0.0063308910466730595,
0.01046198420226574,
0.16087333858013153,
-0.04058145731687546,
-0.06521812826395035,
-0.09512296319007874,
0.08002325147390366,
-0.06414329260587692,
0.002938115270808339,
-0.03206503018736839,
0.04721028357744217,
0.11089026182889938,
0.10249800235033035,
0.021137559786438942,
0.036570899188518524,
0.0468742661178112,
-0.021904466673731804,
-0.09059461951255798,
-0.0025291224010288715,
0.06415865570306778,
0.008340882137417793,
-0.06686529517173767,
0.244649276137352,
-0.10963868349790573,
-0.020582865923643112,
0.1876053810119629,
-0.26701411604881287,
0.03792291879653931,
-0.015711847692728043,
-0.008591122925281525,
0.015337553806602955,
0.0032015759497880936,
0.029499944299459457,
0.17180974781513214,
-0.006066597066819668,
0.16907382011413574,
-0.015202272683382034,
0.00553446589037776,
-0.06523244827985764,
-0.038884907960891724,
-0.013328936882317066,
0.07910837233066559,
0.16297432780265808,
-0.15726783871650696,
0.17121972143650055,
0.09138107299804688,
0.012600064277648926,
0.1565595120191574,
0.023086577653884888,
0.03244199976325035,
0.08116336166858673,
0.032992757856845856,
0.02831592783331871,
-0.073691725730896,
-0.17130137979984283,
-0.061828989535570145,
0.04863770678639412,
-0.0592825673520565,
0.08131495118141174,
-0.06011629477143288,
-0.02735399454832077,
-0.047027479857206345,
-0.0196701567620039,
0.08881430327892303,
0.13349686563014984,
0.005892445798963308,
0.09967100620269775,
-0.03695422410964966,
-0.008113699965178967,
0.04419396445155144,
0.012379536405205727,
-0.022524189203977585,
0.14095182716846466,
-0.04518243670463562,
-0.2987024188041687,
-0.09512696415185928,
-0.19621825218200684,
-0.07745537161827087,
0.07019653171300888,
0.14896398782730103,
-0.2216813564300537,
0.008591565303504467,
0.007431990001350641,
0.03469269350171089,
0.0057189068756997585,
-0.006030987482517958,
-0.015324738807976246,
0.0020695175044238567,
-0.12652523815631866,
-0.06469849497079849,
-0.03995681554079056,
-0.05886904522776604,
-0.058065593242645264,
0.18388505280017853,
-0.1271546185016632,
0.0748671367764473,
0.13533318042755127,
0.0488121435046196,
0.0836564302444458,
-0.018208900466561317,
0.12828132510185242,
-0.11489463597536087,
0.04728242754936218,
0.33297815918922424,
-0.014911501668393612,
0.05447712913155556,
0.11965532600879669,
-0.006807335652410984,
-0.11554266512393951,
0.04613998532295227,
0.008967476896941662,
-0.09810378402471542,
-0.27229517698287964,
-0.06620300561189651,
-0.10128438472747803,
0.06990574300289154,
0.10992177575826645,
0.08760596811771393,
0.12593884766101837,
0.15041570365428925,
-0.05547543242573738,
0.07711178809404373,
0.01074469555169344,
0.10777407139539719,
0.009291023947298527,
-0.016701804473996162,
0.09913180023431778,
-0.06781642884016037,
-0.10634220391511917,
0.07089509069919586,
0.09629807621240616,
0.03884149342775345,
0.06647439301013947,
0.19313450157642365,
0.041681669652462006,
0.027318425476551056,
0.13684289157390594,
0.04659007489681244,
0.03561336547136307,
-0.026196852326393127,
0.006501676514744759,
-0.041376423090696335,
-0.09997225552797318,
0.04355200007557869,
0.004561592824757099,
-0.14705702662467957,
0.015124601311981678,
0.06202467903494835,
0.0729237049818039,
0.158702552318573,
0.007492720149457455,
-0.14139723777770996,
-0.004496438428759575,
0.055216897279024124,
-0.02326507866382599,
-0.09171844273805618,
0.07388392090797424,
0.008532962761819363,
-0.07785152643918991,
0.014345762319862843,
-0.04367091879248619,
0.159242644906044,
-0.09109298884868622,
0.052159398794174194,
-0.0778718814253807,
-0.034777406603097916,
0.022839171811938286,
0.06558409333229065,
-0.3533845543861389,
0.13549847900867462,
0.0010301335714757442,
-0.028002653270959854,
-0.154065802693367,
0.012745370157063007,
0.06702077388763428,
0.06363264471292496,
0.06907512992620468,
-0.03866896778345108,
-0.1959158033132553,
0.02490222454071045,
-0.04112877696752548,
0.04166330024600029,
0.073206327855587,
-0.13520319759845734,
-0.014110401272773743,
-0.02865438722074032,
-0.008597485721111298,
-0.07435891032218933,
-0.1308484524488449,
-0.0359705314040184,
-0.17551545798778534,
0.09144087135791779,
0.17086191475391388,
0.10375957936048508,
0.04504114389419556,
0.027132602408528328,
-0.0369754433631897,
0.21345476806163788,
-0.020323697477579117,
-0.1034807562828064,
-0.09707645326852798,
0.00899566151201725,
-0.03669795021414757,
-0.14438602328300476,
-0.06826160103082657,
-0.08500970900058746,
0.06948389858007431,
-0.05479807406663895,
-0.16429416835308075,
0.0664532333612442,
-0.10114073008298874,
-0.013078792952001095,
-0.03765523433685303,
0.12497803568840027,
-0.007246227469295263,
-0.008749946020543575,
0.03311752527952194,
-0.030120741575956345,
-0.11985611915588379,
-0.08176644891500473,
-0.054455190896987915,
-0.03313582018017769,
0.10898244380950928,
0.03178788349032402,
-0.060182321816682816,
-0.029223443940281868,
-0.11810410767793655,
-0.06850437074899673,
0.2953953742980957,
0.048750199377536774,
0.0011476759100332856,
0.1064702570438385,
0.11998409777879715,
-0.026253286749124527,
-0.30850672721862793,
-0.18515169620513916,
-0.10324754565954208,
-0.07156272232532501,
-0.09736256301403046,
-0.25403234362602234,
0.12436222285032272,
-0.0335036963224411,
-0.01675211451947689,
0.009999006986618042,
-0.1482304483652115,
-0.07798433303833008,
0.17459677159786224,
0.0960831344127655,
0.3324183225631714,
-0.2147013396024704,
-0.08125697821378708,
-0.045545533299446106,
-0.10183548927307129,
0.1337030977010727,
-0.12427008897066116,
0.08163981884717941,
-0.021709877997636795,
0.17697377502918243,
0.036356084048748016,
-0.007669156417250633,
0.061197590082883835,
0.0165790356695652,
-0.03576446324586868,
-0.11456618458032608,
-0.0949815884232521,
-0.062285855412483215,
-0.023734034970402718,
0.03625256195664406,
-0.17582274973392487,
-0.005316399037837982,
-0.06139196828007698,
-0.012871351093053818,
-0.045776695013046265,
-0.02323254384100437,
0.03540283441543579,
-0.051677156239748,
-0.04283729940652847,
-0.02387452870607376,
-0.03803243488073349,
0.0766170546412468,
0.29606303572654724,
-0.12661902606487274,
0.16752280294895172,
-0.0235548485070467,
0.13178594410419464,
-0.1487894058227539,
0.055568672716617584,
-0.09460576623678207,
-0.027501055970788002,
0.08097141981124878,
-0.15464220941066742,
0.09064224362373352,
0.04359215870499611,
-0.05186206474900246,
0.08253858238458633,
0.06058661267161369,
-0.027206817641854286,
0.050504282116889954,
0.1030048131942749,
-0.1946556270122528,
-0.008140803314745426,
-0.06801281869411469,
0.18052810430526733,
0.062286924570798874,
0.09994987398386002,
0.1874670833349228,
0.0467493012547493,
-0.09175365418195724,
-0.012064900249242783,
0.013536111451685429,
-0.0073389932513237,
-0.028363555669784546,
-0.018552642315626144,
0.021644212305545807,
-0.16380125284194946,
0.0023503610864281654,
0.07252131402492523,
-0.12429165095090866,
0.013667360879480839,
0.19531947374343872,
-0.0851740911602974,
-0.19131523370742798,
0.04346149042248726,
0.1363009363412857,
-0.10759901255369186,
-0.04208676889538765,
-0.07584953308105469,
-0.09931711107492447,
0.056875601410865784,
0.2033785730600357,
0.06292885541915894,
0.04524875432252884,
0.004822042305022478,
-0.0002772643347270787,
-0.1010119616985321,
-0.047460515052080154,
-0.08081039786338806,
0.03733786940574646,
-0.10505487769842148,
-0.012283687479794025,
0.005029068794101477,
0.14626851677894592,
-0.06110671907663345,
-0.089434914290905,
-0.17466799914836884,
0.023813553154468536,
-0.05858519673347473,
0.0035903272219002247,
-0.14766481518745422,
-0.026399163529276848,
0.02539767138659954,
-0.01964368112385273,
-0.004960834980010986,
-0.007630054838955402,
-0.11123208701610565,
0.025453854352235794,
-0.025042889639735222,
0.042506176978349686,
-0.10014936327934265,
0.024258971214294434,
0.0711839497089386,
-0.024820713326334953,
0.11761793494224548,
0.1190042570233345,
-0.11866077035665512,
0.04764719679951668,
-0.15923887491226196,
-0.08187411725521088,
0.022488078102469444,
0.03590327873826027,
0.03507033735513687,
0.08500542491674423,
0.010912326164543629,
0.018734728917479515,
0.06046512722969055,
0.02761923335492611,
0.11158962547779083,
-0.038912855088710785,
0.08990641683340073,
-0.013808606192469597,
-0.12740086019039154,
-0.0660703256726265,
0.0416606068611145,
0.10682083666324615,
0.035968758165836334,
0.05841828137636185,
-0.03538830950856209,
0.09410617500543594,
0.013924412429332733,
0.06963547319173813,
0.04351218789815903,
-0.14281147718429565,
-0.04232218489050865,
-0.15254423022270203,
-0.0028134919703006744,
0.0072293970733881,
0.07407944649457932,
-0.021257344633340836,
-0.0306708887219429,
0.01805225759744644,
0.022716881707310677,
0.036802832037210464,
-0.004505137447267771,
0.09737822413444519,
0.037957534193992615,
-0.03651722893118858,
-0.007493763230741024,
0.06138896197080612,
0.05557063966989517,
0.034625113010406494,
0.1558184027671814,
0.016855424270033836,
0.022541828453540802,
0.05459111928939819,
0.00021506089251488447,
0.09596238285303116,
-0.09968727082014084,
-0.18966199457645416,
-0.08669472485780716,
-0.04572615772485733,
-0.017309989780187607,
0.07101890444755554,
0.17730264365673065,
0.025944240391254425,
-0.017496660351753235,
-0.02185484766960144,
-0.018809257075190544,
-0.13819409906864166,
-0.13924674689769745,
-0.09923655539751053,
-0.08295009285211563,
0.010302786715328693,
-0.06547936052083969,
0.008622035384178162,
0.0429561547935009,
0.07416389882564545,
-0.02213001251220703,
0.11072047799825668,
0.02742348611354828,
-0.05974116176366806,
-0.006397634744644165,
-0.06030748039484024,
0.01979765295982361,
0.0072073726914823055,
0.003625538432970643,
-0.12173435837030411,
0.006730447988957167,
-0.018358416855335236,
0.07507095485925674,
-0.0039097946137189865,
0.04177648946642876,
-0.14360228180885315,
-0.1113654375076294,
-0.03811272978782654,
0.034663498401641846,
0.04300718382000923,
0.16560636460781097,
0.009351578541100025,
-0.022764934226870537,
0.03501512110233307,
0.17225335538387299,
-0.017210962250828743,
0.009951808489859104,
-0.11371740698814392,
0.1367739737033844,
0.022824157029390335,
0.00987140741199255,
-0.06235736235976219,
0.052293017506599426,
-0.11000658571720123,
0.3725379407405853,
0.21688275039196014,
-0.082478366792202,
0.038590408861637115,
-0.008527091704308987,
0.039700184017419815,
0.0383303165435791,
0.1024225652217865,
0.11315328627824783,
0.15367384254932404,
-0.051926493644714355,
-0.09003793448209763,
0.0053548747673630714,
-0.012177581898868084,
-0.1317627876996994,
0.034142687916755676,
-0.023356063291430473,
-0.040136344730854034,
0.018593275919556618,
0.08878863602876663,
-0.21441587805747986,
0.0843212753534317,
-0.03396274521946907,
-0.13209901750087738,
-0.06279335170984268,
0.01857992261648178,
0.06839549541473389,
0.04653763025999069,
0.07333647459745407,
0.033219270408153534,
-0.06545223295688629,
0.026792118325829506,
0.031573474407196045,
-0.1744329184293747,
0.05073285102844238,
0.09530770033597946,
-0.14062967896461487,
0.06670670956373215,
-0.04199307784438133,
-0.004909933544695377,
0.11723243445158005,
-0.006381740793585777,
-0.04085194692015648,
0.03770015388727188,
-0.0081280916929245,
-0.08167796581983566,
0.04233407974243164,
0.08753055334091187,
-0.034240081906318665,
-0.012756445445120335,
0.06541509926319122,
-0.16743704676628113,
0.0004542646056506783,
0.017856068909168243,
0.025573154911398888,
0.0025928583927452564,
0.097262442111969,
-0.10714802891016006,
0.07844609767198563,
0.06521432846784592,
-0.013839715160429478,
0.0012886598706245422,
-0.003330953186377883,
0.014234711416065693,
-0.02646373026072979,
-0.10552996397018433,
-0.16352126002311707,
-0.13091902434825897,
-0.12098806351423264,
-0.019180940464138985,
0.02048540487885475,
-0.2037709653377533,
0.02888486161828041,
-0.11173636466264725,
0.04233136028051376,
-0.16543053090572357,
0.04983004555106163,
0.1280258446931839,
-0.0013291090726852417,
0.01008687075227499,
-0.002106369473040104,
0.018125692382454872,
0.08920411765575409,
-0.14706610143184662,
-0.09701342135667801
] |
null | null |
transformers
|
# My Awesome Model
|
{"tags": ["conversational"]}
|
text-generation
|
brokentx/newbrokiev2
|
[
"transformers",
"pytorch",
"gpt2",
"text-generation",
"conversational",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
# My Awesome Model
|
[
"# My Awesome Model"
] |
[
"TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"# My Awesome Model"
] |
[
51,
4
] |
[
"passage: TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n# My Awesome Model"
] |
[
-0.05259015038609505,
0.05521034821867943,
-0.005910294596105814,
0.017722278833389282,
0.15250112116336823,
0.02286236733198166,
0.07657632976770401,
0.09513414651155472,
-0.025391526520252228,
-0.047348517924547195,
0.15119488537311554,
0.19781284034252167,
-0.020334534347057343,
0.101333387196064,
-0.04688440263271332,
-0.3143521845340729,
0.06439975649118423,
0.05463787540793419,
-0.015605635941028595,
0.12023304402828217,
0.09468326717615128,
-0.0530015267431736,
0.08742043375968933,
-0.012155864387750626,
-0.1293085366487503,
-0.0027921805158257484,
-0.002384399762377143,
-0.10180269181728363,
0.11194873601198196,
0.033712033182382584,
0.05166437849402428,
0.0182647667825222,
-0.05843055993318558,
-0.139859139919281,
0.03845210000872612,
-0.015005595050752163,
-0.05602653697133064,
0.05648263916373253,
0.059830192476511,
-0.07164353132247925,
0.1669619083404541,
0.13275989890098572,
-0.04237370565533638,
0.056127581745386124,
-0.17620700597763062,
0.017941240221261978,
0.01800798624753952,
0.019184142351150513,
0.05306641012430191,
0.10830496996641159,
-0.03932326287031174,
0.09217294305562973,
-0.11410652846097946,
0.08313368260860443,
0.07800983637571335,
-0.29151955246925354,
-0.025312699377536774,
0.10440942645072937,
0.06437138468027115,
0.048375632613897324,
-0.013386772945523262,
0.0621674507856369,
0.02149512618780136,
0.008602659218013287,
0.02225899137556553,
-0.06727100163698196,
-0.05789240449666977,
0.032748885452747345,
-0.0967593789100647,
-0.03634428232908249,
0.19753605127334595,
-0.024647634476423264,
0.053590498864650726,
-0.06265407055616379,
-0.11300963163375854,
-0.039751436561346054,
-0.050429005175828934,
-0.029761891812086105,
-0.05090925097465515,
0.09489558637142181,
0.004352911841124296,
-0.09534718841314316,
-0.13405443727970123,
-0.01370926946401596,
-0.1618979275226593,
0.15892250835895538,
0.012579603120684624,
0.046201955527067184,
-0.19210097193717957,
0.11465331166982651,
-0.03857925534248352,
-0.08259090781211853,
0.030513519421219826,
-0.12010065466165543,
0.03160654753446579,
-0.008132083341479301,
-0.019599268212914467,
-0.049325279891490936,
0.061037879437208176,
0.08101806789636612,
0.018783701583743095,
0.005755073390901089,
0.018167443573474884,
0.05343452841043472,
0.05891622602939606,
0.10033947974443436,
-0.02891627699136734,
-0.0625043511390686,
0.0025436533614993095,
-0.12051084637641907,
-0.01122665498405695,
-0.05357983708381653,
-0.18095199763774872,
0.002246231772005558,
0.02455340512096882,
0.05192234739661217,
0.011778532527387142,
0.09955989569425583,
-0.028496338054537773,
-0.026898741722106934,
0.06898727267980576,
0.002862759632989764,
-0.015707949176430702,
-0.005368964280933142,
-0.010934269987046719,
0.11485416442155838,
-0.023099146783351898,
0.04774846136569977,
-0.12022071331739426,
0.020393015816807747,
-0.07851235568523407,
-0.0019349842332303524,
-0.06214260309934616,
-0.04864754155278206,
-0.0019346009939908981,
-0.06985589861869812,
0.021118074655532837,
-0.14833110570907593,
-0.17990200221538544,
-0.005064866971224546,
0.021302316337823868,
-0.052403319627046585,
-0.09162671118974686,
-0.0982397273182869,
-0.02586611732840538,
0.03574685752391815,
-0.05873546749353409,
0.013170980848371983,
-0.06884536147117615,
0.06542801111936569,
0.0029820678755640984,
0.05682007595896721,
-0.14085575938224792,
0.08719147741794586,
-0.12582023441791534,
-0.023288866505026817,
-0.061977192759513855,
0.1109607070684433,
0.024780582636594772,
0.1267160177230835,
0.004311583004891872,
-0.0033308975398540497,
-0.08729329705238342,
0.08271238207817078,
-0.04243258014321327,
0.22770646214485168,
-0.10479787737131119,
-0.08809807151556015,
0.2632525563240051,
-0.05423165112733841,
-0.16432519257068634,
0.10179096460342407,
-0.014350244775414467,
0.12198644131422043,
0.13850919902324677,
0.16080057621002197,
0.007628654129803181,
0.03313867375254631,
0.10115300863981247,
0.08631709218025208,
-0.08573295921087265,
-0.0611947737634182,
0.023627014830708504,
-0.011463395319879055,
-0.10670105367898941,
0.046802595257759094,
0.04794782027602196,
0.08188598603010178,
-0.04982871189713478,
-0.028600862249732018,
-0.01972118206322193,
-0.044152840971946716,
0.05264130234718323,
0.007675500120967627,
0.13217447698116302,
-0.03674980252981186,
-0.03692879155278206,
-0.023745311424136162,
0.01699630729854107,
-0.03115241602063179,
0.007061392068862915,
-0.05687357112765312,
0.11091547459363937,
-0.03406180441379547,
0.051789235323667526,
-0.16953988373279572,
-0.04873261600732803,
-0.02087729424238205,
0.1402055323123932,
0.04973345249891281,
0.1329866498708725,
0.06287940591573715,
-0.010758201591670513,
0.00859389640390873,
0.007998145185410976,
0.13181665539741516,
0.007865442894399166,
-0.07660657912492752,
-0.047718439251184464,
0.09176599979400635,
-0.05973208695650101,
0.06147782504558563,
-0.098741315305233,
-0.004747362341731787,
-0.01433002483099699,
0.08674649894237518,
0.006352655589580536,
0.029382232576608658,
-0.006192679051309824,
0.003654100699350238,
-0.06161240115761757,
0.017873648554086685,
0.12492607533931732,
-0.01421504095196724,
-0.07439801841974258,
0.22084392607212067,
-0.15798072516918182,
0.18006981909275055,
0.18165533244609833,
-0.3081994652748108,
0.024602634832262993,
-0.08860466629266739,
-0.036338552832603455,
0.03426366671919823,
0.0491504967212677,
-0.034147560596466064,
0.16587987542152405,
-0.016766328364610672,
0.201018825173378,
-0.03547777235507965,
-0.01287798210978508,
-0.010399105958640575,
-0.03656993433833122,
-0.010632630437612534,
0.09065473079681396,
0.15122920274734497,
-0.1677125245332718,
0.18270380795001984,
0.1660280078649521,
0.06873020529747009,
0.17776396870613098,
0.034313347190618515,
-0.006856906693428755,
0.07112615555524826,
-0.022670727223157883,
-0.07675548642873764,
-0.049287427216768265,
-0.26302891969680786,
-0.027947327122092247,
0.06471601128578186,
0.04510856419801712,
0.11924877762794495,
-0.10971947014331818,
-0.037208184599876404,
0.010892451740801334,
-0.013165894895792007,
0.02132410928606987,
0.09682225435972214,
0.01171150617301464,
0.11804302036762238,
-0.021027036011219025,
-0.05209195241332054,
0.0898953229188919,
0.02727191150188446,
-0.0787680521607399,
0.19168277084827423,
-0.10074768215417862,
-0.3233809769153595,
-0.11354339867830276,
-0.18166927993297577,
-0.017843691632151604,
0.05878754332661629,
0.08049646019935608,
-0.09228580445051193,
-0.02625267766416073,
-0.01639235019683838,
0.0758359357714653,
-0.09145816415548325,
-0.015880629420280457,
-0.09367848187685013,
0.034986745566129684,
-0.10827737301588058,
-0.07011983543634415,
-0.05141967162489891,
-0.03368452936410904,
-0.04457031562924385,
0.13157756626605988,
-0.12242637574672699,
0.06396433711051941,
0.2076517641544342,
0.06227295100688934,
0.05622440204024315,
-0.0229496993124485,
0.23288212716579437,
-0.10842552781105042,
0.02383521944284439,
0.1717897206544876,
-0.03566030040383339,
0.0727933868765831,
0.13435456156730652,
0.006721907295286655,
-0.08144525438547134,
0.03465581312775612,
-0.04592517390847206,
-0.08630958944559097,
-0.20441576838493347,
-0.14156180620193481,
-0.12814727425575256,
0.07913564145565033,
0.03285396471619606,
0.05478321388363838,
0.15024253726005554,
0.11386489123106003,
0.007987297140061855,
0.00976672861725092,
-0.006888182368129492,
0.05438044294714928,
0.17482298612594604,
-0.05838097631931305,
0.10041683167219162,
-0.037591226398944855,
-0.1924494504928589,
0.08022978901863098,
0.04309763014316559,
0.08280511945486069,
0.07474655658006668,
0.0856199786067009,
0.013537914492189884,
0.03723837807774544,
0.10897084325551987,
0.1165735274553299,
0.031679023057222366,
-0.038079675287008286,
-0.04882059991359711,
-0.026300756260752678,
-0.03285675123333931,
0.05745977535843849,
0.07790146768093109,
-0.1608346849679947,
-0.06348084658384323,
-0.06350091099739075,
0.07662643492221832,
0.09017108380794525,
0.11811108142137527,
-0.21219493448734283,
0.01579318381845951,
0.092556893825531,
-0.0494147390127182,
-0.1304239183664322,
0.07402537018060684,
-0.00466050673276186,
-0.1397053301334381,
0.037663187831640244,
-0.014095795340836048,
0.1359514445066452,
-0.0778401643037796,
0.10336452722549438,
-0.08307972550392151,
-0.06147889420390129,
0.03632286190986633,
0.1355396956205368,
-0.30774354934692383,
0.2137020230293274,
-0.022472934797406197,
-0.05296783149242401,
-0.10508129745721817,
-0.011727629229426384,
0.020913105458021164,
0.09079049527645111,
0.10090240091085434,
-0.0025442070327699184,
0.0061299679800868034,
-0.0345483273267746,
-0.053218815475702286,
0.024456629529595375,
0.07957815378904343,
-0.08542889356613159,
0.0017540202243253589,
-0.02361489273607731,
-0.004407065454870462,
-0.032844748347997665,
-0.01189463958144188,
-0.011617658659815788,
-0.16786961257457733,
0.06556065380573273,
-0.002625665394589305,
0.11129079759120941,
0.03491498529911041,
0.0024013579823076725,
-0.1009332686662674,
0.19977013766765594,
0.01796281896531582,
-0.08052749931812286,
-0.08830537647008896,
-0.03254766762256622,
0.03660419583320618,
-0.06121435388922691,
0.027481911703944206,
-0.06916457414627075,
0.033381566405296326,
-0.06441576033830643,
-0.18325145542621613,
0.1268530637025833,
-0.10945470631122589,
-0.03609596937894821,
-0.04321056231856346,
0.18323224782943726,
-0.00929707009345293,
-0.0011623724130913615,
0.05866571143269539,
0.0032208464108407497,
-0.1347510665655136,
-0.10740556567907333,
0.020214511081576347,
-0.015275230631232262,
0.009142245166003704,
0.05559912323951721,
-0.009665844030678272,
0.00045268211397342384,
-0.039558928459882736,
-0.023234419524669647,
0.32348164916038513,
0.10732097923755646,
-0.04944206401705742,
0.17007054388523102,
0.13087597489356995,
-0.0827672928571701,
-0.30699312686920166,
-0.10971353948116302,
-0.10529600828886032,
-0.026918673887848854,
-0.037983208894729614,
-0.19617970287799835,
0.09504909813404083,
-0.03528566658496857,
-0.022136637941002846,
0.11253651231527328,
-0.2759084105491638,
-0.0770430713891983,
0.1826775223016739,
0.003314757253974676,
0.3998824954032898,
-0.10265109688043594,
-0.08777514100074768,
-0.06741699576377869,
-0.1120782196521759,
0.2033512443304062,
-0.05560711398720741,
0.08663415163755417,
-0.00517998356372118,
0.15513743460178375,
0.055607251822948456,
-0.02176513522863388,
0.08932057023048401,
-0.005811662413179874,
-0.0546204075217247,
-0.1219351515173912,
-0.03444604203104973,
-0.009159418754279613,
0.007239421829581261,
0.03589896112680435,
-0.04242607578635216,
0.01279151439666748,
-0.1399589478969574,
-0.045490626245737076,
-0.0764620453119278,
0.024699507281184196,
0.021008269861340523,
-0.0652410089969635,
-0.01643640361726284,
-0.03945036977529526,
-0.012804778292775154,
0.03164318576455116,
0.15236099064350128,
-0.06478006392717361,
0.1476556956768036,
0.04904455319046974,
0.15412139892578125,
-0.14745712280273438,
-0.02258288487792015,
-0.06896031647920609,
-0.05498642474412918,
0.04900865629315376,
-0.10053684562444687,
0.050061121582984924,
0.1202658861875534,
-0.0742902010679245,
0.0987328365445137,
0.0922594666481018,
-0.01938629150390625,
0.0012483424507081509,
0.1226617842912674,
-0.2489612102508545,
-0.07742628455162048,
-0.10509459674358368,
0.013337249867618084,
0.10138551890850067,
0.06995654851198196,
0.17304721474647522,
-0.0037713919300585985,
-0.036284226924180984,
-0.0064643872901797295,
0.025414984673261642,
-0.03540204465389252,
0.05724727362394333,
-0.002706433180719614,
0.016663886606693268,
-0.15213344991207123,
0.060368724167346954,
-0.00024176653823815286,
-0.1438901126384735,
-0.013603870756924152,
0.16073721647262573,
-0.11208858340978622,
-0.15145981311798096,
-0.007263668347150087,
0.13685113191604614,
-0.13171035051345825,
-0.03302847594022751,
-0.03708777576684952,
-0.170182466506958,
0.07439173012971878,
0.1024777740240097,
0.08549231290817261,
0.08025266975164413,
-0.06620611250400543,
-0.00807863101363182,
-0.011656313203275204,
-0.026087598875164986,
0.031810320913791656,
-0.023377234116196632,
-0.09044221043586731,
0.03872343525290489,
-0.026654237881302834,
0.13591371476650238,
-0.09607382118701935,
-0.09331836551427841,
-0.135749951004982,
0.039314381778240204,
-0.12405620515346527,
-0.08138058334589005,
-0.12200927734375,
-0.0591500885784626,
0.00224387738853693,
-0.0001289021165575832,
-0.035674065351486206,
-0.06687422841787338,
-0.13582271337509155,
0.04366770386695862,
-0.04484611004590988,
0.0013091047294437885,
-0.040241483598947525,
0.04561002552509308,
0.06766383349895477,
-0.03493715822696686,
0.13722217082977295,
0.11722734570503235,
-0.07864081114530563,
0.08946478366851807,
-0.16657429933547974,
-0.0683990865945816,
0.08854512125253677,
0.008173754438757896,
0.06165994703769684,
0.06743349134922028,
0.033807408064603806,
0.06109451875090599,
0.04151686280965805,
0.03488299250602722,
0.01739438995718956,
-0.09271225333213806,
0.015541021712124348,
0.022296719253063202,
-0.1294609159231186,
-0.04801803454756737,
-0.029226921498775482,
0.00939185917377472,
0.008117396384477615,
0.11003357172012329,
-0.0426274873316288,
0.09439733624458313,
-0.05888751894235611,
0.036728594452142715,
0.016222506761550903,
-0.16461637616157532,
-0.020102784037590027,
-0.11915475130081177,
0.028684545308351517,
-0.0033096212428063154,
0.25625869631767273,
0.06346847862005234,
0.020517030730843544,
0.01250078622251749,
0.08567021042108536,
0.07241600006818771,
0.02562166005373001,
0.1956365555524826,
0.10854171961545944,
-0.05020022392272949,
-0.12334850430488586,
0.09686340391635895,
0.034720368683338165,
0.06432123482227325,
0.13385434448719025,
-0.026959087699651718,
0.002498799469321966,
0.11019360274076462,
0.011678861454129219,
0.04961980879306793,
-0.09859088063240051,
-0.16400282084941864,
-0.00994415208697319,
0.061864156275987625,
-0.04559077322483063,
0.12240655720233917,
0.11382720619440079,
-0.020697353407740593,
0.03180128335952759,
-0.010503606870770454,
-0.05694027617573738,
-0.16998925805091858,
-0.1630837321281433,
-0.08357038348913193,
-0.11794789135456085,
-0.0027763545513153076,
-0.11386270076036453,
0.013879159465432167,
0.06452289968729019,
0.0604364387691021,
-0.09019444137811661,
0.08891061693429947,
0.0687386617064476,
-0.11843101680278778,
0.08828350901603699,
-0.033263903111219406,
0.07249268144369125,
0.0015160300536081195,
0.003872724948450923,
-0.13800905644893646,
0.032393742352724075,
-0.008493867702782154,
0.04159298539161682,
-0.09244006127119064,
0.022458361461758614,
-0.11297028511762619,
-0.07659684121608734,
-0.07971972227096558,
0.05093973129987717,
-0.03541257977485657,
0.1390930563211441,
0.001295371213927865,
-0.035233911126852036,
0.024190181866288185,
0.22729112207889557,
-0.06350252777338028,
-0.030667411163449287,
-0.0618741400539875,
0.21414142847061157,
0.024466563016176224,
0.10703565180301666,
-0.016775688156485558,
0.019240234047174454,
-0.0764411985874176,
0.3689337372779846,
0.344390869140625,
-0.1225387305021286,
-0.0015968306688591838,
0.031062176451086998,
0.036916591227054596,
0.11621878296136856,
0.12602226436138153,
0.057955991476774216,
0.2995031177997589,
-0.08396036922931671,
-0.002026971662417054,
-0.02688612788915634,
-0.03624163940548897,
-0.04409930482506752,
0.10547586530447006,
0.06835740804672241,
-0.03330419585108757,
-0.027012333273887634,
0.1376710683107376,
-0.2966996431350708,
0.12323499470949173,
-0.15714547038078308,
-0.1487535685300827,
-0.06873904913663864,
-0.005042468197643757,
0.08589684963226318,
0.04748665541410446,
0.1069009080529213,
-0.019124338403344154,
-0.08203735202550888,
0.05766449123620987,
0.0320524163544178,
-0.22844897210597992,
0.011852608993649483,
0.08361081779003143,
-0.06153005734086037,
0.011767351068556309,
-0.017906347289681435,
0.038472190499305725,
0.07790610194206238,
0.025976579636335373,
-0.032770540565252304,
0.06325861811637878,
-0.005814229138195515,
-0.05033424496650696,
0.04302205145359039,
0.05059972032904625,
0.017107632011175156,
-0.1511564701795578,
0.07320158183574677,
-0.1762860119342804,
0.0566408596932888,
-0.005331212189048529,
-0.04948166385293007,
0.000018263708625454456,
0.01998119056224823,
-0.06808236241340637,
0.05880929157137871,
0.0952666699886322,
-0.012173139490187168,
-0.002317852806299925,
-0.056667573750019073,
0.007662574760615826,
-0.0679154172539711,
-0.0747012197971344,
-0.10497893393039703,
-0.1338900774717331,
-0.11392296850681305,
0.10846775025129318,
-0.011928223073482513,
-0.19833622872829437,
0.02906924858689308,
-0.11258108913898468,
0.04933213070034981,
-0.13360801339149475,
0.08599711954593658,
0.1282832771539688,
0.021543797105550766,
-0.01265349704772234,
0.04020093381404877,
0.01591683179140091,
0.08550478518009186,
-0.09200563281774521,
-0.10515180230140686
] |
null | null |
transformers
|
# docusco-bert
## Model description
**docusco-bert** is a fine-tuned BERT model that is ready to use for **token classification**. The model was trained on data sampled from the Corpus of Contemporary American English ([COCA](https://www.english-corpora.org/coca/)) and classifies tokens and token sequences according to a system developed for the [**DocuScope**](https://www.cmu.edu/dietrich/english/research-and-publications/docuscope.html) dictionary-based tagger. Descriptions of the categories are included in a table below.
## About DocuScope
DocuScope is a dictionary-based tagger that has been developed at Carnegie Mellon University by **David Kaufer** and **Suguru Ishizaki** since the early 2000s. Its categories are rhetorical in their orientation (as opposed to part-of-speech tags, for example, which are morphosyntactic).
DocuScope has been used in [a wide variety of studies](https://scholar.google.com/scholar?hl=en&as_sdt=0%2C5&q=docuscope&btnG=). Here, for example, is [a short analysis of King Lear](https://graphics.cs.wisc.edu/WP/vep/2017/02/14/guest-post-data-mining-king-lear/), and here is [a published study of Tweets](https://journals.sagepub.com/doi/full/10.1177/2055207619844865).
## Intended uses & limitations
#### How to use
The model was trained on data with tags formatted using [IOB](https://en.wikipedia.org/wiki/Inside%E2%80%93outside%E2%80%93beginning_(tagging)), like those used in common tasks such as Named Entity Recognition (NER). Thus, you can use this model with a Transformers NER *pipeline*.
```python
from transformers import AutoTokenizer, AutoModelForTokenClassification
from transformers import pipeline

# Load the fine-tuned DocuScope tagger and its tokenizer
tokenizer = AutoTokenizer.from_pretrained("browndw/docusco-bert")
model = AutoModelForTokenClassification.from_pretrained("browndw/docusco-bert")

# A standard NER pipeline works because the model uses the same IOB tagging scheme
nlp = pipeline("ner", model=model, tokenizer=tokenizer)

example = "Globalization is the process of interaction and integration among people, companies, and governments worldwide."
ds_results = nlp(example)
print(ds_results)
```
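By default, the pipeline returns one prediction per WordPiece sub-token with the **B-**/**I-** prefixes intact. If you would rather work with whole category spans, recent Transformers releases can merge them for you. Continuing from the example above, the snippet below is a hedged sketch of that option (the `aggregation_strategy` argument and the `entity_group` output key are features of newer Transformers versions, not something specific to this model; older versions expose `grouped_entities=True` instead).
```python
# Optional: merge sub-tokens and adjacent B-/I- tags into whole category spans.
# Assumes a recent transformers release; older versions use grouped_entities=True.
nlp_grouped = pipeline(
    "ner",
    model=model,
    tokenizer=tokenizer,
    aggregation_strategy="simple",
)

for span in nlp_grouped(example):
    print(span["entity_group"], "->", span["word"], round(span["score"], 3))
```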
#### Limitations and bias
This model is limited by its training dataset of American English texts. Moreover, the current version is trained on only a small subset of the corpus. The goal is to train later versions on more data, which should increase accuracy.
## Training data
This model was fine-tuned on data from the Corpus of Contemporary American English ([COCA](https://www.english-corpora.org/coca/)). The training data contain chunks of text randomly sampled from 5 text-types: Academic, Fiction, Magazine, News, and Spoken.
Typically, BERT models are trained on sentence segments. However, DocuScope tags can span sentences. Thus, data were split into chunks that don't split **B + I** sequences and end with sentence-final punctuation marks (i.e., period, question mark, or exclamation point).
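To make that chunking constraint concrete, here is a minimal sketch of the logic (illustrative only; the function name, the `target_len` threshold, and the token/tag input format are assumptions, not the actual preprocessing script used for this model):
```python
import re

SENT_FINAL = re.compile(r"[.?!]$")

def chunk_iob(tokens, tags, target_len=200):
    """Grow a chunk token by token and close it only at sentence-final
    punctuation, and only if the next tag does not continue an I- sequence,
    so B + I spans are never split across chunks (sketch only)."""
    chunks, current = [], []
    for i, (tok, tag) in enumerate(zip(tokens, tags)):
        current.append((tok, tag))
        next_tag = tags[i + 1] if i + 1 < len(tags) else "O"
        at_sentence_end = SENT_FINAL.search(tok) is not None
        if at_sentence_end and not next_tag.startswith("I-") and len(current) >= target_len:
            chunks.append(current)
            current = []
    if current:  # keep any trailing partial chunk
        chunks.append(current)
    return chunks
```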
Additionally, the order of the chunks was randomized prior to sampling, and stratified sampling was used to provide enough training data for low-frequency categories. The resulting training data consist of:
* 21,460,177 tokens
* 15,796,305 chunks
The specific counts for each category appear in the following table.
Category|Count
-|-
O|3528038
Syntactic Complexity|2032808
Character|1413771
Description|1224744
Narrative|1159201
Negative|651012
Academic Terms|620932
Interactive|594908
Information Exposition|578228
Positive|463914
Force Stressed|432631
Information Topics|394155
First Person|249744
Metadiscourse Cohesive|240822
Strategic|238255
Public Terms|234213
Reasoning|213775
Information Place|187249
Information States|173146
Information ReportVerbs|119092
Confidence High|112861
Confidence Hedged|110008
Future|96101
Inquiry|94995
Contingent|94860
Information Change|89063
Metadiscourse Interactive|84033
Updates|81424
Citation|71241
Facilitate|50451
Uncertainty|35644
Academic WritingMoves|29352
Information ChangePositive|28475
Responsibility|25362
Citation Authority|22414
Information ChangeNegative|15612
Confidence Low|2876
Citation Hedged|895
-|-
Total|15796305
## Training procedure
This model was trained on a single 2.3 GHz Dual-Core Intel Core i5 with recommended hyperparameters from the [original BERT paper](https://arxiv.org/pdf/1810.04805).
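The exact settings are not recorded in this card, but as a point of reference, a configuration following the BERT paper's recommendations would look roughly like the sketch below (the specific values shown, a 2e-5 learning rate, batch size 16, 3 epochs, and 0.01 weight decay, are assumptions drawn from the paper's suggested ranges rather than logged hyperparameters for this model).
```python
from transformers import TrainingArguments

# Hedged sketch of BERT-paper fine-tuning defaults; not the recorded settings.
args = TrainingArguments(
    output_dir="docusco-bert",
    learning_rate=2e-5,              # paper suggests 2e-5 to 5e-5
    per_device_train_batch_size=16,  # paper suggests 16 or 32
    num_train_epochs=3,              # paper suggests 2 to 4 epochs
    weight_decay=0.01,               # Adam with 0.01 L2 weight decay
)
```
A `Trainer` built from these arguments, the token-classification model, and the chunked training data would then reproduce the general setup described here.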
## Eval results
### Overall
metric|test
-|-
f1 |.927
accuracy |.943
### By category
category|precision|recall|f1-score|support
-|-|-|-|-
AcademicTerms|0.91|0.92|0.92|486399
AcademicWritingMoves|0.76|0.82|0.79|20017
Character|0.94|0.95|0.94|1260272
Citation|0.92|0.94|0.93|50812
CitationAuthority|0.86|0.88|0.87|17798
CitationHedged|0.91|0.94|0.92|632
ConfidenceHedged|0.94|0.96|0.95|90393
ConfidenceHigh|0.92|0.94|0.93|113569
ConfidenceLow|0.79|0.81|0.80|2556
Contingent|0.92|0.94|0.93|81366
Description|0.87|0.89|0.88|1098598
Facilitate|0.87|0.90|0.89|41760
FirstPerson|0.96|0.98|0.97|330658
ForceStressed|0.93|0.94|0.93|436188
Future|0.90|0.93|0.92|93365
InformationChange|0.88|0.91|0.89|72813
InformationChangeNegative|0.83|0.85|0.84|12740
InformationChangePositive|0.82|0.86|0.84|22994
InformationExposition|0.94|0.95|0.95|468078
InformationPlace|0.95|0.96|0.96|147688
InformationReportVerbs|0.91|0.93|0.92|95563
InformationStates|0.95|0.95|0.95|139429
InformationTopics|0.90|0.92|0.91|328152
Inquiry|0.85|0.89|0.87|79030
Interactive|0.95|0.96|0.95|602857
MetadiscourseCohesive|0.97|0.98|0.98|195548
MetadiscourseInteractive|0.92|0.94|0.93|73159
Narrative|0.92|0.94|0.93|1023452
Negative|0.88|0.89|0.88|645810
Positive|0.87|0.89|0.88|409775
PublicTerms|0.91|0.92|0.91|184108
Reasoning|0.93|0.95|0.94|169208
Responsibility|0.83|0.87|0.85|21819
Strategic|0.88|0.90|0.89|193768
SyntacticComplexity|0.95|0.96|0.96|1635918
Uncertainty|0.87|0.91|0.89|33684
Updates|0.91|0.93|0.92|77760
-|-|-|-|-
micro avg|0.92|0.93|0.93|10757736
macro avg|0.90|0.92|0.91|10757736
weighted avg|0.92|0.93|0.93|10757736
## DocuScope Category Descriptions
Category (Cluster)|Description|Examples
-|-|-
Academic Terms|Abstract, rare, specialized, or disciplinary-specific terms that are indicative of informationally dense writing|*market price*, *storage capacity*, *regulatory*, *distribution*
Academic Writing Moves|Phrases and terms that indicate academic writing moves, which are common in research genres and are derived from the work of Swales (1981) and Cotos et al. (2015, 2017)|*in the first section*, *the problem is that*, *payment methodology*, *point of contention*
Character|References multiple dimensions of a character or human being as a social agent, both individual and collective|*Pauline*, *her*, *personnel*, *representatives*
Citation|Language that indicates the attribution of information to, or citation of, another source.|*according to*, *is proposing that*, *quotes from*
Citation Authorized|Referencing the citation of another source that is represented as true and not arguable|*confirm that*, *provide evidence*, *common sense*
Citation Hedged|Referencing the citation of another source that is presented as arguable|*suggest that*, *just one opinion*
Confidence Hedged|Referencing language that presents a claim as uncertain|*tends to get*, *maybe*, *it seems that*
Confidence High|Referencing language that presents a claim with certainty|*most likely*, *ensure that*, *know that*, *obviously*
Confidence Low|Referencing language that presents a claim as extremely unlikely|*unlikely*, *out of the question*, *impossible*
Contingent|Referencing contingency, typically contingency in the world, rather than contingency in one's knowledge|*subject to*, *if possible*, *just in case*, *hypothetically*
Description|Language that evokes sights, sounds, smells, touches and tastes, as well as scenes and objects|*stay quiet*, *gas-fired*, *solar panels*, *soft*, *on my desk*
Facilitate|Language that enables or directs one through specific tasks and actions|*let me*, *worth a try*, *I would suggest*
First Person|This cluster captures first person.|*I*, *as soon as I*, *we have been*
Force Stressed|Language that is forceful and stressed, often using emphatics, comparative forms, or superlative forms|*really good*, *the sooner the better*, *necessary*
Future|Referencing future actions, states, or desires|*will be*, *hope to*, *expected changes*
Information Change|Referencing changes of information, particularly changes that are more neutral|*changes*, *revised*, *growth*, *modification to*
Information Change Negative|Referencing negative change|*going downhill*, *slow erosion*, *get worse*
Information Change Positive|Referencing positive change|*improving*, *accrued interest*, *boost morale*
Information Exposition|Information in the form of expository devices, or language that describes or explains, frequently in regards to quantities and comparisons|*final amount*, *several*, *three*, *compare*, *80%*
Information Place|Language designating places|*the city*, *surrounding areas*, *Houston*, *home*
Information Report Verbs|Informational verbs and verb phrases of reporting|*report*, *posted*, *release*, *point out*
Information States|Referencing information states, or states of being|*is*, *are*, *existing*, *been*
Information Topics|Referencing topics, usually nominal subjects or objects, that indicate the “aboutness” of a text|*time*, *money*, *stock price*, *phone interview*
Inquiry|Referencing inquiry, or language that points to some kind of inquiry or investigation|*find out*, *let me know if you have any questions*, *wondering if*
Interactive|Addresses from the author to the reader or from persons in the text to other persons. The address comes in the language of everyday conversation, colloquy, exchange, questions, attention-getters, feedback, interactive genre markers, and the use of the second person.|*can you*, *thank you for*, *please see*, *sounds good to me*
Metadiscourse Cohesive|The use of words to build cohesive markers that help the reader navigate the text and signal linkages in the text, which are often additive or contrastive|*or*, *but*, *also*, *on the other hand*, *notwithstanding*, *that being said*
Metadiscourse Interactive|The use of words to build cohesive markers that interact with the reader|*I agree*, *let’s talk*, *by the way*
Narrative|Language that involves people, description, and events extending in time|*today*, *tomorrow*, *during the*, *this weekend*
Negative|Referencing dimensions of negativity, including negative acts, emotions, relations, and values|*does not*, *sorry for*, *problems*, *confusion*
Positive|Referencing dimensions of positivity, including actions, emotions, relations, and values|*thanks*, *approval*, *agreement*, *looks good*
Public Terms|Referencing public terms, concepts from public language, media, the language of authority, institutions, and responsibility|*discussion*, *amendment*, *corporation*, *authority*, *settlement*
Reasoning|Language that has a reasoning focus, supporting inferences about cause, consequence, generalization, concession, and linear inference either from premise to conclusion or conclusion to premise|*because*, *therefore*, *analysis*, *even if*, *as a result*, *indicating that*
Responsibility|Referencing the language of responsibility|*supposed to*, *requirements*, *obligations*
Strategic|This dimension is active when the text structures strategies activism, advantage-seeking, game-playing cognition, plans, and goal-seeking.|*plan*, *trying to*, *strategy*, *decision*, *coordinate*, *look at the*
Syntactic Complexity|The features in this category are often what are called “function words,” like determiners and prepositions.|*the*, *to*, *for*, *in*, *a lot of*
Uncertainty|References uncertainty, when confidence levels are unknown|*kind of*, *I have no idea*, *for some reason*
Updates|References updates that anticipate someone searching for information and receiving it|*already*, *a new*, *now that*, *here are some*
### BibTeX entry and citation info
```
@incollection{ishizaki2012computer,
title = {Computer-aided rhetorical analysis},
author = {Ishizaki, Suguru and Kaufer, David},
booktitle= {Applied natural language processing: Identification, investigation and resolution},
pages = {276--296},
year = {2012},
publisher= {IGI Global},
url = {https://www.igi-global.com/chapter/content/61054}
}
```
```
@article{DBLP:journals/corr/abs-1810-04805,
author = {Jacob Devlin and
Ming{-}Wei Chang and
Kenton Lee and
Kristina Toutanova},
title = {{BERT:} Pre-training of Deep Bidirectional Transformers for Language
Understanding},
journal = {CoRR},
volume = {abs/1810.04805},
year = {2018},
url = {http://arxiv.org/abs/1810.04805},
archivePrefix = {arXiv},
eprint = {1810.04805},
timestamp = {Tue, 30 Oct 2018 20:39:56 +0100},
biburl = {https://dblp.org/rec/journals/corr/abs-1810-04805.bib},
bibsource = {dblp computer science bibliography, https://dblp.org}
}
```
|
{"language": "en", "datasets": "COCA"}
|
token-classification
|
browndw/docusco-bert
|
[
"transformers",
"pytorch",
"tf",
"jax",
"bert",
"token-classification",
"en",
"dataset:COCA",
"arxiv:1810.04805",
"autotrain_compatible",
"endpoints_compatible",
"has_space",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"1810.04805"
] |
[
"en"
] |
TAGS
#transformers #pytorch #tf #jax #bert #token-classification #en #dataset-COCA #arxiv-1810.04805 #autotrain_compatible #endpoints_compatible #has_space #region-us
|
docusco-bert
============
Model description
-----------------
docusco-bert is a fine-tuned BERT model that is ready to use for token classification. The model was trained on data sampled from the Corpus of Contemporary American English (COCA) and classifies tokens and token sequences according to a system developed for the DocuScope dictionary-based tagger. Descriptions of the categories are included in a table below.
About DocuScope
---------------
DocuScope is a dictionary-based tagger that has been developed at Carnegie Mellon University by David Kaufer and Suguru Ishizaki since the early 2000s. Its categories are rhetorical in their orientation (as opposed to part-of-speech tags, for example, which are morphosyntactic).
DocuScope has been used in a wide variety of studies. Here, for example, is a short analysis of King Lear, and here is a published study of Tweets.
Intended uses & limitations
---------------------------
#### How to use
The model was trained on data with tags formatted using IOB, like those used in common tasks such as Named Entity Recognition (NER). Thus, you can use this model with a Transformers NER *pipeline*.
#### Limitations and bias
This model is limited by its training dataset of American English texts. Moreover, the current version is trained on only a small subset of the corpus. The goal is to train later versions on more data, which should increase accuracy.
Training data
-------------
This model was fine-tuned on data from the Corpus of Contemporary American English (COCA). The training data contain chunks of text randomly sampled from 5 text-types: Academic, Fiction, Magazine, News, and Spoken.
Typically, BERT models are trained on sentence segments. However, DocuScope tags can span sentences. Thus, data were split into chunks that don't split B + I sequences and end with sentence-final punctuation marks (i.e., period, question mark, or exclamation point).
Additionally, the order of the chunks was randomized prior to sampling, and stratified sampling was used to provide enough training data for low-frequency categories. The resulting training data consist of:
* 21,460,177 tokens
* 15,796,305 chunks
The specific counts for each category appear in the following table.
Training procedure
------------------
This model was trained on a single 2.3 GHz Dual-Core Intel Core i5 with recommended hyperparameters from the original BERT paper.
Eval results
------------
### Overall
### By category
DocuScope Category Descriptions
-------------------------------
Category (Cluster): Academic Terms, Description: Abstract, rare, specialized, or disciplinary-specific terms that are indicative of informationally dense writing, Examples: *market price*, *storage capacity*, *regulatory*, *distribution*
Category (Cluster): Academic Writing Moves, Description: Phrases and terms that indicate academic writing moves, which are common in research genres and are derived from the work of Swales (1981) and Cotos et al. (2015, 2017), Examples: *in the first section*, *the problem is that*, *payment methodology*, *point of contention*
Category (Cluster): Character, Description: References multiple dimensions of a character or human being as a social agent, both individual and collective, Examples: *Pauline*, *her*, *personnel*, *representatives*
Category (Cluster): Citation, Description: Language that indicates the attribution of information to, or citation of, another source., Examples: *according to*, *is proposing that*, *quotes from*
Category (Cluster): Citation Authorized, Description: Referencing the citation of another source that is represented as true and not arguable, Examples: *confirm that*, *provide evidence*, *common sense*
Category (Cluster): Citation Hedged, Description: Referencing the citation of another source that is presented as arguable, Examples: *suggest that*, *just one opinion*
Category (Cluster): Confidence Hedged, Description: Referencing language that presents a claim as uncertain, Examples: *tends to get*, *maybe*, *it seems that*
Category (Cluster): Confidence High, Description: Referencing language that presents a claim with certainty, Examples: *most likely*, *ensure that*, *know that*, *obviously*
Category (Cluster): Confidence Low, Description: Referencing language that presents a claim as extremely unlikely, Examples: *unlikely*, *out of the question*, *impossible*
Category (Cluster): Contingent, Description: Referencing contingency, typically contingency in the world, rather than contingency in one's knowledge, Examples: *subject to*, *if possible*, *just in case*, *hypothetically*
Category (Cluster): Description, Description: Language that evokes sights, sounds, smells, touches and tastes, as well as scenes and objects, Examples: *stay quiet*, *gas-fired*, *solar panels*, *soft*, *on my desk*
Category (Cluster): Facilitate, Description: Language that enables or directs one through specific tasks and actions, Examples: *let me*, *worth a try*, *I would suggest*
Category (Cluster): First Person, Description: This cluster captures first person., Examples: *I*, *as soon as I*, *we have been*
Category (Cluster): Force Stressed, Description: Language that is forceful and stressed, often using emphatics, comparative forms, or superlative forms, Examples: *really good*, *the sooner the better*, *necessary*
Category (Cluster): Future, Description: Referencing future actions, states, or desires, Examples: *will be*, *hope to*, *expected changes*
Category (Cluster): Information Change, Description: Referencing changes of information, particularly changes that are more neutral, Examples: *changes*, *revised*, *growth*, *modification to*
Category (Cluster): Information Change Negative, Description: Referencing negative change, Examples: *going downhill*, *slow erosion*, *get worse*
Category (Cluster): Information Change Positive, Description: Referencing positive change, Examples: *improving*, *accrued interest*, *boost morale*
Category (Cluster): Information Exposition, Description: Information in the form of expository devices, or language that describes or explains, frequently in regards to quantities and comparisons, Examples: *final amount*, *several*, *three*, *compare*, *80%*
Category (Cluster): Information Place, Description: Language designating places, Examples: *the city*, *surrounding areas*, *Houston*, *home*
Category (Cluster): Information Report Verbs, Description: Informational verbs and verb phrases of reporting, Examples: *report*, *posted*, *release*, *point out*
Category (Cluster): Information States, Description: Referencing information states, or states of being, Examples: *is*, *are*, *existing*, *been*
Category (Cluster): Information Topics, Description: Referencing topics, usually nominal subjects or objects, that indicate the “aboutness” of a text, Examples: *time*, *money*, *stock price*, *phone interview*
Category (Cluster): Inquiry, Description: Referencing inquiry, or language that points to some kind of inquiry or investigation, Examples: *find out*, *let me know if you have any questions*, *wondering if*
Category (Cluster): Interactive, Description: Addresses from the author to the reader or from persons in the text to other persons. The address comes in the language of everyday conversation, colloquy, exchange, questions, attention-getters, feedback, interactive genre markers, and the use of the second person., Examples: *can you*, *thank you for*, *please see*, *sounds good to me*
Category (Cluster): Metadiscourse Cohesive, Description: The use of words to build cohesive markers that help the reader navigate the text and signal linkages in the text, which are often additive or contrastive, Examples: *or*, *but*, *also*, *on the other hand*, *notwithstanding*, *that being said*
Category (Cluster): Metadiscourse Interactive, Description: The use of words to build cohesive markers that interact with the reader, Examples: *I agree*, *let’s talk*, *by the way*
Category (Cluster): Narrative, Description: Language that involves people, description, and events extending in time, Examples: *today*, *tomorrow*, *during the*, *this weekend*
Category (Cluster): Negative, Description: Referencing dimensions of negativity, including negative acts, emotions, relations, and values, Examples: *does not*, *sorry for*, *problems*, *confusion*
Category (Cluster): Positive, Description: Referencing dimensions of positivity, including actions, emotions, relations, and values, Examples: *thanks*, *approval*, *agreement*, *looks good*
Category (Cluster): Public Terms, Description: Referencing public terms, concepts from public language, media, the language of authority, institutions, and responsibility, Examples: *discussion*, *amendment*, *corporation*, *authority*, *settlement*
Category (Cluster): Reasoning, Description: Language that has a reasoning focus, supporting inferences about cause, consequence, generalization, concession, and linear inference either from premise to conclusion or conclusion to premise, Examples: *because*, *therefore*, *analysis*, *even if*, *as a result*, *indicating that*
Category (Cluster): Responsibility, Description: Referencing the language of responsibility, Examples: *supposed to*, *requirements*, *obligations*
Category (Cluster): Strategic, Description: This dimension is active when the text structures strategies activism, advantage-seeking, game-playing cognition, plans, and goal-seeking., Examples: *plan*, *trying to*, *strategy*, *decision*, *coordinate*, *look at the*
Category (Cluster): Syntactic Complexity, Description: The features in this category are often what are called “function words,” like determiners and prepositions., Examples: *the*, *to*, *for*, *in*, *a lot of*
Category (Cluster): Uncertainty, Description: References uncertainty, when confidence levels are unknown, Examples: *kind of*, *I have no idea*, *for some reason*
Category (Cluster): Updates, Description: References updates that anticipate someone searching for information and receiving it, Examples: *already*, *a new*, *now that*, *here are some*
### BibTeX entry and citation info
|
[
"#### How to use\n\n\nThe model was trained on data with tags formatted using IOB), like those used in common tasks like Named Entity Recogition (NER). Thus, you can use this model with a Transformers NER *pipeline*.",
"#### Limitations and bias\n\n\nThis model is limited by its training dataset of American English texts. Moreover, the current version is trained on only a small subset of the corpus. The goal is to train later versions on more data, which should increase accuracy.\n\n\nTraining data\n-------------\n\n\nThis model was fine-tuned on data from the Corpus of Contemporary American English (COCA). The training data contain chunks of text randomly sampled of 5 text-types: Academic, Fiction, Magazine, News, and Spoken.\n\n\nTypically, BERT models are trained on sentence segments. However, DocuScope tags can span setences. Thus, data were split into chunks that don't split B + I sequences and end with sentence-final punctuation marks (i.e., period, quesiton mark or exclamaiton point).\n\n\nAdditionally, the order of the chunks was randomized prior to sampling, and statified sampling was used to provide enough training data for low-frequency caegories. The resulting training data consist of:\n\n\n* 21,460,177 tokens\n* 15,796,305 chunks\n\n\nThe specific counts for each category appear in the following table.\n\n\n\nTraining procedure\n------------------\n\n\nThis model was trained on a single 2.3 GHz Dual-Core Intel Core i5 with recommended hyperparameters from the original BERT paper.\n\n\nEval results\n------------",
"### Overall",
"### By category\n\n\n\nDocuScope Category Descriptions\n-------------------------------\n\n\nCategory (Cluster): Academic Terms, Description: Abstract, rare, specialized, or disciplinary-specific terms that are indicative of informationally dense writing, Examples: *market price*, *storage capacity*, *regulatory*, *distribution*\nCategory (Cluster): Academic Writing Moves, Description: Phrases and terms that indicate academic writing moves, which are common in research genres and are derived from the work of Swales (1981) and Cotos et al. (2015, 2017), Examples: *in the first section*, *the problem is that*, *payment methodology*, *point of contention*\nCategory (Cluster): Character, Description: References multiple dimensions of a character or human being as a social agent, both individual and collective, Examples: *Pauline*, *her*, *personnel*, *representatives*\nCategory (Cluster): Citation, Description: Language that indicates the attribution of information to, or citation of, another source., Examples: *according to*, *is proposing that*, *quotes from*\nCategory (Cluster): Citation Authorized, Description: Referencing the citation of another source that is represented as true and not arguable, Examples: *confirm that*, *provide evidence*, *common sense*\nCategory (Cluster): Citation Hedged, Description: Referencing the citation of another source that is presented as arguable, Examples: *suggest that*, *just one opinion*\nCategory (Cluster): Confidence Hedged, Description: Referencing language that presents a claim as uncertain, Examples: *tends to get*, *maybe*, *it seems that*\nCategory (Cluster): Confidence High, Description: Referencing language that presents a claim with certainty, Examples: *most likely*, *ensure that*, *know that*, *obviously*\nCategory (Cluster): Confidence Low, Description: Referencing language that presents a claim as extremely unlikely, Examples: *unlikely*, *out of the question*, *impossible*\nCategory (Cluster): Contingent, Description: Referencing contingency, typically contingency in the world, rather than contingency in one's knowledge, Examples: *subject to*, *if possible*, *just in case*, *hypothetically*\nCategory (Cluster): Description, Description: Language that evokes sights, sounds, smells, touches and tastes, as well as scenes and objects, Examples: *stay quiet*, *gas-fired*, *solar panels*, *soft*, *on my desk*\nCategory (Cluster): Facilitate, Description: Language that enables or directs one through specific tasks and actions, Examples: *let me*, *worth a try*, *I would suggest*\nCategory (Cluster): First Person, Description: This cluster captures first person., Examples: *I*, *as soon as I*, *we have been*\nCategory (Cluster): Force Stressed, Description: Language that is forceful and stressed, often using emphatics, comparative forms, or superlative forms, Examples: *really good*, *the sooner the better*, *necessary*\nCategory (Cluster): Future, Description: Referencing future actions, states, or desires, Examples: *will be*, *hope to*, *expected changes*\nCategory (Cluster): Information Change, Description: Referencing changes of information, particularly changes that are more neutral, Examples: *changes*, *revised*, *growth*, *modification to*\nCategory (Cluster): Information Change Negative, Description: Referencing negative change, Examples: *going downhill*, *slow erosion*, *get worse*\nCategory (Cluster): Information Change Positive, Description: Referencing positive change, Examples: *improving*, *accrued interest*, *boost 
morale*\nCategory (Cluster): Information Exposition, Description: Information in the form of expository devices, or language that describes or explains, frequently in regards to quantities and comparisons, Examples: *final amount*, *several*, *three*, *compare*, *80%*\nCategory (Cluster): Information Place, Description: Language designating places, Examples: *the city*, *surrounding areas*, *Houston*, *home*\nCategory (Cluster): Information Report Verbs, Description: Informational verbs and verb phrases of reporting, Examples: *report*, *posted*, *release*, *point out*\nCategory (Cluster): Information States, Description: Referencing information states, or states of being, Examples: *is*, *are*, *existing*, *been*\nCategory (Cluster): Information Topics, Description: Referencing topics, usually nominal subjects or objects, that indicate the “aboutness” of a text, Examples: *time*, *money*, *stock price*, *phone interview*\nCategory (Cluster): Inquiry, Description: Referencing inquiry, or language that points to some kind of inquiry or investigation, Examples: *find out*, *let me know if you have any questions*, *wondering if*\nCategory (Cluster): Interactive, Description: Addresses from the author to the reader or from persons in the text to other persons. The address comes in the language of everyday conversation, colloquy, exchange, questions, attention-getters, feedback, interactive genre markers, and the use of the second person., Examples: *can you*, *thank you for*, *please see*, *sounds good to me*\nCategory (Cluster): Metadiscourse Cohesive, Description: The use of words to build cohesive markers that help the reader navigate the text and signal linkages in the text, which are often additive or contrastive, Examples: *or*, *but*, *also*, *on the other hand*, *notwithstanding*, *that being said*\nCategory (Cluster): Metadiscourse Interactive, Description: The use of words to build cohesive markers that interact with the reader, Examples: *I agree*, *let’s talk*, *by the way*\nCategory (Cluster): Narrative, Description: Language that involves people, description, and events extending in time, Examples: *today*, *tomorrow*, *during the*, *this weekend*\nCategory (Cluster): Negative, Description: Referencing dimensions of negativity, including negative acts, emotions, relations, and values, Examples: *does not*, *sorry for*, *problems*, *confusion*\nCategory (Cluster): Positive, Description: Referencing dimensions of positivity, including actions, emotions, relations, and values, Examples: *thanks*, *approval*, *agreement*, *looks good*\nCategory (Cluster): Public Terms, Description: Referencing public terms, concepts from public language, media, the language of authority, institutions, and responsibility, Examples: *discussion*, *amendment*, *corporation*, *authority*, *settlement*\nCategory (Cluster): Reasoning, Description: Language that has a reasoning focus, supporting inferences about cause, consequence, generalization, concession, and linear inference either from premise to conclusion or conclusion to premise, Examples: *because*, *therefore*, *analysis*, *even if*, *as a result*, *indicating that*\nCategory (Cluster): Responsibility, Description: Referencing the language of responsibility, Examples: *supposed to*, *requirements*, *obligations*\nCategory (Cluster): Strategic, Description: This dimension is active when the text structures strategies activism, advantage-seeking, game-playing cognition, plans, and goal-seeking., Examples: *plan*, *trying to*, *strategy*, 
*decision*, *coordinate*, *look at the*\nCategory (Cluster): Syntactic Complexity, Description: The features in this category are often what are called “function words,” like determiners and prepositions., Examples: *the*, *to*, *for*, *in*, *a lot of*\nCategory (Cluster): Uncertainty, Description: References uncertainty, when confidence levels are unknown, Examples: *kind of*, *I have no idea*, *for some reason*\nCategory (Cluster): Updates, Description: References updates that anticipate someone searching for information and receiving it, Examples: *already*, *a new*, *now that*, *here are some*",
"### BibTeX entry and citation info"
] |
[
"TAGS\n#transformers #pytorch #tf #jax #bert #token-classification #en #dataset-COCA #arxiv-1810.04805 #autotrain_compatible #endpoints_compatible #has_space #region-us \n",
"#### How to use\n\n\nThe model was trained on data with tags formatted using IOB), like those used in common tasks like Named Entity Recogition (NER). Thus, you can use this model with a Transformers NER *pipeline*.",
"#### Limitations and bias\n\n\nThis model is limited by its training dataset of American English texts. Moreover, the current version is trained on only a small subset of the corpus. The goal is to train later versions on more data, which should increase accuracy.\n\n\nTraining data\n-------------\n\n\nThis model was fine-tuned on data from the Corpus of Contemporary American English (COCA). The training data contain chunks of text randomly sampled of 5 text-types: Academic, Fiction, Magazine, News, and Spoken.\n\n\nTypically, BERT models are trained on sentence segments. However, DocuScope tags can span setences. Thus, data were split into chunks that don't split B + I sequences and end with sentence-final punctuation marks (i.e., period, quesiton mark or exclamaiton point).\n\n\nAdditionally, the order of the chunks was randomized prior to sampling, and statified sampling was used to provide enough training data for low-frequency caegories. The resulting training data consist of:\n\n\n* 21,460,177 tokens\n* 15,796,305 chunks\n\n\nThe specific counts for each category appear in the following table.\n\n\n\nTraining procedure\n------------------\n\n\nThis model was trained on a single 2.3 GHz Dual-Core Intel Core i5 with recommended hyperparameters from the original BERT paper.\n\n\nEval results\n------------",
"### Overall",
"### By category\n\n\n\nDocuScope Category Descriptions\n-------------------------------\n\n\nCategory (Cluster): Academic Terms, Description: Abstract, rare, specialized, or disciplinary-specific terms that are indicative of informationally dense writing, Examples: *market price*, *storage capacity*, *regulatory*, *distribution*\nCategory (Cluster): Academic Writing Moves, Description: Phrases and terms that indicate academic writing moves, which are common in research genres and are derived from the work of Swales (1981) and Cotos et al. (2015, 2017), Examples: *in the first section*, *the problem is that*, *payment methodology*, *point of contention*\nCategory (Cluster): Character, Description: References multiple dimensions of a character or human being as a social agent, both individual and collective, Examples: *Pauline*, *her*, *personnel*, *representatives*\nCategory (Cluster): Citation, Description: Language that indicates the attribution of information to, or citation of, another source., Examples: *according to*, *is proposing that*, *quotes from*\nCategory (Cluster): Citation Authorized, Description: Referencing the citation of another source that is represented as true and not arguable, Examples: *confirm that*, *provide evidence*, *common sense*\nCategory (Cluster): Citation Hedged, Description: Referencing the citation of another source that is presented as arguable, Examples: *suggest that*, *just one opinion*\nCategory (Cluster): Confidence Hedged, Description: Referencing language that presents a claim as uncertain, Examples: *tends to get*, *maybe*, *it seems that*\nCategory (Cluster): Confidence High, Description: Referencing language that presents a claim with certainty, Examples: *most likely*, *ensure that*, *know that*, *obviously*\nCategory (Cluster): Confidence Low, Description: Referencing language that presents a claim as extremely unlikely, Examples: *unlikely*, *out of the question*, *impossible*\nCategory (Cluster): Contingent, Description: Referencing contingency, typically contingency in the world, rather than contingency in one's knowledge, Examples: *subject to*, *if possible*, *just in case*, *hypothetically*\nCategory (Cluster): Description, Description: Language that evokes sights, sounds, smells, touches and tastes, as well as scenes and objects, Examples: *stay quiet*, *gas-fired*, *solar panels*, *soft*, *on my desk*\nCategory (Cluster): Facilitate, Description: Language that enables or directs one through specific tasks and actions, Examples: *let me*, *worth a try*, *I would suggest*\nCategory (Cluster): First Person, Description: This cluster captures first person., Examples: *I*, *as soon as I*, *we have been*\nCategory (Cluster): Force Stressed, Description: Language that is forceful and stressed, often using emphatics, comparative forms, or superlative forms, Examples: *really good*, *the sooner the better*, *necessary*\nCategory (Cluster): Future, Description: Referencing future actions, states, or desires, Examples: *will be*, *hope to*, *expected changes*\nCategory (Cluster): Information Change, Description: Referencing changes of information, particularly changes that are more neutral, Examples: *changes*, *revised*, *growth*, *modification to*\nCategory (Cluster): Information Change Negative, Description: Referencing negative change, Examples: *going downhill*, *slow erosion*, *get worse*\nCategory (Cluster): Information Change Positive, Description: Referencing positive change, Examples: *improving*, *accrued interest*, *boost 
morale*\nCategory (Cluster): Information Exposition, Description: Information in the form of expository devices, or language that describes or explains, frequently in regards to quantities and comparisons, Examples: *final amount*, *several*, *three*, *compare*, *80%*\nCategory (Cluster): Information Place, Description: Language designating places, Examples: *the city*, *surrounding areas*, *Houston*, *home*\nCategory (Cluster): Information Report Verbs, Description: Informational verbs and verb phrases of reporting, Examples: *report*, *posted*, *release*, *point out*\nCategory (Cluster): Information States, Description: Referencing information states, or states of being, Examples: *is*, *are*, *existing*, *been*\nCategory (Cluster): Information Topics, Description: Referencing topics, usually nominal subjects or objects, that indicate the “aboutness” of a text, Examples: *time*, *money*, *stock price*, *phone interview*\nCategory (Cluster): Inquiry, Description: Referencing inquiry, or language that points to some kind of inquiry or investigation, Examples: *find out*, *let me know if you have any questions*, *wondering if*\nCategory (Cluster): Interactive, Description: Addresses from the author to the reader or from persons in the text to other persons. The address comes in the language of everyday conversation, colloquy, exchange, questions, attention-getters, feedback, interactive genre markers, and the use of the second person., Examples: *can you*, *thank you for*, *please see*, *sounds good to me*\nCategory (Cluster): Metadiscourse Cohesive, Description: The use of words to build cohesive markers that help the reader navigate the text and signal linkages in the text, which are often additive or contrastive, Examples: *or*, *but*, *also*, *on the other hand*, *notwithstanding*, *that being said*\nCategory (Cluster): Metadiscourse Interactive, Description: The use of words to build cohesive markers that interact with the reader, Examples: *I agree*, *let’s talk*, *by the way*\nCategory (Cluster): Narrative, Description: Language that involves people, description, and events extending in time, Examples: *today*, *tomorrow*, *during the*, *this weekend*\nCategory (Cluster): Negative, Description: Referencing dimensions of negativity, including negative acts, emotions, relations, and values, Examples: *does not*, *sorry for*, *problems*, *confusion*\nCategory (Cluster): Positive, Description: Referencing dimensions of positivity, including actions, emotions, relations, and values, Examples: *thanks*, *approval*, *agreement*, *looks good*\nCategory (Cluster): Public Terms, Description: Referencing public terms, concepts from public language, media, the language of authority, institutions, and responsibility, Examples: *discussion*, *amendment*, *corporation*, *authority*, *settlement*\nCategory (Cluster): Reasoning, Description: Language that has a reasoning focus, supporting inferences about cause, consequence, generalization, concession, and linear inference either from premise to conclusion or conclusion to premise, Examples: *because*, *therefore*, *analysis*, *even if*, *as a result*, *indicating that*\nCategory (Cluster): Responsibility, Description: Referencing the language of responsibility, Examples: *supposed to*, *requirements*, *obligations*\nCategory (Cluster): Strategic, Description: This dimension is active when the text structures strategies activism, advantage-seeking, game-playing cognition, plans, and goal-seeking., Examples: *plan*, *trying to*, *strategy*, 
*decision*, *coordinate*, *look at the*\nCategory (Cluster): Syntactic Complexity, Description: The features in this category are often what are called “function words,” like determiners and prepositions., Examples: *the*, *to*, *for*, *in*, *a lot of*\nCategory (Cluster): Uncertainty, Description: References uncertainty, when confidence levels are unknown, Examples: *kind of*, *I have no idea*, *for some reason*\nCategory (Cluster): Updates, Description: References updates that anticipate someone searching for information and receiving it, Examples: *already*, *a new*, *now that*, *here are some*",
"### BibTeX entry and citation info"
] |
[
64,
59,
313,
4,
2103,
11
] |
[
"passage: TAGS\n#transformers #pytorch #tf #jax #bert #token-classification #en #dataset-COCA #arxiv-1810.04805 #autotrain_compatible #endpoints_compatible #has_space #region-us \n#### How to use\n\n\nThe model was trained on data with tags formatted using IOB), like those used in common tasks like Named Entity Recogition (NER). Thus, you can use this model with a Transformers NER *pipeline*.#### Limitations and bias\n\n\nThis model is limited by its training dataset of American English texts. Moreover, the current version is trained on only a small subset of the corpus. The goal is to train later versions on more data, which should increase accuracy.\n\n\nTraining data\n-------------\n\n\nThis model was fine-tuned on data from the Corpus of Contemporary American English (COCA). The training data contain chunks of text randomly sampled of 5 text-types: Academic, Fiction, Magazine, News, and Spoken.\n\n\nTypically, BERT models are trained on sentence segments. However, DocuScope tags can span setences. Thus, data were split into chunks that don't split B + I sequences and end with sentence-final punctuation marks (i.e., period, quesiton mark or exclamaiton point).\n\n\nAdditionally, the order of the chunks was randomized prior to sampling, and statified sampling was used to provide enough training data for low-frequency caegories. The resulting training data consist of:\n\n\n* 21,460,177 tokens\n* 15,796,305 chunks\n\n\nThe specific counts for each category appear in the following table.\n\n\n\nTraining procedure\n------------------\n\n\nThis model was trained on a single 2.3 GHz Dual-Core Intel Core i5 with recommended hyperparameters from the original BERT paper.\n\n\nEval results\n------------### Overall"
] |
[
-0.06179025024175644,
0.05737819895148277,
-0.0023022752720862627,
0.0438421294093132,
0.07790970057249069,
0.03191037476062775,
0.054825812578201294,
0.06086999550461769,
-0.11291786283254623,
0.015417962335050106,
0.018283206969499588,
-0.0077156610786914825,
0.059114329516887665,
0.15438403189182281,
-0.019079571589827538,
-0.2467590719461441,
0.0273280069231987,
-0.04740834981203079,
0.08074448257684708,
0.058420926332473755,
0.10294414311647415,
-0.060713138431310654,
0.05099586397409439,
-0.01233222708106041,
-0.09895295649766922,
0.015227244235575199,
-0.005131837911903858,
-0.032594241201877594,
0.10191035270690918,
0.11628181487321854,
0.11684908717870712,
0.02568092569708824,
0.03564336895942688,
-0.1989145427942276,
0.0383746400475502,
0.08076251298189163,
0.005587737075984478,
0.0716511532664299,
0.06864751130342484,
0.029647812247276306,
0.0898432582616806,
0.01262375246733427,
0.028410160914063454,
0.038277316838502884,
-0.09789882600307465,
0.014782199636101723,
-0.1319912225008011,
0.04194182530045509,
0.049836043268442154,
0.05242273956537247,
-0.011010689660906792,
0.0006323708221316338,
-0.04796813055872917,
0.06214909255504608,
0.15979361534118652,
-0.12306591123342514,
-0.03962993249297142,
-0.06331272423267365,
0.013837123289704323,
0.06081296503543854,
-0.04419367387890816,
-0.059083495289087296,
0.06181451678276062,
0.03263111785054207,
0.006082364823669195,
0.017820939421653748,
-0.10177483409643173,
-0.02690601535141468,
-0.11324983835220337,
-0.03455587849020958,
0.09389793127775192,
-0.0023105612490326166,
-0.08799495548009872,
-0.04883632808923721,
-0.010894987732172012,
-0.02409021183848381,
0.0008340850472450256,
0.025738507509231567,
-0.015451476909220219,
0.010329094715416431,
0.07556161284446716,
-0.01625499501824379,
-0.13220155239105225,
-0.054307859390974045,
-0.10846185684204102,
0.07748709619045258,
0.03060653805732727,
0.05201459676027298,
0.01423535868525505,
0.1171041801571846,
-0.08646564930677414,
-0.014188098721206188,
-0.01811494305729866,
-0.04992295056581497,
-0.08617446571588516,
-0.07082068920135498,
-0.12501850724220276,
-0.11940980702638626,
-0.05239635333418846,
0.10848499834537506,
-0.15548090636730194,
-0.01942398026585579,
0.005428222939372063,
-0.007839709520339966,
0.07878787815570831,
-0.046119604259729385,
-0.07196884602308273,
0.1040070429444313,
-0.019597521051764488,
0.0032579575199633837,
-0.054024651646614075,
0.008816036395728588,
0.011068669147789478,
0.054520074278116226,
-0.006265352480113506,
0.030645711347460747,
-0.046812254935503006,
0.002247823169454932,
-0.05668358504772186,
-0.028342705219984055,
0.1499030739068985,
-0.1130872592329979,
-0.0311869066208601,
0.015613407827913761,
-0.004916108213365078,
0.026822354644536972,
0.01971258781850338,
0.00447047408670187,
-0.06608513742685318,
0.06512279063463211,
-0.059693675488233566,
-0.02860846370458603,
-0.08522740006446838,
-0.11123565584421158,
0.016979726031422615,
-0.05465523526072502,
-0.05230429023504257,
-0.09263218194246292,
-0.2053583264350891,
-0.059207964688539505,
0.02510925941169262,
-0.06176124885678291,
-0.03328423202037811,
-0.03526047244668007,
-0.026783982291817665,
-0.006274322513490915,
-0.008170761168003082,
0.18143385648727417,
-0.05706525221467018,
0.02333986759185791,
-0.10459498316049576,
0.038995981216430664,
-0.06437047570943832,
0.05322389677166939,
-0.041883938014507294,
0.02317081205546856,
-0.026569057255983353,
0.10937117785215378,
0.011649216525256634,
0.04991130903363228,
-0.05227091163396835,
-0.028600869700312614,
-0.0795622169971466,
0.018306327983736992,
0.021332984790205956,
0.08986007422208786,
-0.2914396822452545,
-0.037733402103185654,
0.082989901304245,
-0.11000237613916397,
0.04218251630663872,
0.11473193764686584,
-0.08384422212839127,
0.039123810827732086,
0.0848180428147316,
0.11823124438524246,
0.045000217854976654,
-0.003917216788977385,
-0.0740673840045929,
-0.060119275003671646,
0.006754955276846886,
0.07979113608598709,
0.035931650549173355,
-0.0021122500766068697,
-0.009368553757667542,
0.009013221599161625,
-0.001275755581445992,
0.05237762629985809,
-0.057809118181467056,
-0.03979881480336189,
0.009010892361402512,
-0.07381818443536758,
-0.024735866114497185,
0.004498511087149382,
0.02392813190817833,
-0.04485013708472252,
-0.02959323674440384,
0.03831774368882179,
0.0780395120382309,
-0.019447797909379005,
0.04630635306239128,
-0.04021771252155304,
0.04425966739654541,
0.012034082785248756,
-0.012252863496541977,
-0.19391782581806183,
0.005127508193254471,
0.045036885887384415,
0.055795732885599136,
0.056902091950178146,
0.03020128607749939,
-5.247361514193472e-7,
0.04412554204463959,
-0.049240048974752426,
0.02360336109995842,
0.009829693473875523,
0.006208017934113741,
-0.08803091943264008,
-0.11339651793241501,
-0.059498921036720276,
-0.04910985380411148,
0.03618485480546951,
-0.14355546236038208,
0.014743363484740257,
0.01812884584069252,
0.12474950402975082,
0.028918635100126266,
-0.06325583904981613,
-0.005253021139651537,
0.056560542434453964,
-0.05236296355724335,
-0.01358318142592907,
0.009267076849937439,
0.0462028905749321,
0.01703765243291855,
0.015577820129692554,
-0.062396090477705,
-0.2165055125951767,
0.04556487873196602,
0.0327707938849926,
-0.13055376708507538,
-0.01738564856350422,
-0.06242303550243378,
-0.017624637112021446,
-0.11367207020521164,
-0.056936707347631454,
0.22469152510166168,
0.005119698122143745,
0.10086623579263687,
-0.08092962205410004,
-0.03169039636850357,
-0.03283281996846199,
0.04137348383665085,
-0.0615084245800972,
0.04552780091762543,
-0.0006490215309895575,
-0.12666450440883636,
0.017758792266249657,
-0.04286912456154823,
-0.007156725972890854,
0.18153636157512665,
0.02965935692191124,
-0.13728514313697815,
0.025488117709755898,
-0.02933897078037262,
0.021329136565327644,
0.028535716235637665,
0.019831795245409012,
0.0490691177546978,
0.057246170938014984,
0.023243874311447144,
0.03630721941590309,
-0.11390534788370132,
0.041285108774900436,
0.02582162246108055,
-0.029832730069756508,
-0.07042185962200165,
-0.04499145224690437,
0.008681440725922585,
0.10618453472852707,
0.029852118343114853,
0.029837382957339287,
-0.009556476026773453,
-0.061872128397226334,
-0.091038279235363,
0.15150466561317444,
-0.0799325481057167,
-0.16923488676548004,
-0.12463736534118652,
0.003759178100153804,
0.050138089805841446,
0.018290281295776367,
0.01678461767733097,
-0.05134180560708046,
-0.05627183988690376,
-0.15229259431362152,
0.03411830589175224,
0.03660575672984123,
0.02799868769943714,
-0.015225955285131931,
0.001959301996976137,
-0.018939068540930748,
-0.06690825521945953,
0.046763189136981964,
-0.04008038714528084,
0.0254535973072052,
0.021788163110613823,
-0.06609997898340225,
0.05573292449116707,
0.1280338317155838,
0.008593385107815266,
-0.026291340589523315,
-0.033636972308158875,
0.1715358942747116,
-0.05799201503396034,
0.05021285638213158,
0.0762515515089035,
-0.0664311945438385,
0.024981874972581863,
0.11243495345115662,
0.0096855154260993,
-0.022478491067886353,
0.05878213420510292,
0.04834732785820961,
-0.04399017617106438,
-0.23383770883083344,
-0.046817827969789505,
-0.058827854692935944,
-0.01066944096237421,
0.06515277922153473,
0.004827356431633234,
0.007706589996814728,
0.017237307503819466,
-0.07293703407049179,
0.031069055199623108,
-0.006323369685560465,
0.028305312618613243,
0.040471550077199936,
-0.0026101835537701845,
0.0723528191447258,
-0.03752560913562775,
0.01693711057305336,
0.1412719041109085,
-0.020242303609848022,
0.2279743105173111,
-0.0751870647072792,
0.19882649183273315,
0.03815079852938652,
0.029464377090334892,
0.03180123120546341,
0.10090852528810501,
-0.037518542259931564,
0.007380668539553881,
-0.030385777354240417,
0.006918755359947681,
0.04847060143947601,
0.09293242543935776,
0.0019095021998509765,
0.006994419731199741,
-0.06196582317352295,
-0.07721900194883347,
0.03566282242536545,
0.29981544613838196,
0.035668034106492996,
-0.1839302033185959,
-0.13007310032844543,
0.03258417174220085,
-0.1506894826889038,
-0.06354302167892456,
-0.010953426361083984,
0.16330738365650177,
-0.07854095101356506,
0.04031739756464958,
0.005662132520228624,
0.07624887675046921,
-0.05191998556256294,
-0.024558499455451965,
0.06691176444292068,
0.07778987288475037,
-0.06151328235864639,
0.056306831538677216,
-0.14360643923282623,
0.09883646667003632,
-0.02140219137072563,
0.05887995660305023,
-0.06090552359819412,
-0.003304356010630727,
0.017489833757281303,
-0.019363539293408394,
0.0648137629032135,
-0.01625899225473404,
-0.05330680310726166,
0.029933083802461624,
-0.05230223014950752,
-0.0005992102669551969,
0.0367458276450634,
-0.02144516073167324,
0.08231482654809952,
-0.011456118896603584,
0.02236631140112877,
0.011142896488308907,
0.0020074876956641674,
-0.0874166414141655,
-0.14866913855075836,
0.031647976487874985,
-0.009566284716129303,
0.03004474751651287,
-0.07116737216711044,
-0.10351085662841797,
-0.05350961908698082,
0.15437375009059906,
-0.08285725116729736,
-0.05638919770717621,
-0.11648404598236084,
0.07814806699752808,
0.13328997790813446,
0.0045203291811048985,
0.00912356935441494,
0.03771974518895149,
0.16684763133525848,
-0.03858353942632675,
-0.09161709249019623,
0.02113683894276619,
-0.07860177755355835,
-0.16135075688362122,
-0.09967450052499771,
0.08358913660049438,
0.06490195542573929,
0.0710633173584938,
-0.04959260672330856,
0.018678871914744377,
-0.03230108320713043,
-0.06444000452756882,
-0.03910498321056366,
0.013130350969731808,
0.06402819603681564,
0.1221243366599083,
-0.1215711161494255,
-0.12034798413515091,
-0.09039568156003952,
-0.11480258405208588,
0.08031436055898666,
0.1382296234369278,
-0.05441588535904884,
0.11572794616222382,
0.0216217041015625,
-0.09375206381082535,
-0.1514594703912735,
0.00458634365350008,
0.0733916237950325,
0.056695908308029175,
-0.01810412108898163,
-0.1661188304424286,
-0.005478410515934229,
0.09580695629119873,
0.03962528333067894,
0.04909905418753624,
-0.20638780295848846,
-0.13716864585876465,
0.058804359287023544,
0.026013405993580818,
0.0888422504067421,
-0.07602469623088837,
-0.047665175050497055,
-0.06458459794521332,
0.022380247712135315,
0.15591289103031158,
-0.0690237432718277,
0.07232780754566193,
0.005362601485103369,
-0.010117840953171253,
0.004369667265564203,
-0.027552954852581024,
0.07833923399448395,
0.04373937100172043,
0.04436272010207176,
-0.043946363031864166,
-0.041396573185920715,
-0.032997921109199524,
-0.0020645686890929937,
0.05152956768870354,
0.02632196620106697,
0.04793963581323624,
-0.029613852500915527,
-0.06567706167697906,
-0.0904427170753479,
-0.017579853534698486,
-0.013337397947907448,
-0.024412117898464203,
-0.07162122428417206,
0.07198289036750793,
0.057145413011312485,
0.0044228932820260525,
-0.03286874666810036,
-0.06323179602622986,
0.07343839108943939,
0.12714070081710815,
0.12121488153934479,
-0.061750851571559906,
0.033692389726638794,
0.026961054652929306,
-0.0066298539750278,
0.034514810889959335,
-0.05267484858632088,
0.011751211248338223,
0.12024994939565659,
0.016467800363898277,
0.14110176265239716,
0.06753813475370407,
-0.08949153870344162,
0.01434928085654974,
0.0698009803891182,
-0.14855296909809113,
-0.09740269184112549,
0.0007302594021894038,
0.034334901720285416,
-0.16494134068489075,
-0.053963758051395416,
0.11638150364160538,
-0.04508643224835396,
-0.023372318595647812,
-0.011471999809145927,
0.05525503680109978,
-0.013241519220173359,
0.13505809009075165,
0.0047665187157690525,
0.00015212174912448972,
-0.06870497763156891,
0.0683101937174797,
0.09986836463212967,
-0.049176622182130814,
-0.030426176264882088,
0.10171312093734741,
-0.10194533318281174,
-0.03325299918651581,
-0.005834911484271288,
0.02334750071167946,
-0.11256436258554459,
-0.04740871489048004,
-0.07233401387929916,
-0.11542683839797974,
0.11300375312566757,
0.16710643470287323,
0.010558078996837139,
0.08751694113016129,
-0.05360233411192894,
-0.049710698425769806,
-0.03984041139483452,
0.047274596989154816,
0.013041123747825623,
0.03898622840642929,
-0.002499511232599616,
0.1807773858308792,
-0.05460813269019127,
0.03261210396885872,
-0.019749630242586136,
0.0012982606422156096,
-0.02699877880513668,
-0.004324574023485184,
-0.10412286967039108,
0.020070817321538925,
-0.10150899738073349,
-0.004806896205991507,
0.027252309024333954,
0.010841632261872292,
-0.00036383443512022495,
-0.0016028244281187654,
-0.033898402005434036,
-0.012652629986405373,
-0.056416675448417664,
0.06517647206783295,
-0.055308084934949875,
0.009334503673017025,
0.04081738740205765,
-0.04033701866865158,
0.08206955343484879,
-0.0059455749578773975,
-0.024601563811302185,
0.0007504558889195323,
-0.0777047798037529,
-0.0056569515727460384,
-0.01881365291774273,
0.045931410044431686,
-0.0180406104773283,
-0.11202747374773026,
-0.007582783233374357,
0.02956586703658104,
-0.02919461391866207,
0.020745068788528442,
0.07020388543605804,
-0.06310199946165085,
0.018128186464309692,
0.040730178356170654,
0.011301293969154358,
-0.08958090841770172,
0.023336948826909065,
0.0763653814792633,
0.025751566514372826,
0.11884629726409912,
-0.03244265168905258,
0.015257958322763443,
-0.12584300339221954,
0.01889217458665371,
0.008637824095785618,
-0.024315427988767624,
-0.011460844427347183,
-0.005182791035622358,
0.060292813926935196,
0.043795146048069,
0.16327069699764252,
-0.08015911281108856,
-0.03348877653479576,
0.040755338966846466,
-0.051165301352739334,
0.0440966859459877,
0.07586836814880371,
0.17044883966445923,
0.012736024335026741,
-0.06400454789400101,
-0.06286803632974625,
-0.012251018546521664,
0.000603850232437253,
0.10249123722314835,
0.19896671175956726,
0.11898354440927505,
0.1262911856174469,
0.06921451538801193,
-0.007842510007321835,
-0.0938894972205162,
0.06900098919868469,
0.05276917293667793,
-0.04095657542347908,
0.04794589802622795,
-0.00786800030618906,
0.08271954953670502,
0.08268777281045914,
-0.20563159883022308,
0.10019472241401672,
-0.027697650715708733,
-0.0684790089726448,
-0.0011752384016290307,
-0.04920842498540878,
-0.006835067644715309,
-0.04585832357406616,
-0.022043956443667412,
-0.1424718201160431,
0.05068805441260338,
0.11816063523292542,
0.007465400267392397,
0.013655662536621094,
0.08688991516828537,
-0.07946773618459702,
-0.0753588080406189,
0.10740015655755997,
-0.024492010474205017,
0.022465143352746964,
-0.039293915033340454,
-0.028971143066883087,
0.06732778251171112,
-0.04467358440160751,
0.01284093875437975,
0.02534470148384571,
0.07557297497987747,
0.028595587238669395,
0.026908043771982193,
-0.0519428551197052,
0.012927855364978313,
-0.023874489590525627,
0.04264761880040169,
0.15553319454193115,
0.07580505311489105,
0.015268358401954174,
0.040116168558597565,
0.10247091203927994,
-0.026145827025175095,
-0.019213085994124413,
-0.21457292139530182,
0.05275210365653038,
0.02906735986471176,
-0.01801348477602005,
0.06240229308605194,
-0.09556972235441208,
0.01338224671781063,
0.1837213635444641,
0.10595262050628662,
-0.09446161985397339,
-0.027722027152776718,
0.036105088889598846,
-0.0012403810396790504,
0.028533654287457466,
0.06187884137034416,
0.06012454628944397,
0.1729276180267334,
-0.0721321552991867,
0.0035308278165757656,
0.023328620940446854,
0.005557001102715731,
-0.007233733776956797,
0.1016353890299797,
-0.03700653836131096,
-0.006299895700067282,
-0.04532419145107269,
0.06847099214792252,
-0.05945884808897972,
-0.2281784862279892,
0.016519783064723015,
-0.053790055215358734,
-0.10129598528146744,
-0.03930220752954483,
-0.06068049371242523,
-0.015358150005340576,
0.051197778433561325,
-0.017124217003583908,
0.012728001922369003,
0.17976243793964386,
-0.005283711943775415,
-0.02962425909936428,
-0.11237703263759613,
0.11324653029441833,
-0.07207611948251724,
0.06258634477853775,
-0.018897799775004387,
0.07912416011095047,
0.04416725039482117,
0.035086892545223236,
-0.07210446894168854,
0.04786348715424538,
0.005528632551431656,
0.0023018831852823496,
0.051813092082738876,
0.14064042270183563,
-0.0006329550524242222,
0.09816139936447144,
0.024106403812766075,
-0.07785940170288086,
0.08721593022346497,
0.008679910562932491,
-0.05228941887617111,
-0.054302629083395004,
0.01976737752556801,
-0.0910312831401825,
0.1733260452747345,
0.18970589339733124,
-0.026422595605254173,
0.008655446581542492,
0.005207394249737263,
0.010695409961044788,
-0.0062937489710748196,
0.11918283998966217,
-0.031475722789764404,
-0.2038872390985489,
0.023683475330471992,
-0.021071646362543106,
0.04779786244034767,
-0.23629941046237946,
-0.05445537343621254,
-0.015258911065757275,
-0.07434716075658798,
-0.02289072424173355,
0.07345610111951828,
0.094227135181427,
0.01995157264173031,
-0.05031723529100418,
0.03802382946014404,
0.005252827890217304,
0.06748570501804352,
-0.04925565794110298,
-0.07649383693933487
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# biobertpt-all-finetuned-ner
This model is a fine-tuned version of [pucpr/biobertpt-all](https://huggingface.co/pucpr/biobertpt-all) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 2.3721
- Precision: 0.0179
- Recall: 0.0149
- F1: 0.0163
- Accuracy: 0.6790
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 2e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 3
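For reference, a hedged sketch of how this configuration maps onto the Hugging Face `TrainingArguments`/`Trainer` API. The NER dataset and label set are not documented on this card, so `num_labels` and the dataset objects below are placeholders.

```python
# Hedged sketch only: dataset objects and num_labels are placeholders.
from transformers import (
    AutoTokenizer,
    AutoModelForTokenClassification,
    TrainingArguments,
)

base_model = "pucpr/biobertpt-all"
tokenizer = AutoTokenizer.from_pretrained(base_model)
model = AutoModelForTokenClassification.from_pretrained(base_model, num_labels=9)  # 9 is an assumed label count

training_args = TrainingArguments(
    output_dir="biobertpt-all-finetuned-ner",
    learning_rate=2e-5,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=16,
    num_train_epochs=3,
    seed=42,
    lr_scheduler_type="linear",
    # Adam with betas=(0.9, 0.999) and epsilon=1e-08 is the Trainer's default optimizer.
)
# Trainer(model=model, args=training_args, tokenizer=tokenizer,
#         train_dataset=..., eval_dataset=...).train()
```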
### Training results
| Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:---------:|:------:|:------:|:--------:|
| No log | 1.0 | 1 | 2.7864 | 0.0091 | 0.0448 | 0.0152 | 0.3339 |
| No log | 2.0 | 2 | 2.5096 | 0.0097 | 0.0149 | 0.0118 | 0.6292 |
| No log | 3.0 | 3 | 2.3721 | 0.0179 | 0.0149 | 0.0163 | 0.6790 |
### Framework versions
- Transformers 4.12.0.dev0
- Pytorch 1.9.1+cu102
- Datasets 1.13.3
- Tokenizers 0.10.3
|
{"tags": ["generated_from_trainer"], "metrics": ["precision", "recall", "f1", "accuracy"], "model-index": [{"name": "biobertpt-all-finetuned-ner", "results": []}]}
|
token-classification
|
brunodorneles/biobertpt-all-finetuned-ner
|
[
"transformers",
"pytorch",
"bert",
"token-classification",
"generated_from_trainer",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #bert #token-classification #generated_from_trainer #autotrain_compatible #endpoints_compatible #region-us
|
biobertpt-all-finetuned-ner
===========================
This model is a fine-tuned version of pucpr/biobertpt-all on an unknown dataset.
It achieves the following results on the evaluation set:
* Loss: 2.3721
* Precision: 0.0179
* Recall: 0.0149
* F1: 0.0163
* Accuracy: 0.6790
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 2e-05
* train\_batch\_size: 16
* eval\_batch\_size: 16
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 3
### Training results
### Framework versions
* Transformers 4.12.0.dev0
* Pytorch 1.9.1+cu102
* Datasets 1.13.3
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 3",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.12.0.dev0\n* Pytorch 1.9.1+cu102\n* Datasets 1.13.3\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #bert #token-classification #generated_from_trainer #autotrain_compatible #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 3",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.12.0.dev0\n* Pytorch 1.9.1+cu102\n* Datasets 1.13.3\n* Tokenizers 0.10.3"
] |
[
44,
98,
4,
37
] |
[
"passage: TAGS\n#transformers #pytorch #bert #token-classification #generated_from_trainer #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 3### Training results### Framework versions\n\n\n* Transformers 4.12.0.dev0\n* Pytorch 1.9.1+cu102\n* Datasets 1.13.3\n* Tokenizers 0.10.3"
] |
[
-0.08574707061052322,
0.033294886350631714,
-0.002179153962060809,
0.11498131603002548,
0.22072277963161469,
0.03585495054721832,
0.1049187108874321,
0.08272372931241989,
-0.12564755976200104,
0.021849066019058228,
0.11364040523767471,
0.1781337857246399,
-0.01588957943022251,
0.10135847330093384,
-0.06571927666664124,
-0.2594590187072754,
-0.01835458166897297,
0.03133433684706688,
-0.09304870665073395,
0.12647230923175812,
0.08835294842720032,
-0.1562059223651886,
0.08105804026126862,
-0.0076107122004032135,
-0.267911821603775,
0.02656073495745659,
0.03093671053647995,
-0.06173355132341385,
0.1482468545436859,
0.002661587204784155,
0.17615772783756256,
-0.014227665029466152,
0.11167348176240921,
-0.1597781777381897,
0.004098556470125914,
0.0567711666226387,
0.016420463100075722,
0.07185067236423492,
0.05554269254207611,
-0.013961143791675568,
0.08719369769096375,
-0.09294337779283524,
0.07557564973831177,
0.006679212208837271,
-0.13297432661056519,
-0.21085162460803986,
-0.07660864293575287,
-0.0016059884801506996,
0.060565341264009476,
0.08755442500114441,
-0.003234084229916334,
0.1607544720172882,
-0.13129951059818268,
0.09633883833885193,
0.23857395350933075,
-0.255024254322052,
-0.07947016507387161,
0.04086463898420334,
-0.020593050867319107,
0.07493667304515839,
-0.10776294767856598,
-0.029550375416874886,
0.06349259614944458,
0.05940477177500725,
0.12353094667196274,
-0.036531779915094376,
-0.13853925466537476,
0.024800600484013557,
-0.15563833713531494,
0.008129812777042389,
0.059090495109558105,
0.010731811635196209,
-0.02560407482087612,
0.02187238447368145,
-0.06666076928377151,
-0.15363557636737823,
-0.04125670716166496,
-0.03433184325695038,
0.053516943007707596,
-0.06147613003849983,
-0.08993519097566605,
0.02952231653034687,
-0.08917497843503952,
-0.06567558646202087,
-0.06966308504343033,
0.2088550329208374,
0.04719529673457146,
0.020332718268036842,
-0.03578594699501991,
0.10522425174713135,
-0.01618979685008526,
-0.1266278475522995,
0.04064418002963066,
0.02620083838701248,
-0.01702183485031128,
-0.07364368438720703,
-0.0845923125743866,
-0.041366420686244965,
-0.002894582226872444,
0.12019089609384537,
-0.05445260927081108,
0.048433803021907806,
0.04777102172374725,
0.02231699787080288,
-0.08868124336004257,
0.19863176345825195,
-0.03026539832353592,
-0.01703263819217682,
0.01084715873003006,
0.03426365926861763,
-0.019264571368694305,
-0.008985348977148533,
-0.10241436213254929,
0.006796198897063732,
0.12215638160705566,
0.008773908950388432,
-0.09996157139539719,
0.05530187860131264,
-0.04071754962205887,
-0.015091811306774616,
-0.020944999530911446,
-0.10437626391649246,
0.05788082256913185,
-0.011317770928144455,
-0.08706549555063248,
0.007543760351836681,
0.0041982196271419525,
0.02396366558969021,
-0.004806093871593475,
0.17740997672080994,
-0.09290032088756561,
0.05934976041316986,
-0.11848591268062592,
-0.11649183183908463,
-0.004403416533023119,
-0.05599473416805267,
0.026509881019592285,
-0.10289780050516129,
-0.13228783011436462,
-0.014879736118018627,
0.04431144520640373,
-0.026159044355154037,
-0.033907778561115265,
-0.04731268063187599,
-0.08516120910644531,
0.004641582723706961,
-0.019363176077604294,
0.15339598059654236,
-0.05373377352952957,
0.10359236598014832,
0.06135264039039612,
0.06674215197563171,
-0.07266418635845184,
0.054425884038209915,
-0.09283299744129181,
-0.00452326750382781,
-0.24031654000282288,
0.04342271015048027,
-0.0578366219997406,
0.06576213985681534,
-0.06098579242825508,
-0.12253334373235703,
0.046663809567689896,
-0.008203159086406231,
0.09538056701421738,
0.07892399281263351,
-0.1602635532617569,
-0.0816149190068245,
0.12749315798282623,
-0.06480037420988083,
-0.08185068517923355,
0.10191327333450317,
-0.07507898658514023,
0.02087729051709175,
0.08629047125577927,
0.13937297463417053,
0.036219336092472076,
-0.07795631885528564,
0.02069086953997612,
-0.0396401584148407,
0.056336890906095505,
-0.04992720112204552,
0.03817509487271309,
0.028281306847929955,
-0.009466608986258507,
0.030626598745584488,
-0.03515760228037834,
0.06109471246600151,
-0.13083204627037048,
-0.08365534991025925,
-0.03593355044722557,
-0.1004098653793335,
0.054494936019182205,
0.06724905222654343,
0.09579387307167053,
-0.10652191936969757,
-0.05678794905543327,
0.13603386282920837,
0.060989879071712494,
-0.03847412019968033,
0.029966779053211212,
-0.0539296418428421,
0.060444846749305725,
-0.04474783316254616,
-0.03746289014816284,
-0.2066301703453064,
-0.04849487543106079,
0.011754545383155346,
0.0517057366669178,
0.034454114735126495,
-0.0050093140453100204,
0.07331312447786331,
0.07980994135141373,
-0.05866840481758118,
-0.0018054997781291604,
-0.046754274517297745,
0.00013768200005870312,
-0.1567946821451187,
-0.19597820937633514,
-0.030448485165834427,
-0.007235149387270212,
0.08950912207365036,
-0.196833074092865,
0.01936146430671215,
-0.03754182159900665,
0.09509456902742386,
0.002394323004409671,
-0.006959832739084959,
-0.08349639922380447,
0.12092719972133636,
-0.020392918959259987,
-0.044624608010053635,
0.06147528812289238,
-0.026199810206890106,
-0.06008165329694748,
-0.08904282003641129,
-0.08183056861162186,
0.19213683903217316,
0.13748383522033691,
-0.17725254595279694,
-0.10309228301048279,
0.020724790170788765,
-0.05501440912485123,
-0.014625022187829018,
-0.0600549653172493,
0.051691245287656784,
0.20127423107624054,
-0.013482670299708843,
0.1427735537290573,
-0.05425586551427841,
-0.039031755179166794,
0.003421296365559101,
-0.030669737607240677,
0.05983404815196991,
0.10303463041782379,
0.15604493021965027,
-0.058965008705854416,
0.1276164948940277,
0.14701171219348907,
-0.11793574690818787,
0.0975097045302391,
-0.024725740775465965,
-0.06717932969331741,
-0.018117358908057213,
-0.05326202139258385,
-0.007107182871550322,
0.08574842661619186,
-0.09775900095701218,
-0.01626765727996826,
-0.0009771197801455855,
0.03402768820524216,
0.01946544088423252,
-0.23708787560462952,
-0.04304438456892967,
0.02602894976735115,
-0.003704569535329938,
-0.00725855166092515,
-0.027572743594646454,
0.03389887511730194,
0.11764697730541229,
0.0011914119822904468,
-0.08802776783704758,
0.02254614420235157,
0.007098565809428692,
-0.0678364709019661,
0.2134837806224823,
-0.09313873946666718,
-0.08396995812654495,
-0.09063097089529037,
-0.09957566112279892,
-0.04914283752441406,
0.015095793642103672,
0.04633362591266632,
-0.12578658759593964,
-0.027834219858050346,
-0.01864064484834671,
0.047325197607278824,
0.005166393239051104,
0.05826205760240555,
0.007940680719912052,
-0.014736885204911232,
0.05646192282438278,
-0.105960413813591,
-0.014873543754220009,
-0.07518399506807327,
-0.08969918638467789,
0.06432941555976868,
0.07001975178718567,
0.11013452708721161,
0.1685953438282013,
-0.04715237393975258,
0.000025233786800527014,
-0.023421745747327805,
0.27951762080192566,
-0.06288733333349228,
-0.05355367064476013,
0.09572754055261612,
-0.02654837816953659,
0.05059071630239487,
0.09660188853740692,
0.0824151411652565,
-0.1160077378153801,
0.019492991268634796,
0.04233264923095703,
-0.0343753956258297,
-0.20374764502048492,
-0.043247662484645844,
-0.03826248645782471,
-0.07810012251138687,
0.07251295447349548,
0.008972959592938423,
0.014679976738989353,
0.05874541029334068,
0.06329406797885895,
0.10753832757472992,
-0.06991289556026459,
0.05026654899120331,
0.11493580043315887,
0.054667744785547256,
0.12988446652889252,
-0.024894896894693375,
-0.09567024558782578,
0.025500906631350517,
-0.055424824357032776,
0.22985942661762238,
0.006887319963425398,
0.03561151772737503,
0.04569721221923828,
0.1750505119562149,
0.004996976815164089,
0.08960584551095963,
0.012565625831484795,
-0.053739484399557114,
-0.00580159667879343,
-0.03448297828435898,
-0.04604972526431084,
0.011221260763704777,
-0.05195258557796478,
0.0569409616291523,
-0.13184155523777008,
-0.011948646046221256,
0.06372808665037155,
0.2402798980474472,
0.01757066883146763,
-0.31604668498039246,
-0.0622406005859375,
-0.0030199233442544937,
-0.03738372400403023,
-0.0037452327087521553,
0.009971121326088905,
0.08214394748210907,
-0.10251060873270035,
0.026470601558685303,
-0.046814028173685074,
0.0925217941403389,
-0.01889006979763508,
0.049441806972026825,
0.06349195539951324,
0.12073231488466263,
-0.0017091992776840925,
0.05114637687802315,
-0.3070346415042877,
0.2634674608707428,
0.009445558302104473,
0.08057984709739685,
-0.058145828545093536,
-0.011654289439320564,
0.03830965980887413,
0.06582538783550262,
-0.00024364879936911166,
-0.010644407942891121,
-0.024445699527859688,
-0.24382850527763367,
-0.030638599768280983,
0.044258423149585724,
0.13365037739276886,
-0.014796745032072067,
0.10345812886953354,
-0.01779002882540226,
0.005410708021372557,
0.08349651098251343,
-0.025364546105265617,
-0.07600059360265732,
-0.049888189882040024,
-0.038017306476831436,
0.011146202683448792,
-0.04077397286891937,
-0.050310567021369934,
-0.12335960566997528,
-0.12032702565193176,
0.12987130880355835,
0.0201577078551054,
-0.020197072997689247,
-0.1247793585062027,
0.11487389355897903,
0.08198875933885574,
-0.08190113306045532,
0.04406411573290825,
0.025543197989463806,
0.049100931733846664,
0.043778207153081894,
-0.06534523516893387,
0.11066820472478867,
-0.0559898316860199,
-0.16362547874450684,
-0.062170468270778656,
0.07390934973955154,
0.05427972972393036,
0.06822264939546585,
-0.024525564163923264,
0.027102096006274223,
-0.013393049128353596,
-0.09585569053888321,
0.019213629886507988,
-0.020927147939801216,
0.0605730302631855,
0.05545017123222351,
-0.053417325019836426,
0.01692192628979683,
-0.06333763152360916,
-0.006766193080693483,
0.178842693567276,
0.25276684761047363,
-0.08562466502189636,
-0.019655920565128326,
0.0224666278809309,
-0.06157684326171875,
-0.18320061266422272,
0.09464381635189056,
0.09393109381198883,
-0.004170246422290802,
0.036324914544820786,
-0.1638360172510147,
0.17911045253276825,
0.11311426013708115,
0.0028746218886226416,
0.10883774608373642,
-0.2808826267719269,
-0.13678708672523499,
0.10250274091959,
0.1677665114402771,
0.1598365753889084,
-0.1342344880104065,
0.0013755284016951919,
-0.03198333457112312,
-0.13187697529792786,
0.10732948780059814,
-0.0584261454641819,
0.10839500278234482,
-0.023392511531710625,
0.11660736799240112,
0.0013129935832694173,
-0.0477880984544754,
0.10459574311971664,
0.04458526521921158,
0.12196251004934311,
-0.05385001376271248,
-0.06260603666305542,
0.021763432770967484,
-0.021029498428106308,
-0.025808341801166534,
-0.008362810127437115,
0.017940882593393326,
-0.07058487832546234,
-0.01129599753767252,
-0.09245789051055908,
0.03401319310069084,
-0.031168809160590172,
-0.06749173253774643,
-0.02946830354630947,
0.017028165981173515,
0.036737244576215744,
-0.023533621802926064,
0.10461712628602982,
0.022892996668815613,
0.16594329476356506,
0.05382145196199417,
0.05132715031504631,
-0.09325584024190903,
-0.031641144305467606,
0.007858602330088615,
-0.01082677859812975,
0.06887488812208176,
-0.11930865049362183,
0.02704593725502491,
0.15419480204582214,
0.021009517833590508,
0.12052498012781143,
0.10376930981874466,
-0.00941264908760786,
0.009922864846885204,
0.06921786069869995,
-0.16041822731494904,
-0.025722820311784744,
0.012464425526559353,
-0.08214294165372849,
-0.07777516543865204,
0.057390015572309494,
0.09033871442079544,
-0.07601459324359894,
-0.02171550691127777,
-0.02326861396431923,
-0.019058745354413986,
-0.07839643955230713,
0.22376902401447296,
0.07937409728765488,
0.042232852429151535,
-0.11512424051761627,
0.05501468852162361,
0.05792493000626564,
-0.053482409566640854,
-0.005271948408335447,
0.08284226804971695,
-0.07469281554222107,
-0.011763123795390129,
0.126795694231987,
0.21308907866477966,
-0.09646483510732651,
-0.01457120943814516,
-0.13914348185062408,
-0.11463214457035065,
0.0718182697892189,
0.19720196723937988,
0.1253376454114914,
-0.006339407060295343,
-0.04807649552822113,
0.03896945342421532,
-0.13624799251556396,
0.05544903874397278,
0.03654300421476364,
0.08834608644247055,
-0.13760323822498322,
0.2051357924938202,
0.0054458449594676495,
0.04859163612127304,
-0.033658191561698914,
0.03408975526690483,
-0.12509387731552124,
0.02787935361266136,
-0.1282351016998291,
-0.059763140976428986,
0.010937495157122612,
-0.0010605136631056666,
0.010378647595643997,
-0.08414772152900696,
-0.06393129378557205,
0.012442400678992271,
-0.1318516731262207,
-0.020550452172756195,
0.039174679666757584,
0.029546381905674934,
-0.12358888983726501,
-0.04451756924390793,
0.026275457814335823,
-0.056205879896879196,
0.04990920051932335,
0.072211354970932,
0.022129636257886887,
0.0905223861336708,
-0.16073119640350342,
-0.0383937805891037,
0.07970191538333893,
-0.0009589440305717289,
0.11316674947738647,
-0.05437195301055908,
-0.006368640810251236,
-0.008582944050431252,
0.11978026479482651,
0.02763029746711254,
0.09036856144666672,
-0.13227373361587524,
0.0002957615943159908,
-0.03430505841970444,
-0.1123104989528656,
-0.04745245352387428,
-0.0022190760355442762,
0.0877269059419632,
0.01258427556604147,
0.17972847819328308,
-0.07911381870508194,
0.05657210573554039,
-0.20253758132457733,
-0.0175416748970747,
-0.025447649881243706,
-0.10128291696310043,
-0.12884421646595,
-0.06151459738612175,
0.07750260084867477,
-0.043845273554325104,
0.13143843412399292,
0.041792746633291245,
0.08544405549764633,
0.03153499588370323,
-0.022223781794309616,
-0.010607320815324783,
0.039702046662569046,
0.17734940350055695,
0.06698792427778244,
-0.03878585994243622,
0.07120352238416672,
0.07889685034751892,
0.11871693283319473,
0.08203288167715073,
0.23145978152751923,
0.1330566257238388,
-0.023013940081000328,
0.08407437056303024,
0.03089417889714241,
-0.0588361881673336,
-0.15404823422431946,
-0.011659447103738785,
-0.056316740810871124,
0.06918223947286606,
-0.030092675238847733,
0.19620934128761292,
0.031047377735376358,
-0.1609182208776474,
0.04150836914777756,
-0.07281506806612015,
-0.09235402941703796,
-0.1179637461900711,
0.05692053958773613,
-0.08422736823558807,
-0.15686842799186707,
0.01475649792701006,
-0.11218216270208359,
0.016273630782961845,
0.1343720257282257,
0.012371564283967018,
-0.008257798850536346,
0.17443272471427917,
0.02720535174012184,
0.04927535355091095,
0.0466097854077816,
0.0005239838501438498,
-0.019517352804541588,
-0.09061490744352341,
-0.07153396308422089,
-0.04149410501122475,
-0.0042979964055120945,
0.03664267435669899,
-0.06570661067962646,
-0.11256468296051025,
0.032028187066316605,
-0.019210513681173325,
-0.10419058799743652,
0.03288797289133072,
0.019995076581835747,
0.05835463106632233,
0.026716850697994232,
-0.00512345926836133,
0.02591623365879059,
-0.02741910144686699,
0.21438603103160858,
-0.08546163886785507,
-0.08965221792459488,
-0.09284648299217224,
0.3153691589832306,
0.05801083892583847,
0.01868157461285591,
0.023583944886922836,
-0.0678633525967598,
-0.02045915648341179,
0.22024956345558167,
0.17474447190761566,
-0.13317455351352692,
-0.007468425203114748,
-0.0043083736672997475,
-0.015444889664649963,
-0.03230234235525131,
0.14165043830871582,
0.14104105532169342,
0.0376712866127491,
-0.1087118536233902,
-0.04128853231668472,
-0.0630766972899437,
-0.012084861285984516,
-0.037510816007852554,
0.02578422985970974,
0.055124860256910324,
0.016946975141763687,
-0.05697031691670418,
0.05514606833457947,
-0.0706215426325798,
-0.10936151444911957,
0.07869172096252441,
-0.20852625370025635,
-0.16931094229221344,
-0.0029540571849793196,
0.09533697366714478,
-0.005929530132561922,
0.0794503465294838,
-0.029145941138267517,
-0.018643712624907494,
0.0597352460026741,
-0.027470093220472336,
-0.061909984797239304,
-0.10781529545783997,
0.12222912907600403,
-0.11008471250534058,
0.17922452092170715,
-0.04285053536295891,
0.09826033562421799,
0.11578600108623505,
0.07779998332262039,
-0.04995230212807655,
0.05066172778606415,
0.04351970553398132,
-0.11583543568849564,
0.004253310151398182,
0.11038024723529816,
-0.04208793491125107,
0.045101020485162735,
0.033856045454740524,
-0.15530383586883545,
0.027953999117016792,
-0.052306078374385834,
-0.04391750693321228,
-0.03817050904035568,
-0.07025119662284851,
-0.06603924185037613,
0.10920765995979309,
0.23452770709991455,
-0.012081297114491463,
0.036737110465765,
-0.08444595336914062,
0.01100365910679102,
0.04798204451799393,
0.043029800057411194,
-0.11114457249641418,
-0.2600107789039612,
0.020810222253203392,
0.09881048649549484,
-0.045046139508485794,
-0.19496488571166992,
-0.08550649136304855,
0.012190740555524826,
-0.06982191652059555,
-0.09630292654037476,
0.09387215971946716,
0.047301698476076126,
0.050657693296670914,
-0.049609169363975525,
-0.142581969499588,
-0.08669613301753998,
0.16258525848388672,
-0.1494792252779007,
-0.09581559896469116
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# wav2vec2-base-timit-demo-colab
This model is a fine-tuned version of [facebook/wav2vec2-base](https://huggingface.co/facebook/wav2vec2-base) on the None dataset.
It achieves the following results on the evaluation set:
- Loss: 0.4779
- Wer: 0.3453
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 0.0001
- train_batch_size: 32
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 1000
- num_epochs: 30
- mixed_precision_training: Native AMP
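Expressed as Hugging Face `TrainingArguments`, this configuration corresponds roughly to the sketch below ("Native AMP" maps to `fp16=True` on a CUDA device; the audio data pipeline, Wav2Vec2 processor, and Trainer call are omitted).

```python
# Hedged sketch only: data loading, processor setup, and the Trainer call are omitted.
import torch
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="wav2vec2-base-timit-demo-colab",
    learning_rate=1e-4,
    per_device_train_batch_size=32,
    per_device_eval_batch_size=8,
    num_train_epochs=30,
    warmup_steps=1000,
    lr_scheduler_type="linear",
    seed=42,
    fp16=torch.cuda.is_available(),  # "Native AMP" mixed-precision training when a GPU is present
    # Adam with betas=(0.9, 0.999) and epsilon=1e-08 is the Trainer's default optimizer.
)
```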
### Training results
| Training Loss | Epoch | Step | Validation Loss | Wer |
|:-------------:|:-----:|:----:|:---------------:|:------:|
| 3.4307 | 4.0 | 500 | 1.4129 | 0.9980 |
| 0.626 | 8.0 | 1000 | 0.4605 | 0.4499 |
| 0.2199 | 12.0 | 1500 | 0.4457 | 0.3898 |
| 0.1303 | 16.0 | 2000 | 0.4418 | 0.3771 |
| 0.0851 | 20.0 | 2500 | 0.4647 | 0.3548 |
| 0.0604 | 24.0 | 3000 | 0.4603 | 0.3499 |
| 0.0461 | 28.0 | 3500 | 0.4779 | 0.3453 |
### Framework versions
- Transformers 4.11.3
- Pytorch 1.10.0+cu111
- Datasets 1.18.3
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "model-index": [{"name": "wav2vec2-base-timit-demo-colab", "results": []}]}
|
automatic-speech-recognition
|
bryan6aero/wav2vec2-base-timit-demo-colab
|
[
"transformers",
"pytorch",
"tensorboard",
"wav2vec2",
"automatic-speech-recognition",
"generated_from_trainer",
"license:apache-2.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us
|
wav2vec2-base-timit-demo-colab
==============================
This model is a fine-tuned version of facebook/wav2vec2-base on the None dataset.
It achieves the following results on the evaluation set:
* Loss: 0.4779
* Wer: 0.3453
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 0.0001
* train\_batch\_size: 32
* eval\_batch\_size: 8
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* lr\_scheduler\_warmup\_steps: 1000
* num\_epochs: 30
* mixed\_precision\_training: Native AMP
### Training results
### Framework versions
* Transformers 4.11.3
* Pytorch 1.10.0+cu111
* Datasets 1.18.3
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0001\n* train\\_batch\\_size: 32\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 1000\n* num\\_epochs: 30\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.10.0+cu111\n* Datasets 1.18.3\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0001\n* train\\_batch\\_size: 32\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 1000\n* num\\_epochs: 30\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.10.0+cu111\n* Datasets 1.18.3\n* Tokenizers 0.10.3"
] |
[
56,
130,
4,
35
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0001\n* train\\_batch\\_size: 32\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 1000\n* num\\_epochs: 30\n* mixed\\_precision\\_training: Native AMP### Training results### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.10.0+cu111\n* Datasets 1.18.3\n* Tokenizers 0.10.3"
] |
[
-0.10833754390478134,
0.10381042957305908,
-0.003447136841714382,
0.05311182141304016,
0.10943093150854111,
-0.02224290370941162,
0.12992502748966217,
0.1490924060344696,
-0.11156157404184341,
0.07095726579427719,
0.12520445883274078,
0.1469612419605255,
0.044384390115737915,
0.1459488868713379,
-0.05123339593410492,
-0.2855369746685028,
0.048294976353645325,
0.03550826758146286,
-0.020840534940361977,
0.12408933788537979,
0.08524196594953537,
-0.1261489987373352,
0.05181831493973732,
0.03754477575421333,
-0.1591220498085022,
-0.001641957787796855,
-0.008117086254060268,
-0.10824380815029144,
0.11797899007797241,
0.013362843543291092,
0.07320088893175125,
0.048765409737825394,
0.06339815258979797,
-0.21467654407024384,
0.008721605874598026,
0.045480094850063324,
0.027293900027871132,
0.07399290800094604,
0.06101059168577194,
-0.0253707654774189,
0.12154541909694672,
-0.07785171270370483,
0.08432452380657196,
0.03452400863170624,
-0.10040441900491714,
-0.295693039894104,
-0.0883895605802536,
0.047700464725494385,
0.07843475788831711,
0.08981457352638245,
-0.00999368354678154,
0.1470525562763214,
-0.057681191712617874,
0.11329855024814606,
0.2798192799091339,
-0.31179121136665344,
-0.04599817469716072,
-0.05289574712514877,
0.05597834661602974,
0.05841030925512314,
-0.0901239812374115,
-0.02046792581677437,
0.010743708349764347,
0.046851977705955505,
0.13231885433197021,
-0.01715417020022869,
-0.06198609992861748,
-0.008344883099198341,
-0.1534324288368225,
-0.06298980861902237,
0.11046526581048965,
0.017656773328781128,
-0.042628876864910126,
-0.09404584765434265,
-0.05194579064846039,
-0.2004159539937973,
-0.06980933248996735,
-0.01500130258500576,
0.039956334978342056,
-0.04952618107199669,
-0.10413790494203568,
-0.019491255283355713,
-0.06758825480937958,
-0.07009370625019073,
-0.03837838023900986,
0.19532173871994019,
0.06178545951843262,
-0.0007504495442844927,
-0.04200323671102524,
0.06930477917194366,
-0.014736226759850979,
-0.13804151117801666,
-0.023672964423894882,
0.036250852048397064,
-0.022838842123746872,
-0.01682872325181961,
-0.04348614066839218,
-0.06593196094036102,
0.018360575661063194,
0.1567915380001068,
-0.1088852807879448,
0.09793650358915329,
-0.01537051610648632,
0.03874713182449341,
-0.10357552021741867,
0.20873264968395233,
-0.04153716564178467,
0.03293122723698616,
-0.005830306094139814,
0.055414408445358276,
0.033529847860336304,
-0.026014741510152817,
-0.09795874357223511,
0.034854013472795486,
0.11659786105155945,
0.053310833871364594,
-0.04302902892231941,
0.05821622163057327,
-0.027089765295386314,
-0.009910321794450283,
0.011593430303037167,
-0.11522748321294785,
0.03396046161651611,
0.0198811162263155,
-0.06172381713986397,
0.0008120397687889636,
0.019153296947479248,
0.004520639777183533,
-0.06453731656074524,
0.08428143709897995,
-0.056282371282577515,
0.033882591873407364,
-0.05637597292661667,
-0.12755036354064941,
0.02759174071252346,
-0.10902155190706253,
-0.001358338282443583,
-0.10306747257709503,
-0.09193158149719238,
-0.010619371198117733,
0.036999672651290894,
-0.03549756854772568,
-0.03275611996650696,
-0.07301835715770721,
-0.09623170644044876,
0.04175805300474167,
-0.03570253774523735,
0.0764346495270729,
-0.07133547961711884,
0.09405636042356491,
0.03081537038087845,
0.08494142442941666,
-0.01318286918103695,
0.062569260597229,
-0.06405647844076157,
0.029027704149484634,
-0.20785629749298096,
0.078687384724617,
-0.09378376603126526,
0.058948077261447906,
-0.12374458461999893,
-0.1170143187046051,
0.03827769681811333,
-0.004956687800586224,
0.10379257798194885,
0.0937594622373581,
-0.16922356188297272,
-0.08996674418449402,
0.2025158554315567,
-0.08362291753292084,
-0.08466292172670364,
0.12438537180423737,
-0.023574335500597954,
-0.012047374621033669,
0.05270986631512642,
0.25722435116767883,
0.0563923642039299,
-0.12386839836835861,
0.01153150387108326,
-0.03621745854616165,
0.047043293714523315,
-0.04501413181424141,
0.05954015627503395,
-0.02173132449388504,
0.07572626322507858,
0.01326675247400999,
-0.006562749855220318,
0.042281605303287506,
-0.08780118823051453,
-0.07798930257558823,
-0.040403641760349274,
-0.07652655988931656,
0.013507777824997902,
0.034905679523944855,
0.06404134631156921,
-0.11733686923980713,
-0.11073767393827438,
0.04709266126155853,
0.08484742790460587,
-0.10454373061656952,
0.07569947093725204,
-0.11945994943380356,
0.08855628222227097,
-0.012427026405930519,
-0.0042078010737895966,
-0.19148027896881104,
0.033684469759464264,
0.03369207680225372,
-0.027014397084712982,
0.03843504935503006,
-0.06565430760383606,
0.07286848872900009,
0.04831041023135185,
-0.024084001779556274,
-0.04726380854845047,
-0.008630751632153988,
0.012781241908669472,
-0.09038025140762329,
-0.20807726681232452,
-0.040402818471193314,
-0.04182978719472885,
0.07309912890195847,
-0.13454800844192505,
0.034716520458459854,
0.07227864861488342,
0.09292402863502502,
0.02967613935470581,
-0.028521638363599777,
0.0027323609683662653,
0.09046582877635956,
-0.017737697809934616,
-0.06717314571142197,
0.05653621628880501,
0.023511258885264397,
-0.08707185834646225,
0.048796478658914566,
-0.1481570303440094,
0.127961665391922,
0.14512650668621063,
-0.008458556607365608,
-0.0681370198726654,
0.0027188167441636324,
-0.05006382241845131,
-0.0315980389714241,
-0.0025538518093526363,
0.04147781804203987,
0.22176256775856018,
0.01608957350254059,
0.14620628952980042,
-0.09077949076890945,
-0.04409495368599892,
0.049091413617134094,
-0.02334122359752655,
-0.009143802337348461,
0.12483556568622589,
0.04845994710922241,
-0.05674070864915848,
0.11428955942392349,
0.08967925608158112,
-0.08586719632148743,
0.11837322264909744,
-0.06838078796863556,
-0.07681573182344437,
-0.016253173351287842,
0.006750784814357758,
0.028568439185619354,
0.09584370255470276,
-0.15449927747249603,
-0.04031454026699066,
0.02691691555082798,
0.020981546491384506,
0.02508392371237278,
-0.20947007834911346,
0.014041672460734844,
0.03178508207201958,
-0.08192425966262817,
-0.043465156108140945,
-0.0011847163550555706,
0.012034800834953785,
0.09432540088891983,
0.013446008786559105,
-0.09667441248893738,
0.009430745616555214,
0.0037322519347071648,
-0.07600316405296326,
0.17992286384105682,
-0.12140516191720963,
-0.17771458625793457,
-0.10324431955814362,
-0.0862940177321434,
-0.032839421182870865,
-0.006773955188691616,
0.0887315422296524,
-0.09486573934555054,
-0.044363152235746384,
-0.08358942717313766,
-0.023079875856637955,
-0.03151819482445717,
0.04283427074551582,
0.03156427666544914,
-0.01136570330709219,
0.06314032524824142,
-0.11243854463100433,
-0.019515544176101685,
-0.041744768619537354,
0.004032604396343231,
0.05496735870838165,
0.03658017888665199,
0.10614565014839172,
0.1565544754266739,
-0.015423845499753952,
0.04914018139243126,
-0.04671413451433182,
0.1867409497499466,
-0.07426898181438446,
-0.041470639407634735,
0.1136881560087204,
-0.007811855059117079,
0.06949979066848755,
0.10878996551036835,
0.04568083956837654,
-0.09368357807397842,
-0.013869465328752995,
-0.000707953586243093,
-0.04555567353963852,
-0.22215522825717926,
-0.036037545651197433,
-0.04656601697206497,
-0.00568003486841917,
0.10165924578905106,
0.040871743112802505,
0.02505088411271572,
0.018389305099844933,
0.028121553361415863,
0.00035212599323131144,
0.0012278348440304399,
0.09916964918375015,
0.1341795027256012,
0.0387304350733757,
0.1326872706413269,
-0.043069735169410706,
-0.03335773944854736,
0.03271381929516792,
-0.0015795581275597215,
0.23355889320373535,
0.014797404408454895,
0.18411597609519958,
0.05663689598441124,
0.16338348388671875,
0.04172950237989426,
0.06686992943286896,
-0.004308757837861776,
-0.011605213396251202,
0.012266881763935089,
-0.051825493574142456,
-0.042994026094675064,
0.022489888593554497,
0.0273785088211298,
0.004465919919312,
-0.1159159392118454,
0.0005170528893359005,
0.04267645999789238,
0.3521466553211212,
0.026302076876163483,
-0.33115461468696594,
-0.0937834158539772,
-0.011363771744072437,
-0.09160836786031723,
-0.029828879982233047,
0.04430842027068138,
0.08963862806558609,
-0.07562659680843353,
0.06577971577644348,
-0.06103985011577606,
0.09144850075244904,
-0.059319667518138885,
0.029836803674697876,
0.03289255127310753,
0.07434683293104172,
0.005700880195945501,
0.03577127307653427,
-0.2962503433227539,
0.28073421120643616,
0.005631123203784227,
0.07630942016839981,
-0.059538017958402634,
0.012447638437151909,
0.02244623191654682,
0.021201057359576225,
0.0854242816567421,
-0.025091901421546936,
-0.12549014389514923,
-0.16572368144989014,
-0.09539511799812317,
0.015275818295776844,
0.12291479855775833,
0.03043687902390957,
0.11055338382720947,
-0.008221535012125969,
-0.016779381781816483,
0.04930062219500542,
-0.10247119516134262,
-0.0565626323223114,
-0.09930874407291412,
0.013917908072471619,
0.06958311051130295,
0.017841244116425514,
-0.07698749750852585,
-0.10803275555372238,
-0.07963237911462784,
0.161455899477005,
-0.04690762236714363,
-0.049646005034446716,
-0.12043671309947968,
0.009213562123477459,
0.10760517418384552,
-0.08037063479423523,
0.0627606213092804,
0.007560367230325937,
0.1034381240606308,
0.003693344769999385,
-0.06942233443260193,
0.11578889191150665,
-0.06958215683698654,
-0.16740162670612335,
-0.023777656257152557,
0.14403222501277924,
0.029652034863829613,
0.06261475384235382,
-0.010333992540836334,
0.03588103502988815,
-0.02198963798582554,
-0.0782666876912117,
0.03668055683374405,
0.0313185378909111,
0.04941844940185547,
-0.018752507865428925,
-0.014451628550887108,
-0.005778694525361061,
-0.0897565484046936,
-0.01813792996108532,
0.20751960575580597,
0.24517950415611267,
-0.09391327947378159,
0.095774345099926,
0.06509755551815033,
-0.03955508768558502,
-0.17117023468017578,
-0.009669424965977669,
0.07201457023620605,
-0.00040477776201441884,
-0.03234190493822098,
-0.1950286626815796,
0.02182387374341488,
0.06428606063127518,
-0.02105681411921978,
0.07620948553085327,
-0.3114224076271057,
-0.1389889419078827,
0.14483876526355743,
0.11684533208608627,
0.057372041046619415,
-0.14682094752788544,
-0.05427340418100357,
-0.009698581881821156,
-0.08959914743900299,
0.09872198104858398,
-0.07368794083595276,
0.13339248299598694,
-0.02151283621788025,
0.0900125801563263,
0.011481883004307747,
-0.05909395590424538,
0.10904435813426971,
0.006878409069031477,
0.05564282089471817,
-0.04371855780482292,
0.02109719254076481,
0.04945603385567665,
-0.06575894355773926,
0.05426900461316109,
-0.07870833575725555,
0.0321306437253952,
-0.08992088586091995,
-0.030698301270604134,
-0.08440285176038742,
0.012920956127345562,
-0.012694328092038631,
-0.027571629732847214,
-0.038240376859903336,
0.00040720109245739877,
0.06439678370952606,
-0.012324657291173935,
0.15859998762607574,
-0.0258988868445158,
0.1213768869638443,
0.16440238058567047,
0.10472052544355392,
-0.10338187217712402,
-0.06646968424320221,
0.006159121636301279,
-0.03442716598510742,
0.05600771680474281,
-0.12481767684221268,
0.0331452377140522,
0.13678844273090363,
0.02906477451324463,
0.11560565233230591,
0.0657036304473877,
-0.07196593284606934,
0.029690509662032127,
0.03940979763865471,
-0.14030630886554718,
-0.1259399950504303,
0.012432526797056198,
0.04283227026462555,
-0.07060881704092026,
0.07352157682180405,
0.11225481331348419,
-0.05890776589512825,
-0.019077425822615623,
-0.0010647890157997608,
0.014384094625711441,
-0.039235200732946396,
0.19945017993450165,
0.04253912717103958,
0.06556674838066101,
-0.12472614645957947,
0.07962489128112793,
0.04067164659500122,
-0.13785240054130554,
0.06680858135223389,
0.11523443460464478,
-0.09564115107059479,
-0.029312387108802795,
0.03305184841156006,
0.1058652251958847,
-0.027327246963977814,
-0.07625725865364075,
-0.14180098474025726,
-0.14805257320404053,
0.11542604118585587,
0.20982274413108826,
0.05477139726281166,
0.011962365359067917,
-0.05966893583536148,
0.016742343083024025,
-0.12094023823738098,
0.07404458522796631,
0.040687933564186096,
0.06161949783563614,
-0.12236526608467102,
0.15302594006061554,
0.01823774166405201,
0.04901929199695587,
-0.014212665148079395,
-0.008479558862745762,
-0.11560764163732529,
0.04105975478887558,
-0.1377730667591095,
0.007889210246503353,
-0.06813781708478928,
0.002953618997707963,
0.002498693997040391,
-0.04447924718260765,
-0.062049854546785355,
0.03951378911733627,
-0.12002760171890259,
-0.02218621037900448,
-0.004193393047899008,
0.029725441709160805,
-0.12637798488140106,
-0.009144372306764126,
0.007749427575618029,
-0.09551648050546646,
0.09743473678827286,
0.08704204112291336,
-0.02983301691710949,
0.050036896020174026,
-0.04546830430626869,
-0.03167468309402466,
0.08094117045402527,
-0.003110236721113324,
0.055044252425432205,
-0.13397149741649628,
-0.019748948514461517,
0.014943324960768223,
0.03051268868148327,
0.02191765606403351,
0.11163926869630814,
-0.11216187477111816,
0.002342303516343236,
-0.02661878988146782,
-0.052631352096796036,
-0.0695110633969307,
0.0566021203994751,
0.10603443533182144,
0.028557132929563522,
0.16374637186527252,
-0.09526465833187103,
0.030032064765691757,
-0.16133320331573486,
0.004723858553916216,
-0.02056591957807541,
-0.12526042759418488,
-0.043614841997623444,
-0.031058959662914276,
0.08091603964567184,
-0.06501792371273041,
0.12357719242572784,
-0.027396967634558678,
0.03133884072303772,
0.039567429572343826,
-0.08330715447664261,
-0.04500983655452728,
0.04368012025952339,
0.19865919649600983,
0.037938669323921204,
-0.04089481383562088,
0.07326071709394455,
0.017733758315443993,
0.07938048988580704,
0.12459861487150192,
0.1737319976091385,
0.15788210928440094,
0.060173243284225464,
0.11847540736198425,
0.05435815453529358,
-0.058412231504917145,
-0.16708436608314514,
0.08628037571907043,
-0.06032026931643486,
0.13355810940265656,
-0.011683795601129532,
0.23349842429161072,
0.126515194773674,
-0.15185151994228363,
0.06547676026821136,
-0.01775580458343029,
-0.08892745524644852,
-0.11879414319992065,
-0.059978779405355453,
-0.08449370414018631,
-0.17035658657550812,
0.007223862688988447,
-0.10407434403896332,
0.060791682451963425,
0.04036923497915268,
0.0406450591981411,
0.017503537237644196,
0.13356520235538483,
0.025533415377140045,
0.0011981537099927664,
0.0938468649983406,
-0.0034534884616732597,
-0.05139409005641937,
-0.0654342845082283,
-0.08168738335371017,
0.03930104151368141,
-0.011124776676297188,
0.05700472742319107,
-0.0044067357666790485,
-0.06600939482450485,
0.05390038341283798,
-0.035257499665021896,
-0.09521207958459854,
0.02477937377989292,
0.02138591930270195,
0.07421143352985382,
0.053345803171396255,
0.0343724749982357,
-0.03974883630871773,
-0.0016492705326527357,
0.19061097502708435,
-0.0947212427854538,
-0.09959877282381058,
-0.10897103697061539,
0.2683177888393402,
0.03826966509222984,
-0.01721738465130329,
0.022094130516052246,
-0.058050334453582764,
-0.03629877790808678,
0.2044251561164856,
0.17119856178760529,
-0.010132716968655586,
0.004274469800293446,
-0.01581609807908535,
-0.005809308495372534,
-0.043228887021541595,
0.08381844311952591,
0.15583012998104095,
0.06372498720884323,
-0.06269604712724686,
-0.06358547508716583,
-0.05333370715379715,
-0.034645576030015945,
-0.06843351572751999,
0.07628190517425537,
0.014270270243287086,
-0.02650071680545807,
-0.03774745762348175,
0.0622498095035553,
-0.09407172352075577,
-0.08780978620052338,
0.01707332581281662,
-0.1899011880159378,
-0.1541675627231598,
0.007431644015014172,
0.06914526224136353,
0.013699430041015148,
0.03485763445496559,
0.0046659428626298904,
-0.013051481917500496,
0.08807174861431122,
0.0005368085112422705,
-0.08228840678930283,
-0.060809750109910965,
0.092787005007267,
-0.14782628417015076,
0.15854524075984955,
-0.03908930718898773,
0.04669244587421417,
0.12287257611751556,
0.08951910585165024,
-0.08050762861967087,
0.08849873393774033,
0.04622596129775047,
-0.10895267128944397,
0.02583940513432026,
0.15606917440891266,
-0.03488616645336151,
0.0890420526266098,
0.02996581420302391,
-0.11539477854967117,
0.010171609930694103,
-0.10265477001667023,
-0.03983833268284798,
-0.03537425026297569,
-0.04617121443152428,
-0.04696659743785858,
0.10657443851232529,
0.1665657013654709,
-0.045781467109918594,
0.004395944532006979,
-0.053576916456222534,
0.008421660400927067,
0.046719495207071304,
0.003148264018818736,
-0.05753806233406067,
-0.2782512605190277,
0.011577482335269451,
0.027842320501804352,
0.00722676794975996,
-0.2543206810951233,
-0.08786150068044662,
0.010264093987643719,
-0.04437977075576782,
-0.08825569599866867,
0.08789321780204773,
0.07012148946523666,
0.04342355951666832,
-0.058009400963783264,
-0.04866177216172218,
-0.03920764848589897,
0.18731571733951569,
-0.17453256249427795,
-0.0540112666785717
] |
null | null |
transformers
|
# Work In Progress
# How to use?
To generate text containing HTML, the prompt must start with ` htmlOn |||` (note the space at the beginning 😉). To generate plain text, you don't need to add any prefix.
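A minimal sketch with the `transformers` pipeline (the sampling settings are illustrative, not the ones used in our experiments):

```python
from transformers import pipeline

generator = pipeline(
    "text-generation",
    model="bs-modeling-metadata/html-metadata-exp1-subexp1-1857108",
)

# HTML-conditioned generation: the prompt starts with " htmlOn |||" (leading space included).
html_out = generator(" htmlOn ||| <div", max_new_tokens=64, do_sample=True, top_p=0.95)
print(html_out[0]["generated_text"])

# Plain-text generation: no prefix needed.
plain_out = generator("The history of Wikipedia", max_new_tokens=64, do_sample=True, top_p=0.95)
print(plain_out[0]["generated_text"])
```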
# Training details
We continued the pre-training of [gpt2](https://huggingface.co/gpt2).
Dataset: [Natural_Questions_HTML_reduced_all](https://huggingface.co/datasets/SaulLu/Natural_Questions_HTML_reduced_all)
In 50% of the training examples, all HTML tags were kept but reduced to only their `id` and `class` attributes; the other 50% of the examples were plain text.
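For illustration, the tag reduction could be done along these lines (a hedged sketch using BeautifulSoup; the actual preprocessing behind the dataset, including the `id:`/`class:` serialization visible in the example below, may differ):

```python
from bs4 import BeautifulSoup

def keep_only_id_and_class(html: str) -> str:
    """Drop every tag attribute except `id` and `class`."""
    soup = BeautifulSoup(html, "html.parser")
    for tag in soup.find_all(True):  # True matches every tag
        tag.attrs = {k: v for k, v in tag.attrs.items() if k in ("id", "class")}
    return str(soup)

print(keep_only_id_and_class('<li id="n-help" class="nav"><a href="/wiki/Help">Help</a></li>'))
# the href attribute is dropped; id and class are kept
```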
Training example with metadata:
```
htmlOn |||
<li id:n-sitesupport><a>Donate to Wikipedia</a></li>
<li id:n-shoplink><a>Wikipedia store</a></li>
</ul></div></div><div class:portal id:p-interaction><h3 id:p-interaction-label>Interaction</h3>
<div class:body><ul><li id:n-help><a>Help</a></li>
<li id:n-aboutsite><a>About Wikipedia</a></li>
<li id:n-portal><a>Community portal</a></li>
<li id:n-recentchanges><a>Recent changes</a></li>
<li id:n-contactpage><a>Contact page</a></li>
</ul></div></div><div class:portal id:p-tb><h3 id:p-tb-label>Tools</h3>
<div class:body><ul><li id:t-whatlinkshere><a>What links here</a></li>
<li id:t-recentchangeslinked><a>Related changes</a></li>
<li id:t-upload><a>Upload file</a></li>
<li id:t-specialpages><a>Special pages</a></li>
<li id:t-permalink><a>Permanent link</a></li>
<li id:t-info><a>Page information</a></li>
<li id:t-wikibase><a>Wikidata item</a></li>
<li id:t-cite><a>Cite this page</a></li>
</ul></div></div><div class:portal id:p-coll-print_export><h3 id:p-coll-print_export-label>Print/export</h3>
<div class:body><ul><li id:coll-create_a_book><a>Create a book</a></li>
<li id:coll-download-as-rdf2latex><a>Download as PDF</a></li>
<li id:t-print><a>Printable version</a></li>
</ul></div></div><div class:portal id:p-lang><h3 id:p-lang-label>Languages</h3>
<div class:body><ul><li class:interlanguage-link interwiki-ca><a class:interlanguage-link-target>Català</a></li>
<li class:interlanguage-link interwiki-da><a class:interlanguage-link-target>Dansk</a></li>
<li class:interlanguage-link interwiki-de><a class:interlanguage-link-target>Deutsch</a></li>
<li class:interlanguage-link interwiki-es><a class:interlanguage-link-target>Español</a></li>
<li class:interlanguage-link interwiki-eu><a class:interlanguage-link-target>Euskara</a></li>
<li class:interlanguage-link interwiki-fa><a class:interlanguage-link-target>فارسی</a></li>
<li class:interlanguage-link interwiki-fr><a class:interlanguage-link-target>Français</a></li>
<li class:interlanguage-link interwiki-id><a class:interlanguage-link-target>Bahasa Indonesia</a></li>
<li class:interlanguage-link interwiki-nl><a class:interlanguage-link-target>Nederlands</a></li>
<li class:interlanguage-link interwiki-pt><a class:interlanguage-link-target>Português</a></li>
<li class:interlanguage-link interwiki-fi><a class:interlanguage-link-target>Suomi</a></li>
<li class:interlanguage-link interwiki-vi><a class:interlanguage-link-target>Tiếng Việt</a></li>
<button class:mw-interlanguage-selector mw-ui-button>5 more</button>
</ul><div class:after-portlet after-portlet-lang><span class:wb-langlinks-edit wb-langlinks-link><a class:wbc-editpage>Edit links</a></span></div>
</div></div></
```
|
{"widget": [{"text": " htmlOn ||| <div"}]}
|
text-generation
|
bs-modeling-metadata/html-metadata-exp1-subexp1-1857108
|
[
"transformers",
"pytorch",
"gpt2",
"text-generation",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #gpt2 #text-generation #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
# Work In Progress
# How to use?
To generate text with HTML, the sentence must start with ' htmlOn |||' (note the space at the beginning ). To generate normal text, you don't need to add anything.
# Training details
We continued the pre-training of gpt2.
Dataset:Natural_Questions_HTML_reduced_all
50% of the examples in the training data contained all HTML tags with only their 'id' and 'class' attributes. 50% of the examples were just plain text.
Training example with metadata:
|
[
"# Work In Progress",
"# How to use?\n\nTo generate text with HTML, the sentence must start with ' htmlOn |||' (note the space at the beginning ). To generate normal text, you don't need to add anything.",
"# Training details\n\nWe continued the pre-training of gpt2.\n\nDataset:Natural_Questions_HTML_reduced_all\n50% of the examples in the training data contained all HTML tags with only their 'id' and 'class' attributes. 50% of the examples were just plain text.\n\nTraining example with metadata:"
] |
[
"TAGS\n#transformers #pytorch #gpt2 #text-generation #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"# Work In Progress",
"# How to use?\n\nTo generate text with HTML, the sentence must start with ' htmlOn |||' (note the space at the beginning ). To generate normal text, you don't need to add anything.",
"# Training details\n\nWe continued the pre-training of gpt2.\n\nDataset:Natural_Questions_HTML_reduced_all\n50% of the examples in the training data contained all HTML tags with only their 'id' and 'class' attributes. 50% of the examples were just plain text.\n\nTraining example with metadata:"
] |
[
47,
5,
48,
72
] |
[
"passage: TAGS\n#transformers #pytorch #gpt2 #text-generation #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n# Work In Progress# How to use?\n\nTo generate text with HTML, the sentence must start with ' htmlOn |||' (note the space at the beginning ). To generate normal text, you don't need to add anything.# Training details\n\nWe continued the pre-training of gpt2.\n\nDataset:Natural_Questions_HTML_reduced_all\n50% of the examples in the training data contained all HTML tags with only their 'id' and 'class' attributes. 50% of the examples were just plain text.\n\nTraining example with metadata:"
] |
[
-0.0566776767373085,
0.03406408801674843,
-0.00035880168434232473,
0.039425548166036606,
0.21293963491916656,
0.0606042705476284,
0.08346728980541229,
0.1406218260526657,
-0.0348978154361248,
-0.03721633180975914,
0.172393798828125,
0.1027710884809494,
-0.01481769047677517,
0.09721840173006058,
-0.010905969887971878,
-0.19611245393753052,
-0.021974295377731323,
0.08434294909238815,
-0.01160364132374525,
0.12029436230659485,
0.07739058136940002,
-0.08508637547492981,
0.0813530907034874,
0.021682582795619965,
-0.1844148486852646,
-0.007642805110663176,
0.006544292438775301,
-0.06515566259622574,
0.11967496573925018,
-0.017047755420207977,
0.06049061194062233,
-0.01205169502645731,
0.040055498480796814,
-0.1678674966096878,
0.023133547976613045,
0.05651479586958885,
0.013775128871202469,
0.06678963452577591,
0.05351285636425018,
-0.06377548724412918,
0.012040764093399048,
0.010829341597855091,
0.01836094819009304,
0.0995149090886116,
-0.14268121123313904,
-0.07732828706502914,
-0.035390470176935196,
0.0012025788892060518,
0.11436140537261963,
0.1221064031124115,
-0.02908242866396904,
0.008381186984479427,
-0.13335151970386505,
0.06300708651542664,
0.20222806930541992,
-0.19357751309871674,
-0.03544533625245094,
0.06230204179883003,
0.019568394869565964,
-0.010058739222586155,
-0.06931421905755997,
0.06056768074631691,
0.03439505398273468,
0.050649479031562805,
-0.005013441201299429,
-0.02589612454175949,
-0.15099705755710602,
0.01742122694849968,
-0.07858342677354813,
-0.030701057985424995,
0.298516184091568,
0.005296168848872185,
-0.024185679852962494,
-0.10980623215436935,
-0.012824878096580505,
-0.04779033362865448,
-0.04356354847550392,
0.017811719328165054,
-0.04440693557262421,
0.07158326357603073,
-0.05374077335000038,
-0.12147731333971024,
-0.14502626657485962,
-0.10371796786785126,
-0.004215597175061703,
0.034308843314647675,
0.01448158547282219,
-0.0006200168281793594,
-0.10378804057836533,
0.17281083762645721,
0.005753593053668737,
-0.03601973503828049,
0.01608589105308056,
-0.06561241298913956,
0.025663264095783234,
-0.013672097586095333,
-0.12592877447605133,
-0.16066361963748932,
0.04845719039440155,
0.10304999351501465,
0.0642247274518013,
0.015648700296878815,
0.024721486493945122,
0.06221316009759903,
-0.08591428399085999,
0.09714536368846893,
0.008541855029761791,
0.00845605693757534,
0.048012446612119675,
-0.0529278963804245,
-0.07505021244287491,
-0.01912374049425125,
-0.13606084883213043,
-0.07363497465848923,
0.0032690262887626886,
0.05713469907641411,
-0.011984082870185375,
0.10366030037403107,
-0.07496702671051025,
-0.013266801834106445,
-0.058908939361572266,
-0.10327060520648956,
-0.011079872958362103,
-0.004425802733749151,
0.026159709319472313,
-0.016226565465331078,
0.09745723754167557,
0.02284299209713936,
-0.07964522391557693,
-0.04308929666876793,
-0.042474694550037384,
0.04168499633669853,
-0.09362976998090744,
-0.01506481971591711,
-0.023890912532806396,
-0.07679888606071472,
-0.022312574088573456,
-0.08893562108278275,
-0.22150173783302307,
-0.006719058845192194,
0.08115237951278687,
0.021105492487549782,
0.016889067366719246,
-0.06151643022894859,
0.012554967775940895,
-0.00047778236330486834,
0.005757635459303856,
0.09345579147338867,
-0.04208308458328247,
0.08261646330356598,
0.002780307549983263,
0.0698980987071991,
-0.055277422070503235,
0.0787775069475174,
-0.136412113904953,
-0.02601306326687336,
-0.07285317778587341,
0.10952863097190857,
0.09666930884122849,
0.06268417090177536,
-0.05725072696805,
-0.0052488865330815315,
-0.053894732147455215,
0.024303661659359932,
0.021520990878343582,
0.18307620286941528,
-0.08509597927331924,
-0.06476296484470367,
0.1935233622789383,
-0.0020404525566846132,
-0.07419976592063904,
0.05707355961203575,
-0.026483546942472458,
0.2185954451560974,
0.10978365689516068,
0.14633946120738983,
0.00694079976528883,
-0.007208284921944141,
0.12798690795898438,
0.05905609950423241,
-0.042078711092472076,
-0.007088201120495796,
-0.016450034454464912,
-0.08968739211559296,
-0.13323865830898285,
0.05946322903037071,
-0.04653944820165634,
0.08908350765705109,
-0.0738309919834137,
-0.06716618686914444,
-0.037569496780633926,
-0.05238139256834984,
0.13089844584465027,
0.06647355854511261,
0.08045733720064163,
-0.01612854190170765,
-0.07576189935207367,
-0.031162457540631294,
0.011104614473879337,
-0.11714417487382889,
0.03553958237171173,
-0.03201743960380554,
0.06823208928108215,
-0.07221470773220062,
0.009453765116631985,
-0.18018251657485962,
-0.11800462752580643,
-0.020283406600356102,
0.18993420898914337,
0.04103820398449898,
0.005111935082823038,
0.06795504689216614,
-0.014642292633652687,
-0.06276825070381165,
0.007410694379359484,
-0.006601358763873577,
-0.02528437413275242,
-0.10477977246046066,
-0.12028947472572327,
-0.013000315986573696,
-0.03787478804588318,
0.10558504611253738,
-0.1298907846212387,
0.05318880453705788,
-0.030002111569046974,
0.08734158426523209,
0.010420388542115688,
-0.0488126277923584,
0.031543295830488205,
0.0033334121108055115,
-0.024863921105861664,
-0.0438656359910965,
0.08917684853076935,
0.008691930212080479,
-0.0794907808303833,
0.023230385035276413,
-0.0076566096395254135,
-0.04626046493649483,
0.12073524296283722,
-0.21177521347999573,
-0.12720677256584167,
-0.020283931866288185,
-0.051534876227378845,
0.015419908799231052,
-0.11274569481611252,
-0.025148101150989532,
0.22579817473888397,
-0.019700458273291588,
0.12312056869268417,
-0.013714289292693138,
-0.06681670993566513,
-0.018561124801635742,
-0.047239698469638824,
0.0665624663233757,
0.017781151458621025,
0.09752023220062256,
-0.035125549882650375,
0.11329414695501328,
0.012441293336451054,
-0.04492954909801483,
0.19634070992469788,
0.008028233423829079,
-0.07396584004163742,
0.07910964637994766,
0.024962889030575752,
-0.030731813982129097,
0.03869789466261864,
-0.2783295810222626,
-0.07252757996320724,
0.057160183787345886,
0.02149033546447754,
0.06441346555948257,
-0.18413423001766205,
-0.027098840102553368,
-0.0241317767649889,
-0.06315649300813675,
-0.0047475057654082775,
0.02477189712226391,
-0.020307054743170738,
0.08968669921159744,
0.04205578938126564,
0.04428935423493385,
0.11743143200874329,
0.019241461530327797,
-0.1400424838066101,
0.15362174808979034,
-0.02938789874315262,
-0.19948464632034302,
-0.07604876160621643,
-0.02925994247198105,
-0.055809974670410156,
0.07556936144828796,
0.08674878627061844,
-0.18696942925453186,
-0.041924431920051575,
0.020175522193312645,
-0.0012151270639151335,
-0.0037949890829622746,
0.048599615693092346,
-0.03634736314415932,
0.009665611200034618,
-0.02455361932516098,
-0.10111746191978455,
-0.003598712384700775,
-0.01453342754393816,
-0.05311069265007973,
0.07235348224639893,
-0.12505044043064117,
0.09551180154085159,
0.18354426324367523,
-0.03804720565676689,
0.0894709974527359,
-0.027962541207671165,
0.1862964779138565,
-0.09305021166801453,
-0.005425375420600176,
0.13339729607105255,
-0.010587524622678757,
0.01532112155109644,
0.05580700933933258,
0.023128937929868698,
-0.07039088755846024,
0.05207350477576256,
0.047690633684396744,
-0.06338837742805481,
-0.23332346975803375,
-0.03946975991129875,
-0.11742541193962097,
-0.04745671898126602,
0.07426915317773819,
0.04470844566822052,
0.10707678645849228,
0.1051061823964119,
-0.05038342624902725,
0.06732446700334549,
0.06315937638282776,
0.1258733570575714,
0.08358966559171677,
0.024687228724360466,
0.10644926875829697,
-0.02768116630613804,
-0.06966785341501236,
0.01658862829208374,
-0.010885217227041721,
0.2147822231054306,
-0.017041433602571487,
0.018236443400382996,
0.03438685089349747,
0.07582240551710129,
0.01802968420088291,
0.13282857835292816,
-0.021763933822512627,
-0.012803482823073864,
-0.0163005031645298,
-0.025233596563339233,
-0.029377548024058342,
0.05161860212683678,
-0.05136698856949806,
-0.09564328193664551,
-0.1116742417216301,
-0.05974338948726654,
0.09455382823944092,
0.19969552755355835,
0.10711047053337097,
-0.2553209066390991,
-0.05492054298520088,
-0.017544040456414223,
-0.04634498432278633,
-0.12513597309589386,
0.024354003369808197,
0.01191036682575941,
-0.15683406591415405,
0.02600739523768425,
-0.03299464285373688,
0.13048525154590607,
0.012700884602963924,
0.03169581666588783,
0.05125534534454346,
-0.06283016502857208,
-0.026924602687358856,
0.1396503448486328,
-0.2973422706127167,
0.09921185672283173,
0.03541387617588043,
0.07880625873804092,
-0.13090504705905914,
0.006495073437690735,
0.03333130106329918,
0.09025552123785019,
0.10842350125312805,
-0.013356605544686317,
0.15907029807567596,
-0.06706389039754868,
0.009084236808121204,
0.054543349891901016,
0.09254734218120575,
-0.057253241539001465,
0.028687840327620506,
-0.03812088072299957,
0.003559867385774851,
0.04534551873803139,
-0.10505764931440353,
-0.1693103313446045,
-0.1411483883857727,
0.023103345185518265,
0.017244720831513405,
0.16794323921203613,
0.002127395709976554,
-0.025163503363728523,
-0.00445137033239007,
0.2895555794239044,
0.017019586637616158,
-0.10582322627305984,
-0.12833209335803986,
0.027223598212003708,
0.01919475942850113,
-0.01029207743704319,
0.07494963705539703,
-0.030813127756118774,
0.07323504239320755,
0.012996670790016651,
-0.09163334965705872,
0.10305404663085938,
-0.07716681063175201,
-0.1306416243314743,
-0.06308647245168686,
0.08614759147167206,
0.05534329265356064,
0.013191387057304382,
0.06732536852359772,
-0.026782555505633354,
-0.08300231397151947,
-0.10169937461614609,
0.031037382781505585,
0.019565729424357414,
0.1679467260837555,
0.04796391353011131,
-0.09317738562822342,
0.036152150481939316,
-0.05047878250479698,
-0.058253634721040726,
0.280306339263916,
0.057802341878414154,
-0.08357944339513779,
0.07443871349096298,
0.06113755702972412,
-0.07477448135614395,
-0.24487486481666565,
0.06638653576374054,
0.013274976052343845,
-0.020962728187441826,
-0.08565080165863037,
-0.228577122092247,
0.09360554814338684,
-0.02178122289478779,
-0.005175701808184385,
0.09587511420249939,
-0.2327284961938858,
-0.08613024652004242,
0.10136933624744415,
0.03967545926570892,
0.18940071761608124,
-0.1202307641506195,
0.000568636110983789,
-0.043962422758340836,
-0.13636134564876556,
0.10757055133581161,
-0.11290273070335388,
0.11380257457494736,
-0.020366892218589783,
0.09029266983270645,
0.01667601615190506,
-0.06003972515463829,
0.09658077359199524,
0.024473465979099274,
0.023974936455488205,
-0.021329645067453384,
0.02056110091507435,
0.1468181312084198,
-0.007331074681133032,
0.09752321988344193,
0.025284182280302048,
0.017728092148900032,
-0.13466951251029968,
-0.08672717213630676,
-0.0896381139755249,
0.03360733017325401,
0.041429463773965836,
-0.12975738942623138,
-0.014472376555204391,
-0.07671086490154266,
0.05452895537018776,
0.014526117593050003,
0.032198693603277206,
-0.10551145672798157,
0.0969821959733963,
-0.025245757773518562,
0.15122206509113312,
-0.12576824426651,
-0.05666491761803627,
-0.009184383787214756,
-0.059669505804777145,
0.08216391503810883,
-0.136895090341568,
0.07895295321941376,
0.04484563693404198,
0.019648045301437378,
0.13879023492336273,
0.1382906287908554,
0.04028793424367905,
0.027180140838027,
0.055970191955566406,
-0.15673603117465973,
0.022595321759581566,
-0.06210968643426895,
-0.1350807547569275,
-0.10095508396625519,
0.07876476645469666,
0.04451707378029823,
-0.02607305720448494,
-0.04984261095523834,
0.004954760894179344,
0.011866649612784386,
-0.10956951975822449,
0.04857839271426201,
0.057583630084991455,
0.007232774514704943,
-0.09923011809587479,
0.022179342806339264,
0.02001372165977955,
-0.01877976395189762,
0.026351451873779297,
0.04593224823474884,
-0.13295906782150269,
-0.08617269992828369,
0.09491796046495438,
0.24688667058944702,
-0.09847228229045868,
-0.04900142177939415,
-0.07379554957151413,
-0.0743173286318779,
0.06161129102110863,
-0.010592578910291195,
0.045592907816171646,
0.015922488644719124,
-0.028211073949933052,
0.02122998796403408,
-0.1483834832906723,
0.017065923660993576,
0.05962684005498886,
0.02553996443748474,
-0.07844872027635574,
0.14712651073932648,
-0.00620470242574811,
0.0750994011759758,
-0.06317520141601562,
-0.032917674630880356,
-0.08163153380155563,
0.08732625097036362,
-0.11941580474376678,
-0.020361904054880142,
-0.08144077658653259,
-0.03680482134222984,
0.0033279277849942446,
-0.02180738002061844,
-0.001799928373657167,
0.00370837957598269,
-0.07572881132364273,
-0.004512039478868246,
-0.021703200414776802,
0.028194420039653778,
-0.06622899323701859,
0.004881695378571749,
0.03149335831403732,
-0.053636182099580765,
0.11224937438964844,
0.05402922257781029,
-0.07002574950456619,
0.0319911390542984,
-0.1702600121498108,
-0.0350172184407711,
0.06924981623888016,
-0.032636530697345734,
-0.04793888330459595,
0.019712897017598152,
0.025504494085907936,
-0.0026335534639656544,
0.038020387291908264,
0.055235959589481354,
0.14556993544101715,
-0.12038882076740265,
0.0023208661004900932,
-0.05695067718625069,
-0.061883069574832916,
-0.08882302790880203,
0.04063120111823082,
0.027365414425730705,
0.09713051468133926,
0.10866837203502655,
-0.07437068969011307,
0.08473442494869232,
-0.08157476037740707,
0.022436462342739105,
-0.004299817141145468,
-0.010653635486960411,
-0.06610359996557236,
-0.09893488138914108,
0.059538766741752625,
-0.01571415551006794,
0.20531390607357025,
0.03293094038963318,
0.01740262657403946,
-0.009422956965863705,
0.028573663905262947,
0.1074947863817215,
-0.00963591318577528,
0.19999608397483826,
0.039494991302490234,
-0.01244763471186161,
0.004148144740611315,
0.13282422721385956,
0.0618765614926815,
0.05603903904557228,
0.1731691062450409,
-0.012701335363090038,
-0.01728508248925209,
0.08631355315446854,
-0.15679675340652466,
-0.058687567710876465,
-0.1306379735469818,
-0.008431602269411087,
0.056885577738285065,
0.05825427547097206,
-0.09689649939537048,
0.04962775483727455,
0.17506539821624756,
-0.11321312934160233,
0.03294495493173599,
-0.03644457086920738,
-0.07433080673217773,
-0.09847822040319443,
-0.12407934665679932,
-0.018071405589580536,
-0.16175247728824615,
0.00409707659855485,
-0.08359388262033463,
-0.006143564358353615,
0.049551818519830704,
-0.002286003204062581,
-0.005685469135642052,
0.16534145176410675,
0.05913920700550079,
-0.08964434266090393,
0.050220366567373276,
-0.065432608127594,
0.07003063708543777,
-0.06398919224739075,
-0.04465099796652794,
-0.02179744653403759,
-0.04632618650794029,
0.05830945819616318,
0.024069197475910187,
-0.04203488677740097,
0.00943849142640829,
-0.08803638070821762,
-0.05756024271249771,
-0.03486526384949684,
0.0809657871723175,
0.03764752298593521,
0.14254285395145416,
0.004082448780536652,
-0.0456109419465065,
0.02046898379921913,
0.2206643968820572,
-0.08889824897050858,
-0.11660934239625931,
-0.1412883847951889,
0.16274315118789673,
0.06917347759008408,
-0.03717152774333954,
0.011949140578508377,
-0.11114206910133362,
0.05696375295519829,
0.3312082588672638,
0.17083969712257385,
-0.01722225360572338,
0.0032203523442149162,
0.04527569189667702,
-0.013001143001019955,
0.0710480734705925,
0.06528586894273758,
0.05402258783578873,
0.14707444608211517,
-0.11640525609254837,
-0.022240497171878815,
0.018742186948657036,
-0.05677850544452667,
-0.09052205085754395,
0.07189279049634933,
0.04197729006409645,
-0.04374244436621666,
-0.030894989147782326,
0.10535070300102234,
-0.10431963950395584,
0.025707246735692024,
0.05940292030572891,
-0.03400067985057831,
-0.06631434708833694,
-0.004139459226280451,
-0.04166393354535103,
-0.039779629558324814,
0.05208462104201317,
0.000010874291547224857,
0.0072783962823450565,
0.13270355761051178,
0.017672963440418243,
-0.124097540974617,
-0.034724410623311996,
0.08746042847633362,
-0.022736286744475365,
0.1097133457660675,
-0.012374780140817165,
0.08465288579463959,
0.032850466668605804,
0.03384612873196602,
-0.06309442222118378,
0.027906781062483788,
0.0016392081743106246,
0.06545290350914001,
0.06027704477310181,
0.04434004798531532,
-0.02664465270936489,
-0.0826067328453064,
-0.014612146653234959,
-0.1475154459476471,
0.059179533272981644,
-0.01828865520656109,
-0.029995054006576538,
-0.08594825863838196,
-0.02189585380256176,
-0.06333231925964355,
0.11833591759204865,
0.11426369845867157,
-0.039379630237817764,
-0.0019962650258094072,
-0.066829614341259,
0.008030805736780167,
0.06895291060209274,
-0.0722922682762146,
-0.11671452969312668,
-0.13956612348556519,
-0.021938201040029526,
0.18244504928588867,
0.0074723707512021065,
-0.17017382383346558,
-0.01198898907750845,
0.018080584704875946,
-0.02611350454390049,
-0.10103931277990341,
0.08351483196020126,
0.08562871068716049,
-0.008722907863557339,
-0.018319901078939438,
-0.051638517528772354,
0.0033086210023611784,
0.10791590809822083,
-0.16463489830493927,
-0.09946959465742111
] |
null | null |
transformers
|
# Work In Progress
# How to use?
This model can only generate regular text.
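
A minimal usage sketch, assuming the checkpoint loads with the standard `transformers` causal-LM classes; the prompt and sampling settings below are illustrative, not prescribed by the card:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "bs-modeling-metadata/html-metadata-exp1-subexp2-1929863"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

# Plain-text prompt: this sub-experiment was trained without any metadata prefix.
inputs = tokenizer("Market power is", return_tensors="pt")
outputs = model.generate(
    **inputs,
    max_new_tokens=64,
    do_sample=True,
    top_p=0.95,
    pad_token_id=tokenizer.eos_token_id,  # GPT-2 defines no pad token
)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```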
# Training details
We continued the pre-training of [gpt2](https://huggingface.co/gpt2).
Dataset: [Natural_Questions_HTML_reduced_all](https://huggingface.co/datasets/SaulLu/Natural_Questions_HTML_reduced_all)
100% of the examples were just plain text.
Training example:
```
start up firms to succeed.[4] Firms like power companies, cable television companies and wireless communication companies with large start up costs fall within this category. A company wishing to enter such industries must have the financial ability to spend millions of dollars before starting operations and generating any revenue.[5] Similarly established firms also have a competitive advantage over new firms. An established firm threatened by a new competitor can lower prices to drive out the competition. Microsoft is a firm that has substantial pricing or market power due to technological superiority in its design and production processes.[4] Finally government created barriers to entry can be a source of market power. A prime example are patents granted to pharmaceutical companies. These patents give the drug companies a virtual monopoly in the protected product for the term of the patent.
Measurement[edit]
Concentration ratios are the most common measures of market power.[6] The four-firm concentration ratio measures the percentage of total industry output attributable to the top four companies. For monopolies the four firm ratio is 100 per cent while the ratio is zero for perfect competition.[7] The four firm concentration domestic (U.S) ratios for cigarettes is 93%; for automobiles, 84% and for beer, 85%.[8]
Another measure of concentration is the Herfindahl-Hirschman Index (HHI) which is calculated by "summing the squares of the percentage market shares of all participants in the market".[8] The HHI index for perfect competition is zero; for monopoly, 10,000.
U.S. courts almost never consider a firm to possess market power if it has a market share of less than 50 percent.[9]
Elasticity of demand[edit]
Market power is the ability to raise price above marginal cost (MC) and earn a positive profit.[10] The degree to which a firm can raise price (P) above marginal cost depends on the shape of the demand curve at the profit maximizing output.[10] That is, elasticity is the critical factor in determining market power. The relationship between market power and the price elasticity of demand (PED) can be summarized by the equation:
P M C = P E D 1 + P E D. {\displaystyle {\frac {P}{MC}}={\frac {PED}{1+PED}}.}
Note that PED will be negative, so the ratio is always greater than one. The higher the P/MC ratio, the more market power the firm possesses. As PED increases in magnitude, the P/MC ratio approaches one, and market power approaches zero.[11] The equation is derived from the monopolist pricing rule:
P − M C P = − 1 P E D. {\displaystyle {\frac {P-MC}{P}}=-{\frac {1}{PED}}.}
Nobel Memorial Prize[edit]
Jean Tirole was awarded the 2014 Nobel Memorial Prize in Economic Sciences for his analysis of market power and economic regulation.
See also[edit]
Bargaining power
Imperfect competition
Market concentration
Natural monopoly
Predatory pricing
Price discrimination
Dominance (economics)
References[edit]
Jump up ^ Vatiero Massimiliano (2010). "The Ordoliberal notion of market power: an institutionalist reassessment". European Competition Journal. 6 (3): 689–707. doi:10.5235/ecj.v6n3.689.
Jump up ^ Vatiero M. (2009), "An Institutionalist Explanation of Market Dominances". World Competition. Law and Economics Review, 32(2):221–226.
Jump up ^ If the power company raised rates the customer either pays the increase or does without power.
^ Jump up to: a b c d e Krugman & Wells, Microeconomics 2d ed. (Worth 2009)
Jump up ^ Often such natural monopolies will also have the benefit of government granted monopolies.
Jump up ^ Samuelson & Nordhaus, Microeconomics, 17th ed. (McGraw-Hill 2001) at 183–184.
Jump up ^ Samuelson & Nordhaus, Microeconomics, 17th ed. (McGraw-Hill 2001) at 183.
^ Jump up to: a b Samuelson & Nordhaus, Microeconomics, 17th ed. (McGraw-Hill 2001) at 184.
Jump up ^ J. Gregory Sidak & Hal J. Singer, Überregulation Without Economics: The World Trade Organization’s Decision in the U.S.-Mexico Arbitration on Telecommunications Services, General Agreement on Trade in Services, GATS, 57 FED. COMM. L.J. 1, 34 (2004), http://www.repository.law.indiana.edu/cgi/viewcontent.cgi?article=1388&context=fclj.
^ Jump up to: a b
```
|
{}
|
text-generation
|
bs-modeling-metadata/html-metadata-exp1-subexp2-1929863
|
[
"transformers",
"pytorch",
"gpt2",
"text-generation",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #gpt2 #text-generation #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
# Work In Progress
# How to use?
This model can only generate regular text.
# Training details
We continued the pre-training of gpt2.
Dataset:Natural_Questions_HTML_reduced_all
100% of the examples were just plain text.
Training example:
|
[
"# Work In Progress",
"# How to use?\n\nThis model can only generate regular text.",
"# Training details\n\nWe continued the pre-training of gpt2.\n\nDataset:Natural_Questions_HTML_reduced_all\n\n100% of the examples were just plain text.\n\nTraining example:"
] |
[
"TAGS\n#transformers #pytorch #gpt2 #text-generation #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"# Work In Progress",
"# How to use?\n\nThis model can only generate regular text.",
"# Training details\n\nWe continued the pre-training of gpt2.\n\nDataset:Natural_Questions_HTML_reduced_all\n\n100% of the examples were just plain text.\n\nTraining example:"
] |
[
47,
5,
13,
42
] |
[
"passage: TAGS\n#transformers #pytorch #gpt2 #text-generation #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n# Work In Progress# How to use?\n\nThis model can only generate regular text.# Training details\n\nWe continued the pre-training of gpt2.\n\nDataset:Natural_Questions_HTML_reduced_all\n\n100% of the examples were just plain text.\n\nTraining example:"
] |
[
-0.04571015387773514,
0.031126508489251137,
-0.0008552896906621754,
0.04394849017262459,
0.15504036843776703,
0.049813203513622284,
0.10480301827192307,
0.15874680876731873,
-0.0682179406285286,
-0.01844627410173416,
0.20731712877750397,
0.14596936106681824,
0.0014297551242634654,
0.12699636816978455,
-0.01155886147171259,
-0.26702430844306946,
0.0121035510674119,
0.06026294082403183,
-0.03263358399271965,
0.14422614872455597,
0.11421510577201843,
-0.04459042102098465,
0.06772457808256149,
0.014196579344570637,
-0.19052830338478088,
-0.02819206565618515,
0.0013771939557045698,
-0.11353974044322968,
0.13788802921772003,
0.04534050077199936,
0.05291064828634262,
0.03012349084019661,
0.032638970762491226,
-0.14688533544540405,
0.023033831268548965,
0.040734000504016876,
0.0035793851129710674,
0.08532848954200745,
0.03601066768169403,
-0.04617581516504288,
0.13308288156986237,
0.010796324349939823,
0.015628565102815628,
0.06537258625030518,
-0.1432255506515503,
-0.030286027118563652,
-0.023694714531302452,
0.018506797030568123,
0.10642971098423004,
0.12725049257278442,
-0.041299063712358475,
0.09096068888902664,
-0.117939293384552,
0.06917493045330048,
0.08729390799999237,
-0.24200434982776642,
-0.05297733470797539,
0.1302012801170349,
0.04072583094239235,
0.03194691240787506,
-0.02353709191083908,
0.09436699002981186,
0.07075810432434082,
0.0703377053141594,
-0.020763617008924484,
-0.012106793001294136,
-0.10580462962388992,
0.011830233037471771,
-0.10277558118104935,
-0.06551472097635269,
0.23552711308002472,
-0.0471552349627018,
-0.011927570216357708,
-0.14284755289554596,
-0.053093746304512024,
-0.05698907747864723,
-0.014942294918000698,
-0.022339297458529472,
-0.07460756599903107,
0.06616563349962234,
-0.07797776162624359,
-0.13373970985412598,
-0.11933283507823944,
-0.136724591255188,
-0.04859281703829765,
0.03819592669606209,
0.027470579370856285,
0.01884312927722931,
-0.10583847761154175,
0.19874311983585358,
-0.021547790616750717,
-0.046733543276786804,
0.01760001853108406,
-0.10206234455108643,
0.029338890686631203,
-0.0023937446530908346,
-0.08264053612947464,
-0.08695266395807266,
0.0536954328417778,
0.09305441379547119,
0.0459553487598896,
0.02669467404484749,
0.023308537900447845,
0.057091180235147476,
-0.009786012582480907,
0.08992438018321991,
-0.01566767878830433,
0.0288259144872427,
0.053636182099580765,
-0.006574684288352728,
-0.059812892228364944,
-0.04005177319049835,
-0.132467120885849,
-0.06144978106021881,
0.051535654813051224,
0.085209921002388,
0.02166755497455597,
0.10827389359474182,
-0.03394864499568939,
-0.028494473546743393,
-0.02283421903848648,
-0.07321659475564957,
-0.05466824769973755,
-0.03043181821703911,
-0.019578734412789345,
-0.003585210768505931,
0.06242749094963074,
0.03711504116654396,
-0.09180481731891632,
-0.040296752005815506,
-0.03555936738848686,
-0.019209425896406174,
-0.08429685235023499,
-0.038504708558321,
-0.030828198418021202,
-0.07985589653253555,
-0.012556750327348709,
-0.0999288558959961,
-0.25864797830581665,
-0.02159106358885765,
0.038783952593803406,
-0.03245863690972328,
-0.057004135102033615,
-0.08526645600795746,
0.0059660994447767735,
0.020690595731139183,
-0.035660117864608765,
0.09862879663705826,
-0.05192453786730766,
0.09117316454648972,
-0.00018181510677095503,
0.07077854871749878,
-0.02355997823178768,
0.04227418452501297,
-0.10462027043104172,
-0.021434316411614418,
-0.018119029700756073,
0.11603325605392456,
0.01015302911400795,
0.04715178161859512,
-0.09374196827411652,
-0.06422801315784454,
-0.05104469135403633,
0.05643845349550247,
0.022516457363963127,
0.2119714617729187,
-0.1111694872379303,
-0.04946903884410858,
0.21631236374378204,
-0.03206963464617729,
-0.10239926725625992,
0.11339321732521057,
-0.038102272897958755,
0.2182629406452179,
0.1175459772348404,
0.13879790902137756,
-0.005259703379124403,
0.03019600175321102,
0.11883898824453354,
0.07924516499042511,
-0.040096770972013474,
0.03583608567714691,
-0.0008215743000619113,
-0.058268360793590546,
-0.15000249445438385,
0.04062475636601448,
-0.008015180937945843,
0.039690278470516205,
-0.10011406987905502,
-0.08049627393484116,
-0.03741028159856796,
-0.04664715379476547,
0.10779903829097748,
0.02099856361746788,
0.13217689096927643,
-0.03105149045586586,
-0.0937046930193901,
-0.046753671020269394,
0.04261614754796028,
-0.08872060477733612,
0.00737627362832427,
-0.06405455619096756,
0.05359664186835289,
-0.08498810231685638,
0.035808369517326355,
-0.15625910460948944,
-0.09967372566461563,
-0.0158808883279562,
0.16225557029247284,
0.03835642710328102,
0.08755102008581161,
0.08193615078926086,
0.001029122737236321,
-0.03984272480010986,
0.008350759744644165,
0.020525533705949783,
-0.02065301313996315,
-0.11337128281593323,
-0.10820966958999634,
0.020668694749474525,
-0.0504344217479229,
0.08031506836414337,
-0.1633160263299942,
0.047672078013420105,
-0.046806059777736664,
0.06382360309362411,
-0.052578844130039215,
-0.006592017598450184,
0.03022095188498497,
-0.0061895051039755344,
-0.035805054008960724,
-0.07143095135688782,
0.10813324898481369,
0.001164534711278975,
-0.08440904319286346,
0.08578941971063614,
-0.05366094410419464,
0.02537653036415577,
0.13335414230823517,
-0.14207272231578827,
-0.1015554741024971,
0.018420644104480743,
-0.06747747957706451,
0.012051169760525227,
-0.08231914788484573,
-0.023359015583992004,
0.26542025804519653,
-0.019597306847572327,
0.13428431749343872,
-0.04260177165269852,
-0.028612196445465088,
-0.01748599112033844,
-0.05870562791824341,
0.07537513226270676,
0.0444849357008934,
0.11172368377447128,
-0.10535190999507904,
0.10463089495897293,
-0.016137687489390373,
-0.005825631320476532,
0.22866342961788177,
0.019870422780513763,
-0.0565841980278492,
0.06621658802032471,
0.02723117172718048,
-0.022980056703090668,
0.023010872304439545,
-0.20078295469284058,
-0.05100920796394348,
0.06576836109161377,
0.047599200159311295,
0.11258723586797714,
-0.17269966006278992,
-0.04486191272735596,
0.004609846975654364,
-0.060140252113342285,
-0.036547474563121796,
0.06846974045038223,
-0.047613050788640976,
0.1102096438407898,
0.012415706180036068,
0.03375055640935898,
0.09227815270423889,
0.005527970846742392,
-0.13276369869709015,
0.17690762877464294,
-0.05104620382189751,
-0.23635804653167725,
-0.10587559640407562,
0.03813637048006058,
-0.026872677728533745,
0.08350813388824463,
0.07148070633411407,
-0.1576618105173111,
-0.037216365337371826,
0.00056970224250108,
0.06521885842084885,
-0.05147072672843933,
0.014689031057059765,
-0.0415620319545269,
-0.006714287213981152,
-0.04592747241258621,
-0.13938355445861816,
-0.016857240349054337,
-0.025502221658825874,
-0.1089872494339943,
0.08296286314725876,
-0.15932141244411469,
0.05923888087272644,
0.1858452707529068,
0.018334273248910904,
0.07109799981117249,
-0.04970511794090271,
0.24267831444740295,
-0.10097150504589081,
0.003405011724680662,
0.13929663598537445,
0.002505362033843994,
-0.0011534433579072356,
-0.001333496067672968,
0.003555690636858344,
-0.09768082201480865,
0.04355796426534653,
0.00472666509449482,
-0.08543924987316132,
-0.25863534212112427,
-0.07382649183273315,
-0.07444380223751068,
0.031937796622514725,
0.08205016702413559,
0.04431798309087753,
0.1422395557165146,
0.11483392864465714,
-0.02878953516483307,
0.0764632299542427,
0.023227784782648087,
0.08142830431461334,
0.10484112054109573,
-0.002395468298345804,
0.13617193698883057,
-0.05432910844683647,
-0.09557034075260162,
0.0573989674448967,
-0.0333188995718956,
0.2135137915611267,
-0.01094609685242176,
0.03362942487001419,
0.025347121059894562,
0.06492848694324493,
0.08571568131446838,
0.15737195312976837,
0.0075992401689291,
-0.025980085134506226,
-0.051716506481170654,
-0.027141859754920006,
-0.09214063733816147,
0.0580456480383873,
-0.0169978104531765,
-0.1142992451786995,
-0.0798707902431488,
-0.030268758535385132,
0.0698084905743599,
0.13336403667926788,
0.09764795005321503,
-0.2508530616760254,
-0.0706804096698761,
0.00834178738296032,
-0.006123896222561598,
-0.13474635779857635,
0.0671028420329094,
0.028240952640771866,
-0.1907995045185089,
-0.030369814485311508,
-0.050309255719184875,
0.14090339839458466,
-0.019104959443211555,
0.04276876151561737,
-0.041344109922647476,
-0.05429399386048317,
-0.0420401357114315,
0.16466686129570007,
-0.29183828830718994,
0.18036648631095886,
0.011570445261895657,
0.08300367742776871,
-0.12108194828033447,
0.0032606779132038355,
0.03493044152855873,
0.09171129018068314,
0.14699237048625946,
-0.010457534343004227,
0.03760668635368347,
-0.02456248365342617,
-0.09257420897483826,
0.07912047207355499,
0.06858392059803009,
-0.04399656876921654,
0.03858282044529915,
-0.005845424719154835,
0.04173408821225166,
0.04040961340069771,
-0.13731631636619568,
-0.1368895173072815,
-0.11980151385068893,
0.05818244442343712,
-0.04978107288479805,
0.10502417385578156,
-0.016641464084386826,
-0.06419333815574646,
-0.01933096908032894,
0.28953632712364197,
0.05111270397901535,
-0.10195091366767883,
-0.1238890066742897,
0.017556022852659225,
0.1046115979552269,
-0.007875163108110428,
0.0627254992723465,
-0.01223880983889103,
0.020432284101843834,
-0.008633344434201717,
-0.09419626742601395,
0.0980309471487999,
-0.10859785974025726,
-0.16368195414543152,
-0.034506648778915405,
0.12517675757408142,
0.01120060309767723,
0.03477786108851433,
0.07165290415287018,
0.006137725431472063,
-0.0948013961315155,
-0.15606752038002014,
0.051148612052202225,
0.004366617649793625,
0.06054490804672241,
0.016397912055253983,
-0.035877205431461334,
0.10002938657999039,
-0.04791627079248428,
-0.0768669843673706,
0.28639814257621765,
0.13071812689304352,
-0.07075423747301102,
0.09105879813432693,
0.058803558349609375,
-0.0873216763138771,
-0.23278078436851501,
-0.010586747899651527,
-0.006053881254047155,
0.010389232076704502,
-0.10185591131448746,
-0.2072819024324417,
0.008541266433894634,
-0.0008604038157500327,
-0.0033508678898215294,
0.05622677505016327,
-0.33203789591789246,
-0.10413237661123276,
0.13961030542850494,
0.06133129447698593,
0.2242690771818161,
-0.1105777844786644,
-0.03030765801668167,
-0.04556239768862724,
-0.13027827441692352,
0.12846697866916656,
-0.09544350206851959,
0.11925477534532547,
-0.01302627008408308,
0.11511676013469696,
0.03537474572658539,
-0.07944546639919281,
0.144987091422081,
-0.010765565559267998,
-0.008199283853173256,
-0.05309978127479553,
0.027116239070892334,
0.09771732240915298,
0.0255692508071661,
0.09449061006307602,
0.0006073294207453728,
0.06208193674683571,
-0.168474942445755,
-0.09537944942712784,
-0.07287416607141495,
0.04060622304677963,
0.02006204053759575,
-0.1294715255498886,
-0.009225426241755486,
-0.05105502903461456,
0.03775068372488022,
-0.004999670200049877,
0.00580682372674346,
-0.10246144235134125,
0.10467270016670227,
0.008032765239477158,
0.1307564377784729,
-0.03374418616294861,
-0.03794645145535469,
0.005704648792743683,
-0.048579949885606766,
0.08087196201086044,
-0.16883325576782227,
0.027967605739831924,
0.10044284909963608,
0.0013438253663480282,
0.10134456306695938,
0.11757508665323257,
0.013464299030601978,
0.023873722180724144,
0.0552312396466732,
-0.18664050102233887,
-0.04769942909479141,
-0.06626972556114197,
-0.07537411153316498,
-0.0006359985563904047,
0.055303674191236496,
0.07625818997621536,
-0.05925546586513519,
-0.044734735041856766,
-0.0056620631366968155,
-0.009261669591069221,
-0.08276107162237167,
0.14742618799209595,
0.024261735379695892,
0.017427688464522362,
-0.10492827743291855,
0.028938155621290207,
0.009135624393820763,
-0.034734297543764114,
0.05321768671274185,
0.024975650012493134,
-0.12263449281454086,
-0.09223920851945877,
0.07796375453472137,
0.16771802306175232,
-0.08998585492372513,
-0.035084888339042664,
-0.10015051811933517,
-0.08032883703708649,
0.04781336337327957,
0.007181140594184399,
0.06437071412801743,
0.05199826881289482,
-0.08407386392354965,
0.00296915415674448,
-0.13555586338043213,
-0.005803146865218878,
0.10669328272342682,
0.009404432028532028,
-0.05391650274395943,
0.1807999461889267,
-0.003491924377158284,
0.07335851341485977,
-0.08978581428527832,
-0.0370611697435379,
-0.10644277185201645,
0.08103101700544357,
-0.14750626683235168,
-0.05112931877374649,
-0.06535235047340393,
-0.06111215427517891,
-0.0039771562442183495,
-0.027426274493336678,
-0.01086332369595766,
-0.012961512431502342,
-0.09847843647003174,
0.007467328105121851,
-0.039443906396627426,
0.010853325016796589,
-0.04079928994178772,
0.0202726311981678,
0.0550617091357708,
-0.06268738955259323,
0.1167730912566185,
0.09182123094797134,
-0.04792163148522377,
0.061625488102436066,
-0.1341770440340042,
0.005423842463642359,
0.05412967503070831,
-0.026027627289295197,
-0.03746722638607025,
-0.011482927948236465,
0.041066527366638184,
-0.001609697355888784,
0.037995029240846634,
0.08150427043437958,
0.06568189710378647,
-0.1165214255452156,
0.027599114924669266,
-0.027192987501621246,
-0.060779206454753876,
-0.0731472447514534,
0.038941338658332825,
-0.006192103028297424,
0.12413564324378967,
0.10601004958152771,
-0.0768299475312233,
0.08370030671358109,
-0.07627614587545395,
0.018554240465164185,
-0.022720670327544212,
-0.053925883024930954,
-0.016420450061559677,
-0.08187765628099442,
0.06999850273132324,
-0.014429139904677868,
0.24994640052318573,
0.015962889418005943,
0.034155890345573425,
-0.021755803376436234,
0.024872208014130592,
0.06733065098524094,
-0.0206169281154871,
0.24493734538555145,
0.06260111182928085,
0.02211538888514042,
-0.05228559300303459,
0.09524737298488617,
0.02991286851465702,
0.01246440690010786,
0.17017675936222076,
-0.02099202387034893,
-0.010377472266554832,
0.11622951179742813,
-0.08050751686096191,
-0.06191404163837433,
-0.09821359068155289,
-0.03451629728078842,
0.04263506829738617,
0.05071943998336792,
-0.062262993305921555,
-0.00816527009010315,
0.15266641974449158,
-0.08993540704250336,
0.030418245121836662,
-0.03848550096154213,
-0.09985649585723877,
-0.14282864332199097,
-0.09591031074523926,
-0.04726814106106758,
-0.12411707639694214,
0.029111912474036217,
-0.10745833069086075,
0.004675357602536678,
0.03683966398239136,
0.05320456251502037,
-0.04788228124380112,
0.16395875811576843,
0.022290894761681557,
-0.10876459628343582,
0.0676335021853447,
-0.0600680448114872,
0.08092240244150162,
-0.1089569702744484,
-0.02796044945716858,
-0.06864411383867264,
-0.00019527098629623652,
0.04811754450201988,
0.008503993973135948,
-0.05147166550159454,
-0.0006365832523442805,
-0.04988280311226845,
-0.027749456465244293,
-0.06800936907529831,
0.07986383885145187,
0.05406643822789192,
0.13042451441287994,
0.007566430605947971,
-0.04858599230647087,
0.01601790450513363,
0.28083568811416626,
-0.07637922465801239,
-0.19767387211322784,
-0.1543778032064438,
0.2521328330039978,
0.02224528044462204,
0.00728145707398653,
0.014690801501274109,
-0.04123038798570633,
0.02996525913476944,
0.32060691714286804,
0.26256808638572693,
-0.06538024544715881,
-0.011328311637043953,
0.023531440645456314,
0.0011067744344472885,
0.08076684176921844,
0.10397224873304367,
0.0596902035176754,
0.1696273684501648,
-0.1170850619673729,
-0.03760433569550514,
-0.01707853563129902,
-0.032810911536216736,
-0.07168977707624435,
0.09166553616523743,
0.08210558444261551,
-0.04268466681241989,
-0.02739763632416725,
0.12090613692998886,
-0.13689208030700684,
0.025758732110261917,
-0.06629146635532379,
-0.0774931013584137,
-0.10811324417591095,
0.02853504754602909,
-0.015737012028694153,
-0.030850324779748917,
0.09087392687797546,
-0.026688672602176666,
-0.013327458873391151,
0.08537052571773529,
0.022731056436896324,
-0.1186041459441185,
0.013323287479579449,
0.08814572542905807,
0.011137763038277626,
0.10806011408567429,
0.0031315942760556936,
0.09595684707164764,
0.05684081092476845,
-0.013006236404180527,
-0.07169108092784882,
0.08017507940530777,
0.0009511671378277242,
0.026461802423000336,
0.06120004877448082,
0.05286932364106178,
0.004020386375486851,
-0.09075475484132767,
0.01252367440611124,
-0.16019639372825623,
0.04305659979581833,
0.012037698179483414,
-0.007972550578415394,
-0.0770919993519783,
0.0016704470617696643,
-0.07721739262342453,
0.12285615503787994,
0.12912113964557648,
-0.04295601695775986,
0.015978598967194557,
-0.07322704046964645,
0.06878259778022766,
0.01979692094027996,
-0.01979057677090168,
-0.09093066304922104,
-0.16696248948574066,
-0.06042615696787834,
0.10913237929344177,
-0.03244129195809364,
-0.19230502843856812,
-0.023158524185419083,
-0.041742466390132904,
-0.02466615103185177,
-0.07534359395503998,
0.10755181312561035,
0.0920075848698616,
0.005429149605333805,
-0.0030527091585099697,
0.02840290777385235,
-0.017102086916565895,
0.07948600500822067,
-0.13891702890396118,
-0.09696359187364578
] |
null | null |
transformers
|
# Work In Progress
# How to use?
To generate text with HTML, the sentence must start with ` htmlOn |||` (note the space at the beginning 😉). To generate normal text, you don't need to add anything.
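
A minimal sketch of that usage, assuming the checkpoint loads with the standard `transformers` causal-LM classes; the prompt mirrors the widget example and the sampling settings are illustrative:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "bs-modeling-metadata/html-metadata-exp1-subexp3-1898197"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

# Keep the leading space before "htmlOn"; drop the prefix entirely for plain text.
prompt = " htmlOn ||| <h1"
inputs = tokenizer(prompt, return_tensors="pt")
outputs = model.generate(
    **inputs,
    max_new_tokens=128,
    do_sample=True,
    top_p=0.95,
    pad_token_id=tokenizer.eos_token_id,  # GPT-2 defines no pad token
)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```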
# Training details
We continued the pre-training of [gpt2](https://huggingface.co/gpt2).
Dataset: [Natural_Questions_HTML_reduced_all](https://huggingface.co/datasets/SaulLu/Natural_Questions_HTML_reduced_all)
50% of the examples in the training data contained `h1`, `h2`, ..., `h6` and `p` HTML tags with only their `id` and `class` attributes. 50% of the examples were just plain text.
Training example with metadata:
```
htmlOn ||| <h1 id:firstHeading class:firstHeading>Market power</h1>
From Wikipedia, the free encyclopedia
Jump to: navigation, search
Competition law
Basic concepts
History of competition law
Monopoly
Coercive monopoly
Natural monopoly
Barriers to entry
Herfindahl–Hirschman Index
Market concentration
Market power
SSNIP test
Relevant market
Merger control
Anti-competitive practices
Monopolization
Collusion
Formation of cartels
Price fixing
Bid rigging
Product bundling and tying
Refusal to deal
Group boycott
Essential facilities
Exclusive dealing
Dividing territories
Conscious parallelism
Predatory pricing
Misuse of patents and copyrights
Enforcement authorities and organizations
International Competition Network
List of competition regulators
v
t
e
<p>In economics and particularly in industrial organization, market power is the ability of a firm to profitably raise the market price of a good or service over marginal cost. In perfectly competitive markets, market participants have no market power. A firm with total market power can raise prices without losing any customers to competitors. Market participants that have market power are therefore sometimes referred to as "price makers" or "price setters", while those without are sometimes called "price takers". Significant market power occurs when prices exceed marginal cost and long run average cost, so the firm makes profit.</p>
<p>A firm with market power has the ability to individually affect either the total quantity or the prevailing price in the market. Price makers face a downward-sloping demand curve, such that price increases lead to a lower quantity demanded. The decrease in supply as a result of the exercise of market power creates an economic deadweight loss which is often viewed as socially undesirable. As a result, many countries have anti-trust or other legislation intended to limit the ability of firms to accrue market power. Such legislation often regulates mergers and sometimes introduces a judicial power to compel divestiture.</p>
<p>A firm usually has market power by virtue of controlling a large portion of the market. In extreme cases—monopoly and monopsony—the firm controls the entire market. However, market size alone is not the only indicator of market power. Highly concentrated markets may be contestable if there are no barriers to entry or exit, limiting the incumbent firm's ability to raise its price above competitive levels.</p>
<p>Market power gives firms the ability to engage in unilateral anti-competitive behavior.[1] Some of the behaviours that firms with market power are accused of engaging in include predatory pricing, product tying, and creation of overcapacity or other barriers to entry. If no individual participant in the market has significant market power, then anti-competitive behavior can take place only through collusion, or the exercise of a group of participants' collective market power.</p>
<p>The Lerner index and Herfindahl index may be used to measure market power.</p>
<p></p><h2>Contents</h2>
[hide]
1 Oligopoly
2 Monopoly power
3 Source
4 Measurement
5 Elasticity of demand
6 Nobel Memorial Prize
7 See also
8 References
9 Further references
<p></p><h2>Oligopoly[edit]</h2>
<p>When several firms control a significant share of market sales, the resulting market structure is called an oligopoly or oligopsony. An oligopoly may engage in collusion, either tacit or overt, and thereby exercise market power. A group of firms that explicitly agree to affect market price or output is called a cartel.</p>
<h2>Monopoly power[edit]</h2>
<p>Monopoly power is an example of market failure which occurs when one or more of the participants has the ability to influence the price or other outcomes in some general or specialized market. The most commonly discussed form of market power is that of a monopoly, but other forms such as monopsony, and more moderate versions of these two extremes, exist.</p>
<p>A well-known example of monopolistic market power is Microsoft's market share in PC operating systems. The United States v. Microsoft case dealt with an allegation that Microsoft illegally exercised its market power by bundling its web browser with its operating system. In this respect, the notion of dominance and dominant position in EU Antitrust Law is a strictly related aspect.[2]</p>
<h2>Source[edit]</h2>
<p>A monopoly can raise prices and retain customers because the monopoly has no competitors. If a customer has no other place to go to obtain the goods or services, they either pay the increased price or do without.[3] Thus the key to market power is to preclude competition through high barriers of entry. Barriers to entry that are significant sources
```
|
{"widget": [{"text": " htmlOn ||| <h1"}]}
|
text-generation
|
bs-modeling-metadata/html-metadata-exp1-subexp3-1898197
|
[
"transformers",
"pytorch",
"gpt2",
"text-generation",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #gpt2 #text-generation #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
# Work In Progress
# How to use?
To generate text with HTML, the sentence must start with ' htmlOn |||' (note the space at the beginning ). To generate normal text, you don't need to add anything.
# Training details
We continued the pre-training of gpt2.
Dataset:Natural_Questions_HTML_reduced_all
50% of the examples in the training data contained 'h1', 'h2', ..., 'h6' and 'p' HTML tags with only their 'id' and 'class' attributes. 50% of the examples were just plain text.
Training example with metadata:
|
[
"# Work In Progress",
"# How to use?\n\nTo generate text with HTML, the sentence must start with ' htmlOn |||' (note the space at the beginning ). To generate normal text, you don't need to add anything.",
"# Training details\n\nWe continued the pre-training of gpt2.\n\nDataset:Natural_Questions_HTML_reduced_all\n50% of the examples in the training data contained 'h1', 'h2', ..., 'h6' and 'p' HTML tags with only their 'id' and 'class' attributes. 50% of the examples were just plain text.\n\nTraining example with metadata:"
] |
[
"TAGS\n#transformers #pytorch #gpt2 #text-generation #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"# Work In Progress",
"# How to use?\n\nTo generate text with HTML, the sentence must start with ' htmlOn |||' (note the space at the beginning ). To generate normal text, you don't need to add anything.",
"# Training details\n\nWe continued the pre-training of gpt2.\n\nDataset:Natural_Questions_HTML_reduced_all\n50% of the examples in the training data contained 'h1', 'h2', ..., 'h6' and 'p' HTML tags with only their 'id' and 'class' attributes. 50% of the examples were just plain text.\n\nTraining example with metadata:"
] |
[
47,
5,
48,
91
] |
[
"passage: TAGS\n#transformers #pytorch #gpt2 #text-generation #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n# Work In Progress# How to use?\n\nTo generate text with HTML, the sentence must start with ' htmlOn |||' (note the space at the beginning ). To generate normal text, you don't need to add anything.# Training details\n\nWe continued the pre-training of gpt2.\n\nDataset:Natural_Questions_HTML_reduced_all\n50% of the examples in the training data contained 'h1', 'h2', ..., 'h6' and 'p' HTML tags with only their 'id' and 'class' attributes. 50% of the examples were just plain text.\n\nTraining example with metadata:"
] |
[
-0.05037961155176163,
0.05559840053319931,
-0.0006311305332928896,
0.023181993514299393,
0.1912783533334732,
0.054096974432468414,
0.07837308198213577,
0.152407705783844,
-0.024461261928081512,
-0.034051500260829926,
0.16964423656463623,
0.08576386421918869,
-0.0018731668824329972,
0.08069802820682526,
-0.04895714670419693,
-0.1869286298751831,
-0.0204708743840456,
0.07026976346969604,
-0.05517420545220375,
0.09865067154169083,
0.08015436679124832,
-0.09214577078819275,
0.07608074694871902,
0.004544184077531099,
-0.17570137977600098,
0.021796952933073044,
0.014408677816390991,
-0.0369504950940609,
0.12627506256103516,
-0.002673159586265683,
0.06485455483198166,
0.035129062831401825,
0.057057105004787445,
-0.1774819791316986,
0.024688439443707466,
0.04802466556429863,
0.013426434248685837,
0.06948865950107574,
0.030498165637254715,
-0.052402839064598083,
0.027985841035842896,
-0.018033521249890327,
-0.007184267044067383,
0.09420330077409744,
-0.11236096173524857,
-0.09824423491954803,
-0.05020548030734062,
-0.020023159682750702,
0.1173207014799118,
0.10787861049175262,
-0.02285614237189293,
0.008592997677624226,
-0.12912003695964813,
0.048929374665021896,
0.23849964141845703,
-0.17967642843723297,
-0.011850576847791672,
0.06809975206851959,
0.0026868917047977448,
0.003381277434527874,
-0.05536121129989624,
0.060299135744571686,
0.042268674820661545,
0.04017151892185211,
0.003772841999307275,
-0.04101313278079033,
-0.13311445713043213,
0.03196259215474129,
-0.06175805255770683,
-0.03864601254463196,
0.31422919034957886,
0.01727849431335926,
-0.008395574986934662,
-0.07001341879367828,
-0.03220929205417633,
-0.06273029744625092,
-0.0356900654733181,
0.02589176595211029,
-0.03263724222779274,
0.02954559028148651,
-0.026678310707211494,
-0.13840307295322418,
-0.15475106239318848,
-0.09278115630149841,
0.02717374451458454,
0.03089098073542118,
0.030176622793078423,
0.005088488105684519,
-0.0786842554807663,
0.17316420376300812,
0.026978883892297745,
-0.05935822054743767,
0.02986585721373558,
-0.04764567315578461,
0.010982206091284752,
0.002152678556740284,
-0.13940483331680298,
-0.1683533638715744,
0.07408803701400757,
0.07376651465892792,
-0.021563824266195297,
0.03966520354151726,
0.017771156504750252,
0.054243192076683044,
-0.07741230726242065,
0.06709129363298416,
-0.010703971609473228,
0.032431747764348984,
0.06977880746126175,
-0.023016298189759254,
-0.07110563665628433,
-0.028208788484334946,
-0.14677347242832184,
-0.07006043195724487,
0.0397568941116333,
0.05480237677693367,
-0.026022283360362053,
0.09553215652704239,
-0.04378542676568031,
-0.010608211159706116,
-0.07075686007738113,
-0.13192963600158691,
-0.0354602187871933,
0.007917121984064579,
0.01533531304448843,
-0.03277863562107086,
0.08094584941864014,
0.025237975642085075,
-0.09972713142633438,
-0.030912546440958977,
-0.0298159159719944,
0.03418542817234993,
-0.09262912720441818,
-0.010648886673152447,
-0.019353313371539116,
-0.06020619720220566,
-0.027731815353035927,
-0.08349897712469101,
-0.19331304728984833,
-0.01610388420522213,
0.09537437558174133,
0.011425905860960484,
0.010117515921592712,
-0.05581164360046387,
-0.00993003137409687,
-0.008333450183272362,
-0.015079664997756481,
0.09175930172204971,
-0.04038940742611885,
0.09102383255958557,
-0.010335910134017467,
0.07404135167598724,
-0.09661449491977692,
0.07807863503694534,
-0.09874715656042099,
-0.00855162926018238,
-0.08931449800729752,
0.08868645131587982,
0.07491947710514069,
0.059394411742687225,
-0.055407457053661346,
-0.015196333639323711,
-0.0067992848344147205,
0.02076096460223198,
0.03032713197171688,
0.1656230390071869,
-0.08159308135509491,
-0.04672331362962723,
0.15916043519973755,
0.010072167962789536,
-0.0805722177028656,
0.07456260174512863,
-0.03531849756836891,
0.18107740581035614,
0.09210410714149475,
0.15665043890476227,
-0.005485346540808678,
-0.02647331915795803,
0.1294591724872589,
0.06892170757055283,
-0.023651933297514915,
-0.01551580149680376,
-0.014109847135841846,
-0.08079767227172852,
-0.13147279620170593,
0.055299144238233566,
-0.06157013401389122,
0.059188198298215866,
-0.05372443050146103,
-0.07604356855154037,
-0.027019357308745384,
-0.05396446958184242,
0.11132292449474335,
0.0730879008769989,
0.0982232317328453,
-0.0077613298781216145,
-0.07933522015810013,
-0.0197038222104311,
0.031277019530534744,
-0.1292993277311325,
0.049484983086586,
-0.014288350008428097,
0.09357766062021255,
-0.04293154180049896,
0.0068368311040103436,
-0.17239849269390106,
-0.09314431250095367,
-0.029477039352059364,
0.14652474224567413,
0.04612391069531441,
0.06698230654001236,
0.04369032382965088,
-0.00743126031011343,
-0.08247922360897064,
-0.0005887110019102693,
-0.01144985668361187,
-0.007770818192511797,
-0.08478280156850815,
-0.13815352320671082,
-0.030804287642240524,
-0.022225014865398407,
0.18017815053462982,
-0.14075526595115662,
0.0439370721578598,
-0.02206275798380375,
0.11726064234972,
-0.009397836402058601,
-0.014335918240249157,
0.007140832021832466,
-0.0031903411727398634,
-0.010184776969254017,
-0.03592144697904587,
0.09625041484832764,
0.008880789391696453,
-0.09279832243919373,
0.003921589348465204,
-0.022053712978959084,
-0.07049746066331863,
0.1084398701786995,
-0.20677492022514343,
-0.10262289643287659,
-0.02807915210723877,
-0.05294353887438774,
0.007407168857753277,
-0.09653978794813156,
-0.033132582902908325,
0.20410257577896118,
-0.017727194353938103,
0.14229780435562134,
-0.038741420954465866,
-0.07990176230669022,
-0.031660500913858414,
-0.058100249618291855,
0.062225840985774994,
0.009562053717672825,
0.07231806218624115,
-0.06750184297561646,
0.08855041861534119,
0.012357581406831741,
-0.08395794779062271,
0.23017895221710205,
-0.014303915202617645,
-0.07291793078184128,
0.05995848402380943,
0.04154326021671295,
-0.052160102874040604,
0.05260568857192993,
-0.23186498880386353,
-0.05137467384338379,
0.05007457360625267,
0.013819406740367413,
0.08040221035480499,
-0.19354960322380066,
-0.003940775990486145,
-0.049569472670555115,
-0.06545600295066833,
0.023409685119986534,
0.02563590742647648,
-0.002273761434480548,
0.09140536189079285,
0.05090222880244255,
0.03672214224934578,
0.10732059925794601,
-0.012025467120110989,
-0.145280122756958,
0.16795262694358826,
-0.041143350303173065,
-0.19671975076198578,
-0.05756871774792671,
-0.03341729938983917,
-0.06751969456672668,
0.04597833752632141,
0.08748982101678848,
-0.16033896803855896,
-0.03168352693319321,
0.012318200431764126,
-0.010991642251610756,
-0.009130039252340794,
0.020600929856300354,
-0.0484427772462368,
0.017985709011554718,
0.0013741075526922941,
-0.08597449213266373,
-0.008586145006120205,
-0.022438371554017067,
-0.04674537852406502,
0.08174848556518555,
-0.0986097976565361,
0.07370080798864365,
0.17896564304828644,
-0.053863849490880966,
0.10346197336912155,
-0.024313796311616898,
0.1669130027294159,
-0.07692159712314606,
-0.00007654365617781878,
0.11523096263408661,
-0.016021128743886948,
0.03668169304728508,
0.09914930909872055,
0.02987593598663807,
-0.05963229760527611,
0.04419030249118805,
0.052058637142181396,
-0.03688140958547592,
-0.2456197440624237,
-0.043383363634347916,
-0.10338026285171509,
-0.07611531019210815,
0.0641123577952385,
0.037716638296842575,
0.1035647839307785,
0.0732884630560875,
-0.0517234280705452,
0.06187645345926285,
0.043720077723264694,
0.10543154180049896,
0.11376499384641647,
0.02596289850771427,
0.07600273936986923,
-0.019084185361862183,
-0.06598015129566193,
0.0055470275692641735,
0.013431982137262821,
0.19633065164089203,
-0.05292662978172302,
0.014946362003684044,
0.048363830894231796,
0.07984928786754608,
0.004905360285192728,
0.10342016071081161,
-0.02230067551136017,
-0.009339327923953533,
-0.013213072903454304,
-0.03415605425834656,
-0.022844534367322922,
0.05752263590693474,
-0.026346219703555107,
-0.1010013297200203,
-0.11784744262695312,
-0.03145745396614075,
0.07099020481109619,
0.19721846282482147,
0.12608879804611206,
-0.21995475888252258,
-0.0192091204226017,
0.0033487766049802303,
-0.03169765695929527,
-0.131786048412323,
0.028079252690076828,
0.02400539256632328,
-0.15132412314414978,
0.040114521980285645,
-0.03521190583705902,
0.11128265410661697,
0.009484123438596725,
0.04400797560811043,
0.029611509293317795,
-0.030561186373233795,
-0.01655948907136917,
0.14575855433940887,
-0.29492199420928955,
0.1386677324771881,
0.042990006506443024,
0.06316180527210236,
-0.12657530605793,
-0.0029748568776994944,
0.038004808127880096,
0.07667867839336395,
0.10539560765028,
-0.006560537964105606,
0.10943733900785446,
-0.052578117698431015,
0.03989659249782562,
0.0783686488866806,
0.08677258342504501,
-0.04011330381035805,
0.03763258829712868,
-0.03599127009510994,
-0.0029074533376842737,
0.037159644067287445,
-0.08991247415542603,
-0.16678111255168915,
-0.13955748081207275,
0.05159289389848709,
-0.002090967260301113,
0.15245173871517181,
-0.01554233767092228,
-0.01782415062189102,
-0.020227093249559402,
0.2536965012550354,
0.047196898609399796,
-0.11842437088489532,
-0.13014647364616394,
0.07577933371067047,
0.009446412324905396,
-0.024373695254325867,
0.037698712199926376,
-0.021913982927799225,
0.06543236970901489,
0.005162764806300402,
-0.09538263827562332,
0.09717121720314026,
-0.041802339255809784,
-0.12158066034317017,
-0.05111781880259514,
0.11476034671068192,
0.050611626356840134,
0.026869237422943115,
0.0490945428609848,
-0.04528533294796944,
-0.06246979907155037,
-0.09483925253152847,
0.023764198645949364,
-0.004312688484787941,
0.18391142785549164,
0.06998094916343689,
-0.09898246079683304,
0.04370812326669693,
-0.049376845359802246,
-0.050055161118507385,
0.2920994758605957,
0.11461557447910309,
-0.08826374262571335,
0.09623511880636215,
0.08465991914272308,
-0.054953377693891525,
-0.27654364705085754,
0.04207586124539375,
0.025088684633374214,
-0.032005954533815384,
-0.10680907964706421,
-0.20633064210414886,
0.08925089985132217,
-0.02554069645702839,
-0.010169305838644505,
0.06548069417476654,
-0.19616881012916565,
-0.08878950774669647,
0.11437562108039856,
0.04634217545390129,
0.18837398290634155,
-0.10696098208427429,
-0.007481754757463932,
-0.08874796330928802,
-0.17437443137168884,
0.08431144058704376,
-0.15544955432415009,
0.11038874089717865,
-0.028481727465987206,
0.08542896807193756,
0.011921979486942291,
-0.06176942586898804,
0.09822790324687958,
0.02417929284274578,
0.036573294550180435,
-0.0168402548879385,
0.011907978914678097,
0.1375388652086258,
-0.0377681665122509,
0.10277210175991058,
-0.01570509560406208,
0.022653954103589058,
-0.12908783555030823,
-0.06253767758607864,
-0.11130087077617645,
0.04845086485147476,
0.012495561502873898,
-0.11898865550756454,
-0.013897470198571682,
-0.06191376596689224,
0.08551307022571564,
0.009411865845322609,
0.02628629095852375,
-0.1080959215760231,
0.11318600922822952,
0.038511745631694794,
0.14660902321338654,
-0.17766173183918,
0.0005081603303551674,
0.002610029885545373,
-0.04644739627838135,
0.08034642785787582,
-0.1530238538980484,
0.08487891405820847,
0.06794480234384537,
0.017616314813494682,
0.15373112261295319,
0.11854569613933563,
0.014220581389963627,
0.009744571521878242,
0.044810421764850616,
-0.17456461489200592,
0.0042763627134263515,
-0.08729765564203262,
-0.12148960679769516,
-0.10033760964870453,
0.07235130667686462,
0.06560055166482925,
-0.028546195477247238,
-0.038127217441797256,
0.009272675029933453,
0.010663649998605251,
-0.10049322247505188,
0.0412980318069458,
0.07677976787090302,
0.037633173167705536,
-0.11161410063505173,
0.02673383429646492,
0.01978500746190548,
-0.017450639978051186,
0.031504079699516296,
0.09219376742839813,
-0.13073304295539856,
-0.09498464316129684,
0.08836504817008972,
0.24718132615089417,
-0.09186617285013199,
-0.0638636127114296,
-0.08551585674285889,
-0.09660261124372482,
0.045492447912693024,
0.0009352120105177164,
0.050962429493665695,
-0.005642176140099764,
-0.007739467080682516,
-0.012155982665717602,
-0.154816135764122,
0.05086960271000862,
0.029189327731728554,
0.02690032124519348,
-0.07728555798530579,
0.13369043171405792,
-0.0008116018143482506,
0.11044324934482574,
-0.05535205453634262,
-0.03562123328447342,
-0.08031261712312698,
0.07686225324869156,
-0.1706515997648239,
-0.02215917594730854,
-0.06965764611959457,
-0.0376485250890255,
0.015176039189100266,
-0.015164783224463463,
-0.003560683922842145,
0.019900918006896973,
-0.09517423808574677,
-0.0092872753739357,
-0.02783690020442009,
0.028906328603625298,
-0.05071144178509712,
-0.010462069883942604,
0.009628897532820702,
-0.061250198632478714,
0.11796196550130844,
0.042917124927043915,
-0.0815163403749466,
0.06929764896631241,
-0.12632538378238678,
-0.07263772934675217,
0.04271158576011658,
-0.01617865078151226,
-0.012955786660313606,
-0.021069886162877083,
-0.005062534008175135,
-0.005751417949795723,
0.06163327768445015,
0.03767499327659607,
0.12072905153036118,
-0.10587383806705475,
0.0026480937376618385,
-0.08813407272100449,
-0.04547034949064255,
-0.09559988975524902,
0.04698353633284569,
-0.0026703812181949615,
0.08380004018545151,
0.09489244222640991,
-0.07981420308351517,
0.0775383859872818,
-0.08679871261119843,
0.005485810339450836,
0.010001706890761852,
-0.011184095405042171,
-0.07391775399446487,
-0.09092508256435394,
0.06404724717140198,
-0.04532020911574364,
0.19256891310214996,
0.015858393162488937,
0.024163760244846344,
0.006078075617551804,
0.008008291944861412,
0.08775854110717773,
-0.0021143530029803514,
0.19118693470954895,
0.017530208453536034,
-0.013590076938271523,
0.01845407672226429,
0.10700953751802444,
0.049127839505672455,
0.0937795415520668,
0.21701285243034363,
-0.0018842191202566028,
0.021331731230020523,
0.0830090269446373,
-0.12079384922981262,
-0.05438540503382683,
-0.1212267354130745,
0.04831244796514511,
0.05948065221309662,
0.0718071311712265,
-0.09799972921609879,
0.051264215260744095,
0.19404569268226624,
-0.12511852383613586,
0.03328970819711685,
-0.05987674370408058,
-0.09407215565443039,
-0.10904650390148163,
-0.10692577064037323,
-0.031873900443315506,
-0.18019986152648926,
-0.001362879411317408,
-0.08546547591686249,
0.00183002813719213,
0.08688998222351074,
-0.0024576338473707438,
-0.010964810848236084,
0.1585192084312439,
0.09887133538722992,
-0.09332185238599777,
0.035084664821624756,
-0.054619427770376205,
0.0776362195611,
-0.03427765518426895,
-0.02850026823580265,
0.0035255001857876778,
-0.05693932995200157,
0.041346531361341476,
0.019255515187978745,
-0.03474019467830658,
0.009156813845038414,
-0.08361310511827469,
-0.0643819198012352,
-0.013525165617465973,
0.09058314561843872,
0.06117071956396103,
0.15106439590454102,
0.006563164759427309,
-0.05385446175932884,
0.011709189973771572,
0.20619522035121918,
-0.10164932906627655,
-0.1418599933385849,
-0.13997504115104675,
0.21511991322040558,
0.08761761337518692,
-0.0406041257083416,
0.04206922650337219,
-0.1275637000799179,
0.047590222209692,
0.3084791600704193,
0.1751507818698883,
-0.023435061797499657,
0.009391081519424915,
0.04776322841644287,
-0.011431548744440079,
0.030990757048130035,
0.07735908031463623,
0.06825537234544754,
0.13165543973445892,
-0.11264730244874954,
-0.04533868283033371,
0.022246968001127243,
-0.06788985431194305,
-0.11002043634653091,
0.08989155292510986,
0.046679627150297165,
-0.06047654151916504,
-0.006091950926929712,
0.07414087653160095,
-0.07631712406873703,
-0.0455305241048336,
0.041403427720069885,
-0.0337318480014801,
-0.05832811817526817,
0.002117831725627184,
-0.0316447876393795,
-0.043703075498342514,
0.03630242496728897,
-0.024548154324293137,
0.002009111223742366,
0.17049020528793335,
0.004513515159487724,
-0.11213085800409317,
-0.0059165130369365215,
0.10983214527368546,
-0.061441656202077866,
0.09673050791025162,
-0.008974711410701275,
0.10696595907211304,
0.048238348215818405,
0.0359010249376297,
-0.04540165513753891,
0.05126308649778366,
0.035892270505428314,
0.020350923761725426,
0.053840670734643936,
0.08561455458402634,
-0.018864354118704796,
-0.07220157235860825,
-0.009702411480247974,
-0.15894097089767456,
0.03809628635644913,
0.01118828821927309,
-0.039998363703489304,
-0.08218851685523987,
-0.02383166179060936,
-0.06515459716320038,
0.11839921772480011,
0.115425243973732,
-0.035573381930589676,
0.014135830104351044,
-0.0741955116391182,
-0.01051426213234663,
0.058192405849695206,
-0.06171172857284546,
-0.11044101417064667,
-0.13717709481716156,
0.007510934956371784,
0.16915422677993774,
-0.0020446062553673983,
-0.17918646335601807,
0.010278192348778248,
0.02796827256679535,
-0.020502954721450806,
-0.09586475044488907,
0.10081816464662552,
0.07730679959058762,
0.00030784725095145404,
-0.02140389382839203,
-0.057151611894369125,
-0.03214067593216896,
0.10829474776983261,
-0.18815717101097107,
-0.09491800516843796
] |
null | null |
transformers
|
# Model Trained Using AutoNLP
- Problem type: Multi-class Classification
- Model ID: 9522090
## Validation Metrics
- Loss: 0.3541755676269531
- Accuracy: 0.8759671179883946
- Macro F1: 0.5330133182738012
- Micro F1: 0.8759671179883946
- Weighted F1: 0.8482773065757196
- Macro Precision: 0.537738108882869
- Micro Precision: 0.8759671179883946
- Weighted Precision: 0.8241048710814852
- Macro Recall: 0.5316621214820499
- Micro Recall: 0.8759671179883946
- Weighted Recall: 0.8759671179883946
## Usage
You can use cURL to access this model:
```
$ curl -X POST -H "Authorization: Bearer YOUR_API_KEY" -H "Content-Type: application/json" -d '{"inputs": "I love AutoNLP"}' https://api-inference.huggingface.co/models/bshlgrs/autonlp-classification-9522090
```
Or Python API:
```
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Load the fine-tuned classifier and its tokenizer (use_auth_token passes your stored Hugging Face token)
model = AutoModelForSequenceClassification.from_pretrained("bshlgrs/autonlp-classification-9522090", use_auth_token=True)
tokenizer = AutoTokenizer.from_pretrained("bshlgrs/autonlp-classification-9522090", use_auth_token=True)

# Tokenize the input text and run a forward pass to obtain classification logits
inputs = tokenizer("I love AutoNLP", return_tensors="pt")
outputs = model(**inputs)
```
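Once you have `outputs`, the raw logits still need to be mapped to a label. A minimal follow-up sketch, assuming the default `id2label` mapping stored in the model config:
```
import torch

# Pick the highest-scoring class and look up its label name in the model config
predicted_id = torch.argmax(outputs.logits, dim=-1).item()
print(model.config.id2label[predicted_id])
```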
|
{"language": "en", "tags": "autonlp", "datasets": ["bshlgrs/autonlp-data-classification"], "widget": [{"text": "I love AutoNLP \ud83e\udd17"}]}
|
text-classification
|
bshlgrs/autonlp-classification-9522090
|
[
"transformers",
"pytorch",
"bert",
"text-classification",
"autonlp",
"en",
"dataset:bshlgrs/autonlp-data-classification",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #bert #text-classification #autonlp #en #dataset-bshlgrs/autonlp-data-classification #autotrain_compatible #endpoints_compatible #region-us
|
# Model Trained Using AutoNLP
- Problem type: Multi-class Classification
- Model ID: 9522090
## Validation Metrics
- Loss: 0.3541755676269531
- Accuracy: 0.8759671179883946
- Macro F1: 0.5330133182738012
- Micro F1: 0.8759671179883946
- Weighted F1: 0.8482773065757196
- Macro Precision: 0.537738108882869
- Micro Precision: 0.8759671179883946
- Weighted Precision: 0.8241048710814852
- Macro Recall: 0.5316621214820499
- Micro Recall: 0.8759671179883946
- Weighted Recall: 0.8759671179883946
## Usage
You can use cURL to access this model:
Or Python API:
|
[
"# Model Trained Using AutoNLP\n\n- Problem type: Multi-class Classification\n- Model ID: 9522090",
"## Validation Metrics\n\n- Loss: 0.3541755676269531\n- Accuracy: 0.8759671179883946\n- Macro F1: 0.5330133182738012\n- Micro F1: 0.8759671179883946\n- Weighted F1: 0.8482773065757196\n- Macro Precision: 0.537738108882869\n- Micro Precision: 0.8759671179883946\n- Weighted Precision: 0.8241048710814852\n- Macro Recall: 0.5316621214820499\n- Micro Recall: 0.8759671179883946\n- Weighted Recall: 0.8759671179883946",
"## Usage\n\nYou can use cURL to access this model:\n\n\n\nOr Python API:"
] |
[
"TAGS\n#transformers #pytorch #bert #text-classification #autonlp #en #dataset-bshlgrs/autonlp-data-classification #autotrain_compatible #endpoints_compatible #region-us \n",
"# Model Trained Using AutoNLP\n\n- Problem type: Multi-class Classification\n- Model ID: 9522090",
"## Validation Metrics\n\n- Loss: 0.3541755676269531\n- Accuracy: 0.8759671179883946\n- Macro F1: 0.5330133182738012\n- Micro F1: 0.8759671179883946\n- Weighted F1: 0.8482773065757196\n- Macro Precision: 0.537738108882869\n- Micro Precision: 0.8759671179883946\n- Weighted Precision: 0.8241048710814852\n- Macro Recall: 0.5316621214820499\n- Micro Recall: 0.8759671179883946\n- Weighted Recall: 0.8759671179883946",
"## Usage\n\nYou can use cURL to access this model:\n\n\n\nOr Python API:"
] |
[
59,
25,
149,
17
] |
[
"passage: TAGS\n#transformers #pytorch #bert #text-classification #autonlp #en #dataset-bshlgrs/autonlp-data-classification #autotrain_compatible #endpoints_compatible #region-us \n# Model Trained Using AutoNLP\n\n- Problem type: Multi-class Classification\n- Model ID: 9522090## Validation Metrics\n\n- Loss: 0.3541755676269531\n- Accuracy: 0.8759671179883946\n- Macro F1: 0.5330133182738012\n- Micro F1: 0.8759671179883946\n- Weighted F1: 0.8482773065757196\n- Macro Precision: 0.537738108882869\n- Micro Precision: 0.8759671179883946\n- Weighted Precision: 0.8241048710814852\n- Macro Recall: 0.5316621214820499\n- Micro Recall: 0.8759671179883946\n- Weighted Recall: 0.8759671179883946## Usage\n\nYou can use cURL to access this model:\n\n\n\nOr Python API:"
] |
[
-0.06786561012268066,
0.17361539602279663,
-0.002672463422641158,
0.11000768840312958,
0.09727243334054947,
0.05605144053697586,
0.10872629284858704,
0.1494981348514557,
0.05574570596218109,
0.12881387770175934,
0.09412495791912079,
0.15672653913497925,
0.060445260256528854,
0.13870938122272491,
-0.10420013219118118,
-0.16131116449832916,
0.013311325572431087,
0.029515065252780914,
0.09003909677267075,
0.06696450710296631,
0.08235664665699005,
-0.07248422503471375,
0.11330047994852066,
-0.04897173494100571,
-0.13703513145446777,
0.03472188115119934,
0.09201226383447647,
-0.06918961554765701,
0.07530247420072556,
0.09135092794895172,
0.11416243016719818,
-0.01823110692203045,
0.06857883930206299,
-0.11743941158056259,
-0.025883769616484642,
0.038914598524570465,
-0.03269163891673088,
0.08471262454986572,
0.12913276255130768,
-0.037392012774944305,
0.014519613236188889,
-0.08950173854827881,
0.07821065187454224,
0.08148835599422455,
-0.08069298416376114,
-0.14789190888404846,
-0.12250964343547821,
0.07890327274799347,
0.0782918632030487,
0.036283571273088455,
-0.0045669227838516235,
0.16402721405029297,
-0.037894271314144135,
0.11506839841604233,
0.06858783960342407,
-0.2342757284641266,
-0.03515153005719185,
0.2239142656326294,
-0.053737591952085495,
-0.003950492478907108,
-0.010685352608561516,
0.017606722190976143,
0.05577496066689491,
0.009125993587076664,
0.001457687932997942,
-0.051326312124729156,
-0.07508072257041931,
0.0007494303863495588,
-0.11897294223308563,
-0.034405667334795,
0.15537096560001373,
0.01444239355623722,
-0.04971437528729439,
-0.10783404856920242,
-0.07976026087999344,
-0.09124363958835602,
-0.05263838171958923,
-0.021541070193052292,
0.013725082390010357,
-0.031343378126621246,
-0.05966513603925705,
0.07544037699699402,
-0.009204499423503876,
-0.06599295884370804,
-0.15367676317691803,
0.037785764783620834,
-0.003498554229736328,
0.049819834530353546,
0.0073861475102603436,
0.03449040278792381,
-0.0652577131986618,
-0.06590502709150314,
0.008104441687464714,
0.026193181052803993,
-0.10550475865602493,
-0.07474161684513092,
0.009656698442995548,
0.030573386698961258,
0.023681025952100754,
0.20112693309783936,
0.021828588098287582,
0.11978612840175629,
-0.01596931740641594,
-0.018999863415956497,
-0.054067280143499374,
0.12559650838375092,
-0.06838250160217285,
-0.13461464643478394,
0.02969919890165329,
-0.00878328550606966,
0.026019055396318436,
-0.033538684248924255,
-0.09682387858629227,
-0.07938595861196518,
0.08290272206068039,
0.046181727200746536,
0.017773181200027466,
0.04477790370583534,
-0.07663077116012573,
-0.07225946336984634,
0.033890657126903534,
-0.10791295766830444,
0.06005755066871643,
0.00797327421605587,
-0.11216109246015549,
0.049580659717321396,
0.06186019629240036,
0.009701470844447613,
-0.0836767926812172,
0.028876012191176414,
-0.10916446894407272,
0.0058469208888709545,
-0.0769512876868248,
-0.1365731656551361,
0.056276559829711914,
-0.025558779016137123,
-0.02193315513432026,
-0.09016521275043488,
-0.19743527472019196,
-0.07308920472860336,
-0.002567039802670479,
-0.10452473908662796,
-0.03749832883477211,
-0.012891002930700779,
0.00978834368288517,
0.05153851583600044,
0.008250757120549679,
0.060440462082624435,
-0.02010461688041687,
0.04011701047420502,
0.063998281955719,
0.10481196641921997,
-0.04576681926846504,
0.03195463865995407,
-0.044179029762744904,
0.010190144181251526,
-0.14912694692611694,
0.1077408567070961,
-0.09373205900192261,
0.05165649205446243,
-0.16164159774780273,
-0.013837669044733047,
0.08337962627410889,
-0.02231098897755146,
0.036512963473796844,
0.09577394276857376,
-0.1643800288438797,
-0.014083214104175568,
0.09973013401031494,
-0.06338202953338623,
-0.10538890957832336,
0.10203000158071518,
-0.022051000967621803,
0.015839237719774246,
0.06751560419797897,
0.11631157249212265,
0.12888644635677338,
-0.055231817066669464,
-0.07385443150997162,
0.010756568983197212,
0.015216332860291004,
-0.050509992986917496,
0.09314823150634766,
-0.03218591958284378,
-0.14609287679195404,
0.014248628169298172,
0.06661446392536163,
-0.02321796864271164,
-0.010866262018680573,
-0.06652805954217911,
-0.024563906714320183,
-0.01822700724005699,
0.03044114261865616,
0.013548014685511589,
0.03186727687716484,
-0.04169132933020592,
-0.022478943690657616,
0.08882592618465424,
0.1419791281223297,
-0.02105974406003952,
-0.03296351060271263,
-0.13852939009666443,
0.045239005237817764,
-0.10994386672973633,
-0.029921231791377068,
-0.20863382518291473,
-0.06722375005483627,
0.030086854472756386,
-0.09969605505466461,
0.025613002479076385,
-0.031928468495607376,
0.08889669924974442,
0.014176805503666401,
0.012224586680531502,
0.041074663400650024,
0.1385577768087387,
-0.042229317128658295,
-0.10805058479309082,
-0.06058919057250023,
-0.05636000633239746,
-0.0014167663175612688,
0.21702681481838226,
-0.18781128525733948,
0.013067623600363731,
0.0846904069185257,
0.0726352259516716,
0.010586883872747421,
-0.042864833027124405,
-0.03916886821389198,
0.054277800023555756,
0.013081567361950874,
-0.052440524101257324,
0.06591587513685226,
-0.04217399284243584,
-0.09622353315353394,
-0.03792142868041992,
-0.24062125384807587,
0.16537536680698395,
0.11497880518436432,
0.024391327053308487,
-0.057782132178545,
-0.09644852578639984,
0.040088530629873276,
-0.04112276807427406,
-0.01847400702536106,
0.020005173981189728,
0.10864764451980591,
0.037130728363990784,
0.10660865902900696,
-0.07151563465595245,
-0.06420294940471649,
-0.005184763111174107,
-0.03612837567925453,
-0.03144703805446625,
0.18134990334510803,
0.02704266831278801,
-0.16505874693393707,
0.10442905873060226,
-0.014046757481992245,
-0.09259431809186935,
0.09257380664348602,
0.02637636661529541,
-0.020730243995785713,
-0.08256930112838745,
-0.009329406544566154,
0.048191726207733154,
0.007288149558007717,
-0.01296592690050602,
0.04260409623384476,
0.07994118332862854,
-0.0247221477329731,
0.01767057180404663,
-0.08339551836252213,
0.030434632673859596,
0.010959617793560028,
-0.04937080666422844,
-0.025982942432165146,
0.0006650951690971851,
0.05506855994462967,
0.12252193689346313,
0.025321969762444496,
0.009973783977329731,
0.030018331483006477,
-0.015443667769432068,
-0.10098905861377716,
0.20907297730445862,
-0.10031627863645554,
-0.13146443665027618,
-0.1743680089712143,
-0.18294809758663177,
-0.10458096861839294,
-0.03547509387135506,
-0.03236577287316322,
-0.07646159082651138,
-0.09859509766101837,
-0.045652396976947784,
-0.024601373821496964,
-0.005412259604781866,
-0.05829242244362831,
0.010489121079444885,
-0.017006438225507736,
0.0865185484290123,
-0.12516966462135315,
-0.032752908766269684,
0.0009415848180651665,
-0.10472941398620605,
0.05002110078930855,
0.007892750203609467,
0.068546362221241,
0.1579282134771347,
-0.022444913163781166,
0.035773809999227524,
-0.013748017139732838,
0.19584740698337555,
-0.011728750541806221,
-0.002425572369247675,
0.17421801388263702,
0.049068108201026917,
0.05184509605169296,
0.1327092945575714,
0.06136574596166611,
-0.08496307581663132,
0.008547226898372173,
0.07950706779956818,
-0.01925315521657467,
-0.2016776204109192,
-0.17975881695747375,
0.005976121872663498,
0.06983053684234619,
0.14786209166049957,
0.020884528756141663,
0.07052821666002274,
0.1059817522764206,
0.008392995223402977,
0.1405775547027588,
-0.02615485154092312,
0.06834781914949417,
0.14543603360652924,
0.03135932981967926,
0.14809492230415344,
-0.08314090967178345,
-0.004885923117399216,
0.11739332973957062,
0.026479527354240417,
0.07016008347272873,
0.04426944628357887,
0.13850490748882294,
-0.029674433171749115,
0.09597466140985489,
0.05375465750694275,
0.09369860589504242,
0.0022283485159277916,
-0.024759415537118912,
0.02341221645474434,
-0.06783190369606018,
-0.10906840860843658,
0.008987859822809696,
0.04172106087207794,
0.038147106766700745,
-0.06114547699689865,
-0.008171037770807743,
0.007069653831422329,
0.08512847870588303,
0.052999045699834824,
-0.4321748912334442,
-0.051193490624427795,
0.06923038512468338,
-0.02223990485072136,
-0.10138167440891266,
-0.03596929833292961,
0.003371458500623703,
-0.1272958219051361,
0.08165611326694489,
-0.038481999188661575,
0.11242032051086426,
-0.09566058963537216,
-0.022153595462441444,
-0.04405958205461502,
0.07840856909751892,
-0.006258002948015928,
0.061278898268938065,
-0.1918763816356659,
0.14617887139320374,
0.07099796086549759,
0.015325425192713737,
-0.09550845623016357,
0.029290175065398216,
0.044764257967472076,
-0.005917159840464592,
0.11356481164693832,
0.0019229712197557092,
-0.10852055996656418,
-0.2921583354473114,
-0.09553412348031998,
0.002008322859182954,
-0.0012760369572788477,
-0.0400250107049942,
0.07559143006801605,
-0.046003036201000214,
-0.004555037245154381,
-0.021901000291109085,
-0.058828070759773254,
-0.053852587938308716,
-0.07313553243875504,
0.02917208895087242,
0.08747568726539612,
-0.02188596874475479,
-0.02875036559998989,
-0.0264018252491951,
-0.02307405136525631,
0.11431007087230682,
-0.1283310502767563,
-0.060589686036109924,
-0.15143699944019318,
0.02221655659377575,
0.12801893055438995,
-0.12296006083488464,
0.03105292282998562,
-0.02536950632929802,
0.08497121930122375,
-0.030647652223706245,
-0.13729418814182281,
0.08834867179393768,
-0.04286562278866768,
-0.053861331194639206,
0.012283192947506905,
0.04487480968236923,
-0.011934962123632431,
0.07466858625411987,
0.0349883958697319,
0.02798704244196415,
-0.016219720244407654,
-0.11380423605442047,
-0.0007195695652626455,
0.047278568148612976,
0.10945487767457962,
0.09603126347064972,
-0.01734280027449131,
-0.11425498127937317,
-0.04428420960903168,
0.08662229031324387,
0.165511354804039,
0.23784247040748596,
-0.06867945194244385,
0.002615859266370535,
0.10870467126369476,
-0.04116002097725868,
-0.23318395018577576,
-0.005409163888543844,
0.005859818775206804,
0.004936231300234795,
-0.05285906046628952,
-0.11018598079681396,
0.13197879493236542,
0.1774367392063141,
-0.030812714248895645,
-0.023769589141011238,
-0.23408301174640656,
-0.1248500794172287,
0.1723039448261261,
0.07673019170761108,
0.07929990440607071,
-0.1457063853740692,
-0.08398905396461487,
-0.13929545879364014,
-0.11549726128578186,
0.13690342009067535,
-0.08052728325128555,
0.0654611811041832,
-0.04465944319963455,
0.09970808029174805,
0.04720347374677658,
-0.05531221628189087,
0.14241185784339905,
0.036201346665620804,
0.07550109177827835,
-0.0703062191605568,
-0.07898416370153427,
-0.06929781287908554,
-0.09307558089494705,
0.1397196501493454,
0.02099667489528656,
0.046132974326610565,
-0.2433874011039734,
-0.007592806592583656,
0.0014128244947642088,
0.06125297024846077,
-0.03857797756791115,
-0.035991474986076355,
-0.02977697178721428,
0.036701053380966187,
-0.04174354672431946,
-0.038657501339912415,
0.03488410636782646,
-0.041769832372665405,
0.09988812357187271,
0.21316686272621155,
0.1273903101682663,
-0.05060908943414688,
-0.0345831997692585,
0.04959949851036072,
-0.0523994155228138,
0.0604323148727417,
-0.1315118819475174,
0.07318603247404099,
0.10904379934072495,
0.013896231539547443,
0.1181042268872261,
0.04234450310468674,
-0.038518667221069336,
-0.019989287480711937,
0.046030957251787186,
-0.12081975489854813,
0.020839428529143333,
0.0038000098429620266,
-0.006451890338212252,
-0.1026775911450386,
-0.04458305239677429,
0.13328039646148682,
0.02898133173584938,
-0.026970205828547478,
0.020688239485025406,
-0.004708316642791033,
-0.014377908781170845,
0.22467385232448578,
0.01482112891972065,
0.09097341448068619,
-0.13141444325447083,
0.09183070063591003,
0.10487860441207886,
-0.11146966367959976,
0.016475358977913857,
0.11946458369493484,
-0.07653260976076126,
-0.06431148201227188,
-0.01456517819315195,
0.17273904383182526,
-0.12626661360263824,
-0.035357389599084854,
-0.03033587895333767,
-0.10215558856725693,
0.06612288951873779,
0.22641336917877197,
0.09838429093360901,
0.020658452063798904,
-0.023826517164707184,
-0.0699506625533104,
-0.1148291528224945,
0.05017426982522011,
0.06658735871315002,
0.023498481139540672,
-0.10912184417247772,
0.17335709929466248,
-0.033214785158634186,
-0.013956543058156967,
-0.02040468342602253,
0.011328494176268578,
-0.2119111567735672,
-0.036974769085645676,
-0.07334030419588089,
0.036498717963695526,
-0.0503755584359169,
0.05891728401184082,
-0.009330358356237411,
0.021175170317292213,
-0.04067489877343178,
0.0013731407234445214,
-0.08437413722276688,
-0.054527271538972855,
0.019680000841617584,
0.08124221861362457,
-0.09373243898153305,
-0.02405591309070587,
0.06038116291165352,
-0.019567551091313362,
0.06136113777756691,
0.05609283596277237,
0.07383375614881516,
-0.00158564536832273,
-0.035675179213285446,
-0.019219664856791496,
0.06156926602125168,
0.022034883499145508,
0.07218345999717712,
-0.16243457794189453,
0.048753365874290466,
-0.009648087434470654,
0.048413656651973724,
0.0733187273144722,
0.11468535661697388,
-0.10784585773944855,
0.03681180626153946,
-0.1104266494512558,
-0.07642221450805664,
-0.10662297159433365,
0.04475022852420807,
0.14610204100608826,
0.05254749953746796,
0.07468272000551224,
-0.08273791521787643,
0.025077350437641144,
-0.2018127292394638,
-0.009035098366439342,
-0.034991368651390076,
-0.07027626037597656,
-0.07332836091518402,
-0.011519690044224262,
0.08319978415966034,
-0.010431998409330845,
0.0970681682229042,
-0.0037222299724817276,
-0.015412699431180954,
0.028070207685232162,
0.1292572021484375,
-0.02939828298985958,
-0.03429540991783142,
0.15541806817054749,
0.10076690465211868,
-0.003966066054999828,
0.11162394285202026,
0.09786836802959442,
0.037220023572444916,
0.01886431872844696,
0.0025771991349756718,
0.07095590233802795,
-0.08859623968601227,
0.0705084428191185,
0.052026718854904175,
-0.08690329641103745,
-0.041414327919483185,
0.13289347290992737,
-0.11874683201313019,
0.016712334007024765,
-0.0680471658706665,
0.03249634429812431,
0.10476823896169662,
-0.12436389178037643,
0.03232616186141968,
0.023822549730539322,
-0.06848294287919998,
-0.21604299545288086,
-0.11665244400501251,
-0.13989397883415222,
-0.033832427114248276,
-0.0277628805488348,
-0.1116304099559784,
0.025187961757183075,
0.16293159127235413,
0.013494996353983879,
0.02839038521051407,
0.07998939603567123,
-0.26418912410736084,
-0.0049712639302015305,
-0.02312135137617588,
-0.00021053594537079334,
-0.022652355954051018,
-0.011970482766628265,
-0.02679537422955036,
0.026491068303585052,
0.025437312200665474,
0.08391157537698746,
0.012994928285479546,
0.04680335521697998,
0.06549352407455444,
-0.010395104065537453,
-0.090214803814888,
-0.029749613255262375,
0.024398380890488625,
0.03287789225578308,
0.15709547698497772,
0.018968431279063225,
0.004226443357765675,
-0.03933501988649368,
0.17387482523918152,
-0.09698493778705597,
0.004332102835178375,
-0.11523417383432388,
0.24585922062397003,
-0.00980711542069912,
0.0761386901140213,
-0.0018042400479316711,
-0.016981571912765503,
0.012967471964657307,
0.14595864713191986,
0.11037055402994156,
-0.0011576565448194742,
-0.022486157715320587,
0.023678593337535858,
-0.005490300711244345,
-0.021361229941248894,
0.10880126804113388,
0.025141652673482895,
0.12494713068008423,
-0.05224988982081413,
0.0511978380382061,
0.03516645357012749,
-0.03973478823900223,
-0.07103239744901657,
0.06421373039484024,
-0.0029650789219886065,
0.005449211224913597,
0.013943457044661045,
0.07874289155006409,
-0.0436011478304863,
0.08879044651985168,
0.08848583698272705,
-0.11745098233222961,
-0.14341507852077484,
0.026386922225356102,
-0.021588675677776337,
-0.043951526284217834,
0.07213770598173141,
-0.024154148995876312,
-0.005888916552066803,
0.02592300996184349,
-0.023331375792622566,
-0.22311389446258545,
-0.10022208094596863,
0.010866622440516949,
0.15098294615745544,
0.262179970741272,
0.018720552325248718,
0.08110513538122177,
0.16987311840057373,
-0.04216577112674713,
-0.14288663864135742,
0.08926188945770264,
-0.00984434224665165,
-0.12042960524559021,
0.08673262596130371,
0.07969687134027481,
-0.0372026227414608,
0.17849579453468323,
0.056771669536828995,
-0.18993233144283295,
-0.010128594003617764,
-0.011585928499698639,
0.01889048144221306,
-0.09036793559789658,
0.008712491020560265,
-0.09354692697525024,
0.12615899741649628,
0.1850648820400238,
-0.020640432834625244,
-0.014331123791635036,
-0.052979208528995514,
0.055153489112854004,
-0.006182064767926931,
0.034422729164361954,
-0.04105168953537941,
-0.09264498203992844,
0.056938331574201584,
-0.17676717042922974,
-0.014318970032036304,
-0.28733450174331665,
-0.03300654888153076,
-0.022374307736754417,
-0.04090716317296028,
-0.07705093920230865,
0.08910217136144638,
0.040826380252838135,
0.017655327916145325,
-0.03309132158756256,
-0.2707986831665039,
0.022102337330579758,
0.11666838824748993,
-0.1124747171998024,
-0.10640855133533478
] |
null | null |
transformers
|
# Model Trained Using AutoNLP
- Problem type: Multi-class Classification
- Model ID: 9532137
## Validation Metrics
- Loss: 0.34556105732917786
- Accuracy: 0.8749890724713699
- Macro F1: 0.5243623959669343
- Micro F1: 0.8749890724713699
- Weighted F1: 0.8638030768409057
- Macro Precision: 0.5016762404900895
- Micro Precision: 0.8749890724713699
- Weighted Precision: 0.8547962562614184
- Macro Recall: 0.5529674694200845
- Micro Recall: 0.8749890724713699
- Weighted Recall: 0.8749890724713699
## Usage
You can use cURL to access this model:
```
$ curl -X POST -H "Authorization: Bearer YOUR_API_KEY" -H "Content-Type: application/json" -d '{"inputs": "I love AutoNLP"}' https://api-inference.huggingface.co/models/bshlgrs/autonlp-classification_with_all_labellers-9532137
```
Or Python API:
```
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Load the fine-tuned classifier and its tokenizer (use_auth_token passes your stored Hugging Face token)
model = AutoModelForSequenceClassification.from_pretrained("bshlgrs/autonlp-classification_with_all_labellers-9532137", use_auth_token=True)
tokenizer = AutoTokenizer.from_pretrained("bshlgrs/autonlp-classification_with_all_labellers-9532137", use_auth_token=True)

# Tokenize the input text and run a forward pass to obtain classification logits
inputs = tokenizer("I love AutoNLP", return_tensors="pt")
outputs = model(**inputs)
```
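To turn the returned logits into a readable prediction, a minimal sketch that relies on the `id2label` mapping written into the model config:
```
import torch

# Select the highest-scoring class index and convert it to its label name
predicted_id = torch.argmax(outputs.logits, dim=-1).item()
print(model.config.id2label[predicted_id])
```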
|
{"language": "en", "tags": "autonlp", "datasets": ["bshlgrs/autonlp-data-classification_with_all_labellers"], "widget": [{"text": "I love AutoNLP \ud83e\udd17"}]}
|
text-classification
|
bshlgrs/autonlp-classification_with_all_labellers-9532137
|
[
"transformers",
"pytorch",
"bert",
"text-classification",
"autonlp",
"en",
"dataset:bshlgrs/autonlp-data-classification_with_all_labellers",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #bert #text-classification #autonlp #en #dataset-bshlgrs/autonlp-data-classification_with_all_labellers #autotrain_compatible #endpoints_compatible #region-us
|
# Model Trained Using AutoNLP
- Problem type: Multi-class Classification
- Model ID: 9532137
## Validation Metrics
- Loss: 0.34556105732917786
- Accuracy: 0.8749890724713699
- Macro F1: 0.5243623959669343
- Micro F1: 0.8749890724713699
- Weighted F1: 0.8638030768409057
- Macro Precision: 0.5016762404900895
- Micro Precision: 0.8749890724713699
- Weighted Precision: 0.8547962562614184
- Macro Recall: 0.5529674694200845
- Micro Recall: 0.8749890724713699
- Weighted Recall: 0.8749890724713699
## Usage
You can use cURL to access this model:
Or Python API:
|
[
"# Model Trained Using AutoNLP\n\n- Problem type: Multi-class Classification\n- Model ID: 9532137",
"## Validation Metrics\n\n- Loss: 0.34556105732917786\n- Accuracy: 0.8749890724713699\n- Macro F1: 0.5243623959669343\n- Micro F1: 0.8749890724713699\n- Weighted F1: 0.8638030768409057\n- Macro Precision: 0.5016762404900895\n- Micro Precision: 0.8749890724713699\n- Weighted Precision: 0.8547962562614184\n- Macro Recall: 0.5529674694200845\n- Micro Recall: 0.8749890724713699\n- Weighted Recall: 0.8749890724713699",
"## Usage\n\nYou can use cURL to access this model:\n\n\n\nOr Python API:"
] |
[
"TAGS\n#transformers #pytorch #bert #text-classification #autonlp #en #dataset-bshlgrs/autonlp-data-classification_with_all_labellers #autotrain_compatible #endpoints_compatible #region-us \n",
"# Model Trained Using AutoNLP\n\n- Problem type: Multi-class Classification\n- Model ID: 9532137",
"## Validation Metrics\n\n- Loss: 0.34556105732917786\n- Accuracy: 0.8749890724713699\n- Macro F1: 0.5243623959669343\n- Micro F1: 0.8749890724713699\n- Weighted F1: 0.8638030768409057\n- Macro Precision: 0.5016762404900895\n- Micro Precision: 0.8749890724713699\n- Weighted Precision: 0.8547962562614184\n- Macro Recall: 0.5529674694200845\n- Micro Recall: 0.8749890724713699\n- Weighted Recall: 0.8749890724713699",
"## Usage\n\nYou can use cURL to access this model:\n\n\n\nOr Python API:"
] |
[
67,
25,
150,
17
] |
[
"passage: TAGS\n#transformers #pytorch #bert #text-classification #autonlp #en #dataset-bshlgrs/autonlp-data-classification_with_all_labellers #autotrain_compatible #endpoints_compatible #region-us \n# Model Trained Using AutoNLP\n\n- Problem type: Multi-class Classification\n- Model ID: 9532137## Validation Metrics\n\n- Loss: 0.34556105732917786\n- Accuracy: 0.8749890724713699\n- Macro F1: 0.5243623959669343\n- Micro F1: 0.8749890724713699\n- Weighted F1: 0.8638030768409057\n- Macro Precision: 0.5016762404900895\n- Micro Precision: 0.8749890724713699\n- Weighted Precision: 0.8547962562614184\n- Macro Recall: 0.5529674694200845\n- Micro Recall: 0.8749890724713699\n- Weighted Recall: 0.8749890724713699## Usage\n\nYou can use cURL to access this model:\n\n\n\nOr Python API:"
] |
[
-0.05354078486561775,
0.2018890678882599,
-0.0028793944511562586,
0.12102517485618591,
0.10504767298698425,
0.049827903509140015,
0.09537854045629501,
0.13884712755680084,
0.0009958823211491108,
0.12137550860643387,
0.10383302718400955,
0.1528237909078598,
0.05274273082613945,
0.11177245527505875,
-0.09628409147262573,
-0.1417008340358734,
0.008727425709366798,
0.02381497621536255,
0.05422403663396835,
0.06652554124593735,
0.06906628608703613,
-0.07360021024942398,
0.11910472065210342,
-0.03800972178578377,
-0.13733439147472382,
0.03217190504074097,
0.07229356467723846,
-0.062311965972185135,
0.053035348653793335,
0.09345091134309769,
0.12226027250289917,
-0.027018295601010323,
0.09307900071144104,
-0.10438655316829681,
-0.025855733081698418,
0.05191432312130928,
-0.015544279478490353,
0.08714564144611359,
0.15112903714179993,
-0.02483461983501911,
0.03956565260887146,
-0.1028938964009285,
0.08345406502485275,
0.08397591859102249,
-0.08230626583099365,
-0.11364709585905075,
-0.108375683426857,
0.0632532611489296,
0.09690278768539429,
0.044390540570020676,
-0.010785997845232487,
0.18385761976242065,
-0.05223802477121353,
0.12139125168323517,
0.047898076474666595,
-0.21128909289836884,
-0.05380425602197647,
0.19122925400733948,
-0.06345673650503159,
-0.037738312035799026,
0.0021594492718577385,
0.021365072578191757,
0.05919966474175453,
0.02007933147251606,
-0.00843273289501667,
-0.062357451766729355,
-0.06680731475353241,
-0.020591534674167633,
-0.14089377224445343,
-0.014381926506757736,
0.16117382049560547,
0.018668627366423607,
-0.05987238883972168,
-0.08286580443382263,
-0.06566070020198822,
-0.08934227377176285,
-0.0702093318104744,
-0.012441565282642841,
0.01565239019691944,
-0.05077517777681351,
-0.04388374835252762,
0.06926820427179337,
-0.002145956037566066,
-0.06153792515397072,
-0.17274488508701324,
0.05192943662405014,
0.0021890071220695972,
0.05340830609202385,
0.0017868030117824674,
0.016820786520838737,
-0.014256272464990616,
-0.04376913979649544,
0.021053120493888855,
0.025817522779107094,
-0.1059068813920021,
-0.10525951534509659,
0.0035276664420962334,
0.062462665140628815,
0.03738874942064285,
0.16219279170036316,
0.001984567614272237,
0.1115574985742569,
0.03758009150624275,
-0.02979709394276142,
-0.026633670553565025,
0.06958761066198349,
-0.09230589866638184,
-0.0986475944519043,
0.02814839407801628,
-0.04081427678465843,
0.019896015524864197,
-0.025006012991070747,
-0.09336758404970169,
-0.0784209594130516,
0.061608266085386276,
0.044817712157964706,
0.019278613850474358,
0.04714512079954147,
-0.04651327431201935,
-0.09189341217279434,
0.008752070367336273,
-0.11192377656698227,
0.04398064687848091,
0.034151285886764526,
-0.12409862130880356,
0.07972587645053864,
0.017390791326761246,
0.010334007441997528,
-0.12031640857458115,
0.026248695328831673,
-0.12256203591823578,
0.00760030560195446,
-0.08701781183481216,
-0.13367117941379547,
0.05053098872303963,
0.02173067443072796,
-0.014385051093995571,
-0.08045821636915207,
-0.19700942933559418,
-0.07303070276975632,
-0.006650647614151239,
-0.12048749625682831,
-0.04537303000688553,
-0.008235948160290718,
0.010100197046995163,
0.049598246812820435,
0.016936827450990677,
0.1145055815577507,
-0.03480476140975952,
0.013454489409923553,
0.07861055433750153,
0.08470198512077332,
-0.034558773040771484,
0.025583820417523384,
-0.027152031660079956,
0.010363470762968063,
-0.09710743278265,
0.09591256082057953,
-0.07735484093427658,
0.04913335293531418,
-0.17799288034439087,
-0.04529310017824173,
0.11121757328510284,
-0.011519244872033596,
0.02842099405825138,
0.12052054703235626,
-0.14443829655647278,
0.004322185646742582,
0.10371535271406174,
-0.05297449976205826,
-0.08342543989419937,
0.06304644048213959,
-0.01387585885822773,
-0.019127964973449707,
0.05715940520167351,
0.09179607033729553,
0.16452600061893463,
-0.046950068324804306,
-0.04365517199039459,
0.02463432215154171,
0.03182973340153694,
-0.07254622876644135,
0.1139122024178505,
-0.02380385249853134,
-0.1424361914396286,
0.015563974156975746,
0.04955138638615608,
-0.02173626609146595,
-0.03209846094250679,
-0.05659567937254906,
-0.021901676431298256,
-0.04050644487142563,
0.013051104731857777,
0.010892597958445549,
0.06108869984745979,
-0.023983580991625786,
-0.016395822167396545,
0.08820069581270218,
0.1776035726070404,
-0.01744372397661209,
-0.05751319229602814,
-0.15782617032527924,
0.053090900182724,
-0.11680716276168823,
-0.035812441259622574,
-0.2076202780008316,
-0.07379397004842758,
0.015691161155700684,
-0.14067064225673676,
0.0228214580565691,
-0.02368306741118431,
0.08584500104188919,
0.026136038824915886,
0.016102738678455353,
0.04556580260396004,
0.1352982521057129,
-0.03213338926434517,
-0.08498570322990417,
-0.08649599552154541,
-0.06804297864437103,
0.003359764814376831,
0.18634802103042603,
-0.20705775916576385,
0.0015504976036027074,
0.051708996295928955,
0.0729253888130188,
-0.0003957925655413419,
-0.049546144902706146,
-0.02594696171581745,
0.05672118812799454,
0.017765995115041733,
-0.06248202174901962,
0.08430973440408707,
-0.042153939604759216,
-0.06330399960279465,
-0.058858003467321396,
-0.24295121431350708,
0.1439015120267868,
0.10903805494308472,
0.025048580020666122,
-0.07556918263435364,
-0.07074874639511108,
0.03943197429180145,
-0.01995188184082508,
-0.02434913069009781,
0.06823868304491043,
0.10860326886177063,
0.040835894644260406,
0.10053332149982452,
-0.07202128320932388,
-0.07321008294820786,
0.008094539865851402,
-0.03678601235151291,
-0.02877860516309738,
0.21239596605300903,
0.06176408752799034,
-0.18395498394966125,
0.0963987410068512,
-0.020436713472008705,
-0.10274901986122131,
0.06420204788446426,
0.009900783188641071,
-0.02419763058423996,
-0.09461736679077148,
-0.008568624965846539,
0.03615080192685127,
0.01260604802519083,
-0.0013233382487669587,
0.06383669376373291,
0.07193583250045776,
-0.025181371718645096,
0.018085336312651634,
-0.06699604541063309,
0.028818883001804352,
0.01375990267843008,
-0.037785422056913376,
-0.025062276050448418,
-0.016735997051000595,
0.05519116297364235,
0.13606317341327667,
0.008475842885673046,
0.013692153617739677,
0.0007887376705184579,
-0.01489444449543953,
-0.10964296013116837,
0.22704757750034332,
-0.0841648280620575,
-0.11797028034925461,
-0.15347453951835632,
-0.177782341837883,
-0.12027294933795929,
-0.044934988021850586,
-0.07048238813877106,
-0.08096051216125488,
-0.1047084704041481,
-0.0669277161359787,
-0.0310844536870718,
-0.019638169556856155,
-0.07805577665567398,
0.040475498884916306,
-0.007621432188898325,
0.0998973697423935,
-0.1130688488483429,
-0.018858719617128372,
0.010192311368882656,
-0.09401799738407135,
0.034579724073410034,
0.04024102911353111,
0.03703952208161354,
0.1683911383152008,
-0.016173485666513443,
0.030396033078432083,
-0.00046203911188058555,
0.22074992954730988,
-0.008920644409954548,
-0.012035329826176167,
0.1838129311800003,
0.05309239402413368,
0.020714275538921356,
0.11439809203147888,
0.06702326238155365,
-0.0960584208369255,
0.008421777747571468,
0.0896516740322113,
0.0030129451770335436,
-0.17917120456695557,
-0.21440188586711884,
0.009028620086610317,
0.046301502734422684,
0.1427171677350998,
0.0074890851974487305,
-0.016564469784498215,
0.09270138293504715,
0.0071587408892810345,
0.08992631733417511,
-0.05388806015253067,
0.059787213802337646,
0.14677096903324127,
0.023970523849129677,
0.1556127518415451,
-0.07334941625595093,
-0.02119765803217888,
0.11883661150932312,
-0.003507372224703431,
0.05703480914235115,
0.02644113078713417,
0.09889945387840271,
-0.030461156740784645,
0.08395195007324219,
0.04668755456805229,
0.09776189178228378,
0.025191590189933777,
-0.020133044570684433,
0.030433060601353645,
-0.06434667855501175,
-0.10000129789113998,
-0.021506745368242264,
0.035189446061849594,
0.022643763571977615,
-0.09415193647146225,
-0.0033011585474014282,
-0.020161984488368034,
0.11667096614837646,
0.07000650465488434,
-0.43170368671417236,
-0.05588240176439285,
0.06098340451717377,
-0.03629307448863983,
-0.0963435173034668,
-0.03579608350992203,
0.011877210810780525,
-0.12351375073194504,
0.07875931262969971,
-0.02592792920768261,
0.11014599353075027,
-0.09661594778299332,
-0.02234482765197754,
-0.028088387101888657,
0.05616476759314537,
-0.007593289948999882,
0.06760834902524948,
-0.16877877712249756,
0.1313779205083847,
0.054873500019311905,
0.028270725160837173,
-0.07581846415996552,
0.04001538082957268,
0.02279212698340416,
0.018549250438809395,
0.12183581292629242,
0.024020865559577942,
-0.1630818247795105,
-0.30000555515289307,
-0.09258589148521423,
0.00574153009802103,
-0.011594055220484734,
-0.014022709801793098,
0.07349610328674316,
-0.048719774931669235,
0.00527541758492589,
-0.020596353337168694,
-0.05857555568218231,
-0.04142668843269348,
-0.07588770240545273,
0.03173461928963661,
0.054134659469127655,
-0.05628226324915886,
-0.007898632436990738,
-0.024787919595837593,
-0.032824404537677765,
0.10843123495578766,
-0.12080550193786621,
-0.05223691090941429,
-0.15023314952850342,
0.03734719380736351,
0.11572091281414032,
-0.11930873245000839,
0.03548095375299454,
-0.02053520828485489,
0.08395295590162277,
0.011315672658383846,
-0.13939224183559418,
0.06515563279390335,
-0.04444286227226257,
-0.05296246334910393,
0.023549411445856094,
0.00628588767722249,
-0.01331470999866724,
0.06686389446258545,
0.03706066682934761,
0.007998156361281872,
-0.0007158697699196637,
-0.11848846077919006,
-0.01179138757288456,
0.04475198686122894,
0.15377359092235565,
0.07897240668535233,
-0.04866860434412956,
-0.12546667456626892,
-0.03519701957702637,
0.07681571692228317,
0.17523938417434692,
0.28695857524871826,
-0.05339585617184639,
-0.02100514993071556,
0.0677366778254509,
-0.051271598786115646,
-0.23068949580192566,
-0.008656098507344723,
0.01048089936375618,
-0.014713214710354805,
-0.04556016996502876,
-0.10494984686374664,
0.17788971960544586,
0.19267980754375458,
-0.022328680381178856,
-0.01241613645106554,
-0.26901042461395264,
-0.12109560519456863,
0.1951368749141693,
0.10037026554346085,
0.08929591625928879,
-0.14686277508735657,
-0.05939779430627823,
-0.13061976432800293,
-0.09056096524000168,
0.17209464311599731,
-0.059613849967718124,
0.0598011240363121,
-0.042728740721940994,
0.10975035279989243,
0.05025476589798927,
-0.05955054983496666,
0.13653573393821716,
0.018535230308771133,
0.07951010018587112,
-0.07417583465576172,
-0.07310914248228073,
-0.10648909211158752,
-0.07161056250333786,
0.1055348813533783,
0.013750570826232433,
0.06736268103122711,
-0.2552928924560547,
-0.009199022315442562,
-0.005528146866708994,
0.07952385395765305,
-0.05173400416970253,
-0.034153908491134644,
-0.015619148500263691,
0.06969082355499268,
-0.02531210333108902,
-0.03312481567263603,
-0.007412171922624111,
-0.04293881356716156,
0.05368310585618019,
0.1999531239271164,
0.10136266052722931,
-0.026071680709719658,
-0.0326363630592823,
0.045056745409965515,
-0.031520552933216095,
0.06318625807762146,
-0.08593426644802094,
0.08055984973907471,
0.10786829888820648,
0.0345945879817009,
0.09944179654121399,
0.035720743238925934,
-0.00359544949606061,
-0.047708213329315186,
0.029720967635512352,
-0.11334341764450073,
0.02021140046417713,
0.02069886401295662,
0.019464507699012756,
-0.09766965359449387,
-0.049717310816049576,
0.15116554498672485,
0.045833978801965714,
-0.03516549989581108,
0.024559564888477325,
0.01330446545034647,
-0.0020509986206889153,
0.2305067628622055,
0.0018346490105614066,
0.08245553076267242,
-0.12515108287334442,
0.08263532817363739,
0.11837530136108398,
-0.13118088245391846,
0.030029477551579475,
0.09085802733898163,
-0.07209935784339905,
-0.08165796101093292,
0.07132238894701004,
0.16235420107841492,
-0.13705472648143768,
-0.028619466349482536,
-0.008700085803866386,
-0.10405043512582779,
0.07087766379117966,
0.21016831696033478,
0.09623072296380997,
0.0009894786635413766,
-0.013378788717091084,
-0.08690325170755386,
-0.12308021634817123,
0.05340128391981125,
0.07120978832244873,
0.01332789845764637,
-0.11351615935564041,
0.1972469985485077,
-0.04551108554005623,
-0.028237447142601013,
-0.02132517285645008,
0.021679705008864403,
-0.23275303840637207,
-0.037498168647289276,
-0.07752683758735657,
0.052311114966869354,
-0.06725787371397018,
0.06105753406882286,
-0.009406937286257744,
0.031370408833026886,
-0.05782648175954819,
-0.006787009071558714,
-0.06805278360843658,
-0.047851115465164185,
0.020720381289720535,
0.0787658840417862,
-0.08593796193599701,
-0.019609039649367332,
0.05411291867494583,
-0.006616227328777313,
0.03677260875701904,
0.0633661299943924,
0.07745520025491714,
-0.003990157041698694,
-0.0015280097723007202,
0.0021294117905199528,
0.046162836253643036,
0.03131505101919174,
0.07450167834758759,
-0.19559869170188904,
0.06737173348665237,
0.0030377914663404226,
0.04920181632041931,
0.0778874009847641,
0.12874388694763184,
-0.11334292590618134,
0.017870936542749405,
-0.08592910319566727,
-0.06809185445308685,
-0.12108822166919708,
0.039462167769670486,
0.13283465802669525,
0.029617497697472572,
0.09963305294513702,
-0.07967445254325867,
0.03974072262644768,
-0.18545030057430267,
-0.0038296766579151154,
-0.016309838742017746,
-0.05428338423371315,
-0.06569668650627136,
0.01924450509250164,
0.08250989019870758,
-0.03942900151014328,
0.09805496037006378,
-0.005145295988768339,
-0.01448514312505722,
0.03403167799115181,
0.100454181432724,
-0.03493122383952141,
0.00019098003394901752,
0.19150938093662262,
0.08616433292627335,
-0.009639067575335503,
0.1104118824005127,
0.10286709666252136,
0.04542091488838196,
0.013406436890363693,
0.0178945604711771,
0.11516676843166351,
-0.16056258976459503,
0.058953531086444855,
0.027501804754137993,
-0.1072535440325737,
-0.0006228763959370553,
0.16997641324996948,
-0.12182225286960602,
0.039162881672382355,
-0.06939258426427841,
0.024620266631245613,
0.08608020097017288,
-0.12406748533248901,
0.03662416338920593,
-0.004331379197537899,
-0.0713026151061058,
-0.2182742804288864,
-0.1272290050983429,
-0.13337188959121704,
-0.026461683213710785,
-0.017525985836982727,
-0.12383958697319031,
0.011700005270540714,
0.15195316076278687,
0.018609819933772087,
0.010670548304915428,
0.07116415351629257,
-0.2669263482093811,
-0.017941201105713844,
-0.02816210687160492,
0.011171636171638966,
-0.029068710282444954,
-0.02747153863310814,
-0.022613195702433586,
0.02379748970270157,
0.037421755492687225,
0.09418243914842606,
0.008076546713709831,
0.03133755922317505,
0.05802759528160095,
-0.024201590567827225,
-0.07908651977777481,
-0.03527915105223656,
-0.003543037222698331,
0.033759716898202896,
0.1347515732049942,
0.008363851346075535,
-0.0008467532461509109,
-0.04851914942264557,
0.18389426171779633,
-0.09456650912761688,
0.006124137435108423,
-0.11759469658136368,
0.2727586627006531,
-0.016546355560421944,
0.07329034805297852,
0.021747907623648643,
0.0034245343413203955,
-0.0033656612504273653,
0.16739068925380707,
0.14170081913471222,
-0.019683849066495895,
-0.0373840406537056,
0.0260548647493124,
-0.012676766142249107,
-0.024728944525122643,
0.12051210552453995,
0.027261922135949135,
0.06300767511129379,
-0.058854423463344574,
0.04794666916131973,
0.047341879457235336,
-0.0173321645706892,
-0.07575451582670212,
0.10534206032752991,
0.03370346128940582,
0.023568371310830116,
0.014541608281433582,
0.08242140710353851,
-0.07955709844827652,
0.09779976308345795,
0.06997980922460556,
-0.07198112457990646,
-0.16453881561756134,
0.03262527659535408,
-0.06622464954853058,
-0.052316851913928986,
0.08373261243104935,
-0.028775542974472046,
-0.014849245548248291,
-0.007130816113203764,
-0.020205330103635788,
-0.19545911252498627,
-0.12390965968370438,
0.032654598355293274,
0.14294445514678955,
0.26397743821144104,
0.024056831374764442,
0.0799286738038063,
0.17121334373950958,
-0.06495148688554764,
-0.15016327798366547,
0.06307061016559601,
-0.011814560741186142,
-0.10461290180683136,
0.11936967074871063,
0.09055296331644058,
-0.03416988626122475,
0.19298972189426422,
0.047987014055252075,
-0.17518137395381927,
-0.02424856647849083,
-0.015172259882092476,
0.05473354086279869,
-0.07502590119838715,
0.029016142711043358,
-0.10105352848768234,
0.12559399008750916,
0.18080922961235046,
-0.02056194096803665,
-0.009562351740896702,
-0.055784571915864944,
0.07318969070911407,
-0.02227046713232994,
0.04741506278514862,
-0.03531832620501518,
-0.11282209306955338,
0.05862381309270859,
-0.24730360507965088,
0.0022437286097556353,
-0.3156374394893646,
-0.015024678781628609,
-0.005792559124529362,
-0.054204490035772324,
-0.07037433981895447,
0.09981486946344376,
0.052276551723480225,
0.020026901736855507,
-0.050121042877435684,
-0.21293170750141144,
0.016469638794660568,
0.11085104942321777,
-0.0958489254117012,
-0.11693250387907028
] |
null | null |
transformers
|
# Model Trained Using AutoNLP
- Problem type: Multi-class Classification
- Model ID: 10022181
## Validation Metrics
- Loss: 0.369505375623703
- Accuracy: 0.8706206896551724
- Macro F1: 0.5410226656476808
- Micro F1: 0.8706206896551724
- Weighted F1: 0.8515634683886795
- Macro Precision: 0.5159711665622992
- Micro Precision: 0.8706206896551724
- Weighted Precision: 0.8346991124101657
- Macro Recall: 0.5711653346601209
- Micro Recall: 0.8706206896551724
- Weighted Recall: 0.8706206896551724
## Usage
You can use cURL to access this model:
```
$ curl -X POST -H "Authorization: Bearer YOUR_API_KEY" -H "Content-Type: application/json" -d '{"inputs": "I love AutoNLP"}' https://api-inference.huggingface.co/models/bshlgrs/autonlp-old-data-trained-10022181
```
Or Python API:
```
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Load the fine-tuned classifier and its tokenizer (use_auth_token passes your stored Hugging Face token)
model = AutoModelForSequenceClassification.from_pretrained("bshlgrs/autonlp-old-data-trained-10022181", use_auth_token=True)
tokenizer = AutoTokenizer.from_pretrained("bshlgrs/autonlp-old-data-trained-10022181", use_auth_token=True)

# Tokenize the input text and run a forward pass to obtain classification logits
inputs = tokenizer("I love AutoNLP", return_tensors="pt")
outputs = model(**inputs)
```
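As with the other checkpoints, a short sketch for decoding the prediction, assuming the `id2label` mapping in the model config:
```
import torch

# Take the argmax over the logits and map the class index back to its label
predicted_id = torch.argmax(outputs.logits, dim=-1).item()
print(model.config.id2label[predicted_id])
```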
|
{"language": "en", "tags": "autonlp", "datasets": ["bshlgrs/autonlp-data-old-data-trained"], "widget": [{"text": "I love AutoNLP \ud83e\udd17"}]}
|
text-classification
|
bshlgrs/autonlp-old-data-trained-10022181
|
[
"transformers",
"pytorch",
"bert",
"text-classification",
"autonlp",
"en",
"dataset:bshlgrs/autonlp-data-old-data-trained",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #bert #text-classification #autonlp #en #dataset-bshlgrs/autonlp-data-old-data-trained #autotrain_compatible #endpoints_compatible #region-us
|
# Model Trained Using AutoNLP
- Problem type: Multi-class Classification
- Model ID: 10022181
## Validation Metrics
- Loss: 0.369505375623703
- Accuracy: 0.8706206896551724
- Macro F1: 0.5410226656476808
- Micro F1: 0.8706206896551724
- Weighted F1: 0.8515634683886795
- Macro Precision: 0.5159711665622992
- Micro Precision: 0.8706206896551724
- Weighted Precision: 0.8346991124101657
- Macro Recall: 0.5711653346601209
- Micro Recall: 0.8706206896551724
- Weighted Recall: 0.8706206896551724
## Usage
You can use cURL to access this model:
Or Python API:
|
[
"# Model Trained Using AutoNLP\n\n- Problem type: Multi-class Classification\n- Model ID: 10022181",
"## Validation Metrics\n\n- Loss: 0.369505375623703\n- Accuracy: 0.8706206896551724\n- Macro F1: 0.5410226656476808\n- Micro F1: 0.8706206896551724\n- Weighted F1: 0.8515634683886795\n- Macro Precision: 0.5159711665622992\n- Micro Precision: 0.8706206896551724\n- Weighted Precision: 0.8346991124101657\n- Macro Recall: 0.5711653346601209\n- Micro Recall: 0.8706206896551724\n- Weighted Recall: 0.8706206896551724",
"## Usage\n\nYou can use cURL to access this model:\n\n\n\nOr Python API:"
] |
[
"TAGS\n#transformers #pytorch #bert #text-classification #autonlp #en #dataset-bshlgrs/autonlp-data-old-data-trained #autotrain_compatible #endpoints_compatible #region-us \n",
"# Model Trained Using AutoNLP\n\n- Problem type: Multi-class Classification\n- Model ID: 10022181",
"## Validation Metrics\n\n- Loss: 0.369505375623703\n- Accuracy: 0.8706206896551724\n- Macro F1: 0.5410226656476808\n- Micro F1: 0.8706206896551724\n- Weighted F1: 0.8515634683886795\n- Macro Precision: 0.5159711665622992\n- Micro Precision: 0.8706206896551724\n- Weighted Precision: 0.8346991124101657\n- Macro Recall: 0.5711653346601209\n- Micro Recall: 0.8706206896551724\n- Weighted Recall: 0.8706206896551724",
"## Usage\n\nYou can use cURL to access this model:\n\n\n\nOr Python API:"
] |
[
63,
26,
153,
17
] |
[
"passage: TAGS\n#transformers #pytorch #bert #text-classification #autonlp #en #dataset-bshlgrs/autonlp-data-old-data-trained #autotrain_compatible #endpoints_compatible #region-us \n# Model Trained Using AutoNLP\n\n- Problem type: Multi-class Classification\n- Model ID: 10022181## Validation Metrics\n\n- Loss: 0.369505375623703\n- Accuracy: 0.8706206896551724\n- Macro F1: 0.5410226656476808\n- Micro F1: 0.8706206896551724\n- Weighted F1: 0.8515634683886795\n- Macro Precision: 0.5159711665622992\n- Micro Precision: 0.8706206896551724\n- Weighted Precision: 0.8346991124101657\n- Macro Recall: 0.5711653346601209\n- Micro Recall: 0.8706206896551724\n- Weighted Recall: 0.8706206896551724## Usage\n\nYou can use cURL to access this model:\n\n\n\nOr Python API:"
] |
[
-0.07872708886861801,
0.19660229980945587,
-0.0022387311328202486,
0.10613835602998734,
0.10842139273881912,
0.042551252990961075,
0.07750429958105087,
0.1677345186471939,
-0.0007890004781074822,
0.13753646612167358,
0.10119763016700745,
0.13467927277088165,
0.06815805286169052,
0.1294824630022049,
-0.08809568732976913,
-0.16572248935699463,
0.004231477156281471,
0.034964218735694885,
0.10005641728639603,
0.07936783134937286,
0.06977830827236176,
-0.0966939926147461,
0.1052321195602417,
-0.03712764009833336,
-0.14088784158229828,
0.026460308581590652,
0.07051370292901993,
-0.08234164863824844,
0.0647466629743576,
0.06778295338153839,
0.09845465421676636,
-0.047097496688365936,
0.07995855063199997,
-0.06803872436285019,
-0.017713574692606926,
0.04447218403220177,
0.0010251940693706274,
0.08270138502120972,
0.165619358420372,
-0.020980728790163994,
0.03153905272483826,
-0.09199250489473343,
0.06592068076133728,
0.07601243257522583,
-0.07513229548931122,
-0.15570256114006042,
-0.12209957838058472,
0.08599931001663208,
0.09808596968650818,
0.0692518949508667,
-0.008268855512142181,
0.16002938151359558,
-0.06054297462105751,
0.09995496273040771,
0.08211227506399155,
-0.22312121093273163,
-0.04361678659915924,
0.19064165651798248,
-0.05755456164479256,
-0.026479998603463173,
-0.0013415234861895442,
0.010762068443000317,
0.05135411396622658,
0.03154899924993515,
-0.0007150170858949423,
-0.04117407277226448,
-0.07894787937402725,
0.020331695675849915,
-0.1354389637708664,
-0.030605057254433632,
0.14200444519519806,
-0.0027936764527112246,
-0.05208945274353027,
-0.09530523419380188,
-0.06679224967956543,
-0.06527911126613617,
-0.07851669192314148,
-0.027891194447875023,
0.005378182511776686,
-0.04339084401726723,
-0.06873434036970139,
0.0453362874686718,
-0.001066685188561678,
-0.05578664690256119,
-0.1669502854347229,
0.012276466004550457,
0.01582867093384266,
0.055989403277635574,
-0.007367551792412996,
0.019846558570861816,
-0.00949921179562807,
-0.046119458973407745,
0.0019418090814724565,
0.02993137203156948,
-0.06208273768424988,
-0.08122611790895462,
0.0019230743637308478,
0.05851125717163086,
0.02207105979323387,
0.15857288241386414,
-0.008339367806911469,
0.09417469054460526,
0.022504271939396858,
-0.025183184072375298,
-0.04877179116010666,
0.08984606713056564,
-0.11132415384054184,
-0.11913438886404037,
0.04412095621228218,
-0.015652166679501534,
0.001765458844602108,
-0.020474297925829887,
-0.07999405264854431,
-0.0675482377409935,
0.09771928191184998,
0.05476473644375801,
0.019660377874970436,
0.029489366337656975,
-0.03787961229681969,
-0.0759797990322113,
-0.012145588174462318,
-0.11569716036319733,
0.05971347913146019,
0.031411152333021164,
-0.15440994501113892,
0.07799236476421356,
0.029430033639073372,
-0.0005782424123026431,
-0.13506077229976654,
0.02991139516234398,
-0.1066918894648552,
-0.001510472153313458,
-0.09366381913423538,
-0.1252351850271225,
0.05206391587853432,
-0.0064217704348266125,
-0.02070893533527851,
-0.07108734548091888,
-0.2317553013563156,
-0.07545939832925797,
-0.011114604771137238,
-0.11966106295585632,
-0.03496694564819336,
-0.020598864182829857,
-0.014480031095445156,
0.04109611734747887,
0.00924922525882721,
0.10323365032672882,
-0.024566365405917168,
0.04514474421739578,
0.03884129226207733,
0.08223272860050201,
-0.00518586253747344,
0.03649303689599037,
-0.0431930236518383,
-0.0018385117873549461,
-0.08866170048713684,
0.07858983427286148,
-0.0566951222717762,
0.013011216185986996,
-0.1673247069120407,
-0.044954217970371246,
0.08749976754188538,
-0.012084001675248146,
0.04321970045566559,
0.1003643274307251,
-0.14578740298748016,
-0.006545417942106724,
0.10466636717319489,
-0.03473929315805435,
-0.09364719688892365,
0.09125831723213196,
-0.01580391265451908,
0.00833837129175663,
0.05473918840289116,
0.10178157687187195,
0.12206399440765381,
-0.044834960252046585,
-0.07644515484571457,
-0.0010660459520295262,
0.031117910519242287,
-0.07059682160615921,
0.08844617754220963,
-0.024619799107313156,
-0.10849538445472717,
0.009116840548813343,
0.05250339210033417,
-0.023501388728618622,
-0.03786337003111839,
-0.06162174046039581,
-0.024432310834527016,
-0.031004074960947037,
-0.00009593670984031633,
0.005442640744149685,
0.049777042120695114,
-0.03856061398983002,
-0.02695799246430397,
0.08728594332933426,
0.1538827270269394,
-0.022005470469594002,
-0.048986464738845825,
-0.15108650922775269,
0.10190786421298981,
-0.09405851364135742,
-0.032928843051195145,
-0.21607482433319092,
-0.04007487744092941,
0.023995768278837204,
-0.11938036978244781,
-0.009634568355977535,
-0.02862706407904625,
0.08447006344795227,
0.026218624785542488,
0.037684518843889236,
0.04505181312561035,
0.11785487830638885,
-0.04761457443237305,
-0.09613754600286484,
-0.05200643837451935,
-0.05155307054519653,
0.015938345342874527,
0.1988896280527115,
-0.1983303725719452,
-0.0008226896752603352,
0.0423155203461647,
0.07159547507762909,
-0.015202612616121769,
-0.043750111013650894,
-0.028051050379872322,
0.04685423523187637,
0.031623370945453644,
-0.05351421609520912,
0.09342142939567566,
-0.031059682369232178,
-0.034912947565317154,
-0.024584639817476273,
-0.25551167130470276,
0.12874700129032135,
0.11407866328954697,
0.01654680073261261,
-0.05220513790845871,
-0.04831311106681824,
0.03154817223548889,
-0.03267126530408859,
-0.036830753087997437,
0.05183834582567215,
0.13953444361686707,
0.03364505618810654,
0.11280597001314163,
-0.07954607903957367,
-0.08778885006904602,
-0.004226498305797577,
-0.03006383404135704,
-0.019239237532019615,
0.1828967183828354,
0.04872221499681473,
-0.15478335320949554,
0.10218948870897293,
-0.006513666827231646,
-0.09041853994131088,
0.07956502586603165,
0.011887483298778534,
-0.04077539220452309,
-0.07195188105106354,
-0.011891039088368416,
0.037737321108579636,
0.01457160897552967,
0.03366696834564209,
0.057339444756507874,
0.0844402089715004,
-0.02106354385614395,
0.02696436271071434,
-0.09868094325065613,
0.03487590327858925,
0.010819792747497559,
-0.04862966015934944,
0.009874148294329643,
0.010061725042760372,
0.08157780021429062,
0.1356019377708435,
0.022176582366228104,
0.025921758264303207,
0.009886117652058601,
-0.01355932466685772,
-0.10631436854600906,
0.2310294210910797,
-0.10031712055206299,
-0.1341526359319687,
-0.13663256168365479,
-0.1559152454137802,
-0.07988078147172928,
-0.0619330070912838,
-0.05657641589641571,
-0.06553038209676743,
-0.09527865797281265,
-0.05132658779621124,
-0.015630552545189857,
-0.02560747228562832,
-0.08340616524219513,
0.026635389775037766,
-0.027493834495544434,
0.08333936333656311,
-0.12161194533109665,
-0.027173051610589027,
0.0012703026877716184,
-0.11518139392137527,
0.04061073064804077,
0.04493151605129242,
0.04187781736254692,
0.1351734846830368,
-0.016511991620063782,
0.04460841417312622,
-0.00837976485490799,
0.2127462476491928,
-0.00007683510921197012,
-0.01345015224069357,
0.21494032442569733,
0.06353779137134552,
0.03152882307767868,
0.07732273638248444,
0.045581523329019547,
-0.07916861772537231,
-0.009887170046567917,
0.09984403848648071,
0.0001483636151533574,
-0.18542258441448212,
-0.19519613683223724,
0.01653079129755497,
0.07571666687726974,
0.13977159559726715,
0.019456738606095314,
0.018914561718702316,
0.08874907344579697,
-0.009412530809640884,
0.09110910445451736,
-0.0720573216676712,
0.04985629394650459,
0.13561581075191498,
0.028034526854753494,
0.1387687772512436,
-0.058332283049821854,
-0.002023175125941634,
0.11881077289581299,
-0.0028051421977579594,
0.06130082532763481,
0.017441848292946815,
0.07198203355073929,
-0.025915294885635376,
0.1024816706776619,
0.05064273998141289,
0.10428262501955032,
-0.0016989430878311396,
-0.02387997880578041,
0.026761600747704506,
-0.07438020408153534,
-0.08280213922262192,
-0.02724943682551384,
0.03005274571478367,
0.05399008467793465,
-0.08550948649644852,
-0.000874814169947058,
-0.006051858887076378,
0.12036871165037155,
0.04913020133972168,
-0.443508505821228,
-0.0634344145655632,
0.0463380366563797,
-0.025519514456391335,
-0.10535483062267303,
-0.02596805989742279,
0.0018610454862937331,
-0.15033400058746338,
0.03330252319574356,
-0.053209781646728516,
0.11414594203233719,
-0.07506729662418365,
-0.009812305681407452,
-0.015216611325740814,
0.09217683970928192,
0.0050085983239114285,
0.06732986867427826,
-0.16917411983013153,
0.11642943322658539,
0.053151436150074005,
0.04353189840912819,
-0.1048918217420578,
0.019557181745767593,
0.035851988941431046,
-0.05514277517795563,
0.12134286761283875,
0.01416326779872179,
-0.12855689227581024,
-0.2747369706630707,
-0.11542510241270065,
0.007586006075143814,
-0.0033608046360313892,
-0.004103892017155886,
0.09936363995075226,
-0.05596652626991272,
0.0031995743047446012,
-0.03191857412457466,
-0.05037279427051544,
-0.056089501827955246,
-0.06477932631969452,
0.02317366562783718,
0.06947837769985199,
-0.03823317214846611,
-0.025040313601493835,
-0.03108060173690319,
-0.024989934638142586,
0.09935639798641205,
-0.1426064819097519,
-0.060340337455272675,
-0.15299591422080994,
0.07343324273824692,
0.12982110679149628,
-0.11220305413007736,
0.04419238492846489,
-0.020496530458331108,
0.08124390244483948,
-0.014698824845254421,
-0.14414642751216888,
0.06309955567121506,
-0.06269790977239609,
-0.061764828860759735,
0.03125119209289551,
0.02136950194835663,
0.0008559559937566519,
0.0758160799741745,
0.02765060029923916,
0.024919377639889717,
-0.034895967692136765,
-0.10817986726760864,
-0.020332079380750656,
0.05372745543718338,
0.1601119488477707,
0.07117031514644623,
-0.05057009309530258,
-0.08321089297533035,
-0.03554953262209892,
0.08115161210298538,
0.20187611877918243,
0.23861803114414215,
-0.06018013134598732,
-0.0035447992850095034,
0.07878834754228592,
-0.0367894321680069,
-0.26270031929016113,
-0.013814478181302547,
0.01902385987341404,
0.01479695737361908,
-0.06985140591859818,
-0.12115302681922913,
0.15753598511219025,
0.1869601309299469,
-0.029528452083468437,
-0.02785763330757618,
-0.308705598115921,
-0.11133979260921478,
0.17559559643268585,
0.08592531085014343,
0.112381212413311,
-0.13582777976989746,
-0.05435745418071747,
-0.09026754647493362,
-0.07146763801574707,
0.1505485326051712,
-0.07910488545894623,
0.06602658331394196,
-0.05765116587281227,
0.12159226834774017,
0.049658216536045074,
-0.06885399669408798,
0.11638971418142319,
-0.012791869230568409,
0.05947750806808472,
-0.07564309239387512,
-0.058464862406253815,
-0.08973708748817444,
-0.06981756538152695,
0.12271612137556076,
0.025049373507499695,
0.0657012015581131,
-0.26708924770355225,
-0.000013637092706630938,
-0.013917645439505577,
0.0641416385769844,
-0.0521111898124218,
-0.03084489330649376,
-0.01776849292218685,
0.05571994557976723,
-0.026530548930168152,
-0.03655734285712242,
-0.026374023407697678,
-0.018819844350218773,
0.08029326796531677,
0.15535025298595428,
0.08380595594644547,
-0.027755405753850937,
-0.04761015623807907,
0.03817363455891609,
-0.025810902938246727,
0.058451730757951736,
-0.09424573928117752,
0.05425140634179115,
0.1152360662817955,
0.02528274618089199,
0.09947708994150162,
0.047557588666677475,
-0.04926434904336929,
-0.028134671971201897,
0.0206247940659523,
-0.14359506964683533,
0.023586632683873177,
0.01133468933403492,
0.046873539686203,
-0.10327330976724625,
-0.08221735060214996,
0.12466535717248917,
0.013224628753960133,
-0.04527486860752106,
0.008157658390700817,
0.010396448895335197,
0.003650140017271042,
0.24220652878284454,
0.006217145826667547,
0.07871643453836441,
-0.12288131564855576,
0.104259192943573,
0.11113832145929337,
-0.14043092727661133,
0.03716934844851494,
0.1233072280883789,
-0.06632047891616821,
-0.07098269462585449,
0.07696586847305298,
0.17040567100048065,
-0.12402437627315521,
-0.045531343668699265,
-0.030374400317668915,
-0.1179162785410881,
0.10249359160661697,
0.2093622386455536,
0.07939376682043076,
-0.0042401766404509544,
-0.020788324996829033,
-0.08635082840919495,
-0.12674908339977264,
0.06501387059688568,
0.08804909139871597,
-0.003985477611422539,
-0.08764687180519104,
0.19372187554836273,
-0.04420628026127815,
-0.0011447696015238762,
-0.021150263026356697,
0.027039630338549614,
-0.21425321698188782,
-0.03349177911877632,
-0.0982847511768341,
0.046595752239227295,
-0.056072384119033813,
0.028403041884303093,
-0.03171592578291893,
0.03701949492096901,
-0.04942813515663147,
0.010974958539009094,
-0.07833769172430038,
-0.058489978313446045,
0.012252460233867168,
0.06488373130559921,
-0.0878901481628418,
-0.0041522253304719925,
0.04878545552492142,
-0.006169463042169809,
0.0524737611413002,
0.08634880185127258,
0.07534702122211456,
0.004417812917381525,
-0.01313876360654831,
-0.05205614119768143,
0.03545183688402176,
0.026346420869231224,
0.09093683958053589,
-0.18369705975055695,
0.06919994205236435,
-0.0012260129442438483,
0.0291218850761652,
0.07119100540876389,
0.1193428486585617,
-0.09720434248447418,
0.03838619217276573,
-0.10360206663608551,
-0.02448674850165844,
-0.1169855073094368,
0.03507372736930847,
0.11487563699483871,
0.043347083032131195,
0.07222968339920044,
-0.08982671797275543,
0.03836857154965401,
-0.1900513470172882,
-0.007496298756450415,
-0.05302528664469719,
-0.06789425015449524,
-0.07345809787511826,
0.009801557287573814,
0.09847795218229294,
-0.015422006137669086,
0.07026012986898422,
-0.0016624232521280646,
-0.01122105773538351,
0.03149758279323578,
0.11517580598592758,
-0.04606817290186882,
-0.017736762762069702,
0.16375231742858887,
0.09373775869607925,
-0.00922313891351223,
0.12941592931747437,
0.10363154113292694,
0.04979514330625534,
0.04987788572907448,
0.03984452411532402,
0.12855839729309082,
-0.08512642234563828,
0.06798921525478363,
0.03279170021414757,
-0.12482433766126633,
-0.009061058983206749,
0.15739068388938904,
-0.11125408858060837,
0.032804373651742935,
-0.07960041612386703,
0.03911319375038147,
0.11785772442817688,
-0.12440841645002365,
0.03342295065522194,
-0.005001695826649666,
-0.06410001218318939,
-0.2214563488960266,
-0.10945074260234833,
-0.13523045182228088,
-0.0034976082388311625,
-0.019340721890330315,
-0.12554383277893066,
0.04218379408121109,
0.14370672404766083,
0.0271990317851305,
0.010531822219491005,
0.0735556110739708,
-0.2532211244106293,
-0.03417302668094635,
-0.01880798302590847,
0.005128142889589071,
-0.0005164009053260088,
-0.03013494238257408,
-0.02751801535487175,
0.02408722974359989,
0.02656644582748413,
0.1092359647154808,
-0.0001398869208060205,
0.048398274928331375,
0.07264450937509537,
-0.040656767785549164,
-0.06891047209501266,
-0.027794718742370605,
-0.002258151536807418,
0.049952637404203415,
0.13403961062431335,
0.018739640712738037,
0.006918298080563545,
-0.044286224991083145,
0.21006935834884644,
-0.09274651110172272,
-0.01585296168923378,
-0.10924125462770462,
0.24444691836833954,
0.01283947192132473,
0.054024871438741684,
0.027292389422655106,
0.0008623148896731436,
-0.0007827749941498041,
0.2030927836894989,
0.1285199671983719,
-0.0289496798068285,
-0.030556805431842804,
0.032355744391679764,
-0.010341973975300789,
-0.04409698396921158,
0.11790865659713745,
0.06494194269180298,
0.12038593739271164,
-0.062321435660123825,
0.039781585335731506,
0.00858172308653593,
-0.037505630403757095,
-0.11427487432956696,
0.10213615000247955,
0.021801479160785675,
0.008201445452868938,
0.039277251809835434,
0.07181121408939362,
-0.07117167860269547,
0.08244162052869797,
0.04444232955574989,
-0.12261729687452316,
-0.1730538010597229,
0.026660608127713203,
-0.041348669677972794,
-0.014279537834227085,
0.09011966735124588,
-0.021099288016557693,
-0.0006233942112885416,
-0.0012915186816826463,
-0.01590178720653057,
-0.20071162283420563,
-0.09413211792707443,
0.019007278606295586,
0.1247146874666214,
0.2917688488960266,
0.03316810727119446,
0.06096825376152992,
0.17302699387073517,
-0.0611906461417675,
-0.16130536794662476,
0.07424961775541306,
0.0020526612643152475,
-0.1193438246846199,
0.10353069007396698,
0.10304129123687744,
-0.03357357531785965,
0.15814633667469025,
0.05014033243060112,
-0.16086065769195557,
-0.019960302859544754,
-0.014239300042390823,
0.03609246015548706,
-0.06529106944799423,
0.01577080227434635,
-0.08391014486551285,
0.12953656911849976,
0.18580013513565063,
-0.03968627378344536,
0.0012197699397802353,
-0.06856309622526169,
0.06560782343149185,
-0.02129627950489521,
0.06966179609298706,
-0.0131224999204278,
-0.11179332435131073,
0.06172272190451622,
-0.24832521378993988,
-0.00013392016990110278,
-0.29632508754730225,
-0.0059361532330513,
-0.009210391901433468,
-0.05248979106545448,
-0.08844053000211716,
0.0883665680885315,
0.0491335429251194,
0.004736937582492828,
-0.0395720936357975,
-0.18948297202587128,
0.0075248293578624725,
0.11445502191781998,
-0.10025592893362045,
-0.10413645952939987
] |
null | null |
transformers
|
## This model is trained on the GoEmotions dataset, which contains 58k labeled Reddit comments covering 28 emotions
- admiration, amusement, anger, annoyance, approval, caring, confusion, curiosity, desire, disappointment, disapproval, disgust, embarrassment, excitement, fear, gratitude, grief, joy, love, nervousness, optimism, pride, realization, relief, remorse, sadness, surprise + neutral
## Training details:
- The training script is provided here: https://github.com/bsinghpratap/roberta_train_goEmotion
- Please feel free to open an issue in the repo if you have trouble running the model, and I will try to respond as soon as possible.
- The model works well on most of the emotions except: 'desire', 'disgust', 'embarrassment', 'excitement', 'fear', 'grief', 'nervousness', 'pride', 'relief', 'remorse', 'surprise'.
- I'll try to fine-tune the model further and update here if RoBERTa achieves better performance.
- Each text datapoint can have more than one label. Most of the training set had a single label: Counter({1: 36308, 2: 6541, 3: 532, 4: 28, 5: 1}). So currently I just used the first label for each datapoint. Not ideal, but it does a decent job. A minimal usage sketch is shown below.
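## Usage (sketch)
A minimal usage sketch, assuming the standard `transformers` text-classification pipeline; the checkpoint id and example input come from this card's metadata, while the printed label and score are illustrative only:

```python
from transformers import pipeline

# Load the fine-tuned RoBERTa GoEmotions classifier from the hub.
classifier = pipeline("text-classification", model="bsingh/roberta_goEmotion")

# The model returns one emotion label per input, mirroring the single-label training above.
print(classifier("I am not feeling well today."))
# Illustrative output shape: [{'label': 'sadness', 'score': 0.91}] -- actual values will differ.
```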
## Model Performance
| Emotion | GoEmotions Paper | RoBERTa | Support |
|---|---|---|---|
| admiration | 0.65 | 0.62 | 504 |
| amusement | 0.80 | 0.78 | 252 |
| anger | 0.47 | 0.44 | 197 |
| annoyance | 0.34 | 0.22 | 286 |
| approval | 0.36 | 0.31 | 318 |
| caring | 0.39 | 0.24 | 114 |
| confusion | 0.37 | 0.29 | 139 |
| curiosity | 0.54 | 0.48 | 233 |
| disappointment | 0.28 | 0.18 | 127 |
| disapproval | 0.39 | 0.26 | 220 |
| gratitude | 0.86 | 0.84 | 288 |
| joy | 0.51 | 0.47 | 116 |
| love | 0.78 | 0.68 | 169 |
| neutral | 0.68 | 0.61 | 1606 |
| optimism | 0.51 | 0.52 | 120 |
| realization | 0.21 | 0.15 | 109 |
| sadness | 0.49 | 0.42 | 108 |
|
{"language": "en", "license": "mit", "tags": ["text-classification", "pytorch", "roberta", "emotions"], "datasets": ["go_emotions"], "widget": [{"text": "I am not feeling well today."}]}
|
text-classification
|
bsingh/roberta_goEmotion
|
[
"transformers",
"pytorch",
"roberta",
"text-classification",
"emotions",
"en",
"dataset:go_emotions",
"license:mit",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #roberta #text-classification #emotions #en #dataset-go_emotions #license-mit #autotrain_compatible #endpoints_compatible #region-us
|
## This model is trained for GoEmotions dataset which contains labeled 58k Reddit comments with 28 emotions
- admiration, amusement, anger, annoyance, approval, caring, confusion, curiosity, desire, disappointment, disapproval, disgust, embarrassment, excitement, fear, gratitude, grief, joy, love, nervousness, optimism, pride, realization, relief, remorse, sadness, surprise + neutral
## Training details:
- The training script is provided here: URL
- Please feel free to start an issue in the repo if you have trouble running the model and I would try to respond as soon as possible.
- The model works well on most of the emotions except: 'desire', 'disgust', 'embarrassment', 'excitement', 'fear', 'grief', 'nervousness', 'pride', 'relief', 'remorse', 'surprise']
- I'll try to fine-tune the model further and update here if RoBERTa achieves a better performance.
- Each text datapoint can have more than 1 label. Most of the training set had 1 label: Counter({1: 36308, 2: 6541, 3: 532, 4: 28, 5: 1}). So currently I just used the first label for each of the datapoint. Not ideal but it does a decent job.
## Model Performance
============================================================<br>
Emotion: admiration<br>
============================================================<br>
GoEmotions Paper: 0.65<br>
RoBERTa: 0.62<br>
Support: 504<br>
============================================================<br>
Emotion: amusement<br>
============================================================<br>
GoEmotions Paper: 0.80<br>
RoBERTa: 0.78<br>
Support: 252<br>
============================================================<br>
Emotion: anger<br>
============================================================<br>
GoEmotions Paper: 0.47<br>
RoBERTa: 0.44<br>
Support: 197<br>
============================================================<br>
Emotion: annoyance<br>
============================================================<br>
GoEmotions Paper: 0.34<br>
RoBERTa: 0.22<br>
Support: 286<br>
============================================================<br>
Emotion: approval<br>
============================================================<br>
GoEmotions Paper: 0.36<br>
RoBERTa: 0.31<br>
Support: 318<br>
============================================================<br>
Emotion: caring<br>
============================================================<br>
GoEmotions Paper: 0.39<br>
RoBERTa: 0.24<br>
Support: 114<br>
============================================================<br>
Emotion: confusion<br>
============================================================<br>
GoEmotions Paper: 0.37<br>
RoBERTa: 0.29<br>
Support: 139<br>
============================================================<br>
Emotion: curiosity<br>
============================================================<br>
GoEmotions Paper: 0.54<br>
RoBERTa: 0.48<br>
Support: 233<br>
============================================================<br>
Emotion: disappointment<br>
============================================================<br>
GoEmotions Paper: 0.28<br>
RoBERTa: 0.18<br>
Support: 127<br>
============================================================<br>
Emotion: disapproval<br>
============================================================<br>
GoEmotions Paper: 0.39<br>
RoBERTa: 0.26<br>
Support: 220<br>
============================================================<br>
Emotion: gratitude<br>
============================================================<br>
GoEmotions Paper: 0.86<br>
RoBERTa: 0.84<br>
Support: 288<br>
============================================================<br>
Emotion: joy<br>
============================================================<br>
GoEmotions Paper: 0.51<br>
RoBERTa: 0.47<br>
Support: 116<br>
============================================================<br>
Emotion: love<br>
============================================================<br>
GoEmotions Paper: 0.78<br>
RoBERTa: 0.68<br>
Support: 169<br>
============================================================<br>
Emotion: neutral<br>
============================================================<br>
GoEmotions Paper: 0.68<br>
RoBERTa: 0.61<br>
Support: 1606<br>
============================================================<br>
Emotion: optimism<br>
============================================================<br>
GoEmotions Paper: 0.51<br>
RoBERTa: 0.52<br>
Support: 120<br>
============================================================<br>
Emotion: realization<br>
============================================================<br>
GoEmotions Paper: 0.21<br>
RoBERTa: 0.15<br>
Support: 109<br>
============================================================<br>
Emotion: sadness<br>
============================================================<br>
GoEmotions Paper: 0.49<br>
RoBERTa: 0.42<br>
Support: 108
|
[
"## This model is trained for GoEmotions dataset which contains labeled 58k Reddit comments with 28 emotions\n- admiration, amusement, anger, annoyance, approval, caring, confusion, curiosity, desire, disappointment, disapproval, disgust, embarrassment, excitement, fear, gratitude, grief, joy, love, nervousness, optimism, pride, realization, relief, remorse, sadness, surprise + neutral",
"## Training details:\n- The training script is provided here: URL\n- Please feel free to start an issue in the repo if you have trouble running the model and I would try to respond as soon as possible.\n- The model works well on most of the emotions except: 'desire', 'disgust', 'embarrassment', 'excitement', 'fear', 'grief', 'nervousness', 'pride', 'relief', 'remorse', 'surprise']\n- I'll try to fine-tune the model further and update here if RoBERTa achieves a better performance.\n- Each text datapoint can have more than 1 label. Most of the training set had 1 label: Counter({1: 36308, 2: 6541, 3: 532, 4: 28, 5: 1}). So currently I just used the first label for each of the datapoint. Not ideal but it does a decent job.",
"## Model Performance\n============================================================<br>\nEmotion: admiration<br>\n============================================================<br>\nGoEmotions Paper: 0.65<br>\nRoBERTa: 0.62<br>\nSupport: 504<br>\n============================================================<br>\nEmotion: amusement<br>\n============================================================<br>\nGoEmotions Paper: 0.80<br>\nRoBERTa: 0.78<br>\nSupport: 252<br>\n============================================================<br>\nEmotion: anger<br>\n============================================================<br>\nGoEmotions Paper: 0.47<br>\nRoBERTa: 0.44<br>\nSupport: 197<br>\n============================================================<br>\nEmotion: annoyance<br>\n============================================================<br>\nGoEmotions Paper: 0.34<br>\nRoBERTa: 0.22<br>\nSupport: 286<br>\n============================================================<br>\nEmotion: approval<br>\n============================================================<br>\nGoEmotions Paper: 0.36<br>\nRoBERTa: 0.31<br>\nSupport: 318<br>\n============================================================<br>\nEmotion: caring<br>\n============================================================<br>\nGoEmotions Paper: 0.39<br>\nRoBERTa: 0.24<br>\nSupport: 114<br>\n============================================================<br>\nEmotion: confusion<br>\n============================================================<br>\nGoEmotions Paper: 0.37<br>\nRoBERTa: 0.29<br>\nSupport: 139<br>\n============================================================<br>\nEmotion: curiosity<br>\n============================================================<br>\nGoEmotions Paper: 0.54<br>\nRoBERTa: 0.48<br>\nSupport: 233<br>\n============================================================<br>\nEmotion: disappointment<br>\n============================================================<br>\nGoEmotions Paper: 0.28<br>\nRoBERTa: 0.18<br>\nSupport: 127<br>\n============================================================<br>\nEmotion: disapproval<br>\n============================================================<br>\nGoEmotions Paper: 0.39<br>\nRoBERTa: 0.26<br>\nSupport: 220<br>\n============================================================<br>\nEmotion: gratitude<br>\n============================================================<br>\nGoEmotions Paper: 0.86<br>\nRoBERTa: 0.84<br>\nSupport: 288<br>\n============================================================<br>\nEmotion: joy<br>\n============================================================<br>\nGoEmotions Paper: 0.51<br>\nRoBERTa: 0.47<br>\nSupport: 116<br>\n============================================================<br>\nEmotion: love<br>\n============================================================<br>\nGoEmotions Paper: 0.78<br>\nRoBERTa: 0.68<br>\nSupport: 169<br>\n============================================================<br>\nEmotion: neutral<br>\n============================================================<br>\nGoEmotions Paper: 0.68<br>\nRoBERTa: 0.61<br>\nSupport: 1606<br>\n============================================================<br>\nEmotion: optimism<br>\n============================================================<br>\nGoEmotions Paper: 0.51<br>\nRoBERTa: 0.52<br>\nSupport: 120<br>\n============================================================<br>\nEmotion: realization<br>\n============================================================<br>\nGoEmotions Paper: 0.21<br>\nRoBERTa: 0.15<br>\nSupport: 
109<br>\n============================================================<br>\nEmotion: sadness<br>\n============================================================<br>\nGoEmotions Paper: 0.49<br>\nRoBERTa: 0.42<br>\nSupport: 108"
] |
[
"TAGS\n#transformers #pytorch #roberta #text-classification #emotions #en #dataset-go_emotions #license-mit #autotrain_compatible #endpoints_compatible #region-us \n",
"## This model is trained for GoEmotions dataset which contains labeled 58k Reddit comments with 28 emotions\n- admiration, amusement, anger, annoyance, approval, caring, confusion, curiosity, desire, disappointment, disapproval, disgust, embarrassment, excitement, fear, gratitude, grief, joy, love, nervousness, optimism, pride, realization, relief, remorse, sadness, surprise + neutral",
"## Training details:\n- The training script is provided here: URL\n- Please feel free to start an issue in the repo if you have trouble running the model and I would try to respond as soon as possible.\n- The model works well on most of the emotions except: 'desire', 'disgust', 'embarrassment', 'excitement', 'fear', 'grief', 'nervousness', 'pride', 'relief', 'remorse', 'surprise']\n- I'll try to fine-tune the model further and update here if RoBERTa achieves a better performance.\n- Each text datapoint can have more than 1 label. Most of the training set had 1 label: Counter({1: 36308, 2: 6541, 3: 532, 4: 28, 5: 1}). So currently I just used the first label for each of the datapoint. Not ideal but it does a decent job.",
"## Model Performance\n============================================================<br>\nEmotion: admiration<br>\n============================================================<br>\nGoEmotions Paper: 0.65<br>\nRoBERTa: 0.62<br>\nSupport: 504<br>\n============================================================<br>\nEmotion: amusement<br>\n============================================================<br>\nGoEmotions Paper: 0.80<br>\nRoBERTa: 0.78<br>\nSupport: 252<br>\n============================================================<br>\nEmotion: anger<br>\n============================================================<br>\nGoEmotions Paper: 0.47<br>\nRoBERTa: 0.44<br>\nSupport: 197<br>\n============================================================<br>\nEmotion: annoyance<br>\n============================================================<br>\nGoEmotions Paper: 0.34<br>\nRoBERTa: 0.22<br>\nSupport: 286<br>\n============================================================<br>\nEmotion: approval<br>\n============================================================<br>\nGoEmotions Paper: 0.36<br>\nRoBERTa: 0.31<br>\nSupport: 318<br>\n============================================================<br>\nEmotion: caring<br>\n============================================================<br>\nGoEmotions Paper: 0.39<br>\nRoBERTa: 0.24<br>\nSupport: 114<br>\n============================================================<br>\nEmotion: confusion<br>\n============================================================<br>\nGoEmotions Paper: 0.37<br>\nRoBERTa: 0.29<br>\nSupport: 139<br>\n============================================================<br>\nEmotion: curiosity<br>\n============================================================<br>\nGoEmotions Paper: 0.54<br>\nRoBERTa: 0.48<br>\nSupport: 233<br>\n============================================================<br>\nEmotion: disappointment<br>\n============================================================<br>\nGoEmotions Paper: 0.28<br>\nRoBERTa: 0.18<br>\nSupport: 127<br>\n============================================================<br>\nEmotion: disapproval<br>\n============================================================<br>\nGoEmotions Paper: 0.39<br>\nRoBERTa: 0.26<br>\nSupport: 220<br>\n============================================================<br>\nEmotion: gratitude<br>\n============================================================<br>\nGoEmotions Paper: 0.86<br>\nRoBERTa: 0.84<br>\nSupport: 288<br>\n============================================================<br>\nEmotion: joy<br>\n============================================================<br>\nGoEmotions Paper: 0.51<br>\nRoBERTa: 0.47<br>\nSupport: 116<br>\n============================================================<br>\nEmotion: love<br>\n============================================================<br>\nGoEmotions Paper: 0.78<br>\nRoBERTa: 0.68<br>\nSupport: 169<br>\n============================================================<br>\nEmotion: neutral<br>\n============================================================<br>\nGoEmotions Paper: 0.68<br>\nRoBERTa: 0.61<br>\nSupport: 1606<br>\n============================================================<br>\nEmotion: optimism<br>\n============================================================<br>\nGoEmotions Paper: 0.51<br>\nRoBERTa: 0.52<br>\nSupport: 120<br>\n============================================================<br>\nEmotion: realization<br>\n============================================================<br>\nGoEmotions Paper: 0.21<br>\nRoBERTa: 0.15<br>\nSupport: 
109<br>\n============================================================<br>\nEmotion: sadness<br>\n============================================================<br>\nGoEmotions Paper: 0.49<br>\nRoBERTa: 0.42<br>\nSupport: 108"
] |
[
55,
108,
208,
1010
] |
[
"passage: TAGS\n#transformers #pytorch #roberta #text-classification #emotions #en #dataset-go_emotions #license-mit #autotrain_compatible #endpoints_compatible #region-us \n## This model is trained for GoEmotions dataset which contains labeled 58k Reddit comments with 28 emotions\n- admiration, amusement, anger, annoyance, approval, caring, confusion, curiosity, desire, disappointment, disapproval, disgust, embarrassment, excitement, fear, gratitude, grief, joy, love, nervousness, optimism, pride, realization, relief, remorse, sadness, surprise + neutral## Training details:\n- The training script is provided here: URL\n- Please feel free to start an issue in the repo if you have trouble running the model and I would try to respond as soon as possible.\n- The model works well on most of the emotions except: 'desire', 'disgust', 'embarrassment', 'excitement', 'fear', 'grief', 'nervousness', 'pride', 'relief', 'remorse', 'surprise']\n- I'll try to fine-tune the model further and update here if RoBERTa achieves a better performance.\n- Each text datapoint can have more than 1 label. Most of the training set had 1 label: Counter({1: 36308, 2: 6541, 3: 532, 4: 28, 5: 1}). So currently I just used the first label for each of the datapoint. Not ideal but it does a decent job."
] |
[
-0.032077547162771225,
0.02740655280649662,
-0.0059502944350242615,
0.04314001649618149,
0.1097431480884552,
0.054898012429475784,
-0.020536962896585464,
0.106776662170887,
0.10717599838972092,
0.07738594710826874,
0.04381665959954262,
0.11144295334815979,
0.01134608406573534,
0.042700860649347305,
-0.021139925345778465,
-0.2346932590007782,
0.005442509427666664,
-0.03012579306960106,
0.13749946653842926,
0.09461618959903717,
0.11239808797836304,
-0.05860808119177818,
0.06017375364899635,
-0.06387196481227875,
-0.030897358432412148,
0.017607398331165314,
-0.009399617090821266,
0.006718365475535393,
0.08846139162778854,
0.013363336212933064,
0.12653979659080505,
0.03527203947305679,
-0.022477956488728523,
-0.228147953748703,
0.04745020717382431,
0.07035571336746216,
0.02246759459376335,
0.039071377366781235,
-0.0015985857462510467,
-0.12202386558055878,
0.14187368750572205,
-0.21248097717761993,
0.07250258326530457,
0.0764814242720604,
-0.1263926476240158,
-0.21894752979278564,
-0.0843273252248764,
0.05043872445821762,
0.116920605301857,
0.07900696247816086,
-0.10037556290626526,
0.1542617827653885,
-0.13703462481498718,
0.03946692496538162,
0.27297526597976685,
-0.2263920158147812,
-0.030697796493768692,
0.0002563120215199888,
0.09556904435157776,
-0.02585325390100479,
-0.14137819409370422,
0.026392677798867226,
0.016814103350043297,
0.0480935275554657,
-0.0499018095433712,
-0.0009958179434761405,
0.22412237524986267,
-0.04546515643596649,
-0.0999443456530571,
-0.06650612503290176,
0.06918364018201828,
0.1442076563835144,
-0.09395062923431396,
-0.17599906027317047,
-0.028554560616612434,
-0.08932014554738998,
-0.037396155297756195,
-0.08244085311889648,
0.014493006281554699,
0.012241944670677185,
0.020576216280460358,
-0.016661621630191803,
-0.1473916471004486,
0.0549931637942791,
-0.04213675111532211,
0.03622924163937569,
-0.02106454409658909,
-0.012802032753825188,
0.03372267633676529,
0.0075315251015126705,
-0.043070316314697266,
-0.0573396272957325,
-0.03405096009373665,
-0.10658396035432816,
-0.1421113908290863,
-0.009825125336647034,
-0.13027812540531158,
-0.09228702634572983,
-0.012744038365781307,
0.148023784160614,
-0.0040056053549051285,
0.01895933412015438,
0.022851187735795975,
0.013093029148876667,
0.14171551167964935,
0.11304159462451935,
-0.016894394531846046,
-0.09588810056447983,
-0.07034265249967575,
0.059600524604320526,
0.02371956966817379,
-0.0034306077286601067,
0.03259854018688202,
0.03862926363945007,
0.03982025757431984,
0.0422247014939785,
-0.020442917943000793,
0.08640483021736145,
-0.10399644821882248,
0.004371624905616045,
-0.02298392355442047,
-0.10048576444387436,
0.0068761915899813175,
0.033103253692388535,
-0.03001384064555168,
0.09992709010839462,
-0.020115794613957405,
0.018839584663510323,
0.01370721310377121,
-0.0032558387611061335,
-0.039114248007535934,
-0.05977826938033104,
-0.12622348964214325,
-0.0645509883761406,
0.06306637823581696,
-0.047204118221998215,
-0.013559077866375446,
-0.10239121317863464,
-0.18979190289974213,
-0.059816669672727585,
0.062036607414484024,
-0.06334589421749115,
0.003994904924184084,
-0.031086836010217667,
-0.013064156286418438,
-0.0127401202917099,
0.023442354053258896,
0.019345439970493317,
-0.004636999685317278,
0.03057359904050827,
-0.0108132129535079,
0.12125520408153534,
0.06243133172392845,
-0.004901442676782608,
-0.13824164867401123,
0.0005157776176929474,
-0.20397774875164032,
0.08849722146987915,
-0.11092884838581085,
0.10592183470726013,
-0.0882209911942482,
-0.026372278109192848,
0.06073892489075661,
0.06916685402393341,
-0.02387150004506111,
0.16055864095687866,
-0.18952560424804688,
-0.08381520956754684,
0.12355933338403702,
-0.09757380932569504,
0.021165575832128525,
0.15767976641654968,
-0.062392525374889374,
0.1046694666147232,
0.1209343671798706,
0.1188054010272026,
-0.05465763062238693,
-0.0813155248761177,
-0.10136034339666367,
-0.014207388274371624,
-0.12205258011817932,
0.1822846531867981,
0.011498675681650639,
0.0136005450040102,
0.031242897734045982,
0.02928995154798031,
0.09152978658676147,
0.027522660791873932,
-0.04676467180252075,
-0.06558141112327576,
-0.015193594619631767,
-0.025898903608322144,
0.05380759388208389,
0.05349656194448471,
-0.056481461971998215,
-0.08097260445356369,
-0.20548364520072937,
-0.13085299730300903,
0.0881158635020256,
0.002495463704690337,
0.0017516890075057745,
-0.1061059907078743,
0.08822119235992432,
0.10093100368976593,
0.0067057982087135315,
-0.17593710124492645,
-0.05317295342683792,
0.01178790908306837,
-0.020290972664952278,
0.0430702343583107,
0.11306137591600418,
0.09094865620136261,
-0.05174807831645012,
-0.02429724670946598,
-0.0137453842908144,
-0.07799462229013443,
-0.004842168651521206,
-0.030257336795330048,
-0.21859753131866455,
-0.0018519448349252343,
-0.09704407304525375,
0.1871810108423233,
-0.17869655787944794,
-0.0028274531941860914,
0.05384340509772301,
0.10646229237318039,
0.044835541397333145,
-0.03535361960530281,
0.0039452118799090385,
-0.015383650548756123,
0.002321530133485794,
0.0033886749297380447,
0.10357151925563812,
-0.006699902005493641,
-0.0463947169482708,
0.055427540093660355,
-0.060243505984544754,
-0.22539390623569489,
0.1052294597029686,
-0.03442133963108063,
-0.14879202842712402,
0.09181132912635803,
-0.09340592473745346,
0.06016148254275322,
-0.010463332757353783,
0.01271942537277937,
0.12064395844936371,
0.08024520426988602,
0.04834628477692604,
-0.03443514555692673,
-0.011693393811583519,
0.013827201910316944,
-0.0818871334195137,
-0.07957778871059418,
0.11804316192865372,
-0.01778045855462551,
-0.1967354267835617,
0.02607908472418785,
0.09036871790885925,
-0.0781613290309906,
0.1841513216495514,
0.028171125799417496,
-0.09601714462041855,
-0.07479266822338104,
-0.02548212744295597,
-0.025143828243017197,
0.02785140834748745,
-0.09758204966783524,
-0.011770565062761307,
0.04507558047771454,
-0.04068595916032791,
0.0021808031015098095,
-0.06745114922523499,
-0.021945232525467873,
0.023786917328834534,
0.04235757514834404,
0.03376028314232826,
0.08318483829498291,
0.018726229667663574,
0.08454682677984238,
0.02017197757959366,
-0.05981685593724251,
-0.031312279403209686,
-0.014348920434713364,
-0.04513752833008766,
0.11814018338918686,
-0.05932602658867836,
-0.2637559771537781,
-0.02824152261018753,
0.06175151839852333,
-0.014477488584816456,
0.02295982837677002,
0.047686561942100525,
-0.24005159735679626,
-0.039044711738824844,
-0.01681686006486416,
0.021420270204544067,
0.004229656420648098,
-0.010888106189668179,
0.04758397489786148,
0.006115893833339214,
-0.007622320670634508,
-0.06806396692991257,
-0.013040059246122837,
-0.06667137891054153,
-0.12926451861858368,
0.04097995534539223,
-0.04589805379509926,
0.0744444727897644,
0.10987293720245361,
0.077766552567482,
-0.00019490037811920047,
-0.08968961238861084,
0.23533682525157928,
-0.11202278733253479,
-0.0033846416044980288,
0.086501844227314,
-0.00701954634860158,
0.09650393575429916,
0.13013491034507751,
0.0011037496151402593,
-0.10543043166399002,
0.046033043414354324,
0.16141705214977264,
0.008705566637217999,
-0.21827831864356995,
-0.012575054541230202,
0.004797454923391342,
0.036433469504117966,
-0.06686202436685562,
0.037525102496147156,
0.13519814610481262,
0.0020645123440772295,
-0.020518943667411804,
-0.10999095439910889,
-0.05267959088087082,
0.0964057520031929,
0.06432131677865982,
-0.051079291850328445,
0.05953868851065636,
-0.02535780891776085,
-0.06271723657846451,
0.08145369589328766,
-0.09003672003746033,
0.2600444257259369,
0.014243937097489834,
0.12423665821552277,
0.09651245921850204,
-0.006100951228290796,
-0.033394116908311844,
-0.008806885220110416,
-0.04197027161717415,
-0.03575035557150841,
-0.11853751540184021,
-0.004089874215424061,
-0.08435243368148804,
0.06105393171310425,
0.12928049266338348,
0.03543592616915703,
-0.11976080387830734,
0.02236444316804409,
0.12584665417671204,
0.22856654226779938,
0.06976024061441422,
-0.10613096505403519,
-0.0663490816950798,
0.024521030485630035,
-0.08085808157920837,
0.0065084779635071754,
-0.06677036732435226,
-0.11247267574071884,
-0.09620237350463867,
0.06470552086830139,
0.01384938508272171,
0.02028028853237629,
-0.08106008172035217,
0.07644776254892349,
-0.10962722450494766,
0.002738260431215167,
0.02459123358130455,
0.060527700930833817,
-0.2076464742422104,
0.2757992446422577,
0.002245353301987052,
0.0696953684091568,
-0.05456429719924927,
-0.004127422347664833,
0.061529893428087234,
-0.02044132724404335,
0.16946382820606232,
-0.017300834879279137,
0.004757726565003395,
-0.0733812227845192,
0.03354402631521225,
0.037693995982408524,
0.10421652346849442,
-0.06013786047697067,
0.12390138953924179,
-0.01630435138940811,
-0.007725871168076992,
-0.003460349515080452,
0.15313035249710083,
-0.18351592123508453,
-0.10057329386472702,
0.06168109178543091,
-0.06103047356009483,
-0.04092990979552269,
-0.0243965033441782,
-0.07388915121555328,
0.06110316514968872,
0.2378322184085846,
0.006946169305592775,
0.014183026738464832,
-0.10730277001857758,
0.08030491322278976,
0.040552180260419846,
-0.0799821987748146,
-0.046033840626478195,
0.008957123383879662,
0.10286667943000793,
-0.01971154473721981,
-0.028751743957400322,
0.06621111184358597,
-0.02850005030632019,
-0.14918014407157898,
-0.09438912570476532,
0.057650696486234665,
0.08039233088493347,
0.08042340725660324,
0.04352934658527374,
0.026048121973872185,
0.010780996643006802,
-0.037417955696582794,
0.107376329600811,
0.038981687277555466,
-0.0015292427269741893,
0.02785816602408886,
-0.04417841508984566,
-0.051559388637542725,
-0.10901240259408951,
-0.07696317136287689,
0.16507360339164734,
0.3267245590686798,
-0.062149059027433395,
0.09614267945289612,
0.09184976667165756,
-0.09215930849313736,
-0.15509334206581116,
-0.06824637204408646,
0.13846901059150696,
-0.01888228952884674,
-0.00243915943428874,
-0.20505864918231964,
0.04010983183979988,
0.054631903767585754,
-0.013367886655032635,
-0.15581496059894562,
-0.11974424123764038,
-0.04873251914978027,
0.07769183069467545,
0.02820049785077572,
0.20867201685905457,
-0.1638098806142807,
0.002812102437019348,
-0.035753995180130005,
-0.08297751098871231,
0.16878774762153625,
-0.09448903799057007,
0.12995897233486176,
0.05381762608885765,
0.13811545073986053,
0.03214683383703232,
-0.039197444915771484,
0.13681110739707947,
0.031551286578178406,
0.08151713758707047,
-0.08274102210998535,
-0.005092756822705269,
0.050390906631946564,
-0.07354103028774261,
0.06500695645809174,
-0.05254247784614563,
-0.04821677505970001,
-0.1889730840921402,
-0.08219477534294128,
-0.15431571006774902,
0.05668892711400986,
-0.04637369140982628,
-0.05595620721578598,
-0.02418714389204979,
0.11196639388799667,
0.06475293636322021,
-0.028037266805768013,
-0.07730697840452194,
-0.09403912723064423,
0.0031763948500156403,
-0.07472441345453262,
0.11070159822702408,
0.03636329621076584,
-0.22376082837581635,
-0.0011438162764534354,
-0.03149339184165001,
0.00007305135659407824,
-0.13875065743923187,
-0.004875434096902609,
0.07882101833820343,
-0.04246765002608299,
0.18290670216083527,
0.029751958325505257,
-0.15318383276462555,
0.06004448980093002,
0.12281728535890579,
-0.06943320482969284,
-0.14735738933086395,
0.037996988743543625,
-0.05623920261859894,
-0.10405924916267395,
-0.11072065681219101,
0.13936620950698853,
0.013021135702729225,
-0.03649121895432472,
-0.02689482644200325,
0.012919226661324501,
-0.0038949516601860523,
-0.021518506109714508,
-0.009728292003273964,
0.02029132843017578,
-0.052221573889255524,
0.025106709450483322,
0.05238134413957596,
-0.21055035293102264,
0.11481042951345444,
0.09550849348306656,
-0.032034240663051605,
-0.07224992662668228,
0.005967273376882076,
0.2243904322385788,
-0.08523812890052795,
0.02176477760076523,
-0.05588913336396217,
-0.058471549302339554,
0.057771481573581696,
0.17970813810825348,
0.05027609318494797,
-0.0017235652776435018,
-0.007339143194258213,
0.035554349422454834,
-0.0401388444006443,
0.041540052741765976,
0.12273989617824554,
-0.05010795220732689,
-0.04792628437280655,
0.05037027597427368,
-0.005791586823761463,
-0.013981220312416553,
-0.01312006264925003,
-0.020893890410661697,
-0.1005830243229866,
-0.02446005679666996,
-0.14704887568950653,
-0.04262356087565422,
-0.009446687996387482,
-0.005351679865270853,
0.03130495175719261,
0.01609760709106922,
-0.0059239622205495834,
-0.03309227153658867,
-0.06836256384849548,
-0.0360727421939373,
0.05662011727690697,
0.07399888336658478,
-0.18247847259044647,
-0.06397046893835068,
0.09258360415697098,
-0.07159505784511566,
0.08469291031360626,
0.03653350844979286,
0.02172068879008293,
0.08378533273935318,
-0.1869925707578659,
0.0075246915221214294,
0.0229936633259058,
-0.048349812626838684,
0.007862081751227379,
-0.056517984718084335,
0.050279900431632996,
-0.05792376399040222,
0.05284310504794121,
0.04775108024477959,
0.08005882054567337,
-0.09146349132061005,
0.00467498367652297,
0.1796269565820694,
-0.08740736544132233,
-0.07481642067432404,
-0.018632445484399796,
0.027591286227107048,
0.03577296435832977,
0.17318116128444672,
-0.05192558839917183,
0.03392072021961212,
-0.17194794118404388,
0.006342428270727396,
0.0056861466728150845,
0.023222114890813828,
-0.03285432234406471,
-0.07695063203573227,
0.03726794198155403,
-0.04792265221476555,
0.0458892323076725,
0.019581764936447144,
0.09051990509033203,
0.040852054953575134,
-0.02931540086865425,
-0.025259485468268394,
0.005495396908372641,
0.02759382128715515,
0.10436694324016571,
-0.03871814161539078,
-0.006331661716103554,
0.016063394024968147,
0.057477205991744995,
-0.039188649505376816,
0.14462247490882874,
0.1612475961446762,
0.22939008474349976,
0.08016946911811829,
-0.010595009662210941,
-0.0160163976252079,
-0.03177141025662422,
0.04514686018228531,
-0.07399909943342209,
0.10599355399608612,
-0.05459277331829071,
0.12121511250734329,
0.16718259453773499,
0.015731116756796837,
0.12545928359031677,
-0.08796621114015579,
-0.039189908653497696,
-0.07295041531324387,
-0.11531883478164673,
-0.07567646354436874,
-0.03036350943148136,
0.08860121667385101,
-0.08376600593328476,
0.021647732704877853,
-0.06398025155067444,
0.02150350622832775,
-0.029776381328701973,
-0.020622262731194496,
-0.024687113240361214,
-0.02471667155623436,
0.14896854758262634,
-0.05800745263695717,
-0.0421164333820343,
0.028834957629442215,
0.047949351370334625,
0.008018333464860916,
0.022345632314682007,
0.029157834127545357,
0.06475681066513062,
-0.12093779444694519,
0.02765033021569252,
-0.0811295136809349,
-0.12008069455623627,
0.018851496279239655,
0.007785415276885033,
-0.02187502384185791,
0.07394122332334518,
-0.012949848547577858,
-0.0028720004484057426,
0.007156883366405964,
0.20488415658473969,
-0.02007248066365719,
-0.03996570408344269,
-0.18196134269237518,
0.23551417887210846,
0.05366740748286247,
0.05073122680187225,
-0.007203459274023771,
-0.1220417395234108,
-0.0011719680624082685,
0.1427675038576126,
0.11013636738061905,
-0.01237388327717781,
0.00003691491292556748,
0.049426354467868805,
0.030683055520057678,
-0.04499350115656853,
0.018465232104063034,
0.12929320335388184,
0.12376102805137634,
-0.11683172732591629,
0.1101967915892601,
-0.027521681040525436,
-0.037952568382024765,
-0.030952544882893562,
0.07285144180059433,
0.05657988786697388,
0.04318666458129883,
-0.05887987092137337,
0.08915029466152191,
-0.04662853479385376,
-0.21852053701877594,
0.05457857996225357,
-0.08647307008504868,
-0.09699393063783646,
-0.041779760271310806,
0.08352762460708618,
0.10526258498430252,
0.15007181465625763,
0.08126559108495712,
-0.09036818146705627,
0.23051097989082336,
-0.030585048720240593,
-0.1274176985025406,
-0.038459550589323044,
0.14314086735248566,
-0.04089817404747009,
0.12879525125026703,
-0.02070510759949684,
0.04266524687409401,
0.1393001228570938,
-0.04062687233090401,
-0.05983886495232582,
0.05054629594087601,
0.044056475162506104,
-0.0549355149269104,
0.022889969870448112,
0.22826170921325684,
-0.021794544532895088,
0.008874276652932167,
0.07714467495679855,
-0.16006731986999512,
0.06785130500793457,
-0.026414774358272552,
-0.042070936411619186,
-0.05545744672417641,
0.1992291957139969,
-0.13364669680595398,
0.05808575078845024,
0.1966513842344284,
0.024525482207536697,
0.03524129092693329,
-0.11395745724439621,
-0.050581760704517365,
0.0513949953019619,
-0.008921848610043526,
-0.08324772864580154,
-0.09618958085775375,
0.012735842727124691,
0.1622762829065323,
0.06323163211345673,
-0.2554260492324829,
-0.10884933173656464,
0.06604603677988052,
0.03147406503558159,
-0.061334580183029175,
0.061727724969387054,
0.06884552538394928,
0.050552744418382645,
-0.007478257641196251,
-0.13390974700450897,
0.03133908286690712,
0.13647224009037018,
-0.10291197150945663,
-0.031087292358279228
] |
null | null |
transformers
|
# Yoda DialoGPT Model
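A minimal conversational sketch, assuming the standard DialoGPT generation recipe; the checkpoint id comes from this card, while the prompt text and generation settings are illustrative assumptions:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

# Load the fine-tuned DialoGPT checkpoint from the hub.
tokenizer = AutoTokenizer.from_pretrained("bspans/DialoGPT-small-yoda")
model = AutoModelForCausalLM.from_pretrained("bspans/DialoGPT-small-yoda")

# Encode one user turn, appending the end-of-sequence token as DialoGPT expects.
input_ids = tokenizer.encode("How do I become a Jedi?" + tokenizer.eos_token, return_tensors="pt")

# Generate a reply and decode only the newly generated tokens.
reply_ids = model.generate(input_ids, max_length=100, pad_token_id=tokenizer.eos_token_id)
print(tokenizer.decode(reply_ids[:, input_ids.shape[-1]:][0], skip_special_tokens=True))
```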
|
{"tags": ["conversational"]}
|
text-generation
|
bspans/DialoGPT-small-yoda
|
[
"transformers",
"pytorch",
"gpt2",
"text-generation",
"conversational",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
# Yoda DialoGPT Model
|
[
"# Yoda DialoGPT Model"
] |
[
"TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"# Yoda DialoGPT Model"
] |
[
51,
8
] |
[
"passage: TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n# Yoda DialoGPT Model"
] |
[
-0.019523952156305313,
0.08738385140895844,
-0.0057960688136518,
0.01156123448163271,
0.1749093234539032,
-0.005737789440900087,
0.16108189523220062,
0.12950651347637177,
0.0008764049271121621,
-0.05426200106739998,
0.09532385319471359,
0.12720444798469543,
0.03391678258776665,
0.09928861260414124,
-0.06926876306533813,
-0.3217432498931885,
0.050389744341373444,
0.04106822982430458,
-0.03284084424376488,
0.11956336349248886,
0.0893711969256401,
-0.02720244787633419,
0.08215956389904022,
0.021456509828567505,
-0.1412605494260788,
0.00343534373678267,
0.00905965268611908,
-0.15076036751270294,
0.12530973553657532,
0.06549831479787827,
0.025502733886241913,
0.011720659211277962,
-0.055777207016944885,
-0.13874663412570953,
0.03698261082172394,
-0.019788799807429314,
-0.04485246539115906,
0.03194704279303551,
0.018043240532279015,
-0.09553586691617966,
0.09889886528253555,
0.10606331378221512,
-0.025147615000605583,
0.05249358341097832,
-0.17657004296779633,
-0.02725200541317463,
0.020698128268122673,
0.040154147893190384,
0.10159304738044739,
0.09707565605640411,
-0.05108032003045082,
0.053304221481084824,
-0.06052464619278908,
0.089447520673275,
0.06357408314943314,
-0.300363153219223,
-0.022725332528352737,
0.12468787282705307,
0.040617212653160095,
0.04498717561364174,
-0.050432171672582626,
0.07051602005958557,
0.03045903705060482,
0.0019289364572614431,
-0.0628027394413948,
-0.0716988816857338,
0.0019597059581428766,
0.012008442543447018,
-0.09656941890716553,
-0.010923312976956367,
0.2560465335845947,
-0.031599145382642746,
0.05677775293588638,
-0.06123391166329384,
-0.11553069949150085,
-0.004293516743928194,
-0.05804640054702759,
-0.011375603266060352,
-0.09449518471956253,
0.10447583347558975,
0.008482451550662518,
-0.07451125234365463,
-0.11656408756971359,
-0.017828039824962616,
-0.15149115025997162,
0.180460125207901,
0.03413243591785431,
0.036579426378011703,
-0.22433871030807495,
0.08802085369825363,
-0.03587491065263748,
-0.09692022949457169,
0.02450793795287609,
-0.10433249175548553,
0.020796045660972595,
0.032108258455991745,
-0.026830539107322693,
-0.07390373945236206,
0.0917741060256958,
0.11511855572462082,
0.02032558061182499,
0.012341717258095741,
-0.03310564160346985,
0.04163290932774544,
0.07095904648303986,
0.09903064370155334,
-0.011971370317041874,
-0.08492200821638107,
0.03229416161775589,
-0.0944281667470932,
0.009367357939481735,
-0.06431592255830765,
-0.18728888034820557,
-0.03857465833425522,
0.06471797078847885,
0.05468624457716942,
0.025072120130062103,
0.12597984075546265,
-0.00022867369989398867,
-0.04822615906596184,
0.08977240324020386,
-0.026426630094647408,
-0.02923239953815937,
-0.006165133323520422,
-0.0009879893623292446,
0.10979928076267242,
-0.002538383938372135,
0.05451763793826103,
-0.09801647067070007,
0.001588058890774846,
-0.05145164579153061,
-0.013760016299784184,
-0.01988789066672325,
-0.029027733951807022,
-0.009274656884372234,
-0.026272330433130264,
0.03811375051736832,
-0.1553666889667511,
-0.1921958476305008,
-0.0037082363851368427,
-0.0074559166096150875,
-0.061466533690690994,
-0.09020272642374039,
-0.10401469469070435,
-0.03174158185720444,
0.04081694781780243,
-0.06298784166574478,
-0.03399362415075302,
-0.057806551456451416,
0.10336291790008545,
-0.020108068361878395,
0.08822479844093323,
-0.09634412825107574,
0.06400231271982193,
-0.09735651314258575,
0.00434221513569355,
-0.04047475382685661,
0.13150398433208466,
0.010459667071700096,
0.06504938006401062,
-0.015651753172278404,
0.0007718545966781676,
-0.07933813333511353,
0.06354745477437973,
-0.030909189954400063,
0.2477322816848755,
-0.06748250871896744,
-0.10917367786169052,
0.3046492636203766,
-0.057712018489837646,
-0.12073205411434174,
0.12596748769283295,
0.005363780539482832,
0.09644947946071625,
0.1331852227449417,
0.21487672626972198,
-0.01984696462750435,
-0.0068543353118002415,
0.06968425214290619,
0.06471166014671326,
-0.07364511489868164,
-0.007237342651933432,
0.02287183701992035,
-0.0028508794493973255,
-0.05827122554183006,
0.02750004082918167,
0.11182942241430283,
0.10710450261831284,
-0.0436226911842823,
-0.04278378188610077,
0.0026933399494737387,
-0.011849762871861458,
0.07116065174341202,
-0.0365264005959034,
0.11617828905582428,
-0.028566621243953705,
-0.0537077821791172,
-0.013959919102489948,
0.02254384569823742,
-0.03388066589832306,
0.03580782935023308,
-0.09874553233385086,
0.0741543099284172,
-0.06825221329927444,
0.0752701386809349,
-0.10730817168951035,
-0.017628977075219154,
-0.034241046756505966,
0.20684359967708588,
0.03826877474784851,
0.06472475081682205,
0.06203655153512955,
-0.04675152152776718,
-0.02051282301545143,
0.023833075538277626,
0.14890873432159424,
-0.014275558292865753,
-0.0662805438041687,
-0.0635322779417038,
0.10819830000400543,
-0.039758771657943726,
0.1025112122297287,
-0.04390538111329079,
0.02048656903207302,
0.028846506029367447,
0.09316487610340118,
-0.011549445800483227,
0.04302366077899933,
0.030832676216959953,
-0.007349775172770023,
-0.06696102023124695,
0.0017331854905933142,
0.08373831957578659,
-0.005132065620273352,
-0.10288438200950623,
0.2222297638654709,
-0.17195571959018707,
0.1359700709581375,
0.17662493884563446,
-0.17865830659866333,
0.017574386671185493,
-0.10005436837673187,
-0.037142314016819,
-0.00025902545894496143,
0.06969170272350311,
-0.036339011043310165,
0.16891242563724518,
-0.022703181952238083,
0.17872141301631927,
-0.030542802065610886,
0.010599200613796711,
-0.01142777968198061,
-0.08117922395467758,
0.003259657183662057,
0.07835908234119415,
0.10237734764814377,
-0.1326715499162674,
0.18860146403312683,
0.08570080995559692,
0.045992374420166016,
0.22627609968185425,
0.028362713754177094,
0.008782551623880863,
0.06298546493053436,
-0.01865970715880394,
-0.03125203028321266,
-0.04416363313794136,
-0.25625625252723694,
-0.0290618184953928,
0.07478655874729156,
0.06145786494016647,
0.11144622415304184,
-0.09655871987342834,
-0.04936106875538826,
-0.013494624756276608,
-0.021363334730267525,
0.062416840344667435,
0.12828034162521362,
0.013264449313282967,
0.1167183592915535,
-0.0059207468293607235,
-0.03929155319929123,
0.06159475818276405,
0.02072814479470253,
-0.06096654012799263,
0.17215952277183533,
-0.12527748942375183,
-0.3565032482147217,
-0.10248059034347534,
-0.24221158027648926,
-0.056703414767980576,
0.08034399151802063,
0.11166344583034515,
-0.14502766728401184,
-0.016429679468274117,
0.02560395747423172,
0.12267337739467621,
-0.09008996188640594,
0.0014080893015488982,
-0.020322594791650772,
0.007355939596891403,
-0.13421140611171722,
-0.06806187331676483,
-0.04745855927467346,
-0.021423274651169777,
-0.045107994228601456,
0.13724087178707123,
-0.1646089106798172,
0.04772131145000458,
0.2324029505252838,
0.07655342668294907,
0.04489736631512642,
-0.04226483777165413,
0.24265289306640625,
-0.14229315519332886,
0.04625394567847252,
0.22603121399879456,
-0.03659775108098984,
0.053787652403116226,
0.16097471117973328,
-0.02670721709728241,
-0.10324563086032867,
0.045783743262290955,
-0.03113577328622341,
-0.0995752215385437,
-0.20755931735038757,
-0.12694452702999115,
-0.11706919223070145,
0.10899513214826584,
0.02387518808245659,
0.06351396441459656,
0.16105690598487854,
0.09651904553174973,
-0.04736444354057312,
0.03199975565075874,
0.04159309342503548,
0.07883670926094055,
0.24515806138515472,
-0.06764056533575058,
0.13209126889705658,
0.007306548301130533,
-0.16198775172233582,
0.066169373691082,
0.14288395643234253,
0.05947096645832062,
0.051575370132923126,
0.07941140234470367,
0.01768209971487522,
0.021771052852272987,
0.14529356360435486,
0.016733845695853233,
0.03093128651380539,
-0.02891351655125618,
-0.04478578642010689,
-0.030718382447957993,
-0.036213118582963943,
0.07220549136400223,
0.0417274534702301,
-0.1399887055158615,
-0.0054369899444282055,
-0.027707338333129883,
0.0770663470029831,
0.037722669541835785,
0.06967300176620483,
-0.15600547194480896,
-0.03203626349568367,
0.09158895164728165,
-0.024593599140644073,
-0.12873585522174835,
0.08975482732057571,
-0.01908034272491932,
-0.14002364873886108,
0.043596696108579636,
-0.028919989243149757,
0.11619032919406891,
-0.06838040798902512,
0.07618727535009384,
-0.12877094745635986,
-0.059359580278396606,
-0.008672763593494892,
0.11440369486808777,
-0.31111884117126465,
0.15767203271389008,
-0.011538026854395866,
-0.04392215237021446,
-0.11061955243349075,
0.0045563094317913055,
0.016024133190512657,
0.08876042813062668,
0.10492165386676788,
-0.01760014146566391,
0.08012101799249649,
0.027847539633512497,
-0.07334725558757782,
0.03266557678580284,
0.07878436148166656,
-0.04987972602248192,
-0.014850550331175327,
-0.04197229817509651,
-0.0000978057796601206,
-0.02876323089003563,
-0.07210519909858704,
0.02252531237900257,
-0.1819554567337036,
0.06710727512836456,
0.06405352056026459,
0.1427077203989029,
0.03037877008318901,
-0.020968027412891388,
-0.10803697258234024,
0.23970577120780945,
0.0012306334683671594,
-0.1268099993467331,
-0.09241144359111786,
-0.018575552850961685,
0.049345701932907104,
-0.05837111175060272,
0.021209344267845154,
-0.06610070914030075,
0.02726023644208908,
-0.07309141755104065,
-0.1779344379901886,
0.11890455335378647,
-0.09634703397750854,
-0.05284712091088295,
-0.03711375594139099,
0.2057812213897705,
-0.016008200123906136,
0.017970696091651917,
0.03375663608312607,
-0.012173106893897057,
-0.12149205058813095,
-0.07905389368534088,
0.001352551393210888,
0.04623747617006302,
-0.011133072897791862,
0.009968928061425686,
0.007712570484727621,
-0.0597420334815979,
-0.08887357264757156,
-0.057483602315187454,
0.28739267587661743,
0.1576206237077713,
-0.017131688073277473,
0.1775549054145813,
0.10761696845293045,
-0.06094074249267578,
-0.2615423798561096,
-0.13595062494277954,
-0.06465941667556763,
0.0035373596474528313,
-0.1157761812210083,
-0.16934320330619812,
0.05419965088367462,
-0.008816462010145187,
-0.032214414328336716,
0.14025019109249115,
-0.28583794832229614,
-0.10500112920999527,
0.15566395223140717,
-0.013431021012365818,
0.4354375898838043,
-0.12307620793581009,
-0.07617643475532532,
-0.04080875217914581,
-0.14442025125026703,
0.1490771323442459,
0.07389727234840393,
0.11531392484903336,
-0.008173860609531403,
0.15738078951835632,
0.04668857157230377,
0.002615033183246851,
0.1104152500629425,
0.005060439929366112,
-0.06574253737926483,
-0.09679088741540909,
-0.05511532723903656,
-0.0028297093231230974,
0.024284876883029938,
-0.00009939500159816816,
-0.030338723212480545,
0.01801430620253086,
-0.09141166508197784,
-0.07678817212581635,
-0.08509235829114914,
0.01889665052294731,
0.03825429826974869,
-0.11536121368408203,
-0.019585909321904182,
-0.037963904440402985,
-0.003683168673887849,
0.010112706571817398,
0.19379524886608124,
-0.10160581767559052,
0.1345922350883484,
0.08849729597568512,
0.0978013128042221,
-0.11799908429384232,
0.036970704793930054,
-0.07255762815475464,
-0.06529514491558075,
0.08426868915557861,
-0.14844943583011627,
0.028706392273306847,
0.09113392233848572,
-0.03548480570316315,
0.09999995678663254,
0.0803898423910141,
-0.02706962078809738,
0.03362881392240524,
0.10125748813152313,
-0.2189195454120636,
-0.06930802762508392,
-0.08704355359077454,
0.0006127272499725223,
0.09254586696624756,
0.08337843418121338,
0.22132565081119537,
-0.01883464679121971,
-0.05001356080174446,
0.018195856362581253,
0.03584945946931839,
-0.031703848391771317,
0.06505109369754791,
0.00213877996429801,
0.008921983651816845,
-0.14074736833572388,
0.044212136417627335,
0.012137535959482193,
-0.09707194566726685,
0.051778193563222885,
0.1244431734085083,
-0.11344485729932785,
-0.0967695489525795,
-0.10089945048093796,
0.08107870072126389,
-0.1365201473236084,
-0.023028064519166946,
-0.04981016367673874,
-0.12355407327413559,
0.05571397766470909,
0.07273027300834656,
0.04666009917855263,
0.07333047688007355,
-0.09213495254516602,
-0.011618593707680702,
-0.03820956125855446,
0.009861210361123085,
0.018628379330039024,
-0.02314133197069168,
-0.08309488743543625,
0.011602655053138733,
-0.030296947807073593,
0.13917332887649536,
-0.09529707580804825,
-0.11700040102005005,
-0.15952196717262268,
0.05111368000507355,
-0.1322380006313324,
-0.07627776265144348,
-0.13031302392482758,
-0.051525820046663284,
-0.01730186678469181,
-0.03715847432613373,
-0.037025559693574905,
-0.041578467935323715,
-0.10939093679189682,
0.034663423895835876,
-0.0555557943880558,
0.00482974061742425,
-0.04300270602107048,
0.02639732137322426,
0.054452694952487946,
-0.024436473846435547,
0.14131103456020355,
0.12744934856891632,
-0.10868880152702332,
0.08113951236009598,
-0.1163242906332016,
-0.03841060400009155,
0.11817452311515808,
0.0023974007926881313,
0.05620858818292618,
0.0823453813791275,
0.0043671284802258015,
0.04781347140669823,
0.027568669989705086,
0.04239194467663765,
0.017047427594661713,
-0.09182713180780411,
0.04498209431767464,
-0.049205485731363297,
-0.09966055303812027,
-0.034835997968912125,
-0.020566027611494064,
0.017175476998090744,
0.0443974994122982,
0.06539114564657211,
-0.05885276943445206,
0.0998002365231514,
-0.05283541604876518,
0.029093701392412186,
-0.0015582863707095385,
-0.1496967375278473,
0.004565210547298193,
-0.09400121122598648,
0.03080974519252777,
0.025354906916618347,
0.20674514770507812,
0.02143581211566925,
0.019811976701021194,
0.0045729028061032295,
0.04262814298272133,
0.02662437967956066,
0.0008452392648905516,
0.23246945440769196,
0.1169443354010582,
-0.04213881120085716,
-0.06708279997110367,
0.08681177347898483,
0.0431700199842453,
0.027975047007203102,
0.02720009721815586,
-0.056879859417676926,
0.006275257561355829,
0.08592189103364944,
-0.033538904041051865,
0.03673096001148224,
-0.14662985503673553,
-0.12037275731563568,
-0.0617678165435791,
0.030360406264662743,
-0.03882453218102455,
0.08190231025218964,
0.16327977180480957,
-0.025331970304250717,
0.015857765451073647,
-0.005972431041300297,
-0.06087341904640198,
-0.156987726688385,
-0.181394562125206,
-0.08188193291425705,
-0.161268413066864,
0.021071013063192368,
-0.1318165510892868,
0.00724777951836586,
0.059222158044576645,
0.07390937209129333,
-0.07776957005262375,
0.07912173867225647,
0.0745166540145874,
-0.12757854163646698,
0.06997381895780563,
-0.02983960136771202,
0.07618314027786255,
-0.015161785297095776,
-0.009188877418637276,
-0.08158104121685028,
0.04870324581861496,
0.01555937435477972,
0.046702876687049866,
-0.04176706448197365,
0.018237605690956116,
-0.13165713846683502,
-0.07299299538135529,
-0.05454114452004433,
0.08630012720823288,
0.003979259170591831,
0.13670922815799713,
0.008452602662146091,
-0.03317524120211601,
0.03557967767119408,
0.2741837501525879,
-0.057093679904937744,
-0.08502321690320969,
-0.08896739035844803,
0.22083306312561035,
-0.008874064311385155,
0.08963079005479813,
-0.029141085222363472,
-0.009722059592604637,
-0.08967766910791397,
0.3410298526287079,
0.2846130430698395,
-0.11068493872880936,
-0.0002095689851557836,
0.008463584817945957,
0.03799765184521675,
0.09002353250980377,
0.09882869571447372,
0.10552507638931274,
0.2919275760650635,
-0.0717669129371643,
-0.0322408489882946,
-0.027761828154325485,
-0.0499984435737133,
-0.0662522241473198,
0.07452364265918732,
0.032165635377168655,
-0.06851378083229065,
-0.026024173945188522,
0.12049338966608047,
-0.2906939685344696,
0.08247104287147522,
-0.16907718777656555,
-0.1853809803724289,
-0.10582920908927917,
0.011834150180220604,
0.07125915586948395,
0.0504484660923481,
0.08101130276918411,
-0.0026515850331634283,
-0.05465413257479668,
0.05746897682547569,
0.028347624465823174,
-0.15475796163082123,
0.049650706350803375,
0.07204406708478928,
-0.03681061789393425,
-0.06864535808563232,
-0.030581865459680557,
0.00870884582400322,
0.06940717250108719,
0.06984347850084305,
-0.004045487847179174,
0.046936336904764175,
0.005699544679373503,
-0.02268660068511963,
0.03353185951709747,
0.052910007536411285,
0.026234135031700134,
-0.09257465600967407,
0.0860561802983284,
-0.16123923659324646,
0.029919596388936043,
0.030013414099812508,
-0.01809672638773918,
-0.02339254878461361,
0.05425231531262398,
-0.08251561969518661,
0.0731799378991127,
0.0658644437789917,
-0.019834263250231743,
-0.02088710106909275,
-0.02824585512280464,
0.01647145487368107,
-0.03324836492538452,
-0.08249709755182266,
-0.09985757619142532,
-0.1549200713634491,
-0.13455232977867126,
0.08409097045660019,
-0.004433628171682358,
-0.1798699051141739,
0.03165286034345627,
-0.13443686068058014,
0.06604624539613724,
-0.14470809698104858,
0.10480460524559021,
0.06017756834626198,
0.024408267810940742,
-0.00063045893330127,
0.01005895622074604,
0.050300873816013336,
0.07121419906616211,
-0.1316031962633133,
-0.08455201238393784
] |
null | null |
transformers
|
# hseBERT
**hseBert-it-cased** is a BERT model obtained by MLM adaptive-tuning of [**bert-base-italian-xxl-cased**](https://huggingface.co/dbmdz/bert-base-italian-xxl-cased) on approximately 7k sentences of Italian regulatory texts (Testo unico sulla sicurezza sul lavoro - D.lgs. 9 aprile 2008, n. 81; Codice dell'Ambiente - D.lgs. 3 aprile 2006, n. 152).
# Usage
```python
from transformers import AutoModel, AutoTokenizer
model_name = "bullmount/hseBert-it-cased"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModel.from_pretrained(model_name)
```
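Since the adaptation objective is masked language modelling, the checkpoint can also be queried directly for masked-token predictions. A minimal sketch using the `fill-mask` pipeline; the sentence is taken from the model's widget examples:
```python
from transformers import pipeline

# Sketch: masked-token prediction with the adapted checkpoint.
fill_mask = pipeline("fill-mask", model="bullmount/hseBert-it-cased")

predictions = fill_mask("La legge fornisce l’esatta [MASK] di Green pass base.")
for p in predictions:
    print(p["token_str"], round(p["score"], 3))
```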
|
{"language": "it", "license": "mit", "widget": [{"text": "\u00c8 stata pubblicata la [MASK] di conversione del D.L. 24 dicembre 2021 n. 221 ."}, {"text": "La legge fornisce l\u2019esatta [MASK] di Green pass base."}, {"text": "Il datore di lavoro organizza e predispone i posti di lavoro di cui all'articolo 173, in [MASK] ai requisiti minimi di cui all'allegato XXXIV."}, {"text": "Le principali novit\u00e0 riguardano la quarantena precauzionale e il [MASK] di autosorveglianza."}]}
|
fill-mask
|
bullmount/hseBert-it-cased
|
[
"transformers",
"pytorch",
"tensorboard",
"bert",
"fill-mask",
"it",
"license:mit",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"it"
] |
TAGS
#transformers #pytorch #tensorboard #bert #fill-mask #it #license-mit #autotrain_compatible #endpoints_compatible #region-us
|
# hseBERT
hseBert-it-cased is a BERT model obtained by MLM adaptive-tuning bert-base-italian-xxl-cased on texts of Italian regulation (Testo unico sulla sicurezza sul lavoro - D.lgs. 9 aprile 2008, n. 81, Codice dell'Ambiente - D.lgs. 3 aprile 2006, n. 152), approximately 7k sentences.
# Usage
|
[
"# hseBERT\n\nhseBert-it-cased is a BERT model obtained by MLM adaptive-tuning bert-base-italian-xxl-cased on texts of Italian regulation (Testo unico sulla sicurezza sul lavoro - D.lgs. 9 aprile 2008, n. 81, Codice dell'Ambiente - D.lgs. 3 aprile 2006, n. 152), approximately 7k sentences.",
"# Usage"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #bert #fill-mask #it #license-mit #autotrain_compatible #endpoints_compatible #region-us \n",
"# hseBERT\n\nhseBert-it-cased is a BERT model obtained by MLM adaptive-tuning bert-base-italian-xxl-cased on texts of Italian regulation (Testo unico sulla sicurezza sul lavoro - D.lgs. 9 aprile 2008, n. 81, Codice dell'Ambiente - D.lgs. 3 aprile 2006, n. 152), approximately 7k sentences.",
"# Usage"
] |
[
47,
95,
3
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #bert #fill-mask #it #license-mit #autotrain_compatible #endpoints_compatible #region-us \n# hseBERT\n\nhseBert-it-cased is a BERT model obtained by MLM adaptive-tuning bert-base-italian-xxl-cased on texts of Italian regulation (Testo unico sulla sicurezza sul lavoro - D.lgs. 9 aprile 2008, n. 81, Codice dell'Ambiente - D.lgs. 3 aprile 2006, n. 152), approximately 7k sentences.# Usage"
] |
[
-0.07277893275022507,
-0.07551196217536926,
-0.003667302895337343,
0.07105502486228943,
0.05911112204194069,
0.0021237567998468876,
0.1371840536594391,
0.0860380232334137,
0.17947006225585938,
0.0004654456570278853,
0.1148022785782814,
0.0795484408736229,
-0.033747024834156036,
-0.02183746173977852,
-0.05807880312204361,
-0.22230412065982819,
0.05732748284935951,
0.09648949652910233,
-0.06691063195466995,
0.07037387788295746,
0.10563920438289642,
-0.05213996767997742,
0.08175522089004517,
0.01671067625284195,
-0.03163299337029457,
0.06971463561058044,
0.04185430333018303,
-0.04815249890089035,
0.14887635409832,
0.09163524210453033,
0.14793074131011963,
0.054084412753582,
0.05480330437421799,
-0.04485197365283966,
0.014356125146150589,
-0.029882680624723434,
-0.05414752662181854,
0.032049935311079025,
-0.007784601766616106,
0.046473175287246704,
0.009861258789896965,
0.03055371530354023,
-0.02577485330402851,
-0.022169409319758415,
-0.06363539397716522,
0.08246274292469025,
0.005780953913927078,
-0.03956039994955063,
0.009685703553259373,
0.10094956308603287,
0.004365458618849516,
0.14315801858901978,
-0.16019858419895172,
0.05352295562624931,
0.1631789356470108,
-0.29469266533851624,
-0.02400715835392475,
0.022875340655446053,
0.0376119390130043,
0.003375053871423006,
-0.016017170622944832,
0.0837404727935791,
0.08378493785858154,
0.02171963080763817,
-0.029304340481758118,
-0.11448770761489868,
0.10318824648857117,
0.026791738346219063,
-0.10782875120639801,
0.0697929784655571,
0.22650909423828125,
-0.013327854685485363,
-0.055301032960414886,
0.012363692745566368,
-0.03144604340195656,
0.06083221733570099,
0.011343882419168949,
-0.09431327879428864,
0.009493588469922543,
-0.0010584060801193118,
0.08622497320175171,
0.010932892560958862,
-0.11624817550182343,
-0.04569278284907341,
-0.14697298407554626,
0.25123101472854614,
0.017444957047700882,
-0.031085999682545662,
-0.025621360167860985,
0.0005155848921276629,
-0.09776514768600464,
-0.07582255452871323,
-0.0018403830472379923,
0.001519002951681614,
0.0899525135755539,
-0.011353947222232819,
-0.06582703441381454,
-0.13947482407093048,
0.023314548656344414,
-0.05224524810910225,
0.023798871785402298,
-0.006399133708328009,
0.018674127757549286,
0.09237232059240341,
0.014281218871474266,
0.1282486766576767,
-0.07897703349590302,
-0.07250496745109558,
0.0003384247829671949,
0.011916653253138065,
0.03897379711270332,
-0.021606186404824257,
-0.18451440334320068,
-0.05470800772309303,
-0.050222691148519516,
-0.026502614840865135,
-0.07455500960350037,
0.07295119762420654,
-0.06890470534563065,
-0.02408450096845627,
0.003806962165981531,
-0.11823153495788574,
-0.007073497399687767,
-0.014811712317168713,
0.01098465546965599,
0.047276291996240616,
-0.040549375116825104,
0.007280910387635231,
-0.045745305716991425,
0.1344098597764969,
-0.10769068449735641,
-0.01384344045072794,
-0.04583454504609108,
-0.11492019891738892,
0.01639634557068348,
-0.07635495811700821,
0.007360133342444897,
-0.1407405287027359,
-0.025748640298843384,
0.03288351744413376,
0.01947476528584957,
-0.06288104504346848,
0.045472923666238785,
-0.019116321578621864,
0.02674132212996483,
-0.010816723108291626,
0.009574257768690586,
-0.07727466523647308,
-0.04448554292321205,
0.060049619525671005,
-0.05549443140625954,
0.03252604603767395,
-0.22486400604248047,
0.027091896161437035,
-0.11797785758972168,
-0.031814176589250565,
-0.17844708263874054,
-0.03502984344959259,
-0.05624246224761009,
-0.0315876379609108,
-0.05073625594377518,
-0.02717018686234951,
-0.048927877098321915,
0.08935682475566864,
0.06633167713880539,
0.16422320902347565,
-0.15249207615852356,
-0.09625221788883209,
0.039212338626384735,
-0.10923906415700912,
-0.10648209601640701,
0.16196846961975098,
-0.018758799880743027,
0.12294898927211761,
0.06683734059333801,
0.19217778742313385,
-0.06312073767185211,
-0.17332780361175537,
0.028437435626983643,
0.12237707525491714,
0.014640897512435913,
-0.01765027455985546,
0.08084630221128464,
-0.015950217843055725,
-0.07891342043876648,
0.028221821412444115,
-0.14471912384033203,
-0.0373818539083004,
-0.028192367404699326,
-0.05006479099392891,
0.040551137179136276,
-0.006546677555888891,
0.07357762008905411,
-0.005553329363465309,
0.07115396112203598,
-0.060016658157110214,
-0.08498761802911758,
0.03740199655294418,
0.028624102473258972,
-0.022529492154717445,
0.04924141615629196,
-0.08491139113903046,
0.16360284388065338,
0.03512183576822281,
-0.02229498140513897,
-0.020100142806768417,
-0.012981938198208809,
0.01741175912320614,
0.10669898241758347,
0.040292419493198395,
0.1756095439195633,
0.05008145421743393,
0.014202479273080826,
-0.07251285016536713,
0.05079719424247742,
0.011505037546157837,
0.03743467852473259,
-0.06251107156276703,
-0.19526922702789307,
0.018410174176096916,
-0.04712170362472534,
0.027968095615506172,
-0.0309390090405941,
-0.007206061389297247,
0.0944334864616394,
0.030972182750701904,
-0.04194781929254532,
0.050079770386219025,
-0.1188349649310112,
0.01855364255607128,
-0.03457050025463104,
0.07695210725069046,
0.08474421501159668,
0.02773123048245907,
-0.11120742559432983,
0.16240251064300537,
-0.06847210973501205,
0.18782714009284973,
0.1309197098016739,
-0.11636868119239807,
-0.03915092349052429,
-0.1421797126531601,
-0.004775161389261484,
0.017700674012303352,
0.00589792849496007,
-0.11188271641731262,
0.1908888816833496,
-0.006565275602042675,
0.08755454421043396,
-0.11186715960502625,
0.04099750891327858,
0.0381828173995018,
-0.050743743777275085,
-0.09244424849748611,
0.061802711337804794,
0.12332630902528763,
-0.12509199976921082,
0.09626193344593048,
0.27742597460746765,
-0.09196244180202484,
0.23783724009990692,
0.03146858513355255,
-0.05397875979542732,
-0.02406134083867073,
0.044256266206502914,
-0.03930798918008804,
0.10620276629924774,
-0.27613091468811035,
0.008417348377406597,
0.0006778018432669342,
-0.00783359631896019,
0.020465467125177383,
-0.10703667253255844,
-0.05438388139009476,
0.0003209850110579282,
0.011303822509944439,
-0.04544102028012276,
0.00032274494878947735,
-0.0773952454328537,
0.0992196649312973,
0.04669641703367233,
-0.2607683837413788,
0.07724670320749283,
0.004495509434491396,
-0.08546032011508942,
0.1536431461572647,
-0.11289235204458237,
-0.20823435485363007,
-0.10202183574438095,
-0.050035662949085236,
-0.06814064085483551,
0.043459974229335785,
0.027635864913463593,
-0.06181216239929199,
-0.04160269349813461,
0.013908499851822853,
0.052606888115406036,
-0.017024055123329163,
-0.004297714680433273,
0.01420689933001995,
-0.01388401910662651,
-0.07514270395040512,
-0.08735758066177368,
-0.0982171967625618,
-0.1035907194018364,
-0.08655905723571777,
-0.009428881108760834,
-0.16290508210659027,
0.09873662143945694,
0.1356445699930191,
-0.016473904252052307,
0.029307791963219643,
-0.04960669204592705,
0.11556890606880188,
-0.04155917093157768,
0.012598750181496143,
0.06691436469554901,
-0.060762692242860794,
0.031088780611753464,
0.19598102569580078,
0.08965162932872772,
-0.007981807924807072,
-0.03008205257356167,
0.030076872557401657,
-0.042790599167346954,
-0.1469438076019287,
-0.09902864694595337,
-0.08166831731796265,
0.07507114112377167,
-0.007114064879715443,
0.022922448813915253,
0.018342677503824234,
0.07284995168447495,
0.03758874163031578,
-0.051879774779081345,
0.010417559184134007,
0.04517007991671562,
0.1821611374616623,
-0.05998316407203674,
0.13186024129390717,
0.010342705994844437,
-0.13936536014080048,
0.09191783517599106,
0.01719720847904682,
0.06814723461866379,
0.05636867880821228,
0.03663257136940956,
0.0626329705119133,
0.046511210501194,
0.016957636922597885,
0.06823979318141937,
-0.03505369648337364,
-0.08829306066036224,
-0.06862205266952515,
-0.09898024797439575,
0.01374859269708395,
0.08482254296541214,
-0.016535649076104164,
-0.08626766502857208,
-0.04746004566550255,
0.020354915410280228,
0.059481289237737656,
0.09754728525876999,
0.11634108424186707,
-0.12754009664058685,
0.007987760938704014,
-0.023391321301460266,
0.05722573772072792,
-0.02348806895315647,
0.07767806947231293,
0.09355971217155457,
-0.03641393408179283,
0.02808317169547081,
0.006958622485399246,
0.037130944430828094,
0.04378088563680649,
0.10250366479158401,
-0.039311811327934265,
-0.00815507024526596,
-0.026320694014430046,
0.07831597328186035,
-0.2617456614971161,
0.35349270701408386,
0.03578293323516846,
-0.00929408147931099,
-0.0851832702755928,
-0.02172384038567543,
0.015107858926057816,
0.18254454433918,
0.20968590676784515,
0.013127616606652737,
0.10488680750131607,
-0.10634589195251465,
0.012270419858396053,
0.008131776005029678,
0.10778040438890457,
0.0466572530567646,
-0.012734055519104004,
-0.028469901531934738,
0.018058523535728455,
0.07976803928613663,
0.16752788424491882,
-0.04570610076189041,
-0.11334704607725143,
0.099282406270504,
0.0773473009467125,
-0.07134348899126053,
-0.005560457240790129,
-0.14969266951084137,
-0.11802598834037781,
0.21999911963939667,
0.03173701837658882,
-0.005999759305268526,
-0.07500078529119492,
-0.035066310316324234,
-0.049372296780347824,
-0.08968182653188705,
-0.025312285870313644,
-0.05721375718712807,
0.01593996211886406,
-0.08871398121118546,
-0.01043248176574707,
0.17129023373126984,
-0.08373894542455673,
0.03112531453371048,
-0.0771549642086029,
0.12773412466049194,
0.021679336205124855,
0.06055336445569992,
0.06928374618291855,
-0.03709561377763748,
0.02335198223590851,
-0.061263710260391235,
-0.021541541442275047,
-0.1391163021326065,
0.05412702262401581,
-0.01159603800624609,
-0.21899667382240295,
0.011687363497912884,
-0.013360005803406239,
-0.0035046529956161976,
0.16639915108680725,
0.10152827948331833,
-0.029894618317484856,
0.050070568919181824,
0.26605960726737976,
-0.06622738391160965,
-0.33482420444488525,
0.00963693019002676,
-0.0031079044565558434,
-0.014845646917819977,
0.004459950141608715,
-0.09631180018186569,
0.13427402079105377,
0.13715720176696777,
0.0009762249537743628,
-0.011338038370013237,
-0.08488811552524567,
-0.09733002632856369,
0.10616640746593475,
0.035315025597810745,
0.34509149193763733,
-0.005417261738330126,
-0.02738131396472454,
-0.009780886583030224,
-0.1672600507736206,
0.014930116944015026,
0.011944852769374847,
0.09077165275812149,
0.03037027083337307,
-0.007817170582711697,
0.011380041018128395,
-0.027900446206331253,
0.10012778639793396,
0.007320045493543148,
0.01344059593975544,
-0.03773348033428192,
-0.2316737323999405,
0.0668945237994194,
0.048488061875104904,
0.03267316520214081,
-0.07397747784852982,
-0.07817624509334564,
0.0019524759845808148,
-0.042654138058423996,
-0.06291165202856064,
0.09978166222572327,
-0.04030730575323105,
-0.12783578038215637,
0.028369024395942688,
0.07295747101306915,
-0.02068682760000229,
-0.02650213986635208,
0.16200187802314758,
-0.1219107061624527,
0.05292983725667,
0.1074405238032341,
0.09768179804086685,
-0.2217288762331009,
-0.00807399395853281,
0.02896174229681492,
-0.0665682703256607,
0.056221578270196915,
0.08859178423881531,
0.049519386142492294,
0.1186772957444191,
0.006179977208375931,
0.09265639632940292,
0.07955056428909302,
0.005326799117028713,
-0.05001546069979668,
0.10720154643058777,
-0.11758057028055191,
0.001696040970273316,
-0.08854646235704422,
-0.04081987589597702,
-0.0075743719935417175,
0.06622638553380966,
0.1682213544845581,
-0.04464561492204666,
0.012610036879777908,
-0.013659479096531868,
-0.032217249274253845,
-0.07943450659513474,
0.0931912437081337,
-0.000889953167643398,
-0.016507169231772423,
-0.05654756352305412,
0.038478121161460876,
-0.030957158654928207,
-0.10295969247817993,
0.047976940870285034,
0.009635252878069878,
-0.07903193682432175,
-0.07655669748783112,
-0.09013359993696213,
0.16419224441051483,
-0.14779843389987946,
-0.050107192248106,
-0.16647101938724518,
-0.13792984187602997,
0.0351497121155262,
0.13187743723392487,
0.1296432614326477,
-0.008164137601852417,
-0.0976482629776001,
-0.04701995104551315,
-0.009620933793485165,
0.06872348487377167,
0.010104977525770664,
0.0037198755890130997,
-0.012375964783132076,
0.057389065623283386,
0.010345663875341415,
0.027680106461048126,
-0.07487154006958008,
-0.04203559830784798,
-0.1392824947834015,
0.01847134903073311,
-0.10578358918428421,
-0.03264564275741577,
-0.04457135871052742,
-0.05654791370034218,
0.00031686649890616536,
-0.03881421312689781,
-0.0888516753911972,
-0.002784206997603178,
-0.08070947974920273,
0.0651087760925293,
0.05803019180893898,
0.028431134298443794,
-0.0323859341442585,
-0.006424286402761936,
0.04015842080116272,
0.0038513634353876114,
0.020844046026468277,
0.0076978071592748165,
-0.03520139306783676,
0.08031334728002548,
-0.1336965262889862,
0.02526545152068138,
0.03180641308426857,
0.031416550278663635,
0.04382186383008957,
-0.057781822979450226,
0.02900146320462227,
0.09802407771348953,
0.031635113060474396,
0.02064121514558792,
0.10732699185609818,
-0.037946779280900955,
0.11071330308914185,
0.04728290066123009,
-0.19339706003665924,
-0.020075228065252304,
0.06386101990938187,
0.061128776520490646,
0.02952059730887413,
0.09788382798433304,
-0.05318831279873848,
-0.008707490749657154,
-0.004326631780713797,
0.02004290744662285,
0.017707187682390213,
-0.10778115689754486,
-0.1387905776500702,
-0.08271375298500061,
-0.0061654821038246155,
-0.012878740206360817,
0.10893957316875458,
0.07394368201494217,
-0.02382185310125351,
0.06000611558556557,
0.014467760920524597,
0.08748861402273178,
-0.031124601140618324,
0.12258797138929367,
-0.005587113555520773,
-0.011827082373201847,
-0.13237358629703522,
0.06722806394100189,
0.02953164465725422,
0.14525896310806274,
0.14668318629264832,
0.07229864597320557,
0.08185852319002151,
0.12212201207876205,
0.07688561081886292,
-0.009954370558261871,
-0.12371310591697693,
-0.12590448558330536,
0.050300586968660355,
0.08629050105810165,
-0.05892156437039375,
0.1274457573890686,
0.12104086577892303,
-0.0771367996931076,
0.04379015788435936,
-0.05467469245195389,
-0.08355782181024551,
-0.1707722395658493,
-0.1820979118347168,
-0.03486917167901993,
-0.08174645900726318,
0.006730018649250269,
-0.07505512237548828,
0.022261515259742737,
0.032372377812862396,
0.06279337406158447,
-0.02204577438533306,
0.1442277580499649,
-0.2010038197040558,
-0.05632574111223221,
0.07472685724496841,
-0.003963501192629337,
0.10129675269126892,
-0.05277107283473015,
-0.045922040939331055,
-0.10202439874410629,
-0.013512330129742622,
-0.05279054492712021,
-0.011356249451637268,
0.060240112245082855,
-0.11127710342407227,
-0.011896533891558647,
-0.039075225591659546,
-0.04390639811754227,
-0.015370420180261135,
0.03418566659092903,
0.20131126046180725,
-0.03796285390853882,
-0.02034745365381241,
0.014719138853251934,
0.1114301085472107,
-0.03547929227352142,
-0.04265199601650238,
-0.08660777658224106,
0.16232110559940338,
0.04543018713593483,
0.14054977893829346,
-0.04487547278404236,
-0.013113211840391159,
0.0181724913418293,
0.25621289014816284,
0.24363812804222107,
-0.09239732474088669,
0.04115289822220802,
0.05488105118274689,
0.044844850897789,
0.05857498198747635,
0.08482653647661209,
-0.003074247157201171,
0.25542357563972473,
-0.060540731996297836,
-0.043012585490942,
-0.08295463025569916,
0.01650966703891754,
-0.05234149843454361,
0.03269610553979874,
0.10757958143949509,
-0.04850698262453079,
-0.09105966240167618,
0.010757490992546082,
0.07449416816234589,
-0.09824302792549133,
-0.02712334878742695,
-0.1799563616514206,
-0.03900374472141266,
-0.030768947675824165,
0.028265614062547684,
0.011362903751432896,
0.15655164420604706,
-0.016333801671862602,
-0.010269558988511562,
0.06322826445102692,
0.0410502627491951,
-0.1478872001171112,
-0.11990973353385925,
0.1152111291885376,
-0.010502733290195465,
0.1554267555475235,
-0.007371162995696068,
0.11734168976545334,
0.08249034732580185,
0.10905168205499649,
0.05360936000943184,
0.08775375038385391,
0.049619078636169434,
-0.08744072914123535,
-0.05119097977876663,
-0.12973062694072723,
-0.06281891465187073,
0.06432101130485535,
0.018700286746025085,
-0.132529154419899,
0.0909426286816597,
-0.03991098701953888,
-0.15154840052127838,
-0.04390029236674309,
0.11474774777889252,
-0.12901237607002258,
0.11621715873479843,
0.14527852833271027,
0.05877261236310005,
-0.07499067485332489,
-0.06582003831863403,
0.013771380297839642,
0.10004675388336182,
-0.04701502248644829,
-0.045811090618371964,
-0.15926820039749146,
0.011145868338644505,
0.042994193732738495,
-0.0024411766789853573,
-0.2191149890422821,
-0.08007529377937317,
-0.04691212251782417,
0.033389851450920105,
-0.08028160035610199,
-0.022747954353690147,
0.0676540806889534,
-0.002936344360932708,
0.014828904531896114,
-0.1250675767660141,
0.02579987794160843,
0.02214694395661354,
-0.055350758135318756,
-0.08264188468456268
] |
null | null |
transformers
|
tags:
- generated_from_trainer
datasets:
- xtreme
metrics:
- f1
model-index:
- name: xlm-roberta-base-finetuned-panx-it
results:
- task:
name: Token Classification
type: token-classification
dataset:
name: xtreme
type: xtreme
args: PAN-X.it
metrics:
- name: F1
type: f1
value: 0.9097618003799502
---
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# xlm-roberta-base-finetuned-panx-it
This model is a fine-tuned version of [xlm-roberta-base](https://huggingface.co/xlm-roberta-base) on the xtreme dataset.
It achieves the following results on the evaluation set:
- Loss: 0.1417
- F1: 0.9098
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training (a sketch mapping them onto `TrainingArguments` follows the list):
- learning_rate: 5e-05
- train_batch_size: 24
- eval_batch_size: 24
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 3
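As a rough reproduction aid, the values above correspond to a `transformers.TrainingArguments` configuration along these lines; the output directory is an assumption, not taken from the original run:
```python
from transformers import TrainingArguments

# Sketch only: the hyperparameters listed above mapped onto TrainingArguments.
training_args = TrainingArguments(
    output_dir="xlm-roberta-base-finetuned-panx-it",  # assumed output path
    learning_rate=5e-05,
    per_device_train_batch_size=24,
    per_device_eval_batch_size=24,
    seed=42,
    num_train_epochs=3,
    lr_scheduler_type="linear",
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-08,
)
```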
### Training results
| Training Loss | Epoch | Step | Validation Loss | F1 |
|:-------------:|:-----:|:----:|:---------------:|:------:|
| 0.2754 | 1.0 | 834 | 0.1683 | 0.8717 |
| 0.1366 | 2.0 | 1668 | 0.1449 | 0.8921 |
| 0.0863 | 3.0 | 2502 | 0.1417 | 0.9098 |
### Framework versions
- Transformers 4.16.2
- Pytorch 1.10.0+cu111
- Datasets 1.18.3
- Tokenizers 0.11.0
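A minimal inference sketch with the fine-tuned checkpoint; the sentence is one of the widget examples, and `aggregation_strategy="simple"` is an assumption rather than part of the original card:
```python
from transformers import pipeline

# Sketch: named-entity tagging with the fine-tuned checkpoint.
ner = pipeline(
    "token-classification",
    model="bullmount/xlm-roberta-base-finetuned-panx-it",
    aggregation_strategy="simple",  # assumption: merge sub-word tokens into whole entities
)
print(ner("Luigi è nato a Roma."))
```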
|
{"license": "mit", "widget": [{"text": "Luigi \u00e8 nato a Roma."}, {"text": "Antonio ha chiesto ad Alessia di recarsi alla sede INAIL."}]}
|
token-classification
|
bullmount/xlm-roberta-base-finetuned-panx-it
|
[
"transformers",
"pytorch",
"tensorboard",
"xlm-roberta",
"token-classification",
"license:mit",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #xlm-roberta #token-classification #license-mit #autotrain_compatible #endpoints_compatible #region-us
|
tags:
* generated\_from\_trainer
datasets:
* xtreme
metrics:
* f1
model-index:
* name: xlm-roberta-base-finetuned-panx-it
results:
+ task:
name: Token Classification
type: token-classification
dataset:
name: xtreme
type: xtreme
args: URL
metrics:
- name: F1
type: f1
value: 0.9097618003799502
---
xlm-roberta-base-finetuned-panx-it
==================================
This model is a fine-tuned version of xlm-roberta-base on the xtreme dataset.
It achieves the following results on the evaluation set:
* Loss: 0.1417
* F1: 0.9098
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 5e-05
* train\_batch\_size: 24
* eval\_batch\_size: 24
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 3
### Training results
### Framework versions
* Transformers 4.16.2
* Pytorch 1.10.0+cu111
* Datasets 1.18.3
* Tokenizers 0.11.0
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5e-05\n* train\\_batch\\_size: 24\n* eval\\_batch\\_size: 24\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 3",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.16.2\n* Pytorch 1.10.0+cu111\n* Datasets 1.18.3\n* Tokenizers 0.11.0"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #xlm-roberta #token-classification #license-mit #autotrain_compatible #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5e-05\n* train\\_batch\\_size: 24\n* eval\\_batch\\_size: 24\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 3",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.16.2\n* Pytorch 1.10.0+cu111\n* Datasets 1.18.3\n* Tokenizers 0.11.0"
] |
[
50,
98,
4,
35
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #xlm-roberta #token-classification #license-mit #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5e-05\n* train\\_batch\\_size: 24\n* eval\\_batch\\_size: 24\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 3### Training results### Framework versions\n\n\n* Transformers 4.16.2\n* Pytorch 1.10.0+cu111\n* Datasets 1.18.3\n* Tokenizers 0.11.0"
] |
[
-0.10683615505695343,
0.04097318276762962,
-0.0015042561572045088,
0.10815544426441193,
0.20283709466457367,
0.03254285454750061,
0.10996439307928085,
0.10387466102838516,
-0.09940650314092636,
0.01965448632836342,
0.11378271132707596,
0.19544577598571777,
0.002150478772819042,
0.10531587898731232,
-0.07472489029169083,
-0.2552812695503235,
-0.024636216461658478,
0.04404507577419281,
-0.08209066838026047,
0.12562741339206696,
0.08894611895084381,
-0.1578873097896576,
0.07164544612169266,
-0.00401840778067708,
-0.2547611892223358,
0.012686985544860363,
0.04043234512209892,
-0.059539344161748886,
0.15448381006717682,
0.018760263919830322,
0.15470540523529053,
0.009688249789178371,
0.1078699603676796,
-0.1488005667924881,
0.00960344448685646,
0.05618904158473015,
0.008773392997682095,
0.08749496936798096,
0.058099955320358276,
0.0023979980032891035,
0.08059809356927872,
-0.10786716639995575,
0.053614672273397446,
0.009169748984277248,
-0.11613278836011887,
-0.22135913372039795,
-0.079383984208107,
0.0041810013353824615,
0.06368269771337509,
0.09070656448602676,
0.002771790372207761,
0.19492009282112122,
-0.10499308258295059,
0.09762131422758102,
0.20662343502044678,
-0.29672762751579285,
-0.07767347991466522,
0.0612156018614769,
0.0208571869879961,
0.08219639211893082,
-0.11501266062259674,
-0.004776538349688053,
0.06640030443668365,
0.04604140669107437,
0.12360206246376038,
-0.043816938996315,
-0.10541880875825882,
0.030368071049451828,
-0.14906539022922516,
-0.0063984389416873455,
0.0823012962937355,
0.014857074245810509,
-0.03528944030404091,
-0.017030702903866768,
-0.05819535255432129,
-0.15024513006210327,
-0.04741890728473663,
-0.02138076350092888,
0.05577891319990158,
-0.06063684821128845,
-0.09469932317733765,
0.013826912268996239,
-0.10331116616725922,
-0.05717596411705017,
-0.0866919457912445,
0.19584138691425323,
0.03921845182776451,
0.019717223942279816,
-0.02885318174958229,
0.10389178991317749,
0.004820198751986027,
-0.12960919737815857,
0.0345882847905159,
0.0341167077422142,
-0.03676564618945122,
-0.06626386195421219,
-0.06672008335590363,
-0.06864362955093384,
0.009828475303947926,
0.08604796230792999,
-0.054362379014492035,
0.04702307656407356,
0.04139578714966774,
0.04111592099070549,
-0.09132102876901627,
0.20453809201717377,
-0.031838592141866684,
-0.015641778707504272,
0.014551959931850433,
0.03474672883749008,
-0.005266216583549976,
-0.005448652897030115,
-0.1142241358757019,
0.005556819494813681,
0.10830437391996384,
0.023609863594174385,
-0.07102943956851959,
0.06640439480543137,
-0.03689761459827423,
-0.021077772602438927,
-0.017745550721883774,
-0.09436957538127899,
0.05365670472383499,
-0.015955358743667603,
-0.08249531686306,
-0.009112824685871601,
-0.0037146087270230055,
0.01278697233647108,
-0.009674283675849438,
0.16259630024433136,
-0.09838683158159256,
0.05062616616487503,
-0.11748481541872025,
-0.11606039851903915,
0.00001643653195060324,
-0.07161791622638702,
0.021433109417557716,
-0.10082810372114182,
-0.13260318338871002,
-0.016033513471484184,
0.05350218713283539,
-0.027776727452874184,
-0.044287241995334625,
-0.027770310640335083,
-0.0743594542145729,
0.0062506129033863544,
-0.02049962617456913,
0.18540233373641968,
-0.05253069847822189,
0.11247780174016953,
0.050606872886419296,
0.06362906098365784,
-0.04294796660542488,
0.044827744364738464,
-0.09344829618930817,
-0.001984403468668461,
-0.19236038625240326,
0.025331560522317886,
-0.06156744807958603,
0.07730059325695038,
-0.06470208615064621,
-0.11303458362817764,
0.03668667748570442,
-0.000024446972020086832,
0.07282450050115585,
0.07205995172262192,
-0.16801242530345917,
-0.07874787598848343,
0.13261888921260834,
-0.05329686030745506,
-0.09766042977571487,
0.10593801736831665,
-0.0656752660870552,
0.023405475541949272,
0.07415773719549179,
0.14543446898460388,
0.05293973907828331,
-0.10144913196563721,
0.017701560631394386,
-0.019110998138785362,
0.031299930065870285,
-0.09123094379901886,
0.03955869749188423,
0.03204727545380592,
-0.008042366243898869,
0.033152926713228226,
-0.05307818204164505,
0.05260170251131058,
-0.11164107918739319,
-0.08450651168823242,
-0.04910752549767494,
-0.0941028967499733,
0.0351998433470726,
0.08363547921180725,
0.08254624158143997,
-0.11139784008264542,
-0.06612528860569,
0.09504056721925735,
0.06477958709001541,
-0.035692453384399414,
0.017052721232175827,
-0.060557007789611816,
0.06282974034547806,
-0.06310182064771652,
-0.033533208072185516,
-0.19251075387001038,
-0.04381470009684563,
0.00022947389516048133,
0.03269113227725029,
0.03481700271368027,
0.051996249705553055,
0.06849009543657303,
0.0619124136865139,
-0.05572156980633736,
-0.006071621552109718,
-0.004613372031599283,
-0.000990897649899125,
-0.14957423508167267,
-0.19629590213298798,
-0.027014346793293953,
-0.019607141613960266,
0.08451423794031143,
-0.1954420655965805,
0.01919148676097393,
-0.04789946228265762,
0.07270286977291107,
0.0043899500742554665,
0.006574282422661781,
-0.0693175420165062,
0.10041922330856323,
-0.02453623339533806,
-0.04272332414984703,
0.07161284983158112,
-0.0012469339417293668,
-0.0634850561618805,
-0.05192047730088234,
-0.10650444030761719,
0.2165801227092743,
0.14618979394435883,
-0.13652750849723816,
-0.0937538668513298,
0.015788478776812553,
-0.05386967957019806,
-0.01912013255059719,
-0.06383614987134933,
0.060223523527383804,
0.17011572420597076,
-0.01780698448419571,
0.15290842950344086,
-0.06259465962648392,
-0.04928981885313988,
0.024872900918126106,
-0.03797314316034317,
0.038242094218730927,
0.10759253799915314,
0.14668110013008118,
-0.10407159477472305,
0.13093961775302887,
0.13628235459327698,
-0.11143464595079422,
0.11912654340267181,
-0.03610312566161156,
-0.06241393834352493,
-0.025864286348223686,
-0.0337800495326519,
0.0074884057976305485,
0.12292952090501785,
-0.09257670491933823,
-0.008271687664091587,
0.013118231669068336,
0.018971197307109833,
0.018904654309153557,
-0.240179643034935,
-0.0509757436811924,
0.02021675556898117,
-0.018939606845378876,
-0.005866101942956448,
-0.022736532613635063,
0.024423660710453987,
0.11933613568544388,
-0.00813310593366623,
-0.09738575667142868,
0.027484161779284477,
0.007368510589003563,
-0.07425704598426819,
0.21422447264194489,
-0.06837578117847443,
-0.12482515722513199,
-0.09616003185510635,
-0.08456388860940933,
-0.04080714285373688,
-0.0000473098480142653,
0.039537206292152405,
-0.09844183176755905,
-0.03968941792845726,
-0.02644599974155426,
0.0003784647269640118,
-0.003839347744360566,
0.05144635960459709,
-0.010110427625477314,
0.004692524671554565,
0.06722568720579147,
-0.10424645245075226,
-0.011040679179131985,
-0.07157819718122482,
-0.0685780942440033,
0.05779885873198509,
0.07015710324048996,
0.11240451782941818,
0.16643570363521576,
-0.04759897664189339,
0.0036347899585962296,
-0.02776673249900341,
0.2249801903963089,
-0.07286687940359116,
-0.039883993566036224,
0.10163664072751999,
-0.003930757287889719,
0.048045169562101364,
0.11066700518131256,
0.0866934135556221,
-0.09760529547929764,
0.0021226226817816496,
0.029744748026132584,
-0.04038788750767708,
-0.21372996270656586,
-0.04909203201532364,
-0.05956420674920082,
-0.04930466040968895,
0.07504309713840485,
0.033061519265174866,
0.04748227819800377,
0.066460520029068,
0.05682065337896347,
0.08495889604091644,
-0.08343112468719482,
0.05396818742156029,
0.11410211771726608,
0.05508068576455116,
0.14088141918182373,
-0.04918118193745613,
-0.09134229272603989,
0.026275519281625748,
-0.019053027033805847,
0.236675426363945,
0.01865624077618122,
0.0782182589173317,
0.04858676344156265,
0.18847785890102386,
0.014556040987372398,
0.08074595779180527,
0.008583351969718933,
-0.07259362190961838,
-0.003297365503385663,
-0.03282510116696358,
-0.012673452496528625,
0.009931111708283424,
-0.03152116760611534,
0.04496786370873451,
-0.10788962990045547,
-0.027532050386071205,
0.05509228631854057,
0.21911613643169403,
0.020861448720097542,
-0.31764212250709534,
-0.06392065435647964,
-0.004612716846168041,
-0.03803084045648575,
-0.003369963262230158,
-0.0018290742300450802,
0.08973480761051178,
-0.09553717821836472,
0.028571955859661102,
-0.08075781911611557,
0.0925719141960144,
-0.018230577930808067,
0.04352030158042908,
0.0759444534778595,
0.11811979115009308,
-0.0013736224500462413,
0.06597819924354553,
-0.3130641579627991,
0.2866200804710388,
0.014431553892791271,
0.0885244682431221,
-0.07682368904352188,
-0.010117745958268642,
0.03666957840323448,
0.06051069498062134,
0.03672831505537033,
-0.021937860175967216,
-0.05879279971122742,
-0.21634769439697266,
-0.026179321110248566,
0.037249889224767685,
0.10883191227912903,
0.0016916224267333746,
0.10262652486562729,
-0.026637466624379158,
-0.0034565969835966825,
0.07837001979351044,
-0.033280596137046814,
-0.05236164852976799,
-0.07171345502138138,
-0.021022971719503403,
0.012114845216274261,
-0.08400122076272964,
-0.05251460522413254,
-0.12408973276615143,
-0.1553165167570114,
0.16403499245643616,
0.011498959735035896,
-0.017319614067673683,
-0.12088324874639511,
0.09780587255954742,
0.06837479770183563,
-0.08028173446655273,
0.04285675659775734,
0.014629855751991272,
0.04684416577219963,
0.026844030246138573,
-0.07294876128435135,
0.11269993335008621,
-0.06784303486347198,
-0.1502106934785843,
-0.06417279690504074,
0.08496811985969543,
0.01736474595963955,
0.0673334002494812,
-0.010651919059455395,
0.0288509763777256,
-0.026204172521829605,
-0.09708449989557266,
0.037672508507966995,
-0.04915861785411835,
0.0754372626543045,
0.011323636397719383,
-0.04553218558430672,
-0.007408654782921076,
-0.059236232191324234,
-0.028799185529351234,
0.17696140706539154,
0.23982587456703186,
-0.10274548828601837,
-0.005549288354814053,
0.01615467295050621,
-0.06341763585805893,
-0.19830264151096344,
0.08255014568567276,
0.06216806173324585,
0.008419104851782322,
0.05636247619986534,
-0.13971194624900818,
0.14453396201133728,
0.10249421745538712,
-0.009280884638428688,
0.12656913697719574,
-0.31325221061706543,
-0.13354001939296722,
0.09627562016248703,
0.17093788087368011,
0.1441386193037033,
-0.1378307193517685,
-0.012400035746395588,
-0.008680460043251514,
-0.09552542865276337,
0.11398555338382721,
-0.06194211542606354,
0.1261398047208786,
-0.02367290109395981,
0.09341850131750107,
0.004906702321022749,
-0.06778282672166824,
0.10534351319074631,
0.0027237595058977604,
0.11582639813423157,
-0.05705719441175461,
-0.057684414088726044,
0.03193723410367966,
-0.022317267954349518,
-0.006670762784779072,
-0.05817635729908943,
0.020353898406028748,
-0.06381531059741974,
-0.019195126369595528,
-0.08422243595123291,
0.04931620880961418,
-0.031068451702594757,
-0.06371812522411346,
-0.04077243432402611,
0.03321288526058197,
0.01912543550133705,
-0.03697865828871727,
0.11412595957517624,
0.020106473937630653,
0.15394820272922516,
0.1045311838388443,
0.07701766490936279,
-0.07963130623102188,
-0.055959925055503845,
-0.004509780555963516,
-0.018719427287578583,
0.06083660200238228,
-0.1232188269495964,
0.015951642766594887,
0.148636594414711,
0.025842424482107162,
0.11400626599788666,
0.09183110296726227,
-0.013063883408904076,
0.013153381645679474,
0.07823879271745682,
-0.15035685896873474,
-0.09108298271894455,
0.011180066503584385,
-0.07832586020231247,
-0.08024219423532486,
0.060225002467632294,
0.07792165875434875,
-0.07886912673711777,
-0.011747624725103378,
-0.008097159676253796,
-0.01763138733804226,
-0.05916029214859009,
0.22576037049293518,
0.08101631700992584,
0.04772621765732765,
-0.10589192807674408,
0.044772788882255554,
0.06045854836702347,
-0.07184193283319473,
-0.015516618266701698,
0.08871378004550934,
-0.07214518636465073,
-0.03284451737999916,
0.12163364887237549,
0.19100207090377808,
-0.08594595640897751,
-0.024551885202527046,
-0.15176407992839813,
-0.12330945581197739,
0.06805167347192764,
0.17673644423484802,
0.11273205280303955,
-0.004468568600714207,
-0.05713219568133354,
0.023618176579475403,
-0.1428784430027008,
0.08162181824445724,
0.03840327635407448,
0.08822306990623474,
-0.1510881781578064,
0.20096971094608307,
0.00967887882143259,
0.056198231875896454,
-0.033579371869564056,
0.03729552403092384,
-0.1157800480723381,
0.023040538653731346,
-0.12111905962228775,
-0.042030543088912964,
-0.005798863247036934,
-0.00448305020108819,
-0.00207668193615973,
-0.07671225816011429,
-0.06944349408149719,
0.013286206871271133,
-0.12512065470218658,
-0.015201927162706852,
0.050786275416612625,
0.02628776989877224,
-0.10913550853729248,
-0.038342706859111786,
0.01608913205564022,
-0.04475685954093933,
0.03756329044699669,
0.03801123797893524,
0.02270568534731865,
0.07704290002584457,
-0.15314094722270966,
-0.01377369835972786,
0.07437403500080109,
0.010549924336373806,
0.1036900207400322,
-0.07176283001899719,
0.005964313168078661,
0.01328126061707735,
0.1031729057431221,
0.02876165136694908,
0.06846075505018234,
-0.14223964512348175,
0.009685343131422997,
-0.04219204932451248,
-0.09678538888692856,
-0.0613420233130455,
0.016497671604156494,
0.07945306599140167,
0.008007333613932133,
0.1950826644897461,
-0.08967018127441406,
0.04570625349879265,
-0.22023777663707733,
-0.013296764343976974,
-0.025988470762968063,
-0.11611033231019974,
-0.11400308459997177,
-0.055993519723415375,
0.07787170261144638,
-0.050683751702308655,
0.13156180083751678,
0.058876924216747284,
0.062045566737651825,
0.037756506353616714,
-0.011101153679192066,
0.006422529928386211,
0.029639622196555138,
0.20434029400348663,
0.039473846554756165,
-0.022816821932792664,
0.05264555662870407,
0.07509376108646393,
0.09413610398769379,
0.07897426933050156,
0.22184783220291138,
0.1496228277683258,
-0.005866227205842733,
0.08586940914392471,
0.03531652316451073,
-0.0680239275097847,
-0.162404865026474,
0.02907877042889595,
-0.07271768152713776,
0.08993012458086014,
-0.03107922337949276,
0.18057002127170563,
0.06922924518585205,
-0.16337142884731293,
0.0447227880358696,
-0.07045484334230423,
-0.08814484626054764,
-0.10928524285554886,
-0.0116264708340168,
-0.08879414200782776,
-0.13854779303073883,
0.013301101513206959,
-0.096592977643013,
0.02237365022301674,
0.12270944565534592,
0.009030572138726711,
-0.02842417173087597,
0.15943841636180878,
0.04570535570383072,
0.04450208693742752,
0.05218619480729103,
0.01144077442586422,
-0.016196805983781815,
-0.09528663009405136,
-0.04473979026079178,
-0.04802989959716797,
-0.026881836354732513,
0.03432775288820267,
-0.0698263868689537,
-0.086609847843647,
0.040375832468271255,
-0.016123076900839806,
-0.09906876087188721,
0.023612236604094505,
0.024951769039034843,
0.07663260400295258,
0.039312075823545456,
0.004920803476125002,
0.02676120400428772,
-0.027295682579278946,
0.21365293860435486,
-0.08166255801916122,
-0.0976674035191536,
-0.08251187205314636,
0.2840653657913208,
0.03393873572349548,
-0.00250906846486032,
0.03373156115412712,
-0.05862848088145256,
0.010165777988731861,
0.24173957109451294,
0.20585662126541138,
-0.11717480421066284,
-0.0018027470214292407,
-0.0008746950188651681,
-0.015937432646751404,
-0.03669455647468567,
0.1508186012506485,
0.1293751299381256,
0.08627160638570786,
-0.10879699140787125,
-0.04518071934580803,
-0.06795763224363327,
-0.006349444389343262,
-0.04285857826471329,
0.04925071448087692,
0.04851069673895836,
0.008384432643651962,
-0.04230181500315666,
0.05803648754954338,
-0.0500839427113533,
-0.0961291566491127,
0.09019067883491516,
-0.19642241299152374,
-0.16907663643360138,
-0.00399376405403018,
0.12740348279476166,
-0.013817706145346165,
0.06080381199717522,
-0.03546006605029106,
0.0007587769068777561,
0.03960249200463295,
-0.03115997090935707,
-0.0773441344499588,
-0.09131912142038345,
0.1042492613196373,
-0.10953594744205475,
0.1850593537092209,
-0.047531742602586746,
0.09218685328960419,
0.11989860981702805,
0.06572117656469345,
-0.0687599927186966,
0.06589935719966888,
0.04341399297118187,
-0.11874373257160187,
0.045813221484422684,
0.09362736344337463,
-0.025201138108968735,
0.04555230960249901,
0.03738481551408768,
-0.12605592608451843,
0.0388665497303009,
-0.09410636872053146,
-0.033344727009534836,
-0.04701795056462288,
-0.040548596531152725,
-0.0565774142742157,
0.12247946113348007,
0.21550790965557098,
-0.009036307223141193,
0.02550194226205349,
-0.07976392656564713,
0.012310720980167389,
0.05572950839996338,
0.05110480636358261,
-0.09180036932229996,
-0.24912960827350616,
0.0204634889960289,
0.07317287474870682,
-0.03268589451909065,
-0.2167896330356598,
-0.09207865595817566,
-0.0009371929918415844,
-0.08475879579782486,
-0.09517937898635864,
0.08145812153816223,
0.08979341387748718,
0.05279810354113579,
-0.05544363707304001,
-0.110825315117836,
-0.07812155783176422,
0.14647260308265686,
-0.1453765630722046,
-0.08099633455276489
] |
null | null | null |
mmmm
|
{}
| null |
bumhead/SnarlyTrain
|
[
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#region-us
|
mmmm
|
[] |
[
"TAGS\n#region-us \n"
] |
[
6
] |
[
"passage: TAGS\n#region-us \n"
] |
[
0.024608636274933815,
-0.026205500587821007,
-0.009666500613093376,
-0.10395516455173492,
0.08638657629489899,
0.059816278517246246,
0.01882290467619896,
0.020661840215325356,
0.23975107073783875,
-0.005599027033895254,
0.1219947561621666,
0.0015615287702530622,
-0.037353623658418655,
0.03733762726187706,
-0.0035912662278860807,
-0.17583473026752472,
0.03876631706953049,
-0.018274923786520958,
0.01843859627842903,
0.026470553129911423,
-0.07776834815740585,
-0.07564429938793182,
0.015296397730708122,
-0.10247814655303955,
-0.083692267537117,
0.11002834886312485,
0.031466204673051834,
-0.019670886918902397,
0.10779199749231339,
-0.04243955761194229,
0.18699054419994354,
-0.011512263678014278,
-0.11213519424200058,
-0.2536850869655609,
0.021806683391332626,
-0.01765260472893715,
-0.08747660368680954,
0.01506110467016697,
0.0665089413523674,
-0.09014441072940826,
-0.0588928684592247,
0.0795099288225174,
-0.01132340170443058,
0.04246443510055542,
-0.27593839168548584,
-0.12684126198291779,
-0.05297930911183357,
-0.1421966552734375,
0.08651168644428253,
0.04035491496324539,
0.008764253929257393,
0.15506891906261444,
-0.20897391438484192,
0.004104613792151213,
0.08255259692668915,
-0.2538507878780365,
0.05591634660959244,
0.17671173810958862,
0.03623908758163452,
0.18037272989749908,
0.0060391901060938835,
0.11029672622680664,
0.0716743916273117,
-0.024263937026262283,
-0.17590197920799255,
-0.08127854019403458,
-0.04696211963891983,
0.16642488539218903,
-0.06727185100317001,
-0.14248386025428772,
0.34701237082481384,
0.00015008423360995948,
0.009657775051891804,
0.16921205818653107,
-0.059524230659008026,
-0.09972117841243744,
0.07259953022003174,
0.016484731808304787,
0.018492350354790688,
0.1471305936574936,
0.16307872533798218,
-0.0458691343665123,
-0.13837823271751404,
-0.018630273640155792,
-0.22798998653888702,
0.17510560154914856,
-0.03248048573732376,
0.13137903809547424,
-0.27447956800460815,
0.01684025302529335,
-0.2570667266845703,
0.0032130838371813297,
0.04178816080093384,
-0.06004921346902847,
-0.0226522795855999,
-0.013265985064208508,
-0.08018817007541656,
0.004899587947875261,
0.06192673370242119,
0.1266920566558838,
-0.06128726154565811,
0.06128238886594772,
-0.09319206327199936,
0.141696035861969,
0.07166698575019836,
0.07868369668722153,
0.13037432730197906,
0.041205424815416336,
-0.07187089323997498,
-0.21872246265411377,
-0.0026476888451725245,
-0.06275863200426102,
-0.09502086788415909,
-0.0020165652967989445,
-0.11606067419052124,
0.17244569957256317,
-0.030802514404058456,
-0.09825427830219269,
-0.11208184063434601,
0.09148659557104111,
-0.032992321997880936,
-0.03437839448451996,
-0.03552987426519394,
-0.020977836102247238,
0.019381176680326462,
0.04704452306032181,
-0.1548958420753479,
-0.005131472367793322,
0.07039852440357208,
0.11502562463283539,
-0.1346137970685959,
-0.003783059772104025,
-0.07908964157104492,
0.03039063885807991,
0.07654735445976257,
-0.16510222852230072,
0.03158547356724739,
-0.1124754324555397,
-0.07531405985355377,
0.002912673633545637,
-0.015710093080997467,
-0.016202643513679504,
0.166526660323143,
-0.0020451415330171585,
0.0714716836810112,
-0.026345307007431984,
-0.05890209600329399,
-0.11243434250354767,
-0.08489254862070084,
0.05390460044145584,
0.03670717030763626,
0.03266148269176483,
-0.2193479984998703,
0.014805203303694725,
-0.12762966752052307,
0.1360815018415451,
-0.10566820204257965,
-0.04705966264009476,
-0.022842247039079666,
0.20562705397605896,
0.037286072969436646,
0.08762791007757187,
-0.22171171009540558,
0.039756543934345245,
-0.05404696613550186,
0.18480908870697021,
-0.1502426266670227,
-0.0799463614821434,
0.20813211798667908,
-0.07964949309825897,
-0.10115210711956024,
0.021235812455415726,
0.020391687750816345,
0.026287272572517395,
0.0766737088561058,
0.4564172327518463,
-0.09766800701618195,
-0.09146861732006073,
0.10178250074386597,
0.17055274546146393,
-0.12427149713039398,
-0.1827561855316162,
0.06446871906518936,
-0.16666454076766968,
-0.1973118633031845,
0.0018917324487119913,
0.09222044050693512,
0.038269978016614914,
-0.07875611633062363,
-0.020746968686580658,
0.06325206160545349,
-0.0007678253459744155,
0.09095914661884308,
0.03755716234445572,
0.09034032374620438,
-0.08716782182455063,
0.11115926504135132,
-0.05017651244997978,
0.004037132486701012,
0.1343354731798172,
0.027325427159667015,
-0.03223329409956932,
0.08694463223218918,
-0.0485352948307991,
0.05295134335756302,
-0.1662379503250122,
-0.15068690478801727,
0.03398871049284935,
0.06283251196146011,
0.03186952322721481,
0.1280253529548645,
0.08141885697841644,
-0.10732853412628174,
0.022690722718834877,
-0.004228927195072174,
0.058398615568876266,
0.03891623765230179,
0.006107209715992212,
0.008764320984482765,
0.0961301177740097,
-0.10607069730758667,
-0.13589619100093842,
-0.07336436957120895,
-0.014715781435370445,
0.14371353387832642,
-0.0302802175283432,
0.07690227776765823,
-0.004240254405885935,
0.00013200697139836848,
0.06930823624134064,
0.08137880265712738,
0.016412746161222458,
0.08971183747053146,
-0.05237193778157234,
-0.05160155147314072,
0.10863113403320312,
-0.13533565402030945,
0.17837053537368774,
0.14053137600421906,
-0.20532016456127167,
0.029453208670020103,
-0.06838275492191315,
0.03670361638069153,
-0.008162540383636951,
0.0975119024515152,
-0.08272241055965424,
-0.02106042578816414,
0.013134466484189034,
0.0052274600602686405,
-0.013007243163883686,
0.017682146281003952,
-0.07295988500118256,
-0.07787393033504486,
-0.10233919322490692,
0.08436838537454605,
0.11562882363796234,
-0.10282530635595322,
0.14214380085468292,
0.4384984076023102,
0.11495281755924225,
0.21582984924316406,
-0.09581480920314789,
-0.0412987545132637,
0.007486371789127588,
0.0001535322517156601,
-0.04476691037416458,
0.08031861484050751,
-0.15973517298698425,
-0.038901735097169876,
0.027348900213837624,
0.07128690183162689,
0.11475157737731934,
-0.14959022402763367,
-0.09639324247837067,
-0.00793045200407505,
0.0022841424215584993,
-0.1249532699584961,
0.023905446752905846,
-0.03974650055170059,
0.04015624523162842,
0.07232289016246796,
-0.021535737439990044,
0.13939237594604492,
-0.04166141897439957,
-0.0639561116695404,
0.07585346698760986,
-0.2017085999250412,
-0.23179671168327332,
-0.12309670448303223,
-0.14680525660514832,
0.04366797208786011,
0.05154111236333847,
0.01726446859538555,
-0.17635835707187653,
-0.015074856579303741,
0.07706750929355621,
0.07820965349674225,
-0.20886357128620148,
-0.022814949974417686,
-0.004290030337870121,
0.0895976573228836,
-0.10227091610431671,
-0.0017130117630586028,
-0.04419664293527603,
-0.10150232166051865,
0.0017003051470965147,
0.07279510796070099,
-0.137485533952713,
0.13807645440101624,
0.21589438617229462,
0.07225540280342102,
0.07359948754310608,
-0.019093448296189308,
0.09936179965734482,
-0.10856141895055771,
-0.16549113392829895,
0.08348225057125092,
-0.06234746053814888,
0.047262318432331085,
0.17534415423870087,
0.03307317942380905,
-0.13904969394207,
-0.015682822093367577,
-0.0402069091796875,
-0.15603256225585938,
-0.238995760679245,
-0.09178274869918823,
-0.1182505264878273,
0.16442428529262543,
0.0009358620154671371,
0.06651917099952698,
0.08258313685655594,
-0.022042419761419296,
0.16447891294956207,
-0.07379321753978729,
-0.07578866183757782,
-0.006978808436542749,
0.12375060468912125,
-0.056660156697034836,
-0.03080669604241848,
-0.10566964000463486,
-0.008295975625514984,
0.1151021271944046,
0.15304014086723328,
0.12214863300323486,
0.2957419455051422,
0.08268889784812927,
0.026645636186003685,
0.08958091586828232,
0.17622539401054382,
0.09495089203119278,
0.07838419824838638,
-0.045413073152303696,
-0.014814783819019794,
0.014317171648144722,
-0.04022889584302902,
0.010141594335436821,
0.14683100581169128,
-0.2679629921913147,
-0.006678564939647913,
-0.2710230350494385,
0.0965198427438736,
-0.10913380235433578,
0.11837165057659149,
-0.01015760749578476,
0.10194015502929688,
0.11082887649536133,
0.03233652561903,
-0.03858073800802231,
0.16613617539405823,
0.08450309932231903,
-0.11277695000171661,
0.001758623169735074,
0.03737903758883476,
0.09715615212917328,
-0.02818971499800682,
0.12721189856529236,
-0.11048974841833115,
-0.1464834064245224,
0.013753619976341724,
0.07152791321277618,
-0.15373679995536804,
0.3138748109340668,
0.012069208547472954,
-0.13481520116329193,
-0.01481647603213787,
-0.09957809001207352,
-0.006440147757530212,
0.1254177987575531,
0.09333524852991104,
0.07935678958892822,
-0.2185502052307129,
-0.13339371979236603,
0.05872276425361633,
-0.00575496768578887,
0.22408108413219452,
-0.034034017473459244,
-0.11356475204229355,
-0.027013886719942093,
0.04241163283586502,
-0.06043251231312752,
0.08524788916110992,
0.023536119610071182,
-0.08113526552915573,
-0.032957352697849274,
0.05323701351881027,
0.012368366122245789,
0.00524376705288887,
0.09360801428556442,
0.020107939839363098,
-0.0009265501867048442,
0.01785753294825554,
0.047885000705718994,
-0.0675911232829094,
-0.1984109878540039,
0.09357594698667526,
-0.05215044692158699,
0.0015536568826064467,
-0.08013670891523361,
-0.15122665464878082,
-0.08837161958217621,
-0.16009655594825745,
0.12540200352668762,
-0.034406669437885284,
0.12700119614601135,
-0.06619787961244583,
0.17341409623622894,
-0.07871770113706589,
0.04481020197272301,
-0.047349292784929276,
0.050332702696323395,
-0.007268077693879604,
-0.07756082713603973,
0.16585899889469147,
-0.15564003586769104,
0.01809087023139,
0.19572502374649048,
-0.018915493041276932,
0.07177707552909851,
0.021322092041373253,
-0.0636206790804863,
0.23147478699684143,
0.3014698624610901,
0.008138049393892288,
0.1665448248386383,
0.3018903136253357,
-0.07466315478086472,
-0.2642788887023926,
-0.05505012720823288,
-0.2841376066207886,
-0.05371501296758652,
0.10716094076633453,
-0.22523896396160126,
0.06986407935619354,
0.14383509755134583,
-0.06471995264291763,
0.30228954553604126,
-0.21825523674488068,
0.012589273042976856,
0.15434536337852478,
-0.08868814259767532,
0.5515313148498535,
-0.1133413165807724,
-0.17677772045135498,
-0.008122089318931103,
-0.08741296827793121,
0.10602109134197235,
-0.0340677872300148,
0.06877441704273224,
0.013465235009789467,
0.04797380417585373,
0.048932258039712906,
-0.03111894056200981,
0.22701001167297363,
0.008710170164704323,
0.09015397727489471,
-0.07378865778446198,
-0.18624304234981537,
0.11639340221881866,
-0.04359482601284981,
-0.08891059458255768,
0.0849778801202774,
-0.05942516401410103,
-0.11078983545303345,
0.04663389176130295,
-0.07950539886951447,
-0.024862350896000862,
0.08423490077257156,
-0.04678233340382576,
-0.042606171220541,
-0.008054176345467567,
-0.1618063747882843,
-0.0002289071271661669,
0.31360217928886414,
-0.07096036523580551,
0.16695955395698547,
0.03677211329340935,
0.00038613268407061696,
-0.11027684062719345,
0.030288029462099075,
-0.05203165486454964,
-0.021576624363660812,
0.09578979015350342,
-0.11096979677677155,
0.03204701095819473,
0.14160704612731934,
-0.04864364117383957,
0.05846960097551346,
0.09256096184253693,
-0.0849417969584465,
0.007583672646433115,
0.17753590643405914,
-0.17537221312522888,
-0.1273445188999176,
-0.006135711446404457,
-0.09862716495990753,
0.14055661857128143,
0.04394126310944557,
0.05191568285226822,
0.16669964790344238,
0.03967129811644554,
-0.029474308714270592,
-0.02817419543862343,
-0.1153380498290062,
-0.0201893113553524,
0.040153320878744125,
0.00045633706031367183,
-0.08791285753250122,
0.2262638509273529,
0.06409153342247009,
-0.1328488290309906,
-0.051157206296920776,
0.2161225974559784,
-0.06805316358804703,
-0.04911920800805092,
-0.223562553524971,
0.10752306133508682,
-0.07112517952919006,
-0.0965060144662857,
0.05453834682703018,
-0.02270081453025341,
0.005106312222778797,
0.181985542178154,
0.03941008821129799,
0.11070270836353302,
0.03738937899470329,
-0.02448922023177147,
0.15798696875572205,
-0.142850860953331,
-0.14191335439682007,
-0.025354057550430298,
-0.08757315576076508,
-0.13844476640224457,
-0.026804137974977493,
0.1617041826248169,
-0.09177309274673462,
-0.14772607386112213,
-0.2621181011199951,
0.10968475043773651,
-0.16432365775108337,
-0.10192688554525375,
-0.03469514101743698,
-0.08968492597341537,
0.0696166530251503,
0.030301768332719803,
-0.03093348816037178,
-0.06706760823726654,
-0.18593791127204895,
0.0816768929362297,
0.06349513679742813,
0.045533183962106705,
-0.017847947776317596,
0.0067379772663116455,
0.1720137596130371,
0.025955144315958023,
0.10040043294429779,
0.16762186586856842,
0.011397695168852806,
0.2246655523777008,
-0.1671202927827835,
-0.11496317386627197,
0.1336962729692459,
-0.026543032377958298,
0.06762003898620605,
0.16792191565036774,
-0.0772583931684494,
0.015526676550507545,
-0.028136352077126503,
0.07066910713911057,
-0.11003983020782471,
-0.105624258518219,
0.007937257178127766,
0.02567129209637642,
-0.2755882740020752,
-0.005599735304713249,
-0.19717298448085785,
0.14788752794265747,
0.02579621411859989,
0.03297143429517746,
0.10257530212402344,
0.10404334217309952,
0.08312062919139862,
-0.0017710148822516203,
0.03226327523589134,
-0.1176818460226059,
0.02753005363047123,
-0.059239376336336136,
-0.020663779228925705,
0.017624232918024063,
0.36952024698257446,
-0.03603357449173927,
-0.046802736818790436,
0.003710439894348383,
0.1307835876941681,
-0.02139742486178875,
0.017395347356796265,
0.13209912180900574,
0.12607666850090027,
-0.08595693111419678,
-0.1504845917224884,
0.04888554662466049,
-0.04565655067563057,
-0.02836887165904045,
0.1464131623506546,
0.05905961990356445,
0.1050296202301979,
0.0908031314611435,
-0.014463032595813274,
-0.00318976235575974,
0.012856799177825451,
-0.15486004948616028,
0.06223496049642563,
-0.010558074340224266,
0.012565906159579754,
0.017934376373887062,
0.15238402783870697,
-0.005540105979889631,
0.07739730179309845,
-0.09889880567789078,
0.004208535887300968,
-0.13498884439468384,
-0.07913459837436676,
0.03617347031831741,
-0.13393273949623108,
0.04141177982091904,
-0.01871878281235695,
0.029611799865961075,
0.30386561155319214,
0.02558239921927452,
-0.020639164373278618,
0.12512871623039246,
-0.1214587539434433,
-0.12050267308950424,
-0.001594188273884356,
-0.029960084706544876,
0.0791488066315651,
-0.02633434161543846,
-0.0997740775346756,
-0.1001306027173996,
-0.15166029334068298,
-0.09759195148944855,
0.05182836204767227,
-0.04993441700935364,
-0.059362251311540604,
-0.17634081840515137,
-0.05707859992980957,
-0.05147340148687363,
0.14025864005088806,
-0.12263951450586319,
0.15159130096435547,
-0.014490418136119843,
0.004084470681846142,
0.04405883327126503,
0.1950942426919937,
-0.03644494712352753,
0.08714226633310318,
0.0154351145029068,
0.1522706001996994,
-0.05119588226079941,
0.14720745384693146,
-0.10931728035211563,
-0.04014137014746666,
-0.06710435450077057,
0.21513493359088898,
0.25630924105644226,
-0.06136954948306084,
-0.008937356993556023,
-0.012760217301547527,
0.058654606342315674,
0.1073930487036705,
0.16049085557460785,
0.002326392102986574,
0.2802925705909729,
-0.03133585304021835,
0.04815128445625305,
0.02901598811149597,
0.013607407920062542,
-0.06336209923028946,
0.03397751972079277,
0.07539387792348862,
-0.035039983689785004,
-0.1412304788827896,
0.15837742388248444,
-0.21980468928813934,
0.18157227337360382,
0.11640069633722305,
-0.19996967911720276,
-0.013728445395827293,
-0.04882071167230606,
0.1689416468143463,
-0.0856364443898201,
0.1637246012687683,
-0.0903693437576294,
-0.2108195722103119,
-0.2056000679731369,
0.03867346793413162,
-0.34623071551322937,
-0.254462867975235,
0.10422009229660034,
0.1488201916217804,
0.04015883058309555,
-0.018507536500692368,
-0.019967829808592796,
-0.018367022275924683,
0.04877542704343796,
-0.0067357709631323814,
0.06014643982052803,
0.031397558748722076,
-0.02988368645310402,
-0.24127542972564697,
-0.029804671183228493,
0.023964406922459602,
-0.07093082368373871,
0.07464958727359772,
-0.06874357163906097,
-0.022495782002806664,
0.08059766888618469,
-0.03066304884850979,
0.03298592567443848,
-0.035373736172914505,
-0.16326889395713806,
0.027529051527380943,
0.03900543600320816,
0.036012712866067886,
0.00634160777553916,
0.0008072225609794259,
-0.03455270454287529,
0.0644603744149208,
-0.16716794669628143,
-0.16015739738941193,
0.14140215516090393,
-0.06745140254497528,
0.2779497504234314,
-0.05812826007604599,
-0.0809100940823555,
0.04766704887151718,
-0.03426874056458473,
0.1807648241519928,
-0.07756473124027252,
0.047254521399736404,
0.12766779959201813,
0.011127962730824947,
0.03121316432952881,
-0.3092964291572571,
0.11082969605922699,
-0.000795336440205574,
-0.006093299947679043,
-0.07581598311662674
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# bert-finetuned-ner
This model is a fine-tuned version of [bert-base-cased](https://huggingface.co/bert-base-cased) on the conll2003 dataset.
It achieves the following results on the evaluation set:
- Loss: 0.0586
- Precision: 0.9390
- Recall: 0.9554
- F1: 0.9471
- Accuracy: 0.9873
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 2e-05
- train_batch_size: 8
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 3
### Training results
| Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:---------:|:------:|:------:|:--------:|
| 0.0877 | 1.0 | 1756 | 0.0662 | 0.9081 | 0.9344 | 0.9210 | 0.9827 |
| 0.0376 | 2.0 | 3512 | 0.0599 | 0.9362 | 0.9502 | 0.9431 | 0.9862 |
| 0.0209 | 3.0 | 5268 | 0.0586 | 0.9390 | 0.9554 | 0.9471 | 0.9873 |
### Framework versions
- Transformers 4.14.1
- Pytorch 1.10.0+cu111
- Datasets 1.16.1
- Tokenizers 0.10.3
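
Below is a minimal sketch of how a fine-tuning run with the hyperparameters listed above might look using the `transformers` Trainer; it is an assumed reconstruction, not the original training script, and the label-alignment helper name is illustrative.

```python
# Hedged sketch: fine-tune bert-base-cased on conll2003 with the hyperparameters above.
from datasets import load_dataset
from transformers import (AutoTokenizer, AutoModelForTokenClassification,
                          DataCollatorForTokenClassification, TrainingArguments, Trainer)

raw = load_dataset("conll2003")
labels = raw["train"].features["ner_tags"].feature.names
tokenizer = AutoTokenizer.from_pretrained("bert-base-cased")

def tokenize_and_align(batch):
    # Tokenize pre-split words; give each word's NER tag to its first sub-token
    # and mask the remaining sub-tokens with -100 so the loss ignores them.
    enc = tokenizer(batch["tokens"], truncation=True, is_split_into_words=True)
    enc["labels"] = []
    for i, tags in enumerate(batch["ner_tags"]):
        prev, lab = None, []
        for w in enc.word_ids(batch_index=i):
            lab.append(-100 if w is None or w == prev else tags[w])
            prev = w
        enc["labels"].append(lab)
    return enc

tokenized = raw.map(tokenize_and_align, batched=True)
model = AutoModelForTokenClassification.from_pretrained("bert-base-cased",
                                                        num_labels=len(labels))

args = TrainingArguments(
    output_dir="bert-finetuned-ner",
    learning_rate=2e-5,
    per_device_train_batch_size=8,
    per_device_eval_batch_size=8,
    num_train_epochs=3,
    seed=42,
    evaluation_strategy="epoch",
)
trainer = Trainer(
    model=model,
    args=args,
    train_dataset=tokenized["train"],
    eval_dataset=tokenized["validation"],
    data_collator=DataCollatorForTokenClassification(tokenizer),
    tokenizer=tokenizer,
)
trainer.train()
```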
|
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "datasets": ["conll2003"], "metrics": ["precision", "recall", "f1", "accuracy"], "model-index": [{"name": "bert-finetuned-ner", "results": [{"task": {"type": "token-classification", "name": "Token Classification"}, "dataset": {"name": "conll2003", "type": "conll2003", "args": "conll2003"}, "metrics": [{"type": "precision", "value": 0.9389679126695336, "name": "Precision"}, {"type": "recall", "value": 0.9554022214742511, "name": "Recall"}, {"type": "f1", "value": 0.9471137804471137, "name": "F1"}, {"type": "accuracy", "value": 0.9873138282215812, "name": "Accuracy"}]}]}]}
|
token-classification
|
butchland/bert-finetuned-ner
|
[
"transformers",
"pytorch",
"tensorboard",
"bert",
"token-classification",
"generated_from_trainer",
"dataset:conll2003",
"license:apache-2.0",
"model-index",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #bert #token-classification #generated_from_trainer #dataset-conll2003 #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #region-us
|
bert-finetuned-ner
==================
This model is a fine-tuned version of bert-base-cased on the conll2003 dataset.
It achieves the following results on the evaluation set:
* Loss: 0.0586
* Precision: 0.9390
* Recall: 0.9554
* F1: 0.9471
* Accuracy: 0.9873
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 2e-05
* train\_batch\_size: 8
* eval\_batch\_size: 8
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 3
### Training results
### Framework versions
* Transformers 4.14.1
* Pytorch 1.10.0+cu111
* Datasets 1.16.1
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 3",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.14.1\n* Pytorch 1.10.0+cu111\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #bert #token-classification #generated_from_trainer #dataset-conll2003 #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 3",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.14.1\n* Pytorch 1.10.0+cu111\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
67,
98,
4,
33
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #bert #token-classification #generated_from_trainer #dataset-conll2003 #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 3### Training results### Framework versions\n\n\n* Transformers 4.14.1\n* Pytorch 1.10.0+cu111\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
-0.1074492484331131,
0.11698339134454727,
-0.002382210921496153,
0.1228412538766861,
0.15522177517414093,
0.034925371408462524,
0.12759685516357422,
0.12138662487268448,
-0.09012874215841293,
0.023809487000107765,
0.1260349005460739,
0.1622915118932724,
0.019299766048789024,
0.10621467232704163,
-0.04514283314347267,
-0.25333675742149353,
-0.0017165860626846552,
0.0517612099647522,
-0.06100789085030556,
0.13198880851268768,
0.09229730814695358,
-0.1326514184474945,
0.09054055064916611,
0.015087531879544258,
-0.19073572754859924,
0.004297820385545492,
0.006543699651956558,
-0.05684104934334755,
0.14740616083145142,
0.022828247398138046,
0.12947258353233337,
-0.002718306379392743,
0.09081345051527023,
-0.19105727970600128,
0.0076556941494345665,
0.052574705332517624,
0.005916724447160959,
0.09870415180921555,
0.04674506187438965,
0.012507249601185322,
0.1093934029340744,
-0.05999625101685524,
0.058369364589452744,
0.017845958471298218,
-0.11371832340955734,
-0.2193291187286377,
-0.09011918306350708,
0.052433986216783524,
0.07725948095321655,
0.09823783487081528,
0.002000556094571948,
0.14198370277881622,
-0.0872398167848587,
0.0857541486620903,
0.22507791221141815,
-0.29311639070510864,
-0.0643317922949791,
0.045756224542856216,
0.00894700363278389,
0.04477374628186226,
-0.10443688184022903,
-0.03683751821517944,
0.047344665974378586,
0.04718010127544403,
0.13055557012557983,
-0.030835913494229317,
-0.11140602827072144,
0.017103519290685654,
-0.14745162427425385,
-0.03813447803258896,
0.1578463315963745,
0.05014149472117424,
-0.03117956407368183,
-0.040708888322114944,
-0.061766866594552994,
-0.16966094076633453,
-0.029884710907936096,
-0.02384377084672451,
0.049626465886831284,
-0.02810305543243885,
-0.05234047770500183,
0.004001964349299669,
-0.10687029361724854,
-0.0711294487118721,
-0.08009450882673264,
0.12081657350063324,
0.045054513961076736,
0.01439168956130743,
-0.02492179349064827,
0.11239320039749146,
0.005621978547424078,
-0.11508764326572418,
0.027036886662244797,
0.025861483067274094,
0.004830540157854557,
-0.04190923646092415,
-0.049720220267772675,
-0.04711132496595383,
0.013334420509636402,
0.13291248679161072,
-0.044823385775089264,
0.035333938896656036,
0.05097811296582222,
0.04450058564543724,
-0.091117262840271,
0.1824113428592682,
-0.05764651298522949,
-0.027077393606305122,
0.000619573169387877,
0.05172759294509888,
0.014620828442275524,
-0.0024202996864914894,
-0.11949733644723892,
0.009715660475194454,
0.10352765023708344,
0.008732435293495655,
-0.06999596208333969,
0.07202479988336563,
-0.05941421911120415,
-0.028132013976573944,
0.014345812611281872,
-0.08557400852441788,
0.02705213986337185,
0.0013859840109944344,
-0.0777030661702156,
-0.02198226936161518,
0.026393773034214973,
0.01983082853257656,
-0.011309427209198475,
0.10028909146785736,
-0.09576771408319473,
0.015749700367450714,
-0.09275253862142563,
-0.10872729867696762,
0.01987026073038578,
-0.0997060164809227,
0.02972911112010479,
-0.09430713206529617,
-0.16465304791927338,
-0.0066184718161821365,
0.06545637547969818,
-0.021454088389873505,
-0.05943804234266281,
-0.0456552617251873,
-0.06899554282426834,
0.00531307328492403,
-0.011819109320640564,
0.12852510809898376,
-0.06482800841331482,
0.09113333374261856,
0.02068488486111164,
0.06170446425676346,
-0.044247571378946304,
0.05700473114848137,
-0.1002402976155281,
0.01578603684902191,
-0.14183582365512848,
0.01933932676911354,
-0.058111920952796936,
0.05459638684988022,
-0.09470342099666595,
-0.10357604175806046,
0.0180387981235981,
-0.0070784506388008595,
0.0686822384595871,
0.08589933067560196,
-0.17190009355545044,
-0.07285673171281815,
0.15207892656326294,
-0.06989258527755737,
-0.12216252833604813,
0.11659850925207138,
-0.061540812253952026,
0.04416836053133011,
0.0574338324368,
0.15312549471855164,
0.07762718200683594,
-0.08201837539672852,
0.0025980526115745306,
0.011850755661725998,
0.05176456272602081,
-0.07189375162124634,
0.07456862181425095,
0.005097848363220692,
0.01813308708369732,
0.029326578602194786,
-0.03730258345603943,
0.058305587619543076,
-0.09182056784629822,
-0.10061073303222656,
-0.032046422362327576,
-0.09333965182304382,
0.04132094979286194,
0.07383086532354355,
0.06823448091745377,
-0.08949977904558182,
-0.08154026418924332,
0.059639379382133484,
0.09323980659246445,
-0.05539923906326294,
0.022184792906045914,
-0.0654938817024231,
0.07319385558366776,
-0.048037976026535034,
-0.028885824605822563,
-0.17170684039592743,
-0.03550689294934273,
0.010363485664129257,
-0.005698459222912788,
0.007978023029863834,
0.0419367291033268,
0.06356967240571976,
0.06307624280452728,
-0.04619128257036209,
-0.020592838525772095,
-0.03257044032216072,
0.003909502178430557,
-0.1308802217245102,
-0.20537114143371582,
-0.0437607578933239,
-0.019096076488494873,
0.14261186122894287,
-0.20658805966377258,
0.036347873508930206,
-0.014267523773014545,
0.0808197408914566,
0.014669153839349747,
-0.006509921047836542,
-0.04075078293681145,
0.0748903825879097,
-0.046737801283597946,
-0.049635130912065506,
0.07326773554086685,
0.0034824691247195005,
-0.08464500308036804,
-0.051442861557006836,
-0.08885911852121353,
0.17276199162006378,
0.1286809891462326,
-0.10641954094171524,
-0.07458057254552841,
-0.019270090386271477,
-0.06501860916614532,
-0.03959280252456665,
-0.04009370133280754,
0.030275188386440277,
0.17660082876682281,
-0.007429101970046759,
0.14235615730285645,
-0.0685112401843071,
-0.04221781715750694,
0.023431137204170227,
-0.03239874914288521,
0.01408835593611002,
0.12372821569442749,
0.14432679116725922,
-0.08080087602138519,
0.15618951618671417,
0.14546947181224823,
-0.08951109647750854,
0.1254820078611374,
-0.04212890937924385,
-0.07276850938796997,
-0.021425342187285423,
-0.030836565420031548,
-0.007592375855892897,
0.11712001264095306,
-0.15428291261196136,
-0.00423008855432272,
0.03334968537092209,
0.020756877958774567,
0.020467225462198257,
-0.22722162306308746,
-0.04016052559018135,
0.0361030250787735,
-0.04041792452335358,
-0.0029038607608526945,
-0.01406033057719469,
0.0014550635823979974,
0.10257510840892792,
0.00004982904647476971,
-0.10807113349437714,
0.04004950448870659,
0.003162970533594489,
-0.07934761047363281,
0.21105873584747314,
-0.08195175975561142,
-0.14421994984149933,
-0.12410376220941544,
-0.08330855518579483,
-0.04924323409795761,
0.0020626247860491276,
0.05790961533784866,
-0.08233749121427536,
-0.03102177195250988,
-0.07023419439792633,
0.006768215913325548,
0.005081826355308294,
0.03178548440337181,
0.004248214885592461,
-0.003142031142488122,
0.06755750626325607,
-0.11269905418157578,
-0.010785999707877636,
-0.06117203086614609,
-0.05839593708515167,
0.0383724607527256,
0.03438854590058327,
0.11423667520284653,
0.15530991554260254,
-0.012438434176146984,
0.010183842852711678,
-0.026534661650657654,
0.22993440926074982,
-0.05604952946305275,
-0.023947222158312798,
0.1327197551727295,
-0.014717011712491512,
0.04718660190701485,
0.1146547868847847,
0.07481953501701355,
-0.08011692762374878,
-0.0022365842014551163,
0.0405840203166008,
-0.03180375695228577,
-0.2252926528453827,
-0.044505100697278976,
-0.050189532339572906,
-0.005877647548913956,
0.0946928933262825,
0.02563065104186535,
0.03712499514222145,
0.07627096027135849,
0.04248274862766266,
0.08003371208906174,
-0.050784721970558167,
0.0534493550658226,
0.11656016856431961,
0.03647409379482269,
0.12217642366886139,
-0.0394224189221859,
-0.057882342487573624,
0.042382679879665375,
0.007236721459776163,
0.22604255378246307,
0.010324254631996155,
0.13233377039432526,
0.07119962573051453,
0.18803825974464417,
-0.010555770248174667,
0.0770735964179039,
-0.016418414190411568,
-0.03603683412075043,
-0.018565330654382706,
-0.03795492276549339,
-0.03915299475193024,
0.026838485151529312,
-0.057380352169275284,
0.0693938136100769,
-0.1152411550283432,
0.012818515300750732,
0.048618387430906296,
0.26370498538017273,
0.0378287211060524,
-0.3293401002883911,
-0.0948655754327774,
-0.004423939622938633,
-0.036594681441783905,
-0.017035868018865585,
0.031502194702625275,
0.09494968503713608,
-0.09151501953601837,
0.014458815567195415,
-0.06943138688802719,
0.0880339965224266,
-0.056055400520563126,
0.041392359882593155,
0.0913524404168129,
0.09109028428792953,
0.013207006268203259,
0.08891794085502625,
-0.2767941653728485,
0.28171712160110474,
-0.0003302557743154466,
0.05706098675727844,
-0.07686538249254227,
0.00802754145115614,
0.03539071977138519,
0.06384152173995972,
0.07220026850700378,
-0.010547850281000137,
-0.017158932983875275,
-0.19474747776985168,
-0.06615003943443298,
0.03013208508491516,
0.058142490684986115,
-0.04148917645215988,
0.08791504800319672,
-0.03259216248989105,
0.004544517491012812,
0.07662494480609894,
0.015194493345916271,
-0.04282085597515106,
-0.09813029319047928,
-0.005489773582667112,
0.028669195249676704,
-0.05398347228765488,
-0.06729821860790253,
-0.118915855884552,
-0.12922832369804382,
0.15101999044418335,
-0.030786095187067986,
-0.032090071588754654,
-0.10279916226863861,
0.08175195753574371,
0.07480268180370331,
-0.08833976089954376,
0.04754483327269554,
0.0038506477139890194,
0.0677163377404213,
0.03597554191946983,
-0.06387605518102646,
0.10786700993776321,
-0.07636013627052307,
-0.16371212899684906,
-0.07156892865896225,
0.09996990859508514,
0.03906526416540146,
0.06409886479377747,
-0.007093713153153658,
0.00844359491020441,
-0.03998633846640587,
-0.08369043469429016,
0.023215098306536674,
0.006013697944581509,
0.08836453408002853,
0.0040573072619736195,
-0.06686697900295258,
0.017252907156944275,
-0.05843450129032135,
-0.03305281326174736,
0.19354066252708435,
0.22534266114234924,
-0.1006816029548645,
0.01912996731698513,
0.04437734931707382,
-0.07093784958124161,
-0.18757683038711548,
0.03355225920677185,
0.053458381444215775,
0.0037573541048914194,
0.03949031978845596,
-0.18323898315429688,
0.13949596881866455,
0.1158108040690422,
-0.014781365171074867,
0.10206872224807739,
-0.32641953229904175,
-0.11550803482532501,
0.14347624778747559,
0.14691412448883057,
0.09755928069353104,
-0.13539323210716248,
-0.019939765334129333,
-0.009012483060359955,
-0.14127957820892334,
0.11391156166791916,
-0.07830836623907089,
0.11488717794418335,
-0.03278020769357681,
0.08372743427753448,
0.002491330960765481,
-0.06233123317360878,
0.11650881171226501,
0.02319377288222313,
0.09790835529565811,
-0.053713005036115646,
-0.04474485293030739,
0.0322415791451931,
-0.03864048793911934,
0.023878300562500954,
-0.07901585102081299,
0.03142489492893219,
-0.09861916303634644,
-0.023711100220680237,
-0.07237778604030609,
0.04100198298692703,
-0.03856939822435379,
-0.07293682545423508,
-0.03898164629936218,
0.03127877041697502,
0.05260429531335831,
-0.012888211756944656,
0.13168494403362274,
0.03883565962314606,
0.14021098613739014,
0.09855381399393082,
0.06878593564033508,
-0.07315035909414291,
-0.08856244385242462,
-0.030779171735048294,
-0.015954777598381042,
0.06403955072164536,
-0.12418590486049652,
0.02255711890757084,
0.1448213905096054,
0.020959828048944473,
0.14191775023937225,
0.08032190054655075,
-0.028779631480574608,
0.0007462025969289243,
0.055655062198638916,
-0.1615155041217804,
-0.0720653086900711,
-0.0023991859052330256,
-0.04987620934844017,
-0.11637001484632492,
0.05983209237456322,
0.09337449818849564,
-0.07405837625265121,
-0.009627113118767738,
-0.003131087403744459,
0.012198276817798615,
-0.05327559635043144,
0.19238801300525665,
0.06314712017774582,
0.047574542462825775,
-0.09776027500629425,
0.07158654928207397,
0.0483444407582283,
-0.06971530616283417,
0.0010317033156752586,
0.04760708659887314,
-0.08618787676095963,
-0.04897281527519226,
0.05808137729763985,
0.17526297271251678,
-0.0540875643491745,
-0.05159357190132141,
-0.1367105096578598,
-0.11269891262054443,
0.07689550518989563,
0.14571158587932587,
0.11669349670410156,
0.016178684309124947,
-0.06229240819811821,
0.005924403201788664,
-0.11049982905387878,
0.09536439925432205,
0.03543989732861519,
0.0679175853729248,
-0.15270884335041046,
0.14183805882930756,
0.013160254806280136,
0.039910152554512024,
-0.016278037801384926,
0.028599178418517113,
-0.10369930416345596,
0.007954966276884079,
-0.11612364649772644,
-0.026025181636214256,
-0.03528023138642311,
0.010059275664389133,
0.0011471844045445323,
-0.059306785464286804,
-0.061372045427560806,
0.016899151727557182,
-0.11253875494003296,
-0.017007356509566307,
0.04048798233270645,
0.0697140246629715,
-0.11508741229772568,
-0.03390509635210037,
0.028061002492904663,
-0.05908668413758278,
0.07176516205072403,
0.04314565286040306,
0.026339514181017876,
0.04992508888244629,
-0.12845487892627716,
0.017647841945290565,
0.07000238448381424,
0.027308980002999306,
0.0748385339975357,
-0.10340788960456848,
-0.007719729095697403,
-0.0033635280560702085,
0.03773968294262886,
0.014791525900363922,
0.07650606334209442,
-0.138172909617424,
-0.01106279157102108,
-0.019647879526019096,
-0.07669321447610855,
-0.06348075717687607,
0.021045951172709465,
0.10026916116476059,
0.011957299895584583,
0.2044110894203186,
-0.0673333927989006,
0.04147513583302498,
-0.211604043841362,
0.005715911276638508,
-0.011702721938490868,
-0.10424303263425827,
-0.11583338677883148,
-0.05898692086338997,
0.051905322819948196,
-0.06318709254264832,
0.15212970972061157,
0.02940960042178631,
0.017088336870074272,
0.021638134494423866,
-0.02164112776517868,
0.022257620468735695,
0.01731210947036743,
0.19802862405776978,
0.03581344708800316,
-0.03000180795788765,
0.05675666406750679,
0.04638662561774254,
0.09956304728984833,
0.10928119719028473,
0.18701471388339996,
0.14631158113479614,
-0.006462522782385349,
0.09129258990287781,
0.04398084804415703,
-0.06853420287370682,
-0.1606759876012802,
0.03992724046111107,
-0.043195921927690506,
0.1079442948102951,
-0.017093220725655556,
0.23306845128536224,
0.0653676763176918,
-0.16740486025810242,
0.038290031254291534,
-0.05356709659099579,
-0.08045876026153564,
-0.10948701947927475,
-0.05737118050456047,
-0.07799782603979111,
-0.12965507805347443,
-0.0020153315272182226,
-0.11372067034244537,
-0.0010245360899716616,
0.13361915946006775,
0.010151488706469536,
-0.023559337481856346,
0.1504921317100525,
0.014137991704046726,
0.032543789595365524,
0.04005391150712967,
0.01756606437265873,
-0.04085351526737213,
-0.12860828638076782,
-0.06432311236858368,
-0.01705271378159523,
-0.013268639333546162,
0.03198952600359917,
-0.07181167602539062,
-0.04131164774298668,
0.03422418609261513,
-0.011277218349277973,
-0.0927838608622551,
0.006133797112852335,
-0.00003377004759386182,
0.05376178026199341,
0.03637528046965599,
0.010147860273718834,
0.02969863824546337,
-0.006638644728809595,
0.1963392198085785,
-0.07455683499574661,
-0.05623834207653999,
-0.11009179800748825,
0.24299025535583496,
0.030063027516007423,
-0.013708963058888912,
0.040418531745672226,
-0.06391185522079468,
0.004957623314112425,
0.24791058897972107,
0.21168950200080872,
-0.08397267758846283,
-0.011812274344265461,
0.020674483850598335,
-0.012975463643670082,
-0.0370255745947361,
0.09942377358675003,
0.13944822549819946,
0.04939046502113342,
-0.09116866439580917,
-0.04577267915010452,
-0.061295367777347565,
-0.009391785599291325,
-0.02970074489712715,
0.06570304185152054,
0.0516682006418705,
0.008400795049965382,
-0.042431943118572235,
0.0451652854681015,
-0.05519978702068329,
-0.1087324321269989,
0.06559963524341583,
-0.2031203657388687,
-0.16926433145999908,
-0.012517301365733147,
0.10494889318943024,
0.002346363151445985,
0.05948646739125252,
-0.030714116990566254,
0.0010001829359680414,
0.09050954878330231,
-0.014642005786299706,
-0.10043344646692276,
-0.08653035014867783,
0.103233203291893,
-0.08629322797060013,
0.23261870443820953,
-0.046659402549266815,
0.06518238037824631,
0.12650945782661438,
0.0640711709856987,
-0.07552318274974823,
0.054950349032878876,
0.051911499351263046,
-0.06818891316652298,
0.016116956248879433,
0.058742914348840714,
-0.028383202850818634,
0.08238182961940765,
0.041805967688560486,
-0.12826018035411835,
0.016925251111388206,
-0.06600135564804077,
-0.06154686212539673,
-0.04159378632903099,
-0.028420230373740196,
-0.05703960731625557,
0.13549529016017914,
0.21287673711776733,
-0.03071114420890808,
-0.011741322465240955,
-0.07470434159040451,
0.020976915955543518,
0.05613837018609047,
0.01152550708502531,
-0.05816882103681564,
-0.21442344784736633,
0.021222906187176704,
0.032427966594696045,
-0.018105754628777504,
-0.22518673539161682,
-0.0975896492600441,
0.012530527077615261,
-0.07710176706314087,
-0.0942244604229927,
0.06438511610031128,
0.0836399495601654,
0.05926011502742767,
-0.06047607585787773,
-0.035878583788871765,
-0.08018461614847183,
0.13807518780231476,
-0.14637711644172668,
-0.09329812973737717
] |