modelId (string, 4-112 chars) | sha (string, 40 chars) | lastModified (string, 24 chars) | tags (sequence) | pipeline_tag (string, 29 classes) | private (bool, 1 class) | author (string, 2-38 chars, nullable) | config (null) | id (string, 4-112 chars) | downloads (float64, 0-36.8M, nullable) | likes (float64, 0-712, nullable) | library_name (string, 17 classes) | readme (string, 0-186k chars) | embedding (sequence) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
allenai/cs_roberta_base | f56079f4997a5660c9deffca2827798eb39ac6cd | 2021-05-20T13:02:35.000Z | [
"pytorch",
"jax",
"roberta",
"transformers"
] | null | false | allenai | null | allenai/cs_roberta_base | 3,095 | 1 | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
Wavepaw/DialoGPT-medium-WardenIngo | 78044c2b1cda28107bd5b6d8e1fae32d96103210 | 2022-04-23T21:20:51.000Z | [
"pytorch",
"gpt2",
"text-generation",
"transformers",
"conversational"
] | conversational | false | Wavepaw | null | Wavepaw/DialoGPT-medium-WardenIngo | 3,093 | null | transformers | ---
tags:
- conversational
---
# Warden Ingo DialoGPT Model | [
-0.053123053163290024,
0.004645736888051033,
0.002100412268191576,
-0.036995138972997665,
0.03757556900382042,
-0.058671873062849045,
0.22322465479373932,
0.005124442745000124,
0.06205340102314949,
0.00667716795578599,
0.007695036008954048,
-0.06804350763559341,
-0.0382518507540226,
0.03556392341852188,
0.01658288948237896,
0.08089578151702881,
0.01786310411989689,
0.0020705293864011765,
-0.06216679513454437,
0.01559846755117178,
-0.003711978904902935,
0.08188273012638092,
0.016642648726701736,
0.020168086513876915,
-0.08789068460464478,
-0.0548318512737751,
-0.05805375054478645,
0.023866811767220497,
0.07207430154085159,
-0.059794239699840546,
-0.042672403156757355,
0.07305517047643661,
0.0590723380446434,
0.030901068821549416,
-0.08515165746212006,
0.04446139931678772,
0.05367562547326088,
0.00317190564237535,
-0.05346103012561798,
-0.03375895321369171,
-0.053812891244888306,
0.060727063566446304,
-0.07022163271903992,
-0.017729507759213448,
0.04246833175420761,
-0.03736710175871849,
-0.13596871495246887,
-0.018362993374466896,
-0.04289732500910759,
-0.024494856595993042,
-0.07955165952444077,
-0.012632617726922035,
0.005828250665217638,
0.1195412427186966,
-0.02263808622956276,
0.018028924241662025,
-0.04865206405520439,
-0.032011978328228,
0.07322709262371063,
0.03435150161385536,
-0.04222768172621727,
-0.017666609957814217,
-0.09438173472881317,
0.047957584261894226,
-0.027747169137001038,
0.0520755760371685,
-0.0459420382976532,
0.03289823606610298,
-0.0154527947306633,
0.06794163584709167,
0.012963072396814823,
-0.011252007447183132,
0.007784631103277206,
-0.06994841992855072,
-0.01575586386024952,
0.05115015432238579,
-0.03030785545706749,
0.006284763570874929,
0.05838875472545624,
-0.06101745739579201,
0.04388817772269249,
-0.06088782474398613,
0.043535638600587845,
-0.010179374366998672,
0.018808789551258087,
-0.0034216283820569515,
-0.04212789610028267,
-0.04791301488876343,
0.018862560391426086,
0.04906061664223671,
-0.02080596797168255,
-0.055086344480514526,
0.05347742885351181,
-0.017204411327838898,
0.00945817120373249,
0.06044726446270943,
-0.021647784858942032,
-0.057202357798814774,
-0.049077946692705154,
0.11188387870788574,
-0.004384469706565142,
0.035458773374557495,
-0.02136467769742012,
-0.07472967356443405,
0.013970330357551575,
0.009932883083820343,
0.03205317631363869,
-0.039897553622722626,
0.009101281873881817,
0.02829604037106037,
-0.022809194400906563,
-0.04319426044821739,
0.04648986831307411,
-0.0073776585049927235,
0.07984559237957001,
-0.060906533151865005,
0.012355836108326912,
-0.03437395021319389,
0.04952043667435646,
0.026704125106334686,
0.006850333884358406,
0.01778172142803669,
-0.06967480480670929,
-0.03509385883808136,
-0.00036742017255164683,
0.02629663795232773,
-0.015825197100639343,
-1.703885122659652e-33,
0.06225254759192467,
0.038095783442258835,
0.000046671353629790246,
0.09204495698213577,
0.06923089176416397,
0.04512159526348114,
-0.05427144095301628,
-0.03599999472498894,
-0.03322033956646919,
-0.0065186889842152596,
-0.03861211985349655,
-0.07210899144411087,
-0.07652889937162399,
0.032522398978471756,
0.06330589950084686,
-0.018611567094922066,
-0.09547271579504013,
0.07055328041315079,
-0.007089204154908657,
-0.08725171536207199,
0.026808610185980797,
0.026356950402259827,
-0.04491857811808586,
0.03690599650144577,
0.11089470982551575,
0.0938267707824707,
0.043516919016838074,
-0.12645576894283295,
0.004893670789897442,
0.05928553640842438,
-0.03764733672142029,
0.02857108600437641,
-0.02159992977976799,
0.001586638274602592,
-0.002716998802497983,
-0.010806454345583916,
0.022049596533179283,
-0.03931109607219696,
-0.020030567422509193,
-0.09383658319711685,
-0.024224475026130676,
-0.003310454310849309,
-0.06753827631473541,
-0.04759043827652931,
-0.059358179569244385,
-0.011587570421397686,
-0.008436579257249832,
0.03543941304087639,
0.036808863282203674,
-0.020341692492365837,
-0.03818414360284805,
0.017075877636671066,
-0.019888773560523987,
-0.060622069984674454,
0.008334577083587646,
-0.034369517117738724,
-0.0014815895119681954,
-0.01684698276221752,
0.023807352408766747,
-0.006546278949826956,
0.026188962161540985,
0.0032615463715046644,
0.03401901200413704,
-0.05207080394029617,
0.091631680727005,
-0.028056560084223747,
-0.07058455049991608,
-0.020047197118401527,
0.04411819949746132,
-0.03341076523065567,
-0.07225538790225983,
-0.0038668964989483356,
-0.033566463738679886,
0.05843677371740341,
-0.054332535713911057,
0.0011198214488103986,
-0.01903803087770939,
-0.047785673290491104,
0.03354187682271004,
0.06576856970787048,
-0.051490601152181625,
-0.06906302273273468,
-0.07522745430469513,
0.061334993690252304,
0.010892413556575775,
-0.08359523117542267,
0.07093023508787155,
-0.11171170324087143,
0.01537596620619297,
0.07021855562925339,
0.03369662165641785,
0.07564578205347061,
-0.06696714460849762,
0.016692958772182465,
-0.11450417339801788,
-2.4833498247246454e-34,
-0.006695407908409834,
-0.012187670916318893,
-0.0752146765589714,
0.04304353892803192,
-0.018358618021011353,
0.014826194383203983,
0.011894729919731617,
0.08419296145439148,
0.06645800918340683,
-0.04813088849186897,
-0.019235949963331223,
0.07743826508522034,
0.054711613804101944,
0.045389652252197266,
0.08458258211612701,
-0.016339702531695366,
0.028039028868079185,
-0.040822647511959076,
0.011109696701169014,
0.018495414406061172,
0.050664450973272324,
-0.050905849784612656,
-0.12823045253753662,
0.03657078370451927,
-0.001239557284861803,
-0.024718035012483597,
-0.01717999018728733,
0.05388953164219856,
0.06913839280605316,
-0.032124634832143784,
-0.02958270162343979,
-0.037964463233947754,
-0.0218005683273077,
0.02230231463909149,
0.00435391440987587,
-0.004448352847248316,
0.11041107773780823,
-0.021028030663728714,
-0.005220262799412012,
0.055450644344091415,
0.04678529128432274,
-0.016643693670630455,
-0.009980530478060246,
-0.02659030072391033,
-0.03726094961166382,
-0.050073184072971344,
-0.009520248509943485,
-0.0003638320486061275,
-0.11067958921194077,
-0.00558274844661355,
0.029191235080361366,
-0.027533888816833496,
-0.07162048667669296,
-0.01817893050611019,
-0.08007565885782242,
-0.03422462195158005,
0.018723903223872185,
-0.09077959507703781,
-0.06466060876846313,
0.03954651579260826,
-0.00566817494109273,
-0.02490239590406418,
0.02210770733654499,
-0.03251640126109123,
0.04578747600317001,
0.033869706094264984,
-0.027292056009173393,
0.024396507069468498,
-0.02914743311703205,
-0.08778827637434006,
0.12363717705011368,
-0.0292381402105093,
-0.006047720089554787,
-0.00001536571107862983,
0.04533442482352257,
-0.004402237944304943,
0.018138140439987183,
-0.06767413020133972,
0.06198440119624138,
-0.12420304864645004,
-0.01019895076751709,
0.009642904624342918,
0.01718064583837986,
0.053646959364414215,
-0.0001212595307151787,
0.018168434500694275,
0.02900432050228119,
0.07359107583761215,
0.04292534664273262,
0.021568341180682182,
0.003627142868936062,
0.026675105094909668,
0.02032172866165638,
0.10563953220844269,
-0.020265208557248116,
-2.681443689311891e-8,
-0.11114000529050827,
-0.016919953748583794,
-0.007959861308336258,
0.05351977422833443,
0.007570653222501278,
-0.021739529445767403,
0.06611113250255585,
0.00024857508833520114,
-0.0566285103559494,
-0.016100404784083366,
0.06959587335586548,
0.051781754940748215,
-0.03337182104587555,
0.0352637879550457,
0.05627909675240517,
0.027369655668735504,
-0.06297440826892853,
0.052120745182037354,
-0.025523653253912926,
-0.031617674976587296,
0.06501448154449463,
0.01915258727967739,
-0.040709927678108215,
0.031154677271842957,
-0.020062439143657684,
-0.009171227924525738,
-0.08394819498062134,
0.011799882166087627,
0.004889448639005423,
0.08831489831209183,
0.061342962086200714,
0.07854363322257996,
-0.09204810112714767,
0.01901528611779213,
-0.06003652885556221,
0.09294800460338593,
-0.06240655481815338,
-0.06678175926208496,
0.00607944093644619,
-0.04641042277216911,
0.06226838007569313,
-0.005851969588547945,
-0.08894045650959015,
-0.043393574655056,
0.0562632791697979,
0.0412314310669899,
0.007410647813230753,
-0.09930716454982758,
0.015319684520363808,
0.034379638731479645,
-0.03726782649755478,
-0.00749689107760787,
0.0971323773264885,
0.04489705339074135,
0.03647920489311218,
-0.024440843611955643,
0.06675857305526733,
-0.02528393641114235,
0.08792971819639206,
0.02370082587003708,
0.06837555766105652,
0.05655165761709213,
0.0475982204079628,
0.019131653010845184
] |
KoboldAI/GPT-J-6B-Shinen | afa5a11b24cb23eee708e17c83b920a788e9e07b | 2022-03-20T18:48:45.000Z | [
"pytorch",
"gptj",
"text-generation",
"en",
"arxiv:2101.00027",
"transformers",
"license:mit"
] | text-generation | false | KoboldAI | null | KoboldAI/GPT-J-6B-Shinen | 3,092 | 1 | transformers | ---
language: en
license: mit
---
# GPT-J 6B - Shinen
## Model Description
GPT-J 6B-Shinen is a fine-tune created using EleutherAI's GPT-J 6B model. Compared to GPT-Neo-2.7B-Horni, this model is much heavier on the sexual content.
**Warning: THIS model is NOT suitable for use by minors. The model will output X-rated content.**
## Training data
The training data contains user-generated stories from sexstories.com. All stories are tagged in the following way:
```
[Theme: <theme1>, <theme2> ,<theme3>]
<Story goes here>
```
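For illustration, here is a minimal sketch of assembling a prompt in this tag format before passing it to the model; the helper name and the example themes are assumptions for illustration, not part of the training data:
```py
# Hypothetical helper: joins theme tags and a story opening into the
# "[Theme: ...]" prompt format shown above.
def build_prompt(themes, opening):
    theme_line = "[Theme: " + ", ".join(themes) + "]"
    return theme_line + "\n" + opening

# Example (themes and opening text are made up for illustration):
prompt = build_prompt(["romance", "adventure"], "She was staring at me")
print(prompt)
# [Theme: romance, adventure]
# She was staring at me
```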
### How to use
You can use this model directly with a pipeline for text generation. This example generates a different sequence each time it's run:
```py
>>> from transformers import pipeline
>>> generator = pipeline('text-generation', model='KoboldAI/GPT-J-6B-Shinen')
>>> generator("She was staring at me", do_sample=True, min_length=50)
[{'generated_text': 'She was staring at me with a look that said it all. She wanted me so badly tonight that I wanted'}]
```
### Limitations and Biases
The core functionality of GPT-J is taking a string of text and predicting the next token. While language models are widely used for tasks other than this, there are a lot of unknowns with this work. When prompting GPT-J it is important to remember that the statistically most likely next token is often not the token that produces the most "accurate" text. Never depend upon GPT-J to produce factually accurate output.
GPT-J was trained on the Pile, a dataset known to contain profanity, lewd, and otherwise abrasive language. Depending upon use case GPT-J may produce socially unacceptable text. See [Sections 5 and 6 of the Pile paper](https://arxiv.org/abs/2101.00027) for a more detailed analysis of the biases in the Pile.
As with all language models, it is hard to predict in advance how GPT-J will respond to particular prompts and offensive content may occur without warning. We recommend having a human curate or filter the outputs before releasing them, both to censor undesirable content and to improve the quality of the results.
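As a rough illustration of the kind of automated pre-filtering that could complement human curation, here is a minimal sketch using a keyword blocklist; the blocklist contents and function name are assumptions, not part of this model card:
```py
# Minimal, illustrative output filter: rejects any generation containing
# a blocklisted substring. A real deployment would need a far more robust
# approach (trained classifiers, human review, etc.).
BLOCKLIST = {"example_banned_word"}  # placeholder terms, not a real list

def is_releasable(text: str) -> bool:
    lowered = text.lower()
    return not any(term in lowered for term in BLOCKLIST)

outputs = ["Some generated story..."]  # e.g. collected from the pipeline above
safe_outputs = [o for o in outputs if is_releasable(o)]
```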
### BibTeX entry and citation info
The model uses the following model as its base:
```bibtex
@misc{gpt-j,
author = {Wang, Ben and Komatsuzaki, Aran},
title = {{GPT-J-6B: A 6 Billion Parameter Autoregressive Language Model}},
howpublished = {\url{https://github.com/kingoflolz/mesh-transformer-jax}},
year = 2021,
month = May
}
```
## Acknowledgements
This project would not have been possible without compute generously provided by Google through the
[TPU Research Cloud](https://sites.research.google/trc/), as well as the Cloud TPU team for providing early access to the [Cloud TPU VM](https://cloud.google.com/blog/products/compute/introducing-cloud-tpu-vms) Alpha.
| [
-0.08336648344993591,
0.02847003936767578,
-0.010457075200974941,
0.08082298934459686,
0.005438848864287138,
-0.02593851275742054,
-0.01720978505909443,
0.012822921387851238,
-0.02447722665965557,
-0.08372512459754944,
-0.04064306244254112,
0.014758502133190632,
0.02365623600780964,
0.0084183095023036,
0.036147408187389374,
0.0386345311999321,
-0.004789033439010382,
0.0208586435765028,
-0.03630920499563217,
-0.09437240660190582,
0.12846820056438446,
0.06613311171531677,
0.07425795495510101,
0.03913627937436104,
-0.03384818509221077,
-0.022599369287490845,
-0.0051032924093306065,
0.03525448963046074,
0.019844118505716324,
0.029920659959316254,
0.04979931190609932,
0.0625184029340744,
-0.046687833964824677,
0.03526720404624939,
-0.07158658653497696,
0.08275730162858963,
-0.04884172976016998,
-0.02113017998635769,
-0.00014523378922604024,
0.045411769300699234,
0.07491125911474228,
-0.03452685847878456,
-0.06565467268228531,
-0.042070694267749786,
0.07489797472953796,
-0.10815513879060745,
-0.1345314085483551,
-0.09271524101495743,
-0.06316056102514267,
-0.004374136682599783,
-0.06703628599643707,
-0.07583516091108322,
0.0012225197860971093,
0.02020547352731228,
-0.025866791605949402,
0.061845943331718445,
-0.008638019673526287,
-0.09406852722167969,
0.08764379471540451,
-0.10110907256603241,
-0.011242564767599106,
-0.020792430266737938,
-0.07120125740766525,
-0.0390327163040638,
-0.0076993959955871105,
-0.036512576043605804,
0.00721163721755147,
0.09773537516593933,
0.011303748935461044,
0.004497921094298363,
-0.019312800839543343,
0.05488281697034836,
-0.12079630047082901,
0.029143676161766052,
-0.032836560159921646,
0.06229419633746147,
0.04825391620397568,
-0.05794665217399597,
-0.027263151481747627,
-0.03961211442947388,
0.005063995718955994,
-0.05726412683725357,
0.0446108914911747,
0.02895507402718067,
-0.04321141913533211,
0.011829370632767677,
0.03907252103090286,
0.04319595545530319,
-0.026929346844553947,
0.028733475133776665,
-0.08102905005216599,
-0.04391835629940033,
0.03626242280006409,
0.011443553492426872,
-0.060712508857250214,
0.0010794906411319971,
-0.02307300642132759,
-0.04962560161948204,
-0.031440846621990204,
0.0697236955165863,
-0.01423693634569645,
-0.004477505572140217,
0.03429454565048218,
0.034331779927015305,
-0.08145178854465485,
-0.08010111004114151,
-0.0007662010029889643,
0.06463483721017838,
-0.05470532551407814,
-0.01029013842344284,
0.05655123293399811,
-0.015053458511829376,
-0.01768917217850685,
-0.057360000908374786,
0.03881412371993065,
0.05004529654979706,
-0.05775976926088333,
-0.0038826270028948784,
-0.034629277884960175,
0.12375231832265854,
0.06386371701955795,
0.0452127680182457,
-0.07363584637641907,
0.09513518959283829,
-0.017890799790620804,
-0.05770449340343475,
-0.011658113449811935,
6.909265593965547e-33,
0.04891517013311386,
-0.0019158065551891923,
0.02943119779229164,
0.03581174463033676,
0.06224852055311203,
0.13750693202018738,
0.006915140897035599,
-0.0766451507806778,
-0.07584689557552338,
-0.020572714507579803,
-0.03676677122712135,
0.06311093270778656,
-0.07443224638700485,
-0.004389635287225246,
-0.03306932374835014,
-0.003822163213044405,
0.01530675683170557,
0.017793990671634674,
-0.005757497623562813,
0.05771574005484581,
0.05017700791358948,
0.0659373328089714,
-0.03444141522049904,
-0.036609068512916565,
-0.08461694419384003,
0.048601601272821426,
-0.028433050960302353,
-0.044789716601371765,
-0.06519544869661331,
0.008774937130510807,
-0.06850610673427582,
-0.016167661175131798,
0.08553944528102875,
0.061636969447135925,
0.028007762506604195,
-0.018779058009386063,
0.03044017404317856,
-0.07936771214008331,
0.015527884475886822,
-0.04901246726512909,
0.004956947173923254,
-0.0012431662762537599,
0.0675184428691864,
-0.03808407112956047,
-0.06656625121831894,
0.056604333221912384,
0.025839662179350853,
0.023726290091872215,
-0.017635714262723923,
0.06646724790334702,
-0.061852503567934036,
0.10550728440284729,
-0.058370765298604965,
0.023555191233754158,
-0.021153585985302925,
0.03656698390841484,
0.04001432657241821,
0.015750223770737648,
0.10011398792266846,
-0.024762023240327835,
0.08521717041730881,
0.0424552820622921,
0.03635881841182709,
-0.005584369413554668,
0.045327078551054,
0.031131107360124588,
0.026221677660942078,
0.05203757807612419,
0.04794451966881752,
0.021116361021995544,
-0.05537072569131851,
0.01947861909866333,
-0.044403545558452606,
0.010343452915549278,
0.051796164363622665,
-0.05546790733933449,
0.02115705981850624,
-0.03747103363275528,
-0.05199481174349785,
0.035429444164037704,
-0.045491963624954224,
-0.002534030005335808,
-0.04368457943201065,
-0.07726690918207169,
-0.031502317637205124,
0.0002443855337332934,
0.034511469304561615,
-0.03282016143202782,
-0.008015268482267857,
0.0008890804019756615,
-0.06022671237587929,
-0.04918426647782326,
-0.03306480497121811,
-0.028320953249931335,
-0.008265084587037563,
-7.478306246151292e-33,
0.03716087341308594,
0.022481679916381836,
-0.007558738812804222,
0.09264566004276276,
0.05191410332918167,
-0.07226280868053436,
0.041111089289188385,
0.01894548162817955,
-0.009059390984475613,
-0.016601867973804474,
0.034596990793943405,
-0.015506459400057793,
-0.017405293881893158,
-0.06567666679620743,
0.11583781987428665,
0.011187474243342876,
-0.011632346548140049,
-0.04168141260743141,
-0.010400009341537952,
0.05109289661049843,
-0.059228479862213135,
0.08164121955633163,
-0.19119229912757874,
0.08187031745910645,
-0.0009484285837970674,
-0.02849092148244381,
0.02800469845533371,
-0.009615478105843067,
0.032898277044296265,
-0.006525902077555656,
0.005267659202218056,
0.015942050144076347,
-0.053984880447387695,
0.03868967667222023,
-0.0734330266714096,
0.010554392822086811,
0.08098824322223663,
0.031320054084062576,
0.011915030889213085,
0.11661230772733688,
0.03314460813999176,
0.004099555313587189,
-0.060671620070934296,
0.0768970251083374,
-0.048887304961681366,
0.07256262004375458,
0.0268689151853323,
0.007180522195994854,
0.017074380069971085,
-0.000984557787887752,
0.0010897988686338067,
-0.023700060322880745,
-0.05657557025551796,
-0.0031565751414746046,
-0.04236060380935669,
-0.16009733080863953,
0.024503277614712715,
-0.007291204761713743,
-0.08254746347665787,
-0.0037580346688628197,
0.015624807216227055,
-0.02556190825998783,
-0.007431840058416128,
-0.0816602036356926,
-0.04423320293426514,
-0.12134912610054016,
-0.01654181070625782,
-0.04543161764740944,
0.019734296947717667,
0.026959890499711037,
0.01939614675939083,
-0.007371085695922375,
0.05145027115941048,
-0.008292952552437782,
-0.014151972718536854,
-0.026437828317284584,
-0.09062179177999496,
-0.007563780061900616,
-0.015786075964570045,
-0.01766606979072094,
-0.012514402158558369,
0.05326542258262634,
0.0544021911919117,
0.06398214399814606,
0.06889402121305466,
-0.0521501824259758,
0.05860485881567001,
0.10686850547790527,
-0.05582934990525246,
-0.010713420808315277,
-0.05709432438015938,
0.057790763676166534,
-0.0008084829896688461,
0.054678160697221756,
-0.03660566359758377,
-5.60855859532694e-8,
-0.017657959833741188,
-0.08681433647871017,
-0.09963632375001907,
0.07075276225805283,
-0.01913798600435257,
-0.007524365093559027,
0.008825232274830341,
-0.006113145500421524,
-0.02438204362988472,
0.008268374018371105,
0.01918480545282364,
-0.04926086589694023,
-0.020396124571561813,
-0.023215401917696,
0.026750238612294197,
0.014607111923396587,
0.06873587518930435,
0.08566540479660034,
-0.014876144006848335,
-0.02943062037229538,
0.028782637789845467,
0.03904436528682709,
-0.011215845122933388,
-0.028866777196526527,
-0.03580674156546593,
0.025853045284748077,
-0.02790037915110588,
0.0023160127457231283,
0.003723245346918702,
-0.0017193666426464915,
0.0758751854300499,
-0.05110244080424309,
-0.03391920030117035,
0.025393372401595116,
0.02902311645448208,
0.03717551380395889,
0.010690494440495968,
0.009855817072093487,
0.07784535735845566,
0.011671208776533604,
0.06641589850187302,
0.016804927960038185,
-0.03653420880436897,
0.015402266755700111,
-0.018812932074069977,
0.009735832922160625,
-0.028053313493728638,
-0.16890814900398254,
0.0703713670372963,
0.09974709153175354,
-0.019146909937262535,
-0.04175271838903427,
-0.024283362552523613,
-0.018754862248897552,
0.08113930374383926,
-0.04262866824865341,
0.023072468116879463,
0.013112397864460945,
-0.018269820138812065,
0.02739783562719822,
0.0374176912009716,
0.0010290972422808409,
0.04714606702327728,
-0.0596732534468174
] |
pranavpsv/genre-story-generator-v2 | b9950761c9c3fabf7d6365f4be8cf0d6b79673b4 | 2021-05-23T11:01:02.000Z | [
"pytorch",
"jax",
"gpt2",
"text-generation",
"transformers"
] | text-generation | false | pranavpsv | null | pranavpsv/genre-story-generator-v2 | 3,090 | 1 | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
uer/bart-base-chinese-cluecorpussmall | 913015fd84e5f0ed219a1c7a8c3819b07006d179 | 2022-07-15T08:17:16.000Z | [
"pytorch",
"tf",
"bart",
"text2text-generation",
"zh",
"dataset:CLUECorpusSmall",
"arxiv:1909.05658",
"transformers",
"autotrain_compatible"
] | text2text-generation | false | uer | null | uer/bart-base-chinese-cluecorpussmall | 3,063 | 4 | transformers | ---
language: zh
datasets: CLUECorpusSmall
widget:
- text: "作为电子[MASK]的平台,京东绝对是领先者。如今的刘强[MASK]已经是身价过[MASK]的老板。"
---
# Chinese BART
## Model description
This model is pre-trained by [UER-py](https://github.com/dbiir/UER-py/), which is introduced in [this paper](https://arxiv.org/abs/1909.05658).
You can download the set of Chinese BART models either from the [UER-py Modelzoo page](https://github.com/dbiir/UER-py/wiki/Modelzoo), or via HuggingFace from the links below:
| | Link |
| ----------------- | :----------------------------: |
| **BART-Base** | [**L=6/H=768 (Base)**][base] |
| **BART-Large** | [**L=12/H=1024 (Large)**][large] |
## How to use
You can use this model directly with a pipeline for text2text generation (take the case of BART-Base):
```python
>>> from transformers import BertTokenizer, BartForConditionalGeneration, Text2TextGenerationPipeline
>>> tokenizer = BertTokenizer.from_pretrained("uer/bart-base-chinese-cluecorpussmall")
>>> model = BartForConditionalGeneration.from_pretrained("uer/bart-base-chinese-cluecorpussmall")
>>> text2text_generator = Text2TextGenerationPipeline(model, tokenizer)
>>> text2text_generator("中国的首都是[MASK]京", max_length=50, do_sample=False)
[{'generated_text': '中 国 的 首 都 是 北 京'}]
```
## Training data
[CLUECorpusSmall](https://github.com/CLUEbenchmark/CLUECorpus2020/) is used as training data.
## Training procedure
The model is pre-trained by [UER-py](https://github.com/dbiir/UER-py/) on [Tencent Cloud](https://cloud.tencent.com/). We pre-train 1,000,000 steps with a sequence length of 512.
Taking the case of BART-Base as an example:
```
python3 preprocess.py --corpus_path corpora/cluecorpussmall.txt \
--vocab_path models/google_zh_vocab.txt \
--dataset_path cluecorpussmall_bart_seq512_dataset.pt \
--processes_num 32 --seq_length 512 \
--data_processor bart
```
```
python3 pretrain.py --dataset_path cluecorpussmall_bart_seq512_dataset.pt \
--vocab_path models/google_zh_vocab.txt \
--config_path models/bart/base_config.json \
--output_model_path models/cluecorpussmall_bart_base_seq512_model.bin \
--world_size 8 --gpu_ranks 0 1 2 3 4 5 6 7 \
--total_steps 1000000 --save_checkpoint_steps 100000 --report_steps 50000 \
--learning_rate 5e-5 --batch_size 8 \
--span_masking --span_max_length 3
```
Finally, we convert the pre-trained model into Huggingface's format:
```
python3 scripts/convert_bart_from_uer_to_huggingface.py --input_model_path cluecorpussmall_bart_base_seq512_model.bin-1000000 \
--output_model_path pytorch_model.bin \
--layers_num 6
```
### BibTeX entry and citation info
```
@article{lewis2019bart,
title={Bart: Denoising sequence-to-sequence pre-training for natural language generation, translation, and comprehension},
author={Lewis, Mike and Liu, Yinhan and Goyal, Naman and Ghazvininejad, Marjan and Mohamed, Abdelrahman and Levy, Omer and Stoyanov, Ves and Zettlemoyer, Luke},
journal={arXiv preprint arXiv:1910.13461},
year={2019}
}
@article{zhao2019uer,
title={UER: An Open-Source Toolkit for Pre-training Models},
author={Zhao, Zhe and Chen, Hui and Zhang, Jinbin and Zhao, Xin and Liu, Tao and Lu, Wei and Chen, Xi and Deng, Haotang and Ju, Qi and Du, Xiaoyong},
journal={EMNLP-IJCNLP 2019},
pages={241},
year={2019}
}
```
[base]:https://huggingface.co/uer/bart-base-chinese-cluecorpussmall
[large]:https://huggingface.co/uer/bart-large-chinese-cluecorpussmall | [
-0.09160938113927841,
-0.02788405679166317,
0.07121438533067703,
-0.0012630769051611423,
-0.0361480712890625,
0.0755472183227539,
-0.018255943432450294,
-0.0018830489134415984,
0.0012223231606185436,
-0.021688055247068405,
0.0864364355802536,
-0.04519790783524513,
0.03732806816697121,
0.017286183312535286,
0.06652085483074188,
0.006929123308509588,
0.032427847385406494,
0.002266921568661928,
0.041355110704898834,
-0.013235974125564098,
0.06326054781675339,
0.004623189568519592,
0.037782348692417145,
-0.05490463227033615,
-0.0026594761293381453,
-0.0694405734539032,
-0.012132842093706131,
-0.0630282536149025,
0.07756340503692627,
0.010562127456068993,
-0.0213584266602993,
0.020224783569574356,
0.07293102890253067,
0.029611743986606598,
0.04994228482246399,
0.0722116157412529,
0.08104820549488068,
0.04878198355436325,
-0.00024487634073011577,
0.04777109995484352,
0.015608157031238079,
0.04431982338428497,
-0.01457028929144144,
-0.012167710810899734,
0.08932182937860489,
-0.020531250163912773,
-0.018863495439291,
0.021808279678225517,
-0.08801876753568649,
-0.015700172632932663,
-0.07750508189201355,
-0.007509422488510609,
0.07473420351743698,
-0.00537147605791688,
0.002208857564255595,
0.013457597233355045,
0.026349375024437904,
-0.02875438705086708,
0.054409418255090714,
-0.06957563012838364,
-0.0819014459848404,
0.03508685901761055,
-0.029770122841000557,
0.01997341401875019,
-0.040306542068719864,
0.031363312155008316,
-0.0384690947830677,
0.08298464864492416,
-0.03027929738163948,
0.02577224001288414,
-0.028060482814908028,
-0.020613329485058784,
0.04419917240738869,
-0.0482332818210125,
-0.025059500709176064,
-0.10582119226455688,
0.08243197202682495,
-0.06611370295286179,
-0.025092273950576782,
-0.050902705639600754,
-0.028387807309627533,
-0.02065170928835869,
0.05452084541320801,
0.050667185336351395,
0.02193591371178627,
-0.009157104417681694,
-0.008507538586854935,
-0.01872207596898079,
0.003128145821392536,
0.0029556925874203444,
-0.011221721768379211,
-0.0576668418943882,
-0.04320378601551056,
0.01796448789536953,
-0.029743878170847893,
0.02965935319662094,
0.03909048065543175,
0.01884196512401104,
-0.05301668494939804,
0.06283173710107803,
0.07425370067358017,
0.0035438381601125,
0.06166648864746094,
-0.0887681245803833,
0.043935831636190414,
0.053111523389816284,
-0.062298379838466644,
-0.04297798499464989,
0.11483285576105118,
0.008937125094234943,
-0.046566881239414215,
-0.051580943167209625,
-0.02668916806578636,
-0.10205156356096268,
-0.028687609359622,
-0.04933050647377968,
0.0007908322149887681,
-0.039001770317554474,
0.0050998409278690815,
-0.019380783662199974,
0.04744979739189148,
0.02674189768731594,
0.0353105254471302,
0.018656810745596886,
-0.07154962420463562,
-0.0017735621659085155,
0.016042133793234825,
-2.0227600058392756e-33,
0.016693368554115295,
0.01063965167850256,
-0.008358174934983253,
-0.016989799216389656,
0.00912699569016695,
0.00542267644777894,
0.07319983094930649,
0.004509414546191692,
-0.07016213238239288,
-0.041093263775110245,
0.002072972944006324,
0.02946660853922367,
-0.14056281745433807,
0.0886591225862503,
-0.05413970351219177,
-0.00596365612000227,
-0.06746456772089005,
0.018537836149334908,
0.06347833573818207,
0.000782790535595268,
0.102253757417202,
-0.0010200466495007277,
0.004434359725564718,
-0.0881003737449646,
0.012349161319434643,
0.06907647848129272,
0.06234268471598625,
-0.10533220320940018,
-0.05531421676278114,
0.04784492030739784,
-0.08174276351928711,
0.0846058800816536,
-0.0028043428901582956,
-0.010272625833749771,
-0.08141162991523743,
-0.007620023563504219,
-0.046752333641052246,
-0.07477813959121704,
-0.03101201355457306,
-0.06726387143135071,
-0.00681333290413022,
0.008738270029425621,
0.0006147322710603476,
-0.06254876405000687,
-0.06693749129772186,
-0.013620984740555286,
0.0022019855678081512,
-0.07009430229663849,
0.012531314976513386,
-0.0008173588430508971,
0.02467826008796692,
-0.03618909418582916,
-0.0850701779127121,
0.0482095330953598,
0.006035711616277695,
-0.08580109477043152,
0.0007767865317873657,
0.02491900324821472,
0.026812303811311722,
-0.024861689656972885,
0.02870495431125164,
-0.027963705360889435,
0.05386378616094589,
0.02687452919781208,
0.044587504118680954,
0.03278703987598419,
-0.020661165937781334,
-0.012977907434105873,
0.016469823196530342,
0.018300238996744156,
-0.040757860988378525,
-0.031171146780252457,
0.006538121495395899,
0.017050622031092644,
-0.033466458320617676,
-0.04171395301818848,
-0.06509577482938766,
-0.026248574256896973,
-0.0175138209015131,
0.042742058634757996,
-0.025457115843892097,
-0.010272564366459846,
-0.028007710352540016,
0.01681791990995407,
-0.04562865197658539,
-0.014064855873584747,
0.13763397932052612,
-0.017878420650959015,
-0.013156033121049404,
-0.023616837337613106,
-0.04138410463929176,
-0.0860367938876152,
0.04277414828538895,
-0.017870184034109116,
-0.07199890911579132,
-1.426242173203059e-33,
-0.03489596024155617,
0.06383388489484787,
-0.04212641716003418,
-0.009373604319989681,
-0.06750618666410446,
-0.11076775193214417,
0.0712476298213005,
0.18250630795955658,
0.008999256417155266,
-0.01753035932779312,
-0.04641369357705116,
-0.044899530708789825,
0.05431282892823219,
-0.006701440084725618,
0.08738867193460464,
0.05843104422092438,
0.028081173077225685,
0.06998473405838013,
0.0022413507103919983,
-0.019444232806563377,
0.0675208568572998,
-0.07110142707824707,
-0.12220486998558044,
0.10223302245140076,
0.04413989931344986,
0.020461129024624825,
0.03804663196206093,
-0.006497279740869999,
-0.027524009346961975,
-0.02864277921617031,
-0.05674920231103897,
0.031080376356840134,
0.0015983518678694963,
0.09378717094659805,
-0.07922520488500595,
0.0221722349524498,
-0.052185386419296265,
0.03147697448730469,
-0.024388877674937248,
-0.007469189818948507,
0.08738096803426743,
-0.023867156356573105,
-0.05592723563313484,
0.04844645783305168,
-0.01784880831837654,
0.014193209819495678,
-0.12129106372594833,
-0.020161012187600136,
0.015926741063594818,
-0.07034420222043991,
0.0046648187562823296,
0.002866952447220683,
-0.061579760164022446,
-0.06314124166965485,
-0.12140009552240372,
-0.03596964105963707,
0.04005119577050209,
-0.029564375057816505,
-0.05405149981379509,
-0.07017537206411362,
-0.07510019838809967,
-0.0059370924718678,
-0.005222553852945566,
0.06205184385180473,
0.010243081487715244,
-0.04519195854663849,
0.035164400935173035,
-0.011250904761254787,
-0.01616959646344185,
-0.05149183049798012,
0.036371972411870956,
0.09057112038135529,
0.04711072891950607,
0.06284314393997192,
0.0317816361784935,
0.026929263025522232,
-0.018284624442458153,
-0.03719661757349968,
0.004618807230144739,
-0.05272853001952171,
-0.01395784318447113,
-0.05969653278589249,
0.11796668916940689,
0.04487783834338188,
-0.010512088425457478,
0.015966396778821945,
-0.045970410108566284,
0.060383234173059464,
0.032705243676900864,
0.017712794244289398,
-0.0010890050325542688,
0.02207341231405735,
-0.023652473464608192,
0.04590798169374466,
0.005662001669406891,
-4.7404448366705765e-8,
-0.05558279901742935,
-0.06985776126384735,
0.014273496344685555,
-0.008576683700084686,
-0.1549677848815918,
0.002566693117842078,
0.004951844923198223,
-0.030551305040717125,
0.002830065321177244,
-0.021718665957450867,
0.057932425290346146,
0.06613753736019135,
-0.059468045830726624,
0.05591071397066116,
-0.04184267297387123,
0.04363025352358818,
0.026166105642914772,
0.06219201534986496,
-0.01488490030169487,
-0.02284654602408409,
0.02742859348654747,
0.04913126304745674,
0.06441696733236313,
-0.008497880771756172,
-0.013402332551777363,
-0.051980651915073395,
-0.13025067746639252,
0.07006049156188965,
-0.038366612046957016,
-0.024136412888765335,
-0.0017906520515680313,
-0.016165195032954216,
-0.04131676256656647,
0.0024620480835437775,
0.06548815220594406,
0.09009777754545212,
-0.05836551636457443,
-0.10164288431406021,
-0.014237585477530956,
0.04726499319076538,
0.06730661541223526,
-0.022873563691973686,
-0.02434351295232773,
-0.023816311731934547,
0.13001714646816254,
0.021556345745921135,
0.03057565726339817,
-0.1131574735045433,
0.0880676880478859,
0.04665597900748253,
0.00043984747026115656,
-0.03509930893778801,
0.0005829600850120187,
-0.07576209306716919,
-0.044217802584171295,
0.03524484857916832,
-0.07878077030181885,
0.0037351909559220076,
0.04433329775929451,
-0.0440676175057888,
0.032756365835666656,
0.07468773424625397,
0.036858391016721725,
0.07298166304826736
] |
gagan3012/k2t | 57b9e3132e50734633ce283fdc96e463837b6cb6 | 2021-09-22T08:27:36.000Z | [
"pytorch",
"t5",
"text2text-generation",
"en",
"dataset:WebNLG",
"dataset:Dart",
"transformers",
"keytotext",
"k2t",
"Keywords to Sentences",
"license:mit",
"autotrain_compatible"
] | text2text-generation | false | gagan3012 | null | gagan3012/k2t | 3,054 | null | transformers | ---
language: en
thumbnail: Keywords to Sentences
tags:
- keytotext
- k2t
- Keywords to Sentences
license: mit
datasets:
- WebNLG
- Dart
metrics:
- NLG
---
# keytotext

The idea is to build a model that takes keywords as input and generates sentences as output.
### Keytotext is powered by Huggingface 🤗
[](https://pypi.org/project/keytotext/)
[](https://pepy.tech/project/keytotext)
[](https://colab.research.google.com/github/gagan3012/keytotext/blob/master/Examples/K2T.ipynb)
[](https://share.streamlit.io/gagan3012/keytotext/UI/app.py)
## Model:
Keytotext is based on the Amazing T5 Model:
- `k2t`: [Model](https://huggingface.co/gagan3012/k2t)
- `k2t-tiny`: [Model](https://huggingface.co/gagan3012/k2t-tiny)
- `k2t-base`: [Model](https://huggingface.co/gagan3012/k2t-base)
Training Notebooks can be found in the [`Training Notebooks`](https://github.com/gagan3012/keytotext/tree/master/Training%20Notebooks) Folder
## Usage:
Example usage: [](https://colab.research.google.com/github/gagan3012/keytotext/blob/master/Examples/K2T.ipynb)
Example Notebooks can be found in the [`Notebooks`](https://github.com/gagan3012/keytotext/tree/master/Examples) Folder
```
pip install keytotext
```
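After installing, a minimal usage sketch might look like the following; it assumes the `pipeline` helper exposed by the keytotext package, and the keywords are illustrative:
```py
# Assumes the keytotext package's pipeline helper; see the project README.
from keytotext import pipeline

# Load the base k2t model from this repo (gagan3012/k2t).
nlp = pipeline("k2t")

# Generate a sentence from a list of keywords (keywords are illustrative).
print(nlp(["India", "wedding", "Food"]))
```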

## UI:
UI: [](https://share.streamlit.io/gagan3012/keytotext/UI/app.py)
```
pip install streamlit-tags
```
This uses a custom streamlit component built by me: [GitHub](https://github.com/gagan3012/streamlit-tags)

| [
-0.08639142662286758,
0.014906492084264755,
0.010412236675620079,
-0.01271976437419653,
0.026354726403951645,
0.06640716642141342,
0.07711213827133179,
-0.03710361197590828,
0.00682399608194828,
-0.028035085648298264,
0.03174244239926338,
-0.04752771928906441,
0.1264314353466034,
0.006841250229626894,
0.06286109238862991,
0.03985648602247238,
-0.05546389892697334,
0.06303709000349045,
-0.06474361568689346,
-0.0711473748087883,
0.1088334396481514,
-0.00546354241669178,
0.0860673263669014,
-0.0468708761036396,
-0.048332568258047104,
0.08534009009599686,
-0.003937178757041693,
0.005800290498882532,
0.048420585691928864,
0.005396752618253231,
0.04088176414370537,
-0.0025084586814045906,
0.02825705148279667,
0.08293388038873672,
-0.01325206458568573,
0.04216558858752251,
-0.0659041777253151,
0.059870924800634384,
-0.03204542398452759,
-0.04462602734565735,
-0.019015703350305557,
-0.05116875842213631,
-0.03041761927306652,
0.012231848202645779,
0.1308668702840805,
-0.01792226918041706,
-0.10773609578609467,
-0.002116328338161111,
-0.03400535508990288,
0.013980276882648468,
-0.13680700957775116,
-0.05363713949918747,
0.007432752754539251,
-0.043879155069589615,
-0.002759758848696947,
0.025069406256079674,
-0.0013191591715440154,
-0.09085962176322937,
0.02003588154911995,
-0.08409114927053452,
0.04734103009104729,
0.017652859911322594,
-0.030232977122068405,
-0.012949148193001747,
-0.0011071407934650779,
0.009097675792872906,
0.032879263162612915,
0.08224430680274963,
0.025736847892403603,
0.03742239996790886,
-0.02683105133473873,
-0.015508539974689484,
0.02624564804136753,
-0.07741151005029678,
-0.029482852667570114,
0.02371688187122345,
-0.0013477443717420101,
-0.039731256663799286,
0.02316761575639248,
-0.054824311286211014,
-0.025038830935955048,
-0.06584291160106659,
0.06216134503483772,
0.06307815760374069,
0.07547388970851898,
0.044223133474588394,
0.029411612078547478,
-0.03735700249671936,
-0.050153013318777084,
0.030053535476326942,
-0.0355861522257328,
-0.10896333307027817,
0.06885257363319397,
0.024620287120342255,
-0.009257667697966099,
0.04677562043070793,
-0.0008488744497299194,
-0.09601826965808868,
-0.04445844143629074,
0.08701155334711075,
-0.04357835277915001,
-0.004693223629146814,
-0.030557356774806976,
-0.12475904822349548,
0.02905714325606823,
0.011535780504345894,
0.02653435803949833,
-0.03621869161725044,
0.010383561253547668,
-0.025888897478580475,
-0.014759916812181473,
0.018820639699697495,
-0.08048263937234879,
-0.03979982063174248,
0.008160502649843693,
0.00433713523671031,
-0.013368592597544193,
-0.020600538700819016,
0.11127343773841858,
0.06532176584005356,
0.04679478704929352,
0.046499453485012054,
-0.10396476089954376,
0.02215898036956787,
-0.06221260502934456,
-0.058215633034706116,
0.02164292521774769,
7.138083650876767e-33,
0.08165940642356873,
0.04377109184861183,
0.032828353345394135,
-0.026942742988467216,
0.02609930746257305,
0.004239984788000584,
-0.05082085728645325,
-0.049057383090257645,
-0.06363600492477417,
-0.03375815972685814,
-0.02447802759706974,
-0.005428798962384462,
-0.05605531111359596,
0.05478779226541519,
0.035522591322660446,
-0.07224803417921066,
-0.06185455992817879,
0.05376013368368149,
0.04374661296606064,
0.009800189174711704,
-0.02253848873078823,
-0.014272144064307213,
0.02233794890344143,
-0.06545137614011765,
-0.04998644068837166,
0.09638746082782745,
0.04737352952361107,
-0.005936041008681059,
-0.07856500148773193,
0.022361911833286285,
-0.03428089991211891,
-0.04551931470632553,
0.0333954393863678,
-0.007709038909524679,
-0.03694332018494606,
-0.054258059710264206,
0.011447537690401077,
-0.04062134400010109,
-0.004414423834532499,
-0.027484919875860214,
-0.0372978039085865,
-0.03816176950931549,
0.02336251549422741,
-0.09500344097614288,
-0.0702924057841301,
0.02944764867424965,
-0.006524689495563507,
0.04868096485733986,
0.03005683235824108,
0.012318439781665802,
-0.012961525470018387,
0.056270401924848557,
-0.0439610593020916,
-0.019840674474835396,
-0.02973402664065361,
-0.008089662529528141,
0.06244231387972832,
0.018684716895222664,
0.056635551154613495,
-0.041110847145318985,
-0.048059482127428055,
0.025948531925678253,
0.14732445776462555,
0.0000865792972035706,
0.07137967646121979,
0.02678299881517887,
-0.007789393421262503,
-0.016056625172495842,
0.033922579139471054,
-0.03782181441783905,
-0.044209375977516174,
0.022529324516654015,
-0.01969945803284645,
0.003272020723670721,
0.04173143208026886,
-0.019581133499741554,
0.031692150980234146,
-0.032778300344944,
-0.020563218742609024,
0.03460006043314934,
0.0024613486602902412,
-0.017160924151539803,
0.023876158520579338,
-0.01922094263136387,
0.0409889817237854,
0.014029835350811481,
0.02223276160657406,
-0.11361486464738846,
-0.0067470078356564045,
-0.01616556942462921,
-0.06306073069572449,
-0.02759343571960926,
0.005768440663814545,
-0.10022852569818497,
-0.0500975102186203,
-7.689741680349728e-33,
0.08285751938819885,
0.045189667493104935,
-0.04307846352458,
0.05306718870997429,
0.06619368493556976,
-0.026732420548796654,
-0.0060609327629208565,
0.07364170253276825,
0.0037911233957856894,
-0.011012732982635498,
-0.044700223952531815,
0.018648765981197357,
0.02073138765990734,
-0.06604012846946716,
0.04858747497200966,
-0.017055682837963104,
0.031388260424137115,
0.04583945870399475,
0.038023363798856735,
0.05721542239189148,
0.06657705456018448,
0.015284724533557892,
-0.07683469355106354,
0.07698831707239151,
-0.06927430629730225,
0.03388653323054314,
0.04741756245493889,
0.05949903652071953,
-0.00776034826412797,
-0.02988039329648018,
0.061599940061569214,
0.005319260526448488,
-0.0944753885269165,
0.017981290817260742,
-0.09099384397268295,
-0.0606733113527298,
0.013619611039757729,
-0.004995743278414011,
-0.0374947227537632,
0.12027011066675186,
0.10497893393039703,
-0.00027579101151786745,
-0.06522159278392792,
-0.038387224078178406,
-0.05424903333187103,
-0.025606943294405937,
-0.056827329099178314,
-0.01381139550358057,
-0.0007843022467568517,
-0.00991485733538866,
0.052909042686223984,
-0.0007022452191449702,
-0.09610460698604584,
-0.028180589899420738,
-0.07151950895786285,
-0.042124684900045395,
-0.014913707971572876,
-0.03815792128443718,
-0.022183427587151527,
0.0009679138311184943,
-0.08082763850688934,
-0.06357615441083908,
0.0459003746509552,
-0.003148420248180628,
0.06453397125005722,
-0.09868398308753967,
-0.029137227684259415,
-0.00026877890923060477,
-0.036904022097587585,
-0.030875492841005325,
0.04050855711102486,
0.005043954588472843,
0.04632300138473511,
0.10080903023481369,
0.01946915313601494,
0.020002348348498344,
0.018077172338962555,
0.005044457968324423,
0.09145309031009674,
-0.09184250235557556,
0.024112533777952194,
-0.023932302370667458,
0.07492684572935104,
0.039176132529973984,
0.055823516100645065,
0.019668851047754288,
-0.010771951638162136,
0.08510751277208328,
0.07996337115764618,
-0.011272347532212734,
-0.00024353618209715933,
0.057283833622932434,
0.07200218737125397,
0.09569545090198517,
0.06160005182027817,
-6.411641351178332e-8,
-0.060616135597229004,
0.020171891897916794,
-0.06969981640577316,
0.045233871787786484,
-0.11351776868104935,
-0.02403872273862362,
0.012077449820935726,
-0.05854114517569542,
0.036854855716228485,
-0.10971222072839737,
0.1005786880850792,
0.021850991994142532,
-0.08360321074724197,
-0.014830535277724266,
-0.043762076646089554,
0.10800990462303162,
-0.05784069374203682,
0.06558817625045776,
-0.0012842576252296567,
-0.06599776446819305,
0.033443253487348557,
0.038804080337285995,
-0.051198866218328476,
0.011476757936179638,
0.016107069328427315,
0.03632146865129471,
-0.06946207582950592,
0.07858014851808548,
-0.0031273802742362022,
-0.023315606638789177,
0.052064307034015656,
-0.010627693496644497,
-0.0032497388310730457,
-0.024157429113984108,
0.011110049672424793,
0.018170764669775963,
-0.016466716304421425,
-0.10634341090917587,
0.04818892106413841,
0.06968441605567932,
0.04789314791560173,
-0.004466956946998835,
-0.10023285448551178,
-0.012688787654042244,
-0.006287903990596533,
0.045384541153907776,
0.057408031076192856,
-0.06705500930547714,
0.034485939890146255,
-0.00927425641566515,
-0.03377991542220116,
-0.013811933808028698,
-0.05954139679670334,
-0.0763753354549408,
0.010272162966430187,
0.06553521007299423,
0.022431630641222,
0.043839119374752045,
0.09331068396568298,
0.0193606186658144,
0.05134516581892967,
0.0036889745388180017,
-0.03102853149175644,
0.03773566707968712
] |
castorini/tct_colbert-v2-hnp-msmarco | 3b46a821282996e0ada304e4bcc5d659712972a8 | 2021-08-12T01:05:56.000Z | [
"pytorch",
"bert",
"feature-extraction",
"transformers"
] | feature-extraction | false | castorini | null | castorini/tct_colbert-v2-hnp-msmarco | 3,050 | null | transformers | This model reproduces a variant of the TCT-ColBERT-V2 dense retrieval models described in the following paper:
> Sheng-Chieh Lin, Jheng-Hong Yang, and Jimmy Lin. [In-Batch Negatives for Knowledge Distillation with Tightly-Coupled Teachers for Dense Retrieval.](https://cs.uwaterloo.ca/~jimmylin/publications/Lin_etal_2021_RepL4NLP.pdf) _RepL4NLP 2021_.
You can find our reproduction report in Pyserini [here](https://github.com/castorini/pyserini/blob/master/docs/experiments-tct_colbert-v2.md).
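For quick experimentation, the snippet below is a minimal sketch that treats the checkpoint as a plain `transformers` BERT encoder with simple mean pooling; the query/passage markers and the exact pooling used for the paper's results follow the Pyserini reproduction guide linked above, so the pooling here is an illustrative assumption.
```python
import torch
from transformers import AutoTokenizer, AutoModel

model_id = "castorini/tct_colbert-v2-hnp-msmarco"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModel.from_pretrained(model_id)

texts = [
    "what is dense retrieval?",
    "Dense retrieval encodes queries and passages as vectors and ranks by similarity.",
]
inputs = tokenizer(texts, padding=True, truncation=True, return_tensors="pt")
with torch.no_grad():
    hidden = model(**inputs).last_hidden_state  # (batch, seq_len, hidden_size)

# Illustrative mean pooling over non-padding tokens (assumption, not the
# paper's exact procedure; see the Pyserini docs for the official encoders).
mask = inputs["attention_mask"].unsqueeze(-1).float()
embeddings = (hidden * mask).sum(dim=1) / mask.sum(dim=1)
score = embeddings[0] @ embeddings[1]  # dot-product relevance score
print(float(score))
```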
| [
-0.06552200764417648,
0.000058389607147546485,
0.027566729113459587,
0.02865372784435749,
-0.0338166318833828,
-0.02888583205640316,
0.009454486891627312,
-0.026040788739919662,
0.019496437162160873,
-0.025860048830509186,
-0.03151804953813553,
0.00042619623127393425,
0.07251160591840744,
0.03885521739721298,
-0.039182644337415695,
0.0056867096573114395,
0.14014050364494324,
0.0000892202733666636,
-0.038582026958465576,
-0.08106247335672379,
-0.030217504128813744,
0.03647250309586525,
0.013598630204796791,
-0.05549664795398712,
-0.054617639631032944,
-0.08698343485593796,
-0.09954982995986938,
-0.13087674975395203,
0.06066317483782768,
-0.091403529047966,
0.02857282944023609,
0.04080758988857269,
-0.08862943202257156,
0.08369533717632294,
-0.03175317123532295,
0.04657581448554993,
-0.07859865576028824,
0.06276510655879974,
0.020139213651418686,
0.0797201544046402,
0.038443367928266525,
0.1104363426566124,
-0.08548920601606369,
0.030856210738420486,
0.055233292281627655,
-0.0240398570895195,
-0.016426794230937958,
-0.03263535723090172,
0.0012003992451354861,
-0.049025461077690125,
-0.04402880743145943,
0.055912572890520096,
-0.001299801398999989,
0.04434642195701599,
0.023782258853316307,
-0.0006296649225987494,
0.008849818259477615,
-0.0032210249919444323,
-0.0968170166015625,
-0.03599094972014427,
0.02963446080684662,
-0.0628385841846466,
-0.11139549314975739,
-0.05552215874195099,
0.09259888529777527,
0.006672578398138285,
-0.025966007262468338,
0.09266193211078644,
0.04476410150527954,
-0.018047627061605453,
-0.06205762177705765,
0.09195561707019806,
-0.08898194879293442,
-0.06442049890756607,
0.018693359568715096,
0.06678235530853271,
0.04093093052506447,
-0.013650004751980305,
-0.021542277187108994,
-0.03862485662102699,
-0.018625227734446526,
-0.06369225680828094,
0.03594082221388817,
-0.10047119855880737,
0.057608578354120255,
-0.05608290433883667,
-0.00059797108406201,
-0.04988684877753258,
0.05793888121843338,
-0.016841435804963112,
0.03279927745461464,
0.019128425046801567,
-0.019889766350388527,
-0.04313817620277405,
0.009939586743712425,
0.015844576060771942,
0.09965202212333679,
-0.0107114901766181,
0.03008376806974411,
0.10497894883155823,
-0.01799013651907444,
0.10488715022802353,
0.02080347016453743,
-0.09579846262931824,
0.028302278369665146,
-0.029966210946440697,
-0.008418013341724873,
0.05257394537329674,
0.028826627880334854,
-0.0876169353723526,
-0.00013527952251024544,
0.07789786159992218,
0.03105873242020607,
-0.028638148680329323,
-0.04308353736996651,
-0.015051915310323238,
-0.008353689685463905,
0.03738444298505783,
-0.010693114250898361,
-0.03097233921289444,
-0.021693943068385124,
-0.014884384348988533,
-0.03822555020451546,
-0.009253693744540215,
-0.05684266239404678,
0.0031433466356247663,
-0.02921431139111519,
2.8916047984014838e-33,
0.00639967480674386,
0.022496938705444336,
0.022362366318702698,
0.006077449303120375,
0.028782395645976067,
0.014448706991970539,
0.04428304731845856,
-0.03817877173423767,
-0.071678526699543,
-0.0013012213166803122,
-0.02142452634871006,
0.01375502161681652,
-0.07261835038661957,
0.04568508267402649,
-0.0325024388730526,
-0.0833761915564537,
-0.060363177210092545,
0.06228477880358696,
-0.019246824085712433,
-0.0389782190322876,
0.09231830388307571,
0.0349465012550354,
-0.007389503996819258,
-0.09399697929620743,
-0.007289293687790632,
-0.068391352891922,
0.02439524047076702,
0.016430364921689034,
-0.06194885075092316,
0.017210692167282104,
-0.05813579633831978,
0.054756078869104385,
0.018292857334017754,
0.07085178792476654,
-0.046551551669836044,
-0.011709166690707207,
-0.01053999736905098,
-0.022297710180282593,
0.04990213364362717,
-0.07927227765321732,
0.02410275861620903,
0.04029271379113197,
0.08067166060209274,
-0.060791898518800735,
-0.13009697198867798,
-0.047061771154403687,
0.07571867853403091,
0.00891770701855421,
0.01882002502679825,
-0.01159472856670618,
0.024271266534924507,
-0.00787921529263258,
-0.0750865787267685,
-0.038768917322158813,
0.07034046947956085,
0.009606078267097473,
0.09102720767259598,
0.06440087407827377,
0.0685427114367485,
0.09175073355436325,
0.04232662543654442,
0.01924450509250164,
0.00008471695036860183,
0.005925728008151054,
0.050814852118492126,
0.00025734564405865967,
-0.1018020287156105,
0.011106966994702816,
0.1043127104640007,
0.019839005544781685,
0.010518732480704784,
0.0520947203040123,
0.05515294522047043,
-0.14775767922401428,
0.12049369513988495,
-0.10830747336149216,
-0.01613825373351574,
-0.10747577995061874,
0.033479269593954086,
-0.04715580493211746,
-0.010051899589598179,
-0.1369124948978424,
0.008689514361321926,
-0.0417947992682457,
-0.10082445293664932,
-0.004175179172307253,
0.016665248200297356,
-0.09752865135669708,
-0.010352354496717453,
-0.09748473763465881,
-0.028001055121421814,
0.06587488204240799,
0.00834812130779028,
0.01049806922674179,
0.049701984971761703,
-2.3968697155251074e-33,
-0.008683137595653534,
-0.01574787311255932,
-0.017224902287125587,
0.08855876326560974,
0.041075561195611954,
-0.035281699150800705,
-0.015126668848097324,
0.023379290476441383,
-0.05358194187283516,
-0.04372020065784454,
0.017849590629339218,
0.030899014323949814,
-0.02576831355690956,
-0.0016038573812693357,
0.008993654511868954,
-0.011466111056506634,
0.0791444331407547,
-0.03660322353243828,
-0.007903357967734337,
0.02263760194182396,
-0.017694778740406036,
-0.0405568853020668,
-0.11616618186235428,
0.04751692712306976,
0.07362685352563858,
0.022656945511698723,
-0.009610106237232685,
0.03545993193984032,
-0.013352676294744015,
0.03212137892842293,
-0.0063040852546691895,
0.016554147005081177,
0.011569014750421047,
0.02773820422589779,
-0.048447754234075546,
0.005825436674058437,
0.06626954674720764,
0.04848959296941757,
-0.0696960836648941,
0.08321774750947952,
-0.037075821310281754,
0.029842378571629524,
-0.06923579424619675,
0.0030368573497980833,
0.030977461487054825,
0.06122254580259323,
-0.06579910963773727,
0.0021987806539982557,
0.11733733117580414,
0.042295847088098526,
0.06020481511950493,
-0.008553546853363514,
-0.0021327140275388956,
0.08353278785943985,
-0.060970231890678406,
0.04415334761142731,
-0.01157286949455738,
-0.05245355889201164,
0.017823580652475357,
-0.01030973345041275,
-0.02390805445611477,
-0.028628000989556313,
0.0008810880244709551,
-0.03306811675429344,
0.024756355211138725,
-0.04513293877243996,
0.012352077290415764,
0.03941994532942772,
0.0003296479117125273,
0.018269674852490425,
-0.00979618914425373,
0.0005954161169938743,
0.05492573231458664,
-0.05705109238624573,
0.039036739617586136,
0.03658609464764595,
0.04649624973535538,
0.0210450179874897,
-0.030133437365293503,
0.005614611320197582,
-0.08140776306390762,
0.04462121054530144,
0.06080979108810425,
0.030209308490157127,
0.02990819327533245,
0.010438752360641956,
-0.03199005499482155,
-0.0014156802790239453,
-0.018432749435305595,
0.03961370140314102,
0.007654346060007811,
-0.049490492790937424,
0.03764679655432701,
0.048591192811727524,
0.09532391279935837,
-4.7900286404001235e-8,
-0.04227442666888237,
-0.05907237529754639,
-0.08861266076564789,
0.04430428519845009,
0.03961068019270897,
-0.0345374159514904,
0.0067137423902750015,
0.04890606552362442,
-0.06282272189855576,
0.015599899925291538,
-0.0035378688480705023,
0.020445087924599648,
-0.0236747357994318,
-0.05163266509771347,
0.09336017817258835,
0.05905446037650108,
0.04985262081027031,
0.039021968841552734,
-0.030904991552233696,
-0.0028118693735450506,
0.07961538434028625,
-0.008738324046134949,
0.09299842268228531,
-0.02026420086622238,
0.0021845558658242226,
0.005380804650485516,
-0.02619870938360691,
0.04581069201231003,
0.0471164733171463,
0.04405439645051956,
0.007289363071322441,
0.008326039649546146,
-0.002049881499260664,
-0.0329008623957634,
0.04138391464948654,
0.10513217002153397,
-0.08390583097934723,
-0.010781912133097649,
-0.020915551111102104,
0.037238046526908875,
0.005243427585810423,
-0.007307755295187235,
-0.009507620707154274,
0.037783026695251465,
0.04431367293000221,
-0.014659078791737556,
-0.055872295051813126,
-0.00032391175045631826,
0.08571166545152664,
0.06982062011957169,
0.031489577144384384,
0.020075833424925804,
-0.03195672109723091,
0.005755839869379997,
0.03893188759684563,
0.04724116623401642,
-0.054790642112493515,
-0.033809415996074677,
-0.005703093484044075,
-0.0774904265999794,
0.09286750853061676,
-0.016679147258400917,
-0.007359853480011225,
-0.0012526011560112238
] |
hfl/chinese-pert-base | 54f84f9b553c9184d92e1d476010299aac42cf86 | 2022-02-24T02:57:09.000Z | [
"pytorch",
"tf",
"bert",
"feature-extraction",
"zh",
"transformers",
"license:cc-by-nc-sa-4.0"
] | feature-extraction | false | hfl | null | hfl/chinese-pert-base | 3,043 | 4 | transformers | ---
language:
- zh
license: "cc-by-nc-sa-4.0"
---
# Please use BERT-related functions (e.g., `BertModel`) to load this model!
Under construction...
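A minimal loading sketch with the standard `transformers` BERT classes; the example sentence and the printed shape are illustrative assumptions, not official examples.
```python
from transformers import BertTokenizer, BertModel

tokenizer = BertTokenizer.from_pretrained("hfl/chinese-pert-base")
model = BertModel.from_pretrained("hfl/chinese-pert-base")

# Any Chinese sentence works here; this one is only an illustrative input.
inputs = tokenizer("欢迎使用 PERT 模型", return_tensors="pt")
outputs = model(**inputs)
print(outputs.last_hidden_state.shape)  # e.g. torch.Size([1, seq_len, 768])
```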
Please visit our GitHub repo for more information: https://github.com/ymcui/PERT | [
-0.12923598289489746,
-0.018512636423110962,
-0.0004741573939099908,
-0.013131977058947086,
-0.0017897309735417366,
0.0610409751534462,
0.011662166565656662,
0.047987572848796844,
0.013258756138384342,
0.02254491113126278,
0.07376112788915634,
-0.08120303601026535,
-0.0025661978870630264,
0.06571333855390549,
0.03673092648386955,
0.11702848970890045,
-0.003589663887396455,
-0.008796547539532185,
0.05659417062997818,
-0.0037628458812832832,
0.02817552350461483,
0.07863412797451019,
0.03087080456316471,
-0.029272951185703278,
0.01802046410739422,
-0.061344392597675323,
-0.05752941220998764,
0.030810218304395676,
0.06944942474365234,
0.02108507975935936,
0.03207305818796158,
0.047584086656570435,
0.07329251617193222,
0.09064754843711853,
0.10156797617673874,
0.03984937071800232,
-0.004934605211019516,
-0.06367447972297668,
-0.03237669914960861,
0.023529628291726112,
-0.016288455575704575,
0.006110745016485453,
0.004631986375898123,
-0.04063720256090164,
0.08338017016649246,
-0.029715070500969887,
0.008410758338868618,
0.00030456349486485124,
-0.028721123933792114,
-0.05830256640911102,
-0.06570542603731155,
-0.050039030611515045,
0.01101426500827074,
-0.03195278346538544,
0.025020593777298927,
-0.002971215872094035,
-0.05324367806315422,
-0.042424507439136505,
-0.02878481335937977,
-0.09378588199615479,
-0.06265624612569809,
0.011388624086976051,
-0.047671351581811905,
0.013011740520596504,
-0.007165675982832909,
0.06861501187086105,
-0.05718676745891571,
0.04518861696124077,
-0.050100985914468765,
-0.04130970686674118,
-0.027679190039634705,
-0.05787593126296997,
0.022908514365553856,
0.0077895973809063435,
-0.0013560460647568107,
-0.005701874382793903,
0.08378371596336365,
-0.06522887945175171,
0.04636232182383537,
-0.09042834490537643,
-0.04568847641348839,
0.023911412805318832,
0.06310350447893143,
0.05342746898531914,
0.08351384103298187,
0.05255836248397827,
0.010434398427605629,
-0.036133524030447006,
-0.009853018447756767,
-0.02874349243938923,
-0.02830456756055355,
-0.050200991332530975,
-0.02817036770284176,
0.0182985607534647,
-0.09047749638557434,
-0.005318205803632736,
0.06777548789978027,
-0.02316160872578621,
-0.02823624573647976,
0.08525726199150085,
-0.03257032111287117,
0.06494150310754776,
0.10050229728221893,
-0.04099456965923309,
-0.006441172678023577,
0.07416383922100067,
0.05484291538596153,
0.0010504620149731636,
0.004097574856132269,
-0.0608861930668354,
-0.06116409972310066,
0.010005215182900429,
-0.03707411140203476,
-0.10693453252315521,
-0.007982387207448483,
-0.031170787289738655,
-0.03684964030981064,
0.018435468897223473,
0.06234508007764816,
-0.014394786208868027,
-0.05484136566519737,
-0.013802285306155682,
-0.10943259298801422,
0.055558942258358,
-0.045901354402303696,
-0.014244657009840012,
0.027120014652609825,
3.550217706860155e-34,
0.003796109464019537,
0.031640902161598206,
0.01812124438583851,
0.017713233828544617,
0.03904947638511658,
0.029641808941960335,
0.04439457505941391,
-0.0503147654235363,
-0.10066010802984238,
-0.009691298939287663,
-0.006512122228741646,
-0.054155439138412476,
-0.10133186727762222,
0.013887189328670502,
-0.09561844170093536,
-0.021278951317071915,
0.006080287508666515,
0.01884954608976841,
0.06625881791114807,
0.0033359124790877104,
0.08742926269769669,
0.06097802519798279,
-0.010062388144433498,
-0.08843550831079483,
-0.028843989595770836,
0.07341750711202621,
0.11665353178977966,
-0.11620746552944183,
0.0047335876151919365,
0.04707707464694977,
-0.0032456088811159134,
0.10415321588516235,
-0.03058914840221405,
-0.05745863541960716,
-0.002662905026227236,
-0.03151887655258179,
-0.008292271755635738,
-0.004424066282808781,
-0.004894298501312733,
-0.04087227210402489,
0.014097226783633232,
0.03818047419190407,
-0.042046722024679184,
-0.05695146694779396,
-0.023209845647215843,
-0.008551491424441338,
0.04955184832215309,
0.08574365079402924,
0.06293590366840363,
0.010278352536261082,
-0.0013906335225328803,
0.001184638706035912,
-0.1276485174894333,
0.054845813661813736,
-0.007343862671405077,
-0.058528680354356766,
0.018381772562861443,
0.05907537043094635,
0.06029075011610985,
-0.020161177963018417,
0.009740151464939117,
-0.0064237527549266815,
-0.022737642750144005,
-0.03687125816941261,
0.0988253504037857,
-0.056154973804950714,
-0.09784182161092758,
-0.06011875718832016,
0.06522849202156067,
0.061826061457395554,
-0.027300791814923286,
-0.016376323997974396,
0.049501676112413406,
0.03726595640182495,
0.025391949340701103,
-0.07592149078845978,
-0.038231298327445984,
-0.050794824957847595,
0.08570615947246552,
-0.028358664363622665,
-0.08885154128074646,
0.02970464900135994,
-0.04872675985097885,
-0.03691519424319267,
0.04940095916390419,
-0.009274200536310673,
0.03899942338466644,
-0.057125747203826904,
-0.0303916335105896,
-0.009603639133274555,
0.05738584324717522,
-0.10349065065383911,
0.03733550012111664,
-0.00753966486081481,
-0.11376528441905975,
-1.1264260023765997e-33,
-0.05494670569896698,
0.05986429750919342,
-0.09394079446792603,
-0.005748421419411898,
-0.03913240507245064,
-0.11569488793611526,
0.034342240542173386,
0.11699915677309036,
0.029503535479307175,
-0.0031792782247066498,
0.009191400371491909,
-0.06416063010692596,
0.04630282148718834,
-0.017700353637337685,
0.14671777188777924,
0.03231849521398544,
-0.02827352285385132,
0.02354155108332634,
-0.018636386841535568,
0.044457659125328064,
-0.021375538781285286,
0.016879495233297348,
-0.029238320887088776,
0.066282719373703,
0.010106551460921764,
0.033863384276628494,
0.012503995560109615,
0.08182650804519653,
0.03761724382638931,
-0.01751866564154625,
-0.02235371433198452,
0.050651587545871735,
-0.08327173441648483,
0.037968579679727554,
-0.0982799381017685,
-0.022932177409529686,
0.014380873180925846,
0.08619602024555206,
-0.015197870321571827,
-0.01473687868565321,
0.08115525543689728,
-0.01874224655330181,
-0.11842580884695053,
0.01135655865073204,
0.018087318167090416,
0.034367404878139496,
-0.017772739753127098,
-0.042322833091020584,
0.0223257876932621,
-0.06571101397275925,
0.02162647433578968,
-0.09149239957332611,
0.011906792409718037,
-0.06916426122188568,
-0.061213284730911255,
0.018311677500605583,
0.06029774248600006,
-0.043052420020103455,
0.005529648624360561,
-0.08133391290903091,
-0.0756162777543068,
-0.029952386394143105,
0.059543427079916,
-0.01741541177034378,
0.040672264993190765,
-0.05516893044114113,
-0.04159339889883995,
0.020513925701379776,
0.030061520636081696,
-0.05955956503748894,
0.004528730176389217,
0.07789464294910431,
0.013474411331117153,
-0.04265544191002846,
-0.03452587127685547,
-0.0166045892983675,
-0.032741811126470566,
-0.041525550186634064,
0.040414515882730484,
-0.0074431574903428555,
-0.035023629665374756,
0.026660777628421783,
0.10141996294260025,
0.07465779781341553,
-0.03603312373161316,
0.01767299696803093,
0.08698553591966629,
0.051261767745018005,
0.004379456862807274,
0.02462385967373848,
-0.008936458267271519,
0.10388224571943283,
0.0376911386847496,
0.1361260563135147,
0.023234285414218903,
-4.014469823232503e-8,
-0.07331808656454086,
-0.047486137598752975,
-0.060612134635448456,
0.03424645587801933,
-0.0067790960893034935,
-0.017025411128997803,
-0.010979312472045422,
-0.046939667314291,
-0.005744980648159981,
-0.007706987205892801,
0.03851311653852463,
0.0523034892976284,
-0.03742307424545288,
-0.006504340097308159,
-0.027879836037755013,
0.04992368072271347,
0.024736158549785614,
0.05701468512415886,
-0.01336917094886303,
-0.05340183526277542,
-0.05938003957271576,
0.008925025351345539,
0.04814376309514046,
0.030580053105950356,
-0.04832862690091133,
-0.017585888504981995,
-0.03654449060559273,
0.09521213173866272,
0.04159201309084892,
-0.03278389200568199,
0.014366637915372849,
0.025061754509806633,
-0.0336531363427639,
0.006308683194220066,
0.04292600601911545,
0.02367093227803707,
-0.024501340463757515,
-0.05429227650165558,
0.01731819473206997,
-0.021548526361584663,
0.08309400081634521,
0.050385843962430954,
-0.04280586540699005,
0.007125850301235914,
0.10625652968883514,
-0.01105034165084362,
-0.05266684666275978,
-0.07792714983224869,
0.0400112122297287,
0.02692778781056404,
0.025637228041887283,
-0.04448695853352547,
-0.07930754125118256,
0.037372469902038574,
-0.04132803529500961,
0.024484004825353622,
-0.043764904141426086,
-0.015976468101143837,
0.010787603445351124,
-0.0338372141122818,
-0.029382269829511642,
0.025884881615638733,
0.05676000192761421,
0.0009684168035164475
] |
Luyu/co-condenser-marco | e0cef0ab2410aae0f0994366ddefb5649a266709 | 2021-08-13T13:54:21.000Z | [
"pytorch",
"bert",
"fill-mask",
"transformers",
"autotrain_compatible"
] | fill-mask | false | Luyu | null | Luyu/co-condenser-marco | 3,030 | null | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
minimaxir/magic-the-gathering | c0c296822d2bf6584d7ffa2fbb3d1c893dab1311 | 2021-05-23T09:35:52.000Z | [
"pytorch",
"jax",
"gpt2",
"text-generation",
"transformers"
] | text-generation | false | minimaxir | null | minimaxir/magic-the-gathering | 3,025 | null | transformers | # magic-the-gathering
A small (~1M parameters) GPT-2 model trained on Magic: The Gathering cards from sets up to and including _Strixhaven_ and _Commander 2021_.
The model was trained for 8 hours on a V100 on roughly 22k unique encoded cards, with 10 permutations of each possible card.
Examples of encoded cards:
```
<|toughness|><|text|>Counter target spell unless its controller pays {X}.<|power|><|type|>Instant<|loyalty|><|manaCost|>{X}{U}<|name|>Clash of Wills
```
```
<|loyalty|><|text|>~ enters the battlefield tapped.
{T}: Add {C}.
{T}: Add {U} or {R}. ~ deals 1 damage to you.<|toughness|><|name|>Caldera Lake<|power|><|manaCost|><|type|>Land
```
```
<|loyalty|>5<|text|>+1: Scry 1, then draw a card.
−2: Return target creature to its owner's hand.
−8: You get an emblem with "Whenever an opponent casts their first spell each turn, counter that spell."<|name|>Jace, Unraveler of Secrets<|toughness|><|type|>Legendary Planeswalker — Jace<|manaCost|>{3}{U}{U}<|power|>
```
The generated cards follow a similar schema; however, because the model learns all possible permutations of the schema, the user can prompt generation with any combination of schema fields, as in the sketch below.
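A minimal generation sketch, assuming the hosted tokenizer works with the standard `transformers` text-generation pipeline; the prompt string is a hypothetical partial encoding, not one of the cards above.
```python
from transformers import pipeline

generator = pipeline("text-generation", model="minimaxir/magic-the-gathering")

# Hypothetical prompt: seed only the mana cost and name fields and let the
# model complete the remaining schema fields.
prompt = "<|manaCost|>{2}{U}{U}<|name|>"
result = generator(prompt, max_length=128, do_sample=True, temperature=0.9)
print(result[0]["generated_text"])
```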
| [
-0.07631202787160873,
0.008557764813303947,
-0.0690014660358429,
0.012271730229258537,
-0.015097047202289104,
-0.021149083971977234,
0.0393780916929245,
0.03353429213166237,
-0.02276848815381527,
-0.08853849768638611,
-0.012026028707623482,
-0.0834113359451294,
0.020452139899134636,
-0.043449874967336655,
-0.07302678376436234,
0.03498112037777901,
-0.021603131666779518,
-0.008671157993376255,
-0.014365583658218384,
0.009169312193989754,
0.02986089140176773,
0.015299133025109768,
-0.01785152405500412,
0.0764157697558403,
-0.0323781855404377,
0.03056969866156578,
-0.05049196258187294,
0.002078753663226962,
-0.009793534874916077,
-0.0410955548286438,
0.011427235789597034,
0.07399430871009827,
-0.025480641052126884,
0.02727787382900715,
-0.02247978001832962,
0.018098527565598488,
-0.10038372874259949,
-0.013441435992717743,
-0.010151851922273636,
-0.014498108066618443,
0.02742750383913517,
-0.007459886837750673,
-0.0264744870364666,
0.026538798585534096,
0.006092702969908714,
0.0011537937680259347,
0.00466945581138134,
0.014043333940207958,
-0.06410165131092072,
-0.019040951505303383,
-0.04034470394253731,
0.026091819629073143,
-0.033663954585790634,
-0.029684016481041908,
0.05388730764389038,
-0.008241276256740093,
-0.12097068130970001,
-0.08741651475429535,
0.004504511598497629,
-0.07451409846544266,
0.016423841938376427,
-0.06044542416930199,
-0.037414103746414185,
0.02460198663175106,
0.007056947331875563,
-0.030788181349635124,
-0.018652671948075294,
0.0376681312918663,
0.04924444854259491,
-0.0506378598511219,
0.04136787727475166,
0.06987712532281876,
-0.05675129592418671,
-0.06061027944087982,
0.009103553369641304,
0.10539724677801132,
-0.015051806345582008,
-0.04499652609229088,
0.01072670053690672,
-0.061672892421483994,
-0.04654167219996452,
-0.039375655353069305,
0.04257122054696083,
0.0022991765290498734,
0.05431574583053589,
-0.000515562598593533,
0.0556083545088768,
0.11081643402576447,
0.12661831080913544,
-0.03172450140118599,
0.037351325154304504,
0.05993133783340454,
0.04294295236468315,
0.046217698603868484,
-0.009286902844905853,
0.02682524174451828,
0.06406847387552261,
-0.033751506358385086,
-0.12905371189117432,
0.08874095976352692,
0.024661395698785782,
0.022327497601509094,
0.005999726243317127,
0.021745307371020317,
0.028041033074259758,
-0.002588150789961219,
-0.03707289695739746,
0.0542159229516983,
-0.01445190142840147,
-0.08009041100740433,
0.05416867136955261,
-0.0286415982991457,
-0.02174927480518818,
-0.021018462255597115,
-0.01990339159965515,
0.1219443753361702,
-0.04923626780509949,
0.017490876838564873,
-0.01843930594623089,
0.074259914457798,
-0.03274895250797272,
-0.0809185579419136,
-0.016344621777534485,
-0.004084549378603697,
-0.045817047357559204,
-0.026655280962586403,
-0.0027375726494938135,
2.931478852478322e-33,
0.011699606664478779,
0.009081054478883743,
-0.05647364258766174,
0.02052537351846695,
0.01564324088394642,
0.005578183103352785,
-0.02794376201927662,
-0.03100016340613365,
0.006057970225811005,
0.04072796180844307,
-0.06759275496006012,
0.01874011941254139,
-0.004602125380188227,
0.10940447449684143,
-0.003111670957878232,
-0.04871216416358948,
-0.008665665052831173,
-0.025830885395407677,
-0.025306761264801025,
-0.01701829582452774,
0.051748063415288925,
0.07442064583301544,
-0.03880561515688896,
-0.10089041292667389,
-0.05503019317984581,
0.0005751373246312141,
-0.04952802509069443,
-0.037869591265916824,
0.044996228069067,
0.049306999891996384,
-0.005917791277170181,
-0.07431533932685852,
0.006804279983043671,
-0.005823841318488121,
0.06655294448137283,
0.027554437518119812,
0.08049879968166351,
-0.07370108366012573,
0.02668577991425991,
-0.01992654614150524,
-0.05314858630299568,
-0.005852108355611563,
0.06340929120779037,
-0.10652513056993484,
-0.004798092879354954,
-0.05795111134648323,
-0.008286978118121624,
0.02482283115386963,
-0.0999397411942482,
0.06801529228687286,
-0.05315865948796272,
-0.005907940212637186,
-0.016667984426021576,
0.049217209219932556,
-0.03454288840293884,
0.02224808558821678,
-0.022899296134710312,
0.03508581221103668,
0.050375621765851974,
0.13905979692935944,
-0.03502439707517624,
-0.06310801953077316,
0.004948441870510578,
0.026681862771511078,
-0.030283769592642784,
0.041876420378685,
-0.09739919751882553,
-0.0361025296151638,
0.08169177174568176,
0.015026241540908813,
-0.05262398719787598,
0.030396871268749237,
-0.016272934153676033,
-0.05419382452964783,
0.048609409481287,
-0.06501208990812302,
0.08945706486701965,
0.02834749035537243,
-0.13160216808319092,
0.07574209570884705,
-0.10359066724777222,
0.05297648534178734,
-0.07379911839962006,
0.027780408039689064,
0.006872319616377354,
-0.020271891728043556,
0.04413219541311264,
-0.1079234927892685,
-0.07813841104507446,
0.02938341721892357,
-0.046769533306360245,
0.00020297379523981363,
0.09219356626272202,
-0.059596117585897446,
0.09411616623401642,
-5.2119766476529384e-33,
-0.028046298772096634,
0.011177717708051205,
0.082991823554039,
0.08551149815320969,
0.04446300119161606,
-0.11630026251077652,
0.08068118989467621,
0.041462916880846024,
0.01166238822042942,
-0.024292629212141037,
-0.019682418555021286,
0.10845252871513367,
-0.03507915884256363,
-0.03788023814558983,
0.06038970127701759,
-0.08893866091966629,
-0.01075404416769743,
0.03948899358510971,
0.039938490837812424,
0.04878613352775574,
0.0455741249024868,
0.0745568498969078,
-0.09354628622531891,
0.06413780152797699,
0.027715710923075676,
0.11043240875005722,
0.01066048163920641,
-0.039630256593227386,
0.030897231772542,
-0.03130012005567551,
0.03150646761059761,
0.029579095542430878,
-0.029683703556656837,
0.08217676728963852,
-0.08478638529777527,
0.046121153980493546,
0.1460907757282257,
0.10337581485509872,
-0.02425849623978138,
0.06766711175441742,
0.09162376821041107,
-0.00029208287014625967,
0.0002231018734164536,
0.05015510320663452,
-0.059131260961294174,
0.025075985118746758,
0.07988812029361725,
-0.007862770929932594,
0.022753175348043442,
-0.01722702942788601,
0.04024564474821091,
-0.06775664538145065,
-0.08339709043502808,
0.05011406168341637,
0.019233116880059242,
-0.0809669941663742,
0.04422289505600929,
-0.032485075294971466,
-0.04587646201252937,
0.003658676752820611,
0.013133702799677849,
-0.030420886352658272,
0.02411239966750145,
-0.012674886733293533,
0.006099590100347996,
-0.04532203450798988,
-0.07433668524026871,
0.024294910952448845,
0.00909041240811348,
0.03720066696405411,
-0.0711335688829422,
0.042062822729349136,
-0.027494873851537704,
-0.039740342646837234,
0.0245429128408432,
-0.013019493781030178,
-0.10204777121543884,
-0.020232127979397774,
0.04903925210237503,
-0.08619167655706406,
-0.05070999637246132,
-0.04607377573847771,
0.01797676272690296,
0.012765629217028618,
0.05596167966723442,
0.015855858102440834,
0.05266337841749191,
-0.04264850541949272,
-0.011389918625354767,
-0.04558569937944412,
-0.028452634811401367,
0.0842687338590622,
0.08455465734004974,
0.040172431617975235,
-0.04775160178542137,
-6.772029337298591e-8,
-0.005970214959233999,
0.01544034481048584,
-0.025894535705447197,
0.008503418415784836,
0.00934555009007454,
-0.06320907175540924,
-0.00815316941589117,
-0.02532004751265049,
-0.05084317922592163,
-0.061910681426525116,
0.09276694059371948,
-0.012901756912469864,
-0.013722231611609459,
-0.10010276734828949,
0.04726885259151459,
0.006503364536911249,
-0.04155709594488144,
-0.0603204220533371,
-0.07919922471046448,
-0.03721233457326889,
0.008332902565598488,
-0.041568003594875336,
-0.014027637429535389,
-0.037643395364284515,
-0.06734848022460938,
0.015428779646754265,
-0.0016767259221524,
-0.03141259402036667,
0.06683600693941116,
0.01505696028470993,
0.1040235310792923,
-0.08524751663208008,
0.05476919189095497,
0.05916684865951538,
0.06919775903224945,
0.0016812176909297705,
-0.1009623259305954,
-0.007262944243848324,
0.03510400280356407,
0.0302655678242445,
-0.016637787222862244,
-0.000401566328946501,
0.011908433400094509,
-0.018251970410346985,
0.017945338040590286,
-0.03323215991258621,
-0.052092526108026505,
-0.07474544644355774,
-0.00205072364769876,
0.0030104753095656633,
0.003233365248888731,
0.03635207563638687,
-0.00032448131241835654,
-0.0013979197246953845,
0.04990732669830322,
-0.04214075952768326,
0.03134269267320633,
-0.03842734917998314,
-0.018670016899704933,
-0.030567843466997147,
0.03487417846918106,
-0.004144697450101376,
-0.08732772618532181,
-0.010368670336902142
] |
KoboldAI/GPT-Neo-2.7B-Horni-LN | 40eb749c615988ae901c51f4cc7308ac08e8c2a4 | 2021-12-30T12:18:58.000Z | [
"pytorch",
"gpt_neo",
"text-generation",
"transformers"
] | text-generation | false | KoboldAI | null | KoboldAI/GPT-Neo-2.7B-Horni-LN | 3,023 | null | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
jonatasgrosman/wav2vec2-large-xlsr-53-chinese-zh-cn | 9e8a155701c0fa9a84fed4adfcf5edb4ada4342c | 2022-07-27T23:36:42.000Z | [
"pytorch",
"jax",
"wav2vec2",
"automatic-speech-recognition",
"zh",
"dataset:common_voice",
"transformers",
"audio",
"speech",
"xlsr-fine-tuning-week",
"license:apache-2.0",
"model-index"
] | automatic-speech-recognition | false | jonatasgrosman | null | jonatasgrosman/wav2vec2-large-xlsr-53-chinese-zh-cn | 3,023 | 6 | transformers | ---
language: zh
datasets:
- common_voice
metrics:
- wer
- cer
tags:
- audio
- automatic-speech-recognition
- speech
- xlsr-fine-tuning-week
license: apache-2.0
model-index:
- name: XLSR Wav2Vec2 Chinese (zh-CN) by Jonatas Grosman
results:
- task:
name: Speech Recognition
type: automatic-speech-recognition
dataset:
name: Common Voice zh-CN
type: common_voice
args: zh-CN
metrics:
- name: Test WER
type: wer
value: 82.37
- name: Test CER
type: cer
value: 19.03
---
# Fine-tuned XLSR-53 large model for speech recognition in Chinese
Fine-tuned [facebook/wav2vec2-large-xlsr-53](https://huggingface.co/facebook/wav2vec2-large-xlsr-53) on Chinese using the train and validation splits of [Common Voice 6.1](https://huggingface.co/datasets/common_voice), [CSS10](https://github.com/Kyubyong/css10) and [ST-CMDS](http://www.openslr.org/38/).
When using this model, make sure that your speech input is sampled at 16kHz.
This model has been fine-tuned thanks to the GPU credits generously given by the [OVHcloud](https://www.ovhcloud.com/en/public-cloud/ai-training/) :)
The script used for training can be found here: https://github.com/jonatasgrosman/wav2vec2-sprint
## Usage
The model can be used directly (without a language model) as follows...
Using the [HuggingSound](https://github.com/jonatasgrosman/huggingsound) library:
```python
from huggingsound import SpeechRecognitionModel
model = SpeechRecognitionModel("jonatasgrosman/wav2vec2-large-xlsr-53-chinese-zh-cn")
audio_paths = ["/path/to/file.mp3", "/path/to/another_file.wav"]
transcriptions = model.transcribe(audio_paths)
```
Writing your own inference script:
```python
import torch
import librosa
from datasets import load_dataset
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
LANG_ID = "zh-CN"
MODEL_ID = "jonatasgrosman/wav2vec2-large-xlsr-53-chinese-zh-cn"
SAMPLES = 10
test_dataset = load_dataset("common_voice", LANG_ID, split=f"test[:{SAMPLES}]")
processor = Wav2Vec2Processor.from_pretrained(MODEL_ID)
model = Wav2Vec2ForCTC.from_pretrained(MODEL_ID)
# Preprocessing the datasets.
# We need to read the audio files as arrays
def speech_file_to_array_fn(batch):
speech_array, sampling_rate = librosa.load(batch["path"], sr=16_000)
batch["speech"] = speech_array
batch["sentence"] = batch["sentence"].upper()
return batch
test_dataset = test_dataset.map(speech_file_to_array_fn)
inputs = processor(test_dataset["speech"], sampling_rate=16_000, return_tensors="pt", padding=True)
with torch.no_grad():
logits = model(inputs.input_values, attention_mask=inputs.attention_mask).logits
predicted_ids = torch.argmax(logits, dim=-1)
predicted_sentences = processor.batch_decode(predicted_ids)
for i, predicted_sentence in enumerate(predicted_sentences):
print("-" * 100)
print("Reference:", test_dataset[i]["sentence"])
print("Prediction:", predicted_sentence)
```
| Reference | Prediction |
| ------------- | ------------- |
| 宋朝末年年间定居粉岭围。 | 宋朝末年年间定居分定为 |
| 渐渐行动不便 | 建境行动不片 |
| 二十一年去世。 | 二十一年去世 |
| 他们自称恰哈拉。 | 他们自称家哈<unk> |
| 局部干涩的例子包括有口干、眼睛干燥、及阴道干燥。 | 菊物干寺的例子包括有口肝眼睛干照以及阴到干<unk> |
| 嘉靖三十八年,登进士第三甲第二名。 | 嘉靖三十八年登进士第三甲第二名 |
| 这一名称一直沿用至今。 | 这一名称一直沿用是心 |
| 同时乔凡尼还得到包税合同和许多明矾矿的经营权。 | 同时桥凡妮还得到包税合同和许多民繁矿的经营权 |
| 为了惩罚西扎城和塞尔柱的结盟,盟军在抵达后将外城烧毁。 | 为了曾罚西扎城和塞尔素的节盟盟军在抵达后将外曾烧毁 |
| 河内盛产黄色无鱼鳞的鳍射鱼。 | 合类生场环色无鱼林的骑射鱼 |
## Evaluation
The model can be evaluated as follows on the Chinese (zh-CN) test data of Common Voice.
```python
import torch
import re
import librosa
import warnings  # required by warnings.catch_warnings() in speech_file_to_array_fn below
from datasets import load_dataset, load_metric
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
LANG_ID = "zh-CN"
MODEL_ID = "jonatasgrosman/wav2vec2-large-xlsr-53-chinese-zh-cn"
DEVICE = "cuda"
CHARS_TO_IGNORE = [",", "?", "¿", ".", "!", "¡", ";", ";", ":", '""', "%", '"', "�", "ʿ", "·", "჻", "~", "՞",
"؟", "،", "।", "॥", "«", "»", "„", "“", "”", "「", "」", "‘", "’", "《", "》", "(", ")", "[", "]",
"{", "}", "=", "`", "_", "+", "<", ">", "…", "–", "°", "´", "ʾ", "‹", "›", "©", "®", "—", "→", "。",
"、", "﹂", "﹁", "‧", "~", "﹏", ",", "{", "}", "(", ")", "[", "]", "【", "】", "‥", "〽",
"『", "』", "〝", "〟", "⟨", "⟩", "〜", ":", "!", "?", "♪", "؛", "/", "\\", "º", "−", "^", "'", "ʻ", "ˆ"]
test_dataset = load_dataset("common_voice", LANG_ID, split="test")
wer = load_metric("wer.py") # https://github.com/jonatasgrosman/wav2vec2-sprint/blob/main/wer.py
cer = load_metric("cer.py") # https://github.com/jonatasgrosman/wav2vec2-sprint/blob/main/cer.py
chars_to_ignore_regex = f"[{re.escape(''.join(CHARS_TO_IGNORE))}]"
processor = Wav2Vec2Processor.from_pretrained(MODEL_ID)
model = Wav2Vec2ForCTC.from_pretrained(MODEL_ID)
model.to(DEVICE)
# Preprocessing the datasets.
# We need to read the audio files as arrays
def speech_file_to_array_fn(batch):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
speech_array, sampling_rate = librosa.load(batch["path"], sr=16_000)
batch["speech"] = speech_array
batch["sentence"] = re.sub(chars_to_ignore_regex, "", batch["sentence"]).upper()
return batch
test_dataset = test_dataset.map(speech_file_to_array_fn)
# Preprocessing the datasets.
# We need to read the audio files as arrays
def evaluate(batch):
inputs = processor(batch["speech"], sampling_rate=16_000, return_tensors="pt", padding=True)
with torch.no_grad():
logits = model(inputs.input_values.to(DEVICE), attention_mask=inputs.attention_mask.to(DEVICE)).logits
pred_ids = torch.argmax(logits, dim=-1)
batch["pred_strings"] = processor.batch_decode(pred_ids)
return batch
result = test_dataset.map(evaluate, batched=True, batch_size=8)
predictions = [x.upper() for x in result["pred_strings"]]
references = [x.upper() for x in result["sentence"]]
print(f"WER: {wer.compute(predictions=predictions, references=references, chunk_size=1000) * 100}")
print(f"CER: {cer.compute(predictions=predictions, references=references, chunk_size=1000) * 100}")
```
**Test Result**:
In the table below I report the Word Error Rate (WER) and the Character Error Rate (CER) of the model. I ran the evaluation script described above on other models as well (on 2021-05-13). Note that the table below may show results that differ from those already reported; this may be due to specifics of the other evaluation scripts used.
| Model | WER | CER |
| ------------- | ------------- | ------------- |
| jonatasgrosman/wav2vec2-large-xlsr-53-chinese-zh-cn | **82.37%** | **19.03%** |
| ydshieh/wav2vec2-large-xlsr-53-chinese-zh-cn-gpt | 84.01% | 20.95% |
## Citation
If you want to cite this model you can use this:
```bibtex
@misc{grosman2021xlsr53-large-chinese,
title={Fine-tuned {XLSR}-53 large model for speech recognition in {C}hinese},
author={Grosman, Jonatas},
howpublished={\url{https://huggingface.co/jonatasgrosman/wav2vec2-large-xlsr-53-chinese-zh-cn}},
year={2021}
}
```
| [
-0.14335742592811584,
0.0003548183012753725,
0.032126959413290024,
-0.07487169653177261,
0.052972324192523956,
0.04533425718545914,
-0.01097940281033516,
-0.015149196609854698,
-0.07390313595533371,
-0.11262983828783035,
0.041863903403282166,
-0.14432907104492188,
-0.005091923754662275,
0.003603231394663453,
0.006638195365667343,
-0.032352183014154434,
-0.025559604167938232,
-0.008418932557106018,
-0.07015040516853333,
-0.044917844235897064,
0.016341406852006912,
0.03853416442871094,
0.046720750629901886,
0.015691891312599182,
0.01592038758099079,
-0.031336460262537,
-0.046127576380968094,
0.0532805435359478,
0.07353778183460236,
-0.017979614436626434,
0.057294029742479324,
0.08112818002700806,
0.07572583109140396,
0.0444500595331192,
0.02974686026573181,
0.025125078856945038,
0.006718513555824757,
-0.01628178358078003,
-0.006732855457812548,
0.006325908936560154,
0.017545286566019058,
0.005946807097643614,
0.04304763302206993,
-0.061869774013757706,
0.03982117399573326,
0.069974385201931,
-0.0658273696899414,
-0.05799578130245209,
-0.06799234449863434,
0.04515165835618973,
-0.04993078485131264,
0.009016752243041992,
0.041501376777887344,
0.13966213166713715,
-0.03944054991006851,
0.0308220311999321,
-0.02735769748687744,
0.04911326617002487,
0.008641757071018219,
0.00802245270460844,
-0.09938876330852509,
0.01521478034555912,
-0.062412288039922714,
-0.010710008442401886,
-0.010973169468343258,
0.011012233793735504,
-0.09169648587703705,
-0.03681809827685356,
-0.02537183277308941,
0.0055065518245100975,
-0.05519627407193184,
0.054234810173511505,
-0.015732986852526665,
0.01348644495010376,
-0.05879409238696098,
-0.005123276729136705,
0.03570564463734627,
-0.07566466182470322,
0.024663731455802917,
-0.10152101516723633,
-0.026451922953128815,
-0.017932388931512833,
0.03949315845966339,
-0.0911896750330925,
0.0999322310090065,
0.005329869221895933,
-0.031227534636855125,
-0.004054677207022905,
-0.050640325993299484,
-0.01279474887996912,
0.042705386877059937,
0.01819157786667347,
-0.0010583873372524977,
0.13191157579421997,
0.023359552025794983,
0.09015598893165588,
0.009427765384316444,
0.07337035238742828,
0.03328786417841911,
0.06682845205068588,
0.018913395702838898,
-0.01727224513888359,
0.044880691915750504,
-0.027869082987308502,
-0.000633789284620434,
-0.05843614786863327,
0.050349071621894836,
0.022051671519875526,
0.04492533206939697,
-0.0020436400081962347,
0.040962185710668564,
-0.003602509154006839,
-0.0020340161863714457,
-0.031113767996430397,
-0.0028087457176297903,
0.04165714234113693,
-0.09453314542770386,
-0.0003597356553655118,
-0.040450166910886765,
0.025038253515958786,
-0.08677317947149277,
-0.03235037252306938,
0.0005528798792511225,
-0.02425888180732727,
0.04803105443716049,
-0.03534313291311264,
0.027017006650567055,
5.744722809731092e-33,
-0.021030008792877197,
0.05236247926950455,
-0.004745750222355127,
-0.038027457892894745,
-0.0406821072101593,
-0.024908462539315224,
-0.043371252715587616,
0.0038666485343128443,
-0.07302767783403397,
0.00849151611328125,
-0.07604556530714035,
0.033219002187252045,
-0.10631123185157776,
-0.01421809196472168,
0.021474134176969528,
0.03667602315545082,
-0.020388854667544365,
-0.004604470916092396,
-0.04849080741405487,
0.0036841141991317272,
0.20661556720733643,
0.03192479535937309,
0.037516262382268906,
-0.05865310877561569,
0.04515884071588516,
0.0002173309330828488,
0.06083521991968155,
-0.08079276978969574,
-0.021292632445693016,
0.03992708399891853,
-0.0968737006187439,
-0.02507256157696247,
0.013635022565722466,
0.024973347783088684,
0.010822495445609093,
0.030665477737784386,
0.012820825912058353,
0.03780418634414673,
-0.06859447807073593,
-0.06372903287410736,
0.04170713201165199,
0.009404821321368217,
0.009040169417858124,
-0.041231393814086914,
-0.014350987039506435,
-0.058501821011304855,
-0.023267431184649467,
-0.0003139738691970706,
0.0380307137966156,
0.013464093208312988,
-0.05153605714440346,
-0.0305520910769701,
-0.06148846819996834,
0.032363224774599075,
-0.016175154596567154,
-0.057648248970508575,
0.059787239879369736,
0.04111426696181297,
-0.010455621406435966,
0.03430010378360748,
-0.009992877952754498,
-0.0335577167570591,
-0.002321567153558135,
0.055569570511579514,
0.02875833585858345,
-0.044321928173303604,
-0.06991275399923325,
-0.03291190043091774,
-0.020121432840824127,
-0.0032069047447293997,
-0.041691459715366364,
-0.07127244770526886,
0.0931185781955719,
0.028155947104096413,
0.04445832222700119,
-0.05978186056017876,
0.014133439399302006,
-0.017797531560063362,
-0.012666551396250725,
0.012865100055932999,
-0.048069097101688385,
0.054403383284807205,
-0.059095196425914764,
-0.023594094440340996,
-0.008196074515581131,
-0.03855886310338974,
0.008902824483811855,
-0.06275448948144913,
0.01687062717974186,
0.023586858063936234,
-0.031250499188899994,
0.03971622884273529,
-0.006704637315124273,
-0.032636817544698715,
-0.032867949455976486,
-7.011621029879428e-33,
-0.011003293097019196,
0.16186785697937012,
0.00036776199704036117,
0.07723790407180786,
0.0334133543074131,
-0.024917317554354668,
0.11229752749204636,
0.08431011438369751,
-0.010882113128900528,
-0.03411829471588135,
0.051122914999723434,
-0.07142408937215805,
0.051091268658638,
0.004819956608116627,
-0.00007409613317577168,
-0.029115797951817513,
-0.0350104421377182,
0.10983906686306,
0.07650762051343918,
0.07173541933298111,
0.07876544445753098,
0.028658265247941017,
-0.11634718626737595,
0.04633808135986328,
-0.049719855189323425,
0.026315046474337578,
-0.012149265967309475,
0.02060525305569172,
0.04255565255880356,
0.0343816913664341,
-0.07872672379016876,
0.029209883883595467,
-0.1116361916065216,
0.03386767581105232,
-0.029179954901337624,
-0.038990568369627,
-0.004196156281977892,
0.014754918403923512,
-0.016382642090320587,
0.0841730609536171,
0.04333977773785591,
0.04627449810504913,
-0.12523111701011658,
-0.02011021226644516,
0.03130097687244415,
-0.03807096183300018,
-0.022107213735580444,
0.009273315779864788,
-0.04881465807557106,
-0.03165090084075928,
0.041475169360637665,
-0.028078176081180573,
0.0002357789344387129,
0.032744500786066055,
-0.025581207126379013,
-0.03254828229546547,
0.03620128333568573,
-0.07878411561250687,
-0.09245437383651733,
-0.041142161935567856,
0.0017345768865197897,
0.02327408269047737,
-0.0800032839179039,
-0.03331725299358368,
0.06940308958292007,
0.034642551094293594,
0.045381296426057816,
0.0031600475776940584,
0.09466871619224548,
-0.010344495065510273,
-0.02776946872472763,
0.014910265803337097,
0.03736336901783943,
-0.03465382009744644,
-0.06183842569589615,
0.0008470003958791494,
-0.09663348644971848,
-0.023123551160097122,
0.03125477582216263,
-0.039005815982818604,
-0.024878591299057007,
0.004008753225207329,
0.1020391434431076,
0.05301303043961525,
0.004451615270227194,
0.1157783791422844,
-0.0017988034524023533,
0.025512095540761948,
0.015946466475725174,
-0.006948402151465416,
-0.033768296241760254,
0.06791580468416214,
0.04048260301351547,
0.06528949737548828,
-0.09393569827079773,
-5.363852295658944e-8,
-0.09979087859392166,
-0.059492457658052444,
-0.03893459960818291,
-0.012405109591782093,
-0.048374999314546585,
-0.07257919758558273,
-0.037685394287109375,
-0.011758179403841496,
0.04168110713362694,
0.015077747404575348,
0.06609907746315002,
0.0026852309238165617,
-0.08450242131948471,
0.03298340365290642,
0.02548588253557682,
-0.04442863538861275,
-0.02135016955435276,
0.1334145963191986,
-0.04860329627990723,
-0.02197158895432949,
0.0243205688893795,
0.010050804354250431,
0.03563477098941803,
0.024234866723418236,
-0.003504305612295866,
-0.034737516194581985,
-0.08670911937952042,
0.11179939657449722,
-0.06398548185825348,
0.00039540333091281354,
-0.01942317560315132,
0.014805637300014496,
0.051143188029527664,
-0.011052601039409637,
0.05905994027853012,
0.05880296602845192,
-0.027223244309425354,
-0.015119312331080437,
0.032263319939374924,
0.06591033190488815,
0.06129881739616394,
0.024238599464297295,
-0.057772304862737656,
-0.013056187890470028,
0.09420730918645859,
-0.08815866708755493,
-0.005011488683521748,
-0.09118618816137314,
0.04427170008420944,
0.025354431942105293,
-0.0019873231649398804,
-0.020275991410017014,
-0.002079776953905821,
-0.030075443908572197,
0.0940266102552414,
0.07220258563756943,
-0.046717699617147446,
-0.030927740037441254,
0.03515182062983513,
-0.020417071878910065,
0.05824051797389984,
-0.024191949516534805,
-0.028223982080817223,
0.014360823668539524
] |
pierreguillou/bert-base-cased-squad-v1.1-portuguese | fda61a9dc93104d7944a4abf5d48d51eba229a13 | 2022-01-04T09:57:53.000Z | [
"pytorch",
"tf",
"jax",
"bert",
"question-answering",
"pt",
"dataset:brWaC",
"dataset:squad",
"dataset:squad_v1_pt",
"transformers",
"bert-base",
"license:mit",
"autotrain_compatible"
] | question-answering | false | pierreguillou | null | pierreguillou/bert-base-cased-squad-v1.1-portuguese | 3,022 | 13 | transformers | ---
language: pt
license: mit
tags:
- question-answering
- bert
- bert-base
- pytorch
datasets:
- brWaC
- squad
- squad_v1_pt
metrics:
- squad
widget:
- text: "Quando começou a pandemia de Covid-19 no mundo?"
context: "A pandemia de COVID-19, também conhecida como pandemia de coronavírus, é uma pandemia em curso de COVID-19, uma doença respiratória aguda causada pelo coronavírus da síndrome respiratória aguda grave 2 (SARS-CoV-2). A doença foi identificada pela primeira vez em Wuhan, na província de Hubei, República Popular da China, em 1 de dezembro de 2019, mas o primeiro caso foi reportado em 31 de dezembro do mesmo ano."
- text: "Onde foi descoberta a Covid-19?"
context: "A pandemia de COVID-19, também conhecida como pandemia de coronavírus, é uma pandemia em curso de COVID-19, uma doença respiratória aguda causada pelo coronavírus da síndrome respiratória aguda grave 2 (SARS-CoV-2). A doença foi identificada pela primeira vez em Wuhan, na província de Hubei, República Popular da China, em 1 de dezembro de 2019, mas o primeiro caso foi reportado em 31 de dezembro do mesmo ano."
---
# Portuguese BERT base cased QA (Question Answering), finetuned on SQUAD v1.1

## Introduction
The model was trained on the SQuAD v1.1 dataset in Portuguese from the [Deep Learning Brasil group](http://www.deeplearningbrasil.com.br/), on Google Colab.
The language model used is the [BERTimbau Base](https://huggingface.co/neuralmind/bert-base-portuguese-cased) (aka "bert-base-portuguese-cased") from [Neuralmind.ai](https://neuralmind.ai/): BERTimbau Base is a pretrained BERT model for Brazilian Portuguese that achieves state-of-the-art performance on three downstream NLP tasks: Named Entity Recognition, Sentence Textual Similarity and Recognizing Textual Entailment. It is available in two sizes: Base and Large.
## Information on the method used
All the information is in the blog post: [NLP | Modelo de Question Answering em qualquer idioma baseado no BERT base (estudo de caso em português)](https://medium.com/@pierre_guillou/nlp-modelo-de-question-answering-em-qualquer-idioma-baseado-no-bert-base-estudo-de-caso-em-12093d385e78)
## Notebooks in Google Colab & GitHub
- Google Colab: [colab_question_answering_BERT_base_cased_squad_v11_pt.ipynb](https://colab.research.google.com/drive/18ueLdi_V321Gz37x4gHq8mb4XZSGWfZx?usp=sharing)
- GitHub: [colab_question_answering_BERT_base_cased_squad_v11_pt.ipynb](https://github.com/piegu/language-models/blob/master/colab_question_answering_BERT_base_cased_squad_v11_pt.ipynb)
## Performance
The results obtained are the following:
```
f1 = 82.50
exact match = 70.49
```
## How to use the model... with Pipeline
```python
import transformers
from transformers import pipeline
# source: https://pt.wikipedia.org/wiki/Pandemia_de_COVID-19
context = r"""
A pandemia de COVID-19, também conhecida como pandemia de coronavírus, é uma pandemia em curso de COVID-19,
uma doença respiratória aguda causada pelo coronavírus da síndrome respiratória aguda grave 2 (SARS-CoV-2).
A doença foi identificada pela primeira vez em Wuhan, na província de Hubei, República Popular da China,
em 1 de dezembro de 2019, mas o primeiro caso foi reportado em 31 de dezembro do mesmo ano.
Acredita-se que o vírus tenha uma origem zoonótica, porque os primeiros casos confirmados
tinham principalmente ligações ao Mercado Atacadista de Frutos do Mar de Huanan, que também vendia animais vivos.
Em 11 de março de 2020, a Organização Mundial da Saúde declarou o surto uma pandemia. Até 8 de fevereiro de 2021,
pelo menos 105 743 102 casos da doença foram confirmados em pelo menos 191 países e territórios,
com cerca de 2 308 943 mortes e 58 851 440 pessoas curadas.
"""
model_name = 'pierreguillou/bert-base-cased-squad-v1.1-portuguese'
nlp = pipeline("question-answering", model=model_name)
question = "Quando começou a pandemia de Covid-19 no mundo?"
result = nlp(question=question, context=context)
print(f"Answer: '{result['answer']}', score: {round(result['score'], 4)}, start: {result['start']}, end: {result['end']}")
# Answer: '1 de dezembro de 2019', score: 0.713, start: 328, end: 349
```
## How to use the model... with the Auto classes
```python
from transformers import AutoTokenizer, AutoModelForQuestionAnswering
tokenizer = AutoTokenizer.from_pretrained("pierreguillou/bert-base-cased-squad-v1.1-portuguese")
model = AutoModelForQuestionAnswering.from_pretrained("pierreguillou/bert-base-cased-squad-v1.1-portuguese")
```
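If you prefer to run inference manually with the Auto classes, the snippet below is a minimal sketch (not part of the original card; the question/context pair mirrors the pipeline example) that decodes the highest-scoring start/end span:
```python
import torch
from transformers import AutoTokenizer, AutoModelForQuestionAnswering

tokenizer = AutoTokenizer.from_pretrained("pierreguillou/bert-base-cased-squad-v1.1-portuguese")
model = AutoModelForQuestionAnswering.from_pretrained("pierreguillou/bert-base-cased-squad-v1.1-portuguese")

question = "Quando começou a pandemia de Covid-19 no mundo?"
context = "A doença foi identificada pela primeira vez em Wuhan, na província de Hubei, em 1 de dezembro de 2019."

# Encode question and context together and run the model
inputs = tokenizer(question, context, return_tensors="pt", truncation=True)
with torch.no_grad():
    outputs = model(**inputs)

# Decode the highest-scoring start/end span as the answer
start_idx = int(outputs.start_logits.argmax())
end_idx = int(outputs.end_logits.argmax())
answer = tokenizer.decode(inputs["input_ids"][0][start_idx : end_idx + 1])
print(answer)
```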
Or just clone the model repo:
```bash
git lfs install
git clone https://huggingface.co/pierreguillou/bert-base-cased-squad-v1.1-portuguese
# if you want to clone without large files – just their pointers
# prepend your git clone with the following env var:
GIT_LFS_SKIP_SMUDGE=1
```
## Limitations and bias
The training data used for this model comes from the Portuguese SQuAD dataset. It could contain a lot of unfiltered content, which is far from neutral, as well as biases.
## Author
Portuguese BERT base cased QA (Question Answering), finetuned on SQUAD v1.1 was trained and evaluated by [Pierre GUILLOU](https://www.linkedin.com/in/pierreguillou/) thanks to the Open Source code, platforms and advices of many organizations ([link to the list](https://medium.com/@pierre_guillou/nlp-modelo-de-question-answering-em-qualquer-idioma-baseado-no-bert-base-estudo-de-caso-em-12093d385e78#c572)). In particular: [Hugging Face](https://huggingface.co/), [Neuralmind.ai](https://neuralmind.ai/), [Deep Learning Brasil group](http://www.deeplearningbrasil.com.br/), [Google Colab](https://colab.research.google.com/) and [AI Lab](https://ailab.unb.br/).
## Citation
If you use our work, please cite:
```bibtex
@inproceedings{pierreguillou2021bertbasecasedsquadv11portuguese,
title={Portuguese BERT base cased QA (Question Answering), finetuned on SQUAD v1.1},
author={Pierre Guillou},
year={2021}
}
``` | [
-0.05660419911146164,
0.10063573718070984,
0.00047643203288316727,
-0.03491484746336937,
0.07747656106948853,
0.02959076315164566,
0.09457069635391235,
0.09629715234041214,
0.026812467724084854,
-0.005734567996114492,
0.10742472112178802,
-0.12167851626873016,
-0.010153146460652351,
0.029611660167574883,
0.023138325661420822,
-0.024407923221588135,
-0.030408136546611786,
-0.05669739842414856,
-0.027162808924913406,
0.04452678933739662,
0.05829334259033203,
0.099542535841465,
0.053081877529621124,
0.0031316655222326517,
-0.026088016107678413,
0.027289969846606255,
-0.09231670200824738,
0.004045647103339434,
-0.04303011670708656,
0.08717070519924164,
0.033507563173770905,
0.11525942385196686,
0.06960780918598175,
0.0546523854136467,
0.05986741930246353,
-0.04083765670657158,
0.031328391283750534,
0.014541566371917725,
-0.022277988493442535,
0.06479597091674805,
-0.004653762094676495,
-0.02220124378800392,
0.02920183166861534,
-0.03803837671875954,
0.07534851133823395,
-0.010760375298559666,
-0.09925761073827744,
0.057422805577516556,
0.024729575961828232,
0.006349596194922924,
-0.09446389973163605,
-0.003189638489857316,
-0.04764217510819435,
-0.008479970507323742,
-0.009172968566417694,
0.03790828213095665,
-0.07312575727701187,
-0.08728654682636261,
0.021706724539399147,
-0.05017649754881859,
-0.07966717332601547,
-0.003878437215462327,
-0.05430351197719574,
0.09292981773614883,
-0.030419528484344482,
-0.09280093759298325,
0.027918249368667603,
0.016421068459749222,
-0.07323826104402542,
0.10359683632850647,
-0.020073143765330315,
-0.008712163195014,
-0.03926842287182808,
0.08628102391958237,
-0.09087719768285751,
0.07991776615381241,
-0.0051336344331502914,
-0.01892801932990551,
0.07673948258161545,
-0.09068992733955383,
0.0592840202152729,
0.023896435275673866,
0.14120279252529144,
-0.014128901064395905,
0.07051580399274826,
0.003251568414270878,
0.04978365823626518,
0.0011296691372990608,
0.015878859907388687,
-0.014817914925515652,
-0.053207412362098694,
-0.021661868318915367,
0.07019823044538498,
0.1032334491610527,
-0.024594001471996307,
0.007495002821087837,
0.024749070405960083,
0.01225523091852665,
-0.019312230870127678,
0.037011753767728806,
0.04349878430366516,
-0.030151866376399994,
0.041888125240802765,
0.04765523597598076,
-0.10223008692264557,
-0.01777845062315464,
0.020959705114364624,
-0.01295944582670927,
0.042934950441122055,
0.025126202031970024,
-0.004001924768090248,
-0.0008379683713428676,
-0.016833120957016945,
-0.14763785898685455,
-0.015071279369294643,
0.07260660827159882,
-0.045263368636369705,
-0.04103972762823105,
-0.0001736747653922066,
-0.004482971038669348,
0.021825753152370453,
-0.1321895867586136,
-0.08618632704019547,
-0.04154946282505989,
0.02204972505569458,
0.01605907827615738,
0.06559224426746368,
1.1309405435322304e-32,
0.04946549981832504,
-0.00045709317782893777,
0.07390020787715912,
0.05028802901506424,
0.0041126105934381485,
0.013722631148993969,
-0.047506678849458694,
-0.020860157907009125,
-0.07319021970033646,
-0.00613985164090991,
-0.10384204983711243,
-0.028489740565419197,
-0.03242528811097145,
0.02949327602982521,
0.013723847456276417,
0.04444705322384834,
0.03548083454370499,
-0.01028483733534813,
0.012782595120370388,
0.008459477685391903,
0.03728903457522392,
0.04230310022830963,
0.025358697399497032,
-0.019832756370306015,
0.003999188542366028,
0.07270555943250656,
-0.037947878241539,
-0.06588294357061386,
-0.018682019785046577,
0.04157685860991478,
-0.028771789744496346,
0.008642060682177544,
0.04203518480062485,
-0.0369013249874115,
-0.04215311259031296,
-0.012265610508620739,
-0.009500782005488873,
-0.06596425920724869,
-0.016134656965732574,
0.04083739221096039,
-0.002327209571376443,
-0.005337581969797611,
-0.05980563908815384,
-0.02218150720000267,
0.02773144654929638,
-0.08019812405109406,
-0.0007683929288759828,
-0.039669111371040344,
-0.04571547359228134,
0.05100974440574646,
-0.06937994062900543,
0.027459679171442986,
-0.03459290415048599,
-0.010026199743151665,
0.021097425371408463,
-0.005238209385424852,
-0.0664772316813469,
0.06410349160432816,
-0.007387733552604914,
-0.03350917622447014,
0.049198053777217865,
-0.026803281158208847,
-0.020154889672994614,
0.06878997385501862,
-0.012812793254852295,
-0.09092095494270325,
0.021417755633592606,
0.031616292893886566,
0.02867880091071129,
0.07964819669723511,
-0.019225431606173515,
-0.03642602264881134,
-0.026479866355657578,
-0.02323140762746334,
0.07333720475435257,
-0.019407225772738457,
0.020319076254963875,
-0.038190193474292755,
-0.0724368467926979,
-0.022119887173175812,
-0.02242356725037098,
0.025620650500059128,
-0.0012516725109890103,
0.010966658592224121,
-0.01779918745160103,
0.029187846928834915,
0.021443139761686325,
0.01319079753011465,
-0.08586172014474869,
-0.007581603713333607,
0.0019043961074203253,
0.02850472927093506,
0.0037151265423744917,
0.0200271625071764,
-0.06360498070716858,
-1.1948534203491859e-32,
-0.024078430607914925,
-0.0033483384177088737,
-0.08328231424093246,
-0.028051869943737984,
0.01292575802654028,
0.031663928180933,
0.07716495543718338,
0.04619831591844559,
0.08958950638771057,
-0.10830102860927582,
-0.0018557016737759113,
-0.057738177478313446,
-0.0045601665042340755,
-0.058737874031066895,
0.0015459356363862753,
0.06131986901164055,
-0.06194015592336655,
0.020274728536605835,
-0.10129213333129883,
0.059732332825660706,
0.006783058401197195,
-0.030736824497580528,
-0.021341251209378242,
0.026074986904859543,
-0.04377981275320053,
0.05558835715055466,
0.12561050057411194,
-0.07770506292581558,
-0.03297986090183258,
-0.04262382909655571,
-0.04252834990620613,
-0.016368385404348373,
-0.04326925799250603,
0.12381283193826675,
-0.04683361575007439,
0.000869272684212774,
0.017446324229240417,
-0.08564843237400055,
-0.07677825540304184,
0.04520844668149948,
0.0056381565518677235,
0.03681354224681854,
0.010262912139296532,
-0.004992757458239794,
0.009269514121115208,
-0.004327399656176567,
-0.023210110142827034,
-0.042724646627902985,
0.04498881474137306,
-0.00022489263210445642,
0.0027611544355750084,
-0.029577720910310745,
-0.06973682343959808,
0.03735799342393875,
-0.0815594345331192,
-0.08219446241855621,
-0.013729068450629711,
-0.09354711323976517,
-0.10643617063760757,
-0.04506269097328186,
0.02033282443881035,
-0.004859630949795246,
-0.08861220628023148,
0.005497724749147892,
0.07584349066019058,
-0.0195994321256876,
-0.016231713816523552,
0.07128944993019104,
0.032884519547224045,
0.00013961389777250588,
0.0019049469847232103,
-0.08541619032621384,
-0.14974406361579895,
-0.07113438844680786,
-0.058010995388031006,
-0.013505990616977215,
-0.034686822444200516,
-0.002006418304517865,
-0.031950339674949646,
0.03873535990715027,
-0.008557400666177273,
-0.06726565957069397,
0.02805127203464508,
0.027077246457338333,
0.005432657897472382,
-0.0104295052587986,
-0.03314245864748955,
-0.035235919058322906,
-0.04067680612206459,
0.07238361984491348,
-0.039809148758649826,
-0.0029848243575543165,
0.00809545535594225,
0.026339851319789886,
-0.07870090007781982,
-6.428025756122224e-8,
0.0673142671585083,
-0.0399533174932003,
-0.05798117816448212,
0.022166645154356956,
-0.01507109496742487,
-0.012251515872776508,
-0.052730295807123184,
-0.009000647813081741,
0.05128566548228264,
0.05273238942027092,
0.09320645034313202,
0.038141246885061264,
-0.025452926754951477,
-0.01136099360883236,
0.011495798826217651,
0.07037415355443954,
-0.09011109918355942,
0.08877522498369217,
-0.043950822204351425,
-0.10654125362634659,
-0.0238510649651289,
0.017223114147782326,
-0.01971282623708248,
-0.050373855978250504,
-0.0459897555410862,
0.02272934280335903,
-0.015100079588592052,
0.028907403349876404,
0.012784010730683804,
-0.05458573251962662,
-0.06165655329823494,
-0.027368532493710518,
-0.026480233296751976,
-0.028028277680277824,
0.02271389588713646,
0.04194968566298485,
0.09878478944301605,
-0.09287133067846298,
0.060491207987070084,
-0.040197260677814484,
0.11387024819850922,
0.0066058821976184845,
-0.02250497415661812,
0.02606392279267311,
0.05660415068268776,
-0.013149014674127102,
0.06765405833721161,
-0.008728495799005032,
0.030420800670981407,
-0.01574065536260605,
-0.0520024448633194,
-0.009882157668471336,
0.013159814290702343,
0.006802889984101057,
-0.07687092572450638,
0.058192282915115356,
0.011839241720736027,
-0.026819849386811256,
0.03392721712589264,
-0.006093511823564768,
0.09689653664827347,
0.01064430084079504,
-0.005459346808493137,
0.017133137211203575
] |
TurkuNLP/bert-base-finnish-uncased-v1 | 8dce1e623b1b072e4d95f82d11051678b068d37a | 2021-05-18T22:46:38.000Z | [
"pytorch",
"tf",
"jax",
"bert",
"fill-mask",
"fi",
"arxiv:1912.07076",
"arxiv:1908.04212",
"transformers",
"autotrain_compatible"
] | fill-mask | false | TurkuNLP | null | TurkuNLP/bert-base-finnish-uncased-v1 | 3,020 | null | transformers | ---
language: fi
---
## Quickstart
**Release 1.0** (November 25, 2019)
Download the models here:
* Cased Finnish BERT Base: [bert-base-finnish-cased-v1.zip](http://dl.turkunlp.org/finbert/bert-base-finnish-cased-v1.zip)
* Uncased Finnish BERT Base: [bert-base-finnish-uncased-v1.zip](http://dl.turkunlp.org/finbert/bert-base-finnish-uncased-v1.zip)
We generally recommend the use of the cased model.
Paper presenting Finnish BERT: [arXiv:1912.07076](https://arxiv.org/abs/1912.07076)
## What's this?
A version of Google's [BERT](https://github.com/google-research/bert) deep transfer learning model for Finnish. The model can be fine-tuned to achieve state-of-the-art results for various Finnish natural language processing tasks.
FinBERT features a custom 50,000 wordpiece vocabulary that has much better coverage of Finnish words than e.g. the previously released [multilingual BERT](https://github.com/google-research/bert/blob/master/multilingual.md) models from Google:
| Vocabulary | Example |
|------------|---------|
| FinBERT | Suomessa vaihtuu kesän aikana sekä pääministeri että valtiovarain ##ministeri . |
| Multilingual BERT | Suomessa vai ##htuu kes ##än aikana sekä p ##ää ##minister ##i että valt ##io ##vara ##in ##minister ##i . |
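The snippet below is an illustrative sketch (not part of the original release notes) that reproduces this comparison, assuming both tokenizers are available on the Hugging Face Hub under the ids used here:
```python
from transformers import AutoTokenizer

sentence = "Suomessa vaihtuu kesän aikana sekä pääministeri että valtiovarainministeri."

# Compare FinBERT's Finnish wordpiece vocabulary with multilingual BERT's
finbert_tokenizer = AutoTokenizer.from_pretrained("TurkuNLP/bert-base-finnish-uncased-v1")
mbert_tokenizer = AutoTokenizer.from_pretrained("bert-base-multilingual-cased")

print("FinBERT:", finbert_tokenizer.tokenize(sentence))
print("M-BERT: ", mbert_tokenizer.tokenize(sentence))
```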
FinBERT has been pre-trained for 1 million steps on over 3 billion tokens (24B characters) of Finnish text drawn from news, online discussion, and internet crawls. By contrast, Multilingual BERT was trained on Wikipedia texts, where the Finnish Wikipedia text is approximately 3% of the amount used to train FinBERT.
These features allow FinBERT to outperform not only Multilingual BERT but also all previously proposed models when fine-tuned for Finnish natural language processing tasks.
## Results
### Document classification

FinBERT outperforms multilingual BERT (M-BERT) on document classification over a range of training set sizes on the Yle news (left) and Ylilauta online discussion (right) corpora. (Baseline classification performance with [FastText](https://fasttext.cc/) included for reference.)
[[code](https://github.com/spyysalo/finbert-text-classification)][[Yle data](https://github.com/spyysalo/yle-corpus)] [[Ylilauta data](https://github.com/spyysalo/ylilauta-corpus)]
### Named Entity Recognition
Evaluation on the FiNER corpus ([Ruokolainen et al. 2019](https://arxiv.org/abs/1908.04212))
| Model | Accuracy |
|--------------------|----------|
| **FinBERT** | **92.40%** |
| Multilingual BERT | 90.29% |
| [FiNER-tagger](https://github.com/Traubert/FiNer-rules) (rule-based) | 86.82% |
(FiNER tagger results from [Ruokolainen et al. 2019](https://arxiv.org/pdf/1908.04212.pdf))
[[code](https://github.com/jouniluoma/keras-bert-ner)][[data](https://github.com/mpsilfve/finer-data)]
### Part of speech tagging
Evaluation on three Finnish corpora annotated with [Universal Dependencies](https://universaldependencies.org/) part-of-speech tags: the Turku Dependency Treebank (TDT), FinnTreeBank (FTB), and Parallel UD treebank (PUD)
| Model | TDT | FTB | PUD |
|-------------------|-------------|-------------|-------------|
| **FinBERT** | **98.23%** | **98.39%** | **98.08%** |
| Multilingual BERT | 96.97% | 95.87% | 97.58% |
[[code](https://github.com/spyysalo/bert-pos)][[data](http://hdl.handle.net/11234/1-2837)]
## Use with PyTorch
If you want to use the model with the huggingface/transformers library, follow the steps in [huggingface_transformers.md](https://github.com/TurkuNLP/FinBERT/blob/master/huggingface_transformers.md)
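As a quick illustration, the following minimal fill-mask sketch assumes the model is published on the Hub under this repository's id (the example sentence is made up):
```python
from transformers import pipeline

# Load the uncased Finnish BERT through the fill-mask pipeline
unmasker = pipeline("fill-mask", model="TurkuNLP/bert-base-finnish-uncased-v1")

# The uncased tokenizer lowercases the input text
for prediction in unmasker("Helsinki on Suomen [MASK]."):
    print(prediction["token_str"], round(prediction["score"], 3))
```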
## Previous releases
### Release 0.2
**October 24, 2019** We release a beta version of the BERT base uncased model trained from scratch on a corpus of Finnish news, online discussions, and crawled data.
Download the model here: [bert-base-finnish-uncased.zip](http://dl.turkunlp.org/finbert/bert-base-finnish-uncased.zip)
### Release 0.1
**September 30, 2019** We release a beta version of the BERT base cased model trained from scratch on a corpus of Finnish news, online discussions, and crawled data.
Download the model here: [bert-base-finnish-cased.zip](http://dl.turkunlp.org/finbert/bert-base-finnish-cased.zip)
| [
-0.16832590103149414,
-0.0689454898238182,
0.06604766100645065,
-0.017779672518372536,
0.01866876147687435,
0.04125729203224182,
-0.020137451589107513,
0.05643831193447113,
0.01880800724029541,
-0.01820128783583641,
0.014650757424533367,
0.04931866005063057,
-0.011718042194843292,
0.09333109110593796,
-0.013041829690337181,
0.005817146506160498,
0.05682358518242836,
-0.011451474390923977,
-0.04755101352930069,
0.05295844003558159,
0.03762798011302948,
0.039542678743600845,
0.03220761939883232,
-0.09331932663917542,
0.0792045071721077,
-0.0027472865767776966,
-0.0064142909832298756,
-0.11644624918699265,
0.0590452216565609,
0.03798658773303032,
-0.009782621636986732,
0.04098382219672203,
0.023739002645015717,
0.046902887523174286,
0.05334951728582382,
0.04048549383878708,
-0.030823642387986183,
-0.0037657914217561483,
-0.004836801905184984,
0.030656134709715843,
0.015010601840913296,
-0.06571071594953537,
-0.004179308190941811,
0.028120046481490135,
0.07893235981464386,
0.021226083859801292,
-0.05179494619369507,
0.0324537493288517,
-0.040529411286115646,
0.015357084572315216,
-0.050853315740823746,
-0.06318361312150955,
0.030304841697216034,
0.1093766912817955,
0.0029605089221149683,
0.01277479249984026,
0.012539597228169441,
-0.03602689877152443,
-0.06761147081851959,
-0.06604637950658798,
-0.07870063930749893,
-0.04869686812162399,
-0.02318565733730793,
-0.05589253827929497,
-0.024272354319691658,
0.03991943597793579,
-0.05249008163809776,
0.02558538317680359,
-0.013918831944465637,
0.01347399689257145,
0.08632492274045944,
0.0665351003408432,
-0.07413452863693237,
0.05149277672171593,
-0.01731865480542183,
-0.059165142476558685,
0.06362774223089218,
0.0078397486358881,
0.038476210087537766,
-0.03819228708744049,
0.05556609109044075,
-0.004500233568251133,
0.04566420242190361,
-0.0076868473552167416,
0.05413345992565155,
-0.008437146432697773,
0.05726317688822746,
-0.05125740170478821,
-0.031025810167193413,
-0.024539558216929436,
-0.003078011330217123,
-0.060800693929195404,
0.054514072835445404,
-0.06370440125465393,
0.01873585395514965,
-0.007042538840323687,
0.0594291165471077,
0.04184011369943619,
-0.00308129726909101,
0.04596604034304619,
0.010547947138547897,
0.028903815895318985,
0.05243714526295662,
-0.09324076026678085,
0.040345001965761185,
0.04135417193174362,
-0.00899846013635397,
-0.014016935601830482,
0.05391883850097656,
-0.1011422798037529,
-0.03240356594324112,
0.0014691069955006242,
-0.046034254133701324,
-0.0578252337872982,
-0.01232054177671671,
-0.0530252605676651,
-0.02054746262729168,
0.010875308886170387,
0.07067493349313736,
0.07625438272953033,
0.033693552017211914,
-0.002057289006188512,
0.01061182375997305,
0.07812049984931946,
-0.06376396864652634,
0.0385364331305027,
-0.03551929444074631,
1.1844033123045742e-33,
0.046116527169942856,
0.017103811725974083,
-0.0744488388299942,
-0.028437014669179916,
0.03968004882335663,
-0.061552468687295914,
-0.010606056079268456,
-0.0081177344545722,
-0.05294928327202797,
0.0038044520188122988,
-0.1262291669845581,
0.040534310042858124,
-0.07397375255823135,
0.042334627360105515,
-0.041555628180503845,
0.050831831991672516,
-0.011966211721301079,
0.03780831769108772,
0.07857401669025421,
-0.0015085962368175387,
0.1247340515255928,
0.07814398407936096,
0.008556274697184563,
-0.05106620863080025,
-0.05179943889379501,
0.03508375212550163,
0.052866674959659576,
-0.09661241620779037,
0.030643217265605927,
0.03692648559808731,
-0.10718328505754471,
0.02607465535402298,
-0.05405851826071739,
0.04138139635324478,
-0.025351112708449364,
0.01285143569111824,
-0.05807268247008324,
-0.0679917261004448,
0.029928794130682945,
-0.06212475150823593,
0.002403205493465066,
-0.0007153888000175357,
-0.0052373833023011684,
-0.024281129240989685,
0.03224460780620575,
-0.03276074305176735,
-0.00787276215851307,
-0.07683880627155304,
0.03682113438844681,
-0.038276009261608124,
0.04966512694954872,
0.017552850767970085,
-0.08735542744398117,
0.0013984576798975468,
-0.05456743389368057,
-0.021512867882847786,
0.1083449274301529,
0.024654021486639977,
0.03977801278233528,
0.048605117946863174,
0.056248199194669724,
0.012607735581696033,
0.04903484880924225,
0.018288787454366684,
-0.006787883583456278,
-0.03261452540755272,
-0.02502807229757309,
0.009722801856696606,
0.02023273892700672,
-0.005967225879430771,
-0.036139313131570816,
-0.002015264704823494,
0.028225868940353394,
0.008300303481519222,
0.000583386979997158,
-0.0707060769200325,
0.042324986308813095,
-0.07563436776399612,
-0.06607364118099213,
-0.038837216794490814,
-0.028811698779463768,
0.04558003693819046,
-0.05310758203268051,
-0.025818688794970512,
-0.09311403334140778,
0.017712609842419624,
0.024003392085433006,
-0.08596080541610718,
-0.03565855324268341,
-0.026766950264573097,
0.03942741081118584,
-0.036949966102838516,
-0.022440237924456596,
0.03727922961115837,
0.023566892370581627,
-1.413189134956643e-33,
0.0425250306725502,
-0.03120284155011177,
-0.06786565482616425,
0.017282327637076378,
-0.08982669562101364,
-0.08516135066747665,
0.028678085654973984,
0.15145254135131836,
0.0005790084833279252,
0.00035772143746726215,
-0.06320331245660782,
-0.04434660077095032,
0.004968659020960331,
0.006872681900858879,
0.016460783779621124,
0.044784851372241974,
-0.024896614253520966,
-0.05562500283122063,
-0.002551846671849489,
-0.010216613300144672,
0.002976977964863181,
-0.01615665853023529,
-0.10513849556446075,
0.11382026970386505,
-0.015058598481118679,
0.028772184625267982,
-0.04662241414189339,
-0.020298423245549202,
0.006253927014768124,
0.019528159871697426,
-0.08809094876050949,
-0.04973017796874046,
-0.04378927871584892,
0.04491366446018219,
-0.07290477305650711,
0.08265568315982819,
0.057749465107917786,
-0.032845743000507355,
-0.019746897742152214,
0.02680938132107258,
-0.0011028335429728031,
-0.0184455756098032,
-0.005022071301937103,
0.0630035325884819,
0.024631870910525322,
-0.0005471727927215397,
-0.07493521273136139,
-0.020499126985669136,
0.03405992314219475,
-0.10103511810302734,
0.06461793929338455,
0.05575565621256828,
-0.08242619782686234,
-0.003651220817118883,
-0.05181926488876343,
-0.03750837221741676,
0.01880769245326519,
-0.05864262580871582,
-0.05771360918879509,
0.03419221192598343,
-0.0067413728684186935,
-0.046501293778419495,
0.01674908958375454,
-0.020312601700425148,
-0.03915776312351227,
-0.030291464179754257,
-0.03331121429800987,
0.04825925827026367,
-0.04516962543129921,
-0.013562670908868313,
-0.05383250117301941,
-0.0460733063519001,
0.06889442354440689,
0.06889642775058746,
0.05230417847633362,
-0.016514284536242485,
0.0881609171628952,
-0.010384775698184967,
0.0025701322592794895,
-0.02375083789229393,
-0.0699077695608139,
-0.07143134623765945,
-0.02789389342069626,
0.0940752848982811,
0.04399382323026657,
0.05127042159438133,
0.07145849615335464,
0.014695345424115658,
-0.019241023808717728,
-0.027980247512459755,
0.0011926344595849514,
0.06664161384105682,
-0.011155681684613228,
0.10503429174423218,
0.02109633944928646,
-5.0986518118634194e-8,
-0.039198800921440125,
0.027316870167851448,
-0.015786394476890564,
0.02310723438858986,
0.002691560657694936,
-0.060532718896865845,
-0.04460718855261803,
0.033627673983573914,
-0.03548480570316315,
-0.018720945343375206,
-0.06955549120903015,
0.08834508061408997,
-0.1135820522904396,
0.0021357766818255186,
-0.020914247259497643,
0.10196313261985779,
-0.01750326342880726,
0.04350122809410095,
0.019822517409920692,
0.05720309168100357,
-0.008800622075796127,
0.07975928485393524,
0.03047710657119751,
-0.09824725985527039,
-0.033770523965358734,
-0.058190979063510895,
0.006631056312471628,
0.07864527404308319,
0.005514843855053186,
-0.0010194593342021108,
-0.04235389083623886,
0.07751278579235077,
-0.09066629409790039,
0.008210672996938229,
0.10421665757894516,
0.11239985376596451,
-0.012460666708648205,
-0.013225413858890533,
-0.03870641067624092,
0.054552048444747925,
0.08927083015441895,
0.03205835819244385,
-0.09841764718294144,
0.0007199624087661505,
0.07108572125434875,
0.006966763641685247,
-0.005336441099643707,
-0.05922568589448929,
0.04559919238090515,
0.09603320062160492,
0.07216943800449371,
-0.05644990876317024,
-0.028644710779190063,
0.1009560227394104,
-0.013739552348852158,
0.039588212966918945,
-0.04765984043478966,
-0.046629648655653,
0.046830710023641586,
0.034526973962783813,
0.04384591430425644,
0.06203554943203926,
0.008627096191048622,
0.11331696808338165
] |
florentiino/DialoGPT-small-harrypotter | f45e5d9b12e85a4a2c7c1ad13cdc55c06996c923 | 2022-07-23T06:43:40.000Z | [
"pytorch",
"gpt2",
"text-generation",
"transformers",
"conversational"
] | conversational | false | florentiino | null | florentiino/DialoGPT-small-harrypotter | 3,017 | null | transformers | ---
tags:
- conversational
---
# Have a chat with Dumbledore
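A minimal chat-loop sketch is shown below, assuming the standard DialoGPT generation recipe; the number of turns and the generation settings are illustrative and not part of the original card.
```python
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("florentiino/DialoGPT-small-harrypotter")
model = AutoModelForCausalLM.from_pretrained("florentiino/DialoGPT-small-harrypotter")

chat_history_ids = None
for step in range(5):
    # Encode the user input and append the end-of-sequence token
    new_input_ids = tokenizer.encode(input(">> User: ") + tokenizer.eos_token, return_tensors="pt")

    # Append the new user input to the chat history
    bot_input_ids = (
        torch.cat([chat_history_ids, new_input_ids], dim=-1) if chat_history_ids is not None else new_input_ids
    )

    # Generate a response conditioned on the whole chat history
    chat_history_ids = model.generate(bot_input_ids, max_length=1000, pad_token_id=tokenizer.eos_token_id)

    # Print only the newly generated tokens
    print("Dumbledore:", tokenizer.decode(chat_history_ids[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True))
```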
| [
-0.024625185877084732,
-0.0510760173201561,
0.03674760460853577,
-0.024174345657229424,
0.01721636950969696,
-0.07089806348085403,
0.13766254484653473,
-0.004875658545643091,
0.01595097780227661,
-0.07136965543031693,
-0.04966743662953377,
-0.05024844780564308,
-0.029419852420687675,
-0.006106208544224501,
-0.008752011694014072,
0.010795005597174168,
0.00668153865262866,
-0.05181850865483284,
-0.012879542075097561,
0.07106131315231323,
0.020435791462659836,
0.05850423499941826,
0.06565169245004654,
0.0255343746393919,
0.028256651014089584,
-0.06332959979772568,
-0.015169287100434303,
0.008538947440683842,
-0.03439217805862427,
0.015432129614055157,
0.08844929933547974,
0.049569856375455856,
0.03585183992981911,
0.04037903621792793,
-0.07659102976322174,
0.02511831559240818,
0.04005773738026619,
0.07556924968957901,
0.026079311966896057,
-0.052831247448921204,
-0.034492261707782745,
-0.06865394860506058,
-0.09954027086496353,
0.06809287518262863,
-0.030915675684809685,
0.010153534822165966,
-0.09153773635625839,
-0.016910463571548462,
-0.08087105304002762,
0.07423141598701477,
-0.0946434736251831,
0.020434578880667686,
0.029664210975170135,
0.08618006855249405,
0.012479506433010101,
0.051732830703258514,
-0.05400887876749039,
-0.024752072989940643,
0.10313476622104645,
0.04291628301143646,
-0.0033344198018312454,
-0.04942459985613823,
0.02399592287838459,
0.06418557465076447,
-0.026575367897748947,
0.02339349500834942,
-0.04086070880293846,
0.05794013664126396,
-0.07005872577428818,
0.044002778828144073,
0.0373968631029129,
-0.010809036903083324,
0.060502078384160995,
-0.03771815821528435,
0.03346656262874603,
-0.013788529671728611,
-0.07123047858476639,
-0.04413438215851784,
-0.011181906796991825,
-0.00818636454641819,
-0.07318505644798279,
-0.11948367208242416,
0.0013342383317649364,
0.004744373261928558,
0.005530691239982843,
-0.007242071442306042,
0.04492449760437012,
-0.06028817221522331,
-0.07070612907409668,
0.03198087215423584,
-0.028175538405776024,
0.0023256249260157347,
0.07651140540838242,
0.06363768130540848,
0.008720497600734234,
0.00638539856299758,
0.02677980810403824,
0.002190365456044674,
-0.0968065857887268,
0.12923386693000793,
-0.025173135101795197,
0.016092579811811447,
0.025635039433836937,
-0.016455963253974915,
-0.029643798246979713,
0.018353870138525963,
-0.03390689939260483,
-0.04735473543405533,
-0.00610411586239934,
-0.03747420012950897,
-0.05604678764939308,
-0.06477370858192444,
0.08361925929784775,
-0.059334542602300644,
0.06876290589570999,
-0.01783306524157524,
0.11682640761137009,
0.03367678076028824,
0.060419004410505295,
-0.03290233016014099,
0.05676569789648056,
0.026157783344388008,
-0.03991607576608658,
-0.04516817256808281,
-0.018608780577778816,
-0.01977313868701458,
-0.07849320024251938,
-3.6272060206010395e-33,
0.11618183553218842,
-0.03539658710360527,
0.014914200641214848,
0.05117662996053696,
0.004797737579792738,
0.0355900377035141,
-0.09218946099281311,
-0.04948112368583679,
-0.004621579311788082,
-0.06704875081777573,
0.09449372440576553,
-0.11528614163398743,
-0.025449009612202644,
0.03989936038851738,
-0.005955499596893787,
-0.00043970884871669114,
0.003701987210661173,
0.03696262091398239,
0.057616326957941055,
-0.046280693262815475,
-0.07413335144519806,
-0.003173281904309988,
0.007785172667354345,
0.08683795481920242,
0.020466094836592674,
0.07762964069843292,
0.062109023332595825,
-0.09040780365467072,
0.05055088549852371,
0.048626918345689774,
-0.05622052401304245,
-0.039151307195425034,
-0.01259978674352169,
-0.018845783546566963,
-0.0199104156345129,
-0.00797424465417862,
-0.013276095502078533,
-0.07284907251596451,
-0.039279427379369736,
-0.05881473422050476,
-0.031162941828370094,
-0.05061497166752815,
-0.0491454117000103,
-0.10761558264493942,
-0.021380938589572906,
0.0509008914232254,
0.03746747970581055,
0.0337066575884819,
0.03333504870533943,
0.07304508984088898,
-0.04416019096970558,
-0.009000985883176327,
-0.002455516019836068,
-0.008550162427127361,
-0.014550724066793919,
-0.017133865505456924,
-0.04965829849243164,
0.010905899107456207,
0.08602438867092133,
-0.02534009888768196,
-0.030631249770522118,
0.022386452183127403,
0.017973121255636215,
-0.092824786901474,
0.04157564043998718,
0.021543607115745544,
-0.04898369684815407,
-0.013873131945729256,
-0.0323585569858551,
-0.09616398066282272,
-0.06950162351131439,
0.07319679856300354,
-0.05270669609308243,
0.07495884597301483,
-0.08561448007822037,
0.05667969584465027,
-0.07369394600391388,
0.001365323318168521,
-0.007394281215965748,
0.021818041801452637,
-0.03127925470471382,
-0.048967260867357254,
-0.061464086174964905,
-0.00032681127777323127,
-0.03123142011463642,
-0.005452726501971483,
-0.008956735953688622,
-0.1414901167154312,
0.024595385417342186,
0.07532999664545059,
0.0013174191117286682,
-0.018790585920214653,
-0.049723070114851,
-0.05335784703493118,
-0.07956226170063019,
2.01890291500064e-33,
0.06339868158102036,
-0.01920233853161335,
-0.097213976085186,
0.08498293906450272,
0.034676551818847656,
-0.003953848034143448,
-0.004754004068672657,
0.0654304251074791,
0.046940285712480545,
-0.041099466383457184,
-0.054790984839200974,
0.022072257474064827,
-0.07715795934200287,
-0.01548374630510807,
0.13589376211166382,
0.07507301867008209,
0.11615891009569168,
-0.035498667508363724,
-0.022587904706597328,
-0.0005653473199345171,
0.04713378846645355,
-0.028984440490603447,
-0.0789085403084755,
0.004395901691168547,
0.01804041489958763,
0.03057497926056385,
-0.015019893646240234,
0.07137125730514526,
0.015673626214265823,
-0.049161817878484726,
0.018933342769742012,
0.04803697019815445,
-0.04979651793837547,
-0.04083861783146858,
0.028024684637784958,
0.0024244626984000206,
0.0476214624941349,
-0.027102667838335037,
0.020116206258535385,
0.01617329940199852,
0.038512952625751495,
-0.019107835367321968,
0.017578354105353355,
0.024782819673419,
0.013748624362051487,
0.002788190497085452,
-0.014744587242603302,
-0.018875790759921074,
-0.07080623507499695,
0.03559843450784683,
0.062019381672143936,
-0.036921124905347824,
-0.03492362052202225,
-0.13937310874462128,
-0.022995902225375175,
-0.04612351581454277,
0.03231620788574219,
0.039307478815317154,
-0.05849303677678108,
-0.041974615305662155,
-0.02654881216585636,
-0.08996672183275223,
0.059541005641222,
0.004060712642967701,
-0.009119213558733463,
-0.005738405045121908,
-0.089176706969738,
0.031496625393629074,
-0.04176530987024307,
0.0041967579163610935,
0.07940532267093658,
-0.016745487228035927,
-0.04965726286172867,
0.028753913938999176,
0.0629245787858963,
0.07911351323127747,
0.05043807625770569,
0.03205503523349762,
0.03416992723941803,
0.03224237263202667,
0.0049498542211949825,
0.04819860681891441,
0.012155490927398205,
0.015346969477832317,
0.07829362154006958,
0.005251107271760702,
0.00949660874903202,
-0.01007759477943182,
-0.028693323954939842,
-0.08811420202255249,
0.04430216923356056,
0.002890906762331724,
0.055464960634708405,
0.05272670462727547,
0.06116664782166481,
-2.3277815230926535e-8,
-0.0611872635781765,
-0.07331554591655731,
-0.004759558942168951,
-0.020448820665478706,
0.03503040969371796,
0.03931130841374397,
0.04284561797976494,
0.02836737409234047,
-0.004020356573164463,
0.004247364122420549,
0.017519276589155197,
0.04403679072856903,
-0.03584391623735428,
0.04161128029227257,
0.06518099457025528,
-0.007748038042336702,
-0.033405400812625885,
-0.022292792797088623,
-0.03737823665142059,
0.00933124590665102,
0.030575348064303398,
-0.02652718871831894,
-0.0676565170288086,
0.055614810436964035,
0.014478246681392193,
0.007147961296141148,
0.04504135996103287,
-0.014933372847735882,
-0.02986464835703373,
0.08656080812215805,
0.05726241692900658,
0.059603940695524216,
-0.07297045737504959,
-0.06853717565536499,
-0.02059454284608364,
-0.016817227005958557,
-0.10793878883123398,
0.0005346992402337492,
0.06717477738857269,
0.06113841012120247,
0.04155363142490387,
0.04689422994852066,
-0.04086204245686531,
-0.016726689413189888,
0.01712457835674286,
0.0244436152279377,
-0.07297345250844955,
-0.05877208337187767,
-0.04830559343099594,
-0.07536761462688446,
-0.06412439048290253,
-0.007092484273016453,
0.11150238662958145,
0.01723354682326317,
0.031768884509801865,
0.016443977132439613,
0.06832527369260788,
0.0720524787902832,
0.015455186367034912,
0.02002689242362976,
0.09016445279121399,
0.09732864797115326,
-0.01222043763846159,
0.012380401603877544
] |
AlexKay/xlm-roberta-large-qa-multilingual-finedtuned-ru | 6cc14366f0cc95428a695d30594a93dd6935d800 | 2022-07-19T15:33:20.000Z | [
"pytorch",
"xlm-roberta",
"question-answering",
"en",
"ru",
"multilingual",
"arxiv:1912.09723",
"transformers",
"license:apache-2.0",
"autotrain_compatible"
] | question-answering | false | AlexKay | null | AlexKay/xlm-roberta-large-qa-multilingual-finedtuned-ru | 3,012 | 9 | transformers | ---
language:
- en
- ru
- multilingual
license: apache-2.0
---
# XLM-RoBERTa large model whole word masking finetuned on SQuAD
The model was pretrained with a masked language modeling (MLM) objective and fine-tuned on English and Russian QA datasets.
## Used QA Datasets
SQuAD + SberQuAD
The [original SberQuAD paper](https://arxiv.org/pdf/1912.09723.pdf) is available here; we recommend reading it!
## Evaluation results
The results obtained are the following (SberQuAD):
```
f1 = 84.3
exact_match = 65.3
```
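As a usage illustration, the snippet below is a minimal sketch with the question-answering pipeline (not part of the original card; the Russian question/context pair is made up):
```python
from transformers import pipeline

qa = pipeline(
    "question-answering",
    model="AlexKay/xlm-roberta-large-qa-multilingual-finedtuned-ru",
)

# The model accepts both English and Russian inputs
result = qa(
    question="Когда была основана Москва?",
    context="Москва была основана в 1147 году Юрием Долгоруким.",
)
print(result["answer"], round(result["score"], 3))
```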
| [
-0.06948976218700409,
-0.0954682007431984,
-0.03821945935487747,
0.004773532971739769,
-0.014061320573091507,
0.07217200845479965,
-0.020948501303792,
-0.009653305634856224,
0.006475478410720825,
-0.010058666579425335,
0.04818214848637581,
-0.005931887309998274,
0.04493245482444763,
-0.0021821106784045696,
-0.006205377168953419,
0.04647597670555115,
0.021656576544046402,
0.0011606246698647738,
-0.061152923852205276,
-0.13597477972507477,
0.03507023677229881,
0.02008378878235817,
0.06815143674612045,
0.010371154174208641,
0.0716189295053482,
-0.005792214535176754,
-0.003074070904403925,
0.026668529957532883,
0.06326702982187271,
-0.033226724714040756,
0.05404751002788544,
0.07593050599098206,
0.1260640025138855,
0.023312725126743317,
0.008484955877065659,
0.039501775056123734,
-0.013400642201304436,
-0.03944800794124603,
0.03360899165272713,
0.021438945084810257,
-0.09239208698272705,
-0.03909960389137268,
-0.022116336971521378,
-0.05569741129875183,
0.06841763854026794,
-0.06897703558206558,
-0.06490366160869598,
0.05132698640227318,
0.03036215901374817,
-0.055392660200595856,
-0.0989532619714737,
-0.07798852026462555,
0.010681984946131706,
0.03751353174448013,
-0.036800824105739594,
-0.1020505353808403,
0.02884986437857151,
-0.020678745582699776,
-0.041238509118556976,
-0.014111251570284367,
-0.07893679291009903,
-0.007853738032281399,
-0.05268815904855728,
0.023422960191965103,
-0.05847880244255066,
-0.013850436545908451,
-0.02263224683701992,
0.036715101450681686,
-0.007436008658260107,
0.0924181118607521,
-0.012966012582182884,
0.026951942592859268,
-0.08986668288707733,
0.03261500224471092,
0.0026074154302477837,
0.018302911892533302,
0.004843299742788076,
-0.019054792821407318,
0.08419818431138992,
-0.08284765481948853,
0.048932772129774094,
-0.06594530493021011,
0.03709863871335983,
0.01154317520558834,
0.03584849089384079,
-0.020762421190738678,
-0.019950006157159805,
0.01495527382940054,
0.007243726868182421,
0.021771825850009918,
-0.0331059992313385,
-0.05342014878988266,
0.07750272005796432,
-0.007892266847193241,
-0.06938154995441437,
0.013791092671453953,
0.04349688068032265,
0.06696371734142303,
-0.020106563344597816,
0.05562738701701164,
0.005840637721121311,
-0.029086032882332802,
-0.014354240149259567,
0.006148679181933403,
-0.09257593750953674,
-0.014410886913537979,
0.03734695538878441,
0.059539247304201126,
0.06338782608509064,
-0.13393238186836243,
0.07902111113071442,
-0.030694875866174698,
-0.04800824075937271,
0.05877726152539253,
0.017234710976481438,
0.031899794936180115,
0.0979820117354393,
-0.013056382536888123,
-0.05671665072441101,
0.031023791059851646,
-0.047945763915777206,
0.01183480303734541,
0.025286342948675156,
-0.013117176480591297,
0.03565319627523422,
0.03549712151288986,
-0.06949101388454437,
6.529842466143006e-33,
0.11779652535915375,
0.060547806322574615,
-0.05012192204594612,
-0.027507547289133072,
0.03545251861214638,
-0.03519269451498985,
0.010569081641733646,
-0.009397290647029877,
-0.061714839190244675,
0.06678342074155807,
-0.021329505369067192,
-0.0018831812776625156,
-0.0527486577630043,
0.021063905209302902,
-0.006146265659481287,
0.02597833052277565,
-0.010032124817371368,
0.011648563668131828,
-0.025078019127249718,
0.038043878972530365,
0.11209467053413391,
-0.00917617604136467,
-0.0365942046046257,
-0.04778091982007027,
0.007298975717276335,
0.06397977471351624,
0.09261557459831238,
-0.08186963200569153,
-0.013388345018029213,
0.04657834768295288,
-0.07119447737932205,
0.04414913430809975,
-0.04019463434815407,
0.046945441514253616,
0.005615980830043554,
-0.04394889622926712,
0.03417051210999489,
-0.03739903122186661,
-0.01788521744310856,
-0.01757441833615303,
0.014466225169599056,
0.007137371692806482,
0.022923627868294716,
-0.04797125235199928,
0.00975491851568222,
-0.040482472628355026,
-0.0013943093363195658,
0.03721670061349869,
0.08232336491346359,
0.06076410785317421,
0.03338923677802086,
0.01049614604562521,
-0.02542903460562229,
0.013670756481587887,
-0.005666155368089676,
0.04002007097005844,
0.05412900075316429,
0.06210092082619667,
0.03507057949900627,
0.011820395477116108,
0.06868810951709747,
-0.02101275511085987,
0.030219770967960358,
-0.005090126767754555,
0.05996938794851303,
-0.048521582037210464,
-0.034528691321611404,
-0.07258575409650803,
0.0039815353229641914,
0.059909287840127945,
-0.01196246687322855,
-0.06512081623077393,
0.009195257909595966,
0.07408976554870605,
0.019777730107307434,
-0.0907977893948555,
0.016434360295534134,
0.011458082124590874,
-0.0015649867709726095,
-0.06034646928310394,
-0.00744808791205287,
0.08436231315135956,
0.012821285054087639,
-0.08296526223421097,
-0.11339063197374344,
-0.027084287256002426,
0.12608879804611206,
-0.03918873146176338,
-0.017134854570031166,
-0.058758825063705444,
0.0006741757388226688,
0.042122866958379745,
0.06376971304416656,
0.01509333960711956,
-0.05578480288386345,
-6.554029731779113e-33,
-0.02774551883339882,
0.058015525341033936,
-0.022494861856102943,
0.06898826360702515,
-0.03341825678944588,
-0.026542440056800842,
0.09252084791660309,
0.1381969004869461,
0.0416257344186306,
0.010759296827018261,
0.05133870989084244,
-0.06890104711055756,
-0.0032103070989251137,
-0.029811661690473557,
0.09182826429605484,
-0.07659243792295456,
0.0440780408680439,
-0.017252592369914055,
-0.03821089491248131,
0.026809532195329666,
-0.08305191993713379,
0.03822648525238037,
0.031659986823797226,
0.027324266731739044,
-0.021870126947760582,
-0.031113456934690475,
0.0003500089223962277,
0.019057296216487885,
-0.046582333743572235,
0.04783878102898598,
0.007580955978482962,
-0.006526125129312277,
-0.09624837338924408,
0.0016290368512272835,
-0.136318176984787,
0.04045698419213295,
-0.005100967828184366,
0.0063113858923316,
-0.0072687300853431225,
0.056553393602371216,
0.012058703228831291,
-0.00031604268588125706,
-0.12749332189559937,
0.06302352249622345,
-0.00867523904889822,
-0.0858379527926445,
-0.0722234770655632,
-0.07015356421470642,
-0.009357371367514133,
-0.07531248033046722,
0.00047010250273160636,
0.01706063002347946,
-0.020570354536175728,
0.03025190159678459,
-0.0577855221927166,
-0.07092072069644928,
-0.035054437816143036,
-0.045470405369997025,
-0.04072060436010361,
-0.011157505214214325,
-0.014619508758187294,
0.04760940000414848,
-0.03275799751281738,
-0.018263883888721466,
0.06991121172904968,
-0.06543862074613571,
0.002091375645250082,
0.06744451075792313,
0.01261969469487667,
-0.02068175934255123,
0.0747375413775444,
-0.05397424101829529,
0.030401676893234253,
0.14343592524528503,
0.03082050010561943,
-0.0438721738755703,
-0.11355897784233093,
0.02307184971868992,
0.020340485498309135,
0.03556086868047714,
-0.005264613311737776,
0.019534599035978317,
-0.045804087072610855,
0.06642685830593109,
0.03150225803256035,
0.03652330860495567,
0.02186432294547558,
0.09284969419240952,
-0.004586859606206417,
0.007542969658970833,
0.029460160061717033,
0.019918780773878098,
0.03195996209979057,
0.046824414283037186,
0.038160037249326706,
-4.90932947627698e-8,
-0.005490773823112249,
0.007288034074008465,
-0.09235270321369171,
0.015181870199739933,
-0.04104539006948471,
-0.09614653140306473,
-0.0821804478764534,
-0.023321736603975296,
-0.021578092128038406,
0.03492935374379158,
-0.0351756326854229,
0.012054618448019028,
-0.10028146207332611,
0.024184323847293854,
-0.08062905073165894,
0.034861475229263306,
0.014872784726321697,
0.12235341966152191,
-0.03878355771303177,
-0.023954126983880997,
0.013537950813770294,
0.027340879663825035,
-0.025311769917607307,
-0.059817660599946976,
0.01273043267428875,
-0.003938071429729462,
-0.11187873780727386,
0.024645337834954262,
0.009510420262813568,
0.07021874189376831,
-0.045304637402296066,
0.037666626274585724,
-0.06619682908058167,
-0.03450917452573776,
0.021076420322060585,
0.053551048040390015,
-0.00879594124853611,
0.01264737918972969,
0.01473865658044815,
0.07096122205257416,
0.0002495883672963828,
-0.05015358701348305,
-0.08502607047557831,
-0.02104271575808525,
0.0408330038189888,
0.02703852392733097,
-0.052214283496141434,
-0.1598069965839386,
-0.0178317129611969,
0.019609583541750908,
0.1601017415523529,
-0.04809658229351044,
0.011876745149493217,
0.02384147047996521,
0.012009330093860626,
0.020545801147818565,
-0.00259701581671834,
-0.0055046239867806435,
0.06763061881065369,
-0.016246790066361427,
0.03108331188559532,
0.008250844664871693,
-0.03296349197626114,
-0.017389824613928795
] |
neuraly/bert-base-italian-cased-sentiment | bea83f326b616d7fe641bc3ed92a5ce18c97dfed | 2021-09-22T09:29:18.000Z | [
"pytorch",
"tf",
"jax",
"bert",
"text-classification",
"it",
"transformers",
"sentiment",
"Italian",
"license:mit"
] | text-classification | false | neuraly | null | neuraly/bert-base-italian-cased-sentiment | 3,000 | 2 | transformers | ---
language: it
thumbnail: https://neuraly.ai/static/assets/images/huggingface/thumbnail.png
tags:
- sentiment
- Italian
license: mit
widget:
- text: Huggingface è un team fantastico!
---
# 🤗 + neuraly - Italian BERT Sentiment model
## Model description
This model performs sentiment analysis on Italian sentences. It was trained starting from an instance of [bert-base-italian-cased](https://huggingface.co/dbmdz/bert-base-italian-cased) and fine-tuned on an Italian dataset of tweets, reaching 82% accuracy on that dataset.
## Intended uses & limitations
#### How to use
```python
import torch
from torch import nn
from transformers import AutoTokenizer, AutoModelForSequenceClassification
# Load the tokenizer
tokenizer = AutoTokenizer.from_pretrained("neuraly/bert-base-italian-cased-sentiment")
# Load the model, use .cuda() to load it on the GPU
model = AutoModelForSequenceClassification.from_pretrained("neuraly/bert-base-italian-cased-sentiment")
sentence = 'Huggingface è un team fantastico!'
input_ids = tokenizer.encode(sentence, add_special_tokens=True)
# Create tensor, use .cuda() to transfer the tensor to GPU
tensor = torch.tensor(input_ids).long()
# Fake batch dimension
tensor = tensor.unsqueeze(0)
# Call the model and get the logits (recent transformers versions return a ModelOutput object)
with torch.no_grad():
    logits = model(tensor).logits
# Remove the fake batch dimension
logits = logits.squeeze(0)
# The model was trained with a combined softmax + log-likelihood (cross-entropy) loss, hence we apply a softmax on top of the logits to extract probabilities
proba = nn.functional.softmax(logits, dim=0)
# Unpack the tensor to obtain negative, neutral and positive probabilities
negative, neutral, positive = proba
```
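Alternatively, the high-level pipeline API gives the same kind of output with less code; the snippet below is a small sketch (not part of the original card, and the exact label names depend on the model configuration):
```python
from transformers import pipeline

classifier = pipeline("sentiment-analysis", model="neuraly/bert-base-italian-cased-sentiment")

# Returns the top label and its score; label names depend on the model config
print(classifier("Huggingface è un team fantastico!"))
```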
#### Limitations and bias
A possible drawback (or bias) of this model is that it was trained on a tweet dataset, with all the limitations that come with it. The domain is strongly related to football players and teams, but the model works surprisingly well even on other topics.
## Training data
We trained the model by combining the two tweet datasets taken from [Sentipolc EVALITA 2016](http://www.di.unito.it/~tutreeb/sentipolc-evalita16/data.html). Overall the dataset consists of 45K pre-processed tweets.
The model weights come from a pre-trained instance of [bert-base-italian-cased](https://huggingface.co/dbmdz/bert-base-italian-cased). A huge "thank you" goes to that team, brilliant work!
## Training procedure
#### Preprocessing
We tried to preserve as much information as possible, since BERT captures the semantics of complex text sequences extremely well. Overall we removed only **@mentions**, **urls** and **emails** from every tweet and kept pretty much everything else.
#### Hardware
- **GPU**: Nvidia GTX1080ti
- **CPU**: AMD Ryzen7 3700x 8c/16t
- **RAM**: 64GB DDR4
#### Hyperparameters
- Optimizer: **AdamW** with learning rate of **2e-5**, epsilon of **1e-8**
- Max epochs: **5**
- Batch size: **32**
- Early Stopping: **enabled** with patience = 1
Early stopping was triggered after 3 epochs.
## Eval results
The model achieves an overall accuracy of 82% on the test set.
The test set is a 20% split of the whole dataset.
## About us
[Neuraly](https://neuraly.ai) is a young and dynamic startup committed to designing AI-driven solutions and services through the most advanced Machine Learning and Data Science technologies. You can find out more about who we are and what we do on our [website](https://neuraly.ai).
## Acknowledgments
Thanks to the generous support from the [Hugging Face](https://huggingface.co/) team,
it is possible to download the model from their S3 storage and live test it from their inference API 🤗.
| [
-0.10875582695007324,
-0.04549013450741768,
0.02796611189842224,
0.06115186959505081,
0.029791146516799927,
-0.009639611467719078,
0.010277057997882366,
0.051446594297885895,
0.024766940623521805,
-0.09655532985925674,
0.012806755490601063,
-0.04750920087099075,
-0.01012419443577528,
0.04774587228894234,
-0.01101715862751007,
0.020164087414741516,
0.04669049382209778,
-0.03100239671766758,
-0.11393947154283524,
-0.08875764161348343,
0.0785803496837616,
0.028998786583542824,
0.08189257979393005,
-0.001121783279813826,
0.05268464237451553,
0.004453438799828291,
-0.039225898683071136,
-0.015685146674513817,
0.03752097487449646,
0.06992378830909729,
-0.01547912135720253,
0.05522861331701279,
-0.03718235343694687,
0.09852829575538635,
-0.016428610309958458,
0.06822467595338821,
-0.06587206572294235,
-0.03186693787574768,
0.034992896020412445,
0.037209268659353256,
-0.006506324280053377,
-0.048878833651542664,
0.008057274855673313,
-0.013757451437413692,
0.10576679557561874,
0.03085763193666935,
0.006103023886680603,
0.04411808028817177,
-0.04097779095172882,
-0.027677929028868675,
-0.10593252629041672,
-0.01645137369632721,
0.0615200437605381,
0.06514088064432144,
-0.06573708355426788,
0.06475920230150223,
0.06445266306400299,
-0.023081336170434952,
0.027400730177760124,
-0.08447585999965668,
-0.029974181205034256,
-0.02574474923312664,
0.02417108416557312,
0.004798238165676594,
-0.03798475116491318,
-0.00440082186833024,
-0.055175501853227615,
-0.001521222642622888,
-0.026417668908834457,
-0.014493748545646667,
0.05177142098546028,
0.025505581870675087,
0.03977454453706741,
0.08334735035896301,
0.0012459835270419717,
-0.01711074821650982,
0.11859423667192459,
-0.029948469251394272,
0.051029156893491745,
-0.10919538885354996,
0.03483033925294876,
-0.05246783420443535,
0.09383850544691086,
0.03422951698303223,
0.10297416895627975,
-0.039005864411592484,
0.07044588774442673,
0.0017713544657453895,
-0.031930118799209595,
0.051492538303136826,
-0.017465395852923393,
-0.07121884077787399,
0.040118683129549026,
-0.012921029701828957,
0.03547196462750435,
0.035941172391176224,
-0.030662281438708305,
-0.038625318557024,
-0.10930411517620087,
0.0903419479727745,
0.023547470569610596,
-0.005728784017264843,
0.018608346581459045,
-0.01817498356103897,
-0.022692924365401268,
0.05603596568107605,
0.02844811975955963,
0.026482852175831795,
0.028246169909834862,
-0.056594494730234146,
-0.016873573884367943,
0.026342367753386497,
-0.03471503406763077,
-0.07821521908044815,
0.03927832096815109,
-0.05196777731180191,
-0.016825923696160316,
0.04296876862645149,
0.05690639093518257,
0.09737014025449753,
0.0039644003845751286,
0.019804932177066803,
-0.031608399003744125,
0.024872470647096634,
0.009584419429302216,
0.033924493938684464,
-0.05478537455201149,
5.040390104605394e-33,
-0.004604259040206671,
0.03831924498081207,
0.005037982016801834,
-0.053736183792352676,
-0.055181071162223816,
0.0000934611598495394,
-0.018386397510766983,
-0.012979917228221893,
-0.11530730128288269,
-0.04603321850299835,
-0.091303251683712,
0.045352641493082047,
-0.07194667309522629,
0.0653553456068039,
-0.03625177964568138,
-0.022515421733260155,
-0.012881447561085224,
0.0015771834878250957,
0.05877877399325371,
-0.008709350600838661,
0.07397471368312836,
0.03204476833343506,
0.005158181767910719,
-0.07227826118469238,
-0.11423540115356445,
0.034876223653554916,
0.08940836042165756,
-0.04839261993765831,
-0.06780204176902771,
0.031108930706977844,
-0.11076905578374863,
0.06941713392734528,
0.012492389418184757,
-0.01660287193953991,
0.0600607767701149,
-0.04091508314013481,
-0.028821133077144623,
-0.015258138999342918,
0.01609751209616661,
-0.024432845413684845,
-0.021523188799619675,
0.09460785239934921,
-0.002259780652821064,
-0.07238180935382843,
-0.0346493273973465,
0.05126021057367325,
-0.029620790854096413,
-0.029212120920419693,
0.036125604063272476,
-0.006049794144928455,
0.060276325792074203,
0.015834979712963104,
-0.027001963928341866,
0.06713254749774933,
0.01923360303044319,
0.010330966673791409,
0.0685550794005394,
0.015232369303703308,
0.11984743922948837,
-0.06360689550638199,
0.023842161521315575,
0.01115382555872202,
0.06737688183784485,
-0.0487569235265255,
0.04852103441953659,
0.03291940689086914,
-0.026713091880083084,
0.07861967384815216,
-0.03688408061861992,
-0.0077490853145718575,
-0.02495543844997883,
0.004476257599890232,
-0.012599773705005646,
0.014384484849870205,
-0.005126059055328369,
-0.057744912803173065,
0.021651893854141235,
-0.09558136016130447,
-0.025168275460600853,
-0.012103092856705189,
-0.022101176902651787,
-0.043151598423719406,
0.052579108625650406,
-0.0936867818236351,
-0.06684289127588272,
-0.0024186288937926292,
0.03816855326294899,
-0.0649871900677681,
-0.007839555852115154,
0.02847333997488022,
-0.03800695389509201,
-0.05028749257326126,
0.020232591778039932,
0.011447095312178135,
-0.08138784766197205,
-4.896288823531982e-33,
-0.027256067842245102,
0.02067556604743004,
-0.08001889288425446,
0.053913556039333344,
-0.08724471926689148,
-0.0742276981472969,
-0.014008740894496441,
0.10050927102565765,
0.017337007448077202,
0.005003396887332201,
0.06980875134468079,
-0.07888428121805191,
-0.05793340504169464,
-0.07196428626775742,
0.046382494270801544,
0.009180307388305664,
0.013658934272825718,
0.018479997292160988,
-0.003921180963516235,
0.013520408421754837,
-0.0269016120582819,
-0.02483372949063778,
-0.1341760903596878,
0.06442292034626007,
-0.1234678328037262,
0.0665905624628067,
0.02345593087375164,
0.0323130302131176,
0.02759307622909546,
-0.037182267755270004,
-0.003633483313024044,
0.013851710595190525,
-0.06667913496494293,
0.0752612054347992,
-0.019590286538004875,
0.060255471616983414,
-0.007817942649126053,
-0.08577191829681396,
0.04626675695180893,
0.07734361290931702,
0.12382517755031586,
-0.0024097454734146595,
-0.03963131830096245,
0.049641337245702744,
-0.05001438036561012,
0.024077940732240677,
-0.09072043001651764,
-0.04518216848373413,
-0.04283794388175011,
-0.02396438643336296,
0.054505471140146255,
-0.022369522601366043,
-0.07933474332094193,
0.009719949215650558,
-0.05172562971711159,
-0.09400004148483276,
0.052734799683094025,
-0.05433474853634834,
-0.08101394772529602,
-0.0017759768525138497,
-0.050796836614608765,
-0.035408951342105865,
-0.00940584298223257,
-0.055992674082517624,
-0.019651811569929123,
-0.038847915828228,
-0.06533636897802353,
0.059515196830034256,
0.0460871085524559,
0.01907147467136383,
0.04046114906668663,
0.05186884105205536,
0.03029596246778965,
0.008775517344474792,
-0.0049223750829696655,
0.03244848921895027,
0.025546204298734665,
0.00817915704101324,
0.04215855523943901,
-0.0654691532254219,
-0.036785632371902466,
-0.06549771875143051,
0.0010680531850084662,
0.003217046381905675,
-0.03112044744193554,
0.02148030325770378,
0.029155515134334564,
0.06854955106973648,
0.00742447329685092,
0.024759773164987564,
0.012106288224458694,
0.026592761278152466,
0.06585802137851715,
0.03773682564496994,
0.06596720963716507,
-5.3421199908143535e-8,
-0.07133639603853226,
-0.050733089447021484,
0.008675280958414078,
0.09862164407968521,
-0.06937725841999054,
-0.010885896161198616,
0.003899394068866968,
0.04325634241104126,
-0.0023761065676808357,
-0.06052650883793831,
0.008366920053958893,
0.041782476007938385,
-0.09317836165428162,
-0.033231817185878754,
-0.03423868492245674,
0.08843320608139038,
-0.04109974205493927,
0.043101660907268524,
0.04573922976851463,
-0.015292070806026459,
0.023725321516394615,
0.021474076434969902,
-0.004470204934477806,
-0.0706934779882431,
0.016780506819486618,
-0.03768867254257202,
-0.03853476792573929,
0.0755123421549797,
-0.07990654557943344,
-0.014828355982899666,
0.0184711292386055,
-0.032004814594984055,
-0.05429479107260704,
-0.038756970316171646,
0.05782764405012131,
0.13148944079875946,
-0.047906119376420975,
-0.09545096009969711,
0.008501973934471607,
-0.022026102989912033,
0.06019972264766693,
0.054928310215473175,
-0.08248993009328842,
-0.03989066556096077,
0.041263483464717865,
-0.004898495972156525,
0.018170511350035667,
-0.07848460227251053,
0.0248368252068758,
0.03388893976807594,
0.010378807783126831,
0.005872388370335102,
-0.10337264835834503,
0.10251963883638382,
0.016654008999466896,
-0.006272118538618088,
-0.016669638454914093,
-0.013593444600701332,
0.03330734372138977,
0.06272847950458527,
0.008294151164591312,
0.025528941303491592,
-0.012666000053286552,
-0.0007034153677523136
] |
hackathon-pln-es/jurisbert-clas-art-convencion-americana-dh | 95326522994b34d3aa50dd46621701260d27d323 | 2022-03-28T18:21:03.000Z | [
"pytorch",
"tensorboard",
"roberta",
"text-classification",
"es",
"transformers",
"license:cc-by-nc-4.0"
] | text-classification | false | hackathon-pln-es | null | hackathon-pln-es/jurisbert-clas-art-convencion-americana-dh | 2,999 | 6 | transformers | ---
license: cc-by-nc-4.0
language: es
widget:
- text: "ADOPCIÓN. EL INTERÉS SUPERIOR DEL MENOR DE EDAD SE BASA EN LA IDONEIDAD DE LOS ADOPTANTES, DENTRO DE LA CUAL SON IRRELEVANTES EL TIPO DE FAMILIA AL QUE AQUÉL SERÁ INTEGRADO, ASÍ COMO LA ORIENTACIÓN SEXUAL O EL ESTADO CIVIL DE ÉSTOS."
---
## Model description
hackathon-pln-es/jurisbert-clas-art-convencion-americana-dh is a text classification model trained in a supervised fashion on a Spanish-language corpus.
This model was trained with [scjnugacj/jurisbert](https://huggingface.co/scjnugacj/jurisbert), a masked language model pretrained on a Spanish legal corpus.
Our jurisbert-clas-art-convencion-americana-dh therefore takes an input text and predicts which of the 30 articles of the American Convention on Human Rights it belongs to.
## Intended uses & limitations
You can use the model to find the articles of the American Convention on Human Rights that are most closely related to the text you enter.
Keep in mind that this model is mainly intended for classification tasks, i.e. for finding out which articles are most closely related to the topic in question.
## How to use
```python
# To install SimpleTransformers: pip install simpletransformers
from simpletransformers.classification import ClassificationModel

# Create a ClassificationModel
model = ClassificationModel(
    "roberta", "hackathon-pln-es/jurisbert-clas-art-convencion-americana-dh", use_cuda=True)

predecir = ["adoptar a un niño"]
predictions, raw_outputs = model.predict(predecir)
print(predictions)
```
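Since the underlying checkpoint is a standard RoBERTa sequence-classification model, it should also be loadable directly with the `transformers` pipeline API (a sketch; the returned label names come from the model configuration, e.g. generic ids that map to convention articles):
```python
from transformers import pipeline

classifier = pipeline(
    "text-classification",
    model="hackathon-pln-es/jurisbert-clas-art-convencion-americana-dh",
)
print(classifier("adoptar a un niño"))
```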
## Training data
The hackathon-pln-es/jurisbert-clas-art-convencion-americana-dh model was trained on a dataset of 6,089 texts, each labeled with one of 30 article classes.
## Training procedure
The texts are processed with SimpleTransformers; the classifier was trained for one epoch using RoBERTa as the base architecture and Jurisbert, a masked language model pretrained on a Spanish legal corpus, as the specific pretrained model. A sketch of this setup is shown below.
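A minimal SimpleTransformers training sketch matching that description (the dataframe rows and label ids below are made-up placeholders, not the actual training data):
```python
import pandas as pd
from simpletransformers.classification import ClassificationModel, ClassificationArgs

# Hypothetical toy dataframe; the real corpus has 6,089 labeled texts
train_df = pd.DataFrame(
    {"text": ["adoptar a un niño", "derecho a la libertad personal"], "labels": [16, 6]}
)

model_args = ClassificationArgs(num_train_epochs=1)
model = ClassificationModel(
    "roberta", "scjnugacj/jurisbert", num_labels=30, args=model_args, use_cuda=True)
model.train_model(train_df)
```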
## Variables and metrics
We used 90% of the data for training and the remaining 10% for evaluation:
- Train: 5,480
- Test: 609
## Evaluation results
| | precision | recall | f1-score | support |
|---|---|---|---|---|
| accuracy | | | 0.75 | 609 |
| macro avg | 0.69 | 0.64 | 0.64 | 609 |
| weighted avg | 0.76 | 0.75 | 0.74 | 609 |
Accuracy: 0.7504105
## Team
The team consists of @gpalomeque, @aureliopvs, @cecilimacias, @giomadariaga and @cattsytabla. | [
-0.027731822803616524,
0.049689725041389465,
-0.06737323105335236,
-0.12571120262145996,
-0.047327470034360886,
-0.003781575709581375,
0.009865270927548409,
0.01909583993256092,
0.02140594646334648,
0.05121125653386116,
0.0911291167140007,
-0.013326028361916542,
0.022675441578030586,
0.01222508866339922,
0.07703716307878494,
0.00609491067007184,
-0.047992460429668427,
0.024540569633245468,
-0.0015699422219768167,
0.07397381961345673,
0.1350974440574646,
0.009717145934700966,
0.010222304612398148,
0.018820222467184067,
-0.07493042200803757,
-0.0028428551740944386,
-0.025328723713755608,
0.013027070090174675,
-0.0918845683336258,
-0.009349938482046127,
-0.06400178372859955,
0.13084451854228973,
0.11724510788917542,
0.038796182721853256,
0.02726263925433159,
-0.027608739212155342,
0.01130226906388998,
-0.02986428514122963,
0.006443427875638008,
0.047314029186964035,
-0.09946337342262268,
0.027096491307020187,
-0.027315417304635048,
-0.02183431386947632,
-0.0057119326665997505,
-0.09544026851654053,
-0.02535388246178627,
0.08863574266433716,
-0.04131026566028595,
-0.0219411738216877,
-0.06771446019411087,
-0.06737273186445236,
0.008890265598893166,
0.043531905859708786,
-0.011357618495821953,
-0.05507499724626541,
-0.05124061182141304,
0.02005264163017273,
0.07321394234895706,
0.046499814838171005,
-0.0329740084707737,
0.08019223809242249,
-0.04537596553564072,
0.0811198353767395,
0.02296365052461624,
-0.007198774255812168,
0.06141851097345352,
0.02529972977936268,
-0.12954272329807281,
0.01674560084939003,
0.028425119817256927,
-0.03561531379818916,
-0.03084217570722103,
0.06292860954999924,
-0.05407087504863739,
0.06320931762456894,
0.00042882165871560574,
0.04953515902161598,
0.06500028818845749,
-0.19276346266269684,
0.0018342941766604781,
0.049525756388902664,
0.018610944971442223,
-0.0637940987944603,
0.02942955121397972,
0.0559515506029129,
-0.022282518446445465,
-0.025256134569644928,
0.05747509375214577,
0.046745605766773224,
0.048052459955215454,
-0.00689111789688468,
0.025563867762684822,
-0.025362102314829826,
0.019567333161830902,
-0.008613242767751217,
0.10071219503879547,
-0.015710854902863503,
0.00038759244489483535,
0.04579125717282295,
0.09394022822380066,
0.014487739652395248,
0.009108137339353561,
0.003303786041215062,
0.009098981507122517,
-0.008462459780275822,
0.0689932107925415,
-0.013445155695080757,
0.0086295735090971,
0.01966042071580887,
-0.02905057929456234,
-0.021256722509860992,
-0.05933845043182373,
-0.0998176857829094,
-0.04024308919906616,
-0.05626934394240379,
0.03481287136673927,
-0.027826307341456413,
0.07193360477685928,
-0.029203081503510475,
-0.07446128129959106,
-0.044764090329408646,
-0.06037493050098419,
-0.08025185763835907,
0.016948066651821136,
-0.01396110374480486,
-0.010470597073435783,
8.977875573287653e-33,
-0.0369686558842659,
-0.0037251883186399937,
-0.011108486913144588,
0.08191992342472076,
0.039252474904060364,
-0.026196081191301346,
-0.043613169342279434,
-0.012971460819244385,
-0.11972832679748535,
0.0008903665584512055,
-0.057235486805438995,
0.023047879338264465,
-0.02640162967145443,
0.03295668959617615,
0.06000930443406105,
0.029674243181943893,
-0.0059080361388623714,
-0.07516423612833023,
0.03854699805378914,
0.046522993594408035,
-0.005276676267385483,
0.07684231549501419,
0.029794733971357346,
0.012418163008987904,
-0.07262413948774338,
0.11745437979698181,
0.0010891801211982965,
-0.11231498420238495,
-0.012456211261451244,
0.08071569353342056,
-0.01583375595510006,
0.0029870739672333,
0.03729754686355591,
-0.01675744354724884,
0.011312330141663551,
-0.04018937423825264,
0.04853661358356476,
-0.03607332706451416,
0.015808656811714172,
0.0022596355993300676,
-0.03447180241346359,
-0.02374877780675888,
-0.004804977681487799,
0.0025415371637791395,
-0.008415142074227333,
0.017403602600097656,
0.03254435956478119,
0.0044844248332083225,
0.04369058087468147,
0.06644672155380249,
-0.04645800590515137,
0.015714487060904503,
-0.04420367628335953,
-0.05426115170121193,
0.048178303986787796,
0.05409957468509674,
-0.07215186953544617,
0.032555460929870605,
-0.0398264080286026,
-0.017707431688904762,
0.017289146780967712,
0.03988571837544441,
0.028451543301343918,
-0.019401811063289642,
-0.0007891116547398269,
-0.03489461913704872,
-0.0826711654663086,
-0.05878368392586708,
0.1532110571861267,
-0.014315178617835045,
-0.05264086276292801,
-0.008757497183978558,
-0.02987680397927761,
0.11094636470079422,
0.0025453614071011543,
0.04581295698881149,
0.0013529349816963077,
-0.040817949920892715,
-0.0020972341299057007,
0.030513178557157516,
-0.12285323441028595,
0.012351791374385357,
-0.00026574646471999586,
-0.007464401889592409,
0.05554419010877609,
0.014639590866863728,
0.04245702177286148,
0.008622854948043823,
0.053390853106975555,
0.037162214517593384,
0.049614496529102325,
-0.003422643058001995,
-0.03503348305821419,
-0.060208339244127274,
0.019807118922472,
-1.0536452512783385e-32,
-0.02568984590470791,
-0.006672441028058529,
-0.05094991251826286,
-0.0375497043132782,
-0.08570867031812668,
-0.06895262002944946,
0.006667109671980143,
0.0018922451417893171,
-0.006096288561820984,
-0.1440839022397995,
0.0077367364428937435,
-0.08942262828350067,
0.07890105992555618,
-0.024554543197155,
0.0757274255156517,
0.008500670082867146,
-0.08950039744377136,
-0.07914422452449799,
-0.01634981669485569,
0.07458031922578812,
0.02807295322418213,
-0.03325020894408226,
-0.03444483131170273,
0.018758218735456467,
0.029376061633229256,
-0.06764844059944153,
-0.035726286470890045,
0.04165269434452057,
0.028985802084207535,
0.007514603901654482,
0.022115936502814293,
-0.0038830647245049477,
-0.030065638944506645,
0.08614183217287064,
-0.022083677351474762,
-0.04978850856423378,
0.03621114045381546,
0.0037316223606467247,
0.024266190826892853,
0.05574432387948036,
-0.0006544359494000673,
0.06614095717668533,
-0.04244477301836014,
0.0014914674684405327,
-0.026325328275561333,
0.011289743706583977,
-0.07380561530590057,
-0.011424187570810318,
-0.002803186886012554,
-0.07722239941358566,
0.0685199499130249,
-0.04245154932141304,
0.011110163293778896,
-0.1038990318775177,
0.034228838980197906,
-0.0690682902932167,
-0.00820417795330286,
-0.09551659226417542,
-0.09114200621843338,
0.04187675192952156,
0.037037935107946396,
0.04002995043992996,
-0.0832085907459259,
-0.01764071173965931,
0.06436585634946823,
0.012227311730384827,
-0.07553984969854355,
0.06620737910270691,
-0.032022517174482346,
-0.05322878062725067,
0.08559836447238922,
-0.07216767966747284,
-0.08700212091207504,
-0.04556294530630112,
-0.025864381343126297,
-0.035002321004867554,
0.008074035868048668,
0.030372997745871544,
-0.008100204169750214,
0.01264851726591587,
-0.022142918780446053,
0.03163306787610054,
-0.02280312404036522,
0.023028042167425156,
-0.001256659161299467,
0.05528617277741432,
-0.0697149857878685,
0.04731621965765953,
-0.05388316139578819,
0.051580071449279785,
-0.029158899560570717,
0.03868456557393074,
-0.04854249209165573,
0.10731597989797592,
-0.012608900666236877,
-6.243939765226969e-8,
-0.045353975147008896,
-0.01933373138308525,
-0.06719256937503815,
-0.05690867081284523,
-0.003225253662094474,
0.03298063948750496,
-0.03633134067058563,
-0.09761976450681686,
0.023445449769496918,
0.03496689721941948,
0.006137975491583347,
0.03790544718503952,
-0.048764556646347046,
0.021343115717172623,
-0.06020403280854225,
0.038076579570770264,
0.055770065635442734,
0.05987020954489708,
-0.019558560103178024,
-0.025518814101815224,
0.07479643821716309,
-0.00553415110334754,
-0.04884905740618706,
-0.0018020763527601957,
-0.027544571086764336,
0.009635849855840206,
-0.05912076309323311,
-0.038539234548807144,
-0.04923044890165329,
0.03607592731714249,
-0.03602948039770126,
0.022093961015343666,
-0.03875528275966644,
-0.03719806671142578,
0.019464785233139992,
0.0026808830443769693,
-0.038859717547893524,
-0.010204696096479893,
0.016683565452694893,
0.02815305069088936,
0.13826636970043182,
-0.0597958080470562,
-0.06864452362060547,
-0.03550146520137787,
0.07330570369958878,
-0.06239447370171547,
0.04420355707406998,
0.0842721238732338,
0.05512673407793045,
-0.012954982928931713,
-0.0728410929441452,
0.0019410888198763132,
-0.04178798198699951,
-0.0016327250050380826,
0.004567085765302181,
-0.03834324702620506,
0.04610488936305046,
0.074775829911232,
-0.009754568338394165,
0.02524186484515667,
0.07558757811784744,
0.050291236490011215,
0.03862646222114563,
-0.05764361470937729
] |
wietsedv/xlm-roberta-base-ft-udpos28-en | 8fb5e06a6295a01d03bbc4af8359458bfcf21b57 | 2022-02-25T09:58:19.000Z | [
"pytorch",
"xlm-roberta",
"token-classification",
"en",
"dataset:universal_dependencies",
"transformers",
"part-of-speech",
"license:apache-2.0",
"model-index",
"autotrain_compatible"
] | token-classification | false | wietsedv | null | wietsedv/xlm-roberta-base-ft-udpos28-en | 2,994 | null | transformers |
---
language:
- en
license: apache-2.0
library_name: transformers
tags:
- part-of-speech
- token-classification
datasets:
- universal_dependencies
metrics:
- accuracy
model-index:
- name: xlm-roberta-base-ft-udpos28-en
results:
- task:
type: token-classification
name: Part-of-Speech Tagging
dataset:
type: universal_dependencies
name: Universal Dependencies v2.8
metrics:
- type: accuracy
name: English Test accuracy
value: 96.0
- type: accuracy
name: Dutch Test accuracy
value: 90.4
- type: accuracy
name: German Test accuracy
value: 88.6
- type: accuracy
name: Italian Test accuracy
value: 87.8
- type: accuracy
name: French Test accuracy
value: 87.4
- type: accuracy
name: Spanish Test accuracy
value: 90.3
- type: accuracy
name: Russian Test accuracy
value: 91.0
- type: accuracy
name: Swedish Test accuracy
value: 94.0
- type: accuracy
name: Norwegian Test accuracy
value: 89.6
- type: accuracy
name: Danish Test accuracy
value: 91.6
- type: accuracy
name: Low Saxon Test accuracy
value: 57.4
- type: accuracy
name: Akkadian Test accuracy
value: 26.4
- type: accuracy
name: Armenian Test accuracy
value: 88.5
- type: accuracy
name: Welsh Test accuracy
value: 70.6
- type: accuracy
name: Old East Slavic Test accuracy
value: 76.5
- type: accuracy
name: Albanian Test accuracy
value: 82.3
- type: accuracy
name: Slovenian Test accuracy
value: 79.0
- type: accuracy
name: Guajajara Test accuracy
value: 17.2
- type: accuracy
name: Kurmanji Test accuracy
value: 76.9
- type: accuracy
name: Turkish Test accuracy
value: 79.1
- type: accuracy
name: Finnish Test accuracy
value: 87.2
- type: accuracy
name: Indonesian Test accuracy
value: 86.9
- type: accuracy
name: Ukrainian Test accuracy
value: 87.6
- type: accuracy
name: Polish Test accuracy
value: 87.2
- type: accuracy
name: Portuguese Test accuracy
value: 90.0
- type: accuracy
name: Kazakh Test accuracy
value: 82.5
- type: accuracy
name: Latin Test accuracy
value: 79.6
- type: accuracy
name: Old French Test accuracy
value: 53.4
- type: accuracy
name: Buryat Test accuracy
value: 58.8
- type: accuracy
name: Kaapor Test accuracy
value: 9.2
- type: accuracy
name: Korean Test accuracy
value: 64.0
- type: accuracy
name: Estonian Test accuracy
value: 88.4
- type: accuracy
name: Croatian Test accuracy
value: 87.9
- type: accuracy
name: Gothic Test accuracy
value: 20.5
- type: accuracy
name: Swiss German Test accuracy
value: 47.6
- type: accuracy
name: Assyrian Test accuracy
value: 14.6
- type: accuracy
name: North Sami Test accuracy
value: 32.0
- type: accuracy
name: Naija Test accuracy
value: 47.5
- type: accuracy
name: Latvian Test accuracy
value: 87.5
- type: accuracy
name: Chinese Test accuracy
value: 47.5
- type: accuracy
name: Tagalog Test accuracy
value: 73.5
- type: accuracy
name: Bambara Test accuracy
value: 27.7
- type: accuracy
name: Lithuanian Test accuracy
value: 87.3
- type: accuracy
name: Galician Test accuracy
value: 87.1
- type: accuracy
name: Vietnamese Test accuracy
value: 66.4
- type: accuracy
name: Greek Test accuracy
value: 87.6
- type: accuracy
name: Catalan Test accuracy
value: 89.7
- type: accuracy
name: Czech Test accuracy
value: 88.1
- type: accuracy
name: Erzya Test accuracy
value: 47.6
- type: accuracy
name: Bhojpuri Test accuracy
value: 50.7
- type: accuracy
name: Thai Test accuracy
value: 59.5
- type: accuracy
name: Marathi Test accuracy
value: 82.2
- type: accuracy
name: Basque Test accuracy
value: 76.0
- type: accuracy
name: Slovak Test accuracy
value: 88.5
- type: accuracy
name: Kiche Test accuracy
value: 25.4
- type: accuracy
name: Yoruba Test accuracy
value: 18.5
- type: accuracy
name: Warlpiri Test accuracy
value: 29.1
- type: accuracy
name: Tamil Test accuracy
value: 83.4
- type: accuracy
name: Maltese Test accuracy
value: 21.1
- type: accuracy
name: Ancient Greek Test accuracy
value: 66.8
- type: accuracy
name: Icelandic Test accuracy
value: 84.8
- type: accuracy
name: Mbya Guarani Test accuracy
value: 24.1
- type: accuracy
name: Urdu Test accuracy
value: 67.0
- type: accuracy
name: Romanian Test accuracy
value: 85.7
- type: accuracy
name: Persian Test accuracy
value: 76.7
- type: accuracy
name: Apurina Test accuracy
value: 28.6
- type: accuracy
name: Japanese Test accuracy
value: 34.1
- type: accuracy
name: Hungarian Test accuracy
value: 86.0
- type: accuracy
name: Hindi Test accuracy
value: 74.1
- type: accuracy
name: Classical Chinese Test accuracy
value: 29.4
- type: accuracy
name: Komi Permyak Test accuracy
value: 47.4
- type: accuracy
name: Faroese Test accuracy
value: 77.0
- type: accuracy
name: Sanskrit Test accuracy
value: 25.6
- type: accuracy
name: Livvi Test accuracy
value: 63.2
- type: accuracy
name: Arabic Test accuracy
value: 80.7
- type: accuracy
name: Wolof Test accuracy
value: 26.1
- type: accuracy
name: Bulgarian Test accuracy
value: 90.8
- type: accuracy
name: Akuntsu Test accuracy
value: 18.3
- type: accuracy
name: Makurap Test accuracy
value: 5.5
- type: accuracy
name: Kangri Test accuracy
value: 43.0
- type: accuracy
name: Breton Test accuracy
value: 64.1
- type: accuracy
name: Telugu Test accuracy
value: 84.7
- type: accuracy
name: Cantonese Test accuracy
value: 54.0
- type: accuracy
name: Old Church Slavonic Test accuracy
value: 53.7
- type: accuracy
name: Karelian Test accuracy
value: 69.7
- type: accuracy
name: Upper Sorbian Test accuracy
value: 75.6
- type: accuracy
name: South Levantine Arabic Test accuracy
value: 66.3
- type: accuracy
name: Komi Zyrian Test accuracy
value: 39.9
- type: accuracy
name: Irish Test accuracy
value: 67.0
- type: accuracy
name: Nayini Test accuracy
value: 44.9
- type: accuracy
name: Munduruku Test accuracy
value: 12.3
- type: accuracy
name: Manx Test accuracy
value: 25.4
- type: accuracy
name: Skolt Sami Test accuracy
value: 29.9
- type: accuracy
name: Afrikaans Test accuracy
value: 89.3
- type: accuracy
name: Old Turkish Test accuracy
value: 37.1
- type: accuracy
name: Tupinamba Test accuracy
value: 23.1
- type: accuracy
name: Belarusian Test accuracy
value: 89.1
- type: accuracy
name: Serbian Test accuracy
value: 88.4
- type: accuracy
name: Moksha Test accuracy
value: 44.1
- type: accuracy
name: Western Armenian Test accuracy
value: 80.1
- type: accuracy
name: Scottish Gaelic Test accuracy
value: 59.0
- type: accuracy
name: Khunsari Test accuracy
value: 43.2
- type: accuracy
name: Hebrew Test accuracy
value: 90.6
- type: accuracy
name: Uyghur Test accuracy
value: 75.8
- type: accuracy
name: Chukchi Test accuracy
value: 32.6
---
# XLM-RoBERTa base Universal Dependencies v2.8 POS tagging: English
This model is part of our paper called:
- Make the Best of Cross-lingual Transfer: Evidence from POS Tagging with over 100 Languages
Check the [Space](https://huggingface.co/spaces/wietsedv/xpos) for more details.
## Usage
```python
from transformers import AutoTokenizer, AutoModelForTokenClassification
tokenizer = AutoTokenizer.from_pretrained("wietsedv/xlm-roberta-base-ft-udpos28-en")
model = AutoModelForTokenClassification.from_pretrained("wietsedv/xlm-roberta-base-ft-udpos28-en")
```
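For quick experiments, the fine-tuned tagger can also be run through the token-classification pipeline (a minimal sketch; `aggregation_strategy="simple"` merges word pieces back into whole words):
```python
from transformers import pipeline

tagger = pipeline(
    "token-classification",
    model="wietsedv/xlm-roberta-base-ft-udpos28-en",
    aggregation_strategy="simple",
)
print(tagger("The quick brown fox jumps over the lazy dog."))
```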
| [
-0.009440306574106216,
-0.08241916447877884,
-0.049981001764535904,
-0.03255234286189079,
-0.005536343902349472,
0.005440914072096348,
-0.02642454393208027,
-0.01594807580113411,
-0.0511232428252697,
-0.04423541575670242,
0.0035184824373573065,
-0.1470787227153778,
-0.02857961319386959,
-0.017095351591706276,
0.0022438671439886093,
-0.06093977764248848,
0.01941053941845894,
0.005911483895033598,
-0.04658824950456619,
-0.026910504326224327,
0.014855345711112022,
0.10442907363176346,
0.09343723952770233,
-0.048119284212589264,
0.06726185977458954,
-0.028889760375022888,
-0.08891497552394867,
0.01237676851451397,
-0.01038086973130703,
-0.002162123564630747,
0.006966943386942148,
0.08857733011245728,
0.05603577941656113,
0.06442353874444962,
0.015285293571650982,
-0.012034628540277481,
0.0711924359202385,
-0.028910107910633087,
0.013150905258953571,
0.03487541526556015,
-0.048644546419382095,
-0.038969554007053375,
0.024242764338850975,
-0.014869166538119316,
0.03721602261066437,
0.0027550430968403816,
-0.09784254431724548,
-0.013225114904344082,
-0.047896090894937515,
0.0717846229672432,
-0.12899184226989746,
-0.0021070505026727915,
0.06516070663928986,
0.062010377645492554,
-0.03395555168390274,
-0.007447325624525547,
-0.04047059267759323,
0.050766944885253906,
-0.009814830496907234,
0.029695939272642136,
-0.037182193249464035,
-0.07064934074878693,
-0.09146127849817276,
0.013628588989377022,
-0.08076711744070053,
0.0013748352648690343,
-0.02470593899488449,
-0.021154742687940598,
0.02583695761859417,
-0.011666223406791687,
-0.06984686851501465,
0.03682694956660271,
-0.022317683324217796,
0.1230083703994751,
0.016477221623063087,
0.022682877257466316,
-0.04141340032219887,
0.006582020781934261,
0.07800167798995972,
-0.11868461221456528,
-0.034938257187604904,
-0.03148838132619858,
-0.02888430468738079,
0.01564038172364235,
0.08864820003509521,
-0.03724121302366257,
0.09145240485668182,
0.033886540681123734,
-0.05189136415719986,
0.001971681835129857,
-0.044141337275505066,
-0.021693425253033638,
0.025419892743229866,
0.05970602110028267,
-0.01973048225045204,
0.03432956710457802,
0.06001568213105202,
0.07747624814510345,
-0.05419468879699707,
0.038172539323568344,
-0.03921491652727127,
-0.04982833191752434,
-0.007168990559875965,
-0.04547995701432228,
-0.07362660020589828,
-0.027974890545010567,
-0.03751179575920105,
0.057359155267477036,
0.04406595602631569,
-0.07603725790977478,
-0.010270425118505955,
-0.03797619789838791,
-0.06067894026637077,
-0.12122351676225662,
0.014803760685026646,
0.007864778861403465,
-0.04754095897078514,
-0.04433732107281685,
0.08009421080350876,
0.054003506898880005,
-0.0699288472533226,
0.018621940165758133,
-0.03293078392744064,
-0.05095387622714043,
0.03185027837753296,
0.009121802635490894,
-0.01865818351507187,
9.33862817739929e-33,
0.03968552127480507,
0.042699411511421204,
0.0007493314333260059,
0.02149386703968048,
-0.0559798888862133,
-0.05797368288040161,
-0.08335389941930771,
0.010400412604212761,
-0.05755391716957092,
-0.0030254872981458902,
0.0025153965689241886,
0.04999307915568352,
-0.09758679568767548,
0.021786930039525032,
0.0662076398730278,
0.08055838942527771,
0.012289976701140404,
0.04333438724279404,
-0.06729690730571747,
0.07307548075914383,
0.13119786977767944,
-0.011416090652346611,
0.06915395706892014,
-0.014528324827551842,
0.02590397372841835,
0.04584915563464165,
0.051001593470573425,
-0.04984094202518463,
-0.030246613547205925,
0.02972635067999363,
-0.04285486415028572,
-0.06388664245605469,
-0.01655888929963112,
-0.05859396234154701,
0.03756340965628624,
-0.020510755479335785,
-0.019281139597296715,
0.0067796045914292336,
-0.0490308441221714,
-0.0465124137699604,
-0.016684988513588905,
0.0025201914831995964,
-0.016675475984811783,
-0.016825828701257706,
0.03217921778559685,
-0.035748470574617386,
-0.02289905957877636,
-0.04313620924949646,
0.08306395262479782,
-0.008199789561331272,
-0.02055303007364273,
-0.027071624994277954,
-0.027617163956165314,
0.06367726624011993,
0.029414169490337372,
0.04614363610744476,
0.028985491022467613,
0.11223859339952469,
0.007028356194496155,
-0.035329319536685944,
-0.06122627854347229,
0.0027225688099861145,
-0.006193416193127632,
-0.057185184210538864,
0.0910896584391594,
-0.045938294380903244,
-0.0219722893089056,
-0.001954020233824849,
0.032768964767456055,
-0.03904582932591438,
0.023336973041296005,
-0.030152395367622375,
0.04413570091128349,
0.08294713497161865,
0.01910594291985035,
0.049980293959379196,
0.021664729341864586,
-0.029181068763136864,
0.019527114927768707,
0.03442607820034027,
-0.06437281519174576,
0.02774675004184246,
-0.03409011662006378,
-0.051306914538145065,
-0.0429515577852726,
-0.031812943518161774,
0.07919707894325256,
-0.005000718869268894,
-0.023504730314016342,
-0.01816132850944996,
-0.038541652262210846,
0.07133889198303223,
-0.05281658470630646,
-0.06422996520996094,
-0.0814526379108429,
-1.0969428224493513e-32,
-0.08612369000911713,
0.07156627625226974,
-0.049474310129880905,
0.11571942269802094,
0.00858466885983944,
0.006260734982788563,
0.08748283982276917,
0.09889893978834152,
0.0436832569539547,
0.034456055611371994,
0.04379430413246155,
-0.09516556560993195,
0.043724190443754196,
-0.0057453284971416,
0.016858983784914017,
0.011877748183906078,
-0.08887466043233871,
0.005443636327981949,
0.0009272677125409245,
0.0845409631729126,
0.0004037889011669904,
0.11302760243415833,
-0.057758599519729614,
0.12351042032241821,
-0.041960518807172775,
-0.013974555768072605,
-0.06340839713811874,
0.014165492728352547,
-0.020038973540067673,
-0.09007016569375992,
0.010839229449629784,
0.01946164481341839,
-0.09519007802009583,
0.032955341041088104,
-0.02215760387480259,
-0.05635635927319527,
0.028831375762820244,
-0.001856883056461811,
-0.01624663919210434,
0.127038836479187,
0.009259521029889584,
0.028073301538825035,
-0.10356292873620987,
-0.05941809341311455,
-0.00386372790671885,
-0.08771971613168716,
0.0032934658229351044,
0.002508090576156974,
-0.04592530056834221,
-0.06112850084900856,
0.0887802466750145,
0.03709143400192261,
-0.07593881338834763,
0.029686544090509415,
0.03540944308042526,
-0.05471651256084442,
0.009700859896838665,
-0.04545191675424576,
-0.07873775064945221,
0.0028557325713336468,
-0.02511310949921608,
0.04323917254805565,
-0.005719215143471956,
0.02038739062845707,
0.1055702343583107,
-0.023310434073209763,
-0.06580611318349838,
0.017777901142835617,
0.01097680814564228,
-0.01205110177397728,
-0.013404704630374908,
-0.030553540214896202,
0.009184018708765507,
-0.01651204191148281,
0.004082618281245232,
-0.02154483273625374,
-0.024997534230351448,
-0.006153067573904991,
-0.0021839714609086514,
-0.022554980590939522,
-0.05166700482368469,
-0.02423262782394886,
0.0597655326128006,
0.10405190289020538,
0.04160236194729805,
0.10493592172861099,
0.02543136104941368,
0.0005373121239244938,
0.023977896198630333,
0.031256094574928284,
-0.011272731237113476,
0.04200483486056328,
-0.07476174831390381,
0.07336042076349258,
0.0028620976954698563,
-7.070126883945704e-8,
-0.04087204858660698,
0.006352301221340895,
-0.07112137228250504,
0.050715528428554535,
0.015117664821445942,
-0.05625374987721443,
-0.060547228902578354,
0.06022300198674202,
-0.024471841752529144,
-0.02213876135647297,
0.01807837374508381,
0.005205856636166573,
-0.12215674668550491,
0.013208527117967606,
0.026041459292173386,
-0.07241465896368027,
-0.02199443057179451,
0.19106996059417725,
-0.03564852103590965,
-0.018757518380880356,
0.028278909623622894,
0.035382747650146484,
-0.009103626012802124,
-0.012105719186365604,
0.024217186495661736,
-0.015861835330724716,
-0.03779818117618561,
0.04779842495918274,
-0.01474266592413187,
0.015570227988064289,
0.0351732112467289,
0.0071165780536830425,
-0.038604918867349625,
-0.09699676930904388,
0.03357353061437607,
0.028582844883203506,
-0.04061611369252205,
0.016505366191267967,
0.01261092722415924,
0.06121884286403656,
0.038234222680330276,
0.02608942799270153,
-0.10740194469690323,
0.05527428910136223,
0.032125264406204224,
-0.06390649825334549,
-0.06932875514030457,
-0.058028627187013626,
0.013378903269767761,
-0.02291204035282135,
-0.007216109428554773,
0.00703116599470377,
-0.04702604562044144,
-0.005748517345637083,
0.01404422428458929,
0.05318956449627876,
-0.0018850162159651518,
-0.018620185554027557,
0.01653231680393219,
-0.03434514254331589,
0.11825191229581833,
-0.013988949358463287,
-0.012582993134856224,
0.019339565187692642
] |
microsoft/BiomedVLP-CXR-BERT-specialized | b59c09e51ab2410b24f4be214bbb49043fe63fc2 | 2022-07-11T14:52:06.000Z | [
"pytorch",
"cxr-bert",
"en",
"arxiv:2204.09817",
"arxiv:2103.00020",
"arxiv:2002.05709",
"transformers",
"exbert",
"license:mit",
"fill-mask"
] | fill-mask | false | microsoft | null | microsoft/BiomedVLP-CXR-BERT-specialized | 2,994 | 5 | transformers | ---
language: en
tags:
- exbert
license: mit
pipeline_tag: fill-mask
widget:
- text: "Left pleural effusion with adjacent [MASK]."
example_title: "Radiology 1"
- text: "Heart size normal and lungs are [MASK]."
example_title: "Radiology 2"
inference: false
---
# CXR-BERT-specialized
[CXR-BERT](https://arxiv.org/abs/2204.09817) is a chest X-ray (CXR) domain-specific language model that makes use of an improved vocabulary, novel pretraining procedure, weight regularization, and text augmentations. The resulting model demonstrates improved performance on radiology natural language inference, radiology masked language model token prediction, and downstream vision-language processing tasks such as zero-shot phrase grounding and image classification.
First, we pretrain [**CXR-BERT-general**](https://huggingface.co/microsoft/BiomedVLP-CXR-BERT-general) from a randomly initialized BERT model via Masked Language Modeling (MLM) on abstracts from [PubMed](https://pubmed.ncbi.nlm.nih.gov/) and on clinical notes from the publicly available [MIMIC-III](https://physionet.org/content/mimiciii/1.4/) and [MIMIC-CXR](https://physionet.org/content/mimic-cxr/). In that regard, the general model is expected to be applicable for research in clinical domains other than chest radiology through domain-specific fine-tuning.
**CXR-BERT-specialized** is continually pretrained from CXR-BERT-general to further specialize in the chest X-ray domain. At the final stage, CXR-BERT is trained in a multi-modal contrastive learning framework, similar to the [CLIP](https://arxiv.org/abs/2103.00020) framework. The latent representation of the [CLS] token is used to align text and image embeddings.
## Model variations
| Model | Model identifier on HuggingFace | Vocabulary | Note |
| ------------------------------------------------- | ----------------------------------------------------------------------------------------------------------- | -------------- | --------------------------------------------------------- |
| CXR-BERT-general | [microsoft/BiomedVLP-CXR-BERT-general](https://huggingface.co/microsoft/BiomedVLP-CXR-BERT-general) | PubMed & MIMIC | Pretrained for biomedical literature and clinical domains |
| CXR-BERT-specialized (after multi-modal training) | [microsoft/BiomedVLP-CXR-BERT-specialized](https://huggingface.co/microsoft/BiomedVLP-CXR-BERT-specialized) | PubMed & MIMIC | Pretrained for chest X-ray domain |
## Image model
**CXR-BERT-specialized** is jointly trained with a ResNet-50 image model in a multi-modal contrastive learning framework. Prior to multi-modal learning, the image model is pre-trained on the same set of images in MIMIC-CXR using [SimCLR](https://arxiv.org/abs/2002.05709). The corresponding model definition and its loading functions can be accessed through our [HI-ML-Multimodal](https://github.com/microsoft/hi-ml/blob/main/hi-ml-multimodal/src/health_multimodal/image/model/model.py) GitHub repository. The joint image and text model, namely [BioViL](https://arxiv.org/abs/2204.09817), can be used in phrase grounding applications as shown in this python notebook [example](https://mybinder.org/v2/gh/microsoft/hi-ml/HEAD?labpath=hi-ml-multimodal%2Fnotebooks%2Fphrase_grounding.ipynb). Additionally, please check the [MS-CXR benchmark](https://physionet.org/content/ms-cxr/0.1/) for a more systematic evaluation of joint image and text models in phrase grounding tasks.
## Citation
The corresponding manuscript has been accepted for presentation at the [**European Conference on Computer Vision (ECCV) 2022**](https://eccv2022.ecva.net/).
```bibtex
@misc{https://doi.org/10.48550/arxiv.2204.09817,
doi = {10.48550/ARXIV.2204.09817},
url = {https://arxiv.org/abs/2204.09817},
author = {Boecking, Benedikt and Usuyama, Naoto and Bannur, Shruthi and Castro, Daniel C. and Schwaighofer, Anton and Hyland, Stephanie and Wetscherek, Maria and Naumann, Tristan and Nori, Aditya and Alvarez-Valle, Javier and Poon, Hoifung and Oktay, Ozan},
title = {Making the Most of Text Semantics to Improve Biomedical Vision-Language Processing},
publisher = {arXiv},
year = {2022},
}
```
## Model Use
### Intended Use
This model is intended to be used solely for (I) future research on vision-language processing and (II) reproducibility of the experimental results reported in the reference paper.
#### Primary Intended Use
The primary intended use is to support AI researchers building on top of this work. CXR-BERT and its associated models should be helpful for exploring various clinical NLP & VLP research questions, especially in the radiology domain.
#### Out-of-Scope Use
**Any** deployed use case of the model --- commercial or otherwise --- is currently out of scope. Although we evaluated the models using a broad set of publicly-available research benchmarks, the models and evaluations are not intended for deployed use cases. Please refer to [the associated paper](https://arxiv.org/abs/2204.09817) for more details.
### How to use
Here is how to use this model to extract radiological sentence embeddings and obtain their cosine similarity in the joint space (image and text):
```python
import torch
from transformers import AutoModel, AutoTokenizer
# Load the model and tokenizer
url = "microsoft/BiomedVLP-CXR-BERT-specialized"
tokenizer = AutoTokenizer.from_pretrained(url, trust_remote_code=True)
model = AutoModel.from_pretrained(url, trust_remote_code=True)
# Input text prompts (e.g., reference, synonym, contradiction)
text_prompts = ["There is no pneumothorax or pleural effusion",
"No pleural effusion or pneumothorax is seen",
"The extent of the pleural effusion is constant."]
# Tokenize and compute the sentence embeddings
tokenizer_output = tokenizer.batch_encode_plus(batch_text_or_text_pairs=text_prompts,
add_special_tokens=True,
padding='longest',
return_tensors='pt')
embeddings = model.get_projected_text_embeddings(input_ids=tokenizer_output.input_ids,
attention_mask=tokenizer_output.attention_mask)
# Compute the cosine similarity of sentence embeddings obtained from input text prompts.
sim = torch.mm(embeddings, embeddings.t())
```
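As a quick sanity check on the snippet above, the pairwise scores can be inspected directly; assuming the projected embeddings are L2-normalised, the paraphrase pair should score noticeably higher than the contradiction:
```python
# Illustrative follow-up to the snippet above
print(f"paraphrase similarity:    {sim[0, 1].item():.3f}")
print(f"contradiction similarity: {sim[0, 2].item():.3f}")
```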
## Data
This model builds upon existing publicly-available datasets:
- [PubMed](https://pubmed.ncbi.nlm.nih.gov/)
- [MIMIC-III](https://physionet.org/content/mimiciii/)
- [MIMIC-CXR](https://physionet.org/content/mimic-cxr/)
These datasets reflect a broad variety of sources, ranging from biomedical abstracts to intensive care unit notes to chest X-ray radiology notes. The radiology notes are accompanied by their associated chest X-ray DICOM images in the MIMIC-CXR dataset.
## Performance
We demonstrate that this language model achieves state-of-the-art results in radiology natural language inference through its improved vocabulary and novel language pretraining objective leveraging semantics and discourse characteristics in radiology reports.
A highlight of comparison to other common models, including [ClinicalBERT](https://aka.ms/clinicalbert) and [PubMedBERT](https://aka.ms/pubmedbert):
| | RadNLI accuracy (MedNLI transfer) | Mask prediction accuracy | Avg. # tokens after tokenization | Vocabulary size |
| ----------------------------------------------- | :-------------------------------: | :----------------------: | :------------------------------: | :-------------: |
| RadNLI baseline | 53.30 | - | - | - |
| ClinicalBERT | 47.67 | 39.84 | 78.98 (+38.15%) | 28,996 |
| PubMedBERT | 57.71 | 35.24 | 63.55 (+11.16%) | 28,895 |
| CXR-BERT (after Phase-III) | 60.46 | 77.72 | 58.07 (+1.59%) | 30,522 |
| **CXR-BERT (after Phase-III + Joint Training)** | **65.21** | **81.58** | **58.07 (+1.59%)** | 30,522 |
CXR-BERT also contributes to better vision-language representation learning through its improved text encoding capability. Below is the zero-shot phrase grounding performance on the **MS-CXR** dataset, which evaluates the quality of image-text latent representations.
| Vision–Language Pretraining Method | Text Encoder | MS-CXR Phrase Grounding (Avg. CNR Score) |
| ---------------------------------- | ------------ | :--------------------------------------: |
| Baseline | ClinicalBERT | 0.769 |
| Baseline | PubMedBERT | 0.773 |
| ConVIRT | ClinicalBERT | 0.818 |
| GLoRIA | ClinicalBERT | 0.930 |
| **BioViL** | **CXR-BERT** | **1.027** |
| **BioViL-L** | **CXR-BERT** | **1.142** |
Additional details about performance can be found in the corresponding paper, [Making the Most of Text Semantics to Improve Biomedical Vision-Language Processing](https://arxiv.org/abs/2204.09817).
## Limitations
This model was developed using English corpora, and thus can be considered English-only.
## Further information
Please refer to the corresponding paper, ["Making the Most of Text Semantics to Improve Biomedical Vision-Language Processing", ECCV'22](https://arxiv.org/abs/2204.09817) for additional details on the model training and evaluation.
For additional inference pipelines with CXR-BERT, please refer to the [HI-ML-Multimodal GitHub](https://aka.ms/biovil-code) repository.
| [
-0.047580890357494354,
-0.05406125262379646,
0.05029718205332756,
0.012643080204725266,
0.029990961775183678,
0.03519583120942116,
0.024371156468987465,
0.015552010387182236,
-0.0035104602575302124,
-0.059041980654001236,
-0.02772294357419014,
-0.0687270537018776,
0.05773886665701866,
0.11707661300897598,
0.00654644938185811,
0.01662430912256241,
0.008062239736318588,
-0.01392091903835535,
-0.07394550740718842,
-0.026247521862387657,
0.04917038232088089,
0.10750283300876617,
0.06485995650291443,
-0.04926038160920143,
0.034818459302186966,
-0.053469546139240265,
-0.06704803556203842,
-0.11353158205747604,
0.07711318135261536,
0.05113735795021057,
0.055676281452178955,
0.018702369183301926,
0.07095582038164139,
0.06605285406112671,
-0.007811278570443392,
0.03236830234527588,
-0.030361752957105637,
0.07791236788034439,
0.005874333903193474,
0.05482471361756325,
-0.0447196438908577,
-0.030911481007933617,
-0.056787919253110886,
0.006347388494759798,
0.06115950271487236,
-0.029201431199908257,
-0.017412574961781502,
-0.04317047446966171,
-0.06544313579797745,
0.03972453251481056,
-0.07738807052373886,
-0.05663986876606941,
-0.028797078877687454,
0.06736880540847778,
-0.008891325443983078,
0.01593860425055027,
-0.03429054096341133,
-0.06751193851232529,
0.0007851897971704602,
-0.1037062406539917,
-0.1028040274977684,
-0.08647190779447556,
0.004950588569045067,
0.06087467074394226,
-0.02702365815639496,
-0.00016101563232950866,
-0.01575641706585884,
0.01034587249159813,
0.001896222704090178,
0.0642860010266304,
-0.019671037793159485,
0.06463340669870377,
0.06324681639671326,
0.05250662937760353,
-0.010693972930312157,
-0.003813644405454397,
0.10759638994932175,
-0.005532192066311836,
0.042690277099609375,
-0.07551049441099167,
0.007627390790730715,
0.02674628049135208,
0.07325547933578491,
0.006579838693141937,
0.09140246361494064,
0.051156528294086456,
0.006344390567392111,
-0.02119855210185051,
-0.07741466164588928,
0.004860725253820419,
-0.010703382082283497,
-0.10269027948379517,
0.04720916971564293,
0.020505301654338837,
0.039953067898750305,
-0.025303874164819717,
0.03282865136861801,
-0.06667425483465195,
0.0015604221262037754,
0.04648855701088905,
0.06268079578876495,
-0.027752608060836792,
0.008364974521100521,
-0.039636678993701935,
0.06631319224834442,
-0.024121716618537903,
0.011403861455619335,
-0.004326878115534782,
0.02438872680068016,
-0.04690714180469513,
0.0588524267077446,
0.04357532411813736,
0.023764334619045258,
-0.10590197145938873,
0.011523721739649773,
0.06423904001712799,
0.0245859045535326,
-0.050702568143606186,
0.02742856927216053,
0.08329014480113983,
-0.012293601408600807,
-0.03347575664520264,
-0.011392641812562943,
-0.04535629227757454,
0.010220522992312908,
-0.04899057373404503,
-0.016058774664998055,
3.0369482463959286e-33,
-0.006348712835460901,
0.047580938786268234,
0.027205126360058784,
0.014041214250028133,
0.02696012146770954,
0.00827399455010891,
-0.027206655591726303,
0.005505695939064026,
-0.014475960284471512,
-0.04132673889398575,
-0.02195344865322113,
0.07007832825183868,
-0.07447191327810287,
0.0944698303937912,
-0.03714553266763687,
0.037122976034879684,
-0.06685954332351685,
0.11223849654197693,
-0.02480917237699032,
0.012271836400032043,
0.09168615937232971,
-0.005127761512994766,
-0.021093159914016724,
-0.03836758807301521,
-0.005850327666848898,
0.05772402510046959,
0.05637955665588379,
-0.10919957607984543,
-0.042511485517024994,
0.02335413545370102,
-0.18145930767059326,
0.055613480508327484,
0.033809926360845566,
0.023498907685279846,
-0.016925999894738197,
-0.03161243721842766,
0.024999868124723434,
-0.05100831016898155,
0.02832709066569805,
0.0003977681917604059,
-0.027660194784402847,
0.051348842680454254,
-0.0353362075984478,
-0.1006506085395813,
-0.00806883629411459,
-0.07279964536428452,
-0.02714153192937374,
-0.03024713695049286,
-0.032286543399095535,
-0.03178383782505989,
0.0862995833158493,
0.01162915863096714,
-0.05579650402069092,
-0.0258654598146677,
0.029417859390378,
0.013112584128975868,
0.009788927622139454,
0.021669438108801842,
0.02616058848798275,
0.003522305516526103,
0.03237719088792801,
-0.005988637916743755,
0.029874876141548157,
0.0716717317700386,
0.03373570367693901,
-0.035685304552316666,
-0.04150910675525665,
-0.012643138878047466,
-0.005877866875380278,
0.0328061617910862,
-0.05460627004504204,
0.01585494913160801,
0.007304877042770386,
-0.026793470606207848,
0.06500822305679321,
-0.022358743473887444,
-0.002795711625367403,
-0.042802825570106506,
-0.07628700137138367,
0.044440604746341705,
-0.030312534421682358,
0.01231450866907835,
-0.07405083626508713,
-0.027078231796622276,
-0.06944484263658524,
-0.05190208554267883,
0.0565488301217556,
-0.054866742342710495,
0.02030211314558983,
-0.010161707177758217,
0.04034166410565376,
-0.040559351444244385,
-0.035192716866731644,
0.016304755583405495,
-0.08700621873140335,
-2.5513276528422405e-33,
-0.053754035383462906,
0.06282427906990051,
-0.044099800288677216,
0.034483879804611206,
-0.049510762095451355,
-0.07538343966007233,
0.08151102066040039,
0.1454012095928192,
0.05804828926920891,
-0.06981425732374191,
0.050824105739593506,
-0.010554435662925243,
-0.08753081411123276,
0.015004185028374195,
0.01484031043946743,
0.022828945890069008,
0.012691165320575237,
0.045189350843429565,
-0.062138043344020844,
0.0530434213578701,
0.06780917942523956,
0.0020961288828402758,
-0.1238105371594429,
0.05591680482029915,
-0.051275234669446945,
0.10802274942398071,
-0.004436491057276726,
0.04087165743112564,
0.047556452453136444,
-0.04179380461573601,
-0.06248321011662483,
0.05070476606488228,
-0.03565944358706474,
0.04124279320240021,
-0.033318620175123215,
0.03611934930086136,
0.02638825587928295,
0.024256983771920204,
-0.032445278018713,
-0.0062882425263524055,
0.10274068266153336,
0.00829943548887968,
-0.06439045816659927,
0.017512375488877296,
-0.03647562861442566,
-0.0636826902627945,
-0.03685712069272995,
-0.072936050593853,
0.06516920775175095,
-0.0420096293091774,
-0.028432268649339676,
-0.050554290413856506,
-0.08690451830625534,
0.032249536365270615,
-0.09505367279052734,
-0.08500953018665314,
-0.014579794369637966,
-0.08354652673006058,
-0.03038129210472107,
0.007113773841410875,
-0.027654269710183144,
0.035919997841119766,
0.018073493614792824,
-0.025520453229546547,
-0.005674772895872593,
-0.03064138814806938,
-0.028805484995245934,
0.06064208596944809,
-0.042381610721349716,
0.0028704942669719458,
0.040573377162218094,
-0.013261719606816769,
-0.005167463328689337,
0.034277454018592834,
0.0256925281137228,
0.07589889317750931,
0.06658867001533508,
-0.09171724319458008,
-0.07525088638067245,
-0.030643485486507416,
-0.04154103249311447,
-0.08137597143650055,
0.0506400503218174,
0.07464331388473511,
0.03488202393054962,
0.13362449407577515,
0.010316064581274986,
-0.0025366568006575108,
0.003257643897086382,
-0.013332889415323734,
-0.02577676996588707,
0.029399506747722626,
0.003970762249082327,
0.03065105341374874,
-0.027951041236519814,
-4.4550837685619626e-8,
-0.10503938794136047,
-0.01835993118584156,
-0.03065478429198265,
0.023755978792905807,
-0.06847673654556274,
-0.0656040757894516,
-0.030734339728951454,
0.032206159085035324,
-0.018492572009563446,
-0.0523919016122818,
0.04788671061396599,
0.08301300555467606,
-0.05877387896180153,
-0.04546450451016426,
0.03516960144042969,
0.05486457422375679,
-0.048375289887189865,
0.06974340975284576,
0.037779055535793304,
-0.09697848558425903,
-0.026501180604100227,
0.01963108219206333,
0.05475484952330589,
-0.022111233323812485,
0.017129437997937202,
-0.0396430641412735,
0.021915575489401817,
0.11321830004453659,
0.04308318719267845,
0.022288689389824867,
-0.026034224778413773,
0.06356076151132584,
-0.05744623765349388,
0.051978763192892075,
0.06124763563275337,
0.026643257588148117,
0.01659419760107994,
-0.05152137205004692,
-0.04410107061266899,
0.07611837983131409,
0.1112656220793724,
0.014229180291295052,
-0.0333232618868351,
-0.02301260270178318,
0.042362991720438004,
-0.006094327662140131,
-0.0007516957120969892,
-0.11966560781002045,
0.06306338310241699,
-0.06812454015016556,
0.05835442617535591,
-0.028880717232823372,
0.010084508918225765,
0.016995634883642197,
-0.032971449196338654,
0.09435506910085678,
-0.018187085166573524,
-0.0044366647489368916,
0.02861875668168068,
0.03785160928964615,
0.016964660957455635,
0.03502045199275017,
0.030178526416420937,
0.02566545270383358
] |
yikuan8/Clinical-Longformer | dc05ee5437027609b953618bc8e2b725a30bd670 | 2022-04-10T17:44:49.000Z | [
"pytorch",
"longformer",
"fill-mask",
"en",
"arxiv:2201.11838",
"transformers",
"clinical",
"autotrain_compatible"
] | fill-mask | false | yikuan8 | null | yikuan8/Clinical-Longformer | 2,992 | 5 | transformers | ---
language: "en"
tags:
- longformer
- clinical
---
<span style="font-size:larger;">**Clinical-Longformer**</span> is a clinical-knowledge-enriched version of Longformer that was further pre-trained on MIMIC-III clinical notes. It accepts up to 4,096 tokens as model input. Clinical-Longformer consistently outperforms ClinicalBERT across 10 baseline datasets by at least 2 percent. These downstream experiments broadly cover named entity recognition (NER), question answering (QA), natural language inference (NLI) and text classification tasks. For more details, please refer to [our paper](https://arxiv.org/pdf/2201.11838.pdf). We also provide a sister model, [Clinical-BigBird](https://huggingface.co/yikuan8/Clinical-BigBird).
### Pre-training
We initialized Clinical-Longformer from the pre-trained weights of the base version of Longformer. The pre-training process was distributed in parallel across 6 32GB Tesla V100 GPUs, and FP16 precision was enabled to accelerate training. We pre-trained Clinical-Longformer for 200,000 steps with a batch size of 6 × 3. The learning rate was 3e-5 for both models. The entire pre-training process took more than 2 weeks.
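The setup described above corresponds roughly to the following Hugging Face `TrainingArguments` (a hypothetical sketch, not the authors' actual training script; with 6 GPUs, a per-device batch size of 3 gives the reported 6 × 3 batch):
```python
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="clinical-longformer-pretraining",  # hypothetical output path
    max_steps=200_000,
    per_device_train_batch_size=3,  # 6 GPUs x 3 = batch size of 6 x 3
    learning_rate=3e-5,
    fp16=True,
)
```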
### Usage
Load the model directly from Transformers:
```python
from transformers import AutoTokenizer, AutoModelForMaskedLM
tokenizer = AutoTokenizer.from_pretrained("yikuan8/Clinical-Longformer")
model = AutoModelForMaskedLM.from_pretrained("yikuan8/Clinical-Longformer")
```
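Because the checkpoint is exposed as a fill-mask model, a quick smoke test can also be run through the pipeline API (a sketch; the example sentence is made up, and `<mask>` is the Longformer/RoBERTa mask token):
```python
from transformers import pipeline

fill_mask = pipeline("fill-mask", model="yikuan8/Clinical-Longformer")
print(fill_mask("The patient was admitted with acute <mask> failure."))
```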
### Citing
If you find our model helps, please consider citing this :)
```bibtex
@article{li2022clinical,
title={Clinical-Longformer and Clinical-BigBird: Transformers for long clinical sequences},
author={Li, Yikuan and Wehbe, Ramsey M and Ahmad, Faraz S and Wang, Hanyin and Luo, Yuan},
journal={arXiv preprint arXiv:2201.11838},
year={2022}
}
```
### Questions
Please email [email protected]
| [
-0.08255124092102051,
-0.01990354433655739,
0.0360933318734169,
-0.04221920669078827,
-0.021467464044690132,
0.008913778699934483,
-0.12178582698106766,
0.034483131021261215,
0.014481711201369762,
-0.020366651937365532,
-0.03809583559632301,
-0.01566201075911522,
-0.024854138493537903,
0.00071943667717278,
0.031718216836452484,
0.028472021222114563,
0.03408820554614067,
-0.0361444465816021,
-0.0734386146068573,
0.019873633980751038,
0.03368063271045685,
0.0682014748454094,
0.08185423910617828,
0.04118466004729271,
-0.007572066504508257,
-0.0219256691634655,
-0.0623302236199379,
-0.0602097362279892,
0.042758550494909286,
-0.0240623876452446,
0.04737909138202667,
0.03437584266066551,
0.10641161352396011,
0.06863180547952652,
-0.0063917385414242744,
0.038463346660137177,
-0.07964148372411728,
0.028895162045955658,
-0.036714572459459305,
-0.018031908199191093,
0.005492441821843386,
-0.0914565920829773,
-0.02913464605808258,
0.08391952514648438,
0.0833992138504982,
-0.039248574525117874,
-0.05406623333692551,
0.032229602336883545,
0.020291442051529884,
0.030220257118344307,
-0.1205289363861084,
-0.030649399384856224,
0.00982592161744833,
0.12463667243719101,
-0.061669427901506424,
-0.016247011721134186,
-0.002868297277018428,
-0.03006838820874691,
-0.09214887768030167,
0.03804255276918411,
-0.08638884127140045,
-0.014767836779356003,
0.02228401042521,
0.03953831270337105,
-0.06319078803062439,
0.01451965607702732,
0.007849746383726597,
-0.008887785486876965,
-0.016280900686979294,
-0.020906871184706688,
0.007061314769089222,
0.03557340428233147,
-0.018843894824385643,
0.11123616248369217,
0.009374070912599564,
0.012301989831030369,
0.048361171036958694,
0.03184662386775017,
0.029391737654805183,
-0.09856926649808884,
0.03534886613488197,
0.023072946816682816,
0.09281496703624725,
0.0071951402351260185,
0.08996908366680145,
0.023639485239982605,
-0.009023774415254593,
-0.019036170095205307,
-0.10374917089939117,
-0.017643030732870102,
0.016871090978384018,
-0.14749079942703247,
0.08490423113107681,
-0.0016207084991037846,
-0.03076711855828762,
-0.011231983080506325,
0.005876173265278339,
0.006681391969323158,
-0.0237563606351614,
0.048816390335559845,
0.030988339334726334,
0.027222996577620506,
0.031761929392814636,
0.028328439220786095,
-0.06991690397262573,
-0.06540796905755997,
0.0429777167737484,
-0.00023131641501095146,
0.046994470059871674,
-0.07117030769586563,
0.06573709845542908,
0.023388832807540894,
-0.0709594264626503,
0.01779458299279213,
0.013535151258111,
-0.04177906736731529,
-0.027789805084466934,
-0.009091366082429886,
0.0376189649105072,
0.05821431055665016,
-0.09430009871721268,
-0.004324043169617653,
-0.08308939635753632,
-0.05643127113580704,
0.035033855587244034,
0.051904283463954926,
-0.028143059462308884,
3.124890101187813e-33,
0.034408312290906906,
0.08151991665363312,
0.026692356914281845,
0.02099738083779812,
-0.03192288428544998,
0.036423295736312866,
-0.04354526102542877,
0.06781432777643204,
-0.05201543867588043,
0.004772669170051813,
0.003997374791651964,
0.009979838505387306,
0.03781794384121895,
0.08082686364650726,
0.021675627678632736,
0.006542054936289787,
-0.08713456243276596,
0.08188190311193466,
-0.07599586248397827,
-0.010453467257320881,
0.06694763153791428,
-0.012935361824929714,
0.017614178359508514,
0.00436366256326437,
-0.020234398543834686,
0.009522623382508755,
-0.045342326164245605,
-0.06897781044244766,
-0.03450723737478256,
-0.002200421178713441,
-0.1422310769557953,
-0.07718237489461899,
0.05610457435250282,
0.004583647008985281,
0.012693803757429123,
-0.050952229648828506,
0.08668630570173264,
-0.07396294921636581,
0.011346914805471897,
0.004431976471096277,
-0.021698717027902603,
0.06382773816585541,
0.08727417141199112,
-0.024704381823539734,
-0.08185069262981415,
-0.04443608224391937,
-0.014334500767290592,
-0.003049534512683749,
-0.025719216093420982,
-0.017385518178343773,
0.04061632603406906,
-0.05707142502069473,
-0.08576739579439163,
-0.043007202446460724,
0.05032019689679146,
0.07229018956422806,
0.03488874435424805,
0.027884792536497116,
0.03987550735473633,
0.03366599231958389,
0.05646003782749176,
0.023104000836610794,
0.02581660822033882,
0.06084458529949188,
0.02050926350057125,
-0.00833092164248228,
-0.06114715710282326,
-0.05529268458485603,
0.07359253615140915,
0.0008515167864970863,
-0.033942289650440216,
0.03228258341550827,
-0.03472758084535599,
0.03263713791966438,
0.05288869887590408,
-0.042552802711725235,
0.005678935907781124,
-0.09835895150899887,
-0.015243038535118103,
0.048377152532339096,
0.001578471390530467,
0.01017017476260662,
-0.029001004993915558,
0.034518443048000336,
-0.016960036009550095,
-0.11450572311878204,
-0.01426638849079609,
-0.13280583918094635,
-0.0303882397711277,
-0.02122591994702816,
0.023524228483438492,
0.012571550905704498,
0.0004356174322310835,
-0.021779188886284828,
-0.007832355797290802,
-4.1171076176436285e-33,
0.023417215794324875,
-0.013714557513594627,
-0.050878774374723434,
0.02411445416510105,
0.0038393125869333744,
-0.01929296925663948,
-0.01760914921760559,
0.12353485822677612,
-0.02349838800728321,
-0.0565081425011158,
0.09829039871692657,
0.009094305336475372,
0.04155110940337181,
-0.04457653686404228,
-0.0017815479077398777,
-0.016296010464429855,
-0.020327161997556686,
0.06793675571680069,
0.04881059005856514,
0.014317827299237251,
0.07931402325630188,
0.023866942152380943,
-0.1049635261297226,
0.030101465061306953,
0.039032142609357834,
0.040336672216653824,
-0.020336385816335678,
0.04654012247920036,
0.005445805378258228,
-0.029970502480864525,
-0.08754971623420715,
-0.0012271093437448144,
-0.03670186549425125,
-0.02639368362724781,
-0.04692666977643967,
0.008136131800711155,
-0.0039787376299500465,
-0.03965829312801361,
-0.00931341852992773,
-0.01648736000061035,
0.10315703600645065,
0.0026632538065314293,
-0.0774061530828476,
-0.011555332690477371,
-0.040114518254995346,
-0.05923176929354668,
-0.08631324768066406,
-0.01710508204996586,
0.10061034560203552,
0.04093598201870918,
0.04035111889243126,
-0.029691031202673912,
-0.09123945236206055,
0.05199190974235535,
-0.060225971043109894,
-0.11000405997037888,
-0.07189507782459259,
-0.061674248427152634,
-0.09942566603422165,
-0.017640549689531326,
-0.013998524285852909,
0.06502752751111984,
0.010164869017899036,
-0.021956371143460274,
0.07416697591543198,
-0.059035636484622955,
0.012580356560647488,
0.03876613453030586,
0.01400461234152317,
-0.06605387479066849,
0.02166886068880558,
0.004311873111873865,
-0.024716760963201523,
0.06209584325551987,
-0.005047213286161423,
-0.007917010225355625,
-0.04138172045350075,
-0.09425659477710724,
-0.05943409353494644,
-0.037044551223516464,
-0.03439335152506828,
-0.08003047108650208,
0.010036952793598175,
0.07853418588638306,
0.03618653863668442,
0.07612057775259018,
0.07239661365747452,
0.04497857019305229,
0.00752024631947279,
0.058096710592508316,
0.03828185051679611,
0.07853148877620697,
0.017705857753753662,
0.08159001171588898,
-0.07407636195421219,
-5.580578132935443e-8,
-0.004345200955867767,
0.07120812684297562,
-0.016745397821068764,
0.008831482380628586,
-0.008655300363898277,
-0.040234338492155075,
-0.018751949071884155,
0.058836888521909714,
-0.043697889894247055,
0.027483899146318436,
0.04542768374085426,
0.051022302359342575,
-0.02349608764052391,
-0.11798176169395447,
0.057209406048059464,
0.04685722663998604,
0.031745027750730515,
0.05282750725746155,
-0.03833919018507004,
-0.0330410860478878,
0.05371176451444626,
0.009462190791964531,
-0.011540629900991917,
-0.0429377555847168,
0.0201437845826149,
-0.04725290462374687,
-0.05410672351717949,
0.08833666145801544,
-0.0117222024127841,
-0.04731814190745354,
-0.01168152317404747,
0.0650295615196228,
-0.051248203963041306,
0.018772857263684273,
0.03793632239103317,
-0.021064752712845802,
0.05640048161149025,
-0.029433628544211388,
0.022447675466537476,
0.09413409233093262,
0.03761639446020126,
0.049023594707250595,
-0.10905652493238449,
-0.03974458575248718,
0.07704760879278183,
-0.03584722802042961,
-0.03832394629716873,
-0.08723855018615723,
0.045148033648729324,
-0.02960529550909996,
0.04223761335015297,
-0.006652338430285454,
-0.02308850921690464,
0.04807083308696747,
0.02385384775698185,
0.11708825081586838,
0.01285387109965086,
0.06028944253921509,
-0.03682051971554756,
0.013943829573690891,
0.07489749044179916,
-0.010401150211691856,
0.039973072707653046,
0.03352159261703491
] |
mrm8488/bert-small2bert-small-finetuned-cnn_daily_mail-summarization | 3ecce850ed191e6b576e0fb306b30d5da087c2eb | 2020-12-11T21:53:12.000Z | [
"pytorch",
"encoder-decoder",
"text2text-generation",
"en",
"dataset:cnn_dailymail",
"transformers",
"summarization",
"license:apache-2.0",
"autotrain_compatible"
] | summarization | false | mrm8488 | null | mrm8488/bert-small2bert-small-finetuned-cnn_daily_mail-summarization | 2,983 | 2 | transformers | ---
language: en
license: apache-2.0
datasets:
- cnn_dailymail
tags:
- summarization
---
# Bert-small2Bert-small Summarization with 🤗EncoderDecoder Framework
This model is a warm-started *BERT2BERT* ([small](https://huggingface.co/google/bert_uncased_L-4_H-512_A-8)) model fine-tuned on the *CNN/Dailymail* summarization dataset.
The model achieves a **17.37** ROUGE-2 score on *CNN/Dailymail*'s test dataset.
For more details on how the model was fine-tuned, please refer to
[this](https://colab.research.google.com/drive/1Ekd5pUeCX7VOrMx94_czTkwNtLN32Uyu?usp=sharing) notebook.
## Results on test set 📝
| Metric | # Value |
| ------ | --------- |
| **ROUGE-2** | **17.37** |
## Model in Action 🚀
```python
from transformers import BertTokenizerFast, EncoderDecoderModel
import torch
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
tokenizer = BertTokenizerFast.from_pretrained('mrm8488/bert-small2bert-small-finetuned-cnn_daily_mail-summarization')
model = EncoderDecoderModel.from_pretrained('mrm8488/bert-small2bert-small-finetuned-cnn_daily_mail-summarization').to(device)
def generate_summary(text):
    # cut off at BERT max length 512
    inputs = tokenizer([text], padding="max_length", truncation=True, max_length=512, return_tensors="pt")
    input_ids = inputs.input_ids.to(device)
    attention_mask = inputs.attention_mask.to(device)
    output = model.generate(input_ids, attention_mask=attention_mask)
    return tokenizer.decode(output[0], skip_special_tokens=True)

text = "your text to be summarized here..."
generate_summary(text)
```
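If the default decoding is too terse or too generic for your inputs, `model.generate` also accepts the usual generation parameters. The variant below is a hedged sketch that reuses the `tokenizer`, `model` and `device` defined above; the beam-search settings are illustrative defaults, not the configuration used to obtain the reported ROUGE score.
```python
def generate_summary_with_beams(text, num_beams=4, max_length=142, min_length=56):
    # Same preprocessing as above, but with explicit generation settings.
    inputs = tokenizer([text], padding="max_length", truncation=True, max_length=512, return_tensors="pt")
    output = model.generate(
        inputs.input_ids.to(device),
        attention_mask=inputs.attention_mask.to(device),
        num_beams=num_beams,      # beam search instead of greedy decoding
        max_length=max_length,    # upper bound on summary length, in tokens
        min_length=min_length,    # avoid overly short summaries
        early_stopping=True,
    )
    return tokenizer.decode(output[0], skip_special_tokens=True)
```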
> Created by [Manuel Romero/@mrm8488](https://twitter.com/mrm8488) | [LinkedIn](https://www.linkedin.com/in/manuel-romero-cs/)
> Made with <span style="color: #e25555;">♥</span> in Spain
| [
-0.06976079195737839,
-0.05695519596338272,
0.05464837700128555,
0.06569110602140427,
0.02951974794268608,
0.0030353472102433443,
-0.03739560768008232,
0.019935933873057365,
0.006794694811105728,
-0.04702373966574669,
0.025251535698771477,
-0.021623238921165466,
-0.005284157581627369,
-0.00284444447606802,
-0.05601479113101959,
-0.001958827255293727,
0.13241970539093018,
-0.05589146912097931,
-0.13651946187019348,
-0.013327140361070633,
0.008686796762049198,
0.04417254403233528,
0.10841309279203415,
-0.026184197515249252,
0.0943732038140297,
-0.03180557116866112,
-0.10102283954620361,
0.0077369920909404755,
0.05366750806570053,
-0.004186083562672138,
0.015007665380835533,
0.06455427408218384,
0.01469375193119049,
0.09373056143522263,
0.04290720820426941,
0.048077378422021866,
0.008333638310432434,
-0.015470625832676888,
0.0076270815916359425,
0.03635464236140251,
0.03580092266201973,
-0.05118850991129875,
0.01976552978157997,
-0.007907544262707233,
0.043923355638980865,
-0.05419759824872017,
-0.038971468806266785,
-0.03858305513858795,
0.008095517754554749,
-0.057578325271606445,
-0.11649966984987259,
0.036629993468523026,
-0.010918467305600643,
0.020765572786331177,
-0.05384557694196701,
0.023113764822483063,
-0.0422978438436985,
-0.006312333047389984,
0.03008868172764778,
-0.09475400298833847,
-0.052148137241601944,
-0.03632141277194023,
-0.022476766258478165,
0.001741558313369751,
0.021142268553376198,
-0.05179338902235031,
-0.03222613409161568,
-0.03811204060912132,
0.01952388323843479,
0.00006985133950365707,
-0.01763962022960186,
0.08035214245319366,
0.05458927899599075,
0.027385670691728592,
0.046792153269052505,
-0.04593604430556297,
0.06283531337976456,
-0.04627533629536629,
0.11663433164358139,
-0.11067774891853333,
-0.022218381986021996,
-0.03454246371984482,
0.04782326892018318,
0.02364891953766346,
0.062185876071453094,
-0.03188827261328697,
0.09575895965099335,
0.00008407881978200749,
-0.0034856151323765516,
-0.03269937261939049,
-0.0004022892389912158,
-0.024688543751835823,
-0.020319562405347824,
0.015018774196505547,
-0.00784416776150465,
0.02992214635014534,
0.05179104581475258,
0.0031740539707243443,
-0.03102007508277893,
0.10241016745567322,
0.06280844658613205,
0.07060020416975021,
0.046794697642326355,
-0.06314576417207718,
0.03135644271969795,
-0.018613198772072792,
0.04587997868657112,
0.06396489590406418,
-0.00918602105230093,
-0.09977972507476807,
0.06716950237751007,
0.0446731299161911,
0.029654093086719513,
-0.051518749445676804,
0.03498168662190437,
-0.04300833120942116,
-0.030325883999466896,
0.007597373798489571,
0.009776737540960312,
0.07404541969299316,
-0.02606840431690216,
-0.01385033130645752,
-0.025022970512509346,
-0.030746327713131905,
-0.07687066495418549,
0.03265251964330673,
-0.004253906663507223,
3.334197155927291e-33,
0.016135191544890404,
-0.04538388550281525,
-0.025950385257601738,
-0.0232902392745018,
-0.013306200504302979,
-0.023916108533740044,
-0.053007449954748154,
0.07671306282281876,
-0.03275071084499359,
-0.011401347815990448,
-0.057172488421201706,
0.012014130130410194,
-0.07277124375104904,
0.0275021493434906,
0.006897720508277416,
0.018792321905493736,
-0.015595410019159317,
0.03193990886211395,
-0.000165461067808792,
0.053176045417785645,
0.09666215628385544,
-0.0017589564668014646,
0.03638104721903801,
-0.08515463769435883,
-0.04923762008547783,
0.026148691773414612,
0.08515208214521408,
-0.0012076252605766058,
-0.10853791236877441,
0.05795158073306084,
-0.1158280298113823,
0.04085982218384743,
-0.030454153195023537,
0.036712516099214554,
-0.00655934726819396,
0.0023461610544472933,
-0.026922985911369324,
-0.03107970394194126,
0.05580083280801773,
-0.036493007093667984,
-0.029782170429825783,
0.0664527490735054,
-0.03028847463428974,
-0.10895015299320221,
-0.04049811139702797,
0.036946676671504974,
0.05154324322938919,
0.01959102414548397,
0.05402527004480362,
-0.0076597342267632484,
0.005223583895713091,
0.005074164364486933,
-0.05480983108282089,
0.008028793148696423,
-0.04755781963467598,
0.03468022868037224,
0.11245492845773697,
-0.004229443613439798,
0.027182213962078094,
0.059076737612485886,
-0.07063888758420944,
0.0037743374705314636,
-0.011110586114227772,
-0.005536756012588739,
0.017386849969625473,
0.00475994823500514,
-0.055256426334381104,
0.02492371015250683,
-0.016989681869745255,
0.03884444385766983,
0.02016359008848667,
-0.011119496077299118,
0.03318804129958153,
0.034716106951236725,
0.05910040810704231,
-0.06779727339744568,
0.08876053243875504,
-0.08085238933563232,
-0.031282611191272736,
0.0035036345943808556,
0.07281002402305603,
0.04366467148065567,
-0.0030900738202035427,
-0.09186915308237076,
-0.10618730634450912,
0.014511866495013237,
0.005197782535105944,
-0.02286994829773903,
-0.06232020631432533,
-0.011993358843028545,
-0.027550674974918365,
-0.03722482547163963,
-0.017182953655719757,
-0.03101796656847,
-0.05858695134520531,
-4.5096459563434844e-33,
-0.03971444442868233,
0.0931682363152504,
-0.11597918719053268,
0.09302929788827896,
-0.019478002563118935,
-0.025178177282214165,
0.022033503279089928,
0.12936219573020935,
-0.012576651759445667,
0.008263868279755116,
0.08156882971525192,
-0.09101372212171555,
-0.043746087700128555,
-0.038657110184431076,
0.042444709688425064,
0.008595285005867481,
-0.03155769035220146,
-0.06261774897575378,
0.03127110004425049,
0.06720048189163208,
-0.04202644154429436,
0.05522767826914787,
-0.10511550307273865,
0.11019347608089447,
-0.02040908671915531,
0.04548147693276405,
-0.06453932821750641,
0.055174048990011215,
-0.03946109116077423,
-0.03858649730682373,
0.017383998259902,
-0.020356500521302223,
-0.06355248391628265,
0.0572102814912796,
-0.009962577372789383,
0.03048350103199482,
0.04910430684685707,
-0.09208637475967407,
0.0023398830089718103,
0.10647307336330414,
0.13938981294631958,
0.03973962366580963,
-0.01840786449611187,
0.02786433883011341,
0.005986180622130632,
0.05949520692229271,
-0.06730445474386215,
-0.08902405202388763,
-0.032115351408720016,
-0.04657614231109619,
0.0137500474229455,
-0.002795442007482052,
-0.06160552799701691,
0.042902976274490356,
-0.10724088549613953,
-0.021765928715467453,
0.007164365146309137,
-0.016690995544195175,
-0.028932463377714157,
0.01385024469345808,
-0.08654335886240005,
-0.035278428345918655,
-0.0599820502102375,
-0.000977833173237741,
0.0057037025690078735,
-0.044290941208601,
-0.010739167220890522,
-0.005250774789601564,
0.0018129844684153795,
0.021380199119448662,
-0.021133026108145714,
-0.08845876157283783,
0.048972465097904205,
-0.010912601836025715,
-0.033605143427848816,
0.04444555193185806,
-0.027258794754743576,
-0.03812464699149132,
0.009176675230264664,
-0.006345786154270172,
-0.0512026809155941,
-0.0582023523747921,
0.049362070858478546,
-0.013709464110434055,
0.016025273129343987,
0.06366105377674103,
0.007832514122128487,
0.05574696138501167,
-0.045074060559272766,
0.08236982673406601,
0.005361183546483517,
-0.028687497600913048,
-0.028461890295147896,
0.07553478330373764,
0.030575543642044067,
-6.19472331209181e-8,
-0.05316963791847229,
0.0007948451093398035,
-0.03740144520998001,
0.10020361095666885,
-0.05753511190414429,
-0.06464137881994247,
-0.007205143570899963,
0.0695427879691124,
0.002961906371638179,
0.043278180062770844,
0.012716060504317284,
0.004892975557595491,
-0.14660756289958954,
0.028734007850289345,
-0.0454559288918972,
0.0321941152215004,
-0.013017046265304089,
0.07272042334079742,
-0.021353717893362045,
-0.05993758141994476,
0.061852794140577316,
0.022126387804746628,
-0.004978118930011988,
-0.05335564166307449,
0.030026862397789955,
0.004622428212314844,
-0.048024099320173264,
0.11387555301189423,
-0.02004195749759674,
-0.026499778032302856,
-0.01957629807293415,
0.022155821323394775,
-0.07259467244148254,
-0.02918589673936367,
0.000771576538681984,
0.06969941407442093,
-0.043753623962402344,
-0.016776666045188904,
0.04073766618967056,
0.08578123152256012,
0.06778516620397568,
0.08715986460447311,
-0.08411682397127151,
-0.0028308317996561527,
0.07084017992019653,
-0.0117263812571764,
-0.02642636001110077,
-0.07726698368787766,
0.05254652723670006,
0.028547124937176704,
0.10657393932342529,
-0.036047566682100296,
-0.05857304856181145,
0.03029099479317665,
-0.06630554050207138,
-0.020087717100977898,
-0.04249643534421921,
-0.01654747687280178,
0.012763161212205887,
-0.019998036324977875,
-0.004063612315803766,
-0.015809644013643265,
-0.08477834612131119,
-0.010202106088399887
] |
inywer/shouko0-3 | 402311526d96c0f9cad11c6d35dfa4c48880ff9d | 2022-07-11T04:34:09.000Z | [
"pytorch",
"gpt2",
"text-generation",
"transformers",
"conversational"
] | conversational | false | inywer | null | inywer/shouko0-3 | 2,983 | null | transformers | ---
tags:
- conversational
---
# inywer/shouko0-3 Model | [
-0.110287606716156,
-0.024537203833460808,
-0.019390473142266273,
-0.008692726492881775,
0.029131833463907242,
-0.02480955421924591,
0.08496580272912979,
0.009292726404964924,
0.028812630102038383,
-0.016147328540682793,
0.04430326819419861,
-0.015353868715465069,
0.020961306989192963,
0.021524474024772644,
-0.016851603984832764,
0.06840289384126663,
-0.0035631998907774687,
-0.06096087396144867,
-0.08522455394268036,
-0.011668507941067219,
0.028303362429142,
0.0663655549287796,
0.03987400606274605,
0.03562942519783974,
-0.02402663230895996,
0.009114362299442291,
-0.03124924749135971,
0.07305643707513809,
0.01317936833947897,
-0.05026211217045784,
0.041070662438869476,
0.13479310274124146,
0.06871602684259415,
0.026511335745453835,
0.0016912642167881131,
0.03610176220536232,
0.03699076920747757,
-0.024230079725384712,
0.0012289072619751096,
-0.01322886347770691,
-0.018093004822731018,
-0.011261248961091042,
-0.06390516459941864,
-0.01102579664438963,
0.03657000511884689,
-0.035962533205747604,
-0.10481221228837967,
-0.014494345523416996,
0.015437028370797634,
-0.019543876871466637,
-0.1461074948310852,
-0.036519672721624374,
0.03594578802585602,
0.10107003897428513,
0.003783934749662876,
0.03057830035686493,
-0.11707362532615662,
-0.04527461156249046,
0.06593208760023117,
-0.001395854982547462,
0.0019959358032792807,
-0.027713429182767868,
-0.04710644483566284,
0.062405776232481,
0.0011209914227947593,
0.017663007602095604,
-0.09714724868535995,
-0.009238061495125294,
-0.053158313035964966,
0.08936727046966553,
0.02575427107512951,
-0.00002269899778184481,
0.020737076178193092,
0.01356779970228672,
0.009571335278451443,
0.03236556425690651,
0.01983928680419922,
-0.04788876324892044,
0.0012046810006722808,
-0.016584079712629318,
-0.019976453855633736,
-0.13672298192977905,
-0.019821705296635628,
-0.005492884665727615,
0.029320919886231422,
-0.0044670687057077885,
-0.01898365281522274,
-0.048888519406318665,
-0.031834062188863754,
-0.0003320835530757904,
-0.0978318527340889,
-0.05558287724852562,
0.05082203075289726,
-0.009502017870545387,
-0.04862983152270317,
0.05447598546743393,
0.029449354857206345,
0.005489342380315065,
-0.07823498547077179,
0.13590949773788452,
-0.006881024222820997,
0.011177627369761467,
0.0018280708463862538,
-0.07057464122772217,
-0.026698999106884003,
-0.007284583058208227,
-0.012249699793756008,
0.019168343394994736,
0.008014613762497902,
-0.02266436628997326,
-0.04379340633749962,
-0.07974555343389511,
-0.011584330350160599,
-0.0464264452457428,
0.01729634962975979,
-0.07007705420255661,
0.061241935938596725,
0.008471662178635597,
0.0306024681776762,
0.005630403757095337,
-0.020266693085432053,
-0.007858400233089924,
-0.06253360211849213,
-0.059302810579538345,
-0.05133822560310364,
0.01818014681339264,
-0.045415036380290985,
-2.0826626469442024e-33,
0.07777474075555801,
0.054876625537872314,
-0.005062775686383247,
0.03773396834731102,
0.014710801653563976,
0.011477014049887657,
-0.050308551639318466,
-0.018776264041662216,
-0.03725181147456169,
-0.009343183599412441,
0.03958340734243393,
-0.04301786422729492,
-0.026337452232837677,
-0.010684440843760967,
0.0227645356208086,
-0.08402873575687408,
-0.04771393910050392,
0.005978963803499937,
-0.06607991456985474,
0.016277184709906578,
0.023093117401003838,
0.02876611426472664,
-0.036504875868558884,
0.050688039511442184,
0.08161618560552597,
0.10843956470489502,
0.07217811793088913,
-0.13459934294223785,
0.014332501217722893,
0.05817856639623642,
0.06159999221563339,
0.026028672233223915,
-0.09852050989866257,
0.01063524093478918,
-0.07717280089855194,
0.002658360404893756,
-0.015054802410304546,
-0.05425138771533966,
-0.020536009222269058,
-0.07322259247303009,
-0.027430471032857895,
-0.014739933423697948,
-0.0638682097196579,
-0.049051910638809204,
-0.035413749516010284,
0.05994098633527756,
0.04169880226254463,
0.015397959388792515,
0.06356561928987503,
-0.003954329062253237,
-0.05046725645661354,
0.024230696260929108,
0.021313771605491638,
-0.026332829147577286,
-0.01317834947258234,
-0.08851049095392227,
0.014681319706141949,
0.027198057621717453,
0.0654229149222374,
0.019253119826316833,
-0.053982481360435486,
-0.0138461384922266,
0.009745467454195023,
-0.10353207588195801,
0.07967281341552734,
-0.05897296965122223,
-0.023083548992872238,
-0.018544957041740417,
-0.00005524568405235186,
0.0021875936072319746,
-0.056165553629398346,
-0.004543427377939224,
-0.02919449284672737,
0.1429615318775177,
-0.02906809002161026,
0.02689190022647381,
-0.0792921930551529,
-0.08820988982915878,
0.037859685719013214,
0.06910453736782074,
-0.07573221623897552,
-0.028855765238404274,
-0.034121401607990265,
0.07320963591337204,
0.006635730154812336,
-0.055325698107481,
0.027381785213947296,
-0.09516679495573044,
0.012355933897197247,
0.05720561742782593,
0.021153010427951813,
0.029759511351585388,
-0.06947258114814758,
-0.005969044286757708,
-0.042102593928575516,
-6.115754560927693e-34,
0.040637705475091934,
0.04718063771724701,
-0.11049795150756836,
0.04879968985915184,
-0.02050396054983139,
0.014726491644978523,
0.06389886885881424,
0.12332187592983246,
-0.0044445800594985485,
-0.015368778258562088,
0.08323969691991806,
-0.0022816115524619818,
-0.01706424169242382,
0.017000161111354828,
0.12532447278499603,
-0.01050830353051424,
0.031618379056453705,
-0.08004945516586304,
0.017448071390390396,
0.03172720596194267,
0.05840772017836571,
0.05021253600716591,
-0.15434607863426208,
0.06272360682487488,
-0.030068645253777504,
0.04312306270003319,
0.04872141033411026,
0.061959411948919296,
0.05312887206673622,
0.021914534270763397,
-0.05037400498986244,
0.00257369177415967,
-0.003921016585081816,
0.0318090096116066,
0.0013475740561261773,
0.059584762901067734,
0.0009199291234835982,
-0.05512864142656326,
0.010757110081613064,
0.08717178553342819,
0.05521947145462036,
0.03793040290474892,
0.016971031203866005,
0.05810599401593208,
-0.03991322219371796,
-0.07393822073936462,
-0.03922002762556076,
0.0032058244105428457,
-0.053698815405368805,
-0.026558464393019676,
0.040522269904613495,
0.022721393033862114,
-0.03843299672007561,
-0.05816708505153656,
-0.018145591020584106,
-0.01737596094608307,
-0.022370927035808563,
-0.04737395793199539,
-0.059582509100437164,
0.010676516219973564,
-0.04811988025903702,
-0.05737531557679176,
0.06406492739915848,
-0.02211390621960163,
-0.039025481790304184,
-0.039041340351104736,
-0.014103004708886147,
-0.045845575630664825,
0.0025716673117130995,
-0.07511439174413681,
0.03388722985982895,
-0.02459910325706005,
0.03324563801288605,
0.0077514913864433765,
0.05558823049068451,
-0.07191810011863708,
-0.029271576553583145,
0.03419451415538788,
0.07177269458770752,
-0.0725482925772667,
-0.032778143882751465,
0.027792710810899734,
0.007845406420528889,
0.08406486362218857,
0.08225855231285095,
-0.01639753393828869,
0.022017521783709526,
0.06601613014936447,
-0.009486748836934566,
-0.050558675080537796,
0.04818936809897423,
0.04420638456940651,
-0.002777618123218417,
0.11398225277662277,
-0.06076139584183693,
-2.9654218636210317e-8,
-0.01148595567792654,
-0.09267286956310272,
0.04646700248122215,
0.0422801598906517,
0.04528765007853508,
-0.019791776314377785,
0.032296039164066315,
-0.016628040000796318,
-0.01683647558093071,
0.013171046040952206,
0.08279106765985489,
0.0758109986782074,
-0.06745004653930664,
0.0492355152964592,
0.049106765538454056,
0.007894305512309074,
-0.04098189249634743,
0.08983214199542999,
-0.020101817324757576,
-0.052109938114881516,
0.006724776234477758,
0.01757727563381195,
-0.05803855508565903,
-0.003515241900458932,
0.08430603891611099,
0.04869891330599785,
-0.05436087027192116,
0.014468489214777946,
-0.021436002105474472,
0.0555775947868824,
0.03298771381378174,
0.05333348363637924,
-0.11289621144533157,
0.026789333671331406,
-0.05128013715147972,
0.0095377117395401,
-0.06335175037384033,
0.008674305863678455,
0.017726538702845573,
-0.0689457505941391,
0.041748836636543274,
0.02922351285815239,
-0.079471156001091,
0.022840891033411026,
0.06277450919151306,
0.042320676147937775,
-0.021398408338427544,
-0.11803857982158661,
-0.03166457638144493,
-0.06212788075208664,
-0.10370726883411407,
-0.04807635024189949,
0.08920935541391373,
0.013458414003252983,
-0.0515717975795269,
0.046817053109407425,
0.036134663969278336,
-0.025584563612937927,
0.012887411750853062,
0.007584435399621725,
0.10433211177587509,
0.03854818269610405,
-0.0004887033137492836,
0.023751741275191307
] |
google/t5-small-lm-adapt | ceece9332ccd73f589b2c764fa0e334c597952d4 | 2021-11-01T13:58:46.000Z | [
"pytorch",
"tf",
"t5",
"text2text-generation",
"en",
"dataset:c4",
"arxiv:2002.05202",
"arxiv:1910.10683",
"transformers",
"t5-lm-adapt",
"license:apache-2.0",
"autotrain_compatible"
] | text2text-generation | false | google | null | google/t5-small-lm-adapt | 2,979 | 3 | transformers | ---
language: en
datasets:
- c4
tags:
- t5-lm-adapt
license: apache-2.0
---
[Google's T5](https://ai.googleblog.com/2020/02/exploring-transfer-learning-with-t5.html) Version 1.1 - LM-Adapted
## Version 1.1 - LM-Adapted
[T5 Version 1.1 - LM Adapted](https://github.com/google-research/text-to-text-transfer-transformer/blob/main/released_checkpoints.md#lm-adapted-t511lm100k) includes the following improvements compared to the original [T5 model](https://huggingface.co/t5-small):
- GEGLU activation in feed-forward hidden layer, rather than ReLU - see [here](https://arxiv.org/abs/2002.05202).
- Dropout was turned off in pre-training (quality win). Dropout should be re-enabled during fine-tuning.
- Pre-trained on C4 only without mixing in the downstream tasks.
- No parameter sharing between the embedding and classifier layers.
- "xl" and "xxl" replace "3B" and "11B". The model shapes are a bit different - larger `d_model` and smaller `num_heads` and `d_ff`.

In addition to the Version 1.1 changes above, this model is pretrained on both the denoising and the language modeling objectives.
More specifically, this checkpoint is initialized from [T5 Version 1.1 - Small](https://huggingface.co/google/t5-v1_1-small)
and then trained for an additional 100K steps on the LM objective discussed in the [T5 paper](https://arxiv.org/pdf/1910.10683.pdf).
This adaptation improves the ability of the model to be used for prompt tuning.
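Because the LM adaptation is already applied, the checkpoint can be loaded like any other T5 model and prompted with free-form text. The snippet below is a minimal sketch rather than an official example; the prompt and the generation length are illustrative only.
```python
from transformers import T5Tokenizer, T5ForConditionalGeneration

tokenizer = T5Tokenizer.from_pretrained("google/t5-small-lm-adapt")
model = T5ForConditionalGeneration.from_pretrained("google/t5-small-lm-adapt")

# Illustrative prompt; the LM objective trains the model to continue the text.
prompt = "The best thing about open source machine learning is"
input_ids = tokenizer(prompt, return_tensors="pt").input_ids

outputs = model.generate(input_ids, max_length=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```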
**Note**: A popular fine-tuned version of the *T5 Version 1.1 - LM Adapted* model is [BigScience's T0pp](https://huggingface.co/bigscience/T0pp).
Pretraining Dataset: [C4](https://huggingface.co/datasets/c4)
Other Community Checkpoints: [here](https://huggingface.co/models?other=t5-lm-adapt)
Paper: [Exploring the Limits of Transfer Learning with a Unified Text-to-Text Transformer](https://arxiv.org/pdf/1910.10683.pdf)
Authors: *Colin Raffel, Noam Shazeer, Adam Roberts, Katherine Lee, Sharan Narang, Michael Matena, Yanqi Zhou, Wei Li, Peter J. Liu*
## Abstract
Transfer learning, where a model is first pre-trained on a data-rich task before being fine-tuned on a downstream task, has emerged as a powerful technique in natural language processing (NLP). The effectiveness of transfer learning has given rise to a diversity of approaches, methodology, and practice. In this paper, we explore the landscape of transfer learning techniques for NLP by introducing a unified framework that converts every language problem into a text-to-text format. Our systematic study compares pre-training objectives, architectures, unlabeled datasets, transfer approaches, and other factors on dozens of language understanding tasks. By combining the insights from our exploration with scale and our new “Colossal Clean Crawled Corpus”, we achieve state-of-the-art results on many benchmarks covering summarization, question answering, text classification, and more. To facilitate future work on transfer learning for NLP, we release our dataset, pre-trained models, and code.

| [
-0.06136505305767059,
-0.0460362546145916,
0.04922379553318024,
-0.016140956431627274,
0.09096480906009674,
0.0019294912926852703,
-0.03293485566973686,
-0.0479041188955307,
-0.04512881115078926,
-0.07430384308099747,
0.07904095202684402,
0.032333116978406906,
0.006167000159621239,
-0.06988512724637985,
-0.04318031668663025,
0.04499562829732895,
0.11014764755964279,
0.0005602247547358274,
-0.13209009170532227,
-0.07070428133010864,
-0.02083447389304638,
-0.04552580043673515,
0.044642381370067596,
0.04884636774659157,
0.06303030997514725,
0.022902047261595726,
-0.0778324231505394,
-0.05279180034995079,
-0.004595249891281128,
-0.04870473966002464,
-0.0654299333691597,
0.059843510389328,
-0.163761168718338,
-0.0238798800855875,
-0.08270011097192764,
0.052796971052885056,
-0.05103515088558197,
0.02512398734688759,
-0.004102302715182304,
-0.03047441877424717,
0.045021165162324905,
-0.10767420381307602,
-0.024207167327404022,
-0.007442005444318056,
0.04389810562133789,
-0.0029248055070638657,
0.0013804977061226964,
-0.03854745253920555,
-0.015161320567131042,
-0.0010085286339744925,
-0.0476609468460083,
-0.044722504913806915,
-0.025106359273195267,
0.09938069432973862,
-0.11852416396141052,
0.036243028938770294,
-0.003122585127130151,
0.0313812717795372,
-0.010550491511821747,
0.03447740152478218,
-0.056830763816833496,
0.01238534040749073,
-0.006851069629192352,
-0.0023780004121363163,
-0.028349099680781364,
-0.023554086685180664,
0.12196207046508789,
-0.015043286606669426,
-0.008183942176401615,
0.022417215630412102,
-0.022286899387836456,
0.04718618839979172,
-0.04553597420454025,
0.007538128644227982,
0.034715864807367325,
0.013567586429417133,
0.024630675092339516,
0.04899783805012703,
0.03180582821369171,
-0.030242038890719414,
0.07022210210561752,
0.03184244781732559,
0.028192274272441864,
0.029159964993596077,
-0.033382173627614975,
-0.04483551159501076,
-0.048562709242105484,
0.07650622725486755,
0.024966344237327576,
-0.043454185128211975,
0.05248163267970085,
-0.0014856954803690314,
0.005639694631099701,
-0.010933839716017246,
-0.06673455983400345,
-0.0016783969476819038,
-0.02565777488052845,
0.02737331949174404,
0.003845921717584133,
0.04243216663599014,
-0.0056315576657652855,
-0.006938617676496506,
0.06432297825813293,
0.019460529088974,
-0.07485457509756088,
-0.01821562647819519,
0.1235320121049881,
0.037736549973487854,
-0.0025388100184500217,
-0.08010152727365494,
0.098533496260643,
0.05357522517442703,
0.00569243635982275,
-0.014802427962422371,
0.058177631348371506,
-0.013668958097696304,
-0.1040419191122055,
-0.07494553178548813,
-0.014061173424124718,
0.051867976784706116,
-0.071591317653656,
-0.004144074860960245,
0.023329967632889748,
-0.002511540660634637,
-0.015795035287737846,
0.006469693034887314,
-0.04751921817660332,
2.084449765699437e-33,
0.08051036298274994,
0.06472073495388031,
-0.0033595275599509478,
0.028313318267464638,
0.038512878119945526,
-0.026353703811764717,
-0.013458553701639175,
-0.011286159045994282,
-0.06078081578016281,
-0.05233728885650635,
-0.06528346985578537,
-0.05806850641965866,
-0.04275437816977501,
0.04468163102865219,
-0.006440236698836088,
-0.03396961838006973,
-0.018609339371323586,
0.06552015990018845,
0.05375014618039131,
0.03853146731853485,
0.032996710389852524,
0.02981320023536682,
-0.008604881353676319,
-0.0946466401219368,
-0.0011521111009642482,
0.1616973876953125,
-0.026347873732447624,
-0.03250941261649132,
-0.009602900594472885,
0.021912027150392532,
-0.1329939216375351,
0.014284375123679638,
0.02234479784965515,
-0.04392864182591438,
0.036598823964595795,
-0.0061939614824950695,
-0.04769417643547058,
-0.027133725583553314,
-0.021699612960219383,
-0.07072381675243378,
0.04983090981841087,
0.06540527194738388,
0.0018750643357634544,
-0.05337720736861229,
0.00004299858119338751,
-0.007719298824667931,
0.03597563877701759,
-0.058616314083337784,
-0.05113711953163147,
0.01968490146100521,
0.03270117565989494,
-0.04172075539827347,
-0.13324740529060364,
-0.11393357068300247,
-0.0071213701739907265,
0.010819967836141586,
0.040469810366630554,
0.0709693655371666,
-0.005640596617013216,
0.09936254471540451,
0.03810029849410057,
0.04836571216583252,
-0.0038216968532651663,
0.02339962311089039,
0.13521963357925415,
0.02868189476430416,
-0.021135514602065086,
-0.032010771334171295,
0.024113938212394714,
-0.03725326806306839,
-0.06674128770828247,
-0.03137107193470001,
0.055336881428956985,
0.11276492476463318,
0.10712934285402298,
-0.055752385407686234,
-0.0014161454746499658,
-0.07823050767183304,
-0.014730012975633144,
0.03328738734126091,
-0.03219950571656227,
-0.047165583819150925,
0.010400070808827877,
-0.063065305352211,
-0.04508398100733757,
-0.047380439937114716,
0.05705559253692627,
-0.10822255909442902,
-0.0067856344394385815,
0.044731974601745605,
0.022262316197156906,
-0.005536564625799656,
0.01232634112238884,
-0.01275955606251955,
-0.026292765513062477,
-1.7781930796919215e-33,
0.03104516677558422,
-0.0079046580940485,
-0.02610650286078453,
0.11789204925298691,
-0.019738439470529556,
-0.0012854503002017736,
0.07165655493736267,
0.11483294516801834,
0.023872438818216324,
0.002253459533676505,
0.015685470774769783,
0.013367381878197193,
-0.018134647980332375,
-0.0030364112462848425,
0.006167850457131863,
-0.07814348489046097,
0.03122016228735447,
-0.05316220596432686,
-0.020812202244997025,
-0.007405663374811411,
0.10613827407360077,
0.038374342024326324,
-0.03512546047568321,
0.06692123413085938,
0.002903431188315153,
0.04792653024196625,
-0.010013277642428875,
0.0993613451719284,
0.047566551715135574,
-0.03812902048230171,
-0.0285994540899992,
-0.025930332019925117,
0.0027621728368103504,
0.010452830232679844,
0.008105318993330002,
0.05569252744317055,
0.026502307504415512,
0.06773124635219574,
0.012477320618927479,
0.05875828117132187,
0.07720156013965607,
0.018446622416377068,
-0.021733561530709267,
-0.014078169129788876,
-0.04105314984917641,
0.04795299470424652,
-0.09266222268342972,
-0.013897189870476723,
-0.003277061739936471,
0.04189097508788109,
0.0391189344227314,
-0.006311231292784214,
-0.10878362506628036,
-0.07617976516485214,
0.022969158366322517,
-0.054713379591703415,
0.08589642494916916,
-0.03938818722963333,
-0.04744461551308632,
0.014145628549158573,
-0.03940266743302345,
0.003774929093196988,
-0.01961156353354454,
-0.08669114112854004,
0.055611502379179,
0.033068593591451645,
0.04919980466365814,
0.04058634489774704,
0.015264053829014301,
0.04650472104549408,
0.023330846801400185,
-0.03646281361579895,
0.06854382157325745,
-0.0367000550031662,
0.036034923046827316,
-0.051506999880075455,
-0.030094929039478302,
-0.022269325330853462,
-0.0033374654594808817,
-0.08077923208475113,
-0.027207791805267334,
0.0008060533436946571,
0.0009394170483574271,
0.07507717609405518,
0.11755739152431488,
0.08710432052612305,
-0.03791048750281334,
0.051383886486291885,
0.07431841641664505,
-0.025694873183965683,
-0.02098517119884491,
0.026100480929017067,
-0.021380094811320305,
0.07007236778736115,
-0.011104493401944637,
-6.032897914565183e-8,
-0.0554514117538929,
0.04145423695445061,
-0.08887447416782379,
0.06302003562450409,
0.024628030136227608,
-0.01429627276957035,
-0.062139011919498444,
0.04604867845773697,
0.01713915914297104,
-0.007628635037690401,
0.008152849972248077,
-0.0161584485322237,
-0.04445776343345642,
0.019973602145910263,
-0.008313033729791641,
0.011086197569966316,
-0.00493447482585907,
-0.03389189392328262,
-0.02389439567923546,
-0.04230339452624321,
-0.005492796190083027,
0.009927826933562756,
-0.027440806850790977,
-0.04406058043241501,
0.06152572110295296,
-0.08918836712837219,
-0.0036308804992586374,
0.12918224930763245,
0.0188757311552763,
-0.11150874942541122,
-0.017506850883364677,
0.009763195179402828,
-0.009824876673519611,
0.03997378051280975,
0.07355858385562897,
0.051834363490343094,
0.05838958919048309,
-0.01794872246682644,
0.025589533150196075,
0.04894617944955826,
0.038166120648384094,
0.0648309737443924,
-0.053500398993492126,
-0.04025168716907501,
-0.0021051536314189434,
-0.02629951946437359,
-0.005767631810158491,
-0.1264112889766693,
0.005011335015296936,
-0.014665905386209488,
0.041509903967380524,
-0.0024418726097792387,
-0.030292313545942307,
0.058692075312137604,
0.06793086975812912,
-0.03410276025533676,
0.054247766733169556,
-0.019918370991945267,
0.0778636708855629,
0.02472003921866417,
0.050244398415088654,
0.03554826229810715,
-0.005780437495559454,
-0.0028200303204357624
] |
cross-encoder/ms-marco-TinyBERT-L-6 | 36aded87184cc2e8d13ceb3ab10b186facb9f26a | 2021-08-05T08:40:06.000Z | [
"pytorch",
"jax",
"bert",
"text-classification",
"transformers",
"license:apache-2.0"
] | text-classification | false | cross-encoder | null | cross-encoder/ms-marco-TinyBERT-L-6 | 2,974 | 1 | transformers | ---
license: apache-2.0
---
# Cross-Encoder for MS Marco
This model was trained on the [MS Marco Passage Ranking](https://github.com/microsoft/MSMARCO-Passage-Ranking) task.
The model can be used for Information Retrieval: given a query, score the query together with all candidate passages (e.g. retrieved with ElasticSearch), then sort the passages in decreasing order of score. See [SBERT.net Retrieve & Re-rank](https://www.sbert.net/examples/applications/retrieve_rerank/README.html) for more details. The training code is available here: [SBERT.net Training MS Marco](https://github.com/UKPLab/sentence-transformers/tree/master/examples/training/ms_marco)
## Usage with Transformers
```python
from transformers import AutoTokenizer, AutoModelForSequenceClassification
import torch
# 'model_name' is a placeholder, e.g. 'cross-encoder/ms-marco-TinyBERT-L-6'
model = AutoModelForSequenceClassification.from_pretrained('model_name')
tokenizer = AutoTokenizer.from_pretrained('model_name')
features = tokenizer(['How many people live in Berlin?', 'How many people live in Berlin?'], ['Berlin has a population of 3,520,031 registered inhabitants in an area of 891.82 square kilometers.', 'New York City is famous for the Metropolitan Museum of Art.'], padding=True, truncation=True, return_tensors="pt")
model.eval()
with torch.no_grad():
    scores = model(**features).logits
    print(scores)
```
## Usage with SentenceTransformers
The usage becomes easier when you have [SentenceTransformers](https://www.sbert.net/) installed. Then, you can use the pre-trained models like this:
```python
from sentence_transformers import CrossEncoder
model = CrossEncoder('model_name', max_length=512)
scores = model.predict([('Query', 'Paragraph1'), ('Query', 'Paragraph2'), ('Query', 'Paragraph3')])
```
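To actually re-rank retrieved passages as described above, the predicted scores just need to be sorted. The sketch below uses the SentenceTransformers path with this card's checkpoint; the query and passages are placeholders that would normally come from your first-stage retriever.
```python
from sentence_transformers import CrossEncoder

# Placeholder inputs: in practice these come from a first-stage retriever such as ElasticSearch.
query = "How many people live in Berlin?"
passages = [
    "Berlin has a population of 3,520,031 registered inhabitants in an area of 891.82 square kilometers.",
    "New York City is famous for the Metropolitan Museum of Art.",
]

model = CrossEncoder('cross-encoder/ms-marco-TinyBERT-L-6', max_length=512)
scores = model.predict([(query, passage) for passage in passages])

# Higher score = more relevant; sort passages in decreasing order of score.
ranked = sorted(zip(passages, scores), key=lambda pair: pair[1], reverse=True)
for passage, score in ranked:
    print(f"{score:.3f}\t{passage}")
```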
## Performance
In the following table, we provide various pre-trained Cross-Encoders together with their performance on the [TREC Deep Learning 2019](https://microsoft.github.io/TREC-2019-Deep-Learning/) and the [MS Marco Passage Reranking](https://github.com/microsoft/MSMARCO-Passage-Ranking/) dataset.
| Model-Name | NDCG@10 (TREC DL 19) | MRR@10 (MS Marco Dev) | Docs / Sec |
| ------------- |:-------------| -----| --- |
| **Version 2 models** | | |
| cross-encoder/ms-marco-TinyBERT-L-2-v2 | 69.84 | 32.56 | 9000
| cross-encoder/ms-marco-MiniLM-L-2-v2 | 71.01 | 34.85 | 4100
| cross-encoder/ms-marco-MiniLM-L-4-v2 | 73.04 | 37.70 | 2500
| cross-encoder/ms-marco-MiniLM-L-6-v2 | 74.30 | 39.01 | 1800
| cross-encoder/ms-marco-MiniLM-L-12-v2 | 74.31 | 39.02 | 960
| **Version 1 models** | | |
| cross-encoder/ms-marco-TinyBERT-L-2 | 67.43 | 30.15 | 9000
| cross-encoder/ms-marco-TinyBERT-L-4 | 68.09 | 34.50 | 2900
| cross-encoder/ms-marco-TinyBERT-L-6 | 69.57 | 36.13 | 680
| cross-encoder/ms-marco-electra-base | 71.99 | 36.41 | 340
| **Other models** | | |
| nboost/pt-tinybert-msmarco | 63.63 | 28.80 | 2900
| nboost/pt-bert-base-uncased-msmarco | 70.94 | 34.75 | 340
| nboost/pt-bert-large-msmarco | 73.36 | 36.48 | 100
| Capreolus/electra-base-msmarco | 71.23 | 36.89 | 340
| amberoad/bert-multilingual-passage-reranking-msmarco | 68.40 | 35.54 | 330
| sebastian-hofstaetter/distilbert-cat-margin_mse-T2-msmarco | 72.82 | 37.88 | 720
Note: Runtime was computed on a V100 GPU.
| [
-0.06551434844732285,
-0.07030782848596573,
-0.004193244501948357,
0.05925549939274788,
-0.008339117281138897,
0.08594850450754166,
-0.029806630685925484,
0.0668809562921524,
-0.0017081426922231913,
-0.053372517228126526,
-0.029908085241913795,
0.04448296129703522,
0.032882269471883774,
0.06726106256246567,
-0.07537456601858139,
0.03633607178926468,
0.03515918552875519,
0.08207210898399353,
-0.06935206800699234,
-0.11517231911420822,
0.11709704250097275,
0.06386863440275192,
0.003294477006420493,
0.00046541052870452404,
0.00988371018320322,
-0.033594779670238495,
-0.05713876709342003,
0.031645432114601135,
0.047612983733415604,
-0.03859201818704605,
-0.015525504015386105,
0.016225893050432205,
0.02227805368602276,
0.12129361927509308,
-0.05275660753250122,
0.010071183554828167,
-0.037099964916706085,
-0.08395608514547348,
0.030511634424328804,
-0.02669318951666355,
0.004905817098915577,
0.014815384522080421,
0.004401351325213909,
-0.018087998032569885,
0.0674496740102768,
-0.040612149983644485,
-0.04498275741934776,
-0.0527794286608696,
-0.0236703809350729,
-0.006171537097543478,
-0.12455546110868454,
0.006932578980922699,
0.003174912417307496,
0.12855632603168488,
-0.03582831099629402,
0.015307014808058739,
0.01409848127514124,
-0.06035797670483589,
-0.018671290948987007,
-0.12258797138929367,
-0.014657578431069851,
-0.0777750164270401,
-0.04477490112185478,
-0.05587880313396454,
-0.039161209017038345,
-0.04702410846948624,
0.05149704962968826,
0.020122647285461426,
0.05099637061357498,
-0.041789356619119644,
-0.03816214203834534,
-0.00984547846019268,
-0.022651219740509987,
0.03669372573494911,
0.027147850021719933,
0.011506416834890842,
0.043748848140239716,
0.006412298884242773,
-0.008798730559647083,
-0.019205724820494652,
-0.022683115676045418,
-0.047476671636104584,
0.03884504362940788,
0.06790019571781158,
0.04000888392329216,
-0.09037728607654572,
0.055351581424474716,
-0.004751316737383604,
0.07593019306659698,
0.054840777069330215,
-0.04724317044019699,
-0.10444486886262894,
-0.005942695774137974,
-0.009652319364249706,
-0.0072124553844332695,
0.028655538335442543,
-0.03616023063659668,
0.02581191621720791,
-0.00745102996006608,
0.08205533027648926,
0.01220907736569643,
0.0455733947455883,
-0.0072793434374034405,
-0.0647939145565033,
-0.008302494883537292,
-0.03509024903178215,
0.020318489521741867,
0.08591734617948532,
0.08624237030744553,
-0.10801370441913605,
0.028730152174830437,
0.05466226115822792,
-0.03682667762041092,
-0.04951547458767891,
-0.04091038927435875,
0.03072032332420349,
0.01745777763426304,
0.01880781725049019,
0.027381444349884987,
0.10280455648899078,
0.021882327273488045,
0.0502614751458168,
-0.04101765155792236,
0.006520481780171394,
0.039706774055957794,
0.03255926072597504,
0.00549142062664032,
3.1567309370737274e-33,
0.031631436198949814,
0.036184318363666534,
0.03873542696237564,
-0.04287038743495941,
-0.021931374445557594,
0.027553709223866463,
0.05047571659088135,
0.018734565004706383,
-0.06535763293504715,
-0.08337121456861496,
-0.08402062952518463,
0.1000480204820633,
0.028039295226335526,
0.009971404448151588,
-0.032771993428468704,
-0.04157663881778717,
-0.03132794797420502,
0.038269706070423126,
-0.00038503165706060827,
-0.004887309391051531,
0.09766056388616562,
0.0034397542476654053,
-0.013487061485648155,
-0.09485702961683273,
-0.06410416960716248,
0.006261590868234634,
0.008240710943937302,
-0.06056744605302811,
-0.03581665828824043,
0.045792777091264725,
-0.07411745935678482,
0.003818176919594407,
-0.054917510598897934,
-0.006411452312022448,
0.058813873678445816,
-0.019023925065994263,
0.024945219978690147,
0.005995025392621756,
0.04124457761645317,
-0.10193006694316864,
-0.008553121238946915,
0.0041920035146176815,
0.009790899232029915,
-0.0775105282664299,
-0.0760985016822815,
-0.091363325715065,
-0.05959651246666908,
0.04439897835254669,
0.10261885821819305,
0.008619889616966248,
0.023499231785535812,
-0.024539731442928314,
0.03663754090666771,
-0.01302382443100214,
0.03610073775053024,
0.0037107926327735186,
0.07295622676610947,
0.08553986996412277,
0.0223538875579834,
-0.008839238435029984,
0.07105819880962372,
0.007622885052114725,
0.04582293704152107,
0.0709545835852623,
0.09674448519945145,
-0.06563300639390945,
0.010325885377824306,
-0.009196704253554344,
0.04712365195155144,
0.013321048580110073,
-0.042036935687065125,
0.009014284238219261,
0.011140146292746067,
-0.021909143775701523,
0.027367807924747467,
-0.024419914931058884,
0.043307721614837646,
-0.05418829992413521,
0.005765495356172323,
0.01345110684633255,
-0.05502275750041008,
-0.01115321647375822,
0.06610704958438873,
-0.06899737566709518,
-0.049509044736623764,
0.0484616681933403,
0.04769909381866455,
-0.058808255940675735,
0.028858816251158714,
-0.0542738176882267,
0.022750481963157654,
-0.00505086500197649,
-0.014774950221180916,
0.051618531346321106,
0.027214525267481804,
-2.5353275216886184e-33,
0.030450360849499702,
0.08046966046094894,
-0.0003234162868466228,
0.03802232816815376,
-0.007878380827605724,
-0.0626048892736435,
-0.00681334501132369,
0.07483622431755066,
-0.07814476639032364,
-0.0271192267537117,
0.021147098392248154,
-0.07122867554426193,
0.04170256853103638,
-0.047134559601545334,
0.09637890756130219,
0.015001175925135612,
-0.01022394746541977,
-0.041185420006513596,
0.0010272279614582658,
0.04428262636065483,
-0.030395515263080597,
0.1045537143945694,
-0.12778609991073608,
0.03097398392856121,
-0.05021705850958824,
0.0066887689754366875,
0.05993207171559334,
0.06153075024485588,
-0.051115669310092926,
-0.05596161261200905,
0.008952835574746132,
-0.010182967409491539,
-0.046639539301395416,
-0.011821693740785122,
-0.11413343995809555,
0.04949469119310379,
0.045303359627723694,
-0.06608684360980988,
-0.012006121687591076,
0.17478495836257935,
0.053603656589984894,
0.07820038497447968,
-0.05509981885552406,
0.024438543245196342,
0.015550065785646439,
-0.010224990546703339,
-0.11009189486503601,
-0.011078685522079468,
0.055968232452869415,
-0.011918764561414719,
-0.008252017199993134,
0.06220865622162819,
-0.1100105345249176,
-0.028406303375959396,
-0.04778004810214043,
-0.055997442454099655,
-0.04830880090594292,
-0.006057199090719223,
-0.07452691346406937,
-0.016416721045970917,
-0.03578950837254524,
0.04393342509865761,
0.043630897998809814,
-0.044454921036958694,
0.04546605050563812,
0.017361102625727654,
-0.03696083649992943,
-0.025302814319729805,
-0.09633490443229675,
-0.014079640619456768,
0.010066202841699123,
0.0020460553932935,
0.07180138677358627,
0.0072657945565879345,
0.04648524150252342,
0.006923140957951546,
-0.02679106965661049,
0.04144315794110298,
-0.08075528591871262,
-0.06305990368127823,
-0.04246998950839043,
-0.02271696738898754,
0.022438587620854378,
0.07831341028213501,
-0.0025536546017974615,
0.02548728883266449,
0.038888514041900635,
0.0593283586204052,
0.019792692735791206,
-0.011883817613124847,
-0.00712433410808444,
-0.0924590528011322,
0.007322521880269051,
0.013536423444747925,
-0.0014058271190151572,
-5.612257325537939e-8,
-0.11178408563137054,
0.011767441406846046,
-0.11292534321546555,
0.10666078329086304,
-0.009596572257578373,
0.04854574799537659,
-0.024592895060777664,
0.12138867378234863,
-0.04178207740187645,
0.03947427123785019,
0.039532504975795746,
-0.04245952144265175,
-0.0572766549885273,
-0.008789164945483208,
0.040739625692367554,
0.07998771965503693,
0.0104464590549469,
0.026972780004143715,
-0.0429297499358654,
-0.07985197752714157,
0.049071960151195526,
0.005595630034804344,
0.0912257507443428,
0.003002709010615945,
0.008022909052670002,
0.01386475283652544,
-0.010822411626577377,
0.08995404094457626,
0.012259356677532196,
-0.005086230114102364,
-0.0032537998631596565,
0.037539560347795486,
0.003354709129780531,
-0.010804785415530205,
-0.014186419546604156,
0.09539934247732162,
-0.01739228330552578,
-0.026037249714136124,
-0.031295355409383774,
0.08104768395423889,
0.0386648066341877,
0.031819332391023636,
-0.10380776971578598,
0.05316181853413582,
0.07436396181583405,
0.039600979536771774,
-0.04062947258353233,
-0.044973086565732956,
0.04982079938054085,
-0.0186444241553545,
0.035147421061992645,
-0.08255954086780548,
-0.01344340667128563,
0.036261748522520065,
0.02272883616387844,
0.04955245554447174,
0.03511548042297363,
-0.03906796872615814,
-0.00250044628046453,
0.0012063262984156609,
0.03570898249745369,
0.037359919399023056,
-0.021832287311553955,
-0.016074808314442635
] |
pinkducky/Rachel_Bot | f504fc38da9892219763959aff144a976a6d2487 | 2022-03-21T04:01:39.000Z | [
"pytorch",
"gpt2",
"text-generation",
"transformers",
"conversational"
] | conversational | false | pinkducky | null | pinkducky/Rachel_Bot | 2,968 | null | transformers | ---
tags:
- conversational
---
# My Awesome Model
| [
-0.05090106651186943,
-0.031366977840662,
0.03581665828824043,
-0.0054056984372437,
0.08076965808868408,
-0.04372682794928551,
0.1371273398399353,
0.04686865955591202,
0.07841186225414276,
-0.04006827622652054,
0.029963944107294083,
-0.012565406039357185,
0.036244455724954605,
0.009963933378458023,
-0.039041824638843536,
0.041623249650001526,
0.014348188415169716,
-0.05568447336554527,
-0.07908384501934052,
0.04985930025577545,
-0.024708891287446022,
0.06136726588010788,
0.02305777370929718,
0.03187232092022896,
-0.025241436436772346,
0.052442390471696854,
-0.04537186771631241,
0.042892225086688995,
0.035982418805360794,
-0.05908246338367462,
-0.03677479922771454,
0.0823124423623085,
0.07599936425685883,
0.06003808602690697,
-0.007828108966350555,
-0.022685443982481956,
0.062002647668123245,
-0.01731053926050663,
0.04224047064781189,
-0.042645879089832306,
-0.042309775948524475,
-0.06501581519842148,
-0.05416039749979973,
0.008959675207734108,
0.025023961439728737,
-0.047416966408491135,
-0.045713312923908234,
-0.03602231666445732,
-0.05448457598686218,
0.028215525671839714,
-0.11340142786502838,
-0.09726736694574356,
-0.005307946819812059,
0.07291919738054276,
0.0011562320869415998,
0.08138564229011536,
-0.056777920573949814,
-0.14755024015903473,
0.017829654738307,
0.042331673204898834,
-0.007883528247475624,
0.015210023149847984,
-0.0465548150241375,
0.06283816695213318,
-0.02850412204861641,
0.011164055205881596,
-0.0909573957324028,
0.05635131895542145,
-0.0784955695271492,
0.11919571459293365,
0.039566099643707275,
-0.010491259396076202,
0.06072143092751503,
0.010534262284636497,
0.03941850736737251,
0.020460378378629684,
0.01201328169554472,
-0.036615174263715744,
0.047589585185050964,
0.041880011558532715,
-0.0423911027610302,
-0.12415754795074463,
-0.0178165715187788,
-0.07575485110282898,
-0.011541062965989113,
-0.06666063517332077,
-0.014633537270128727,
-0.03656398504972458,
-0.022767268121242523,
-0.0006290049059316516,
-0.05804314836859703,
-0.024403218179941177,
0.01748640462756157,
0.007189290598034859,
-0.05688001960515976,
0.03201739490032196,
0.016381550580263138,
-0.06272377073764801,
-0.05233392491936684,
0.12503185868263245,
-0.015755128115415573,
0.0417540967464447,
0.03470697999000549,
-0.038441117852926254,
-0.03530082851648331,
-0.03092080168426037,
-0.01916990615427494,
0.07731841504573822,
0.019840005785226822,
-0.06203543394804001,
-0.005670442245900631,
-0.01793004386126995,
-0.00026420739595778286,
-0.025612128898501396,
0.08910390734672546,
-0.08335672318935394,
0.04098793864250183,
0.00003558707612683065,
0.03989935293793678,
-0.037843793630599976,
-0.025859786197543144,
0.04540199786424637,
-0.017805110663175583,
-0.027855155989527702,
-0.016179407015442848,
0.009584194049239159,
-0.06639999896287918,
-4.478864534741249e-33,
0.11222975701093674,
0.06859760731458664,
0.06865804642438889,
0.10353326797485352,
0.05126076564192772,
0.041043270379304886,
-0.0769258663058281,
0.00011796884064096957,
-0.018120763823390007,
0.022435268387198448,
0.024414265528321266,
0.029571479186415672,
-0.031359825283288956,
0.022635547444224358,
0.005277079530060291,
-0.03892066702246666,
-0.06124343350529671,
-0.074563167989254,
-0.047082722187042236,
-0.006601410452276468,
0.005809285677969456,
-0.011806229129433632,
0.008817244321107864,
0.045550331473350525,
0.03609937056899071,
0.0628429427742958,
0.08441631495952606,
-0.07260341942310333,
-0.04593171551823616,
0.06081974878907204,
-0.02747199311852455,
-0.0063775693997740746,
0.004344668239355087,
0.037254698574543,
-0.043208200484514236,
0.027637843042612076,
-0.04124289005994797,
-0.04982233792543411,
0.01631910167634487,
-0.08025228977203369,
0.015886107459664345,
-0.026147479191422462,
-0.07082098722457886,
-0.057068999856710434,
-0.018375905230641365,
0.07975850254297256,
0.06380768865346909,
0.04807998985052109,
0.046053748577833176,
-0.008151701651513577,
-0.03738392889499664,
-0.026806335896253586,
-0.002303558634594083,
0.028527168557047844,
-0.05566997826099396,
-0.05916515365242958,
-0.07913040369749069,
0.010040078312158585,
0.007212954107671976,
-0.027216563001275063,
0.007182564586400986,
-0.023558219894766808,
0.03829099237918854,
-0.1337491273880005,
0.10252182185649872,
-0.013212723657488823,
-0.03376084566116333,
-0.012195839546620846,
0.019834956154227257,
-0.0031055838335305452,
-0.04869721829891205,
0.039330776780843735,
-0.01350850984454155,
0.0303358044475317,
0.008187275379896164,
0.008777179755270481,
-0.066440649330616,
-0.028343193233013153,
0.02277507446706295,
0.05544881150126457,
-0.06433303654193878,
-0.03979376703500748,
-0.03870479762554169,
0.018391091376543045,
-0.007904008962213993,
-0.04048888757824898,
0.010747126303613186,
-0.1100461557507515,
0.08085237443447113,
0.027630235999822617,
0.017611531540751457,
-0.011773657985031605,
-0.03050999902188778,
-0.001084225601516664,
-0.09993167221546173,
2.3631985982007646e-33,
0.07410631328821182,
-0.021655485033988953,
-0.03430446982383728,
0.08957358449697495,
0.0658988431096077,
-0.09670625627040863,
0.03356300666928291,
0.12644141912460327,
-0.007287300191819668,
0.009980403818190098,
0.019260842353105545,
0.017083946615457535,
-0.04549262300133705,
-0.0273654256016016,
0.12685054540634155,
-0.0022603862453252077,
0.03329843282699585,
-0.12125260382890701,
0.010285534895956516,
0.011052831076085567,
0.021403569728136063,
0.02825598232448101,
-0.11815153062343597,
0.029007932171225548,
0.007862820290029049,
0.038198284804821014,
0.009895049035549164,
0.0787750631570816,
0.1255485862493515,
-0.012331538833677769,
-0.041844483464956284,
0.04419497027993202,
0.009287770837545395,
-0.0348893441259861,
0.008455603383481503,
0.014187312684953213,
-0.03560749441385269,
-0.02111499384045601,
0.022057252004742622,
0.053258296102285385,
-0.01398906484246254,
0.00464602978900075,
-0.0208336990326643,
0.05951869487762451,
0.0027540153823792934,
-0.05181148275732994,
-0.002780700335279107,
-0.08395428210496902,
-0.05995841324329376,
-0.008431575261056423,
0.015148011036217213,
-0.018860861659049988,
-0.05646621063351631,
-0.09933912009000778,
-0.049692075699567795,
-0.03389864042401314,
0.08351656794548035,
-0.0013963409001007676,
-0.011192643083631992,
0.01275844406336546,
-0.06585824489593506,
-0.042186468839645386,
0.03605508804321289,
-0.01279937382787466,
0.0062263826839625835,
-0.08892334997653961,
-0.00842227041721344,
-0.044293295592069626,
-0.05458396300673485,
-0.02853185124695301,
0.06938458979129791,
0.0797097310423851,
-0.09960118681192398,
0.069974884390831,
0.030420484021306038,
-0.045343056321144104,
0.02778884395956993,
-0.005752933211624622,
0.047690499573946,
-0.055205538868904114,
-0.059457264840602875,
-0.026828406378626823,
-0.00685330294072628,
0.04476504772901535,
0.04690265282988548,
-0.028691265732049942,
-0.00420738710090518,
0.05791653320193291,
-0.0641462653875351,
-0.04161105677485466,
0.011222575791180134,
0.029472360387444496,
-0.006106182001531124,
0.09066887944936752,
-0.02926107868552208,
-2.4387945884996043e-8,
-0.07912608981132507,
-0.0193485077470541,
0.02513335645198822,
0.06632032245397568,
-0.0006380337290465832,
0.042135242372751236,
0.06511452794075012,
0.004173976369202137,
-0.012236827984452248,
-0.042280975729227066,
0.01634378731250763,
0.07965334504842758,
-0.023353738710284233,
0.0734296441078186,
0.02384823188185692,
0.011830461211502552,
-0.051046840846538544,
0.07105594873428345,
0.00836571492254734,
-0.06696010380983353,
0.025958934798836708,
0.03763088583946228,
-0.07969045639038086,
0.02339162677526474,
0.09492308646440506,
-0.0015179375186562538,
-0.042156774550676346,
0.0774446502327919,
-0.032096054404973984,
0.05440954491496086,
0.06271935254335403,
0.04717527702450752,
-0.06409522145986557,
0.05443163216114044,
-0.06054919585585594,
-0.005295728333294392,
-0.0462062731385231,
-0.04333166405558586,
0.021518342196941376,
-0.018056262284517288,
0.07318705320358276,
0.07725220173597336,
-0.08732577413320541,
0.004433119669556618,
0.057371124625205994,
0.001022679847665131,
0.03494297340512276,
-0.0875731110572815,
-0.006969284266233444,
-0.012489929795265198,
-0.055213987827301025,
-0.056746624410152435,
0.019027967005968094,
0.016803929582238197,
-0.02308109775185585,
0.013455736450850964,
0.06283722817897797,
0.0011627678759396076,
0.028021547943353653,
0.04511001333594322,
0.14455872774124146,
0.04447929561138153,
-0.0382804274559021,
-0.03517691791057587
] |
Edresson/wav2vec2-large-xlsr-coraa-portuguese | 823dceb42ebafb67cb046d10957e261e5489b026 | 2022-03-31T13:28:43.000Z | [
"pytorch",
"wav2vec2",
"automatic-speech-recognition",
"pt",
"dataset:CORAA",
"arxiv:2110.15731",
"transformers",
"audio",
"speech",
"portuguese-speech-corpus",
"hf-asr-leaderboard",
"PyTorch",
"license:apache-2.0",
"model-index"
] | automatic-speech-recognition | false | Edresson | null | Edresson/wav2vec2-large-xlsr-coraa-portuguese | 2,956 | 8 | transformers | ---
language: pt
datasets:
- CORAA
metrics:
- wer
tags:
- audio
- speech
- wav2vec2
- pt
- portuguese-speech-corpus
- automatic-speech-recognition
- hf-asr-leaderboard
- speech
- PyTorch
license: apache-2.0
model-index:
- name: Edresson Casanova XLSR Wav2Vec2 Large 53 Portuguese
results:
- task:
name: Speech Recognition
type: automatic-speech-recognition
dataset:
name: CORAA
type: CORAA
args: pt
metrics:
- name: Test CORAA WER
type: wer
value: 25.26
- task:
name: Speech Recognition
type: automatic-speech-recognition
dataset:
name: Common Voice 7
type: mozilla-foundation/common_voice_7_0
args: pt
metrics:
- name: Test WER on Common Voice 7
type: wer
value: 20.08
---
# Wav2vec 2.0 trained with CORAA Portuguese Dataset
This is a demonstration of a Wav2vec 2.0 model fine-tuned for Portuguese on the [CORAA dataset](https://github.com/nilc-nlp/CORAA).
# Use this model
```python
from transformers import AutoTokenizer, Wav2Vec2ForCTC
tokenizer = AutoTokenizer.from_pretrained("Edresson/wav2vec2-large-xlsr-coraa-portuguese")
model = Wav2Vec2ForCTC.from_pretrained("Edresson/wav2vec2-large-xlsr-coraa-portuguese")
```
# Results
For the results, check the [CORAA article](https://arxiv.org/abs/2110.15731).
# Example test with Common Voice Dataset
```python
import re
import torchaudio
from datasets import load_dataset

# Assumed (hypothetical) character set to strip from the reference transcriptions; adjust to your setup.
chars_to_ignore_regex = '[,?.!;:"]'

dataset = load_dataset("common_voice", "pt", split="test", data_dir="./cv-corpus-6.1-2020-12-11")
resampler = torchaudio.transforms.Resample(orig_freq=48_000, new_freq=16_000)

def map_to_array(batch):
    speech, _ = torchaudio.load(batch["path"])
    batch["speech"] = resampler.forward(speech.squeeze(0)).numpy()  # resample 48 kHz -> 16 kHz
    batch["sampling_rate"] = resampler.new_freq
    batch["sentence"] = re.sub(chars_to_ignore_regex, '', batch["sentence"]).lower().replace("’", "'")
    return batch
```
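The evaluation snippet below relies on a `map_to_pred` helper and a `wer` metric that are not defined here. A minimal sketch, assuming a `Wav2Vec2Processor` is available for this checkpoint and using `datasets.load_metric("wer")` (with `model` coming from the loading example above), could look like this; the exact preprocessing behind the reported numbers may differ.
```python
# Hedged sketch of the helpers used in the snippet below; the bodies are assumptions.
import torch
from datasets import load_metric
from transformers import Wav2Vec2Processor

processor = Wav2Vec2Processor.from_pretrained("Edresson/wav2vec2-large-xlsr-coraa-portuguese")
wer = load_metric("wer")

def map_to_pred(batch):
    inputs = processor(batch["speech"], sampling_rate=16_000, return_tensors="pt", padding=True)
    with torch.no_grad():
        logits = model(inputs.input_values, attention_mask=inputs.attention_mask).logits
    pred_ids = torch.argmax(logits, dim=-1)
    batch["predicted"] = processor.batch_decode(pred_ids)
    batch["target"] = batch["sentence"]
    return batch
```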
```python
ds = dataset.map(map_to_array)
result = ds.map(map_to_pred, batched=True, batch_size=1, remove_columns=list(ds.features.keys()))
print(wer.compute(predictions=result["predicted"], references=result["target"]))
```
| [
-0.08738625049591064,
-0.06697259098291397,
-0.026749230921268463,
-0.037617720663547516,
-0.03624272346496582,
0.04088184982538223,
-0.0603460893034935,
-0.05289452522993088,
-0.010151372291147709,
-0.036580152809619904,
0.0014873944455757737,
-0.14682410657405853,
-0.03581888601183891,
0.0008951806230470538,
-0.01621515117585659,
-0.05753955990076065,
0.021024229004979134,
0.024592019617557526,
-0.05233663693070412,
-0.005352068692445755,
0.03002362884581089,
0.051031097769737244,
0.04918315261602402,
-0.03640298172831535,
0.03570587560534477,
0.052114732563495636,
-0.03446122631430626,
0.013389838859438896,
-0.029027337208390236,
-0.03427330404520035,
0.05528627708554268,
0.008829391561448574,
0.08518663048744202,
0.0011297169839963317,
-0.06813862174749374,
-0.007506183814257383,
-0.0005028731538914144,
-0.023712100461125374,
0.015678877010941505,
-0.017909079790115356,
-0.03835494816303253,
0.030469633638858795,
0.0015432649524882436,
-0.017063293606042862,
-0.0024876149836927652,
-0.039863381534814835,
-0.06148582696914673,
0.02682131715118885,
-0.020490087568759918,
0.05307534709572792,
-0.0859641432762146,
0.02802698314189911,
-0.01423406321555376,
0.06552840769290924,
-0.08339112997055054,
0.005651019047945738,
0.014381575398147106,
0.05178362876176834,
0.04895368963479996,
0.022075822576880455,
-0.02187511883676052,
-0.05010892450809479,
-0.01828223466873169,
-0.01042636763304472,
-0.010648384690284729,
-0.06212849169969559,
-0.0489005409181118,
0.0068810563534498215,
-0.01964012160897255,
-0.011580163612961769,
-0.12635180354118347,
0.06539095938205719,
0.05721293389797211,
0.04419562593102455,
0.021010009571909904,
0.08189515769481659,
0.038650739938020706,
0.001130751334130764,
0.06305941939353943,
-0.05456201359629631,
0.00018161478510592133,
0.0034012426622211933,
-0.05629929527640343,
-0.038752321153879166,
0.10278533399105072,
-0.0046061379835009575,
0.026332635432481766,
-0.02891398034989834,
-0.006270166952162981,
-0.03481604903936386,
-0.025761038064956665,
0.024745075032114983,
-0.013590458780527115,
0.04755070060491562,
0.011295326054096222,
0.02251303195953369,
0.056111887097358704,
0.10850535333156586,
-0.04229819029569626,
0.05431769788265228,
0.014209403656423092,
-0.06682658940553665,
-0.0073660071939229965,
-0.07868881523609161,
-0.04754938930273056,
-0.04663843289017677,
-0.004821800626814365,
0.04659802466630936,
0.11626795679330826,
-0.03835326060652733,
-0.021439271047711372,
-0.00747008016332984,
-0.042799435555934906,
-0.035038284957408905,
0.011109855957329273,
0.05237419158220291,
-0.07636316865682602,
-0.09367368370294571,
0.036269549280405045,
0.015478267334401608,
-0.04295499995350838,
-0.07441922277212143,
0.009158787317574024,
-0.027823301032185555,
0.016708320006728172,
-0.0003980004112236202,
0.015551102347671986,
8.791858000373842e-33,
-0.010640568099915981,
0.052737824618816376,
-0.009381651878356934,
0.009651632979512215,
0.0014747284585610032,
-0.03267808258533478,
-0.06532327085733414,
0.007356449030339718,
-0.011585968546569347,
-0.05844057723879814,
-0.057660095393657684,
0.026765737682580948,
-0.10532022267580032,
0.04720287024974823,
0.02599664218723774,
0.03563696891069412,
-0.004830056801438332,
0.018028870224952698,
-0.04291658103466034,
0.018623633310198784,
0.16594073176383972,
-0.024903710931539536,
0.06227623298764229,
-0.009179583750665188,
0.06942595541477203,
0.041190966963768005,
0.05253252759575844,
-0.07714944332838058,
-0.014586486853659153,
0.055866047739982605,
-0.06665895879268646,
-0.060217197984457016,
-0.003418299602344632,
-0.0392581932246685,
-0.021921640262007713,
-0.05653635784983635,
0.012143217027187347,
0.008403059095144272,
-0.04203105717897415,
-0.06799978762865067,
0.02564188279211521,
0.0328577496111393,
0.05213532596826553,
0.03828268125653267,
-0.02680322527885437,
-0.1039159893989563,
0.009637282229959965,
0.08347177505493164,
-0.006094607058912516,
0.03960699215531349,
-0.03399355709552765,
-0.028073813766241074,
-0.04116126149892807,
0.05397147312760353,
0.008523261174559593,
-0.022945569828152657,
0.04806871712207794,
0.06964846700429916,
0.02896066941320896,
0.016460947692394257,
-0.012751107104122639,
-0.01375409122556448,
0.04718893766403198,
-0.009456368163228035,
0.012081931345164776,
-0.06220414489507675,
-0.03894728422164917,
0.03695372864603996,
0.061681851744651794,
-0.011635804548859596,
0.03942127153277397,
-0.019527355208992958,
0.09693101793527603,
0.10353940725326538,
-0.01853974722325802,
0.0018210409907624125,
0.0799759179353714,
-0.04854918643832207,
-0.02347845956683159,
0.013546623289585114,
-0.034333016723394394,
0.11757268011569977,
0.00811684038490057,
-0.07703237235546112,
-0.022463921457529068,
-0.0030161505565047264,
0.009936495684087276,
-0.09717516601085663,
-0.06448101997375488,
0.03275441378355026,
-0.05258049815893173,
0.1217355728149414,
-0.05085429921746254,
-0.07290655374526978,
0.009477190673351288,
-9.610034394640802e-33,
-0.011258238926529884,
0.061329152435064316,
-0.0012248535640537739,
0.07080648839473724,
-0.00827344972640276,
-0.018236586824059486,
0.06278907507658005,
0.04765951633453369,
-0.018521694466471672,
-0.09557618200778961,
0.049272529780864716,
-0.11694668978452682,
0.07978486269712448,
0.018894586712121964,
0.06990484148263931,
0.004677733406424522,
0.032281942665576935,
0.015208998695015907,
0.08063312619924545,
0.07957164943218231,
0.021332450211048126,
0.0498821884393692,
-0.063230499625206,
0.08792033046483994,
-0.038432490080595016,
-0.07722695916891098,
-0.07953282445669174,
0.01291649043560028,
0.008372909389436245,
-0.01442128699272871,
-0.026421599090099335,
0.02915947511792183,
-0.09553892910480499,
0.048497315496206284,
-0.0790044292807579,
-0.002584731439128518,
0.03894692659378052,
-0.020103992894291878,
-0.013350050896406174,
0.12122444808483124,
0.1187676265835762,
0.024710703641176224,
-0.08258409053087234,
-0.08933515846729279,
-0.012353037483990192,
-0.04706000164151192,
-0.02056344598531723,
-0.00701728044077754,
-0.0337742380797863,
-0.007758205756545067,
0.08732836693525314,
-0.04093552380800247,
-0.05414741486310959,
0.0443204864859581,
0.0014231967506930232,
0.009559987112879753,
0.0034687768202275038,
-0.10182594507932663,
-0.02104993909597397,
-0.013880297541618347,
-0.029696006327867508,
-0.005589601583778858,
-0.041125379502773285,
-0.06380543112754822,
0.06783609837293625,
0.0903087705373764,
0.009882456623017788,
0.01836400106549263,
0.03444717824459076,
0.007307132706046104,
-0.018151789903640747,
-0.06773456931114197,
-0.00045013328781351447,
0.012662132270634174,
0.003689025528728962,
-0.018521182239055634,
-0.0988290011882782,
-0.023877890780568123,
-0.023691311478614807,
-0.08420958369970322,
-0.06577180325984955,
0.012470203451812267,
0.07114246487617493,
-0.05742621049284935,
0.06827674806118011,
0.1194649413228035,
-0.03424431011080742,
-0.03642508387565613,
-0.04838236793875694,
0.0924045518040657,
-0.034408193081617355,
0.05980728939175606,
0.03987501189112663,
0.013754427433013916,
-0.0011679097078740597,
-5.423117954705958e-8,
-0.04658830165863037,
-0.008841115050017834,
0.04724007844924927,
-0.016858456656336784,
-0.03540041670203209,
-0.06255503743886948,
-0.0528268963098526,
0.00885811448097229,
0.04758406803011894,
-0.024372419342398643,
0.05440834164619446,
-0.039540451020002365,
-0.02119491994380951,
0.04357503354549408,
0.022704007104039192,
0.03811906650662422,
0.026787349954247475,
0.16469058394432068,
-0.04203643277287483,
-0.08880872279405594,
0.04409025236964226,
0.06779517978429794,
0.01548305619508028,
0.023183224722743034,
0.052299756556749344,
-0.0016736236866563559,
-0.04327850416302681,
0.08699905127286911,
0.006884192582219839,
-0.05257396772503853,
-0.008014603517949581,
0.001969023374840617,
0.011066553182899952,
-0.06809878349304199,
0.03492414578795433,
0.0072354720905423164,
-0.07286924868822098,
-0.02504272572696209,
-0.07024747133255005,
0.04973101243376732,
0.07186440378427505,
0.05896726995706558,
-0.15188759565353394,
-0.014054354280233383,
0.1184704378247261,
-0.01287110149860382,
-0.025100119411945343,
-0.09177569299936295,
0.02378588542342186,
0.012030930258333683,
-0.00634048692882061,
0.027825836092233658,
0.01827768050134182,
0.00014975463273003697,
0.06240347772836685,
0.04724019765853882,
0.014921520836651325,
-0.007847858592867851,
0.043438926339149475,
-0.000011544218068593182,
0.03008711338043213,
0.0025110291317105293,
-0.09394685178995132,
-0.006041417364031076
] |
castorini/ance-msmarco-passage | 6a7062e287fda08e561df5b9b55a6aff98c852a2 | 2021-05-20T15:18:16.000Z | [
"pytorch",
"roberta",
"arxiv:2007.00808",
"transformers"
] | null | false | castorini | null | castorini/ance-msmarco-passage | 2,954 | null | transformers | This model is converted from the original ANCE [repo](https://github.com/microsoft/ANCE) and adapted for use with Pyserini:
> Lee Xiong, Chenyan Xiong, Ye Li, Kwok-Fung Tang, Jialin Liu, Paul Bennett, Junaid Ahmed, Arnold Overwijk. [Approximate Nearest Neighbor Negative Contrastive Learning for Dense Text Retrieval](https://arxiv.org/pdf/2007.00808.pdf)
For more details on how to use it, check our experiments in [Pyserini](https://github.com/castorini/pyserini/blob/master/docs/experiments-ance.md)
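A minimal dense-retrieval sketch with Pyserini could look like the following; the class names assume a recent Pyserini release and the prebuilt index name is an assumption, so defer to the experiments doc above for the authoritative usage.
```python
# Hedged sketch: assumes a recent Pyserini release exposing FaissSearcher/AnceQueryEncoder
# and a prebuilt ANCE index; the exact names and commands are in experiments-ance.md.
from pyserini.search.faiss import FaissSearcher, AnceQueryEncoder

encoder = AnceQueryEncoder('castorini/ance-msmarco-passage')
searcher = FaissSearcher.from_prebuilt_index('msmarco-passage-ance-bf', encoder)

hits = searcher.search('what is a lobster roll')
for i, hit in enumerate(hits[:5]):
    print(f'{i + 1:2} {hit.docid:15} {hit.score:.5f}')
```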
| [
-0.04700297862291336,
-0.10910361260175705,
-0.03779790922999382,
-0.020792385563254356,
-0.07212294638156891,
0.017476214095950127,
-0.0466337651014328,
0.006250105332583189,
0.018044278025627136,
0.022926371544599533,
0.00924587156623602,
0.0008673819829709828,
0.03145028278231621,
0.042823489755392075,
-0.15649867057800293,
0.059293899685144424,
0.11830244958400726,
-0.012333979830145836,
-0.002407660009339452,
-0.035008057951927185,
-0.019549014046788216,
0.04476103186607361,
0.045214202255010605,
-0.0862564966082573,
0.005181622691452503,
-0.012993800453841686,
-0.00018543837359175086,
0.009045811370015144,
0.026896005496382713,
-0.0723876804113388,
0.047609664499759674,
0.032020725309848785,
0.06809964776039124,
0.042147714644670486,
-0.03375668078660965,
0.046226851642131805,
-0.010571177117526531,
-0.00470969220623374,
-0.022296078503131866,
0.04281727969646454,
0.02546420320868492,
0.03744509443640709,
-0.038145434111356735,
0.0174984373152256,
0.09757614880800247,
-0.0314333476126194,
-0.03748629614710808,
-0.006384852807968855,
0.031613051891326904,
-0.06251911073923111,
-0.06585251539945602,
-0.00465156277641654,
-0.06353804469108582,
-0.018855255097150803,
-0.11156093329191208,
0.01860184781253338,
-0.012391998432576656,
-0.04350418969988823,
-0.03389894962310791,
-0.035724326968193054,
0.015382247976958752,
-0.03323902189731598,
-0.08228810876607895,
-0.028446970507502556,
0.07210508733987808,
0.004849671386182308,
0.0088537922129035,
0.024240907281637192,
-0.0037742163985967636,
-0.019955353811383247,
-0.041374944150447845,
-0.001644288538955152,
0.02328961342573166,
-0.03382035344839096,
0.002391737187281251,
0.11125487089157104,
0.10452869534492493,
-0.025440305471420288,
0.032864347100257874,
-0.14166247844696045,
0.028801653534173965,
-0.07473912835121155,
0.07236658036708832,
0.018253285437822342,
0.08292274922132492,
-0.026500945910811424,
-0.00011703513882821426,
-0.0077514066360890865,
0.006893949117511511,
-0.008353191427886486,
0.025049520656466484,
-0.022684333845973015,
-0.09479525685310364,
-0.08301636576652527,
-0.030026845633983612,
-0.03305225446820259,
0.027398275211453438,
-0.005751552060246468,
-0.032188013195991516,
0.08676241338253021,
0.004607518203556538,
0.05159696191549301,
0.005105983465909958,
-0.03780313581228256,
-0.0035427091643214226,
-0.03785019367933273,
0.04160245507955551,
-0.031062079593539238,
0.12296482175588608,
-0.09951023757457733,
-0.03169747069478035,
0.01511577982455492,
-0.05050643905997276,
-0.030519623309373856,
0.015507482923567295,
0.0013765119947493076,
0.02356749400496483,
0.030045730993151665,
0.012025429867208004,
-0.020419403910636902,
-0.08769459277391434,
-0.011181680485606194,
-0.08165132254362106,
0.03757787495851517,
0.007609953172504902,
0.019166504964232445,
-0.055708080530166626,
3.328867758414251e-33,
0.011822842061519623,
0.04057318717241287,
0.048927269876003265,
-0.03880075365304947,
0.02930705063045025,
0.025840235874056816,
-0.03947952389717102,
-0.08025165647268295,
-0.060185179114341736,
-0.03302472457289696,
-0.04891492798924446,
0.026025529950857162,
-0.044633831828832626,
0.0906994417309761,
-0.032969292253255844,
-0.08487743139266968,
-0.020394891500473022,
-0.009755099192261696,
-0.04940864071249962,
0.015404296107590199,
0.037999462336301804,
-0.003839729819446802,
-0.004589418414980173,
-0.0905037671327591,
-0.04545950889587402,
0.007273923140019178,
0.04107626900076866,
-0.01516362652182579,
0.008058464154601097,
0.00732226250693202,
-0.13347862660884857,
0.0786210373044014,
-0.011533183045685291,
0.010492198169231415,
-0.01465160958468914,
-0.0514877550303936,
-0.04204602912068367,
0.004841706249862909,
0.030139463022351265,
-0.08116433024406433,
-0.07984000444412231,
0.050476301461458206,
0.06926482170820236,
-0.08927472680807114,
-0.07314479351043701,
-0.003081441158428788,
0.03615982457995415,
-0.026855822652578354,
0.05288868770003319,
-0.03485935926437378,
0.008858742192387581,
-0.0234846081584692,
-0.05689019709825516,
-0.019752711057662964,
-0.002988734981045127,
-0.0075185252353549,
0.04079991579055786,
0.1270861178636551,
0.11209022253751755,
0.016666775569319725,
0.11826937645673752,
0.041820019483566284,
0.03510885313153267,
0.0013711529318243265,
0.08176698535680771,
0.03924071043729782,
-0.02801859751343727,
-0.014946638606488705,
0.039164040237665176,
-0.0028264315333217382,
-0.008279748260974884,
0.05616268143057823,
0.06595632433891296,
-0.03667150437831879,
0.017041029408574104,
-0.0073199342004954815,
0.05420096591114998,
-0.018454065546393394,
0.015005601570010185,
-0.09631514549255371,
-0.08083528280258179,
-0.056258004158735275,
0.060022611171007156,
-0.0882268026471138,
-0.152223601937294,
0.014349996112287045,
0.0763012245297432,
-0.07870613783597946,
-0.03884876146912575,
-0.12141038477420807,
0.0326792374253273,
-0.003987470641732216,
-0.027650605887174606,
-0.05515971779823303,
0.04922481253743172,
-4.454535475099074e-33,
0.036034777760505676,
-0.013669934123754501,
-0.08666770905256271,
0.023283764719963074,
-0.05074647068977356,
0.008954286575317383,
0.047138094902038574,
0.11518479138612747,
-0.048550333827733994,
-0.04749017953872681,
0.03195704519748688,
0.027465160936117172,
0.03947221860289574,
-0.10054811835289001,
0.027953049167990685,
0.03333859145641327,
0.11014118790626526,
0.0201219841837883,
0.022451166063547134,
0.02085801586508751,
0.012155557051301003,
0.0566563755273819,
-0.02914423868060112,
0.06582468003034592,
0.0016324304742738605,
0.0031039402820169926,
0.035610660910606384,
0.03065403550863266,
-0.08254885673522949,
-0.10264523327350616,
-0.030410433188080788,
0.03787985071539879,
-0.04728901758790016,
-0.021010713651776314,
-0.04101075604557991,
0.05926110967993736,
0.018306653946638107,
-0.03951519355177879,
-0.05395909398794174,
0.0838693380355835,
0.06600572913885117,
0.08599692583084106,
-0.05924968421459198,
0.008732463233172894,
0.023083630949258804,
0.04458940029144287,
-0.09254655987024307,
0.016248304396867752,
0.028088172897696495,
-0.03744510933756828,
0.03302101790904999,
0.03734952583909035,
0.014472372829914093,
-0.0026051767636090517,
-0.03523736447095871,
-0.0017369447741657495,
0.03433973714709282,
-0.0532345250248909,
0.001350085949525237,
0.051251668483018875,
-0.10427754372358322,
-0.03813466057181358,
-0.052608322352170944,
-0.016111956909298897,
0.00508676003664732,
0.0008791292202658951,
0.008928696624934673,
0.03090388886630535,
0.01677742786705494,
0.017438435927033424,
0.01496164035052061,
-0.0020486272405833006,
-0.009334295056760311,
0.015624795109033585,
0.035267073661088943,
-0.048938311636447906,
0.03478190302848816,
0.014618868939578533,
-0.029760126024484634,
-0.02646331861615181,
0.004100879188627005,
-0.004238407593220472,
0.049224603921175,
0.08227332681417465,
0.10212302207946777,
0.01321470458060503,
0.007734852842986584,
-0.07446572929620743,
0.03811445087194443,
0.028002457693219185,
0.0453132726252079,
0.010422668419778347,
0.013401292264461517,
-0.04130399599671364,
0.03714359551668167,
-5.003263581215833e-8,
-0.0021541800815612078,
-0.05746764317154884,
0.0010567456483840942,
0.002628200687468052,
0.04370380565524101,
-0.06356672197580338,
0.0006514494307339191,
0.046809569001197815,
-0.08409138768911362,
-0.011527687311172485,
0.022493800148367882,
-0.06750465929508209,
-0.02286774106323719,
-0.041441287845373154,
-0.007549823261797428,
0.11853350698947906,
0.08706818521022797,
0.05518932268023491,
-0.015434392727911472,
0.04167007654905319,
0.012741858139634132,
0.055992819368839264,
0.023538101464509964,
-0.038683123886585236,
0.07665099203586578,
0.03292551264166832,
-0.09087242186069489,
-0.012019744142889977,
0.03273145109415054,
-0.012398903258144855,
0.05431530252099037,
0.05037768930196762,
-0.0044885971583426,
-0.012960781343281269,
0.04979168623685837,
0.131880521774292,
0.005886875092983246,
0.0030589946545660496,
-0.07286635786294937,
0.03814126178622246,
-0.05566190555691719,
-0.03430171683430672,
0.002737251343205571,
0.005686997901648283,
0.0340750552713871,
0.08155836910009384,
-0.02171974442899227,
-0.12954393029212952,
0.09238626062870026,
0.08052381128072739,
0.0806623324751854,
0.02344544045627117,
0.01184700708836317,
0.013026564382016659,
-0.020405413582921028,
-0.05415799096226692,
-0.015677446499466896,
0.004480994306504726,
0.006276457570493221,
-0.022831842303276062,
0.09976029396057129,
0.043436527252197266,
-0.03518331050872803,
0.022526176646351814
] |
sebastian-hofstaetter/colbert-distilbert-margin_mse-T2-msmarco | 0feb43c9885ff5e9b1116ea548525aadfc327d7e | 2021-03-18T10:35:12.000Z | [
"pytorch",
"ColBERT",
"en",
"dataset:ms_marco",
"arxiv:2004.12832",
"arxiv:2010.02666",
"transformers",
"dpr",
"dense-passage-retrieval",
"knowledge-distillation"
] | null | false | sebastian-hofstaetter | null | sebastian-hofstaetter/colbert-distilbert-margin_mse-T2-msmarco | 2,951 | 3 | transformers | ---
language: "en"
tags:
- dpr
- dense-passage-retrieval
- knowledge-distillation
datasets:
- ms_marco
---
# Margin-MSE Trained ColBERT
We provide a retrieval-trained, DistilBERT-based ColBERT model (https://arxiv.org/pdf/2004.12832.pdf). Our model is trained with Margin-MSE using a 3-teacher BERT_Cat (concatenated BERT scoring) ensemble on MSMARCO-Passage.
This instance can be used to **re-rank a candidate set** or **directly for vector-index-based dense retrieval**. The architecture is a 6-layer DistilBERT with an additional single linear layer at the end.
If you want to know more about our simple yet effective knowledge distillation method for efficient information retrieval models, which works across a variety of student architectures and was used to train this instance, check out our paper: https://arxiv.org/abs/2010.02666 🎉
For more information, training data, source code, and a minimal usage example please visit: https://github.com/sebastian-hofstaetter/neural-ranking-kd
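As a quick illustration of the Margin-MSE training objective mentioned above, the loss can be sketched as matching the student's positive-negative score margin to the teacher ensemble's margin; the variable names below are illustrative only, and the exact training code lives in the linked repo.
````python
# Hedged sketch of the Margin-MSE objective: the student learns to reproduce the
# teacher ensemble's score *margin* between a relevant and a non-relevant passage.
import torch

mse = torch.nn.MSELoss()

def margin_mse_loss(student_pos, student_neg, teacher_pos, teacher_neg):
    # student_*: ColBERT scores for (query, passage+) and (query, passage-)
    # teacher_*: averaged BERT_Cat ensemble scores for the same pairs
    return mse(student_pos - student_neg, teacher_pos - teacher_neg)
````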
## Configuration
- fp16 trained, so fp16 inference shouldn't be a problem
- We use no compression: 768-dim output vectors (better suited for re-ranking or for storing smaller collections; MSMARCO grows to roughly 1 TB of vector storage with fp16 ... oops)
- Query [MASK] augmentation = 8x regardless of batch size (this needs to be added to the query before it reaches the model; see the usage example in the GitHub repo and the hedged sketch below)
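Since the [MASK] augmentation happens outside the model code below, here is a hedged sketch of one way to append 8 [MASK] tokens to a tokenized query; the authoritative implementation (including where exactly the tokens are inserted) is in the GitHub repo linked above.
````python
# Hedged illustration of query [MASK] augmentation (8 extra [MASK] tokens per query).
# The tokens are simply appended here; the repo may insert them before [SEP] instead.
import torch
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("distilbert-base-uncased")

def augment_query(query: str, n_masks: int = 8):
    enc = tokenizer(query, return_tensors="pt")
    mask_ids = torch.full((1, n_masks), tokenizer.mask_token_id, dtype=torch.long)
    enc["input_ids"] = torch.cat([enc["input_ids"], mask_ids], dim=-1)
    # extend the attention mask so the appended [MASK] tokens are attended to
    enc["attention_mask"] = torch.cat(
        [enc["attention_mask"], torch.ones((1, n_masks), dtype=torch.long)], dim=-1)
    return enc
````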
## Model Code
````python
from transformers import AutoTokenizer,AutoModel, PreTrainedModel,PretrainedConfig
from typing import Dict
import torch
class ColBERTConfig(PretrainedConfig):
model_type = "ColBERT"
bert_model: str
compression_dim: int = 768
dropout: float = 0.0
return_vecs: bool = False
trainable: bool = True
class ColBERT(PreTrainedModel):
"""
ColBERT model from: https://arxiv.org/pdf/2004.12832.pdf
We use a dot-product instead of cosine per term (slightly better)
"""
config_class = ColBERTConfig
base_model_prefix = "bert_model"
def __init__(self,
cfg) -> None:
super().__init__(cfg)
self.bert_model = AutoModel.from_pretrained(cfg.bert_model)
for p in self.bert_model.parameters():
p.requires_grad = cfg.trainable
self.compressor = torch.nn.Linear(self.bert_model.config.hidden_size, cfg.compression_dim)
def forward(self,
query: Dict[str, torch.LongTensor],
document: Dict[str, torch.LongTensor]):
query_vecs = self.forward_representation(query)
document_vecs = self.forward_representation(document)
score = self.forward_aggregation(query_vecs,document_vecs,query["attention_mask"],document["attention_mask"])
return score
def forward_representation(self,
tokens,
sequence_type=None) -> torch.Tensor:
vecs = self.bert_model(**tokens)[0] # assuming a distilbert model here
vecs = self.compressor(vecs)
# if encoding only, zero-out the mask values so we can compress storage
if sequence_type == "doc_encode" or sequence_type == "query_encode":
            vecs = vecs * tokens["attention_mask"].unsqueeze(-1) # zero out padded positions (HF tokenizer key, consistent with forward())
return vecs
def forward_aggregation(self,query_vecs, document_vecs,query_mask,document_mask):
# create initial term-x-term scores (dot-product)
score = torch.bmm(query_vecs, document_vecs.transpose(2,1))
        # mask out padding on the doc dimension (mask with -10000 so max-pooling cannot select padded positions; 0 could still be selected)
exp_mask = document_mask.bool().unsqueeze(1).expand(-1,score.shape[1],-1)
score[~exp_mask] = - 10000
# max pooling over document dimension
score = score.max(-1).values
        # mask out padding query values
score[~(query_mask.bool())] = 0
# sum over query values
score = score.sum(-1)
return score
tokenizer = AutoTokenizer.from_pretrained("distilbert-base-uncased") # honestly not sure if that is the best way to go, but it works :)
model = ColBERT.from_pretrained("sebastian-hofstaetter/colbert-distilbert-margin_mse-T2-msmarco")
````
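A hedged usage sketch that scores a single query-passage pair with the tokenizer and model loaded above; it skips the query [MASK] augmentation from the configuration notes, so the scores will not exactly match the published setup.
````python
import torch

query = tokenizer("what is a lobster roll", return_tensors="pt")
passage = tokenizer("A lobster roll is a sandwich filled with lobster meat.", return_tensors="pt")

with torch.no_grad():
    score = model(query, passage)  # one relevance score per query-passage pair

print(score)
````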
## Effectiveness on MSMARCO Passage & TREC Deep Learning '19
We trained our model on the MSMARCO standard ("small", 400K-query) training triples with knowledge distillation, using a batch size of 32 on a single consumer-grade GPU (11 GB memory).
For re-ranking we used the top-1000 BM25 results.
### MSMARCO-DEV
Here, we use the larger 49K-query DEV set (results are in the same range as on the smaller 7K DEV set; only minimal changes are expected).
| | MRR@10 | NDCG@10 |
|----------------------------------|--------|---------|
| BM25 | .194 | .241 |
| **Margin-MSE ColBERT** (Re-ranking) | .375 | .436 |
### TREC-DL'19
For MRR we use the recommended binarization point of 2 on the graded relevance labels. This might skew the results when compared against other binarization points.
| | MRR@10 | NDCG@10 |
|----------------------------------|--------|---------|
| BM25 | .689 | .501 |
| **Margin-MSE ColBERT** (Re-ranking) | .878 | .744 |
For more metrics, baselines, info and analysis, please see the paper: https://arxiv.org/abs/2010.02666
## Limitations & Bias
- The model inherits social biases from both DistilBERT and MSMARCO.
- The model is only trained on relatively short passages of MSMARCO (avg. 60 words length), so it might struggle with longer text.
## Citation
If you use our model checkpoint please cite our work as:
```
@misc{hofstaetter2020_crossarchitecture_kd,
title={Improving Efficient Neural Ranking Models with Cross-Architecture Knowledge Distillation},
author={Sebastian Hofst{\"a}tter and Sophia Althammer and Michael Schr{\"o}der and Mete Sertkan and Allan Hanbury},
year={2020},
eprint={2010.02666},
archivePrefix={arXiv},
primaryClass={cs.IR}
}
``` | [
-0.05089406296610832,
-0.05945128574967384,
0.02105080708861351,
0.016880514100193977,
0.015971899032592773,
0.02333458885550499,
0.012614397332072258,
0.05290772020816803,
-0.04220648109912872,
-0.04042566567659378,
-0.09296119958162308,
-0.00510636530816555,
0.04807014390826225,
0.05253411829471588,
-0.06761057674884796,
0.06015132740139961,
0.09712554514408112,
0.06175629794597626,
-0.036032017320394516,
0.03113808110356331,
0.030468538403511047,
0.005100867711007595,
0.0040799998678267,
-0.026177624240517616,
0.0035668453201651573,
-0.02586469240486622,
-0.028944309800863266,
-0.0751267820596695,
0.0644204244017601,
-0.05415318161249161,
0.07238250970840454,
0.008566286414861679,
0.0450526662170887,
0.1321249008178711,
-0.07527591288089752,
0.05685859173536301,
-0.03863126412034035,
0.012499970383942127,
0.011098059825599194,
0.06688106060028076,
-0.04097585380077362,
0.0814080759882927,
-0.04105371981859207,
0.016661010682582855,
0.11752386391162872,
-0.0006339860847219825,
-0.07066430151462555,
-0.0024703044909983873,
0.03345256671309471,
0.030403664335608482,
-0.08910778164863586,
-0.015084882266819477,
-0.02872244454920292,
0.032327715307474136,
-0.02375992201268673,
0.026856262236833572,
-0.03268415480852127,
-0.07977324724197388,
-0.03973271697759628,
-0.11841148883104324,
0.040723979473114014,
-0.08306977897882462,
-0.046067461371421814,
-0.026780128479003906,
-0.0002561155706644058,
0.010622109286487103,
-0.049762967973947525,
0.06597740948200226,
0.04721373692154884,
-0.057647258043289185,
-0.015457922592759132,
0.061638664454221725,
-0.03169068694114685,
-0.0050071831792593,
0.05803042650222778,
0.06967385858297348,
0.05162125453352928,
-0.026766164228320122,
0.00784718245267868,
-0.052783768624067307,
0.00908180046826601,
0.014003678224980831,
0.04119566082954407,
-0.035998862236738205,
0.12323832511901855,
-0.04522109404206276,
0.04130087420344353,
-0.028168590739369392,
0.05369700863957405,
-0.021911313757300377,
0.024971531704068184,
-0.03513597324490547,
0.006902624387294054,
-0.03783673048019409,
0.08852081745862961,
0.049977857619524,
0.05540003627538681,
-0.04849018529057503,
-0.009312582202255726,
0.11912355571985245,
-0.02902892790734768,
0.0862007662653923,
-0.020163655281066895,
-0.11789959669113159,
-0.026157958433032036,
-0.012920367531478405,
0.06708238273859024,
0.0528392493724823,
0.002425375860184431,
-0.14563536643981934,
0.02491109073162079,
0.031895898282527924,
-0.0058592832647264,
-0.05258432403206825,
-0.004057371057569981,
-0.05393252149224281,
0.027285322546958923,
-0.03619964420795441,
0.09046628326177597,
0.014947845600545406,
-0.041833292692899704,
0.041661765426397324,
0.002516782144084573,
-0.030912766233086586,
-0.03567284718155861,
-0.008770524524152279,
-0.023889606818556786,
3.4380689795537334e-33,
0.031016284599900246,
0.0708543062210083,
-0.01916772872209549,
0.014847652055323124,
-0.0037388200871646404,
0.019686000421643257,
0.04688858240842819,
0.0020670825615525246,
-0.0541028156876564,
-0.023767564445734024,
-0.030077559873461723,
0.08079471439123154,
-0.011766867712140083,
0.039149172604084015,
0.042907342314720154,
-0.02600766345858574,
-0.03701042756438255,
0.06609424948692322,
0.013287079520523548,
-0.01110187266021967,
0.0734364241361618,
0.020368553698062897,
0.02035047486424446,
-0.10422935336828232,
0.026653140783309937,
-0.028600230813026428,
0.002741408534348011,
-0.05344169959425926,
-0.04237159341573715,
0.004959656856954098,
-0.07537850737571716,
-0.0049700201489031315,
-0.028381427749991417,
0.02967124804854393,
-0.018401287496089935,
-0.003097311593592167,
-0.04175485670566559,
-0.032246172428131104,
0.030275525525212288,
-0.0676620751619339,
0.0034513429272919893,
-0.008427809923887253,
0.05224084109067917,
-0.05583508312702179,
-0.109323650598526,
0.0076945447362959385,
0.030017640441656113,
0.006307791452854872,
0.07166578620672226,
-0.05644379183650017,
0.025959812104701996,
0.009976480156183243,
-0.06850886344909668,
-0.08205397427082062,
-0.0025962002109736204,
-0.004753488581627607,
0.025342518463730812,
0.06807949393987656,
0.1028880700469017,
0.0019809172954410315,
0.02213207259774208,
0.05210576206445694,
0.07482192665338516,
0.02808212675154209,
0.039943329989910126,
-0.037210073322057724,
-0.05979446694254875,
0.06083781644701958,
0.1154034361243248,
-0.007157956250011921,
-0.04287833720445633,
-0.03498140349984169,
0.04307650774717331,
-0.069818876683712,
0.025255238637328148,
-0.028659701347351074,
0.0009482194436714053,
-0.1319596916437149,
0.02537701278924942,
-0.013943370431661606,
-0.03989877551794052,
-0.05752963572740555,
0.017222262918949127,
-0.05226892605423927,
-0.06795687973499298,
0.07489676773548126,
0.0417194738984108,
-0.10542140901088715,
0.020960675552487373,
-0.04583604261279106,
-0.025034260004758835,
0.0007465372909791768,
-0.05279811844229698,
-0.003686717711389065,
0.012943074107170105,
-1.955895683112596e-33,
0.019857866689562798,
-0.016841502860188484,
-0.014791380614042282,
0.051125068217515945,
0.02510647289454937,
-0.032433707267045975,
-0.016231870278716087,
0.04486104100942612,
-0.051797833293676376,
-0.07591449469327927,
0.01147540844976902,
-0.024507829919457436,
0.010136747732758522,
-0.07060348242521286,
0.0036324469838291407,
0.06285921484231949,
-0.04301335662603378,
-0.05030902102589607,
-0.014239232055842876,
0.042802318930625916,
-0.02399268001317978,
-0.01512585673481226,
-0.09957288205623627,
0.08421563357114792,
0.03751923516392708,
0.036871835589408875,
0.02924005314707756,
0.02211645431816578,
-0.01121297013014555,
-0.025237319990992546,
0.011822324246168137,
-0.003709044074639678,
0.036514122039079666,
-0.045839183032512665,
-0.10428716242313385,
0.04991206154227257,
0.03352654352784157,
-0.0806865245103836,
-0.0565970242023468,
0.08631744980812073,
-0.005221340339630842,
0.018568944185972214,
-0.024960193783044815,
-0.013509313575923443,
0.03969720005989075,
0.05260413885116577,
-0.10339580476284027,
-0.0024191399570554495,
0.09672657400369644,
0.028735211119055748,
0.07712122052907944,
-0.03514711558818817,
-0.08312845975160599,
0.00790551770478487,
-0.012197108007967472,
-0.034801360219717026,
-0.03572467342019081,
-0.030170289799571037,
0.02024924010038376,
0.02546524442732334,
-0.023081589490175247,
0.002546361181885004,
0.03418320044875145,
0.024841273203492165,
0.03944305330514908,
-0.059894561767578125,
-0.05332837998867035,
0.03156998008489609,
-0.14206215739250183,
0.0526912622153759,
0.011158430948853493,
-0.010539935901761055,
0.07819182425737381,
-0.03328276053071022,
0.008912893943488598,
0.05936088413000107,
0.08897417783737183,
-0.012192932888865471,
-0.03980157896876335,
-0.029360370710492134,
-0.07289207726716995,
-0.042847756296396255,
0.03888851776719093,
0.09749447554349899,
0.039641931653022766,
0.049765538424253464,
0.030811335891485214,
-0.011666137725114822,
0.00011716954759322107,
0.03064442239701748,
-0.024948839098215103,
-0.08547861129045486,
0.06352242082357407,
0.008347554132342339,
0.03489743173122406,
-5.441147976625871e-8,
-0.057123128324747086,
0.0052922568283975124,
-0.08550193160772324,
0.044166263192892075,
-0.06241337209939957,
-0.04127997159957886,
-0.017660122364759445,
0.08515780419111252,
-0.07969871163368225,
-0.011329352855682373,
0.06588062644004822,
-0.038719046860933304,
-0.07754206657409668,
-0.01665535941720009,
0.06538733094930649,
0.10052011162042618,
0.01703248918056488,
0.06158122047781944,
-0.020597532391548157,
-0.0461818091571331,
0.12138842791318893,
0.009215384721755981,
0.1171569675207138,
0.004403490107506514,
0.07527516037225723,
0.004576154053211212,
-0.013770309276878834,
0.09107531607151031,
0.07163438946008682,
0.02313779480755329,
0.012828594073653221,
0.0064169797115027905,
0.012382556684315205,
-0.026471499353647232,
0.1002027615904808,
0.11020901054143906,
-0.07651295512914658,
-0.052834637463092804,
-0.047123804688453674,
0.06528478860855103,
0.030070984736084938,
0.022739486768841743,
-0.027119452133774757,
0.00570931239053607,
0.035040516406297684,
0.006715818308293819,
-0.06961631029844284,
0.02405472658574581,
0.08874094486236572,
0.04132192209362984,
0.009260863065719604,
-0.05211387574672699,
-0.027839433401823044,
0.046300001442432404,
0.03139283508062363,
0.02964387834072113,
-0.042371708899736404,
-0.04513813182711601,
0.01702270843088627,
-0.07331600785255432,
0.04841851070523262,
-0.017302218824625015,
-0.03912384435534477,
0.05226464942097664
] |
bigscience/bloom-2b5 | 68331cd7e9637733d1e3e011515288afb1c23ad8 | 2022-07-18T15:58:49.000Z | [
"pytorch",
"bloom",
"feature-extraction",
"ak",
"ar",
"as",
"bm",
"bn",
"ca",
"code",
"en",
"es",
"eu",
"fon",
"fr",
"gu",
"hi",
"id",
"ig",
"ki",
"kn",
"lg",
"ln",
"ml",
"mr",
"ne",
"nso",
"ny",
"or",
"pa",
"pt",
"rn",
"rw",
"sn",
"st",
"sw",
"ta",
"te",
"tn",
"ts",
"tum",
"tw",
"ur",
"vi",
"wo",
"xh",
"yo",
"zh",
"zhs",
"zht",
"zu",
"arxiv:1909.08053",
"arxiv:2110.02861",
"arxiv:2108.12409",
"transformers",
"license:bigscience-bloom-rail-1.0",
"text-generation",
"model-index"
] | text-generation | false | bigscience | null | bigscience/bloom-2b5 | 2,947 | 3 | transformers | ---
license: bigscience-bloom-rail-1.0
language:
- ak
- ar
- as
- bm
- bn
- ca
- code
- en
- es
- eu
- fon
- fr
- gu
- hi
- id
- ig
- ki
- kn
- lg
- ln
- ml
- mr
- ne
- nso
- ny
- or
- pa
- pt
- rn
- rw
- sn
- st
- sw
- ta
- te
- tn
- ts
- tum
- tw
- ur
- vi
- wo
- xh
- yo
- zh
- zhs
- zht
- zu
pipeline_tag: text-generation
model-index:
- name: bloom
results:
- task:
type: text-generation
name: text generation
dataset:
name: arc_challenge
type: arc_challenge
metrics:
- name: acc
type: acc
value: 0.27986348122866894
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: arc_easy
type: arc_easy
metrics:
- name: acc
type: acc
value: 0.5946969696969697
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: axb
type: axb
metrics:
- name: acc
type: acc
value: 0.4433876811594203
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: axg
type: axg
metrics:
- name: acc
type: acc
value: 0.5
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: boolq
type: boolq
metrics:
- name: acc
type: acc
value: 0.6165137614678899
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: cb
type: cb
metrics:
- name: acc
type: acc
value: 0.30357142857142855
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: cola
type: cola
metrics:
- name: acc
type: acc
value: 0.610738255033557
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: copa
type: copa
metrics:
- name: acc
type: acc
value: 0.63
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: crows_pairs_english
type: crows_pairs_english
metrics:
- name: acc
type: acc
value: 0.4973166368515206
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: crows_pairs_french
type: crows_pairs_french
metrics:
- name: acc
type: acc
value: 0.5032796660703638
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: diabla
type: diabla
metrics:
- name: acc
type: acc
value: 0.28888308977035493
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_afr
type: gsarti/flores_101_afr
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 6.500798737976343
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_amh
type: gsarti/flores_101_amh
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 3.9726863338897145
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_ara
type: gsarti/flores_101_ara
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 1.8083841089875814
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_asm
type: gsarti/flores_101_asm
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 5.699102962086425
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_ast
type: gsarti/flores_101_ast
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 3.9252047073429384
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_azj
type: gsarti/flores_101_azj
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 6.942805054270002
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_bel
type: gsarti/flores_101_bel
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 3.614136245847082
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_ben
type: gsarti/flores_101_ben
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 5.121491534300969
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_bos
type: gsarti/flores_101_bos
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 5.653353469118798
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_bul
type: gsarti/flores_101_bul
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 2.7014693938055068
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_cat
type: gsarti/flores_101_cat
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 2.305190041967345
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_ceb
type: gsarti/flores_101_ceb
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 6.291000321323428
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_ces
type: gsarti/flores_101_ces
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 5.447322753586386
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_ckb
type: gsarti/flores_101_ckb
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 3.7255124939234765
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_cym
type: gsarti/flores_101_cym
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 12.539424151448149
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_dan
type: gsarti/flores_101_dan
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 5.183309001005672
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_deu
type: gsarti/flores_101_deu
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 3.1180422286591347
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_ell
type: gsarti/flores_101_ell
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 2.467943456164706
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_eng
type: gsarti/flores_101_eng
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 2.018740628193298
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_est
type: gsarti/flores_101_est
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 9.11654425176368
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_fas
type: gsarti/flores_101_fas
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 3.058009097116482
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_fin
type: gsarti/flores_101_fin
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 6.847047959628553
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_fra
type: gsarti/flores_101_fra
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 1.9975177011840075
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_ful
type: gsarti/flores_101_ful
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 11.465912731488828
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_gle
type: gsarti/flores_101_gle
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 8.681491663539422
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_glg
type: gsarti/flores_101_glg
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 3.029991089015508
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_guj
type: gsarti/flores_101_guj
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 4.955224230286231
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_hau
type: gsarti/flores_101_hau
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 10.758347356372159
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_heb
type: gsarti/flores_101_heb
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 3.6004478129801667
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_hin
type: gsarti/flores_101_hin
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 4.712530650588064
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_hrv
type: gsarti/flores_101_hrv
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 5.822418943372185
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_hun
type: gsarti/flores_101_hun
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 6.440482646965992
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_hye
type: gsarti/flores_101_hye
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 3.657718918347166
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_ibo
type: gsarti/flores_101_ibo
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 5.564814003872672
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_ind
type: gsarti/flores_101_ind
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 2.1597101468869373
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_isl
type: gsarti/flores_101_isl
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 8.082349269518136
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_ita
type: gsarti/flores_101_ita
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 2.9687591414176207
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_jav
type: gsarti/flores_101_jav
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 7.0573805415708994
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_jpn
type: gsarti/flores_101_jpn
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 2.7758864197116933
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_kam
type: gsarti/flores_101_kam
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 11.072949642861332
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_kan
type: gsarti/flores_101_kan
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 5.551730651007082
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_kat
type: gsarti/flores_101_kat
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 2.522630524283745
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_kaz
type: gsarti/flores_101_kaz
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 3.3901748516975574
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_kea
type: gsarti/flores_101_kea
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 8.918534182590863
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_kir
type: gsarti/flores_101_kir
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 3.729278369847201
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_kor
type: gsarti/flores_101_kor
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 3.932884847226212
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_lao
type: gsarti/flores_101_lao
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 2.9077314760849924
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_lav
type: gsarti/flores_101_lav
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 7.777221919194806
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_lin
type: gsarti/flores_101_lin
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 7.524842908050988
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_lit
type: gsarti/flores_101_lit
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 7.369179434621725
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_ltz
type: gsarti/flores_101_ltz
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 8.801059747949214
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_lug
type: gsarti/flores_101_lug
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 8.483203026364786
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_luo
type: gsarti/flores_101_luo
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 11.975963093623681
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_mal
type: gsarti/flores_101_mal
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 4.615948455160037
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_mar
type: gsarti/flores_101_mar
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 5.483253482821379
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_mkd
type: gsarti/flores_101_mkd
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 2.9656732291754087
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_mlt
type: gsarti/flores_101_mlt
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 15.004773437665275
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_mon
type: gsarti/flores_101_mon
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 3.410598542315402
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_mri
type: gsarti/flores_101_mri
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 7.474035895661322
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_msa
type: gsarti/flores_101_msa
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 2.5710001772665634
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_mya
type: gsarti/flores_101_mya
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 2.413577969878331
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_nld
type: gsarti/flores_101_nld
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 4.127831721885065
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_nob
type: gsarti/flores_101_nob
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 5.402763169129877
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_npi
type: gsarti/flores_101_npi
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 5.199342701937889
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_nso
type: gsarti/flores_101_nso
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 8.154626800955667
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_nya
type: gsarti/flores_101_nya
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 8.179860208369393
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_oci
type: gsarti/flores_101_oci
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 4.8617357393685845
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_orm
type: gsarti/flores_101_orm
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 12.911595421079408
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_ory
type: gsarti/flores_101_ory
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 5.189421861225964
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_pan
type: gsarti/flores_101_pan
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 4.698477289331806
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_pol
type: gsarti/flores_101_pol
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 4.625550458479643
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_por
type: gsarti/flores_101_por
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 1.9754515986213523
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_pus
type: gsarti/flores_101_pus
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 4.4963371422771585
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_ron
type: gsarti/flores_101_ron
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 4.965456830031304
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_rus
type: gsarti/flores_101_rus
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 2.0498020542445303
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_slk
type: gsarti/flores_101_slk
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 6.450822127057479
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_slv
type: gsarti/flores_101_slv
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 6.620252120186232
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_sna
type: gsarti/flores_101_sna
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 8.462166771382726
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_snd
type: gsarti/flores_101_snd
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 5.466066951221973
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_som
type: gsarti/flores_101_som
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 11.95918054093392
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_spa
type: gsarti/flores_101_spa
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 1.8965140104323535
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_srp
type: gsarti/flores_101_srp
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 2.871214785885079
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_swe
type: gsarti/flores_101_swe
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 5.054972008155866
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_swh
type: gsarti/flores_101_swh
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 3.6973091886730676
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_tam
type: gsarti/flores_101_tam
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 4.539493400469833
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_tel
type: gsarti/flores_101_tel
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 5.807499987508966
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_tgk
type: gsarti/flores_101_tgk
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 3.5994818827380426
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_tgl
type: gsarti/flores_101_tgl
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 5.667053833119858
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_tha
type: gsarti/flores_101_tha
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 2.365940201944242
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_tur
type: gsarti/flores_101_tur
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 4.885014749844601
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_ukr
type: gsarti/flores_101_ukr
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 2.7240934990288483
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_umb
type: gsarti/flores_101_umb
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 12.766915508610673
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_urd
type: gsarti/flores_101_urd
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 1.9797467071381232
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_uzb
type: gsarti/flores_101_uzb
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 12.002337637722146
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_vie
type: gsarti/flores_101_vie
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 1.76578415476397
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_wol
type: gsarti/flores_101_wol
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 9.144285650306488
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_xho
type: gsarti/flores_101_xho
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 7.403240538286952
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_yor
type: gsarti/flores_101_yor
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 5.91272037551173
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_zho_simpl
type: gsarti/flores_101_zho_simpl
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 2.2769070822768533
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_zho_trad
type: gsarti/flores_101_zho_trad
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 2.5180582198242383
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: gsarti/flores_101_zul
type: gsarti/flores_101_zul
metrics:
- name: byte_perplexity
type: byte_perplexity
value: 8.53353320693145
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: headqa
type: headqa
metrics:
- name: acc
type: acc
value: 0.26440554339897887
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: hellaswag
type: hellaswag
metrics:
- name: acc
type: acc
value: 0.41236805417247563
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: logiqa
type: logiqa
metrics:
- name: acc
type: acc
value: 0.2073732718894009
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: mathqa
type: mathqa
metrics:
- name: acc
type: acc
value: 0.24958123953098826
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: mc_taco
type: mc_taco
metrics:
- name: em
type: em
value: 0.11936936936936937
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: mnli
type: mnli
metrics:
- name: acc
type: acc
value: 0.35496688741721855
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: mnli_mismatched
type: mnli_mismatched
metrics:
- name: acc
type: acc
value: 0.35211554109031734
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: mrpc
type: mrpc
metrics:
- name: acc
type: acc
value: 0.5857843137254902
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: multirc
type: multirc
metrics:
- name: acc
type: acc
value: 0.5375412541254125
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: openbookqa
type: openbookqa
metrics:
- name: acc
type: acc
value: 0.216
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: piqa
type: piqa
metrics:
- name: acc
type: acc
value: 0.7078346028291621
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: prost
type: prost
metrics:
- name: acc
type: acc
value: 0.22683603757472245
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: pubmedqa
type: pubmedqa
metrics:
- name: acc
type: acc
value: 0.616
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: qnli
type: qnli
metrics:
- name: acc
type: acc
value: 0.5072304594545122
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: qqp
type: qqp
metrics:
- name: acc
type: acc
value: 0.3842443729903537
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: race
type: race
metrics:
- name: acc
type: acc
value: 0.3521531100478469
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: rte
type: rte
metrics:
- name: acc
type: acc
value: 0.47653429602888087
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: sciq
type: sciq
metrics:
- name: acc
type: acc
value: 0.892
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: sst
type: sst
metrics:
- name: acc
type: acc
value: 0.5177752293577982
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: triviaqa
type: triviaqa
metrics:
- name: acc
type: acc
value: 0.041633518960487934
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: tydiqa_primary
type: tydiqa_primary
metrics:
- name: acc
type: acc
value: 0.3011337608795236
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: webqs
type: webqs
metrics:
- name: acc
type: acc
value: 0.01673228346456693
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: wic
type: wic
metrics:
- name: acc
type: acc
value: 0.5015673981191222
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: winogrande
type: winogrande
metrics:
- name: acc
type: acc
value: 0.5864246250986582
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: wnli
type: wnli
metrics:
- name: acc
type: acc
value: 0.471830985915493
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: wsc
type: wsc
metrics:
- name: acc
type: acc
value: 0.4423076923076923
verified: false
- task:
type: text-generation
name: text generation
dataset:
name: humaneval
type: humaneval
metrics:
- name: pass@1
type: pass@1
value: 0.15524390243902436
verified: false
- name: pass@10
type: pass@10
value: 0.3220367632383857
verified: false
- name: pass@100
type: pass@100
value: 0.5545431515723145
verified: false
---
<h1 style='text-align: center '>BLOOM LM</h1>
<h2 style='text-align: center '><em>BigScience Large Open-science Open-access Multilingual Language Model</em> </h2>
<h3 style='text-align: center '>Model Card</h3>
<img src="https://s3.amazonaws.com/moonup/production/uploads/1657124309515-5f17f0a0925b9863e28ad517.png" alt="BigScience Logo" width="800" style="margin-left:auto; margin-right:auto; display:block"/>
Version 1.0 / 26.May.2022
## Table of Contents
1. [Model Details](#model-details)
2. [Uses](#uses)
3. [Training Data](#training-data)
4. [Risks and Limitations](#risks-and-limitations)
5. [Evaluation](#evaluation)
6. [Recommendations](#recommendations)
7. [Glossary and Calculations](#glossary-and-calculations)
8. [More Information](#more-information)
9. [Model Card Authors](#model-card-authors)
## Model Details
### Basics
*This section provides information for anyone who wants to know about the model.*
<details>
<summary>Click to expand</summary> <br/>
**Developed by:** BigScience ([website](https://bigscience.huggingface.co))
* All collaborators are either volunteers or have an agreement with their employer. *(Further breakdown of participants forthcoming.)*
**Model Type:** Transformer-based Language Model
**Version:** 1.0.0
**Languages:** Multiple; see [training data](#training-data)
**License:** RAIL License v1.0 ([link](https://huggingface.co/spaces/bigscience/license))
**Release Date Estimate:** Monday, 11.July.2022
**Send Questions to:** [email protected]
**Cite as:** BigScience, _BigScience Language Open-science Open-access Multilingual (BLOOM) Language Model_. International, May 2021-May 2022
**Funded by:**
* The French government.
* Hugging Face ([website](https://huggingface.co)).
* Organizations of contributors. *(Further breakdown of organizations forthcoming.)*
</details>
### Technical Specifications
*This section provides information for people who work on model development.*
<details>
<summary>Click to expand</summary><br/>
Please see [the BLOOM training README](https://github.com/bigscience-workshop/bigscience/tree/master/train/tr11-176B-ml#readme) for full details on replicating training.
**Model Architecture:** Modified from Megatron-LM GPT2 (see [paper](https://arxiv.org/abs/1909.08053), [BLOOM Megatron code](https://github.com/bigscience-workshop/Megatron-DeepSpeed)):
* Decoder-only architecture
* Layer normalization applied to word embeddings layer (`StableEmbedding`; see [code](https://github.com/facebookresearch/bitsandbytes), [paper](https://arxiv.org/pdf/2110.02861.pdf))
* ALiBi positional encodings (see [paper](https://arxiv.org/pdf/2108.12409.pdf)), with GeLU activation functions (a bias-construction sketch follows this list)
* 2.5 billion parameters:
* 30 layers, 32 attention heads
* Hidden layers are 2560-dimensional
* Sequence length of 2048 tokens used (see [BLOOM tokenizer](https://huggingface.co/bigscience/tokenizer), [tokenizer description](#tokenization))
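The bias construction behind ALiBi can be sketched in a few lines. The following is a hedged illustration of the idea only (per-head slopes of the form 2^(-8h/num_heads) and a linear penalty on key distance), not the Megatron-DeepSpeed implementation used for BLOOM; the helper name `alibi_bias` is ours.

```python
import torch

def alibi_bias(num_heads: int, seq_len: int) -> torch.Tensor:
    """Causal ALiBi bias added to attention scores before softmax.

    Head h (1-indexed) gets slope m_h = 2 ** (-8 * h / num_heads); the bias for
    a query at position i attending to key j <= i is -m_h * (i - j).
    """
    slopes = torch.tensor([2.0 ** (-8.0 * h / num_heads) for h in range(1, num_heads + 1)])
    positions = torch.arange(seq_len)
    distance = positions.view(-1, 1) - positions.view(1, -1)   # distance[i, j] = i - j
    distance = distance.clamp(min=0)                           # future positions are handled by the causal mask
    return -slopes.view(-1, 1, 1) * distance                   # shape: (num_heads, seq_len, seq_len)

# 32 heads as specified above; a short sequence for illustration (BLOOM uses 2048).
bias = alibi_bias(num_heads=32, seq_len=8)
print(bias.shape)   # torch.Size([32, 8, 8])
```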
**Objective Function:** Cross Entropy with mean reduction (see [API documentation](https://pytorch.org/docs/stable/generated/torch.nn.CrossEntropyLoss.html#torch.nn.CrossEntropyLoss)).
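As a minimal sketch of this objective, assuming standard next-token prediction in PyTorch (an illustration only, not the actual Megatron-DeepSpeed training code): logits at positions 0..T-2 are scored against the tokens at positions 1..T-1 with `CrossEntropyLoss`, whose default reduction is the mean.

```python
import torch
import torch.nn as nn

def causal_lm_loss(logits: torch.Tensor, input_ids: torch.Tensor) -> torch.Tensor:
    """Mean cross-entropy of next-token prediction.

    logits:    (batch, seq_len, vocab_size) raw scores from the model
    input_ids: (batch, seq_len) token ids; targets are the ids shifted left by one
    """
    shift_logits = logits[:, :-1, :].contiguous()
    shift_labels = input_ids[:, 1:].contiguous()
    loss_fn = nn.CrossEntropyLoss(reduction="mean")
    return loss_fn(shift_logits.view(-1, shift_logits.size(-1)), shift_labels.view(-1))

# Toy shapes only; the vocabulary size matches the tokenizer described below.
logits = torch.randn(2, 8, 250680)
input_ids = torch.randint(0, 250680, (2, 8))
print(causal_lm_loss(logits, input_ids))
```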
**Compute infrastructure:** Jean Zay Public Supercomputer, provided by the French government (see [announcement](https://www.enseignementsup-recherche.gouv.fr/fr/signature-du-marche-d-acquisition-de-l-un-des-supercalculateurs-les-plus-puissants-d-europe-46733)).
* Hardware: 384 A100 80GB GPUs (48 nodes):
* Additional 32 A100 80GB GPUs (4 nodes) in reserve
* 8 GPUs per node, using NVLink 4 inter-GPU connects and 4 OmniPath links
* CPU: AMD
* CPU memory: 512GB per node
* GPU memory: 640GB per node
* Inter-node connect: Omni-Path Architecture (OPA)
* NCCL-communications network: a fully dedicated subnet
* Disc IO network: shared network with other types of nodes
* Software:
* Megatron-DeepSpeed ([Github link](https://github.com/bigscience-workshop/Megatron-DeepSpeed))
* DeepSpeed ([Github link](https://github.com/microsoft/DeepSpeed))
* PyTorch (pytorch-1.11 w/ CUDA-11.5; see [Github link](https://github.com/pytorch/pytorch))
* apex ([Github link](https://github.com/NVIDIA/apex))
#### **Training**
_In progress._
Current training logs: [Tensorboard link](https://huggingface.co/tensorboard/bigscience/tr11-176B-ml-logs/)
- Checkpoint size:
- Bf16 weights: 329GB
- Full checkpoint with optimizer states: 2.3TB
- Training throughput: About 150 TFLOP/s per GPU
- Number of epochs: 1 (*current target*)
- Dates:
- Started 11th March, 2022 11:42am PST
- Estimated end: 5th July, 2022
- Estimated cost of training: Equivalent of $2-5M in cloud computing (including preliminary experiments)
- Server training location: Île-de-France, France
#### **Tokenization**
The BLOOM tokenizer ([link](https://huggingface.co/bigscience/tokenizer)) is a learned subword tokenizer trained using:
- A byte-level Byte Pair Encoding (BPE) algorithm
- A simple pre-tokenization rule, no normalization
- A vocabulary size of 250,680
It was trained on a subset of a preliminary version of the corpus using alpha-weighting per language.
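A minimal sketch of loading and inspecting the tokenizer with the `transformers` library is shown below. That the linked Hub repository id (`bigscience/tokenizer`) can be passed directly to `AutoTokenizer.from_pretrained`, and the exact vocabulary size reported at runtime, are assumptions to verify.

```python
from transformers import AutoTokenizer

# Repository id taken from the link above; treat as an assumption to verify.
tokenizer = AutoTokenizer.from_pretrained("bigscience/tokenizer")

print(tokenizer.vocab_size)          # expected to be close to 250,680 (see above)
ids = tokenizer("BigScience trained a multilingual language model.")["input_ids"]
print(ids)
print(tokenizer.decode(ids))         # byte-level BPE round-trips the input text
```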
</details>
### Environmental Impact
<details>
<summary>Click to expand</summary><br/>
The training supercomputer, Jean Zay ([website](http://www.idris.fr/eng/jean-zay/jean-zay-presentation-eng.html)), uses mostly nuclear energy. The heat generated by it is reused for heating campus housing.
**Estimated carbon emissions:** *(Forthcoming upon completion of training.)*
**Estimated electricity usage:** *(Forthcoming upon completion of training.)*
</details>
<p> </p>
## Uses
*This section addresses questions around how the model is intended to be used, discusses the foreseeable users of the model (including those affected by the model), and describes uses that are considered out of scope or misuse of the model.
It provides information for anyone considering using the model or who is affected by the model.*
<details>
<summary>Click to expand</summary><br/>
### Intended Use
This model is being created in order to enable public research on large language models (LLMs). LLMs are intended to be used for language generation or as a pretrained base model that can be further fine-tuned for specific tasks. Use cases below are not exhaustive.
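As a hedged illustration of the language-generation use case, the sketch below uses the `transformers` `pipeline` API. The checkpoint id `bigscience/bloom-2b5` is an assumption (this card does not state the Hub id), and the prompt and sampling settings are illustrative only.

```python
from transformers import pipeline

# Checkpoint id is an assumption; substitute the actual Hub id of this model.
generator = pipeline("text-generation", model="bigscience/bloom-2b5")

out = generator(
    "BigScience is a collaborative research workshop that",
    max_new_tokens=50,     # length of the continuation
    do_sample=True,        # sampled rather than greedy decoding
    top_p=0.9,             # nucleus sampling, an illustrative setting
)
print(out[0]["generated_text"])
```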
#### **Direct Use**
- Text generation
- Exploring characteristics of language generated by a language model
- Examples: Cloze tests, counterfactuals, generations with reframings
#### **Downstream Use**
- Tasks that leverage language models include: Information Extraction, Question Answering, Summarization
### Misuse and Out-of-scope Use
*This section addresses what users ought not do with the model.*
See the [BLOOM License](https://huggingface.co/spaces/bigscience/license), Attachment A, for detailed usage restrictions. The below list is non-exhaustive, but lists some easily foreseeable problematic use cases.
#### **Out-of-scope Uses**
Using the model in [high-stakes](#high-stakes) settings is out of scope for this model. The model is not designed for [critical decisions](#critical-decisions), nor for uses with any material consequences on an individual's livelihood or wellbeing. The model can output content that appears factual but is not correct.
##### Out-of-scope Uses Include:
- Usage in biomedical domains, political and legal domains, or finance domains
- Usage for evaluating or scoring individuals, such as for employment, education, or credit
- Applying the model for critical automatic decisions, generating factual content, creating reliable summaries, or generating predictions that must be correct
#### **Misuse**
Intentionally using the model for harm, violating [human rights](#human-rights), or other kinds of malicious activities, is a misuse of this model. This includes:
- Spam generation
- Disinformation and influence operations
- Disparagement and defamation
- Harassment and abuse
- [Deception](#deception)
- Unconsented impersonation and imitation
- Unconsented surveillance
- Generating content without attribution to the model, as specified in the [RAIL License, Use Restrictions](https://huggingface.co/spaces/bigscience/license)
### Intended Users
#### **Direct Users**
- General Public
- Researchers
- Students
- Educators
- Engineers/developers
- Non-commercial entities
- Community advocates, including human and civil rights groups
#### Indirect Users
- Users of derivatives created by Direct Users, such as those using software with an [intended use](#intended-use)
- Users of [Derivatives of the Model, as described in the License](https://huggingface.co/spaces/bigscience/license)
#### Others Affected (Parties Prenantes)
- People and groups referred to by the LLM
- People and groups exposed to outputs of, or decisions based on, the LLM
- People and groups whose original work is included in the LLM
</details>
<p> </p>
## Training Data
*This section provides a high-level overview of the training data. It is relevant for anyone who wants to know the basics of what the model is learning.*
<details>
<summary>Click to expand</summary><br/>
Details for each dataset are provided in individual [Data Cards](https://huggingface.co/spaces/bigscience/BigScienceCorpus).
Training data includes:
- 45 natural languages
- 12 programming languages
- In 1.5TB of pre-processed text, converted into 350B unique tokens (see [the tokenizer section](#tokenization) for more.)
#### **Languages**
The pie chart shows the distribution of languages in training data.

The following table shows the further distribution of Niger-Congo and Indic languages in the training data.
<details>
<summary>Click to expand</summary><br/>
| Niger Congo | Percentage | | Indic | Percentage |
|----------------|------------ |------ |-----------|------------|
| Chi Tumbuka | 0.00002 | | Assamese | 0.01 |
| Kikuyu | 0.00004 | | Odia | 0.04 |
| Bambara | 0.00004 | | Gujarati | 0.04 |
| Akan | 0.00007 | | Marathi | 0.05 |
| Xitsonga | 0.00007 | | Punjabi | 0.05 |
| Sesotho | 0.00007 | | Kannada | 0.06 |
| Chi Chewa | 0.0001 | | Nepali | 0.07 |
| Setswana | 0.0002 | | Telugu | 0.09 |
| Northern Sotho | 0.0002 | | Malayalam | 0.10 |
| Fon | 0.0002 | | Urdu | 0.10 |
| Kirundi | 0.0003 | | Tamil | 0.20 |
| Wolof | 0.0004 | | Bengali | 0.50 |
| Kuganda | 0.0004 | | Hindi | 0.70 |
| Chi Shona | 0.001 | | | |
| Isi Zulu | 0.001 | | | |
| Igbo | 0.001 | | | |
| Xhosa | 0.001 | | | |
| Kinyarwanda | 0.003 | | | |
| Yoruba | 0.006 | | | |
| Swahili | 0.02 | | | |
</details>
The following table shows the distribution of programming languages.
<details>
<summary>Click to expand</summary><br/>
| Extension | Language | Number of files |
|----------------|------------|-----------------|
| java | Java | 5,407,724 |
| php | PHP | 4,942,186 |
| cpp | C++ | 2,503,930 |
| py | Python | 2,435,072 |
| js | JavaScript | 1,905,518 |
| cs | C# | 1,577,347 |
| rb | Ruby | 678,413 |
| cc | C++ | 443,054 |
| hpp | C++ | 391,048 |
| lua | Lua | 352,317 |
| go | Go | 227,763 |
| ts | TypeScript | 195,254 |
| C | C | 134,537 |
| scala | Scala | 92,052 |
| hh | C++ | 67,161 |
| H | C++ | 55,899 |
| tsx | TypeScript | 33,107 |
| rs | Rust | 29,693 |
| phpt | PHP | 9,702 |
| c++ | C++ | 1,342 |
| h++ | C++ | 791 |
| php3 | PHP | 540 |
| phps | PHP | 270 |
| php5 | PHP | 166 |
| php4 | PHP | 29 |
</details>
</details>
<p> </p>
## Risks and Limitations
*This section identifies foreseeable harms and misunderstandings.*
<details>
<summary>Click to expand</summary><br/>
Model may:
- Overrepresent some viewpoints and underrepresent others
- Contain stereotypes
- Contain [personal information](#personal-data-and-information)
- Generate:
- Hateful, abusive, or violent language
- Discriminatory or prejudicial language
- Content that may not be appropriate for all settings, including sexual content
- Make errors, including producing incorrect information as if it were factual
- Generate irrelevant or repetitive outputs
</details>
<p> </p>
## Evaluation
*This section describes the evaluation protocols and provides the results.*
<details>
<summary>Click to expand</summary><br/>
### Metrics
*This section describes the different ways performance is calculated and why.*
Includes:
| Metric | Why chosen |
|--------------------|--------------------------------------------------------------------|
| [Perplexity](#perplexity) | Standard metric for quantifying model improvements during training |
| Cross Entropy [Loss](#loss) | Standard objective for language models. |
And multiple different metrics for specific tasks. _(More evaluation metrics forthcoming upon completion of evaluation protocol.)_
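Many of the results below (the FLORES-101 rows) are byte-level perplexities. One common convention, in the style of the EleutherAI LM Evaluation Harness, is to exponentiate the total negative log-likelihood (in nats) divided by the number of UTF-8 bytes; the sketch below assumes that convention and a `transformers` causal LM, and the exact protocol behind these numbers may differ.

```python
import math

import torch

def byte_perplexity(model, tokenizer, texts):
    """exp(total NLL in nats / total UTF-8 bytes) -- one common convention."""
    total_nll, total_bytes = 0.0, 0
    for text in texts:
        enc = tokenizer(text, return_tensors="pt")
        with torch.no_grad():
            out = model(**enc, labels=enc["input_ids"])
        n_scored = enc["input_ids"].size(1) - 1      # the returned loss is averaged over these positions
        total_nll += out.loss.item() * n_scored
        total_bytes += len(text.encode("utf-8"))
    return math.exp(total_nll / total_bytes)

# Usage sketch (checkpoint id is an assumption; this downloads the weights):
#   from transformers import AutoModelForCausalLM, AutoTokenizer
#   tok = AutoTokenizer.from_pretrained("bigscience/bloom-2b5")
#   lm = AutoModelForCausalLM.from_pretrained("bigscience/bloom-2b5")
#   print(byte_perplexity(lm, tok, ["Le modèle calcule une perplexité par octet."]))
```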
### Factors
*This section lists some different aspects of BLOOM models. Its focus is on aspects that are likely to give rise to high variance in model behavior.*
- Language, such as English or Yoruba
- Domain, such as newswire or stories
- Demographic characteristics, such as gender or nationality
### Results
*Results are based on the [Factors](#factors) and [Metrics](#metrics).*
**Zero-shot evaluations:**
See this repository for JSON files: https://github.com/bigscience-workshop/evaluation-results
| Task | Language | Metric | BLOOM-2B5 |
|:----|:----|:----|:----:|
| arc_challenge | eng | acc ↑ | 0.28 |
| arc_easy | eng | acc ↑ | 0.595 |
| axb (Median of 10 prompts) | eng | acc ↑ | 0.443 |
| axg (Median of 10 prompts) | eng | acc ↑ | 0.5 |
| boolq (Median of 11 prompts) | eng | acc ↑ | 0.617 |
| cb (Median of 15 prompts) | eng | acc ↑ | 0.304 |
| cola (Median of 5 prompts) | eng | acc ↑ | 0.611 |
| copa (Median of 9 prompts) | eng | acc ↑ | 0.63 |
| crows_pairs_english (Median of 6 prompts) | eng | acc ↑ | 0.497 |
| crows_pairs_french (Median of 7 prompts) | fra | acc ↑ | 0.503 |
| diabla (Median of 2 prompts) | eng | acc ↑ | 0.289 |
| gsarti/flores_101_afr | afr | byte_perplexity ↓ | 6.501 |
| gsarti/flores_101_amh | amh | byte_perplexity ↓ | 3.973 |
| gsarti/flores_101_ara | ara | byte_perplexity ↓ | 1.808 |
| gsarti/flores_101_asm | asm | byte_perplexity ↓ | 5.699 |
| gsarti/flores_101_ast | ast | byte_perplexity ↓ | 3.925 |
| gsarti/flores_101_azj | azj | byte_perplexity ↓ | 6.943 |
| gsarti/flores_101_bel | bel | byte_perplexity ↓ | 3.614 |
| gsarti/flores_101_ben | ben | byte_perplexity ↓ | 5.121 |
| gsarti/flores_101_bos | bos | byte_perplexity ↓ | 5.653 |
| gsarti/flores_101_bul | bul | byte_perplexity ↓ | 2.701 |
| gsarti/flores_101_cat | cat | byte_perplexity ↓ | 2.305 |
| gsarti/flores_101_ceb | ceb | byte_perplexity ↓ | 6.291 |
| gsarti/flores_101_ces | ces | byte_perplexity ↓ | 5.447 |
| gsarti/flores_101_ckb | ckb | byte_perplexity ↓ | 3.726 |
| gsarti/flores_101_cym | cym | byte_perplexity ↓ | 12.539 |
| gsarti/flores_101_dan | dan | byte_perplexity ↓ | 5.183 |
| gsarti/flores_101_deu | deu | byte_perplexity ↓ | 3.118 |
| gsarti/flores_101_ell | ell | byte_perplexity ↓ | 2.468 |
| gsarti/flores_101_eng | eng | byte_perplexity ↓ | 2.019 |
| gsarti/flores_101_est | est | byte_perplexity ↓ | 9.117 |
| gsarti/flores_101_fas | fas | byte_perplexity ↓ | 3.058 |
| gsarti/flores_101_fin | fin | byte_perplexity ↓ | 6.847 |
| gsarti/flores_101_fra | fra | byte_perplexity ↓ | 1.998 |
| gsarti/flores_101_ful | ful | byte_perplexity ↓ | 11.466 |
| gsarti/flores_101_gle | gle | byte_perplexity ↓ | 8.681 |
| gsarti/flores_101_glg | glg | byte_perplexity ↓ | 3.03 |
| gsarti/flores_101_guj | guj | byte_perplexity ↓ | 4.955 |
| gsarti/flores_101_hau | hau | byte_perplexity ↓ | 10.758 |
| gsarti/flores_101_heb | heb | byte_perplexity ↓ | 3.6 |
| gsarti/flores_101_hin | hin | byte_perplexity ↓ | 4.713 |
| gsarti/flores_101_hrv | hrv | byte_perplexity ↓ | 5.822 |
| gsarti/flores_101_hun | hun | byte_perplexity ↓ | 6.44 |
| gsarti/flores_101_hye | hye | byte_perplexity ↓ | 3.658 |
| gsarti/flores_101_ibo | ibo | byte_perplexity ↓ | 5.565 |
| gsarti/flores_101_ind | ind | byte_perplexity ↓ | 2.16 |
| gsarti/flores_101_isl | isl | byte_perplexity ↓ | 8.082 |
| gsarti/flores_101_ita | ita | byte_perplexity ↓ | 2.969 |
| gsarti/flores_101_jav | jav | byte_perplexity ↓ | 7.057 |
| gsarti/flores_101_jpn | jpn | byte_perplexity ↓ | 2.776 |
| gsarti/flores_101_kam | kam | byte_perplexity ↓ | 11.073 |
| gsarti/flores_101_kan | kan | byte_perplexity ↓ | 5.552 |
| gsarti/flores_101_kat | kat | byte_perplexity ↓ | 2.523 |
| gsarti/flores_101_kaz | kaz | byte_perplexity ↓ | 3.39 |
| gsarti/flores_101_kea | kea | byte_perplexity ↓ | 8.919 |
| gsarti/flores_101_kir | kir | byte_perplexity ↓ | 3.729 |
| gsarti/flores_101_kor | kor | byte_perplexity ↓ | 3.933 |
| gsarti/flores_101_lao | lao | byte_perplexity ↓ | 2.908 |
| gsarti/flores_101_lav | lav | byte_perplexity ↓ | 7.777 |
| gsarti/flores_101_lin | lin | byte_perplexity ↓ | 7.525 |
| gsarti/flores_101_lit | lit | byte_perplexity ↓ | 7.369 |
| gsarti/flores_101_ltz | ltz | byte_perplexity ↓ | 8.801 |
| gsarti/flores_101_lug | lug | byte_perplexity ↓ | 8.483 |
| gsarti/flores_101_luo | luo | byte_perplexity ↓ | 11.976 |
| gsarti/flores_101_mal | mal | byte_perplexity ↓ | 4.616 |
| gsarti/flores_101_mar | mar | byte_perplexity ↓ | 5.483 |
| gsarti/flores_101_mkd | mkd | byte_perplexity ↓ | 2.966 |
| gsarti/flores_101_mlt | mlt | byte_perplexity ↓ | 15.005 |
| gsarti/flores_101_mon | mon | byte_perplexity ↓ | 3.411 |
| gsarti/flores_101_mri | mri | byte_perplexity ↓ | 7.474 |
| gsarti/flores_101_msa | msa | byte_perplexity ↓ | 2.571 |
| gsarti/flores_101_mya | mya | byte_perplexity ↓ | 2.414 |
| gsarti/flores_101_nld | nld | byte_perplexity ↓ | 4.128 |
| gsarti/flores_101_nob | nob | byte_perplexity ↓ | 5.403 |
| gsarti/flores_101_npi | npi | byte_perplexity ↓ | 5.199 |
| gsarti/flores_101_nso | nso | byte_perplexity ↓ | 8.155 |
| gsarti/flores_101_nya | nya | byte_perplexity ↓ | 8.18 |
| gsarti/flores_101_oci | oci | byte_perplexity ↓ | 4.862 |
| gsarti/flores_101_orm | orm | byte_perplexity ↓ | 12.912 |
| gsarti/flores_101_ory | ory | byte_perplexity ↓ | 5.189 |
| gsarti/flores_101_pan | pan | byte_perplexity ↓ | 4.698 |
| gsarti/flores_101_pol | pol | byte_perplexity ↓ | 4.626 |
| gsarti/flores_101_por | por | byte_perplexity ↓ | 1.975 |
| gsarti/flores_101_pus | pus | byte_perplexity ↓ | 4.496 |
| gsarti/flores_101_ron | ron | byte_perplexity ↓ | 4.965 |
| gsarti/flores_101_rus | rus | byte_perplexity ↓ | 2.05 |
| gsarti/flores_101_slk | slk | byte_perplexity ↓ | 6.451 |
| gsarti/flores_101_slv | slv | byte_perplexity ↓ | 6.62 |
| gsarti/flores_101_sna | sna | byte_perplexity ↓ | 8.462 |
| gsarti/flores_101_snd | snd | byte_perplexity ↓ | 5.466 |
| gsarti/flores_101_som | som | byte_perplexity ↓ | 11.959 |
| gsarti/flores_101_spa | spa | byte_perplexity ↓ | 1.897 |
| gsarti/flores_101_srp | srp | byte_perplexity ↓ | 2.871 |
| gsarti/flores_101_swe | swe | byte_perplexity ↓ | 5.055 |
| gsarti/flores_101_swh | swh | byte_perplexity ↓ | 3.697 |
| gsarti/flores_101_tam | tam | byte_perplexity ↓ | 4.539 |
| gsarti/flores_101_tel | tel | byte_perplexity ↓ | 5.807 |
| gsarti/flores_101_tgk | tgk | byte_perplexity ↓ | 3.599 |
| gsarti/flores_101_tgl | tgl | byte_perplexity ↓ | 5.667 |
| gsarti/flores_101_tha | tha | byte_perplexity ↓ | 2.366 |
| gsarti/flores_101_tur | tur | byte_perplexity ↓ | 4.885 |
| gsarti/flores_101_ukr | ukr | byte_perplexity ↓ | 2.724 |
| gsarti/flores_101_umb | umb | byte_perplexity ↓ | 12.767 |
| gsarti/flores_101_urd | urd | byte_perplexity ↓ | 1.98 |
| gsarti/flores_101_uzb | uzb | byte_perplexity ↓ | 12.002 |
| gsarti/flores_101_vie | vie | byte_perplexity ↓ | 1.766 |
| gsarti/flores_101_wol | wol | byte_perplexity ↓ | 9.144 |
| gsarti/flores_101_xho | xho | byte_perplexity ↓ | 7.403 |
| gsarti/flores_101_yor | yor | byte_perplexity ↓ | 5.913 |
| gsarti/flores_101_zho_simpl | zho_simpl | byte_perplexity ↓ | 2.277 |
| gsarti/flores_101_zho_trad | zho_trad | byte_perplexity ↓ | 2.518 |
| gsarti/flores_101_zul | zul | byte_perplexity ↓ | 8.534 |
| headqa | esp | acc ↑ | 0.264 |
| hellaswag | eng | acc ↑ | 0.412 |
| logiqa | eng | acc ↑ | 0.207 |
| mathqa | eng | acc ↑ | 0.25 |
| mc_taco | eng | em ↑ | 0.119 |
| mnli (Median of 15 prompts) | eng | acc ↑ | 0.355 |
| mnli_mismatched (Median of 15 prompts) | eng | acc ↑ | 0.352 |
| mrpc | eng | acc ↑ | 0.586 |
| multirc (Median of 11 prompts) | eng | acc ↑ | 0.538 |
| openbookqa | eng | acc ↑ | 0.216 |
| piqa | eng | acc ↑ | 0.708 |
| prost | eng | acc ↑ | 0.227 |
| pubmedqa | eng | acc ↑ | 0.616 |
| qnli | eng | acc ↑ | 0.507 |
| qqp (Median of 7 prompts) | eng | acc ↑ | 0.384 |
| race | eng | acc ↑ | 0.352 |
| rte (Median of 6 prompts) | eng | acc ↑ | 0.477 |
| sciq | eng | acc ↑ | 0.892 |
| sst (Median of 6 prompts) | eng | acc ↑ | 0.518 |
| triviaqa | eng | acc ↑ | 0.042 |
| tydiqa_primary (Median of 24 prompts) | eng | acc ↑ | 0.301 |
| webqs | eng | acc ↑ | 0.017 |
| wic (Median of 11 prompts) | eng | acc ↑ | 0.502 |
| winogrande | eng | acc ↑ | 0.586 |
| wnli (Median of 6 prompts) | eng | acc ↑ | 0.472 |
| wsc (Median of 11 prompts) | eng | acc ↑ | 0.442 |
| humaneval | python | pass@1 ↑ | 0.155 |
| humaneval | python | pass@10 ↑ | 0.322 |
| humaneval | python | pass@100 ↑ | 0.555 |
**Train-time Evaluation:**
As of 25.May.2022, 15:00 PST:
- Training Loss: 2.0
- Validation Loss: 2.2
- Perplexity: 8.9
</details>
<p> </p>
## Recommendations
*This section provides information on warnings and potential mitigations.*
<details>
<summary>Click to expand</summary><br/>
- Indirect users should be made aware when the content they're working with is created by the LLM.
- Users should be aware of [Risks and Limitations](#risks-and-limitations), and include an appropriate age disclaimer or blocking interface as necessary.
- Models pretrained with the LLM should include an updated Model Card.
- Users of the model should provide mechanisms for those affected to provide feedback, such as an email address for comments.
</details>
<p> </p>
## Glossary and Calculations
*This section defines common terms and how metrics are calculated.*
<details>
<summary>Click to expand</summary><br/>
- <a name="loss">**Loss:**</a> A calculation of the difference between what the model has learned and what the data shows ("groundtruth"). The lower the loss, the better. The training process aims to minimize the loss.
- <a name="perplexity">**Perplexity:**</a> This is based on what the model estimates the probability of new data is. The lower the perplexity, the better. If the model is 100% correct at predicting the next token it will see, then the perplexity is 1. Mathematically this is calculated using entropy.
- <a name="high-stakes">**High-stakes settings:**</a> Such as those identified as "high-risk AI systems" and "unacceptable risk AI systems" in the European Union's proposed [Artificial Intelligence (AI) Act](https://artificialintelligenceact.eu/annexes/).
- <a name="critical-decisions">**Critical decisions:**</a> Such as those defined in [the United States' proposed Algorithmic Accountability Act](https://www.congress.gov/117/bills/s3572/BILLS-117s3572is.pdf).
- <a name="human-rights">**Human rights:**</a> Includes those rights defined in the [Universal Declaration of Human Rights](https://www.un.org/sites/un2.un.org/files/2021/03/udhr.pdf).
- <a name="personal-data-and-information">**Personal Data and Personal Information:**</a> Personal data and information is defined in multiple data protection regulations, such as "[personal data](https://gdpr-info.eu/issues/personal-data/)" in the [European Union's General Data Protection Regulation](https://gdpr-info.eu); and "personal information" in the Republic of South Africa's [Protection of Personal Information Act](https://www.gov.za/sites/default/files/gcis_document/201409/3706726-11act4of2013popi.pdf), The People's Republic of China's [Personal information protection law](http://en.npc.gov.cn.cdurl.cn/2021-12/29/c_694559.htm).
- <a name="sensitive-characteristics">**Sensitive characteristics:**</a> This includes specifically protected categories in human rights (see [UHDR, Article 2](https://www.un.org/sites/un2.un.org/files/2021/03/udhr.pdf)) and personal information regulation (see GDPR, [Article 9; Protection of Personal Information Act, Chapter 1](https://www.gov.za/sites/default/files/gcis_document/201409/3706726-11act4of2013popi.pdf))
- <a name="deception">**Deception:**</a> Doing something to intentionally mislead individuals to believe something that is false, such as by creating deadbots or chatbots on social media posing as real people, or generating text documents without making consumers aware that the text is machine generated.
</details>
<p> </p>
## More Information
<details>
<summary>Click to expand</summary><br/>
### Dataset Creation
Blog post detailing the design choices during the dataset creation: https://bigscience.huggingface.co/blog/building-a-tb-scale-multilingual-dataset-for-language-modeling
### Technical Specifications
Blog post summarizing how the architecture, size, shape, and pre-training duration were selected: https://bigscience.huggingface.co/blog/what-language-model-to-train-if-you-have-two-million-gpu-hours
More details on the architecture/optimizer: https://github.com/bigscience-workshop/bigscience/tree/master/train/tr11-176B-ml
Blog post on the hardware/engineering side: https://bigscience.huggingface.co/blog/which-hardware-to-train-a-176b-parameters-model
Details on the distributed setup used for the training: https://github.com/bigscience-workshop/bigscience/tree/master/train/tr11-176B-ml
Tensorboard updated during the training: https://huggingface.co/bigscience/tr11-176B-ml-logs/tensorboard#scalars&tagFilter=loss
Insights on how to approach training, negative results: https://github.com/bigscience-workshop/bigscience/blob/master/train/lessons-learned.md
Details on the obstacles overcome on the engineering side during preparation (instabilities, optimization of training throughput, and the many technical tricks and open questions involved): https://github.com/bigscience-workshop/bigscience/blob/master/train/tr11-176B-ml/chronicles.md
### Initial Results
Initial prompting experiments using interim checkpoints: https://huggingface.co/spaces/bigscience/bloom-book
</details>
<p> </p>
## Model Card Authors
*Ordered roughly chronologically and by amount of time spent.*
Margaret Mitchell, Giada Pistilli, Yacine Jernite, Ezinwanne Ozoani, Marissa Gerchick, Nazneen Rajani, Sasha Luccioni, Irene Solaiman, Maraim Masoud, Somaieh Nikpoor, Carlos Muñoz Ferrandis, Stas Bekman, Christopher Akiki, Danish Contractor, David Lansky, Angelina McMillan-Major, Tristan Thrush, Suzana Ilić, Gérard Dupont, Shayne Longpre, Manan Dey, Stella Biderman, Douwe Kiela, Emi Baylor, Teven Le Scao, Aaron Gokaslan, Julien Launay, Niklas Muennighoff
| [
-0.05632960796356201,
-0.008967837318778038,
-0.026815669611096382,
0.055174820125103,
0.0022040188778191805,
-0.010965987108647823,
0.01777382753789425,
-0.010099628008902073,
-0.008404248394072056,
-0.02164905145764351,
0.026128102093935013,
-0.15927636623382568,
0.026281921193003654,
-0.018234219402074814,
-0.05545304715633392,
0.038899313658475876,
-0.0232643224298954,
-0.06479997932910919,
-0.08799945563077927,
-0.08581823855638504,
0.05567021295428276,
0.11478371918201447,
0.018195152282714844,
0.050061989575624466,
0.004577712155878544,
-0.00026718745357356966,
-0.05867866054177284,
0.02989952266216278,
-0.021082613617181778,
-0.04109157621860504,
0.03541628271341324,
0.07294056564569473,
0.057593055069446564,
0.009867547079920769,
0.12760943174362183,
0.08439239859580994,
-0.036478523164987564,
-0.03758953511714935,
0.003567417850717902,
-0.06470757722854614,
-0.014916395768523216,
-0.043082837015390396,
0.06393101811408997,
-0.03981181979179382,
0.06352733820676804,
-0.06518653780221939,
-0.0781058743596077,
-0.1179666742682457,
-0.03576884791254997,
0.1025053933262825,
-0.08931780606508255,
0.012629503384232521,
-0.0490906797349453,
-0.00642170337960124,
0.016668183729052544,
0.046633221209049225,
-0.037484560161828995,
0.0225943922996521,
0.03147504851222038,
-0.08678004890680313,
-0.019293775781989098,
-0.07737492769956589,
-0.055595770478248596,
-0.02396581321954727,
-0.05449043586850166,
-0.026501130312681198,
0.007758266758173704,
0.01929437555372715,
-0.007662227842956781,
-0.007625534664839506,
-0.006912949029356241,
0.03212283179163933,
-0.08904003351926804,
0.06372596323490143,
0.03769655153155327,
0.11662725359201431,
0.009971327148377895,
-0.019906505942344666,
0.04802989959716797,
-0.07330700755119324,
-0.07415008544921875,
-0.011840665712952614,
0.0305020734667778,
0.04582095518708229,
0.03694113343954086,
0.05875231698155403,
0.050432104617357254,
0.006419574376195669,
0.055666740983724594,
0.01126717496663332,
-0.03086020238697529,
-0.02710716240108013,
0.0006552753620781004,
0.07014863193035126,
-0.06147116422653198,
0.0286713819950819,
-0.02891216240823269,
0.01688050664961338,
-0.021715417504310608,
0.09997513145208359,
-0.04515380412340164,
0.040348079055547714,
0.05375054106116295,
0.0035149904433637857,
-0.05682112276554108,
0.007176736835390329,
0.034996937960386276,
0.03999781236052513,
-0.0142458351328969,
-0.05312139913439751,
0.06792081892490387,
-0.05359095707535744,
0.025312604382634163,
-0.054932672530412674,
0.03773825615644455,
0.02059473842382431,
-0.08037898689508438,
-0.02715616673231125,
-0.0015055470867082477,
0.09651036560535431,
0.0006737748044542968,
0.009557036682963371,
-0.05969084054231644,
-0.025243574753403664,
-0.07964838296175003,
0.027513880282640457,
0.050012245774269104,
6.502932461716807e-33,
0.059937600046396255,
-0.027318695560097694,
0.02010643109679222,
0.0010483444202691317,
0.01617138832807541,
-0.03418486565351486,
-0.024449165910482407,
-0.01323477178812027,
-0.11259697377681732,
-0.017659546807408333,
-0.07096423208713531,
0.007416668348014355,
-0.06760556250810623,
0.007730876095592976,
0.05162535980343819,
-0.08944539725780487,
0.06185148283839226,
0.03381013870239258,
-0.05890986695885658,
0.06445063650608063,
0.051373276859521866,
0.0058954693377017975,
-0.023437563329935074,
-0.10612654685974121,
0.020907243713736534,
0.06365785002708435,
0.05731157585978508,
-0.052174195647239685,
-0.10737979412078857,
0.04068272188305855,
-0.020730039104819298,
-0.001725969254039228,
0.03894882649183273,
0.016849013045430183,
0.02546209841966629,
-0.03420679643750191,
0.009431743063032627,
0.0002855599159374833,
-0.009222205728292465,
0.016596082597970963,
-0.024311760440468788,
0.09239747375249863,
-0.054404594004154205,
-0.0209172572940588,
0.01142795942723751,
-0.03578125312924385,
0.056296367198228836,
-0.0070382384583354,
-0.011149669997394085,
0.036316096782684326,
-0.023676667362451553,
0.013972158543765545,
0.0015051313675940037,
-0.020889097824692726,
-0.00484362430870533,
-0.024843871593475342,
-0.010924517177045345,
0.02965719811618328,
0.07754103094339371,
0.002748680068179965,
-0.02164473384618759,
0.03191463276743889,
0.008462956175208092,
0.0031648147851228714,
0.08535043895244598,
-0.03136632964015007,
0.052633363753557205,
0.0258125439286232,
0.10263632982969284,
0.013016249053180218,
-0.006567472591996193,
-0.06522149592638016,
0.032321419566869736,
0.031082743778824806,
0.076163649559021,
-0.07163851708173752,
0.09341200441122055,
-0.027379823848605156,
-0.042327966541051865,
0.03844800218939781,
-0.10329420864582062,
-0.011583029292523861,
-0.05667698755860329,
-0.10875381529331207,
0.00303700752556324,
0.007410881575196981,
-0.011581065133213997,
-0.03591405972838402,
-0.04831531271338463,
-0.058202166110277176,
-0.03226061537861824,
0.013518949039280415,
-0.04797592759132385,
-0.045069824904203415,
-0.06051167845726013,
-6.279637024202543e-33,
0.029750769957900047,
0.01373596303164959,
-0.06038683280348778,
0.04201895743608475,
0.08054786920547485,
-0.03049885481595993,
0.09060782939195633,
-0.01893957518041134,
0.008321459405124187,
0.011567353270947933,
-0.011699299328029156,
-0.015651030465960503,
0.013469120487570763,
-0.05567258968949318,
0.08575678616762161,
-0.0351354256272316,
-0.0010030708508566022,
0.0008749823900870979,
-0.007551450747996569,
0.07212293893098831,
-0.0018252389272674918,
0.11953247338533401,
-0.10133819282054901,
0.05394758656620979,
0.007268664427101612,
0.015330830588936806,
-0.03046094998717308,
-0.015125604346394539,
0.042937006801366806,
-0.017025703564286232,
0.013875885866582394,
0.049359146505594254,
-0.11836467683315277,
0.014849912375211716,
-0.0552690327167511,
-0.04882166162133217,
0.0547868050634861,
0.012165218591690063,
-0.04020930081605911,
0.08559229224920273,
0.03373027965426445,
0.04820560663938522,
-0.060980793088674545,
0.021644389256834984,
-0.023974323645234108,
0.051423363387584686,
0.08978146314620972,
-0.013439048081636429,
-0.02374981716275215,
-0.02017918974161148,
0.03229585662484169,
0.03187745064496994,
-0.07101915776729584,
0.0005598203861154616,
-0.020208317786455154,
-0.08232693374156952,
0.019766412675380707,
-0.06531603634357452,
-0.14244970679283142,
-0.03101027011871338,
-0.007272970397025347,
0.002366107888519764,
0.03621860221028328,
-0.0064448206685483456,
0.07292819768190384,
0.03338291123509407,
-0.03690074756741524,
0.02725384011864662,
-0.06413937360048294,
0.00044507006532512605,
-0.003292406676337123,
-0.02848687395453453,
0.05816163495182991,
0.023725321516394615,
0.020176170393824577,
-0.01902419701218605,
-0.08855865895748138,
0.013232146389782429,
0.03968346118927002,
0.0047589936293661594,
-0.020206190645694733,
0.06261131167411804,
0.0637115091085434,
0.08783464133739471,
0.03978576138615608,
-0.030964961275458336,
-0.01961161009967327,
0.08411997556686401,
-0.03488419950008392,
0.040012992918491364,
-0.05799384415149689,
-0.0435260646045208,
-0.030419502407312393,
0.11290197819471359,
-0.039703745394945145,
-6.868932445058817e-8,
-0.024668138474225998,
-0.03681838512420654,
-0.10626569390296936,
0.06149313226342201,
0.00796389952301979,
0.002314676297828555,
-0.040249958634376526,
0.06419376283884048,
-0.003027961589396,
-0.07733336091041565,
0.08437098562717438,
-0.005954407621175051,
-0.12298867106437683,
-0.01303589716553688,
0.028709767386317253,
-0.04045191407203674,
-0.014330092817544937,
0.10039710998535156,
-0.0443069152534008,
-0.0913914144039154,
0.03852837532758713,
0.03794079273939133,
-0.02953292801976204,
-0.062432680279016495,
-0.015429526567459106,
-0.008376449346542358,
0.003061482682824135,
0.06623250991106033,
0.008433750830590725,
-0.023584244772791862,
0.08620740473270416,
-0.07748134434223175,
-0.0003462537133600563,
-0.040176600217819214,
0.048555031418800354,
0.061850499361753464,
0.01017287839204073,
0.020998792722821236,
0.02049289084970951,
0.008764130063354969,
-0.023699497804045677,
0.1487119197845459,
0.0011930299224331975,
0.023085903376340866,
-0.027504488825798035,
-0.01339001301676035,
-0.022537963464856148,
-0.00814603827893734,
0.03558732569217682,
-0.046716898679733276,
-0.05215346813201904,
-0.057289984077215195,
-0.06987559050321579,
0.006702100392431021,
0.018799161538481712,
0.08393895626068115,
0.09640536457300186,
-0.04929767921566963,
-0.009153289720416069,
-0.013938544318079948,
0.12690255045890808,
-0.029988950118422508,
0.006242657080292702,
-0.04907651245594025
] |
KoboldAI/fairseq-dense-13B | e936211b7bb8f406cb78efca22a5f7c43ba090b3 | 2022-02-01T22:51:59.000Z | [
"pytorch",
"xglm",
"text-generation",
"transformers"
] | text-generation | false | KoboldAI | null | KoboldAI/fairseq-dense-13B | 2,941 | 3 | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
mrm8488/bert-spanish-cased-finetuned-pos-16-tags | 7245043c8ef25dc7ccf91e6afdd2e2dc94213155 | 2021-05-20T00:36:33.000Z | [
"pytorch",
"jax",
"bert",
"token-classification",
"transformers",
"autotrain_compatible"
] | token-classification | false | mrm8488 | null | mrm8488/bert-spanish-cased-finetuned-pos-16-tags | 2,921 | null | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
voidful/albert_chinese_tiny | d40f566a40f057e5d8a6f7b2cd5171a4f104126f | 2021-08-03T05:07:02.000Z | [
"pytorch",
"albert",
"fill-mask",
"zh",
"transformers",
"autotrain_compatible"
] | fill-mask | false | voidful | null | voidful/albert_chinese_tiny | 2,920 | 5 | transformers | ---
language: zh
pipeline_tag: fill-mask
widget:
- text: "今天[MASK]情很好"
---
# albert_chinese_tiny
This is an albert_chinese_tiny model from the [brightmart/albert_zh project](https://github.com/brightmart/albert_zh) (the albert_tiny_google_zh checkpoint),
converted with Hugging Face's [conversion script](https://github.com/huggingface/transformers/blob/master/src/transformers/convert_albert_original_tf_checkpoint_to_pytorch.py).
## Notice
*Supports AutoTokenizer*
Since the albert_chinese_tiny model does not use sentencepiece, AlbertTokenizer cannot load its vocabulary; you have to use BertTokenizer (here loaded through AutoTokenizer) instead.
We can verify that this works by running a MaskedLM prediction, as in the example below.
## Justify (validity check)
```python
from transformers import AutoTokenizer, AlbertForMaskedLM
import torch
from torch.nn.functional import softmax
pretrained = 'voidful/albert_chinese_tiny'
tokenizer = AutoTokenizer.from_pretrained(pretrained)  # resolves to a BERT-style tokenizer for this checkpoint
model = AlbertForMaskedLM.from_pretrained(pretrained)
inputtext = "今天[MASK]情很好"
# 103 is the id of the [MASK] token in this vocabulary (equivalently: tokenizer.mask_token_id)
maskpos = tokenizer.encode(inputtext, add_special_tokens=True).index(103)
input_ids = torch.tensor(tokenizer.encode(inputtext, add_special_tokens=True)).unsqueeze(0)  # Batch size 1
outputs = model(input_ids, labels=input_ids)
loss, prediction_scores = outputs[:2]
# Turn the logits at the masked position into probabilities and pick the top prediction
logit_prob = softmax(prediction_scores[0, maskpos], dim=-1).data.tolist()
predicted_index = torch.argmax(prediction_scores[0, maskpos]).item()
predicted_token = tokenizer.convert_ids_to_tokens([predicted_index])[0]
print(predicted_token, logit_prob[predicted_index])
```
Result: `感 0.40312355756759644`
| [
-0.07843157649040222,
0.06079765036702156,
0.03495503589510918,
0.035312533378601074,
0.0045317960903048515,
0.05308623984456062,
-0.015443592332303524,
0.049125995486974716,
-0.008690019138157368,
-0.04608974978327751,
0.04472729563713074,
-0.1356252133846283,
0.01754060946404934,
0.010250438004732132,
0.08096777647733688,
0.07339584827423096,
0.004991148132830858,
0.052848197519779205,
-0.08260557055473328,
-0.058810945600271225,
0.06783068180084229,
0.003477740567177534,
0.03517076373100281,
-0.04941381514072418,
0.034171901643276215,
-0.010046561248600483,
-0.04823243245482445,
0.008519523777067661,
0.11863423883914948,
0.058885421603918076,
-0.041996754705905914,
0.0016871090047061443,
0.026583928614854813,
0.06947202980518341,
0.08425525575876236,
0.08475574105978012,
0.0023270368110388517,
0.04906805604696274,
0.010138705372810364,
-0.0014807271072641015,
0.058744125068187714,
0.020863423123955727,
-0.05298556387424469,
-0.07405558228492737,
0.06713206321001053,
-0.0034880125895142555,
-0.030495189130306244,
-0.039621464908123016,
-0.14235709607601166,
-0.025666862726211548,
-0.0825885683298111,
-0.0455312617123127,
0.054817985743284225,
0.03975706174969673,
-0.03519975394010544,
0.04408830404281616,
-0.0032404130324721336,
-0.05349228158593178,
0.018226850777864456,
-0.01856974884867668,
-0.11818183958530426,
-0.0003036397392861545,
0.002990929875522852,
0.010067406110465527,
-0.053238123655319214,
0.03129597008228302,
-0.054682306945323944,
-0.009832203388214111,
0.004193705972284079,
0.06143176928162575,
-0.04548328369855881,
-0.06181519478559494,
-0.021828588098287582,
-0.013742903247475624,
-0.0222767386585474,
-0.038610223680734634,
0.06241117790341377,
-0.07242225110530853,
0.003888108767569065,
-0.010243777185678482,
-0.012690938077867031,
-0.0471627414226532,
0.06118045002222061,
0.07001342624425888,
0.027562228962779045,
0.04234544560313225,
-0.034249935299158096,
-0.04106924682855606,
0.0025739085394889116,
0.0048392340540885925,
-0.04205916449427605,
-0.04535834863781929,
0.0466599278151989,
0.042932599782943726,
0.020792018622159958,
-0.03698163852095604,
0.014130132272839546,
-0.01605905033648014,
-0.06504322588443756,
0.0759468749165535,
0.012851796112954617,
-0.005317748989909887,
0.021445488557219505,
-0.06544643640518188,
-0.022151075303554535,
0.04378974065184593,
-0.01648317091166973,
-0.029486240819096565,
0.043414827436208725,
-0.01239117980003357,
0.05052035301923752,
-0.07007919996976852,
-0.005715592298656702,
-0.1008896678686142,
0.02701643481850624,
-0.017790773883461952,
-0.005311168730258942,
-0.02820938639342785,
-0.0030285625252872705,
0.05096909776329994,
0.05124581605195999,
0.0267274621874094,
-0.041200775653123856,
0.02260909602046013,
-0.036567460745573044,
-0.012024885974824429,
0.047731030732393265,
-6.212414175698863e-34,
0.013959791511297226,
0.08059689402580261,
0.019032837823033333,
0.012461399659514427,
0.0015181396156549454,
0.004056309815496206,
0.03382955491542816,
0.030041079968214035,
-0.08286192268133163,
-0.009728518314659595,
0.014480337500572205,
0.00932200439274311,
-0.11539576947689056,
0.014461030252277851,
-0.10542222112417221,
-0.016016164794564247,
-0.009959930554032326,
0.04313021898269653,
0.021857621148228645,
0.053869642317295074,
0.07670380175113678,
0.024638211354613304,
-0.03186445310711861,
-0.055190715938806534,
-0.058414168655872345,
0.05982564389705658,
0.0817166343331337,
-0.09795131534337997,
-0.04322221502661705,
0.03133708983659744,
-0.07982444763183594,
0.029793336987495422,
-0.017688967287540436,
-0.004156794864684343,
-0.06385703384876251,
-0.039448827505111694,
-0.005025342106819153,
-0.06200359761714935,
-0.02366788312792778,
-0.07237916439771652,
-0.03183196112513542,
0.020833713933825493,
-0.054981835186481476,
-0.05188915506005287,
-0.04165208712220192,
0.028492584824562073,
0.006290350575000048,
0.015189480036497116,
0.08281209319829941,
-0.04158540442585945,
0.021489273756742477,
0.02475110813975334,
-0.06525591015815735,
0.034151870757341385,
0.027840152382850647,
-0.05780615285038948,
-0.013198877684772015,
0.016072051599621773,
0.058677591383457184,
-0.026432808488607407,
0.007244265638291836,
-0.021708941087126732,
0.042635053396224976,
0.10147082060575485,
0.0717039704322815,
0.030695408582687378,
-0.022791685536503792,
-0.06856095790863037,
-0.03989098221063614,
0.005458578933030367,
-0.05684638023376465,
0.014912201091647148,
-0.011781107634305954,
0.020853789523243904,
0.030722426250576973,
-0.07736827433109283,
0.004298649728298187,
-0.03192880377173424,
-0.01136708166450262,
-0.00667835958302021,
-0.054772526025772095,
-0.006434822920709848,
-0.006646067835390568,
-0.024471420794725418,
-0.04995517432689667,
-0.07623375952243805,
0.051514286547899246,
-0.004472518339753151,
-0.009533208794891834,
-0.04601046070456505,
-0.025986382737755775,
-0.09958567470312119,
0.008066745474934578,
-0.03507579490542412,
-0.08172915875911713,
-1.6217680260470842e-33,
0.010053860023617744,
0.03469434753060341,
-0.06593018025159836,
0.03689819201827049,
0.015245631337165833,
-0.06045316159725189,
0.04205745831131935,
0.09724834561347961,
0.0007585717248730361,
0.010008362121880054,
-0.024980587884783745,
-0.004709078464657068,
-0.016694724559783936,
-0.04682445898652077,
0.04822869598865509,
0.023470932617783546,
0.0008156277472153306,
0.04314645007252693,
0.02413155883550644,
0.03140317648649216,
0.06297647207975388,
0.012769902125000954,
-0.12801991403102875,
0.06372014433145523,
-0.06053810566663742,
0.07743988931179047,
0.033579349517822266,
0.004043495282530785,
0.045082855969667435,
0.03310011699795723,
-0.05869591236114502,
0.04758339747786522,
-0.03650308772921562,
0.11120990663766861,
-0.07730887085199356,
0.009163051843643188,
-0.03233441337943077,
-0.03859328851103783,
-0.02919037826359272,
-0.01589079014956951,
0.030105670914053917,
-0.019492078572511673,
-0.06601611524820328,
0.039256300777196884,
-0.025201546028256416,
-0.02329893410205841,
-0.06320926547050476,
-0.027069760486483574,
0.029613299295306206,
-0.054194606840610504,
0.006826386786997318,
-0.014279329217970371,
-0.0923352763056755,
-0.037567876279354095,
-0.10066796839237213,
0.04495011642575264,
0.04035463556647301,
-0.09045456349849701,
-0.04739901050925255,
-0.046221230179071426,
-0.052360132336616516,
-0.04175350069999695,
0.08530989289283752,
-0.07080642133951187,
0.0000972511843428947,
-0.024063650518655777,
0.061277393251657486,
0.0189269557595253,
0.038934461772441864,
-0.07674520462751389,
0.09781219065189362,
0.08143561333417892,
0.09808339923620224,
0.08155135065317154,
0.011507702991366386,
0.056354597210884094,
0.05161948874592781,
-0.03958958014845848,
-0.015047158114612103,
-0.017098402604460716,
-0.055730123072862625,
-0.007130318786948919,
0.10191047936677933,
0.09829249233007431,
0.01594655029475689,
-0.015293720178306103,
0.016091233119368553,
0.09204144775867462,
-0.0155547009781003,
0.0393616147339344,
-0.055202435702085495,
0.06015405058860779,
0.03199542686343193,
0.0957224890589714,
0.02658451721072197,
-4.9787804101697475e-8,
-0.0560513436794281,
-0.04939938709139824,
-0.039221104234457016,
0.0010265330784022808,
-0.1419750303030014,
-0.03872125223278999,
-0.028912916779518127,
-0.051817793399095535,
-0.013007209636271,
-0.08773504197597504,
0.036165717989206314,
0.037909455597400665,
-0.03665124252438545,
0.06537068635225296,
-0.004788433201611042,
0.01904626004397869,
-0.05804723501205444,
0.0361875481903553,
-0.018519848585128784,
-0.036937177181243896,
-0.08494515717029572,
0.05609128251671791,
0.043554313480854034,
-0.09152490645647049,
-0.02895425260066986,
-0.003078008769080043,
-0.12368794530630112,
0.13708434998989105,
-0.07817665487527847,
-0.003859457792714238,
0.04356104135513306,
0.02822728268802166,
0.0027010745834559202,
0.06936144828796387,
-0.014847170561552048,
0.022551018744707108,
0.009117437526583672,
-0.10812031477689743,
0.00718750711530447,
-0.005454276688396931,
0.08022870123386383,
-0.010975359939038754,
-0.07995548844337463,
0.01273538637906313,
0.09413290023803711,
0.008830998092889786,
0.027390269562602043,
-0.11984867602586746,
0.06015775725245476,
0.12081970274448395,
0.016728363931179047,
-0.028514374047517776,
-0.056514088064432144,
-0.004710868000984192,
-0.000014978790204622783,
-0.0005788952112197876,
-0.018787294626235962,
0.016716578975319862,
0.04081832617521286,
0.03506303206086159,
0.023727916181087494,
0.07644981145858765,
0.07207715511322021,
0.0037137139588594437
] |
rinna/japanese-cloob-vit-b-16 | 80b15fb86ca981749e1073bd7896e9ff1c965790 | 2022-07-19T05:49:48.000Z | [
"pytorch",
"cloob",
"ja",
"arxiv:2110.11316",
"transformers",
"feature-extraction",
"japanese",
"clip",
"vision",
"license:apache-2.0"
] | feature-extraction | false | rinna | null | rinna/japanese-cloob-vit-b-16 | 2,908 | 3 | transformers | ---
language: ja
thumbnail: https://github.com/rinnakk/japanese-pretrained-models/blob/master/rinna.png
license: apache-2.0
tags:
- feature-extraction
- ja
- japanese
- clip
- cloob
- vision
---
# rinna/japanese-cloob-vit-b-16

This is a Japanese [CLOOB (Contrastive Leave One Out Boost)](https://arxiv.org/abs/2110.11316) model trained by [rinna Co., Ltd.](https://corp.rinna.co.jp/).
Please see [japanese-clip](https://github.com/rinnakk/japanese-clip) for the other available models.
# How to use the model
1. Install package
```shell
$ pip install git+https://github.com/rinnakk/japanese-clip.git
```
2. Run
```python
import io
import requests
from PIL import Image
import torch
import japanese_clip as ja_clip
device = "cuda" if torch.cuda.is_available() else "cpu"
model, preprocess = ja_clip.load("rinna/japanese-cloob-vit-b-16", device=device)
tokenizer = ja_clip.load_tokenizer()
img = Image.open(io.BytesIO(requests.get('https://images.pexels.com/photos/2253275/pexels-photo-2253275.jpeg?auto=compress&cs=tinysrgb&dpr=3&h=750&w=1260').content))
image = preprocess(img).unsqueeze(0).to(device)
encodings = ja_clip.tokenize(
texts=["犬", "猫", "象"],
max_seq_len=77,
device=device,
tokenizer=tokenizer, # optional; if not passed, the tokenizer is loaded on every call
)
with torch.no_grad():
image_features = model.get_image_features(image)
text_features = model.get_text_features(**encodings)
text_probs = (100.0 * image_features @ text_features.T).softmax(dim=-1)
print("Label probs:", text_probs) # prints: [[1.0, 0.0, 0.0]]
```
# Model architecture
The model uses a ViT-B/16 Transformer architecture as its image encoder and a 12-layer BERT as its text encoder. The image encoder was initialized from the [AugReg `vit-base-patch16-224` model](https://github.com/google-research/vision_transformer).
# Training
The model was trained on [CC12M](https://github.com/google-research-datasets/conceptual-12m), with the captions translated to Japanese.
# License
[The Apache 2.0 license](https://www.apache.org/licenses/LICENSE-2.0) | [
-0.0905565395951271,
-0.04681198298931122,
-0.019995247945189476,
-0.028400283306837082,
0.04035455733537674,
-0.0018163303611800075,
0.00021872237266507,
0.007070617284625769,
-0.022292857989668846,
-0.08932355046272278,
0.09569727629423141,
-0.08403026312589645,
0.04323010519146919,
0.011232768185436726,
0.0551534928381443,
0.026666466146707535,
0.010471190325915813,
0.04277033731341362,
-0.01926836185157299,
-0.0322369821369648,
0.015581146813929081,
-0.037883128970861435,
0.029446594417095184,
-0.02364875189960003,
0.05069851875305176,
0.04538806900382042,
0.04891311004757881,
0.02571534737944603,
0.04112881049513817,
-0.10182563215494156,
0.021934885531663895,
0.027767091989517212,
0.008390054106712341,
-0.012768345884978771,
0.019318273290991783,
0.11720945686101913,
-0.0066745528019964695,
-0.06302101910114288,
-0.01937173679471016,
0.028404168784618378,
-0.0030326521955430508,
0.01256046537309885,
-0.006203556898981333,
-0.13242924213409424,
0.04902924224734306,
-0.06916248053312302,
-0.024584949016571045,
-0.05588408187031746,
0.00821989681571722,
-0.0760253295302391,
-0.12112798541784286,
-0.10205423086881638,
-0.02041822485625744,
0.009035865776240826,
0.05039331316947937,
-0.053294118493795395,
-0.009921811521053314,
-0.022672582417726517,
-0.005336584988981485,
-0.03429029881954193,
-0.03192479535937309,
0.05646209046244621,
-0.016125276684761047,
0.017351090908050537,
-0.006175138056278229,
-0.06578672677278519,
-0.026259956881403923,
-0.019181028008461,
0.062321923673152924,
-0.08424992859363556,
-0.04829907417297363,
-0.02692708931863308,
-0.04299607127904892,
-0.030306821689009666,
-0.038286689668893814,
-0.012607911601662636,
0.07131680846214294,
0.1254560649394989,
0.030420780181884766,
-0.09209904819726944,
-0.03893917053937912,
-0.03275652229785919,
0.08037993311882019,
-0.015588977374136448,
0.07796228677034378,
0.09208572655916214,
-0.004992935340851545,
0.00936831720173359,
0.026932431384921074,
0.003642099676653743,
0.021807052195072174,
-0.002855214523151517,
-0.02632533758878708,
0.07538636028766632,
-0.014737743884325027,
0.047280266880989075,
-0.027354640886187553,
0.023970460519194603,
-0.0988784208893776,
0.13826879858970642,
-0.019024208188056946,
-0.031837355345487595,
0.0942520871758461,
-0.03104633465409279,
-0.013438321650028229,
0.019073685631155968,
-0.02334383875131607,
0.02613246440887451,
0.031929969787597656,
-0.057017311453819275,
0.039569608867168427,
-0.010200625285506248,
-0.06670328229665756,
-0.042635541409254074,
0.02060660347342491,
0.0358600839972496,
0.031167257577180862,
-0.021760521456599236,
-0.004629394970834255,
0.0020635752007365227,
0.015018160454928875,
-0.014920474961400032,
-0.04153653234243393,
-0.01310171838849783,
-0.030724840238690376,
0.05223872512578964,
-0.09012742340564728,
3.137894007443785e-33,
0.11548084020614624,
-0.006498063914477825,
-0.022685298696160316,
-0.04820280894637108,
0.0492350198328495,
-0.016945768147706985,
-0.019618140533566475,
-0.04000790789723396,
-0.1076597347855568,
-0.03103477507829666,
0.017712624743580818,
0.01938914693892002,
-0.11148347705602646,
0.02491525001823902,
0.0011202944442629814,
-0.07734278589487076,
-0.02654084749519825,
-0.002687811851501465,
0.0433395691215992,
0.014976602047681808,
0.06345387548208237,
0.009754822589457035,
-0.031130662187933922,
-0.0003527559747453779,
-0.033572860062122345,
0.04184039682149887,
0.0862051323056221,
-0.07951560616493225,
-0.03676661476492882,
0.07897982746362686,
0.04468194395303726,
-0.009891009889543056,
-0.02734777145087719,
-0.016407741233706474,
-0.09560129046440125,
-0.07788491994142532,
-0.006440218538045883,
-0.0356353297829628,
-0.06642482429742813,
-0.04316334053874016,
0.03768617659807205,
-0.014382963068783283,
-0.061143964529037476,
-0.019088437780737877,
0.007811781018972397,
-0.07316344976425171,
0.039423827081918716,
0.03248270973563194,
-0.010884600691497326,
0.06825609505176544,
-0.02079435996711254,
0.0023785289376974106,
-0.01426939107477665,
-0.0017683448968455195,
-0.0709456130862236,
0.03126818314194679,
0.07286945730447769,
0.005848131142556667,
0.022476239129900932,
-0.01826743222773075,
0.00559830479323864,
0.05167946591973305,
0.00034373183734714985,
0.048875052481889725,
0.05598634108901024,
0.0028896238654851913,
-0.0092345355078578,
-0.013071016408503056,
0.010201232507824898,
0.06517323851585388,
-0.05459333956241608,
-0.009652464650571346,
-0.003563718870282173,
-0.018951507285237312,
0.026973851025104523,
-0.17853017151355743,
-0.011319315060973167,
-0.013687465339899063,
0.01280911359935999,
0.04122500494122505,
-0.08925215154886246,
0.06062869355082512,
0.07296062260866165,
-0.05390838906168938,
-0.014266779646277428,
0.046998459845781326,
0.14093559980392456,
0.0035055631306022406,
0.014364395290613174,
-0.055605459958314896,
0.030900154262781143,
0.09437362849712372,
0.023326195776462555,
-0.05799436569213867,
-0.012397884391248226,
-2.91505682905535e-33,
0.1062779501080513,
0.02108132280409336,
-0.026642754673957825,
0.05695381388068199,
0.019686324521899223,
-0.06988944858312607,
0.040714796632528305,
0.03225995600223541,
-0.05253647267818451,
-0.03292795643210411,
-0.0024741636589169502,
-0.006385536398738623,
-0.020002156496047974,
-0.042092856019735336,
0.1022472083568573,
-0.044173333793878555,
0.01825224980711937,
-0.04960799962282181,
0.005741853732615709,
0.0453314483165741,
-0.009794682264328003,
0.05072379484772682,
-0.033713433891534805,
0.060515351593494415,
-0.07911061495542526,
0.001428229850716889,
0.03623839467763901,
0.032075099647045135,
-0.01701406203210354,
-0.004091099835932255,
-0.034154780209064484,
0.008571697399020195,
-0.10526730865240097,
0.012082944624125957,
-0.08043520897626877,
0.0061760046519339085,
0.013502493500709534,
0.05850692465901375,
0.02501695603132248,
0.09330782294273376,
0.04030398279428482,
-0.010550805367529392,
-0.10222427546977997,
0.03771268576383591,
-0.08735460788011551,
-0.002542973728850484,
0.03330093249678612,
0.03427169471979141,
-0.04262299835681915,
-0.06921645998954773,
0.024086209014058113,
-0.04207160696387291,
0.00729987770318985,
0.029020631685853004,
-0.12249989807605743,
-0.0035018124617636204,
0.015702389180660248,
0.031661245971918106,
-0.043254364281892776,
0.023406220600008965,
-0.04899923503398895,
-0.051279351115226746,
-0.03713631629943848,
-0.04520967975258827,
0.01868864893913269,
-0.032113704830408096,
-0.00333214714191854,
-0.015970291569828987,
0.01493165735155344,
-0.08784482628107071,
-0.03469221293926239,
0.02686361037194729,
0.09103861451148987,
0.04338158667087555,
0.08136078715324402,
-0.042408719658851624,
-0.0714585930109024,
0.060103412717580795,
0.10971048474311829,
-0.024955566972494125,
-0.03951204568147659,
0.014029164798557758,
-0.02347651682794094,
-0.0008455705246888101,
0.0731501504778862,
0.026672258973121643,
-0.03942026570439339,
0.07301672548055649,
0.05760549381375313,
-0.09111858159303665,
0.020785082131624222,
0.07491490244865417,
0.049333106726408005,
0.09546130150556564,
0.09534038603305817,
-5.458675644831601e-8,
-0.05145186185836792,
-0.08771373331546783,
-0.054936882108449936,
0.025967039167881012,
-0.016946665942668915,
-0.013649463653564453,
-0.02187272347509861,
-0.017138827592134476,
0.020757703110575676,
-0.01706586591899395,
0.02154899761080742,
0.074528269469738,
-0.08125714957714081,
0.07895553112030029,
-0.020462948828935623,
0.058993637561798096,
0.06842733919620514,
0.16871172189712524,
-0.02233770489692688,
0.014776061289012432,
-0.006263590417802334,
-0.02295181341469288,
0.06295038014650345,
-0.016342557966709137,
-0.035334210842847824,
0.009469306096434593,
-0.08137506246566772,
0.05474719777703285,
-0.03499557077884674,
-0.006143397651612759,
0.021881822496652603,
0.0188650693744421,
0.0010275579988956451,
-0.01304363738745451,
0.04210812598466873,
0.046451784670352936,
0.0028545584063977003,
-0.0436953641474247,
0.025432651862502098,
0.029478777199983597,
0.09505603462457657,
-0.0212397538125515,
-0.025560542941093445,
-0.05638080835342407,
-0.004743928089737892,
0.017008773982524872,
0.03571896627545357,
-0.11576040089130402,
-0.008012918755412102,
0.030795328319072723,
0.07033107429742813,
-0.07552418112754822,
0.0238431915640831,
0.025452924892306328,
0.0004502975207287818,
0.04181722551584244,
0.05704108998179436,
-0.08208762854337692,
0.03660789132118225,
0.06649461388587952,
0.06307972222566605,
0.0044739702716469765,
0.04114772379398346,
0.010017567314207554
] |
vinai/bartpho-word | 748f5b5deee937629b2ac1b7e7453730c71a969e | 2022-06-08T04:49:05.000Z | [
"pytorch",
"tf",
"mbart",
"feature-extraction",
"arxiv:2109.09701",
"transformers"
] | feature-extraction | false | vinai | null | vinai/bartpho-word | 2,893 | null | transformers | # <a name="introduction"></a> BARTpho: Pre-trained Sequence-to-Sequence Models for Vietnamese
Two BARTpho versions, `BARTpho-syllable` and `BARTpho-word`, are the first public large-scale monolingual sequence-to-sequence models pre-trained for Vietnamese. BARTpho uses the "large" architecture and pre-training scheme of the sequence-to-sequence denoising model [BART](https://github.com/pytorch/fairseq/tree/main/examples/bart), making it especially suitable for generative NLP tasks. Experiments on a downstream task of Vietnamese text summarization show that in both automatic and human evaluations, BARTpho outperforms the strong baseline [mBART](https://github.com/pytorch/fairseq/tree/main/examples/mbart) and improves the state-of-the-art.
The general architecture and experimental results of BARTpho can be found in our [paper](https://arxiv.org/abs/2109.09701):
@article{bartpho,
title = {{BARTpho: Pre-trained Sequence-to-Sequence Models for Vietnamese}},
author = {Nguyen Luong Tran and Duong Minh Le and Dat Quoc Nguyen},
journal = {arXiv preprint},
volume = {arXiv:2109.09701},
year = {2021}
}
**Please CITE** our paper when BARTpho is used to help produce published results or incorporated into other software.
For further information or requests, please go to [BARTpho's homepage](https://github.com/VinAIResearch/BARTpho)!
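A minimal usage sketch follows (an assumption, not an official recipe: it presumes the checkpoint loads through the generic `AutoModel`/`AutoTokenizer` classes, and the Vietnamese sentence is only a placeholder input):
```python
import torch
from transformers import AutoModel, AutoTokenizer
bartpho = AutoModel.from_pretrained("vinai/bartpho-word")
tokenizer = AutoTokenizer.from_pretrained("vinai/bartpho-word")
line = "Chúng tôi là những nghiên cứu viên."  # placeholder input sentence
input_ids = tokenizer(line, return_tensors="pt")
with torch.no_grad():
    features = bartpho(**input_ids)  # hidden states of the mBART-style encoder-decoder
```
For generation tasks, loading the same checkpoint through a sequence-to-sequence head (e.g. `AutoModelForSeq2SeqLM`) should also work, since BARTpho follows the mBART architecture.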
| [
-0.07329877465963364,
-0.07400128245353699,
0.0300898440182209,
-0.021432004868984222,
-0.05373455211520195,
0.08036810904741287,
-0.06581486761569977,
0.04520939663052559,
0.07592716813087463,
-0.01190557423979044,
0.008763941936194897,
-0.012806877493858337,
-0.0394909530878067,
0.01676284708082676,
-0.022300567477941513,
0.027213623747229576,
0.04344024136662483,
0.02971642278134823,
0.0261241365224123,
-0.09847164899110794,
0.04187940061092377,
0.10426332801580429,
0.004082508850842714,
-0.017808569595217705,
0.05159226432442665,
-0.08837376534938812,
-0.03679320588707924,
0.028266677632927895,
0.07812914252281189,
-0.00043589898268692195,
0.0254257433116436,
0.051365990191698074,
0.08454816043376923,
0.09602908045053482,
-0.04177575185894966,
0.07938235253095627,
-0.028050396591424942,
-0.01606191135942936,
0.036996036767959595,
0.018339579924941063,
-0.014734252355992794,
0.004126701969653368,
-0.03329652547836304,
-0.017538458108901978,
0.11663240939378738,
-0.06809645891189575,
-0.018025465309619904,
0.01712239719927311,
-0.0033120966982096434,
-0.014789861626923084,
-0.09846606105566025,
0.06843392550945282,
0.03529397025704384,
0.1250879466533661,
-0.042301639914512634,
-0.0037798588164150715,
0.02865956723690033,
0.07019414752721786,
0.018937546759843826,
-0.10110404342412949,
-0.07248587161302567,
-0.05860617756843567,
-0.061274029314517975,
-0.05450361222028732,
0.03407053276896477,
-0.03253365308046341,
0.049103640019893646,
0.07102350145578384,
-0.00558297848328948,
0.07774834334850311,
-0.046110332012176514,
0.07171881198883057,
0.028422560542821884,
0.08983004838228226,
-0.052037667483091354,
0.08210836350917816,
0.023027438670396805,
-0.031394828110933304,
0.0408686138689518,
-0.12343184649944305,
0.04011156037449837,
0.035109568387269974,
0.10747626423835754,
-0.01589513011276722,
-0.002053948352113366,
0.06694342941045761,
0.03701001778244972,
0.01281410176306963,
0.0285092294216156,
0.007325615733861923,
-0.02212456986308098,
-0.10749747604131699,
0.03879212588071823,
-0.04077949747443199,
0.011837908066809177,
0.05936373770236969,
0.03212445229291916,
-0.013407436199486256,
-0.0002144526515621692,
0.06675005704164505,
0.10868903249502182,
0.10821463912725449,
-0.021492671221494675,
-0.11960750818252563,
-0.023795483633875847,
-0.017569614574313164,
0.050655897706747055,
-0.018736466765403748,
0.018146347254514694,
-0.07210056483745575,
0.02277340553700924,
0.001305482815951109,
-0.02031056396663189,
-0.024974627420306206,
-0.004641675390303135,
-0.02535056136548519,
0.03971150889992714,
-0.07677596807479858,
0.01133759692311287,
0.03340037539601326,
-0.01498262770473957,
-0.023499390110373497,
0.008559446781873703,
0.04726424068212509,
-0.008255761116743088,
-0.03084113635122776,
0.036193665117025375,
1.0734034011373411e-33,
0.07349999994039536,
0.0270075686275959,
0.018659517168998718,
-0.003979862667620182,
-0.0031833997927606106,
-0.06174890324473381,
-0.04142800718545914,
0.008269847370684147,
-0.06827874481678009,
-0.03660421073436737,
-0.009949726983904839,
-0.016304612159729004,
-0.08427536487579346,
0.06467898935079575,
0.030067237094044685,
-0.009141229093074799,
-0.03901195898652077,
0.04112406447529793,
-0.044289641082286835,
0.009119244292378426,
0.023205727338790894,
-0.00877109169960022,
0.01600232720375061,
-0.06696948409080505,
0.02074897661805153,
0.04940182715654373,
0.07389738410711288,
-0.11082051694393158,
0.00839130487293005,
0.030224740505218506,
-0.13934451341629028,
0.01210037712007761,
0.006271419581025839,
0.05472865328192711,
-0.06139204651117325,
-0.0327872671186924,
-0.0569855272769928,
-0.03726304695010185,
-0.0008473025518469512,
-0.06220243498682976,
0.03292505070567131,
0.044838953763246536,
0.03191899508237839,
0.013787229545414448,
-0.0529126338660717,
-0.05651054531335831,
-0.03915970027446747,
-0.018145691603422165,
0.0507953055202961,
-0.043531935662031174,
0.015059414319694042,
-0.01345218624919653,
-0.03516315668821335,
-0.021953947842121124,
0.04776529595255852,
0.008232032880187035,
0.04842599853873253,
-0.008526506833732128,
0.02456211857497692,
0.009377408772706985,
0.019372830167412758,
-0.007287796586751938,
0.060263410210609436,
0.10862412303686142,
0.037108272314071655,
0.003519910853356123,
-0.06778977066278458,
0.0636477842926979,
0.049009546637535095,
-0.08301603049039841,
-0.010306376032531261,
-0.0466785803437233,
-0.017838450148701668,
0.021101800724864006,
0.03822975233197212,
0.028007138520479202,
0.0511988140642643,
-0.10091779381036758,
-0.04870019853115082,
0.04258803278207779,
-0.03513865917921066,
-0.020018814131617546,
0.016531504690647125,
-0.04612328112125397,
-0.03971762955188751,
0.0033572425600141287,
0.06177680939435959,
-0.0033898598048835993,
0.0070008784532547,
-0.056751880794763565,
-0.008251079358160496,
0.0567106194794178,
-0.001702054520137608,
0.05909055843949318,
-0.0014404349494725466,
-1.8594772283543365e-33,
-0.02198406495153904,
0.06500311195850372,
-0.08838816732168198,
0.03144069015979767,
-0.08042696863412857,
-0.04604120925068855,
-0.01168772578239441,
0.06305186450481415,
-0.050626710057258606,
-0.047726016491651535,
-0.05398715287446976,
-0.07402962446212769,
0.08248482644557953,
-0.02189934439957142,
0.02066665142774582,
-0.0519028976559639,
0.07506819814443588,
0.028201643377542496,
0.03773537278175354,
0.08663871884346008,
0.05350866913795471,
0.02846042439341545,
-0.10642575472593307,
0.04151548072695732,
0.03638719394803047,
0.024446966126561165,
0.004639544989913702,
0.04668805003166199,
-0.06399115920066833,
-0.029434066265821457,
0.02945181168615818,
0.01128793228417635,
-0.003107159398496151,
0.012699399143457413,
-0.08999529480934143,
0.017355989664793015,
0.059312064200639725,
-0.0020961978007107973,
-0.0709776058793068,
0.07819705456495285,
0.10830480605363846,
0.008394354954361916,
-0.028905771672725677,
-0.057349156588315964,
-0.05093708634376526,
-0.06812825798988342,
-0.12489323318004608,
-0.07235950231552124,
0.04758923873305321,
0.010869983583688736,
-0.021136509254574776,
0.016312560066580772,
-0.09045097976922989,
0.034286826848983765,
-0.050268929451704025,
-0.042031873017549515,
0.015188062563538551,
-0.043038010597229004,
-0.0477781817317009,
-0.04562293738126755,
-0.11123015731573105,
0.04370797798037529,
0.01300693117082119,
-0.061537791043519974,
0.04228468984365463,
-0.058204419910907745,
0.02582651376724243,
-0.019389161840081215,
0.010286089032888412,
-0.09021171182394028,
0.06083579361438751,
0.026765769347548485,
0.026006296277046204,
0.08206263929605484,
0.013553778640925884,
-0.040444210171699524,
0.005074677057564259,
-0.023932842537760735,
-0.03230136260390282,
-0.13550619781017303,
-0.05022788047790527,
-0.0382663831114769,
-0.009427722543478012,
0.02258804254233837,
-0.03119899146258831,
0.07690108567476273,
0.001361128524877131,
0.03643694519996643,
0.004183460492640734,
0.048803988844156265,
0.016027869656682014,
-0.012372522614896297,
-0.026452694088220596,
0.009276770986616611,
-0.05317064747214317,
-4.805693265552691e-8,
-0.025217972695827484,
-0.008913395926356316,
-0.023119689896702766,
0.051991622895002365,
-0.037925779819488525,
-0.13959188759326935,
-0.018205363303422928,
0.036480627954006195,
-0.018879661336541176,
-0.09514322131872177,
-0.007483548019081354,
0.022443020716309547,
-0.028255386278033257,
-0.013231292366981506,
0.005388807505369186,
0.06648017466068268,
0.07784788310527802,
0.03151637688279152,
-0.03674353286623955,
-0.037649042904376984,
0.036056991666555405,
0.042594607919454575,
-0.008146352134644985,
-0.008722868748009205,
0.012455303221940994,
0.0033089821226894855,
-0.10745342075824738,
0.032233864068984985,
0.00541250454261899,
-0.07583822309970856,
0.05362611636519432,
0.09301117062568665,
-0.04352930188179016,
-0.02012641169130802,
0.031273405998945236,
0.0843701958656311,
0.005090002901852131,
0.007670153398066759,
-0.01564132235944271,
0.03998755291104317,
0.09041450917720795,
-0.010525258257985115,
-0.08775503933429718,
-0.028499577194452286,
0.029290610924363136,
-0.021067317575216293,
-0.05202774330973625,
-0.09821329265832901,
0.06013694778084755,
-0.019302139058709145,
0.0693601742386818,
-0.05023511126637459,
0.06773090362548828,
-0.04125136137008667,
0.06491779536008835,
0.050256986171007156,
0.015377843752503395,
-0.012006756849586964,
0.05756692960858345,
0.018569080159068108,
0.07765927910804749,
0.03765463829040527,
0.006275605410337448,
0.01836855709552765
] |
microsoft/dit-base | 5f3a1d82def5866db1ac86d7701fe4f508050f42 | 2022-03-08T10:40:10.000Z | [
"pytorch",
"beit",
"arxiv:2203.02378",
"transformers",
"dit"
] | null | false | microsoft | null | microsoft/dit-base | 2,885 | 3 | transformers | ---
tags:
- dit
inference: false
---
# Document Image Transformer (base-sized model)
Document Image Transformer (DiT) model pre-trained on IIT-CDIP (Lewis et al., 2006), a dataset that includes 42 million document images. It was introduced in the paper [DiT: Self-supervised Pre-training for Document Image Transformer](https://arxiv.org/abs/2203.02378) by Li et al. and first released in [this repository](https://github.com/microsoft/unilm/tree/master/dit). Note that DiT is identical to the architecture of [BEiT](https://huggingface.co/docs/transformers/model_doc/beit).
Disclaimer: The team releasing DiT did not write a model card for this model so this model card has been written by the Hugging Face team.
## Model description
The Document Image Transformer (DiT) is a transformer encoder model (BERT-like) pre-trained on a large collection of images in a self-supervised fashion. The pre-training objective for the model is to predict visual tokens from the encoder of a discrete VAE (dVAE), based on masked patches.
Images are presented to the model as a sequence of fixed-size patches (resolution 16x16), which are linearly embedded. One also adds absolute position embeddings before feeding the sequence to the layers of the Transformer encoder.
By pre-training the model, it learns an inner representation of images that can then be used to extract features useful for downstream tasks: if you have a dataset of labeled document images for instance, you can train a standard classifier by placing a linear layer on top of the pre-trained encoder.
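As an illustration of that idea, here is a minimal linear-probe sketch. This is an assumed setup for a hypothetical document-classification task, not the authors' fine-tuning recipe; the image path and `num_labels` are placeholders.
```python
import torch
from torch import nn
from PIL import Image
from transformers import BeitFeatureExtractor, BeitModel
image = Image.open('path_to_your_document_image').convert('RGB')
num_labels = 3  # placeholder label count for a hypothetical task
feature_extractor = BeitFeatureExtractor.from_pretrained("microsoft/dit-base")
encoder = BeitModel.from_pretrained("microsoft/dit-base")
classifier = nn.Linear(encoder.config.hidden_size, num_labels)  # the linear head to be trained
pixel_values = feature_extractor(images=image, return_tensors="pt").pixel_values
with torch.no_grad():
    hidden_states = encoder(pixel_values).last_hidden_state  # (1, 1 + num_patches, hidden_size)
# Mean-pool the patch tokens (positions 1..N) as a simple image representation, then classify
logits = classifier(hidden_states[:, 1:].mean(dim=1))  # shape: (1, num_labels)
```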
## Intended uses & limitations
You can use the raw model for encoding document images into a vector space, but it's mostly meant to be fine-tuned on tasks like document image classification, table detection or document layout analysis. See the [model hub](https://huggingface.co/models?search=microsoft/dit) to look for fine-tuned versions on a task that interests you.
### How to use
Here is how to use this model in PyTorch:
```python
from transformers import BeitFeatureExtractor, BeitForMaskedImageModeling
import torch
from PIL import Image
image = Image.open('path_to_your_document_image').convert('RGB')
feature_extractor = BeitFeatureExtractor.from_pretrained("microsoft/dit-base")
model = BeitForMaskedImageModeling.from_pretrained("microsoft/dit-base")
num_patches = (model.config.image_size // model.config.patch_size) ** 2
pixel_values = feature_extractor(images=image, return_tensors="pt").pixel_values
# create random boolean mask of shape (batch_size, num_patches)
bool_masked_pos = torch.randint(low=0, high=2, size=(1, num_patches)).bool()
outputs = model(pixel_values, bool_masked_pos=bool_masked_pos)
loss, logits = outputs.loss, outputs.logits
```
### BibTeX entry and citation info
```bibtex
@article{Lewis2006BuildingAT,
title={Building a test collection for complex document information processing},
author={David D. Lewis and Gady Agam and Shlomo Engelson Argamon and Ophir Frieder and David A. Grossman and Jefferson Heard},
journal={Proceedings of the 29th annual international ACM SIGIR conference on Research and development in information retrieval},
year={2006}
}
``` | [
-0.1661364734172821,
0.0034617220517247915,
0.06458458304405212,
0.017864739522337914,
0.053771235048770905,
-0.05755238234996796,
-0.03257089853286743,
0.11355631053447723,
0.04839848726987839,
0.020928796380758286,
0.030855447053909302,
0.03050827980041504,
0.0454176589846611,
0.07569149881601334,
-0.023819487541913986,
0.04075360670685768,
-0.0028709224425256252,
-0.012926467694342136,
-0.07732632011175156,
-0.008874109014868736,
0.01739116944372654,
0.0414823442697525,
0.010504314675927162,
-0.05064919963479042,
0.028591768816113472,
0.013949899934232235,
-0.019338836893439293,
-0.0625636950135231,
0.044424645602703094,
-0.06950941681861877,
0.03386704996228218,
0.026197705417871475,
-0.015523089095950127,
0.11928925663232803,
-0.02297051250934601,
0.08323469758033752,
0.04567363113164902,
-0.011974669061601162,
0.03728193789720535,
0.0438399612903595,
0.02951250597834587,
0.057271141558885574,
-0.01394802238792181,
-0.029820265248417854,
0.12037145346403122,
0.04991301894187927,
-0.037485577166080475,
0.007013526279479265,
-0.09818360954523087,
0.020675305277109146,
-0.048425670713186264,
0.009675164707005024,
-0.006639642640948296,
0.1122647374868393,
0.02369224838912487,
0.04563301056623459,
0.06607354432344437,
-0.03147295117378235,
-0.06241554021835327,
-0.014654613099992275,
-0.06265150755643845,
0.006738361436873674,
-0.09405749291181564,
-0.006822565104812384,
-0.04544747993350029,
0.07930061221122742,
0.010940785519778728,
-0.0064218249171972275,
0.05222245305776596,
-0.1509045958518982,
-0.03002404049038887,
0.03682169318199158,
-0.002426228951662779,
0.02956383116543293,
0.05356590077280998,
-0.060746245086193085,
0.1287938952445984,
0.011942402459681034,
0.04568444937467575,
-0.08271319419145584,
0.05525314435362816,
0.01880040019750595,
0.12009253352880478,
-0.042033545672893524,
0.04075296223163605,
-0.006240054499357939,
-0.06568659096956253,
0.04580871760845184,
-0.005303554702550173,
0.012404380366206169,
-0.06173763796687126,
-0.10369303077459335,
-0.019420398399233818,
0.044226374477148056,
-0.026873720809817314,
-0.0007089583086781204,
-0.0162916649132967,
-0.034632716327905655,
-0.002837705658748746,
0.08430508524179459,
0.022092120721936226,
-0.006726746913045645,
0.030002927407622337,
-0.03655480220913887,
0.004481298383325338,
-0.08569272607564926,
0.08682607114315033,
0.0020261434838175774,
-0.004992757458239794,
-0.08406137675046921,
0.03850145265460014,
-0.0411669984459877,
-0.057543858885765076,
-0.08448413759469986,
-0.029366644099354744,
-0.05231762304902077,
-0.026601700112223625,
0.00803733803331852,
-0.013250958174467087,
-0.04876163229346275,
0.022079987451434135,
0.05004296079277992,
0.020189503207802773,
-0.021117592230439186,
-0.0561080239713192,
-0.03278008848428726,
-0.13314275443553925,
1.459229758929498e-33,
0.008887415751814842,
0.018856678158044815,
0.033255647867918015,
0.037286706268787384,
-0.014189106412231922,
-0.00144888402428478,
0.011765985749661922,
0.01268857717514038,
-0.011424940079450607,
-0.05782841145992279,
-0.033684805035591125,
0.020424453541636467,
-0.07520138472318649,
0.10036903619766235,
-0.004313115030527115,
-0.018033064901828766,
-0.0460997149348259,
0.06892316788434982,
0.0014466738793998957,
0.0007516599725931883,
0.05401352047920227,
0.0380438007414341,
-0.0173683762550354,
-0.037228040397167206,
0.0007300387369468808,
0.0014991878997534513,
0.02455879934132099,
-0.05577106773853302,
0.020800339058041573,
-0.007477238308638334,
-0.07971782237291336,
0.02344631962478161,
0.05430281534790993,
-0.012782151810824871,
0.0003957405861001462,
0.002362977247685194,
-0.056766923516988754,
-0.10262376815080643,
-0.003571744542568922,
-0.04717382788658142,
0.019540634006261826,
0.020114602521061897,
0.043552216142416,
-0.07182645052671432,
-0.016952473670244217,
-0.003992695361375809,
0.013336969539523125,
-0.03150559216737747,
-0.028785841539502144,
-0.009993002749979496,
0.05617022514343262,
0.05293341726064682,
-0.07534412294626236,
-0.059322115033864975,
0.02002860978245735,
0.044256437569856644,
0.054166968911886215,
0.030466532334685326,
0.11857607960700989,
0.06504222005605698,
0.011818910948932171,
-0.010484565049409866,
-0.04797082021832466,
0.05646137520670891,
0.012369663454592228,
-0.05426795408129692,
0.016919100657105446,
-0.004023361951112747,
0.0070008947513997555,
0.04071260243654251,
-0.056459344923496246,
0.0008147903135977685,
-0.05834170803427696,
-0.12231405079364777,
0.057734712958335876,
-0.045257873833179474,
0.010697117075324059,
0.01310884952545166,
-0.08790575712919235,
0.027502533048391342,
-0.060080911964178085,
0.026101334020495415,
-0.017723694443702698,
-0.10288401693105698,
-0.018999293446540833,
0.04237416759133339,
0.045830756425857544,
-0.07107677310705185,
0.016454562544822693,
-0.03834665194153786,
0.08094599097967148,
0.03233218193054199,
-0.05537670478224754,
0.020257124677300453,
0.050407763570547104,
-1.1840657250206975e-33,
0.017166756093502045,
0.04233062267303467,
-0.07841107994318008,
0.06823565810918808,
-0.055566705763339996,
-0.051591694355010986,
0.05826248601078987,
0.15947909653186798,
0.022150883451104164,
-0.06584423035383224,
0.05216190591454506,
-0.019618431106209755,
-0.037600453943014145,
-0.0617617703974247,
0.013255703262984753,
-0.018228648230433464,
0.0007490104180760682,
-0.050184283405542374,
-0.001414442784152925,
0.040331728756427765,
0.05238255113363266,
0.07926703244447708,
-0.04703653231263161,
0.020389238372445107,
-0.03231760486960411,
0.06542898714542389,
-0.06683611124753952,
0.05738730728626251,
0.07084441184997559,
0.016580305993556976,
-0.05724756792187691,
-0.009527435526251793,
0.011699331924319267,
0.005298271309584379,
-0.04036996141076088,
0.014088528230786324,
0.006794249173253775,
0.0032507956493645906,
-0.06742320954799652,
0.06606513261795044,
-0.010701816529035568,
-0.012636792846024036,
-0.0610031895339489,
0.05790693312883377,
-0.09850580990314484,
-0.01461276225745678,
-0.012164672836661339,
-0.024355977773666382,
0.1108812466263771,
0.03523357957601547,
0.005167438182979822,
-0.00900822039693594,
-0.04010026901960373,
-0.001918235095217824,
-0.03639940917491913,
-0.08546019345521927,
-0.011599940247833729,
-0.03617999702692032,
0.02583906054496765,
0.020194146782159805,
0.007815666496753693,
-0.049327775835990906,
0.0005884721758775413,
-0.0341406911611557,
-0.034912098199129105,
-0.05973677709698677,
-0.13223505020141602,
-0.012928269803524017,
-0.02822733111679554,
0.07818403840065002,
-0.013179279863834381,
0.031316839158535004,
0.043768540024757385,
-0.052213456481695175,
-0.006147453095763922,
-0.02678080089390278,
0.005936584901064634,
-0.024385301396250725,
0.009224392473697662,
-0.0582730732858181,
-0.08356256037950516,
-0.044556669890880585,
0.016843538731336594,
0.07474374026060104,
0.08158987015485764,
0.03863969072699547,
0.015269581228494644,
-0.03390907868742943,
-0.017314422875642776,
0.06847865879535675,
-0.038481589406728745,
0.033596258610486984,
0.08717235922813416,
0.10385199636220932,
-0.0227341428399086,
-5.17510052588932e-8,
-0.10553241521120071,
-0.003167524700984359,
-0.02526208944618702,
-0.03153567761182785,
-0.04375216364860535,
-0.09289775788784027,
0.07594434171915054,
0.10702955722808838,
-0.0673772320151329,
0.0003075494314543903,
0.09376852959394455,
-0.006566268857568502,
-0.0812792107462883,
-0.015697235241532326,
0.05473347008228302,
0.09104000777006149,
0.04454605653882027,
0.015795091167092323,
-0.0005872052861377597,
0.01581217534840107,
0.0483367033302784,
-0.025983599945902824,
0.02142469212412834,
-0.00017399557691533118,
0.03381507843732834,
-0.017883965745568275,
-0.040352873504161835,
0.036261558532714844,
0.005932142026722431,
-0.0019700825214385986,
-0.010768018662929535,
0.04436257854104042,
-0.03743240609765053,
-0.02338268607854843,
0.05907011777162552,
0.0628749281167984,
0.017809798941016197,
-0.03668332099914551,
-0.039762575179338455,
-0.033454980701208115,
0.04869261756539345,
0.006671978626400232,
-0.10533791035413742,
0.0393986813724041,
0.08873596042394638,
0.0034782083239406347,
0.04499836638569832,
-0.03527738153934479,
-0.03729088604450226,
0.0027040550485253334,
0.005050566978752613,
-0.02605583332479,
0.001809710287488997,
0.07332710176706314,
-0.034051332622766495,
0.05162264034152031,
0.062202516943216324,
-0.03859269991517067,
0.059113360941410065,
0.07119355350732803,
0.03773937374353409,
0.003860377473756671,
0.08380807191133499,
0.039175186306238174
] |
allegro/herbert-large-cased | 8d0fa3bc0566c3a332bec0d471c8d8c37b5cbb90 | 2022-06-26T14:18:54.000Z | [
"pytorch",
"tf",
"jax",
"bert",
"feature-extraction",
"pl",
"transformers",
"herbert",
"license:cc-by-4.0"
] | feature-extraction | false | allegro | null | allegro/herbert-large-cased | 2,872 | 3 | transformers | ---
language: pl
tags:
- herbert
license: cc-by-4.0
---
# HerBERT
**[HerBERT](https://en.wikipedia.org/wiki/Zbigniew_Herbert)** is a BERT-based Language Model trained on Polish corpora
using Masked Language Modelling (MLM) and Sentence Structural Objective (SSO) with dynamic masking of whole words. For more details, please refer to: [HerBERT: Efficiently Pretrained Transformer-based Language Model for Polish](https://www.aclweb.org/anthology/2021.bsnlp-1.1/).
Model training and experiments were conducted with [transformers](https://github.com/huggingface/transformers) in version 2.9.
## Corpus
HerBERT was trained on six different corpora available for the Polish language:
| Corpus | Tokens | Documents |
| :------ | ------: | ------: |
| [CCNet Middle](https://github.com/facebookresearch/cc_net) | 3243M | 7.9M |
| [CCNet Head](https://github.com/facebookresearch/cc_net) | 2641M | 7.0M |
| [National Corpus of Polish](http://nkjp.pl/index.php?page=14&lang=1)| 1357M | 3.9M |
| [Open Subtitles](http://opus.nlpl.eu/OpenSubtitles-v2018.php) | 1056M | 1.1M |
| [Wikipedia](https://dumps.wikimedia.org/) | 260M | 1.4M |
| [Wolne Lektury](https://wolnelektury.pl/) | 41M | 5.5k |
## Tokenizer
The training dataset was tokenized into subwords using a character-level byte-pair encoding (``CharBPETokenizer``) with
a vocabulary size of 50k tokens. The tokenizer itself was trained with the [tokenizers](https://github.com/huggingface/tokenizers) library.
We kindly encourage you to use the ``Fast`` version of the tokenizer, namely ``HerbertTokenizerFast``.
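A minimal sketch of loading it directly (assuming a transformers version that ships `HerbertTokenizerFast`; the Polish sentence is just a placeholder):
```python
from transformers import HerbertTokenizerFast
fast_tokenizer = HerbertTokenizerFast.from_pretrained("allegro/herbert-large-cased")
print(fast_tokenizer.tokenize("Kraków jest pięknym miastem."))  # subword pieces from the 50k CharBPE vocabulary
```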
## Usage
Example code:
```python
from transformers import AutoTokenizer, AutoModel
tokenizer = AutoTokenizer.from_pretrained("allegro/herbert-large-cased")
model = AutoModel.from_pretrained("allegro/herbert-large-cased")
output = model(
**tokenizer.batch_encode_plus(
[
(
"A potem szedł środkiem drogi w kurzawie, bo zamiatał nogami, ślepy dziad prowadzony przez tłustego kundla na sznurku.",
"A potem leciał od lasu chłopak z butelką, ale ten ujrzawszy księdza przy drodze okrążył go z dala i biegł na przełaj pól do karczmy."
)
],
padding='longest',
add_special_tokens=True,
return_tensors='pt'
)
)
```
## License
CC BY 4.0
## Citation
If you use this model, please cite the following paper:
```
@inproceedings{mroczkowski-etal-2021-herbert,
title = "{H}er{BERT}: Efficiently Pretrained Transformer-based Language Model for {P}olish",
author = "Mroczkowski, Robert and
Rybak, Piotr and
Wr{\'o}blewska, Alina and
Gawlik, Ireneusz",
booktitle = "Proceedings of the 8th Workshop on Balto-Slavic Natural Language Processing",
month = apr,
year = "2021",
address = "Kiyv, Ukraine",
publisher = "Association for Computational Linguistics",
url = "https://www.aclweb.org/anthology/2021.bsnlp-1.1",
pages = "1--10",
}
```
## Authors
The model was trained by [**Machine Learning Research Team at Allegro**](https://ml.allegro.tech/) and [**Linguistic Engineering Group at Institute of Computer Science, Polish Academy of Sciences**](http://zil.ipipan.waw.pl/).
You can contact us at: <a href="mailto:[email protected]">[email protected]</a> | [
-0.14203456044197083,
-0.036547914147377014,
0.03166899085044861,
0.03605486452579498,
0.0053770290687680244,
0.0514938049018383,
-0.015600418671965599,
0.010225124657154083,
-0.020932266488671303,
-0.018017346039414406,
0.009806902147829533,
-0.02594299241900444,
0.08464493602514267,
0.08525363355875015,
0.02668592892587185,
0.05394943431019783,
0.03640202060341835,
0.12059476971626282,
-0.07961589097976685,
-0.08583669364452362,
0.08085809648036957,
0.0811944380402565,
0.03707015514373779,
-0.029132651165127754,
0.09726624935865402,
0.014248808845877647,
-0.05379028618335724,
-0.08014170080423355,
0.07722502201795578,
0.03660252317786217,
0.004425168968737125,
0.02320108376443386,
0.042304929345846176,
0.09754765033721924,
0.007718180771917105,
0.01996956393122673,
-0.00755819957703352,
0.008382788859307766,
-0.0229509100317955,
-0.00809125043451786,
-0.09208646416664124,
0.0036159842275083065,
-0.03617653623223305,
-0.030376112088561058,
0.10985274612903595,
0.005772473756223917,
-0.012382257729768753,
0.0392984002828598,
-0.10875915735960007,
-0.0485265888273716,
-0.08363323658704758,
-0.06864093989133835,
0.06058450788259506,
0.10058090090751648,
-0.07679489254951477,
-0.018006915226578712,
0.01629061996936798,
0.012425081804394722,
-0.016112234443426132,
-0.05530187115073204,
-0.1019028052687645,
-0.06760163605213165,
-0.07468128204345703,
0.0075363051146268845,
-0.06355240941047668,
0.027999941259622574,
-0.03720472753047943,
0.011484275572001934,
0.0043117874301970005,
-0.0035423508379608393,
-0.031013978645205498,
-0.00594743387773633,
-0.0295158289372921,
0.007312283385545015,
-0.00665073748677969,
-0.011913757771253586,
0.12550833821296692,
-0.05458255484700203,
0.058229200541973114,
-0.09056070446968079,
0.08316802978515625,
0.02200077474117279,
0.05881418287754059,
0.005362850148230791,
0.045156318694353104,
-0.008686703629791737,
0.0017897223588079214,
0.009574489668011665,
-0.05831844359636307,
0.04021356627345085,
-0.033895596861839294,
-0.09474871307611465,
0.04347068816423416,
-0.01777520775794983,
-0.030271513387560844,
-0.023761173710227013,
0.015260780230164528,
0.004336830228567123,
0.03886694833636284,
0.09449996054172516,
0.005094527266919613,
0.0015638216864317656,
0.02216346003115177,
-0.10515934973955154,
-0.047176260501146317,
-0.01668659970164299,
0.02159126102924347,
0.005611098371446133,
0.02297644130885601,
-0.06884118169546127,
0.04150773584842682,
0.011146768927574158,
-0.05202481150627136,
-0.09031209349632263,
0.030334198847413063,
-0.04086020588874817,
0.029481342062354088,
-0.026109905913472176,
0.002278855536133051,
0.09181337803602219,
-0.03258836641907692,
0.05192601680755615,
0.03801175206899643,
0.07883439213037491,
-0.002590914722532034,
0.03901774063706398,
0.02785489708185196,
7.447959757458861e-34,
0.03254249319434166,
0.04978182166814804,
-0.06999398022890091,
0.014270377345383167,
0.012188288383185863,
0.019837405532598495,
0.07274094223976135,
0.02219771407544613,
-0.04287354275584221,
-0.023816145956516266,
-0.022778507322072983,
0.047603100538253784,
-0.06081046536564827,
0.035202693194150925,
-0.0459723025560379,
-0.03243311494588852,
0.00796549767255783,
0.03152039274573326,
0.027192775160074234,
0.004220230970531702,
0.07710891962051392,
0.10533630847930908,
0.04092172533273697,
-0.06511692702770233,
-0.04581165686249733,
0.010997936129570007,
0.06674512475728989,
-0.1536640226840973,
-0.019128041341900826,
0.05019499734044075,
-0.08861863613128662,
0.03821272403001785,
-0.028429726138710976,
0.01055504847317934,
0.024019163101911545,
-0.017566096037626266,
-0.029377205297350883,
-0.09167956560850143,
0.011381245218217373,
-0.1002231016755104,
-0.015975559130311012,
0.0049330806359648705,
-0.006839131470769644,
-0.01806808076798916,
-0.03083387017250061,
-0.013377673923969269,
0.00623163441196084,
-0.00017211194790434092,
0.017343344166874886,
-0.02328084222972393,
0.04451899230480194,
0.04911142960190773,
-0.11719104647636414,
0.012591998092830181,
0.051574841141700745,
0.04326995462179184,
0.05232834815979004,
0.00749651063233614,
0.06599525362253189,
0.011463074944913387,
0.008050493896007538,
0.05632155388593674,
0.0099682891741395,
0.01173620019108057,
0.09974980354309082,
-0.05406598001718521,
-0.054848287254571915,
-0.026425637304782867,
0.007820317521691322,
-0.007358226925134659,
-0.021508604288101196,
-0.05756581947207451,
-0.05254299193620682,
0.04065351560711861,
0.004557131789624691,
-0.03117447718977928,
0.06804665923118591,
-0.010014238767325878,
-0.026308415457606316,
0.019656188786029816,
-0.06133381649851799,
0.012876858934760094,
0.0555000863969326,
-0.0729685053229332,
-0.06497609615325928,
0.028274893760681152,
0.0749349370598793,
-0.0657978281378746,
0.0017999425763264298,
-0.04620194807648659,
0.05058958753943443,
-0.05352730304002762,
0.04781545326113701,
0.0015785156283527613,
0.010469743050634861,
-2.3527901467372102e-33,
-0.020825808867812157,
0.07204509526491165,
-0.10495175421237946,
0.04637075960636139,
-0.05326742306351662,
-0.10837194323539734,
0.00012081715249223635,
0.0745355486869812,
-0.024062154814600945,
-0.004196112044155598,
-0.007390628568828106,
-0.08999226987361908,
0.031192345544695854,
-0.006715018767863512,
0.07200760394334793,
-0.04334395006299019,
0.027097545564174652,
0.03314121440052986,
0.03812722861766815,
0.11116154491901398,
-0.011948089115321636,
0.04718929901719093,
-0.1392146646976471,
0.04141674190759659,
-0.03964544087648392,
0.012111112475395203,
-0.055561717599630356,
0.06481537967920303,
0.03190579637885094,
0.043610114604234695,
-0.05644514784216881,
0.053392503410577774,
-0.0204145647585392,
-0.003477393416687846,
-0.07230773568153381,
0.02040831744670868,
-0.06156982108950615,
0.017107762396335602,
0.012879899702966213,
0.026085440069437027,
0.006001119036227465,
-0.003229322377592325,
-0.0953303799033165,
0.027416910976171494,
-0.0031241036485880613,
-0.020129511132836342,
-0.13124418258666992,
-0.04700645059347153,
-0.0063667516224086285,
-0.05853654816746712,
0.008689288049936295,
0.04713738337159157,
-0.10839096456766129,
-0.05469517037272453,
-0.03037404641509056,
-0.06423033028841019,
-0.011970458552241325,
-0.11968420445919037,
-0.007120716385543346,
-0.013376433402299881,
-0.018891556188464165,
0.0005595398833975196,
0.04596465826034546,
-0.0522301159799099,
0.05727267637848854,
0.013269765302538872,
-0.018611250445246696,
0.03267884626984596,
-0.007710522972047329,
-0.05071243643760681,
0.06312885135412216,
-0.024330036714673042,
0.052970293909311295,
0.0528891384601593,
-0.012035435996949673,
-0.0053755068220198154,
-0.015194444917142391,
-0.04652617126703262,
-0.07068560272455215,
-0.06676953285932541,
-0.03501344472169876,
-0.027700096368789673,
0.02242751233279705,
0.0422387570142746,
0.03276323527097702,
0.03888414427638054,
0.018114294856786728,
0.0008928534807637334,
0.009689210914075375,
0.05537816882133484,
-0.017673423513770103,
0.0362422876060009,
0.026776038110256195,
0.08456326276063919,
0.019622692838311195,
-5.4042057939795995e-8,
-0.05892539769411087,
0.03455580025911331,
-0.033008065074682236,
0.04426390305161476,
-0.01955721341073513,
-0.10295925289392471,
0.0016684773145243526,
-0.016140395775437355,
-0.075008824467659,
-0.04347256198525429,
0.00020396223408170044,
0.017668871209025383,
-0.041206441819667816,
0.005449704825878143,
0.0030232216231524944,
0.07520798593759537,
-0.02465267851948738,
0.04249660670757294,
0.005509066395461559,
0.038381874561309814,
0.0570090152323246,
0.053272638469934464,
-0.022317351773381233,
-0.010681620799005032,
0.0006696538184769452,
-0.010766403749585152,
-0.015554382465779781,
0.08254379779100418,
0.04416407644748688,
-0.011167650111019611,
-0.015010611154139042,
0.06102575361728668,
-0.04105492681264877,
0.04250357672572136,
0.05181721970438957,
0.11626722663640976,
0.004962723236531019,
-0.08994409441947937,
-0.08284915238618851,
0.07329069077968597,
0.06473152339458466,
0.036153409630060196,
-0.04227485507726669,
0.014329197816550732,
0.06973741948604584,
0.0028427764773368835,
-0.023326147347688675,
-0.16979847848415375,
0.038177281618118286,
0.002649078844115138,
0.04157084971666336,
-0.002000844804570079,
-0.06817672401666641,
0.051731642335653305,
0.06377775967121124,
0.03610086813569069,
0.007628957740962505,
-0.03881698101758957,
-0.007749792654067278,
0.0450982041656971,
-0.021753117442131042,
0.030257679522037506,
0.05142441391944885,
-0.00507256668061018
] |
trev/DialoGPT-small-MLP | e36e9fb34f98e0006f4ebcc755fe0b486708052a | 2022-04-05T17:10:13.000Z | [
"pytorch",
"gpt2",
"text-generation",
"transformers",
"conversational"
] | conversational | false | trev | null | trev/DialoGPT-small-MLP | 2,872 | null | transformers | ---
tags:
- conversational
---
# My Little Pony DialoGPT Model | [
-0.030925245955586433,
-0.0667312815785408,
0.0515555664896965,
-0.0004672825161833316,
0.009497291408479214,
-0.03412119671702385,
0.14262604713439941,
0.009501416236162186,
0.09468415379524231,
-0.0341334268450737,
0.001762373372912407,
-0.0036891691852360964,
0.018389960750937462,
-0.018122805282473564,
-0.013056484051048756,
0.039726193994283676,
0.007670101244002581,
-0.06313437968492508,
-0.05390040576457977,
0.05102759227156639,
-0.046507544815540314,
0.09235697984695435,
0.03416973352432251,
0.04042116925120354,
-0.038523055613040924,
0.015842661261558533,
-0.08083151280879974,
0.01906435191631317,
0.029642779380083084,
-0.004535181913524866,
-0.0030444811563938856,
0.07471637427806854,
0.07811551541090012,
0.02371581830084324,
-0.0365368016064167,
0.014628861099481583,
0.07749076932668686,
0.02642679773271084,
0.015315860509872437,
-0.018337471410632133,
-0.05292299762368202,
-0.059298112988471985,
-0.0722532793879509,
0.0030398692470043898,
0.04350806772708893,
-0.04798077046871185,
-0.07841894030570984,
-0.09195487201213837,
-0.059556107968091965,
0.05605113133788109,
-0.08632728457450867,
-0.015901600942015648,
-0.0011636741692200303,
0.08849317580461502,
-0.02956574037671089,
0.04290112480521202,
-0.03137486055493355,
-0.037969108670949936,
0.04978710040450096,
0.03617309778928757,
-0.057204198092222214,
-0.025391528382897377,
-0.07952264696359634,
0.07517911493778229,
-0.028084341436624527,
0.03713306039571762,
-0.07503236830234528,
0.031954675912857056,
-0.015318748541176319,
0.06116543710231781,
-0.03046884387731552,
0.014343345537781715,
0.018935179337859154,
-0.0022321282885968685,
-0.0207742340862751,
0.01542277354747057,
-0.02042503096163273,
-0.05608460307121277,
0.07327596098184586,
0.023309415206313133,
-0.03168119862675667,
-0.08561468869447708,
-0.0066165924072265625,
-0.009954828768968582,
0.008159180171787739,
-0.05730457231402397,
-0.0019259805558249354,
-0.07065334916114807,
-0.016197659075260162,
-0.02776564098894596,
-0.05264158546924591,
-0.07052497565746307,
0.07031667232513428,
0.026983484625816345,
-0.011096101254224777,
-0.002728452207520604,
-0.012805740348994732,
-0.10353650897741318,
-0.07676861435174942,
0.10457973182201385,
-0.013147368095815182,
0.05074827000498772,
0.05420645698904991,
-0.09753793478012085,
0.042348671704530716,
0.03143967315554619,
-0.024681100621819496,
-0.00115429877769202,
0.032300859689712524,
-0.02754727192223072,
-0.014644100330770016,
-0.05458437278866768,
0.028842130675911903,
-0.05783725157380104,
0.11609211564064026,
-0.06485113501548767,
0.028886687010526657,
-0.028935402631759644,
0.0518714003264904,
-0.0015292082680389285,
-0.03351307287812233,
0.03841778263449669,
-0.07078905403614044,
-0.042884938418865204,
-0.031160999089479446,
-0.018914474174380302,
-0.0651300773024559,
-2.9121698883981274e-33,
0.09233066439628601,
0.023714430630207062,
0.038434192538261414,
0.12082210183143616,
0.05503836274147034,
0.08062127232551575,
-0.0665009468793869,
-0.024715077131986618,
-0.018706589937210083,
-0.009103065356612206,
0.08167794346809387,
-0.07488326728343964,
-0.08590053021907806,
0.04503504931926727,
0.0165875107049942,
-0.016642596572637558,
-0.07116436213254929,
0.0003367823373991996,
0.01181489136070013,
-0.034530866891145706,
-0.02877962216734886,
0.0698576346039772,
-0.005953890737146139,
0.03583903983235359,
0.06330496072769165,
0.03663724660873413,
0.05387977883219719,
-0.12241809815168381,
-0.05521300435066223,
0.08380255103111267,
-0.04944439232349396,
0.005489031784236431,
-0.011301175691187382,
-0.015975773334503174,
-0.04813368245959282,
-0.01863977313041687,
0.005147566553205252,
-0.0629267543554306,
0.014676491729915142,
-0.048907242715358734,
-0.06712102144956589,
-0.06374994665384293,
-0.02691861428320408,
-0.038057439029216766,
-0.030522994697093964,
0.05650193616747856,
0.024847103282809258,
0.062121618539094925,
0.013203424401581287,
0.028465410694479942,
-0.01798820123076439,
0.020669391378760338,
0.000555266800802201,
-0.03151829168200493,
-0.019994433969259262,
-0.05778811126947403,
-0.06303932517766953,
0.0028158840723335743,
0.005706196650862694,
-0.041086506098508835,
0.026361528784036636,
0.036812249571084976,
0.05432382598519325,
-0.11087572574615479,
0.11578542739152908,
0.006664416287094355,
-0.044721249490976334,
0.004147283732891083,
0.031217727810144424,
0.01737348549067974,
-0.04784117266535759,
0.018124084919691086,
-0.056917257606983185,
0.010700545273721218,
0.01598125323653221,
0.03467543423175812,
-0.03828056529164314,
-0.0535908117890358,
0.07876607030630112,
0.052319012582302094,
-0.023915855213999748,
-0.07726052403450012,
-0.06478641927242279,
-0.008179118856787682,
-0.013485956937074661,
-0.050301436334848404,
0.05795169621706009,
-0.08825385570526123,
0.035401634871959686,
-0.005837409757077694,
0.021544819697737694,
0.02652718313038349,
-0.06360004097223282,
0.00963806826621294,
-0.07520757615566254,
3.0535819047197967e-34,
0.03105427324771881,
-0.01736205816268921,
-0.10030990093946457,
0.09882361441850662,
0.011242508888244629,
-0.03249845653772354,
0.05677570402622223,
0.09302391856908798,
0.012583566829562187,
0.009758755564689636,
-0.03851699084043503,
0.03591317683458328,
-0.025249378755688667,
-0.019311299547553062,
0.14311915636062622,
0.04345237836241722,
0.025493133813142776,
-0.06993958353996277,
0.05890553817152977,
-0.007474839687347412,
0.056647468358278275,
-0.019255148246884346,
-0.1483968198299408,
0.05349098518490791,
0.01539295632392168,
-0.0005484545836225152,
-0.0267828106880188,
0.034678682684898376,
0.12455705553293228,
-0.04945798218250275,
-0.06391593813896179,
0.02409639209508896,
0.01861616224050522,
-0.021958624944090843,
0.0002578639250714332,
0.019882526248693466,
0.0054319375194609165,
-0.029513521119952202,
0.0028606937266886234,
0.022517425939440727,
0.04051142930984497,
-0.06052229553461075,
0.03773723915219307,
0.004490766208618879,
-0.046031732112169266,
-0.0785866528749466,
-0.03314809128642082,
-0.03469223156571388,
-0.03973260894417763,
0.018424736335873604,
0.005669959355145693,
-0.01670069247484207,
-0.058578480035066605,
-0.04260982573032379,
-0.06812400370836258,
0.019397422671318054,
0.029628265649080276,
-0.04889471456408501,
-0.022527877241373062,
0.01404652837663889,
-0.07015426456928253,
-0.04230883717536926,
0.04361887648701668,
0.005003228317946196,
0.004275694955140352,
-0.049343474209308624,
-0.016691742464900017,
-0.0025428279768675566,
-0.0382700115442276,
-0.07170066982507706,
0.11139824986457825,
0.06204882636666298,
0.032232947647571564,
-0.011475015431642532,
0.051047034561634064,
-0.0036802445538342,
0.05293374881148338,
-0.014604154974222183,
0.04581018537282944,
-0.06992044299840927,
0.008508623577654362,
-0.01784614846110344,
0.05582904443144798,
0.0487581230700016,
0.030568107962608337,
-0.03169967234134674,
-0.0004891676944680512,
0.08300075680017471,
-0.020233958959579468,
0.029726775363087654,
0.029753578826785088,
0.03630366548895836,
0.006874775979667902,
0.05526427552103996,
-0.016868725419044495,
-2.6356648419323392e-8,
-0.047687385231256485,
-0.055045951157808304,
0.02042006514966488,
0.07795897126197815,
0.04987761005759239,
0.03141539916396141,
0.018928878009319305,
0.0003054385306313634,
-0.05609472468495369,
0.027992745861411095,
0.06510057300329208,
0.08017462491989136,
-0.021279755979776382,
0.06556400656700134,
-0.015212527476251125,
0.054913606494665146,
-0.03914589062333107,
-0.006323998793959618,
-0.011887908913195133,
-0.029335254803299904,
0.05345582589507103,
-0.0036612253170460463,
-0.06092361360788345,
0.01874227449297905,
0.027089299634099007,
-0.011778186075389385,
-0.07078958302736282,
0.10051001608371735,
-0.0631006583571434,
0.045930907130241394,
0.06017385795712471,
0.06456775963306427,
-0.17505872249603271,
-0.020190563052892685,
-0.0652620866894722,
-0.0007144108531065285,
-0.05998566746711731,
-0.03983907774090767,
-0.0057787480764091015,
-0.06925013661384583,
0.009451725520193577,
0.0254322811961174,
-0.03562042489647865,
-0.012631289660930634,
0.09396415948867798,
0.027966195717453957,
0.020700491964817047,
-0.13163240253925323,
-0.02488819882273674,
-0.0035052832681685686,
-0.06441791355609894,
-0.018454821780323982,
0.11591392755508423,
0.0401378832757473,
0.01281844824552536,
0.0304755587130785,
0.05284806340932846,
0.05750640481710434,
0.040994998067617416,
0.03603677079081535,
0.05482205003499985,
0.12468396127223969,
0.024386407807469368,
-0.0539860799908638
] |
voidful/bart-eqg-question-generator | e85b63236f244e0735bca7407ddb0cc76650061b | 2021-08-24T11:00:51.000Z | [
"pytorch",
"bart",
"text2text-generation",
"en",
"dataset:eqg-race",
"transformers",
"question",
"generation",
"seq2seq",
"autotrain_compatible"
] | text2text-generation | false | voidful | null | voidful/bart-eqg-question-generator | 2,861 | 7 | transformers | ---
language: en
tags:
- bart
- question
- generation
- seq2seq
datasets:
- eqg-race
metrics:
- bleu
- rouge
pipeline_tag: text2text-generation
widget:
- text: "When you ' re having a holiday , one of the main questions to ask is which hotel or apartment to choose . However , when it comes to France , you have another special choice : treehouses . In France , treehouses are offered to travelers as a new choice in many places . The price may be a little higher , but you do have a chance to _ your childhood memories . Alain Laurens , one of France ' s top treehouse designers , said , ' Most of the people might have the experience of building a den when they were young . And they like that feeling of freedom when they are children . ' Its fairy - tale style gives travelers a special feeling . It seems as if they are living as a forest king and enjoying the fresh air in the morning . Another kind of treehouse is the ' star cube ' . It gives travelers the chance of looking at the stars shining in the sky when they are going to sleep . Each ' star cube ' not only offers all the comfortable things that a hotel provides for travelers , but also gives them a chance to look for stars by using a telescope . The glass roof allows you to look at the stars from your bed . "
---
# voidful/bart-eqg-question-generator
## Model description
This model is a sequence-to-sequence question generator that takes only the context as input and generates a question as output.
It is based on a pretrained `bart-base` model and trained on the [EQG-RACE](https://github.com/jemmryx/EQG-RACE) corpus.
## Intended uses & limitations
The model is trained to generate examination-style multiple-choice questions.
#### How to use
The model takes the context as its input sequence and generates a question as the output sequence. The maximum sequence length is 1024 tokens. Inputs should be organised into the following format:
```
context
```
The input sequence can then be encoded and passed as the `input_ids` argument in the model's `generate()` method.
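As a concrete illustration, here is a minimal sketch using the generic `transformers` seq2seq classes; the generation settings (`max_length`, `num_beams`) and the truncated example context are illustrative assumptions, not values from this card.
```python
# Minimal sketch — generation hyper-parameters below are illustrative assumptions.
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

model_id = "voidful/bart-eqg-question-generator"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForSeq2SeqLM.from_pretrained(model_id)

context = "When you're having a holiday, one of the main questions to ask is which hotel or apartment to choose..."
inputs = tokenizer(context, max_length=1024, truncation=True, return_tensors="pt")

# Pass the encoded context as input_ids to generate()
output_ids = model.generate(inputs["input_ids"], max_length=64, num_beams=4, early_stopping=True)
question = tokenizer.decode(output_ids[0], skip_special_tokens=True)
print(question)
```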
| [
0.07963453978300095,
0.026097094640135765,
0.05187145248055458,
0.11433644592761993,
0.027881858870387077,
-0.021305743604898453,
0.04337453097105026,
-0.04059379920363426,
0.09576089680194855,
0.0064481995068490505,
-0.013216019608080387,
-0.03918364271521568,
0.03723224624991417,
-0.021884195506572723,
0.06583074480295181,
-0.019014902412891388,
0.018994592130184174,
-0.03624841570854187,
0.028391312807798386,
0.05030849203467369,
0.0035748425871133804,
-0.07489096373319626,
-0.0031010883394628763,
0.06364648044109344,
0.0614258274435997,
-0.05383441597223282,
-0.06811121106147766,
0.03812252730131149,
-0.0007743779569864273,
-0.04771738499403,
0.024845972657203674,
0.10675419867038727,
-0.03621348738670349,
0.03754504397511482,
-0.020473822951316833,
0.016406621783971786,
0.03917717561125755,
-0.08219201117753983,
-0.007889139465987682,
0.04773685708642006,
0.013453158549964428,
0.004869508091360331,
0.0073414272628724575,
-0.04257552698254585,
-0.040604788810014725,
0.029266932979226112,
-0.08742620050907135,
-0.017023950815200806,
-0.03957878425717354,
-0.017902128398418427,
-0.0337924025952816,
0.009381810203194618,
-0.04912066459655762,
-0.02771708369255066,
0.029927348718047142,
0.09268376976251602,
-0.0021680344361811876,
-0.05040592700242996,
0.09152098000049591,
0.01644163578748703,
0.0628010556101799,
-0.0130785396322608,
-0.0004730031650979072,
0.03759513795375824,
0.024434126913547516,
-0.05268002673983574,
-0.10923106968402863,
0.03778295964002609,
-0.07808970659971237,
-0.039791155606508255,
-0.11362089216709137,
-0.011161264963448048,
-0.037664007395505905,
-0.0030616228468716145,
0.048282425850629807,
-0.0273490771651268,
0.028081953525543213,
-0.03511861339211464,
0.01828332431614399,
-0.029178883880376816,
-0.021163782104849815,
0.018739186227321625,
-0.08334438502788544,
0.028437038883566856,
-0.06181685999035835,
-0.025656748563051224,
0.053459975868463516,
0.0554373525083065,
-0.05848313122987747,
-0.015565023757517338,
-0.03994913771748543,
-0.008037874475121498,
-0.05264374241232872,
0.042150020599365234,
-0.024024702608585358,
0.04464031755924225,
-0.06418630480766296,
-0.02386533096432686,
0.014931145124137402,
0.008106128312647343,
0.02825145609676838,
0.02178979106247425,
0.1265469789505005,
0.01065809652209282,
-0.10823502391576767,
-0.06587979197502136,
-0.04854575917124748,
-0.0050005619414150715,
0.015808362513780594,
-0.037064164876937866,
-0.0703476294875145,
-0.042944084852933884,
-0.010689960792660713,
0.003875225316733122,
-0.07550840079784393,
0.03099786676466465,
0.03445088490843773,
-0.07866843044757843,
0.07716043293476105,
0.020790541544556618,
0.04561067745089531,
0.00868218857795,
0.012026796117424965,
-0.037143126130104065,
-0.019398048520088196,
-0.04162846878170967,
0.005671282764524221,
4.1325950312222006e-34,
0.01186326052993536,
0.07956697046756744,
0.04367141053080559,
0.045092951506376266,
0.11798898130655289,
0.012784266844391823,
-0.1188158169388771,
0.07942754030227661,
-0.028601521626114845,
0.04287625104188919,
-0.02413056790828705,
-0.02020810730755329,
-0.034250833094120026,
0.034942738711833954,
0.10981524735689163,
0.014978432096540928,
-0.07636792212724686,
-0.030197342857718468,
-0.04939195513725281,
0.015832867473363876,
0.0005072695785202086,
0.0620303675532341,
0.018607208505272865,
0.08993849903345108,
0.06538452208042145,
-0.043398093432188034,
0.06938614696264267,
-0.011945204809308052,
-0.037121426314115524,
-0.0022907478269189596,
0.0009781650733202696,
-0.02017655223608017,
-0.01690271869301796,
-0.03180290013551712,
-0.03101607970893383,
0.029588332399725914,
-0.011102222837507725,
-0.04837815463542938,
-0.07835675776004791,
-0.07198679447174072,
-0.04086475446820259,
-0.007132573518902063,
0.012035442516207695,
0.08458776026964188,
0.010054442100226879,
0.09620583057403564,
0.06525079160928726,
-0.03812771290540695,
-0.05116581171751022,
0.02659025229513645,
-0.029911693185567856,
0.02426040917634964,
-0.06859613209962845,
-0.014330564066767693,
-0.013364228419959545,
0.022759290412068367,
0.020906690508127213,
0.01386855449527502,
-0.006073580589145422,
-0.04367177188396454,
0.0415085144340992,
-0.07763965427875519,
0.006803716067224741,
-0.02335391566157341,
-0.012337122112512589,
0.07792460173368454,
0.0731852725148201,
0.04915042594075203,
0.04640044644474983,
-0.07596369832754135,
-0.023156465962529182,
0.027838775888085365,
0.031364548951387405,
-0.011895856820046902,
0.0006082476465962827,
0.06518290191888809,
-0.016316207125782967,
0.010212455876171589,
0.04333255812525749,
-0.03464516997337341,
-0.049340326339006424,
0.010407854802906513,
0.022809019312262535,
-0.004717092029750347,
-0.0037076049484312534,
-0.11845587939023972,
0.07351677864789963,
0.01805739291012287,
0.01072778645902872,
0.018096432089805603,
-0.020284729078412056,
-0.04831667244434357,
0.06967078149318695,
-0.03550993278622627,
-0.05911732092499733,
-3.22217511568782e-33,
-0.004784072749316692,
-0.09210105240345001,
-0.023939277976751328,
0.004579135682433844,
0.0988641157746315,
-0.0009557502344250679,
-0.07041320949792862,
-0.11163821071386337,
0.06774376332759857,
-0.04140537604689598,
-0.07203546166419983,
0.03371446207165718,
0.09515688568353653,
-0.025302501395344734,
0.06790119409561157,
-0.020410601049661636,
0.018041417002677917,
-0.010831311345100403,
-0.09044159948825836,
0.06945297867059708,
0.04919707030057907,
0.13891534507274628,
-0.11599767953157425,
0.023700393736362457,
-0.05768793821334839,
0.043565645813941956,
-0.041723527014255524,
-0.03398817405104637,
-0.06141641363501549,
0.008220468647778034,
-0.06679651141166687,
-0.06882886588573456,
-0.010799078270792961,
0.01197685394436121,
0.01324159000068903,
0.013026773929595947,
-0.027408840134739876,
-0.0239957794547081,
-0.07252515852451324,
0.13329556584358215,
-0.039843522012233734,
-0.0395287461578846,
-0.010309718549251556,
0.015271028503775597,
0.050328128039836884,
0.007965144701302052,
-0.05266229063272476,
-0.08420485258102417,
-0.019471175968647003,
0.0303089190274477,
0.04964059963822365,
0.08190395683050156,
-0.08035466074943542,
-0.007988710887730122,
-0.06431280076503754,
-0.004292995668947697,
-0.02668541669845581,
0.027500806376338005,
0.02771436795592308,
0.058616653084754944,
-0.011607656255364418,
0.06505206972360611,
0.0016400667373090982,
0.01385012548416853,
-0.07499989122152328,
-0.09729625284671783,
-0.1048145517706871,
-0.0532357394695282,
-0.021769961342215538,
-0.02692943625152111,
0.04213710129261017,
-0.026401177048683167,
-0.06658102571964264,
0.05422021076083183,
-0.05558390915393829,
0.010513083077967167,
0.019745469093322754,
-0.008019810542464256,
-0.03109045699238777,
-0.0028228654991835356,
-0.07317172735929489,
0.012724468484520912,
0.038525983691215515,
0.05780801177024841,
0.03844614326953888,
-0.07896705716848373,
-0.045173853635787964,
0.030928242951631546,
0.0050248620100319386,
-0.036145906895399094,
-0.014684895053505898,
0.05697646737098694,
-0.06864950060844421,
0.036148034036159515,
0.016598528251051903,
-6.42239399439859e-8,
-0.023790201172232628,
0.04778086766600609,
-0.02148892730474472,
0.004683322738856077,
-0.016662567853927612,
-0.15533263981342316,
0.07487404346466064,
0.09374725073575974,
-0.042926497757434845,
0.016831016167998314,
-0.01535575557500124,
0.04203195497393608,
-0.0196840837597847,
-0.006907925475388765,
0.04384717345237732,
0.03837316855788231,
-0.007599036209285259,
-0.0037083274219185114,
-0.0013199049280956388,
0.11476866900920868,
0.051579080522060394,
0.022832855582237244,
0.001317445421591401,
0.006975783035159111,
-0.060328658670186996,
-0.03190526366233826,
-0.03520846366882324,
0.013356771320104599,
0.02001248113811016,
0.020096734166145325,
0.08825965970754623,
0.06898893415927887,
-0.011828320100903511,
-0.018938615918159485,
-0.021114101633429527,
0.07154885679483414,
-0.005861981306225061,
-0.00863652490079403,
0.03551116958260536,
0.03302662819623947,
0.0720289796590805,
-0.1472688466310501,
-0.040484607219696045,
-0.007326113060116768,
-0.06468625366687775,
0.043751560151576996,
-0.020993871614336967,
-0.015434286557137966,
0.028546543791890144,
0.07672927528619766,
-0.08124804496765137,
0.01049888040870428,
-0.10137759149074554,
-0.03324400633573532,
0.03197941556572914,
-0.02315618470311165,
-0.048332687467336655,
-0.013624221086502075,
0.11685557663440704,
-0.03184284642338753,
0.12230198085308075,
-0.026371179148554802,
-0.02391216531395912,
0.03770749643445015
] |
mrm8488/bert-tiny-5-finetuned-squadv2 | f586274a9919ef3ca801d3c7f3f30ee6ad7515d8 | 2022-01-18T20:19:49.000Z | [
"pytorch",
"jax",
"bert",
"question-answering",
"en",
"arxiv:1908.08962",
"transformers",
"QA",
"autotrain_compatible"
] | question-answering | false | mrm8488 | null | mrm8488/bert-tiny-5-finetuned-squadv2 | 2,860 | 3 | transformers | ---
language: en
thumbnail:
tags:
- QA
---
# BERT-Tiny ([5](https://huggingface.co/google/bert_uncased_L-12_H-128_A-2)) fine-tuned on SQuAD v2
[BERT-Tiny](https://huggingface.co/google/bert_uncased_L-12_H-128_A-2) created by [Google Research](https://github.com/google-research) and fine-tuned on [SQuAD 2.0](https://rajpurkar.github.io/SQuAD-explorer/) for the **Q&A** downstream task.
**Model size** (after training): **24.33 MB**
## Details of BERT-Tiny and its 'family' (from their documentation)
Released on March 11th, 2020
This model is part of the 24 smaller BERT models (English only, uncased, trained with WordPiece masking) referenced in [Well-Read Students Learn Better: On the Importance of Pre-training Compact Models](https://arxiv.org/abs/1908.08962).
The smaller BERT models are intended for environments with restricted computational resources. They can be fine-tuned in the same manner as the original BERT models. However, they are most effective in the context of knowledge distillation, where the fine-tuning labels are produced by a larger and more accurate teacher.
## Details of the downstream task (Q&A) - Dataset
[SQuAD2.0](https://rajpurkar.github.io/SQuAD-explorer/) combines the 100,000 questions in SQuAD1.1 with over 50,000 unanswerable questions written adversarially by crowdworkers to look similar to answerable ones. To do well on SQuAD2.0, systems must not only answer questions when possible, but also determine when no answer is supported by the paragraph and abstain from answering.
| Dataset | Split | # samples |
| -------- | ----- | --------- |
| SQuAD2.0 | train | 130k |
| SQuAD2.0 | eval | 12.3k |
## Model training
The model was trained on a Tesla P100 GPU and 25GB of RAM.
The script for fine-tuning can be found [here](https://github.com/huggingface/transformers/blob/master/examples/question-answering/run_squad.py)
## Results:
| Metric | # Value |
| ------ | --------- |
| **EM** | **57.12** |
| **F1** | **60.86** |
| Model | EM | F1 score | SIZE (MB) |
| ----------------------------------------------------------------------------------------- | --------- | --------- | --------- |
| [bert-tiny-finetuned-squadv2](https://huggingface.co/mrm8488/bert-tiny-finetuned-squadv2) | 48.60 | 49.73 | **16.74** |
| [bert-tiny-5-finetuned-squadv2](https://huggingface.co/mrm8488/bert-tiny-5-finetuned-squadv2) | **57.12** | **60.86** | 24.34 |
## Model in action
Fast usage with **pipelines**:
```python
from transformers import pipeline
qa_pipeline = pipeline(
"question-answering",
model="mrm8488/bert-tiny-5-finetuned-squadv2",
tokenizer="mrm8488/bert-tiny-5-finetuned-squadv2"
)
qa_pipeline({
'context': "Manuel Romero has been working hardly in the repository hugginface/transformers lately",
'question': "Who has been working hard for hugginface/transformers lately?"
})
```
> Created by [Manuel Romero/@mrm8488](https://twitter.com/mrm8488) | [LinkedIn](https://www.linkedin.com/in/manuel-romero-cs/)
> Made with <span style="color: #e25555;">♥</span> in Spain
| [
-0.10361368209123611,
-0.023386165499687195,
0.027026863768696785,
0.05271655693650246,
0.016755573451519012,
0.0527045838534832,
0.0006054543773643672,
0.037210144102573395,
-0.0443299300968647,
-0.006996506359428167,
0.05723724141716957,
-0.003860742086544633,
0.00727422209456563,
0.08365564793348312,
0.004148213192820549,
0.028984004631638527,
0.0871497094631195,
-0.09113726019859314,
-0.10832787305116653,
-0.034603800624608994,
0.02702314779162407,
0.004048147238790989,
0.07973527163267136,
-0.05097922682762146,
0.014721177518367767,
-0.03419579938054085,
-0.11240939050912857,
-0.041765738278627396,
0.060760725289583206,
-0.033856309950351715,
-0.017046591266989708,
0.05007389932870865,
0.07538694888353348,
0.06391202658414841,
0.02318652719259262,
0.09574401378631592,
0.03792526200413704,
-0.017271803691983223,
0.0021641033235937357,
0.0177066158503294,
-0.035217780619859695,
-0.04618930071592331,
-0.03518258035182953,
-0.01081620343029499,
0.08233737200498581,
-0.0013735294342041016,
-0.034043509513139725,
-0.03112172707915306,
-0.06455176323652267,
-0.07703886926174164,
-0.09585931897163391,
-0.08546516299247742,
0.032499849796295166,
0.051969338208436966,
0.019656984135508537,
0.043263036757707596,
-0.00933053344488144,
-0.05782777816057205,
-0.0754559263586998,
-0.03400423377752304,
-0.08352666348218918,
-0.02382647804915905,
-0.01931769773364067,
-0.010394169017672539,
-0.05940756946802139,
0.028077291324734688,
0.004332218784838915,
-0.019602835178375244,
0.031863901764154434,
0.009439712390303612,
0.010208312422037125,
0.022469092160463333,
-0.004782374016940594,
0.013443107716739178,
0.03442998602986336,
-0.04733136296272278,
0.03774983435869217,
0.01929267682135105,
0.02926967479288578,
-0.10107401758432388,
0.003980887588113546,
-0.04079257696866989,
0.030791401863098145,
-0.00027084574685432017,
0.053480587899684906,
-0.01282584574073553,
0.04602312669157982,
-0.05310054123401642,
-0.026454981416463852,
0.0044036321341991425,
-0.03721778094768524,
-0.03623487427830696,
0.03974444419145584,
0.013905897736549377,
0.021996445953845978,
-0.021232832223176956,
0.05288391560316086,
-0.0009158963221125305,
-0.05059944838285446,
0.04484950006008148,
0.09693487733602524,
0.004943474195897579,
0.05931514874100685,
0.00768694281578064,
-0.0013699709670618176,
0.05174155905842781,
0.03910621628165245,
0.04114951938390732,
0.0788523480296135,
-0.0636223554611206,
-0.017358271405100822,
0.013648457825183868,
-0.02600458823144436,
-0.035090234130620956,
-0.027164820581674576,
-0.016344917938113213,
-0.004070646595209837,
0.0456412099301815,
0.03674136474728584,
0.12289897352457047,
0.04631194472312927,
-0.011449883691966534,
-0.02406458556652069,
-0.05642182379961014,
-0.004836489446461201,
0.06244993582367897,
-0.035822972655296326,
-1.1372005101278399e-33,
0.0819731205701828,
0.03880978003144264,
-0.03125752881169319,
-0.008790829218924046,
0.055615488439798355,
-0.01261445228010416,
0.04306017607450485,
0.022999968379735947,
-0.05965728685259819,
0.023519037291407585,
-0.038882311433553696,
0.030224774032831192,
-0.07559791952371597,
0.06414143741130829,
0.015624845400452614,
-0.019879134371876717,
-0.06040029600262642,
0.05372266098856926,
0.027754375711083412,
0.00715651735663414,
0.05730970948934555,
0.028371382504701614,
-0.044273924082517624,
-0.09336572885513306,
0.02810894139111042,
-0.001693960395641625,
0.07233327627182007,
-0.04603847488760948,
-0.036640968173742294,
0.07442749291658401,
-0.17738518118858337,
0.049891721457242966,
-0.04847574234008789,
-0.007102618925273418,
-0.05867921561002731,
-0.014322170056402683,
-0.061905063688755035,
-0.05482735112309456,
0.04615388810634613,
-0.05148285999894142,
0.017202848568558693,
0.050676289945840836,
-0.003672731574624777,
-0.08807553350925446,
-0.03199080377817154,
-0.06221340224146843,
0.07130944728851318,
-0.0060519082471728325,
0.000850887387059629,
-0.04270029813051224,
0.04901236295700073,
-0.04848598688840866,
-0.04111422225832939,
-0.06431270390748978,
0.007031388580799103,
0.04278462007641792,
0.1324124038219452,
0.058268509805202484,
-0.024540448561310768,
0.019865812733769417,
0.03952895104885101,
-0.004480831325054169,
0.03235825523734093,
0.06592980027198792,
0.0311739444732666,
-0.020107291638851166,
-0.031045787036418915,
0.011473950929939747,
-0.032351650297641754,
-0.002419265452772379,
-0.04092618077993393,
-0.03678074851632118,
0.06394010782241821,
0.01392842922359705,
0.06327597796916962,
-0.11090891808271408,
0.0554828941822052,
-0.06927360594272614,
-0.055081259459257126,
-0.012784059159457684,
0.034296512603759766,
0.03951401263475418,
-0.048918716609478,
-0.004332350566983223,
-0.09447057545185089,
-0.1019832044839859,
0.0816737711429596,
-0.08917220681905746,
-0.0044684577733278275,
0.01819615066051483,
-0.018411878496408463,
-0.04920607805252075,
-0.01956486888229847,
-0.02220671996474266,
-0.07150596380233765,
-1.7190051712947183e-34,
0.07368499785661697,
0.011700193397700787,
-0.05563654005527496,
0.04566206783056259,
-0.026504026725888252,
-0.055277202278375626,
0.09903333336114883,
0.18737360835075378,
0.023298239335417747,
-0.030139802023768425,
-0.017121821641921997,
-0.006545371375977993,
-0.0705764889717102,
-0.09622511267662048,
0.03035912476480007,
0.014836201444268227,
-0.047851528972387314,
-0.028186481446027756,
0.06813996285200119,
0.025530710816383362,
0.08827632665634155,
-0.0797082856297493,
-0.00023395063180942088,
0.08464265614748001,
-0.0026132189668715,
0.022335920482873917,
-0.023892497643828392,
0.011779394932091236,
0.005324839614331722,
-0.005611921660602093,
-0.02238144539296627,
-0.09433925151824951,
-0.034243226051330566,
0.00223725032992661,
-0.030856503173708916,
0.026711098849773407,
0.003771713934838772,
-0.014881906099617481,
-0.027273504063487053,
0.03331803157925606,
0.0795053243637085,
-0.010978509671986103,
-0.0005103974835947156,
0.03590962663292885,
0.01131537463515997,
-0.005915618035942316,
-0.0670713558793068,
-0.08480843901634216,
-0.058715179562568665,
-0.02031346783041954,
0.05152800679206848,
-0.01166082825511694,
-0.09207690507173538,
0.04144668206572533,
-0.09874194115400314,
-0.0446150004863739,
0.030795585364103317,
-0.02349238283932209,
-0.03493962809443474,
0.01123996265232563,
0.037317853420972824,
-0.03431176766753197,
-0.06698665767908096,
-0.015514672733843327,
-0.01660451665520668,
-0.017338642850518227,
-0.047494348138570786,
0.04947661980986595,
0.004561688285320997,
-0.004879059735685587,
-0.011110990308225155,
-0.06826132535934448,
0.10590521991252899,
0.01480445172637701,
-0.061602916568517685,
-0.009455995634198189,
-0.055480897426605225,
-0.07723024487495422,
0.02687394805252552,
-0.0011750506237149239,
-0.030920790508389473,
-0.01152040995657444,
0.00993435364216566,
0.08471005409955978,
0.011334356851875782,
0.09448603540658951,
0.04108313098549843,
0.11789001524448395,
0.0009808525210246444,
0.011806734837591648,
0.025344910100102425,
0.029723336920142174,
0.05156158655881882,
0.09444581717252731,
-0.006324262823909521,
-5.3900873098200464e-8,
0.016872679814696312,
0.013800151646137238,
-0.046078603714704514,
0.019485898315906525,
-0.052223894745111465,
-0.08386745303869247,
-0.09221689403057098,
0.010742111131548882,
0.01697542890906334,
0.07615916430950165,
0.031925179064273834,
0.06905103474855423,
-0.09166675060987473,
0.02159283682703972,
0.009777446277439594,
0.06675531715154648,
-0.04115322604775429,
0.03983316197991371,
-0.027036678045988083,
-0.04931933805346489,
-0.0059745945036411285,
0.05246230587363243,
-0.012854291126132011,
-0.009399655275046825,
0.03370567038655281,
-0.05037747696042061,
-0.06612450629472733,
0.09337889403104782,
-0.0189273152500391,
-0.002516662934795022,
-0.029004830867052078,
0.042607177048921585,
-0.07608137279748917,
-0.0005828728899359703,
0.07624094188213348,
0.07016026228666306,
-0.06299018859863281,
-0.04463844373822212,
0.010822510346770287,
0.04609714448451996,
0.11258994042873383,
-0.0058126444928348064,
-0.06559601426124573,
0.002115255454555154,
0.08932876586914062,
-0.011963390745222569,
-0.0494266040623188,
-0.05398811027407646,
0.014864316210150719,
0.015507291071116924,
0.07678425312042236,
-0.008169354870915413,
-0.08155675232410431,
0.08877528458833694,
-0.03328603506088257,
0.08609059453010559,
-0.03441811725497246,
-0.04920429736375809,
0.056707724928855896,
0.023035956546664238,
0.017896901816129684,
0.059192050248384476,
0.003139288630336523,
0.07552000135183334
] |
uer/roberta-base-finetuned-jd-full-chinese | 001c14a6ad8498465b0d7a2be435c30e856507a8 | 2022-02-20T07:57:14.000Z | [
"pytorch",
"tf",
"jax",
"bert",
"text-classification",
"zh",
"arxiv:1909.05658",
"arxiv:1708.02657",
"transformers"
] | text-classification | false | uer | null | uer/roberta-base-finetuned-jd-full-chinese | 2,860 | 3 | transformers | ---
language: zh
widget:
- text: "这本书真的很不错"
---
# Chinese RoBERTa-Base Models for Text Classification
## Model description
This is the set of 5 Chinese RoBERTa-Base classification models fine-tuned by [UER-py](https://arxiv.org/abs/1909.05658). You can download the 5 Chinese RoBERTa-Base classification models either from the [UER-py Modelzoo page](https://github.com/dbiir/UER-py/wiki/Modelzoo) (in UER-py format), or via HuggingFace from the links below:
| Dataset | Link |
| :-----------: | :-------------------------------------------------------: |
| **JD full** | [**roberta-base-finetuned-jd-full-chinese**][jd_full] |
| **JD binary** | [**roberta-base-finetuned-jd-binary-chinese**][jd_binary] |
| **Dianping** | [**roberta-base-finetuned-dianping-chinese**][dianping] |
| **Ifeng** | [**roberta-base-finetuned-ifeng-chinese**][ifeng] |
| **Chinanews** | [**roberta-base-finetuned-chinanews-chinese**][chinanews] |
## How to use
You can use this model directly with a pipeline for text classification (taking roberta-base-finetuned-chinanews-chinese as an example):
```python
>>> from transformers import AutoModelForSequenceClassification,AutoTokenizer,pipeline
>>> model = AutoModelForSequenceClassification.from_pretrained('uer/roberta-base-finetuned-chinanews-chinese')
>>> tokenizer = AutoTokenizer.from_pretrained('uer/roberta-base-finetuned-chinanews-chinese')
>>> text_classification = pipeline('sentiment-analysis', model=model, tokenizer=tokenizer)
>>> text_classification("北京上个月召开了两会")
[{'label': 'mainland China politics', 'score': 0.7211663722991943}]
```
## Training data
Five Chinese text classification datasets are used. The JD full, JD binary, and Dianping datasets consist of user reviews with different sentiment polarities. Ifeng and Chinanews consist of the first paragraphs of news articles from different topic classes. They were collected by the [Glyph](https://github.com/zhangxiangxiao/glyph) project, and more details are discussed in the corresponding [paper](https://arxiv.org/abs/1708.02657).
## Training procedure
Models are fine-tuned with [UER-py](https://github.com/dbiir/UER-py/) on [Tencent Cloud](https://cloud.tencent.com/). We fine-tune for three epochs with a sequence length of 512 on the basis of the pre-trained model [chinese_roberta_L-12_H-768](https://huggingface.co/uer/chinese_roberta_L-12_H-768). At the end of each epoch, the model is saved when the best performance on the development set is achieved. We use the same hyper-parameters across the different models.
Taking roberta-base-finetuned-chinanews-chinese as an example:
```
python3 run_classifier.py --pretrained_model_path models/cluecorpussmall_roberta_base_seq512_model.bin-250000 \
--vocab_path models/google_zh_vocab.txt \
--train_path datasets/glyph/chinanews/train.tsv \
--dev_path datasets/glyph/chinanews/dev.tsv \
--output_model_path models/chinanews_classifier_model.bin \
--learning_rate 3e-5 --epochs_num 3 --batch_size 32 --seq_length 512
```
Finally, we convert the fine-tuned model into Huggingface's format:
```
python3 scripts/convert_bert_text_classification_from_uer_to_huggingface.py --input_model_path models/chinanews_classifier_model.bin \
--output_model_path pytorch_model.bin \
--layers_num 12
```
### BibTeX entry and citation info
```
@article{devlin2018bert,
title={BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding},
author={Devlin, Jacob and Chang, Ming-Wei and Lee, Kenton and Toutanova, Kristina},
journal={arXiv preprint arXiv:1810.04805},
year={2018}
}
@article{liu2019roberta,
title={Roberta: A robustly optimized bert pretraining approach},
author={Liu, Yinhan and Ott, Myle and Goyal, Naman and Du, Jingfei and Joshi, Mandar and Chen, Danqi and Levy, Omer and Lewis, Mike and Zettlemoyer, Luke and Stoyanov, Veselin},
journal={arXiv preprint arXiv:1907.11692},
year={2019}
}
@article{zhang2017encoding,
title={Which encoding is the best for text classification in chinese, english, japanese and korean?},
author={Zhang, Xiang and LeCun, Yann},
journal={arXiv preprint arXiv:1708.02657},
year={2017}
}
@article{zhao2019uer,
title={UER: An Open-Source Toolkit for Pre-training Models},
author={Zhao, Zhe and Chen, Hui and Zhang, Jinbin and Zhao, Xin and Liu, Tao and Lu, Wei and Chen, Xi and Deng, Haotang and Ju, Qi and Du, Xiaoyong},
journal={EMNLP-IJCNLP 2019},
pages={241},
year={2019}
}
```
[jd_full]:https://huggingface.co/uer/roberta-base-finetuned-jd-full-chinese
[jd_binary]:https://huggingface.co/uer/roberta-base-finetuned-jd-binary-chinese
[dianping]:https://huggingface.co/uer/roberta-base-finetuned-dianping-chinese
[ifeng]:https://huggingface.co/uer/roberta-base-finetuned-ifeng-chinese
[chinanews]:https://huggingface.co/uer/roberta-base-finetuned-chinanews-chinese | [
-0.11267668753862381,
-0.05451902374625206,
-0.013514150865375996,
0.024772487580776215,
0.006562556605786085,
0.07622016221284866,
-0.019244613125920296,
0.0030214476864784956,
-0.01183942798525095,
0.003798313671723008,
0.06591767817735672,
-0.03214235603809357,
0.03400377556681633,
0.0024252028670161963,
0.013518578372895718,
0.08484303206205368,
0.0076593635603785515,
0.05207817628979683,
-0.05743332579731941,
-0.060930609703063965,
0.05327389016747475,
0.052238110452890396,
0.10306881368160248,
-0.007471744902431965,
0.03261341527104378,
-0.06854590028524399,
0.004425404127687216,
0.07249173521995544,
0.06552302837371826,
-0.005016335751861334,
-0.07114741951227188,
0.09016775339841843,
0.08166582882404327,
0.09111616015434265,
0.011996599845588207,
0.08524274080991745,
-0.004849315620958805,
-0.03457251563668251,
-0.05079621821641922,
0.02587554231286049,
-0.0026154830120503902,
0.033640649169683456,
-0.01690024510025978,
-0.03462816774845123,
0.05673924833536148,
-0.0533757358789444,
-0.03846568986773491,
-0.022247755900025368,
-0.030607866123318672,
-0.018161911517381668,
-0.06986966729164124,
-0.025492334738373756,
-0.04871140420436859,
0.06130533665418625,
-0.06204519420862198,
-0.006187023129314184,
-0.010990170761942863,
-0.02070142887532711,
-0.03085172176361084,
0.005636816378682852,
-0.06682944297790527,
-0.001541322679258883,
-0.04775027930736542,
-0.02503209561109543,
-0.008912559598684311,
0.04459599032998085,
-0.04595470055937767,
0.053989388048648834,
0.0435834601521492,
-0.03760659322142601,
-0.02820885367691517,
-0.05582208186388016,
-0.050024211406707764,
0.03885941952466965,
-0.05226020887494087,
-0.09952924400568008,
0.08764422684907913,
0.02843726985156536,
-0.051982033997774124,
-0.05391703173518181,
-0.07796823233366013,
-0.04184836521744728,
0.10431915521621704,
-0.0008873159531503916,
0.045498982071876526,
0.00699579669162631,
-0.08943558484315872,
0.10122831910848618,
-0.02638629451394081,
-0.010305444709956646,
-0.01968904584646225,
-0.01897396892309189,
0.010314539074897766,
0.016340626403689384,
-0.11968709528446198,
0.03826478123664856,
0.06646443158388138,
0.00015038739365991205,
-0.05301006883382797,
0.04593276605010033,
-0.013700452633202076,
-0.012433445081114769,
0.046986181288957596,
-0.040922630578279495,
-0.014002588577568531,
-0.025922471657395363,
0.04084984213113785,
0.00981593132019043,
0.018404556438326836,
-0.10458496958017349,
0.011378555558621883,
-0.0029591184575110674,
-0.11495816707611084,
-0.06181688234210014,
0.023092705756425858,
-0.04246444255113602,
0.03337489813566208,
-0.059408675879240036,
-0.0069238352589309216,
-0.02864662930369377,
0.011919044889509678,
0.0354742631316185,
-0.02292967215180397,
-0.026526907458901405,
0.005189734976738691,
0.02370326593518257,
-0.07119540870189667,
-3.114084736709864e-33,
0.10185669362545013,
0.07371869683265686,
0.05282493308186531,
0.01571468636393547,
-0.00016550706641282886,
-0.00632135383784771,
0.0038509650621563196,
0.007670848164707422,
-0.07108175754547119,
0.01060874667018652,
-0.049811460077762604,
0.05075749382376671,
-0.05321047082543373,
-0.0020394192542880774,
0.008652486838400364,
-0.05569260194897652,
-0.062136854976415634,
0.012254586443305016,
0.03379911184310913,
0.03480913117527962,
0.07374189049005508,
0.05763733759522438,
-0.03560923784971237,
-0.05584990605711937,
-0.03219963610172272,
0.056252315640449524,
0.0830269530415535,
-0.0841221883893013,
-0.08364920318126678,
0.016951490193605423,
-0.07106748223304749,
0.05247146636247635,
-0.0009310290915891528,
-0.024214105680584908,
-0.03138997033238411,
-0.05153302848339081,
-0.06562835723161697,
0.03129474073648453,
0.02006174810230732,
-0.0404411256313324,
0.04078352078795433,
-0.004834008868783712,
0.018101129680871964,
-0.014890527352690697,
-0.026185380294919014,
0.016390878707170486,
-0.03022639825940132,
-0.01253711711615324,
0.0706135556101799,
0.009184236638247967,
0.04511599242687225,
-0.02937263436615467,
-0.00028541183564811945,
0.12166450917720795,
-0.025468161329627037,
0.014516456052660942,
0.06441189348697662,
0.017617881298065186,
-0.026351604610681534,
0.02583255246281624,
0.055547624826431274,
-0.07889257371425629,
-0.030585426837205887,
0.03716041147708893,
0.06070619076490402,
0.006861082278192043,
-0.04421789571642876,
-0.055210527032613754,
0.03686847165226936,
0.05379054322838783,
-0.04414091631770134,
-0.0276965219527483,
0.07703746855258942,
0.07660718262195587,
-0.013455264270305634,
-0.04449433088302612,
-0.017105454578995705,
-0.05424899607896805,
-0.024898555129766464,
0.04518711939454079,
-0.0024355375207960606,
0.005749029573053122,
0.022771552205085754,
0.011188676580786705,
-0.08040082454681396,
-0.02826017513871193,
0.08721776306629181,
-0.05261322483420372,
-0.024275099858641624,
-0.059309449046850204,
-0.08512041717767715,
0.04462144151329994,
0.0468963123857975,
-0.06323060393333435,
-0.01379456091672182,
7.161621846184903e-35,
0.0752604678273201,
0.020052693784236908,
-0.0412571020424366,
0.014281478710472584,
0.03592071309685707,
-0.035380661487579346,
0.03408793359994888,
0.09987751394510269,
-0.021714238449931145,
-0.042419929057359695,
0.04946544021368027,
-0.026325315237045288,
0.09857507795095444,
-0.03730110079050064,
0.013401361182332039,
0.04980834200978279,
-0.019062386825680733,
0.024151921272277832,
0.018386762589216232,
0.017253132537007332,
-0.03793655335903168,
0.01797983981668949,
-0.0784924179315567,
-0.01810716837644577,
-0.011906794272363186,
-0.026779185980558395,
0.04491065815091133,
0.002693283138796687,
0.020455310121178627,
-0.012374007143080235,
-0.03757361322641373,
-0.04361170902848244,
-0.03169553354382515,
0.034521136432886124,
-0.09654323756694794,
-0.0056682792492210865,
-0.039892930537462234,
-0.058183904737234116,
-0.011573465541005135,
0.06310776621103287,
0.07505574822425842,
-0.011780333705246449,
-0.09588538110256195,
0.042122580111026764,
0.013193970546126366,
-0.027564866468310356,
-0.10303991287946701,
0.009318411350250244,
-0.08183839917182922,
-0.04646432027220726,
0.015504492446780205,
-0.001742761000059545,
-0.0488099679350853,
0.05853265896439552,
-0.012479559518396854,
-0.050585925579071045,
0.05756354331970215,
-0.10163645446300507,
-0.021828709170222282,
-0.017102694138884544,
-0.03698800131678581,
0.02000260539352894,
-0.023465760052204132,
0.005552704446017742,
0.01683090627193451,
-0.07784421741962433,
0.06619557738304138,
0.03876929357647896,
-0.026600966230034828,
-0.02827778272330761,
0.05499015003442764,
-0.02721184864640236,
0.05692870169878006,
0.07210500538349152,
0.007718336768448353,
0.027705492451786995,
-0.06353534013032913,
0.005999280139803886,
-0.023159530013799667,
-0.06534314900636673,
-0.02529045194387436,
-0.07407627254724503,
0.03209657594561577,
0.07101334631443024,
-0.006591531448066235,
0.017294058576226234,
0.01987101137638092,
0.11224722862243652,
0.0723024308681488,
0.005358592141419649,
-0.032636601477861404,
-0.011565223336219788,
0.034875959157943726,
0.12955281138420105,
-0.000057793738960754126,
-4.8652680106897606e-8,
-0.05094168707728386,
-0.007045069243758917,
-0.0600556917488575,
-0.023799676448106766,
-0.09121255576610565,
0.03045467659831047,
-0.020960692316293716,
0.04768245294690132,
0.017739253118634224,
0.05655715987086296,
-0.006719164550304413,
0.0022274330258369446,
-0.09226106852293015,
0.037653714418411255,
-0.044330134987831116,
0.06652221828699112,
0.053588636219501495,
0.12492670118808746,
0.018998105078935623,
0.024916546419262886,
0.021830515936017036,
0.04102720692753792,
0.04245368391275406,
-0.06574442237615585,
-0.03208042308688164,
0.009312513284385204,
-0.19159293174743652,
0.07885754853487015,
-0.03808237984776497,
-0.025275681167840958,
0.026335477828979492,
0.022382719442248344,
-0.0038093137554824352,
-0.0435488224029541,
-0.004212039988487959,
0.09574061632156372,
-0.0293708685785532,
-0.0924210399389267,
0.013022968545556068,
0.05897246673703194,
0.08957624435424805,
-0.056073810905218124,
-0.07772845029830933,
0.007137957960367203,
0.10060802847146988,
-0.03252189978957176,
0.020785482600331306,
-0.1466556340456009,
0.09349127113819122,
0.060855988413095474,
0.018083343282341957,
-0.05135168880224228,
-0.07446011900901794,
-0.07559764385223389,
-0.054579317569732666,
0.09014631062746048,
-0.015012895688414574,
-0.006889994256198406,
0.08257906138896942,
-0.030686600133776665,
0.006726853549480438,
0.04786228761076927,
-0.03422718867659569,
0.015474584884941578
] |
sberbank-ai/rugpt3medium_based_on_gpt2 | 63494984e6afd13972d863197ea1ce1be484d339 | 2021-09-21T19:29:06.000Z | [
"pytorch",
"ru",
"transformers",
"PyTorch",
"Transformers"
] | null | false | sberbank-ai | null | sberbank-ai/rugpt3medium_based_on_gpt2 | 2,859 | 3 | transformers | ---
language:
- ru
tags:
- PyTorch
- Transformers
thumbnail: "https://github.com/sberbank-ai/ru-gpts"
---
# rugpt3medium\_based\_on\_gpt2
The model was trained with a sequence length of 1024 using the transformers library by the [SberDevices](https://sberdevices.ru/) team on 80B tokens for 3 epochs. After that, the model was fine-tuned with a context length of 2048.
Total training time was around 16 days on 64 GPUs.
The final perplexity on the test set is `17.4`.
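The card itself does not include a usage snippet; the following is a minimal sketch of standard causal-LM generation with `transformers`. It assumes the usual GPT-2 classes apply to this checkpoint, and the prompt and sampling settings (`top_k`, `top_p`, `max_length`) are illustrative assumptions.
```python
# Minimal sketch — assumes standard GPT-2 classes; sampling parameters are illustrative.
from transformers import GPT2LMHeadModel, GPT2Tokenizer

model_id = "sberbank-ai/rugpt3medium_based_on_gpt2"
tokenizer = GPT2Tokenizer.from_pretrained(model_id)
model = GPT2LMHeadModel.from_pretrained(model_id)

prompt = "Александр Сергеевич Пушкин родился в "
input_ids = tokenizer.encode(prompt, return_tensors="pt")

output_ids = model.generate(
    input_ids,
    max_length=50,
    do_sample=True,
    top_k=50,
    top_p=0.95,
    pad_token_id=tokenizer.eos_token_id,
)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```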
| [
-0.09409599751234055,
-0.10451769083738327,
-0.07091599702835083,
0.014355873689055443,
0.03137999400496483,
-0.06508138030767441,
-0.04821731522679329,
0.09479155391454697,
-0.026850629597902298,
-0.12697288393974304,
-0.0038475394248962402,
0.060697492212057114,
-0.07036471366882324,
0.030501961708068848,
0.006314340513199568,
-0.0030581350438296795,
-0.006784005090594292,
-0.018991390243172646,
-0.03917844220995903,
-0.043201543390750885,
0.018583735451102257,
-0.02033230848610401,
0.05857454240322113,
0.049255695194005966,
0.036833204329013824,
0.01815531589090824,
0.03455650806427002,
-0.09167109429836273,
0.08768516033887863,
-0.017987174913287163,
0.08867795020341873,
-0.003446775022894144,
0.004988815635442734,
0.007862037979066372,
0.01231334824115038,
0.03565962240099907,
-0.08003084361553192,
-0.060341525822877884,
-0.018594032153487206,
0.0031186589039862156,
0.09410462528467178,
-0.005638256203383207,
-0.008758032694458961,
0.03838862478733063,
0.06459231674671173,
0.012801502831280231,
0.015085007064044476,
-0.055255673825740814,
-0.01907622255384922,
-0.08756363391876221,
-0.020232953131198883,
-0.010318358428776264,
-0.006865815259516239,
0.0007953960448503494,
-0.045365165919065475,
-0.030284283682703972,
0.033107224851846695,
-0.01767793670296669,
0.02055494487285614,
-0.010183523409068584,
-0.02206702157855034,
-0.013538495637476444,
-0.06453756242990494,
-0.04998017102479935,
-0.02188965678215027,
0.05551926791667938,
0.02460072748363018,
-0.040168117731809616,
0.09503402560949326,
-0.01790492609143257,
0.016409076750278473,
0.0647697001695633,
-0.12241524457931519,
0.07068654894828796,
0.06442099064588547,
0.015596000477671623,
0.034329161047935486,
0.051879532635211945,
0.004230468068271875,
-0.1361701637506485,
0.022171838209033012,
-0.1029476523399353,
0.062136292457580566,
-0.0071928054094314575,
0.005837645847350359,
-0.013742828741669655,
0.022282330319285393,
0.06271232664585114,
-0.038018882274627686,
0.005530060268938541,
0.04650479927659035,
-0.03593439236283302,
-0.01035905722528696,
0.05688662454485893,
-0.009096157737076283,
0.04584898054599762,
-0.0010142213432118297,
-0.013972152955830097,
-0.058996569365262985,
0.028562668710947037,
-0.05401863902807236,
-0.003215671284124255,
-0.023709408938884735,
0.10249277949333191,
-0.0782977044582367,
0.04240407794713974,
-0.030798690393567085,
0.028023282065987587,
-0.016305312514305115,
-0.008352230302989483,
0.11708387732505798,
-0.00682396674528718,
0.025100557133555412,
-0.016654493287205696,
0.019587082788348198,
0.04479920491576195,
-0.08723866939544678,
0.035954736173152924,
0.009725566953420639,
0.12594090402126312,
-0.020591488108038902,
0.05011473968625069,
-0.035112861543893814,
-0.020460065454244614,
-0.04634420573711395,
0.02192562445998192,
-0.03223668411374092,
6.382488371795678e-33,
-0.004981239326298237,
0.016362452879548073,
-0.013574035838246346,
-0.010908062569797039,
-0.03943662717938423,
0.03583003580570221,
-0.012503287754952908,
-0.023913389071822166,
0.0391131229698658,
-0.012570529244840145,
-0.03908764570951462,
-0.03981796279549599,
-0.052701011300086975,
-0.02804240956902504,
-0.017710771411657333,
-0.007941262796521187,
-0.007740335073322058,
0.04714527353644371,
-0.032222338020801544,
0.08231355994939804,
0.10706853866577148,
0.022322524338960648,
-0.060948170721530914,
-0.0566074401140213,
-0.061705708503723145,
0.07508522272109985,
0.047543756663799286,
-0.02324768528342247,
0.03681953251361847,
0.023985449224710464,
-0.025868674740195274,
0.09478090703487396,
-0.03507102280855179,
0.06600988656282425,
0.0020716949366033077,
-0.018432319164276123,
0.040789466351270676,
-0.07484520971775055,
-0.015562271699309349,
-0.02157718688249588,
-0.0363037995994091,
0.005852143280208111,
0.008213751949369907,
-0.08131758868694305,
0.040772441774606705,
-0.0682893618941307,
0.033337026834487915,
0.05217226594686508,
0.024243835359811783,
0.07816539704799652,
-0.03933754563331604,
0.07905556261539459,
-0.0753788948059082,
0.0028135397005826235,
0.044307176023721695,
-0.0006704145926050842,
0.0777277797460556,
0.029042894020676613,
0.08962670713663101,
0.0971594825387001,
0.12620434165000916,
0.04283164441585541,
0.04850247874855995,
0.0348946787416935,
0.019185513257980347,
0.012281403876841068,
-0.059245314449071884,
0.031351134181022644,
0.01978110708296299,
0.08006896823644638,
-0.05446982756257057,
-0.028986599296331406,
0.011811697855591774,
-0.030234402045607567,
0.06785879284143448,
-0.11665112525224686,
0.03239613026380539,
0.038639966398477554,
-0.030054846778512,
0.01636550948023796,
-0.09574364125728607,
0.035282429307699203,
0.03002883866429329,
-0.04784068837761879,
-0.0754840224981308,
-0.04002590849995613,
-0.015230190940201283,
-0.0384831465780735,
-0.07299140840768814,
-0.03450438007712364,
0.003182305721566081,
0.0012757278745993972,
-0.03455335274338722,
0.06195280700922012,
-0.03190136328339577,
-5.362636819861954e-33,
0.035632915794849396,
0.036751072853803635,
-0.03880459815263748,
0.14034920930862427,
0.016323300078511238,
-0.0780721977353096,
-0.004435902927070856,
0.16004514694213867,
0.0013418934540823102,
0.05691133812069893,
0.07912091165781021,
-0.05109517648816109,
-0.017474845051765442,
-0.06690249592065811,
0.09797652065753937,
0.0007052998407743871,
0.005758193321526051,
-0.04742195084691048,
-0.0014153392985463142,
0.011991131119430065,
0.02808321826159954,
0.08771128952503204,
-0.08268719911575317,
0.04352039098739624,
-0.04711132124066353,
0.005167768336832523,
-0.0025216422509402037,
-0.015791233628988266,
0.03217853605747223,
-0.02612488530576229,
0.03967258334159851,
0.03737667202949524,
-0.07542110234498978,
0.0038848782423883677,
-0.0742855817079544,
-0.01008116640150547,
0.035434022545814514,
0.08450108766555786,
-0.006250046193599701,
0.017484774813055992,
0.09640268236398697,
0.03618542477488518,
-0.03710620477795601,
0.08345548063516617,
0.011650997214019299,
-0.008274728432297707,
-0.03044089488685131,
-0.0661076083779335,
0.09517790377140045,
-0.0135100819170475,
-0.008352186530828476,
-0.03211931139230728,
-0.005127294454723597,
-0.0156937874853611,
-0.020724797621369362,
-0.0908685177564621,
0.005904820282012224,
0.012227840721607208,
0.005415929947048426,
-0.008350555785000324,
-0.004762702155858278,
-0.01669234037399292,
0.040450017899274826,
-0.05145455151796341,
-0.0266990028321743,
-0.003913066349923611,
-0.025632048025727272,
0.019940273836255074,
0.09710805118083954,
0.0736837312579155,
0.010883922688663006,
0.019490495324134827,
0.0018212159629911184,
0.043192535638809204,
-0.0733385905623436,
0.044126056134700775,
-0.026330823078751564,
-0.029855068773031235,
0.04689464345574379,
-0.08871571719646454,
-0.057279687374830246,
0.017624810338020325,
0.0004782923497259617,
0.007197684142738581,
0.02576461061835289,
0.013894766569137573,
0.009727722965180874,
0.059671491384506226,
0.08125529438257217,
-0.017062418162822723,
0.03211621567606926,
0.03205103799700737,
0.028002049773931503,
0.054841503500938416,
0.006773581728339195,
-4.794683405862088e-8,
-0.009946174919605255,
0.03988592326641083,
-0.08514830470085144,
0.060654204338788986,
0.0012697267811745405,
-0.0654214397072792,
-0.0019302619621157646,
0.08125556260347366,
-0.08241342753171921,
0.07804232090711594,
-0.0011487012961879373,
-0.05725311487913132,
-0.07897114753723145,
-0.03658009693026543,
0.05695529282093048,
0.07364970445632935,
-0.0012777462834492326,
0.060849785804748535,
-0.04401889070868492,
-0.053099073469638824,
-0.010708929039537907,
-0.011885934509336948,
0.03044533170759678,
-0.08411259204149246,
-0.11263782531023026,
-0.03193742781877518,
0.024914581328630447,
0.05432596057653427,
0.021219883114099503,
-0.03991645947098732,
0.017184335738420486,
-0.03060971200466156,
-0.0321258120238781,
-0.049455396831035614,
0.015176272951066494,
0.04724488779902458,
0.002078566001728177,
-0.0022832765243947506,
0.07215318083763123,
0.048992857336997986,
-0.05213252827525139,
-0.01782585307955742,
0.00337751186452806,
-0.006735182832926512,
-0.0030148017685860395,
-0.015508991666138172,
-0.1502399891614914,
-0.0898243710398674,
-0.034141626209020615,
0.013218873180449009,
0.0873032733798027,
-0.011211644858121872,
-0.09032396972179413,
0.0037301769480109215,
0.0347842313349247,
0.056965943425893784,
-0.07237635552883148,
-0.12536323070526123,
-0.06082199513912201,
0.007062267046421766,
-0.015560111962258816,
-0.05820176377892494,
-0.03720894455909729,
0.007469166535884142
] |
etalab-ia/dpr-question_encoder-fr_qa-camembert | 20f81e3505a3184dbf3729701d7c6152125287ef | 2021-06-16T10:10:09.000Z | [
"pytorch",
"camembert",
"feature-extraction",
"fr",
"dataset:piaf",
"dataset:FQuAD",
"dataset:SQuAD-FR",
"arxiv:2004.04906",
"arxiv:1911.03894",
"transformers"
] | feature-extraction | false | etalab-ia | null | etalab-ia/dpr-question_encoder-fr_qa-camembert | 2,858 | 3 | transformers | ---
language: fr
datasets:
- piaf
- FQuAD
- SQuAD-FR
---
# dpr-question_encoder-fr_qa-camembert
## Description
French [DPR model](https://arxiv.org/abs/2004.04906) using [CamemBERT](https://arxiv.org/abs/1911.03894) as its base, fine-tuned on a combination of three French Q&A datasets.
## Data
### French Q&A
We use a combination of three French Q&A datasets:
1. [PIAFv1.1](https://www.data.gouv.fr/en/datasets/piaf-le-dataset-francophone-de-questions-reponses/)
2. [FQuADv1.0](https://fquad.illuin.tech/)
3. [SQuAD-FR (SQuAD automatically translated to French)](https://github.com/Alikabbadj/French-SQuAD)
### Training
We use 90 562 random questions for `train` and 22 391 for `dev`. No question in `train` appears in `dev`. For each question, we have a single `positive_context` (the paragraph where the answer to this question is found) and around 30 `hard_negative_contexts`. Hard negative contexts are found by querying an Elasticsearch instance (via BM25 retrieval) and keeping the top-k candidates **that do not contain the answer**, as sketched below.
The files are over [here](https://drive.google.com/file/d/1W5Jm3sqqWlsWsx2sFpA39Ewn33PaLQ7U/view?usp=sharing).
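For illustration, the hard-negative mining step described above can be sketched with the Python Elasticsearch client; the index and field names here are hypothetical and this is not the exact script that was used:
```python
from elasticsearch import Elasticsearch
es = Elasticsearch("http://localhost:9200")
def mine_hard_negatives(question, answer, k=30, index="fr_paragraphs"):
    # BM25 is Elasticsearch's default scoring for `match` queries
    response = es.search(index=index, body={"size": k, "query": {"match": {"text": question}}})
    candidates = [hit["_source"]["text"] for hit in response["hits"]["hits"]]
    # Keep only top-k candidates that do NOT contain the gold answer
    return [c for c in candidates if answer not in c]
```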
### Evaluation
We use FQuADv1.0 and French-SQuAD evaluation sets.
## Training Script
We use the official [Facebook DPR implementation](https://github.com/facebookresearch/DPR) with a slight modification: by default, the code works with RoBERTa models, so we changed a single line to make it easier to work with CamemBERT. This modification can be found [over here](https://github.com/psorianom/DPR).
### Hyperparameters
```shell
python -m torch.distributed.launch --nproc_per_node=8 train_dense_encoder.py \
--max_grad_norm 2.0 --encoder_model_type hf_bert --pretrained_file data/bert-base-multilingual-uncased \
--seed 12345 --sequence_length 256 --warmup_steps 1237 --batch_size 16 --do_lower_case \
--train_file DPR_FR_train.json \
--dev_file ./data/100_hard_neg_ctxs/DPR_FR_dev.json \
--output_dir ./output/bert --learning_rate 2e-05 --num_train_epochs 35 \
--dev_batch_size 16 --val_av_rank_start_epoch 25 \
--pretrained_model_cfg ./data/bert-base-multilingual-uncased
```
## Evaluation results
We evaluate on the FQuAD and SQuAD-FR evaluation (validation) sets. To obtain the results below, we use [haystack's evaluation script](https://github.com/deepset-ai/haystack/blob/db4151bbc026f27c6d709fefef1088cd3f1e18b9/tutorials/Tutorial5_Evaluation.py) (**we report Retrieval results only**).
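For reference, the retriever recall reported below is the fraction of questions whose answer appears in at least one of the top-20 retrieved passages. A minimal sketch of that metric (a simplified stand-in, not haystack's actual implementation):
```python
def recall_at_k(eval_samples, k=20):
    # eval_samples: list of (answer_text, ranked_passages) pairs
    hits = sum(any(answer in passage for passage in passages[:k])
               for answer, passages in eval_samples)
    return hits / len(eval_samples)
```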
### DPR
#### FQuAD v1.0 Evaluation
```shell
For 2764 out of 3184 questions (86.81%), the answer was in the top-20 candidate passages selected by the retriever.
Retriever Recall: 0.87
Retriever Mean Avg Precision: 0.57
```
#### SQuAD-FR Evaluation
```shell
For 8945 out of 10018 questions (89.29%), the answer was in the top-20 candidate passages selected by the retriever.
Retriever Recall: 0.89
Retriever Mean Avg Precision: 0.63
```
### BM25
For reference, BM25 obtains the results shown below. As in the original paper, on SQuAD-like datasets the results of DPR are consistently superseded by BM25.
#### FQuAD v1.0 Evaluation
```shell
For 2966 out of 3184 questions (93.15%), the answer was in the top-20 candidate passages selected by the retriever.
Retriever Recall: 0.93
Retriever Mean Avg Precision: 0.74
```
#### SQuAD-FR Evaluation
```shell
For 9353 out of 10018 questions (93.36%), the answer was in the top-20 candidate passages selected by the retriever.
Retriever Recall: 0.93
Retriever Mean Avg Precision: 0.77
```
## Usage
The results reported here are obtained with the `haystack` library. To obtain similar embeddings using only the HF `transformers` library, you can do the following:
```python
from transformers import AutoTokenizer, AutoModel
query = "Salut, mon chien est-il mignon ?"
# The model was trained on lower-cased text, hence do_lower_case=True
tokenizer = AutoTokenizer.from_pretrained("etalab-ia/dpr-question_encoder-fr_qa-camembert", do_lower_case=True)
input_ids = tokenizer(query, return_tensors='pt')["input_ids"]
model = AutoModel.from_pretrained("etalab-ia/dpr-question_encoder-fr_qa-camembert", return_dict=True)
# The pooled output of the encoder is the DPR query embedding
embeddings = model(input_ids).pooler_output
print(embeddings)
```
And with `haystack`, we use it as a retriever:
```python
# `document_store` and `dpr_model_tag` are assumed to be defined beforehand
# (e.g. an ElasticsearchDocumentStore and the model revision to load).
retriever = DensePassageRetriever(
    document_store=document_store,
    query_embedding_model="etalab-ia/dpr-question_encoder-fr_qa-camembert",
    passage_embedding_model="etalab-ia/dpr-ctx_encoder-fr_qa-camembert",
    model_version=dpr_model_tag,
    infer_tokenizer_classes=True,
)
```
## Acknowledgments
This work was performed using HPC resources from GENCI–IDRIS (Grant 2020-AD011011224).
## Citations
### Datasets
#### PIAF
```
@inproceedings{KeraronLBAMSSS20,
author = {Rachel Keraron and
Guillaume Lancrenon and
Mathilde Bras and
Fr{\'{e}}d{\'{e}}ric Allary and
Gilles Moyse and
Thomas Scialom and
Edmundo{-}Pavel Soriano{-}Morales and
Jacopo Staiano},
title = {Project {PIAF:} Building a Native French Question-Answering Dataset},
booktitle = {{LREC}},
pages = {5481--5490},
publisher = {European Language Resources Association},
year = {2020}
}
```
#### FQuAD
```
@article{dHoffschmidt2020FQuADFQ,
title={FQuAD: French Question Answering Dataset},
author={Martin d'Hoffschmidt and Maxime Vidal and Wacim Belblidia and Tom Brendl{\'e} and Quentin Heinrich},
journal={ArXiv},
year={2020},
volume={abs/2002.06071}
}
```
#### SQuAD-FR
```
@MISC{kabbadj2018,
author = "Kabbadj, Ali",
title = "Something new in French Text Mining and Information Extraction (Universal Chatbot): Largest Q&A French training dataset (110 000+) ",
editor = "linkedin.com",
month = "November",
year = "2018",
url = "\url{https://www.linkedin.com/pulse/something-new-french-text-mining-information-chatbot-largest-kabbadj/}",
note = "[Online; posted 11-November-2018]",
}
```
### Models
#### CamemBERT
HF model card : [https://huggingface.co/camembert-base](https://huggingface.co/camembert-base)
```
@inproceedings{martin2020camembert,
title={CamemBERT: a Tasty French Language Model},
author={Martin, Louis and Muller, Benjamin and Su{\'a}rez, Pedro Javier Ortiz and Dupont, Yoann and Romary, Laurent and de la Clergerie, {\'E}ric Villemonte and Seddah, Djam{\'e} and Sagot, Beno{\^\i}t},
booktitle={Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics},
year={2020}
}
```
#### DPR
```
@misc{karpukhin2020dense,
title={Dense Passage Retrieval for Open-Domain Question Answering},
author={Vladimir Karpukhin and Barlas Oğuz and Sewon Min and Patrick Lewis and Ledell Wu and Sergey Edunov and Danqi Chen and Wen-tau Yih},
year={2020},
eprint={2004.04906},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
```
| [
-0.0942089855670929,
-0.05257468670606613,
-0.025222748517990112,
-0.0729345753788948,
0.026863208040595055,
0.05626293644309044,
0.022317739203572273,
0.03471989184617996,
0.058699727058410645,
-0.03096683882176876,
0.018518825992941856,
-0.08590204268693924,
0.02660187892615795,
0.0038714951369911432,
0.008490221574902534,
-0.032269105315208435,
-0.047864023596048355,
0.0130560752004385,
-0.05531870201230049,
-0.06143989413976669,
0.012545359320938587,
-0.022613348439335823,
0.10188940167427063,
-0.01707097515463829,
0.04783071205019951,
-0.05114756152033806,
-0.022619785740971565,
0.07631462067365646,
0.021944627165794373,
-0.054904162883758545,
0.015079912729561329,
0.14810526371002197,
0.0041679865680634975,
0.04833715781569481,
-0.024458523839712143,
0.05971425399184227,
0.002223541494458914,
-0.0518232099711895,
-0.053033068776130676,
0.09342633187770844,
-0.05043559893965721,
0.012111698277294636,
0.040718741714954376,
-0.021768668666481972,
0.06615809351205826,
0.04067262262105942,
-0.07115846127271652,
0.06260906904935837,
-0.002643533516675234,
-0.04866933822631836,
-0.06842498481273651,
0.017351610586047173,
0.015647487714886665,
0.050085876137018204,
-0.00028043947531841695,
0.0032246734481304884,
0.05079381912946701,
-0.02096058428287506,
0.01573965512216091,
0.025611193850636482,
-0.03632032871246338,
-0.04374383017420769,
-0.03448783978819847,
-0.001062246155925095,
-0.04132005572319031,
-0.03225036710500717,
-0.011399000883102417,
0.035200335085392,
-0.032913412898778915,
0.030647648498415947,
-0.10220586508512497,
0.013536590151488781,
-0.032137807458639145,
0.040946003049612045,
0.05618494376540184,
0.10044123977422714,
-0.00933873001486063,
-0.029962969943881035,
0.06038488447666168,
-0.1503283977508545,
0.010304816998541355,
-0.030507156625390053,
0.06641083210706711,
0.0012757775839418173,
0.11901283264160156,
-0.026747561991214752,
0.05763530358672142,
0.040301911532878876,
0.022111939266324043,
0.01893494464457035,
-0.06798643618822098,
0.00796168390661478,
0.054544657468795776,
0.07252048701047897,
-0.001813787268474698,
0.07382720708847046,
0.05684172734618187,
0.03057231567800045,
-0.024141626432538033,
0.04326998442411423,
0.015732109546661377,
-0.005660525523126125,
0.05845383554697037,
0.011065528728067875,
-0.12640900909900665,
-0.013353087939321995,
0.04812806472182274,
0.05356301739811897,
0.023449206724762917,
-0.13790574669837952,
-0.040139876306056976,
-0.006310896947979927,
-0.04634570702910423,
-0.059219274669885635,
-0.05651864409446716,
0.03735504299402237,
0.03811849281191826,
-0.05318417400121689,
-0.024641701951622963,
-0.02064589038491249,
-0.006226849742233753,
-0.030256249010562897,
-0.035712677985429764,
0.008989724330604076,
0.012346082367002964,
-0.01925492286682129,
-0.03943083807826042,
2.8280654716487005e-33,
0.11565277725458145,
0.11331057548522949,
0.026575645431876183,
-0.007860726676881313,
-0.012631422840058804,
-0.020794695243239403,
-0.046892713755369186,
0.033978234976530075,
-0.0801025778055191,
0.029931725934147835,
-0.06638322025537491,
0.011223345063626766,
-0.05715607479214668,
0.03521643951535225,
0.021058516576886177,
-0.0690530464053154,
0.01910012774169445,
-0.008769827894866467,
-0.014034423977136612,
0.001982582500204444,
0.14285756647586823,
0.04730397090315819,
-0.00017191447841469198,
-0.03450682759284973,
0.09188321977853775,
0.05034369230270386,
0.004318722058087587,
-0.022380013018846512,
-0.06894617527723312,
0.024178506806492805,
-0.09271908551454544,
-0.05532868951559067,
-0.04291402921080589,
0.042250119149684906,
-0.03681371361017227,
-0.06794214248657227,
0.010103332810103893,
-0.020595796406269073,
-0.006587668787688017,
-0.007733164820820093,
0.08462584763765335,
-0.013163389638066292,
0.049284644424915314,
-0.01574571058154106,
-0.045230086892843246,
-0.04420791193842888,
0.015669357031583786,
-0.012982510961592197,
0.06620866060256958,
0.01056465320289135,
-0.01749936118721962,
-0.0369403138756752,
-0.04819673299789429,
-0.06504428386688232,
-0.0037890495732426643,
0.07266432791948318,
-0.010331272147595882,
0.014606948010623455,
-0.020717019215226173,
0.004647258203476667,
0.019078919664025307,
0.008584014140069485,
0.00030741968657821417,
0.03819674998521805,
0.013253419660031796,
-0.026997098699212074,
-0.0004151530738454312,
-0.030401643365621567,
0.09412771463394165,
-0.019529536366462708,
-0.05281684920191765,
0.005720899440348148,
0.0810956060886383,
-0.01799965463578701,
0.09634201973676682,
0.04034671187400818,
-0.0111083984375,
-0.023312460631132126,
-0.015993045642971992,
-0.08224557340145111,
-0.037592899054288864,
0.026592571288347244,
-0.07316451519727707,
0.016040775924921036,
-0.027823807671666145,
0.03268726170063019,
0.08560706675052643,
-0.07261354476213455,
-0.03414381667971611,
-0.008107303641736507,
-0.0772085040807724,
-0.041985489428043365,
0.01195930503308773,
-0.05991685017943382,
0.000869360810611397,
-3.6346079615913736e-33,
0.029607092961668968,
0.08836336433887482,
-0.01929738186299801,
0.028690310195088387,
0.06975162774324417,
-0.035217199474573135,
0.06360958516597748,
0.0661422535777092,
0.05998070910573006,
-0.037622176110744476,
-0.015816135331988335,
-0.11032802611589432,
0.017183441668748856,
-0.07751777768135071,
0.012631010264158249,
-0.018557170405983925,
-0.050868332386016846,
-0.03658869117498398,
0.06268050521612167,
0.09556695818901062,
-0.035179734230041504,
-0.007598366122692823,
-0.08819470554590225,
0.016100192442536354,
-0.03670920431613922,
0.03879101201891899,
0.020254338160157204,
0.020629731938242912,
-0.023919569328427315,
0.008298171684145927,
-0.03634675219655037,
-0.10317205637693405,
-0.026315541937947273,
0.01970585249364376,
-0.07520928233861923,
0.030447613447904587,
0.07131246477365494,
0.019821373745799065,
-0.05529981106519699,
0.11759116500616074,
0.02794087491929531,
0.016597071662545204,
-0.060145601630210876,
-0.01674988679587841,
-0.00047852992429398,
-0.013353322632610798,
-0.03384934738278389,
-0.05274999886751175,
-0.05843893811106682,
-0.03484411910176277,
0.03843450918793678,
0.04957212507724762,
-0.09553764015436172,
0.013789776712656021,
-0.036021631211042404,
-0.02869483083486557,
0.04464510828256607,
-0.033343587070703506,
-0.05141618475317955,
-0.0324433296918869,
-0.04348515719175339,
-0.005352611653506756,
0.0037480206228792667,
-0.02705903723835945,
0.061880242079496384,
-0.05120809003710747,
-0.08031151443719864,
0.04542945325374603,
0.019726183265447617,
-0.01978929340839386,
0.03670773282647133,
-0.061787333339452744,
0.055072344839572906,
0.05690443515777588,
-0.026166163384914398,
-0.0852976143360138,
-0.10932715982198715,
-0.0408664271235466,
0.07867036759853363,
0.04285474121570587,
-0.06448715180158615,
-0.07279733568429947,
0.008476251736283302,
0.08912315964698792,
0.0029687078204005957,
0.062044717371463776,
0.04297306388616562,
0.054657042026519775,
0.07680358737707138,
-0.04610690101981163,
0.04108184948563576,
0.05240539088845253,
0.0692746639251709,
0.09633627533912659,
0.036230314522981644,
-4.944983444943318e-8,
0.016682717949151993,
0.013751894235610962,
-0.07397790253162384,
0.04477446526288986,
-0.016903994604945183,
-0.12563709914684296,
-0.07876665890216827,
-0.0037670095916837454,
0.004702397156506777,
-0.0077864243648946285,
-0.0058543626219034195,
0.048413246870040894,
-0.02130110375583172,
-0.04886454716324806,
-0.018976204097270966,
0.053191233426332474,
-0.012409339658915997,
0.0763971209526062,
-0.028947116807103157,
0.029032502323389053,
0.0022634067572653294,
0.08109164237976074,
-0.038297031074762344,
-0.05110977962613106,
0.0382891520857811,
0.0013809638330712914,
-0.07739406824111938,
-0.03307296335697174,
0.024437585845589638,
0.031814996153116226,
-0.026247402653098106,
0.015550731681287289,
0.0027479256968945265,
-0.0656515583395958,
0.004456285387277603,
0.02307238057255745,
-0.0562213696539402,
-0.05713455751538277,
0.03229467570781708,
0.038291748613119125,
0.12974660098552704,
0.01903657056391239,
-0.13604137301445007,
0.035135138779878616,
0.09085401892662048,
-0.020165687426924706,
-0.026531193405389786,
-0.12671199440956116,
0.05988478288054466,
-0.029461806640028954,
0.026238780468702316,
0.01613493449985981,
-0.04016893357038498,
0.052528899163007736,
0.04002613201737404,
0.04162144660949707,
-0.07917152345180511,
-0.014199827797710896,
0.06344594061374664,
0.00023278048320207745,
-0.049733489751815796,
0.051662132143974304,
-0.010634520091116428,
0.0020704506896436214
] |
facebook/wav2vec2-base-100k-voxpopuli | 7a43eaf4d68a147cfc6b754e338bd9aa72a1fbad | 2021-11-05T12:46:12.000Z | [
"pytorch",
"wav2vec2",
"pretraining",
"multilingual",
"arxiv:2101.00390",
"transformers",
"audio",
"automatic-speech-recognition",
"voxpopuli",
"license:cc-by-nc-4.0"
] | automatic-speech-recognition | false | facebook | null | facebook/wav2vec2-base-100k-voxpopuli | 2,857 | 1 | transformers | ---
language: multilingual
tags:
- audio
- automatic-speech-recognition
- voxpopuli
license: cc-by-nc-4.0
---
# Wav2Vec2-Base-VoxPopuli
[Facebook's Wav2Vec2](https://ai.facebook.com/blog/wav2vec-20-learning-the-structure-of-speech-from-raw-audio/) base model pretrained on the 100k unlabeled subset of [VoxPopuli corpus](https://arxiv.org/abs/2101.00390).
**Note**: This model does not have a tokenizer as it was pretrained on audio alone. In order to use this model for **speech recognition**, a tokenizer should be created and the model should be fine-tuned on labeled text data. Check out [this blog](https://huggingface.co/blog/fine-tune-wav2vec2-english) for a more detailed explanation of how to fine-tune the model.
**Paper**: *[VoxPopuli: A Large-Scale Multilingual Speech Corpus for Representation
Learning, Semi-Supervised Learning and Interpretation](https://arxiv.org/abs/2101.00390)*
**Authors**: *Changhan Wang, Morgane Riviere, Ann Lee, Anne Wu, Chaitanya Talnikar, Daniel Haziza, Mary Williamson, Juan Pino, Emmanuel Dupoux* from *Facebook AI*
See the [official website](https://github.com/facebookresearch/voxpopuli/) for more information.
# Fine-Tuning
Please refer to [this blog](https://huggingface.co/blog/fine-tune-xlsr-wav2vec2) on how to fine-tune this model on a specific language. Note that you should replace `"facebook/wav2vec2-large-xlsr-53"` with this checkpoint for fine-tuning.
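As a minimal usage sketch (not part of the official card), the pretrained encoder can be loaded to extract hidden states; since the checkpoint ships no tokenizer, a default 16 kHz `Wav2Vec2FeatureExtractor` is constructed here:
```python
import torch
from transformers import Wav2Vec2FeatureExtractor, Wav2Vec2Model
feature_extractor = Wav2Vec2FeatureExtractor(sampling_rate=16000, do_normalize=True)
model = Wav2Vec2Model.from_pretrained("facebook/wav2vec2-base-100k-voxpopuli")
speech = torch.zeros(16000).numpy()  # one second of silence as a stand-in for real 16 kHz audio
inputs = feature_extractor(speech, sampling_rate=16000, return_tensors="pt")
with torch.no_grad():
    hidden_states = model(**inputs).last_hidden_state  # (batch, frames, hidden_size)
print(hidden_states.shape)
```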
| [
-0.04859955981373787,
-0.10342255979776382,
-0.02895834669470787,
-0.038777027279138565,
0.02060541883111,
0.044247932732105255,
-0.028420738875865936,
-0.03783608227968216,
-0.025595732033252716,
-0.06587252765893936,
-0.019537953659892082,
-0.11213632673025131,
-0.00233003543689847,
0.020679837092757225,
-0.00351150194182992,
-0.02136952057480812,
0.029971599578857422,
0.052139874547719955,
-0.03917765989899635,
0.0052988543175160885,
0.06866680830717087,
0.061868663877248764,
0.07712356001138687,
0.034870900213718414,
0.04100599139928818,
0.10604498535394669,
-0.06052699685096741,
0.006270936690270901,
0.05281757935881615,
-0.04271961376070976,
0.07259766757488251,
0.017923817038536072,
0.10377314686775208,
0.04022301360964775,
0.003583432175219059,
-0.022819943726062775,
-0.0332769975066185,
-0.0281309075653553,
0.0012010684004053473,
-0.03438863530755043,
-0.01665494404733181,
0.005309604108333588,
-0.03023785911500454,
-0.0003564080107025802,
0.03814445808529854,
0.046147458255290985,
-0.043252404779195786,
0.010320596396923065,
-0.04745011031627655,
0.014304964803159237,
-0.07266674190759659,
-0.07466302067041397,
0.030248578637838364,
0.07428411394357681,
-0.10904576629400253,
0.01719174161553383,
0.009223636239767075,
0.028181862086057663,
0.08269157260656357,
-0.00646784994751215,
-0.03775953873991966,
-0.0538509301841259,
0.007743487600237131,
0.0032613263465464115,
-0.025212345644831657,
0.018098946660757065,
-0.05383528396487236,
0.0019922980573028326,
0.01384658645838499,
0.03468737378716469,
0.017527755349874496,
0.06953563541173935,
0.06465894728899002,
0.034826330840587616,
0.03814246878027916,
0.03429744765162468,
0.05762288719415665,
-0.008878808468580246,
0.06844724714756012,
-0.09140767902135849,
0.0288406815379858,
0.005175240337848663,
0.060925427824258804,
-0.07236599177122116,
0.05919088423252106,
-0.02644292823970318,
0.019613156095147133,
-0.04968183487653732,
-0.035768650472164154,
-0.011655101552605629,
-0.039130132645368576,
-0.004854219034314156,
0.023766379803419113,
0.009678658097982407,
-0.05620856583118439,
0.024968348443508148,
0.007341708987951279,
0.04425062611699104,
0.02360224910080433,
0.051007479429244995,
0.01391523890197277,
-0.04481115937232971,
-0.003423666348680854,
-0.03665945306420326,
-0.03762200474739075,
-0.10286945104598999,
0.05631367117166519,
0.052056703716516495,
0.07090429961681366,
-0.04485997185111046,
0.02681567892432213,
0.06259407848119736,
-0.07820327579975128,
-0.030964713543653488,
0.03857426717877388,
0.04204815998673439,
-0.01885957457125187,
-0.08269045501947403,
0.060875240713357925,
0.06548571586608887,
-0.15344636142253876,
-0.0651714950799942,
-0.05385496839880943,
-0.030939755961298943,
-0.02708452194929123,
-0.057586535811424255,
-0.04099521040916443,
1.3100953416536513e-33,
-0.020129268988966942,
0.047729142010211945,
-0.033280353993177414,
-0.005380588583648205,
0.028984181582927704,
-0.05939212441444397,
-0.042317893356084824,
-0.026525398716330528,
-0.03332436457276344,
-0.025622474029660225,
-0.009903112426400185,
0.015179130248725414,
-0.06707371771335602,
0.1065889298915863,
-0.06202302128076553,
-0.051878463476896286,
-0.07387644797563553,
-0.0009012044174596667,
-0.015645157545804977,
0.008728663437068462,
0.09445909410715103,
0.029884614050388336,
0.04635469242930412,
0.04000912979245186,
0.06122048944234848,
0.03778287023305893,
0.03558419644832611,
-0.11515000462532043,
0.05353517457842827,
0.0573803186416626,
-0.07812648266553879,
-0.07335248589515686,
0.04325982555747032,
-0.009743008762598038,
0.008618976920843124,
0.04750387743115425,
-0.0009219988714903593,
0.02978994883596897,
-0.07746203243732452,
-0.11646769940853119,
0.012110527604818344,
0.01609315350651741,
0.014498039148747921,
-0.09555795788764954,
-0.061952054500579834,
0.006499683018773794,
-0.032636601477861404,
0.03417389094829559,
0.015363592654466629,
-0.015885232016444206,
0.04415125399827957,
-0.007070635911077261,
-0.08164798468351364,
0.01329770963639021,
-0.008435916155576706,
-0.018715739250183105,
0.025787388905882835,
0.047703925520181656,
0.03804905340075493,
-0.03247334063053131,
0.0324886329472065,
0.01641138829290867,
0.08153251558542252,
-0.018285682424902916,
0.05349200218915939,
-0.007846292108297348,
-0.01938195526599884,
0.009509415365755558,
0.06759382784366608,
-0.056179728358983994,
-0.02883138880133629,
-0.019805269315838814,
0.0049586221575737,
0.06052997708320618,
-0.027721133083105087,
0.028098227456212044,
0.044691599905490875,
-0.13386906683444977,
-0.014116154052317142,
0.07715418934822083,
-0.03111642599105835,
0.008298247121274471,
0.01252913847565651,
-0.06290235370397568,
-0.000987434177659452,
-0.04531685635447502,
-0.004484555684030056,
-0.12939892709255219,
-0.01853051967918873,
0.0344664603471756,
0.0017663403414189816,
0.04394974187016487,
-0.050457682460546494,
-0.00024525969638489187,
-0.06133328750729561,
-3.449368051659028e-33,
0.0557386577129364,
0.06889495998620987,
-0.0058447555638849735,
0.0439138188958168,
-0.05345539376139641,
0.037204671651124954,
0.06374293565750122,
0.09977985173463821,
0.030316784977912903,
-0.08003686368465424,
0.0744640901684761,
-0.0651727020740509,
0.0697639212012291,
0.04345427826046944,
0.03703939914703369,
-0.030449278652668,
-0.05419628322124481,
0.04709717258810997,
0.09183988720178604,
0.12966255843639374,
-0.018987908959388733,
0.019055074080824852,
-0.1378576159477234,
0.1030336543917656,
-0.0679565891623497,
0.028470272198319435,
-0.08536498248577118,
0.06205219775438309,
0.08047772198915482,
-0.007552166935056448,
-0.06765700876712799,
0.016476497054100037,
-0.034166041761636734,
0.00591465225443244,
-0.02696799859404564,
0.011985626071691513,
0.037958092987537384,
-0.038159605115652084,
0.0019595674239099026,
0.030380837619304657,
0.05945523455739021,
0.06711995601654053,
-0.09879128634929657,
-0.05912930518388748,
0.00857512466609478,
-0.057393740862607956,
-0.023915186524391174,
0.03528377786278725,
-0.06724081933498383,
0.0012249448336660862,
0.04745661839842796,
-0.053355008363723755,
-0.015305731445550919,
0.01389380730688572,
-0.049873098731040955,
-0.0377444252371788,
-0.007986491546034813,
-0.07569408416748047,
-0.0164814256131649,
-0.010942653752863407,
-0.08402969688177109,
0.004473256878554821,
-0.028061671182513237,
-0.07788359373807907,
0.026118909940123558,
-0.0004843807255383581,
-0.02714422345161438,
-0.005106991156935692,
0.021238410845398903,
-0.012063861824572086,
0.010246180929243565,
0.041558243334293365,
-0.04410175234079361,
0.03770003095269203,
-0.05307745188474655,
-0.01750068925321102,
-0.061150964349508286,
-0.04809761047363281,
-0.042195189744234085,
-0.11664760112762451,
-0.044773075729608536,
0.04519634321331978,
0.04327775537967682,
0.07321958243846893,
0.09468331187963486,
0.12003690004348755,
0.03069409541785717,
-0.015033455565571785,
-0.02503335103392601,
0.05118623003363609,
-0.00727918092161417,
0.053434643894433975,
0.04320736229419708,
0.12176009267568588,
-0.02168840728700161,
-5.7738720471434135e-8,
-0.06331680715084076,
0.029403764754533768,
-0.0064409407787024975,
-0.01724516786634922,
-0.01424513477832079,
-0.07877317816019058,
0.011988755315542221,
0.0011169802164658904,
-0.01938643306493759,
-0.0036813037004321814,
-0.000862759305164218,
-0.03160607069730759,
-0.06986555457115173,
-0.016820473596453667,
-0.033960822969675064,
0.06418414413928986,
0.013360702432692051,
0.1224057674407959,
-0.00002728624349401798,
-0.05680195987224579,
0.008214222267270088,
0.01741105131804943,
0.03684065118432045,
0.013510806486010551,
0.0520772747695446,
-0.01077132299542427,
0.023097775876522064,
0.05409138277173042,
-0.022466791793704033,
-0.08118142187595367,
-0.03482406958937645,
0.07891099154949188,
-0.0596206970512867,
-0.04591803252696991,
0.038853440433740616,
0.05539708212018013,
-0.028584858402609825,
-0.06667693704366684,
-0.046479132026433945,
0.05659273639321327,
0.0761946588754654,
0.10190360993146896,
-0.12122273445129395,
-0.05232357606291771,
0.05993811786174774,
0.0014483246486634016,
-0.04447183012962341,
-0.08477714657783508,
0.03022841550409794,
0.012860890477895737,
0.0007302971207536757,
0.05508623644709587,
-0.06679720431566238,
0.029362643137574196,
0.0352286770939827,
0.06451798975467682,
-0.029848426580429077,
0.036490775644779205,
0.023759832605719566,
0.025982633233070374,
0.018299875780940056,
0.02113037183880806,
-0.0007748560747131705,
-0.03790425509214401
] |
Helsinki-NLP/opus-mt-eo-en | 894c5ff7f7871951289933e74f9b5de7b996903d | 2021-09-09T21:40:53.000Z | [
"pytorch",
"marian",
"text2text-generation",
"eo",
"en",
"transformers",
"translation",
"license:apache-2.0",
"autotrain_compatible"
] | translation | false | Helsinki-NLP | null | Helsinki-NLP/opus-mt-eo-en | 2,851 | null | transformers | ---
tags:
- translation
license: apache-2.0
---
### opus-mt-eo-en
* source languages: eo
* target languages: en
* OPUS readme: [eo-en](https://github.com/Helsinki-NLP/OPUS-MT-train/blob/master/models/eo-en/README.md)
* dataset: opus
* model: transformer-align
* pre-processing: normalization + SentencePiece
* download original weights: [opus-2019-12-18.zip](https://object.pouta.csc.fi/OPUS-MT-models/eo-en/opus-2019-12-18.zip)
* test set translations: [opus-2019-12-18.test.txt](https://object.pouta.csc.fi/OPUS-MT-models/eo-en/opus-2019-12-18.test.txt)
* test set scores: [opus-2019-12-18.eval.txt](https://object.pouta.csc.fi/OPUS-MT-models/eo-en/opus-2019-12-18.eval.txt)
## Benchmarks
| testset | BLEU | chr-F |
|-----------------------|-------|-------|
| Tatoeba.eo.en | 54.8 | 0.694 |
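## Usage
A minimal translation sketch with the `transformers` MarianMT classes (the Esperanto example sentence is arbitrary):
```python
from transformers import MarianMTModel, MarianTokenizer
model_name = "Helsinki-NLP/opus-mt-eo-en"
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)
batch = tokenizer(["Saluton, kiel vi fartas?"], return_tensors="pt", padding=True)
generated = model.generate(**batch)
print(tokenizer.batch_decode(generated, skip_special_tokens=True))  # English translation of the input
```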
| [
-0.05767179653048515,
-0.0071457489393651485,
0.018938593566417694,
-0.013200829736888409,
0.011194386519491673,
0.09165653586387634,
-0.05428273230791092,
0.024424854665994644,
0.008699494414031506,
-0.004371361341327429,
0.015099119395017624,
-0.04824238270521164,
-0.08136693388223648,
-0.03731327876448631,
-0.028094196692109108,
0.002265069866552949,
-0.02233617752790451,
0.08636891841888428,
-0.07268790900707245,
-0.010206702165305614,
0.05603700876235962,
0.020013459026813507,
0.03036072663962841,
-0.007072670850902796,
0.10006875544786453,
0.07109376788139343,
-0.09373178333044052,
0.006767794024199247,
0.09266659617424011,
-0.04180125519633293,
-0.004485289100557566,
0.008283627219498158,
0.0684177502989769,
0.07952578365802765,
0.05570131540298462,
0.07237359881401062,
0.0010368145303800702,
-0.0737360492348671,
-0.03696860373020172,
0.040862880647182465,
0.04369013011455536,
0.04660974070429802,
-0.03770938888192177,
-0.008589879609644413,
0.051442135125398636,
0.0005187886999920011,
-0.07431337237358093,
0.031794339418411255,
0.007668348960578442,
-0.008908621966838837,
-0.1181703731417656,
-0.017109276726841927,
0.011413834989070892,
0.06579278409481049,
-0.06754229217767715,
0.03498801589012146,
0.044720347970724106,
-0.009587154723703861,
0.07119344919919968,
-0.022244997322559357,
-0.12366244941949844,
-0.031154204159975052,
-0.10031259804964066,
0.017594998702406883,
-0.008752149529755116,
0.0021604287903755903,
0.027183540165424347,
0.05522015690803528,
-0.06325504183769226,
0.036846600472927094,
-0.026066729798913002,
-0.013912388123571873,
0.00747943390160799,
0.05626495182514191,
-0.011566716246306896,
0.05130944028496742,
0.0005685774958692491,
-0.04836395010352135,
-0.0037801258731633425,
-0.08137866109609604,
0.002334882039576769,
-0.06135772168636322,
0.06622490286827087,
-0.014696522615849972,
0.06819946318864822,
0.02277703955769539,
0.016935167834162712,
-0.00037121796049177647,
-0.007722480222582817,
0.048026490956544876,
-0.06176222488284111,
-0.043015170842409134,
0.00912370067089796,
0.023116357624530792,
0.009418291039764881,
0.0543019138276577,
0.010460927151143551,
0.053869813680648804,
0.014587176963686943,
0.05780896916985512,
0.023829260841012,
0.02917509153485298,
0.06396322697401047,
-0.04236556962132454,
-0.11751748621463776,
-0.03200250491499901,
0.08054021000862122,
0.04894950985908508,
0.008203098550438881,
-0.08291862159967422,
0.012579081580042839,
-0.026828529313206673,
-0.02399960532784462,
-0.10199250280857086,
0.02467084676027298,
-0.05480306223034859,
-0.004911457188427448,
-0.01509547047317028,
-0.007674541790038347,
0.044149722903966904,
-0.028839459642767906,
-0.021287664771080017,
-0.03475296497344971,
-0.005332179833203554,
-0.04406477138400078,
-0.06998543441295624,
0.024240009486675262,
1.816319504276873e-33,
0.061937302350997925,
-0.01738007552921772,
-0.01710939034819603,
-0.0015908617060631514,
-0.05999807268381119,
-0.004182473756372929,
-0.026487672701478004,
0.030884305015206337,
-0.12381847947835922,
0.0002163289609597996,
-0.01548661570996046,
-0.0024577428121119738,
-0.08923213183879852,
0.02813088707625866,
-0.022627325728535652,
0.021213505417108536,
0.06919866800308228,
0.02556212991476059,
0.04745102673768997,
0.040739595890045166,
0.07472798228263855,
0.04009611904621124,
-0.006961751729249954,
-0.04226773977279663,
-0.057210832834243774,
0.07288753986358643,
0.0202096626162529,
-0.11782795935869217,
-0.10879677534103394,
0.02321774885058403,
-0.10232517123222351,
0.03196914121508598,
-0.014972064644098282,
0.012131690979003906,
-0.009255404584109783,
-0.024970196187496185,
-0.018798617646098137,
-0.007310913875699043,
-0.03193733096122742,
-0.08774787187576294,
0.011378404684364796,
0.012477366253733635,
-0.01961641199886799,
-0.05294507369399071,
0.03915436938405037,
0.0048563238233327866,
0.007752919569611549,
0.002256261184811592,
0.1265290528535843,
0.002180266659706831,
0.004768184386193752,
0.06197940185666084,
-0.06981025636196136,
0.0002996626717504114,
0.04142835736274719,
0.1058315634727478,
0.06567996740341187,
0.032186854630708694,
0.041490551084280014,
0.036548715084791183,
0.055502332746982574,
0.04356596991419792,
0.01964765414595604,
0.008971866220235825,
0.10787717998027802,
-0.016004785895347595,
-0.04935538396239281,
-0.07967336475849152,
0.07222407311201096,
0.03752495348453522,
-0.13985690474510193,
-0.05720791593194008,
0.06798238307237625,
0.09156344830989838,
0.0610407255589962,
-0.027208782732486725,
-0.028368011116981506,
-0.022914130240678787,
-0.010471668094396591,
-0.029761850833892822,
-0.059852201491594315,
0.013735353946685791,
-0.007803737185895443,
-0.011642763391137123,
-0.026714138686656952,
0.006122112274169922,
0.04751162976026535,
-0.053471099585294724,
-0.030040353536605835,
0.008333083242177963,
0.05431240424513817,
0.03988035395741463,
-0.10919960588216782,
-0.008237065747380257,
0.0048153456300497055,
-2.2949137640195133e-33,
0.10391543060541153,
0.01171936932951212,
-0.05109579116106033,
0.06291157752275467,
-0.029982084408402443,
-0.06773839145898819,
0.0029036500491201878,
0.11485881358385086,
0.07009376585483551,
0.040321797132492065,
0.08077870309352875,
-0.15000255405902863,
0.03722497820854187,
-0.07693313807249069,
0.06135425716638565,
-0.043969329446554184,
-0.013255186378955841,
0.022420980036258698,
0.03409513086080551,
0.030895691365003586,
0.007948791608214378,
0.07393347471952438,
-0.03404951095581055,
0.08346406370401382,
-0.005305813159793615,
-0.018125707283616066,
-0.023317841812968254,
0.06661447882652283,
-0.003539843950420618,
0.001240859623067081,
0.00210511009208858,
0.0014791861176490784,
-0.12283868342638016,
-0.014376994222402573,
-0.07881741225719452,
0.043652597814798355,
0.035911619663238525,
0.039268381893634796,
0.044400863349437714,
0.06294631958007812,
0.06534775346517563,
0.05980576574802399,
-0.0365414135158062,
-0.048733457922935486,
0.0233781598508358,
-0.02188463695347309,
-0.007498373743146658,
0.01595568098127842,
0.0014971022028476,
-0.07128744572401047,
0.024912679567933083,
0.005524162668734789,
-0.08864494413137436,
-0.029880110174417496,
-0.001887469319626689,
-0.0780656635761261,
-0.005292593501508236,
-0.14393934607505798,
-0.05574765056371689,
-0.024148084223270416,
-0.014191091060638428,
0.0340195931494236,
-0.044524747878313065,
-0.07448600232601166,
0.021425003185868263,
-0.005227793473750353,
0.03867219761013985,
0.01467357762157917,
0.010436266660690308,
0.06114739179611206,
-0.023235952481627464,
-0.0761096253991127,
0.06817686557769775,
0.07964994758367538,
0.005441675428301096,
-0.040278110653162,
-0.035889822989702225,
0.04091697558760643,
0.058997467160224915,
-0.0812409296631813,
-0.022479020059108734,
0.03215494379401207,
0.0027513697277754545,
0.04598190262913704,
0.10121966898441315,
0.10272002965211868,
0.0276227705180645,
-0.014242269098758698,
-0.009434571489691734,
0.06354966759681702,
0.02160077542066574,
0.025235923007130623,
0.01280185766518116,
0.11862780898809433,
-0.002511977916583419,
-4.948559961803767e-8,
-0.09844212234020233,
-0.00014933747297618538,
-0.09315917640924454,
0.03846409171819687,
-0.03550993651151657,
-0.06754634529352188,
-0.05848601087927818,
-0.021165210753679276,
-0.03876621276140213,
-0.030708184465765953,
0.004439048934727907,
0.005861198529601097,
-0.07183166593313217,
-0.008681168779730797,
-0.04761195555329323,
0.02168695628643036,
-0.019775914028286934,
0.08323240280151367,
-0.024526890367269516,
-0.03077246993780136,
0.05320584774017334,
0.04226221889257431,
0.05024310201406479,
-0.08545607328414917,
0.0003713309997692704,
0.0036942693404853344,
-0.031524352729320526,
0.03086915798485279,
-0.00415272731333971,
0.015188649296760559,
0.04429762810468674,
0.031708214432001114,
-0.01576126366853714,
-0.09113965928554535,
0.04234347119927406,
0.06494564563035965,
0.0023023923859000206,
-0.032563094049692154,
-0.007856788113713264,
0.046336643397808075,
0.09553901851177216,
0.03221851587295532,
-0.11324060708284378,
0.009971745312213898,
0.03695787861943245,
-0.028215885162353516,
-0.053242750465869904,
-0.033182911574840546,
0.025737283751368523,
-0.06831590831279755,
0.06525669991970062,
-0.07416318356990814,
-0.059393879026174545,
0.014227915555238724,
0.02153501659631729,
0.00990675762295723,
0.06359028816223145,
-0.022335480898618698,
0.019484156742691994,
-0.014739453792572021,
0.04389707371592522,
-0.02930384874343872,
-0.021130159497261047,
-0.006997320335358381
] |
audeering/wav2vec2-large-robust-12-ft-emotion-msp-dim | 6aeb7661bcf364e2dfe8ac0d00f919ba44e4c973 | 2022-05-06T13:48:50.000Z | [
"pytorch",
"wav2vec2",
"en",
"dataset:msp-podcast",
"arxiv:2203.07378",
"transformers",
"speech",
"audio",
"audio-classification",
"emotion-recognition",
"license:cc-by-nc-sa-4.0"
] | audio-classification | false | audeering | null | audeering/wav2vec2-large-robust-12-ft-emotion-msp-dim | 2,851 | 4 | transformers | ---
language: en
datasets:
- msp-podcast
inference: true
tags:
- speech
- audio
- wav2vec2
- audio-classification
- emotion-recognition
license: cc-by-nc-sa-4.0
---
# Model for Dimensional Speech Emotion Recognition based on Wav2vec 2.0
The model expects a raw audio signal as input and outputs predictions for arousal, dominance and valence in a range of approximately 0...1. In addition, it also provides the pooled states of the last transformer layer. The model was created by fine-tuning [Wav2Vec2-Large-Robust](https://huggingface.co/facebook/wav2vec2-large-robust) on [MSP-Podcast](https://ecs.utdallas.edu/research/researchlabs/msp-lab/MSP-Podcast.html) (v1.7). The model was pruned from 24 to 12 transformer layers before fine-tuning. An [ONNX](https://onnx.ai/) export of the model is available from [doi:10.5281/zenodo.6221127](https://zenodo.org/record/6221127). Further details are given in the associated [paper](https://arxiv.org/abs/2203.07378).
# Usage
```python
import numpy as np
import torch
import torch.nn as nn
from transformers import Wav2Vec2Processor
from transformers.models.wav2vec2.modeling_wav2vec2 import (
Wav2Vec2Model,
Wav2Vec2PreTrainedModel,
)
class RegressionHead(nn.Module):
r"""Classification head."""
def __init__(self, config):
super().__init__()
self.dense = nn.Linear(config.hidden_size, config.hidden_size)
self.dropout = nn.Dropout(config.final_dropout)
self.out_proj = nn.Linear(config.hidden_size, config.num_labels)
def forward(self, features, **kwargs):
x = features
x = self.dropout(x)
x = self.dense(x)
x = torch.tanh(x)
x = self.dropout(x)
x = self.out_proj(x)
return x
class EmotionModel(Wav2Vec2PreTrainedModel):
r"""Speech emotion classifier."""
def __init__(self, config):
super().__init__(config)
self.config = config
self.wav2vec2 = Wav2Vec2Model(config)
self.classifier = RegressionHead(config)
self.init_weights()
def forward(
self,
input_values,
):
outputs = self.wav2vec2(input_values)
hidden_states = outputs[0]
hidden_states = torch.mean(hidden_states, dim=1)
logits = self.classifier(hidden_states)
return hidden_states, logits
# load model from hub
device = 'cpu'
model_name = 'audeering/wav2vec2-large-robust-12-ft-emotion-msp-dim'
processor = Wav2Vec2Processor.from_pretrained(model_name)
model = EmotionModel.from_pretrained(model_name)
# dummy signal
sampling_rate = 16000
signal = np.zeros((1, sampling_rate), dtype=np.float32)
def process_func(
x: np.ndarray,
sampling_rate: int,
embeddings: bool = False,
) -> np.ndarray:
r"""Predict emotions or extract embeddings from raw audio signal."""
# run through processor to normalize signal
# always returns a batch, so we just get the first entry
# then we put it on the device
y = processor(x, sampling_rate=sampling_rate)
y = y['input_values'][0]
y = torch.from_numpy(y).to(device)
# run through model
with torch.no_grad():
y = model(y)[0 if embeddings else 1]
# convert to numpy
y = y.detach().cpu().numpy()
return y
process_func(signal, sampling_rate)
# Arousal dominance valence
# [[0.5460759 0.6062269 0.4043165]]
process_func(signal, sampling_rate, embeddings=True)
# Pooled hidden states of last transformer layer
# [[-0.00752167 0.0065819 -0.00746339 ... 0.00663631 0.00848747
# 0.00599209]]
```
| [
-0.11698535084724426,
-0.06643679738044739,
0.009443036280572414,
0.009740937501192093,
0.059320222586393356,
0.041056666523218155,
-0.08631528913974762,
0.02060861699283123,
0.01416265219449997,
-0.046779170632362366,
-0.06685478240251541,
-0.05463087558746338,
-0.000582388136535883,
0.01865394040942192,
-0.06217591464519501,
0.011287040077149868,
0.021318567916750908,
0.010660326108336449,
-0.06177494302392006,
0.022702664136886597,
0.0796891450881958,
0.08878137171268463,
0.03278123214840889,
-0.03698262944817543,
0.06601298600435257,
0.009738887660205364,
0.01680215448141098,
0.003885079873725772,
0.04209815710783005,
-0.033167943358421326,
0.039231907576322556,
0.02395949698984623,
0.06893046945333481,
0.007058154325932264,
-0.040714360773563385,
-0.022714940831065178,
-0.023094406351447105,
-0.030561745166778564,
-0.053143445402383804,
-0.03165307641029358,
0.002587769879028201,
0.02564588375389576,
-0.0072580925188958645,
-0.07043951749801636,
0.006610359530895948,
-0.01656658761203289,
-0.009907115250825882,
-0.05502945929765701,
-0.03228136524558067,
0.029715994372963905,
-0.008103771135210991,
-0.009497509337961674,
-0.05202533304691315,
0.13184472918510437,
-0.06349585950374603,
0.03219059854745865,
-0.026640867814421654,
-0.01102543342858553,
0.04562417417764664,
-0.025955529883503914,
-0.036535754799842834,
-0.0208425372838974,
0.00003084724448854104,
-0.04498323053121567,
-0.028876494616270065,
-0.016710640862584114,
-0.05579904839396477,
0.02047281339764595,
0.02538476325571537,
-0.053433049470186234,
-0.10537746548652649,
0.010680011473596096,
-0.005433029495179653,
-0.05435534939169884,
0.08426371961832047,
-0.0026968992315232754,
0.12942926585674286,
0.0012621806235983968,
0.07109201699495316,
-0.01903570257127285,
0.0160563625395298,
-0.037574250251054764,
0.02158535085618496,
-0.08191082626581192,
0.029062315821647644,
-0.026480134576559067,
-0.0070680477656424046,
-0.07309184968471527,
-0.019884608685970306,
0.052271533757448196,
-0.062277283519506454,
-0.03020971454679966,
0.021023228764533997,
0.014897680841386318,
0.05201883986592293,
0.030934305861592293,
-0.02986413985490799,
0.10065493732690811,
-0.015415691770613194,
0.03950070962309837,
0.011932941153645515,
-0.02501177042722702,
-0.02375856041908264,
-0.057872965931892395,
-0.023481274023652077,
-0.09374666959047318,
-0.026952408254146576,
0.026528287678956985,
0.05526319518685341,
-0.058737628161907196,
-0.04099620506167412,
0.045525189489126205,
-0.023720914497971535,
-0.05620019510388374,
0.13011713325977325,
0.035472482442855835,
-0.03942069783806801,
-0.017042987048625946,
-0.0007282051374204457,
0.01758403703570366,
-0.014495396986603737,
-0.007076799403876066,
-0.019400794059038162,
-0.0001054832391673699,
0.03869950771331787,
0.057081177830696106,
-0.06708858162164688,
1.1394637959306536e-33,
-0.02273363247513771,
0.018350113183259964,
0.0018988787196576595,
0.0621618926525116,
0.01872510090470314,
-0.01903076097369194,
0.019512975588440895,
-0.004187619313597679,
-0.009371712803840637,
0.027525639161467552,
-0.12989716231822968,
0.07447567582130432,
-0.045716091990470886,
0.07649332284927368,
-0.04564071074128151,
-0.04558629170060158,
-0.03248896822333336,
0.00861384253948927,
-0.033685822039842606,
-0.013580606319010258,
0.0838807225227356,
0.0200297050178051,
0.042266376316547394,
0.022693129256367683,
-0.013511654920876026,
0.0007672474021092057,
0.06991075724363327,
-0.06612320244312286,
0.03818101808428764,
0.02567026950418949,
-0.11150772124528885,
-0.05059795826673508,
0.01369828823953867,
-0.09534727036952972,
0.06378568708896637,
0.031165648251771927,
-0.04423928260803223,
0.03265157341957092,
-0.021320505067706108,
-0.07936999201774597,
-0.0013715419918298721,
0.01829787716269493,
0.03134633228182793,
-0.020895367488265038,
-0.05763593316078186,
-0.030198631808161736,
-0.007251830771565437,
0.03731733188033104,
-0.0030164492782205343,
0.021086435765028,
0.03329239785671234,
0.007884101010859013,
-0.032976184040308,
0.05093856155872345,
-0.010043369606137276,
-0.03849712759256363,
0.11317688971757889,
0.030758125707507133,
0.017001645639538765,
0.0011686868965625763,
0.01845860294997692,
0.02733789198100567,
0.04574321210384369,
-0.060179390013217926,
0.0813344419002533,
0.011461741290986538,
-0.03921956568956375,
-0.023364078253507614,
-0.025303933769464493,
-0.0028731191996484995,
-0.017024099826812744,
0.06099579855799675,
0.08247987926006317,
-0.06125197559595108,
0.019168579950928688,
-0.036193061619997025,
0.01642676070332527,
-0.02904491126537323,
0.00854511559009552,
0.014133420772850513,
-0.07604172825813293,
0.051255520433187485,
-0.059396352618932724,
-0.04751114919781685,
-0.04908791556954384,
-0.048586830496788025,
-0.03681579977273941,
-0.125482976436615,
-0.018401233479380608,
0.04465954750776291,
-0.024747474119067192,
0.04909142851829529,
-0.0174407958984375,
-0.01738869957625866,
-0.016254952177405357,
-1.692904168713049e-33,
0.044923700392246246,
0.056461889296770096,
-0.08651269227266312,
0.04877680167555809,
-0.048719801008701324,
-0.006317539606243372,
0.08009140193462372,
0.13161437213420868,
-0.027198422700166702,
-0.08479878306388855,
0.14305421710014343,
-0.05369843170046806,
0.10575433820486069,
0.0419427715241909,
0.08406805992126465,
-0.041381798684597015,
-0.050699565559625626,
-0.0024387920275330544,
0.06744346022605896,
0.054174281656742096,
0.09421777725219727,
0.06954008340835571,
-0.04533018544316292,
0.09185929596424103,
-0.07165143638849258,
-0.005755114369094372,
-0.03425964340567589,
0.0597088448703289,
0.06802055239677429,
-0.006794521119445562,
-0.06786367297172546,
-0.006161349825561047,
-0.10711976140737534,
0.02652949094772339,
-0.004256507847458124,
0.021644292399287224,
0.009583829902112484,
-0.08243273198604584,
0.01537137757986784,
-0.006596570368856192,
0.058007482439279556,
0.06322907656431198,
-0.11434297263622284,
0.009113555774092674,
0.004202747717499733,
-0.06669105589389801,
-0.0065862564370036125,
0.016246726736426353,
0.038751568645238876,
-0.035745348781347275,
0.07068710029125214,
-0.021642988547682762,
0.017387108877301216,
0.04644786939024925,
0.021701646968722343,
-0.04581868276000023,
-0.04063663259148598,
-0.08254151046276093,
0.04997136443853378,
-0.020176047459244728,
-0.009249156340956688,
0.0015287060523405671,
-0.04486299678683281,
-0.12657500803470612,
-0.01146196573972702,
0.09799597412347794,
0.018703797832131386,
0.041866786777973175,
0.023458318784832954,
0.05124712362885475,
0.02133762277662754,
0.025717925280332565,
0.0791650041937828,
-0.01964394748210907,
-0.025981051847338676,
-0.0707775205373764,
-0.06853926926851273,
-0.040148716419935226,
-0.007913089357316494,
-0.09940499812364578,
-0.10344298183917999,
0.0329875573515892,
0.07174313813447952,
0.007914871908724308,
0.07492093741893768,
0.08084317296743393,
0.08922678232192993,
0.050509922206401825,
-0.01746228151023388,
0.03151913359761238,
-0.047856129705905914,
0.06841032952070236,
0.015780624002218246,
0.08847364038228989,
0.03438596427440643,
-5.532018931830862e-8,
-0.11200668662786484,
0.05606131628155708,
0.0033885049633681774,
-0.04732712730765343,
-0.012893136590719223,
-0.0666651576757431,
0.026441801339387894,
0.010556140914559364,
-0.02283240295946598,
0.052133046090602875,
0.044720496982336044,
-0.03440463915467262,
-0.022983621805906296,
0.012804508209228516,
0.04110930487513542,
0.05707342177629471,
0.01960778422653675,
0.09856726974248886,
-0.004829715937376022,
-0.1653577983379364,
0.04615947976708412,
0.042036671191453934,
0.03354651853442192,
-0.0018260474316775799,
0.0891651064157486,
-0.019926859065890312,
-0.03582198917865753,
0.038630399852991104,
-0.039847444742918015,
-0.01695547066628933,
-0.018805120140314102,
0.02215285412967205,
-0.044570621103048325,
-0.07805624604225159,
0.03134899213910103,
0.07587284594774246,
-0.0014859134098514915,
-0.034533996134996414,
-0.0071268328465521336,
0.03729467839002609,
-0.022032808512449265,
0.08445414900779724,
-0.07838187366724014,
0.0056630526669323444,
0.02070586569607258,
-0.0005631585954688489,
0.041377827525138855,
-0.10501925647258759,
0.007730105426162481,
0.08535503596067429,
0.01630077138543129,
0.05846702307462692,
-0.030409207567572594,
0.043347157537937164,
0.039968933910131454,
0.028962641954421997,
-0.04894980043172836,
-0.013097399845719337,
0.004006651230156422,
0.00225856713950634,
0.01579795964062214,
0.0689905434846878,
-0.0904073566198349,
0.02556672878563404
] |
kykim/albert-kor-base | 04e79bcdfe860f251165a93dc685f9544bc597c0 | 2021-01-22T00:27:49.000Z | [
"pytorch",
"tf",
"albert",
"fill-mask",
"ko",
"transformers",
"autotrain_compatible"
] | fill-mask | false | kykim | null | kykim/albert-kor-base | 2,844 | 2 | transformers | ---
language: ko
---
# Albert base model for Korean
* Trained on a 70GB Korean text dataset with a 42,000 lower-cased subword vocabulary
* Check the model performance and other Korean language models in the [github repository](https://github.com/kiyoungkim1/LM-kor)
```python
from transformers import BertTokenizerFast, AlbertModel
tokenizer_albert = BertTokenizerFast.from_pretrained("kykim/albert-kor-base")
model_albert = AlbertModel.from_pretrained("kykim/albert-kor-base")
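# Minimal usage sketch (assumes PyTorch; the Korean sentence is an arbitrary example)
inputs = tokenizer_albert("한국어 문장을 입력합니다.", return_tensors="pt")
outputs = model_albert(**inputs)
print(outputs.last_hidden_state.shape)  # (batch_size, sequence_length, hidden_size)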
``` | [
-0.07077708840370178,
-0.03815434128046036,
0.050078678876161575,
0.013022119179368019,
-0.0024886769242584705,
0.005204387940466404,
-0.04083340987563133,
0.053405750542879105,
-0.005839066114276648,
-0.034792460501194,
0.06456772238016129,
-0.03808315843343735,
0.04346691444516182,
-0.012926934286952019,
0.05784299597144127,
0.05594700574874878,
-0.006647530477494001,
0.04867016524076462,
-0.11780564486980438,
-0.10644717514514923,
0.07696656137704849,
-0.010216360911726952,
0.021642763167619705,
-0.03608407825231552,
0.09241572767496109,
0.007437606807798147,
0.03646443039178848,
0.010638398118317127,
0.07690946757793427,
0.047961894422769547,
0.02597205899655819,
0.0562693327665329,
0.052060551941394806,
0.08129473775625229,
0.056061048060655594,
0.025684842839837074,
-0.11054778844118118,
-0.02669050544500351,
0.0565267838537693,
0.020976649597287178,
-0.0008143410086631775,
-0.04202277213335037,
0.01182111818343401,
-0.031550634652376175,
0.09182038903236389,
0.01506748329848051,
-0.06869754940271378,
-0.014833176508545876,
-0.04022834450006485,
-0.005305112339556217,
-0.03418784216046333,
-0.012165398336946964,
0.05473990738391876,
0.08976687490940094,
-0.022502798587083817,
-0.01548384502530098,
-0.04662718623876572,
0.027505097910761833,
0.05522653087973595,
-0.04697069898247719,
-0.0920337662100792,
-0.043248776346445084,
-0.00887846015393734,
0.017561523243784904,
-0.10987450182437897,
0.027435889467597008,
0.010651183314621449,
0.026131022721529007,
0.03762637451291084,
0.013155365362763405,
-0.000001791425916053413,
0.06313209235668182,
0.008843698538839817,
0.11469243466854095,
-0.022748177871108055,
-0.0753944143652916,
0.12148160487413406,
-0.026299595832824707,
0.051414526998996735,
-0.08512755483388901,
0.013641845434904099,
-0.029190145432949066,
0.007441209629178047,
-0.012742435559630394,
0.006496530491858721,
-0.05725909769535065,
0.030797354876995087,
-0.0246614757925272,
-0.04341591149568558,
-0.002535952255129814,
-0.006904254667460918,
-0.10698002576828003,
0.07457254827022552,
-0.052416156977415085,
-0.09694817662239075,
0.043018653988838196,
-0.020863661542534828,
0.057280730456113815,
-0.012886826880276203,
0.040893156081438065,
0.0011823574313893914,
0.0693657323718071,
0.08847035467624664,
-0.015139386057853699,
-0.07739660143852234,
-0.061908602714538574,
0.03898796811699867,
0.0059186234138906,
0.013095173984766006,
0.030530180782079697,
0.04775748401880264,
-0.050126999616622925,
-0.024328045547008514,
-0.06127826124429703,
0.053746547549963,
-0.0611332543194294,
0.0008343428489752114,
-0.022330712527036667,
0.008712619543075562,
0.0948750451207161,
0.025309942662715912,
-0.03659464418888092,
-0.0035877872724086046,
0.021411921828985214,
-0.0641210675239563,
0.027301426976919174,
0.03010094352066517,
3.726508844622629e-33,
-0.0014945078874006867,
0.02165396884083748,
0.034218914806842804,
-0.012568640522658825,
-0.07295889407396317,
-0.08937032520771027,
0.007445788476616144,
0.024642910808324814,
-0.09591073542833328,
-0.00872642919421196,
-0.09133417904376984,
0.0668797492980957,
-0.11405114829540253,
0.015427148900926113,
-0.03255792707204819,
0.008164568804204464,
-0.031175963580608368,
0.03793859854340553,
0.011927429586648941,
0.08039659261703491,
0.10710915178060532,
0.03265559673309326,
0.02442886307835579,
-0.02898118644952774,
-0.05493272468447685,
-0.01010496448725462,
0.090256467461586,
-0.15160104632377625,
-0.019012443721294403,
0.031514234840869904,
-0.06341629475355148,
0.0014609877252951264,
-0.018060576170682907,
0.04724687337875366,
-0.04466778784990311,
-0.03781457990407944,
-0.02654055505990982,
-0.05054612457752228,
-0.048943281173706055,
-0.11786289513111115,
-0.014787736348807812,
0.01938115619122982,
-0.021105896681547165,
0.005911329295486212,
-0.01145047415047884,
0.03270236402750015,
-0.014311350882053375,
-0.04340016469359398,
0.06077941134572029,
0.03693731874227524,
0.058858517557382584,
0.014759693294763565,
-0.08183597028255463,
0.05284455791115761,
0.0756622776389122,
0.014833400957286358,
0.0749034583568573,
-0.009555095806717873,
0.08764879405498505,
-0.0046870470978319645,
-0.061762817203998566,
0.025904351845383644,
0.07088375836610794,
0.09254303574562073,
0.0982808768749237,
-0.06405903398990631,
-0.0034195641055703163,
-0.025259817019104958,
-0.0725354254245758,
-0.017924217507243156,
-0.02142454870045185,
-0.04634694755077362,
0.041675567626953125,
0.033832840621471405,
-0.021184401586651802,
-0.04517059400677681,
0.0029779067263007164,
-0.060089368373155594,
-0.08076587319374084,
0.010712096467614174,
-0.0299974475055933,
-0.03223829343914986,
-0.02327217161655426,
-0.013398163951933384,
-0.02499980479478836,
-0.0160572100430727,
-0.0005397899658419192,
-0.06766284257173538,
-0.009442486800253391,
-0.027660930529236794,
-0.013351510278880596,
0.004901982378214598,
-0.032286956906318665,
-0.015149092301726341,
-0.05416266992688179,
-4.2385563878927255e-33,
0.020930280908942223,
0.016223523765802383,
-0.017666969448328018,
0.04975588992238045,
-0.0471796877682209,
-0.03378540650010109,
0.04841852933168411,
0.18623659014701843,
0.005298767238855362,
0.011578586883842945,
0.025786438956856728,
-0.05775913968682289,
0.04633148014545441,
-0.06736548990011215,
0.1207854151725769,
-0.009586567059159279,
0.03361721709370613,
0.08678058534860611,
0.04287507012486458,
0.0773031935095787,
-0.01781701296567917,
0.03655567765235901,
-0.09439165145158768,
0.05392458289861679,
-0.054616447538137436,
0.08018691837787628,
-0.05177317187190056,
0.07884353399276733,
0.013182571157813072,
0.02133583277463913,
-0.01323232427239418,
0.0413433238863945,
-0.021541250869631767,
0.0346824936568737,
-0.07637781649827957,
-0.04349275678396225,
-0.02055886946618557,
0.01411995105445385,
0.010920166969299316,
-0.003261462552472949,
-0.004255212843418121,
0.0439971461892128,
-0.08378700911998749,
0.05002249777317047,
0.005404505878686905,
-0.058493129909038544,
-0.05746084079146385,
-0.05623328313231468,
0.08015558123588562,
-0.12354321032762527,
0.019618043676018715,
0.022914623841643333,
-0.09919633716344833,
-0.028338255360722542,
-0.02962341532111168,
-0.028934111818671227,
-0.010237117297947407,
-0.0926608294248581,
-0.024788029491901398,
-0.07088800519704819,
-0.06987461447715759,
-0.09611191600561142,
0.11320226639509201,
-0.024565059691667557,
-0.04235350713133812,
-0.044407401233911514,
0.06425772607326508,
0.022562745958566666,
-0.0018396865343675017,
-0.025644471868872643,
-0.010538347996771336,
0.0025584055110812187,
0.06488139182329178,
0.04480227082967758,
-0.011214845813810825,
0.03889596834778786,
-0.022241393104195595,
0.003973688930273056,
0.046428609639406204,
-0.022180885076522827,
-0.07864614576101303,
0.05577114224433899,
0.011611171998083591,
0.06734222173690796,
-0.011265000328421593,
0.01646171137690544,
-0.011015449650585651,
0.03783624619245529,
0.06136560067534447,
-0.03471401706337929,
-0.0008982110884971917,
0.028711877763271332,
0.023010771721601486,
0.05676165223121643,
-0.020433476194739342,
-4.4847560332073044e-8,
-0.008669205009937286,
-0.020882077515125275,
-0.02485606260597706,
0.027610518038272858,
-0.015414511784911156,
-0.03028932958841324,
-0.06575669348239899,
0.03180212527513504,
-0.0032994819339364767,
-0.05086437612771988,
0.06267677247524261,
0.03813853859901428,
-0.1256955862045288,
0.025032227858901024,
0.010851261205971241,
0.03780965879559517,
-0.012284180149435997,
0.0966813713312149,
-0.015085708349943161,
0.026378463953733444,
0.017403483390808105,
0.032777220010757446,
0.032999083399772644,
-0.07270225137472153,
0.008374945260584354,
0.02720770798623562,
-0.031604018062353134,
0.08887609839439392,
-0.02412310242652893,
-0.08087332546710968,
-0.02500142715871334,
0.021214265376329422,
-0.10052062571048737,
0.04487462714314461,
0.043955616652965546,
0.05302448943257332,
-0.006170520093291998,
-0.03899184986948967,
-0.03333442285656929,
0.04552023112773895,
0.01990019902586937,
-0.03679650276899338,
-0.10412775725126266,
0.01323466282337904,
0.0263969823718071,
-0.007754201535135508,
-0.0027635993901640177,
-0.05993708595633507,
0.0321943499147892,
0.024421824142336845,
-0.018497701734304428,
-0.03349008411169052,
-0.13543066382408142,
-0.009813250042498112,
-0.010824449360370636,
0.05080508813261986,
-0.020276153460144997,
-0.019999584183096886,
-0.0028064914513379335,
-0.03432567045092583,
-0.017142808064818382,
0.024019755423069,
0.0022949897684156895,
0.055942218750715256
] |
nreimers/MiniLMv2-L6-H384-distilled-from-BERT-Large | 37519953d888723fe745ea10a1438d8c20a3800f | 2021-06-20T19:02:12.000Z | [
"pytorch",
"bert",
"fill-mask",
"transformers",
"autotrain_compatible"
] | fill-mask | false | nreimers | null | nreimers/MiniLMv2-L6-H384-distilled-from-BERT-Large | 2,841 | null | transformers | # MiniLMv2
This is a MiniLMv2 model from: [https://github.com/microsoft/unilm](https://github.com/microsoft/unilm/tree/master/minilm) | [
-0.04895520955324173,
0.02276579663157463,
-0.07000173628330231,
0.036097876727581024,
0.042695432901382446,
0.02520260028541088,
-0.0600503534078598,
-0.0007676688255742192,
0.0047691743820905685,
0.015759311616420746,
0.06056235358119011,
0.00046843758900649846,
0.00011801968503277749,
0.01050148531794548,
-0.08721819519996643,
0.08599226176738739,
0.01498068030923605,
-0.033648423850536346,
0.018567850813269615,
-0.00535806268453598,
0.018649930134415627,
0.06277196109294891,
-0.09240960329771042,
-0.007299271412193775,
0.040097471326589584,
-0.004971580114215612,
0.0006423594313673675,
0.1038576066493988,
0.04481102153658867,
-0.06379807740449905,
0.0007346547790803015,
0.03916702792048454,
0.054420169442892075,
0.06359685957431793,
0.06703665852546692,
0.017612049356102943,
0.018825411796569824,
-0.02627837099134922,
-0.06136196479201317,
-0.06859486550092697,
-0.03151470422744751,
0.012728261761367321,
-0.00036808030563406646,
-0.0049248202703893185,
-0.003028493607416749,
0.03204696625471115,
-0.06138736382126808,
-0.06638938933610916,
-0.02506941929459572,
-0.03726499527692795,
0.02621067315340042,
-0.016988463699817657,
-0.04025539383292198,
0.022876180708408356,
0.0062387920916080475,
-0.05012645944952965,
-0.04382697120308876,
-0.07776421308517456,
0.0261690653860569,
-0.004900501109659672,
0.0404387004673481,
0.01664581522345543,
-0.033247388899326324,
0.022963257506489754,
0.0005878026131540537,
0.04971107095479965,
-0.002978444565087557,
-0.0770753026008606,
0.00504093524068594,
-0.1170683279633522,
-0.06406883895397186,
-0.042548276484012604,
-0.013686327263712883,
0.01435297355055809,
0.05689704790711403,
-0.0041104028932750225,
0.057125676423311234,
-0.0020104823634028435,
0.06368602812290192,
-0.022806983441114426,
-0.05479602515697479,
-0.014698090963065624,
-0.05923996493220329,
0.055565617978572845,
0.005362731870263815,
0.009084606543183327,
-0.053686242550611496,
0.006340047810226679,
0.13758905231952667,
-0.03288983926177025,
-0.13095535337924957,
0.03352827578783035,
0.06479422748088837,
0.0464354082942009,
-0.04781796410679817,
-0.03083430416882038,
0.06145505607128143,
-0.039485786110162735,
-0.04700921103358269,
0.06165080890059471,
-0.028451289981603622,
0.008284357376396656,
0.09092967957258224,
-0.0435781255364418,
0.02283267304301262,
-0.09361627697944641,
0.0875069722533226,
0.0037490196991711855,
0.04307965189218521,
-0.034554027020931244,
0.022966425865888596,
0.04025290161371231,
-0.060006674379110336,
-0.03719509020447731,
0.020583711564540863,
-0.08092869818210602,
-0.004847933538258076,
-0.03384304791688919,
-0.0035581530537456274,
-0.004073707852512598,
-0.012737107463181019,
-0.08114377409219742,
-0.06092020124197006,
-0.04482210427522659,
-0.019749078899621964,
0.015877695754170418,
-0.03853707015514374,
-2.6795005722022776e-34,
0.023562079295516014,
0.002345276065170765,
0.041187796741724014,
0.04818432778120041,
0.12321080267429352,
0.03126312419772148,
0.037290722131729126,
-0.028969576582312584,
-0.03538160398602486,
-0.009504184126853943,
-0.06577702611684799,
-0.04173797369003296,
-0.04687798023223877,
0.05348403751850128,
0.042434483766555786,
-0.1564864069223404,
0.005376838613301516,
0.056475166231393814,
0.00615740055218339,
0.01097516156733036,
-0.002388580935075879,
0.062372997403144836,
0.01149376668035984,
-0.12392240017652512,
0.09175445139408112,
0.09208182245492935,
0.06229568272829056,
-0.05160153657197952,
0.12363734096288681,
0.030484914779663086,
0.018321994692087173,
0.025407403707504272,
-0.041776224970817566,
0.004930829629302025,
0.010072944685816765,
-0.003038248745724559,
-0.07549645006656647,
-0.04250333458185196,
0.012249883264303207,
0.043215200304985046,
0.04041370376944542,
-0.03334071859717369,
0.02563297562301159,
-0.09244126826524734,
-0.04262537136673927,
-0.03287732973694801,
0.08154769241809845,
0.030890565365552902,
0.033896904438734055,
-0.10368799418210983,
-0.00709743145853281,
0.10551934689283371,
-0.0580039918422699,
-0.03281163424253464,
-0.05456491559743881,
-0.0006596371531486511,
0.03529633954167366,
0.07184820622205734,
-0.018682057037949562,
0.025925563648343086,
-0.02375112846493721,
0.04552971199154854,
-0.015816742554306984,
-0.019376982003450394,
0.05819624289870262,
-0.06790648400783539,
0.02873656339943409,
-0.10556823015213013,
0.026395810768008232,
0.033373210579156876,
-0.03833993524312973,
0.061588555574417114,
0.1160840168595314,
-0.014380334876477718,
0.0328463539481163,
-0.02532363310456276,
-0.00008932945638662204,
-0.05958889052271843,
-0.03185178339481354,
-0.01603260450065136,
-0.08207481354475021,
0.02502184361219406,
-0.014883361756801605,
-0.06546608358621597,
-0.021300610154867172,
-0.049747999757528305,
0.011992932297289371,
-0.05155694857239723,
-0.059855710715055466,
-0.040341176092624664,
-0.04414317384362221,
0.004887729417532682,
-0.03620303422212601,
0.052660148590803146,
0.05261489376425743,
-7.799672258999697e-34,
0.01805131323635578,
-0.07870244234800339,
0.035833582282066345,
0.013918714597821236,
0.017147144302725792,
-0.017809318378567696,
0.0013737345580011606,
0.09485296905040741,
-0.06044799089431763,
0.06761103123426437,
0.1251000165939331,
0.029214991256594658,
0.021908491849899292,
0.03323373198509216,
0.07918522506952286,
0.0647868663072586,
0.04299990087747574,
-0.08083342760801315,
0.0648777186870575,
-0.0031687230803072453,
0.06792683899402618,
0.11291144043207169,
-0.06576183438301086,
0.0022100606001913548,
-0.0003265137493144721,
-0.0211471039801836,
0.02950330637395382,
0.027035577222704887,
0.017098354175686836,
0.007303563877940178,
-0.013109011575579643,
0.013264812529087067,
-0.0040221912786364555,
-0.06251870840787888,
-0.05056912451982498,
0.03122297301888466,
0.010912477970123291,
-0.07608149200677872,
0.0013252729550004005,
0.03418628126382828,
0.012108595110476017,
-0.004260566551238298,
-0.037589266896247864,
-0.0010901553323492408,
0.00019804549810942262,
0.0039751529693603516,
0.051719773560762405,
0.01840861514210701,
0.036358222365379333,
-0.047623198479413986,
-0.001962418667972088,
-0.04082280769944191,
-0.06246257573366165,
0.01947067119181156,
-0.018930351361632347,
0.004109586589038372,
-0.029708394780755043,
0.022892946377396584,
0.10597699135541916,
-0.05200570076704025,
0.0059000710025429726,
-0.062312737107276917,
-0.06801187247037888,
-0.029168201610445976,
0.0023561923298984766,
0.08286891877651215,
-0.02780498005449772,
-0.04260875657200813,
-0.10295811295509338,
-0.023439185693860054,
0.08337375521659851,
0.015961965546011925,
-0.005081993993371725,
-0.04153130203485489,
0.01835375279188156,
-0.12168727815151215,
-0.007396259345114231,
0.004637852776795626,
0.007763034198433161,
-0.052417416125535965,
0.018932530656456947,
-0.06141224876046181,
0.056934766471385956,
0.0359552837908268,
0.06223267316818237,
-0.07546088099479675,
0.0016263349680230021,
0.07306613773107529,
-0.0722530335187912,
0.07526092976331711,
-0.08512663096189499,
0.04277236387133598,
0.06524386256933212,
0.047329675406217575,
-0.0568804070353508,
-3.3755057415874035e-8,
-0.05509170889854431,
-0.04316028580069542,
-0.006609188858419657,
-0.07148507237434387,
-0.01681501232087612,
-0.017823873087763786,
-0.010312178172171116,
0.005743416957557201,
0.006658314261585474,
0.01432848908007145,
0.02297535538673401,
-0.06807417422533035,
-0.019999925047159195,
0.040754903107881546,
-0.003140736371278763,
0.09019815921783447,
-0.01887335069477558,
0.10077892988920212,
0.014269710518419743,
-0.010625546798110008,
0.007593395188450813,
0.04507836326956749,
0.04207226634025574,
-0.016287516802549362,
0.018933238461613655,
-0.01261296309530735,
0.015273381024599075,
0.05510784685611725,
0.017163868993520737,
-0.05784009024500847,
-0.07543112337589264,
0.13375404477119446,
0.003285897197201848,
0.01842893287539482,
-0.08964250236749649,
0.14244233071804047,
0.004471041262149811,
0.0770513042807579,
0.01102218497544527,
-0.03233208879828453,
0.06936565786600113,
-0.0030374638736248016,
-0.09949064999818802,
0.023456064984202385,
0.014719266444444656,
0.08689513057470322,
-0.03110765665769577,
-0.11027206480503082,
-0.0014900248497724533,
-0.053888194262981415,
-0.013822948560118675,
0.01076432317495346,
0.04855505749583244,
0.06610007584095001,
-0.07260602712631226,
-0.005500246770679951,
-0.0011934731155633926,
-0.06526650488376617,
0.07483870536088943,
0.04716981574892998,
0.04062175750732422,
0.06567289680242538,
0.018383987247943878,
0.061560697853565216
] |
microsoft/BiomedNLP-KRISSBERT-PubMed-UMLS-EL | 373f710a611281c9ba2fa935586be1dbe98fc3fe | 2022-05-25T02:45:36.000Z | [
"pytorch",
"bert",
"en",
"arxiv:2112.07887",
"transformers",
"exbert",
"license:mit",
"feature-extraction"
] | feature-extraction | false | microsoft | null | microsoft/BiomedNLP-KRISSBERT-PubMed-UMLS-EL | 2,832 | 4 | transformers | ---
language: en
tags:
- exbert
license: mit
pipeline_tag: feature-extraction
widget:
- text: "<ENT> ER </ENT> crowding has become a wide-spread problem."
---
## KRISSBERT
[https://arxiv.org/pdf/2112.07887.pdf](https://arxiv.org/pdf/2112.07887.pdf)
Entity linking faces significant challenges such as prolific variations and prevalent ambiguities, especially in high-value domains with myriad entities. Standard classification approaches suffer from the annotation bottleneck and cannot effectively handle unseen entities. Zero-shot entity linking has emerged as a promising direction for generalizing to new entities, but it still requires example gold entity mentions during training and canonical descriptions for all entities, both of which are rarely available outside of Wikipedia ([Logeswaran et al., 2019](https://aclanthology.org/P19-1335.pdf); [Wu et al., 2020](https://aclanthology.org/2020.emnlp-main.519.pdf)). We explore Knowledge-RIch Self-Supervision (KRISS) and train a contextual encoder (KRISSBERT) for entity linking, by leveraging readily available unlabeled text and domain knowledge.
Specifically, the KRISSBERT model is initialized with [PubMedBERT](https://huggingface.co/microsoft/BiomedNLP-PubMedBERT-base-uncased-abstract) parameters, and then continuously pretrained using biomedical entity names from the [UMLS](https://www.nlm.nih.gov/research/umls/index.html) ontology to self-supervise entity linking examples from [PubMed](https://pubmed.ncbi.nlm.nih.gov/) abstracts. Experiments on seven standard biomedical entity linking datasets show that KRISSBERT attains new state of the art, outperforming prior self-supervised methods by as much as 20 absolute points in accuracy.
See [Zhang et al., 2021](https://arxiv.org/abs/2112.07887) for the details.
Note that some prior systems like [BioSyn](https://aclanthology.org/2020.acl-main.335.pdf), [SapBERT](https://aclanthology.org/2021.naacl-main.334.pdf), and their follow-up work (e.g., [Lai et al., 2021](https://aclanthology.org/2021.findings-emnlp.140.pdf)) claimed to do entity linking, but their systems completely ignore the context of an entity mention, and can only predict a surface form in the entity dictionary (see Figure 1 in [BioSyn](https://aclanthology.org/2020.acl-main.335.pdf)), _**not the canonical entity ID (e.g., CUI in UMLS)**_. Therefore, they can't disambiguate ambiguous mentions. For instance, given the entity mention "_ER_" in the sentence "*ER crowding has become a wide-spread problem*", their systems ignore the sentence context, and simply predict the closest surface form, which is just "ER". Multiple entities share this surface form as a potential name or alias, such as *Emergency Room (C0562508)*, *Estrogen Receptor Gene (C1414461)*, and *Endoplasmic Reticulum (C0014239)*. Without using the context information, their systems can't resolve such ambiguity and pinpoint the correct entity *Emergency Room (C0562508)*. More problematically, their evaluation would deem such an ambiguous prediction correct. Consequently, the reported results in their papers do not reflect true performance on entity linking.
## Usage for Entity Linking
Here, we use the [MedMentions](https://github.com/chanzuckerberg/MedMentions) data to show you how to 1) **generate prototype embeddings**, and 2) **run entity linking**.
(We are currently unable to release the self-supervised mention examples, because they require the UMLS and PubMed licenses.)
#### 1. Create conda environment and install requirements
```bash
conda create -n kriss -y python=3.8 && conda activate kriss
pip install -r requirements.txt
```
#### 2. Switch the root dir to [usage](https://huggingface.co/microsoft/BiomedNLP-KRISSBERT-PubMed-UMLS-EL/tree/main/usage)
```bash
cd usage
```
#### 3. Download the MedMentions dataset
```bash
git clone https://github.com/chanzuckerberg/MedMentions.git
```
#### 4. Generate prototype embeddings
```bash
python generate_prototypes.py
```
#### 5. Run entity linking
```bash
python run_entity_linking.py
```
This will give you about `58.3%` top-1 accuracy.
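Outside the provided scripts, the checkpoint can also be loaded directly with `transformers` to embed a marked mention. The snippet below is a minimal sketch, not the official pipeline: the `<ENT>`/`</ENT>` markers follow the widget example above, and mean-pooling the final hidden states is an illustrative choice rather than the exact prototype construction used in `generate_prototypes.py`.
```python
import torch
from transformers import AutoModel, AutoTokenizer

model_name = "microsoft/BiomedNLP-KRISSBERT-PubMed-UMLS-EL"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModel.from_pretrained(model_name)

# Mark the mention span with the tags used in the widget example.
sentence = "<ENT> ER </ENT> crowding has become a wide-spread problem."

inputs = tokenizer(sentence, return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)

# Illustrative mention embedding: mean-pool the final hidden states.
mention_embedding = outputs.last_hidden_state.mean(dim=1)
print(mention_embedding.shape)  # (1, hidden_size)
```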
## Citation
If you find KRISSBERT useful in your research, please cite the following paper:
```latex
@article{krissbert,
  author = {Sheng Zhang and Hao Cheng and Shikhar Vashishth and Cliff Wong and Jinfeng Xiao and Xiaodong Liu and Tristan Naumann and Jianfeng Gao and Hoifung Poon},
title = {Knowledge-Rich Self-Supervision for Biomedical Entity Linking},
year = {2021},
url = {https://arxiv.org/abs/2112.07887},
eprinttype = {arXiv},
eprint = {2112.07887},
}
``` | [
-0.01852470636367798,
-0.03548455610871315,
0.02405976690351963,
-0.0063829864375293255,
0.11411093920469284,
-0.02773641236126423,
0.0011980370618402958,
0.0375283882021904,
0.019411960616707802,
0.0007559393416158855,
0.00003127224772470072,
-0.04485693946480751,
0.022188575938344002,
0.01609768718481064,
0.01119306217879057,
0.07711457461118698,
0.04400716349482536,
0.07093463093042374,
-0.035992495715618134,
-0.04586072638630867,
0.022850386798381805,
0.021774688735604286,
0.07757289707660675,
0.016333770006895065,
-0.0007644032593816519,
-0.0784454196691513,
-0.058478113263845444,
-0.05053585767745972,
0.01614963822066784,
-0.03541138395667076,
-0.008012541569769382,
0.06619974225759506,
-0.08102412521839142,
0.06331532448530197,
-0.0007304877508431673,
0.08186213672161102,
0.020266560837626457,
0.0528525747358799,
-0.03352677822113037,
0.012731524184346199,
0.040166836231946945,
-0.030189596116542816,
-0.04926906153559685,
0.025014890357851982,
0.03063170053064823,
0.07196912169456482,
-0.04781876504421234,
-0.04591931030154228,
-0.09967324137687683,
0.01643274910748005,
-0.07466035336256027,
-0.019910570234060287,
-0.015236211940646172,
0.034093402326107025,
0.034099966287612915,
-0.02506430074572563,
-0.04006964713335037,
-0.06655606627464294,
-0.008107749745249748,
0.0025979650672525167,
-0.005866187624633312,
-0.04073621332645416,
0.006520051043480635,
-0.012496326118707657,
0.008065329864621162,
-0.02382262609899044,
0.037687748670578,
0.0807863175868988,
0.033382073044776917,
0.005738608539104462,
0.061420854181051254,
0.03327089548110962,
-0.04648846387863159,
0.09128653258085251,
-0.00405611377209425,
0.06055895984172821,
-0.02234191633760929,
0.09728416800498962,
-0.010968855582177639,
-0.03963218256831169,
0.014188571833074093,
0.03119099698960781,
0.06538418680429459,
-0.0660296380519867,
0.06003504991531372,
0.08539029210805893,
0.04331060126423836,
-0.0007122100796550512,
-0.05423297733068466,
0.05026160553097725,
-0.08520953357219696,
-0.07930482923984528,
0.17079198360443115,
-0.022127579897642136,
0.09249170124530792,
0.020828908309340477,
0.06298884749412537,
-0.10700511187314987,
-0.02563413232564926,
0.09376106411218643,
-0.03349296748638153,
0.09172652661800385,
0.04182620346546173,
-0.006899304687976837,
-0.007894245907664299,
-0.03312959522008896,
0.004451869986951351,
0.06985269486904144,
-0.0372820720076561,
-0.07394653558731079,
0.023082470521330833,
-0.008174056187272072,
-0.01033850759267807,
-0.14502820372581482,
0.008776580914855003,
-0.04191603884100914,
-0.0021291051525622606,
-0.052582431584596634,
0.06317398697137833,
0.003524667350575328,
-0.04725904017686844,
0.019482092931866646,
-0.09413640946149826,
-0.019750036299228668,
0.006671602837741375,
0.08290960639715195,
-0.08620043843984604,
5.1006297819812204e-33,
0.15253977477550507,
0.0708947405219078,
0.05043058842420578,
-0.00010625668073771521,
-0.10229039937257767,
0.004047941882163286,
-0.0485716313123703,
0.00003983315036748536,
-0.045154180377721786,
0.003053811611607671,
0.010719490237534046,
0.038848813623189926,
-0.027928700670599937,
0.011263162828981876,
-0.006923239678144455,
-0.058000218123197556,
0.013303996995091438,
0.08298424631357193,
0.021690718829631805,
-0.006642487365752459,
-0.018018530681729317,
0.006310411728918552,
-0.037219032645225525,
-0.013620381243526936,
-0.0024547679349780083,
0.014618666842579842,
-0.01932763308286667,
-0.1342734694480896,
-0.01909554935991764,
0.03717498853802681,
-0.05606735870242119,
0.006607114337384701,
0.0539163313806057,
0.05625826492905617,
0.03963784500956535,
-0.07221093028783798,
-0.041336916387081146,
-0.0752137154340744,
0.020156437531113625,
-0.04514845460653305,
-0.02681485377252102,
0.014557038433849812,
-0.024440843611955643,
-0.06753446161746979,
-0.02062845788896084,
0.013565761968493462,
-0.005731851793825626,
-0.026013782247900963,
-0.02928248792886734,
-0.02306842990219593,
0.05253253132104874,
0.04732641577720642,
0.0161741990596056,
-0.0000025495694444543915,
-0.042160071432590485,
0.01440335065126419,
-0.010069238021969795,
-0.028608854860067368,
-0.03231960907578468,
-0.03269921988248825,
0.012769867666065693,
0.02423025667667389,
-0.029048172757029533,
0.014570243656635284,
0.027779465541243553,
0.024682551622390747,
0.0008835839689709246,
0.028028439730405807,
0.0420299731194973,
-0.043771374970674515,
-0.014858320355415344,
0.03421923145651817,
-0.02352018840610981,
-0.026925962418317795,
0.09715607762336731,
0.01994258351624012,
0.01733383722603321,
-0.11915577948093414,
0.00677376426756382,
0.06230933591723442,
-0.05055784061551094,
-0.044473808258771896,
-0.022545164451003075,
-0.002852396806702018,
0.006215396337211132,
-0.04540027678012848,
0.03372151032090187,
-0.06626460701227188,
0.0012768860906362534,
-0.020734937861561775,
0.0309867225587368,
0.013029580935835838,
-0.09236589074134827,
0.05078310891985893,
-0.009727121330797672,
-4.5693063354095464e-33,
0.014461895450949669,
-0.015724731609225273,
-0.02121940813958645,
0.01736920326948166,
0.0031126285903155804,
-0.016028601676225662,
0.042313456535339355,
0.045542195439338684,
-0.002719287062063813,
-0.02530871331691742,
0.0004241036658640951,
-0.0589812770485878,
-0.03858707845211029,
-0.055292025208473206,
0.022027455270290375,
-0.0410684235394001,
0.0023248489014804363,
0.004303575959056616,
0.019995002076029778,
0.16636665165424347,
0.0525779090821743,
0.010112764313817024,
-0.08030107617378235,
0.05695074424147606,
0.038918387144804,
0.09303530305624008,
0.019868135452270508,
0.005071042571216822,
-0.04221703112125397,
-0.0956820547580719,
-0.032211825251579285,
0.01609037257730961,
-0.05338888615369797,
-0.09561162441968918,
-0.08704166114330292,
-0.02219289354979992,
0.05207353085279465,
0.020624959841370583,
0.015984605997800827,
0.00935266725718975,
0.037631142884492874,
0.026109054684638977,
-0.05172589048743248,
-0.013353250920772552,
-0.008669236674904823,
-0.0438559427857399,
-0.08299992233514786,
0.0528278574347496,
0.034736648201942444,
-0.0037161512300372124,
-0.04191165789961815,
0.025209935382008553,
-0.0823545753955841,
-0.05160318687558174,
-0.0030616475269198418,
-0.08640123903751373,
0.026105400174856186,
-0.023060163483023643,
-0.08060876280069351,
0.05283162370324135,
-0.05870739370584488,
0.08658602833747864,
0.02029087394475937,
0.09678670018911362,
0.011412200517952442,
-0.06671695411205292,
-0.019219206646084785,
0.05610644072294235,
-0.08374709635972977,
-0.10818848758935928,
0.09101400524377823,
-0.028942380100488663,
-0.03730668127536774,
-0.03931942954659462,
0.01666608452796936,
-0.01763211004436016,
0.04195529967546463,
-0.05945947393774986,
0.012557330541312695,
-0.0480414517223835,
0.03066229075193405,
-0.023875389248132706,
0.03604774922132492,
0.07849530130624771,
0.11360018700361252,
0.06761106103658676,
0.0492730550467968,
0.0076805478893220425,
-0.013569246046245098,
-0.024546179920434952,
-0.03140893206000328,
-0.032141752541065216,
-0.055112048983573914,
0.14164797961711884,
-0.04482818394899368,
-5.971023853135193e-8,
-0.06103905662894249,
0.08322350680828094,
-0.05212382599711418,
0.03364568203687668,
0.009148556739091873,
-0.055003948509693146,
-0.019949346780776978,
0.12998931109905243,
-0.02859206683933735,
0.019945498555898666,
0.013411377556622028,
0.04642403498291969,
-0.12873893976211548,
0.0035475539043545723,
0.0725080743432045,
0.04817844182252884,
-0.016349557787179947,
0.02389097586274147,
-0.04415277764201164,
0.008581681177020073,
-0.04646372050046921,
-0.010097960941493511,
0.00617841724306345,
-0.0743572860956192,
0.03107968159019947,
-0.08815430104732513,
0.00927675236016512,
0.03774314373731613,
0.12522058188915253,
-0.05078829824924469,
-0.04887944832444191,
0.0386175811290741,
-0.06177627295255661,
0.02386740781366825,
0.0714772418141365,
0.08991611003875732,
-0.03826138377189636,
-0.018087487667798996,
-0.04865546151995659,
0.020100463181734085,
0.0454162172973156,
0.04798858240246773,
-0.03942318260669708,
0.008694886229932308,
0.058872610330581665,
0.0022590328007936478,
-0.013140948489308357,
-0.05036721006035805,
0.01587163843214512,
-0.04198779538273811,
-0.02342166006565094,
-0.023647962138056755,
-0.006288771517574787,
0.03641556575894356,
0.01111681293696165,
0.0882018506526947,
0.04119464382529259,
-0.0161297544836998,
0.054770976305007935,
0.07993901520967484,
0.0671486109495163,
-0.06716389954090118,
0.02077322266995907,
0.08154554665088654
] |
nateraw/bert-base-uncased-emotion | 064d252021b51d95cd0547c89c6489100da0dc4c | 2021-05-20T01:18:38.000Z | [
"pytorch",
"jax",
"bert",
"text-classification",
"en",
"dataset:emotion",
"transformers",
"emotion",
"license:apache-2.0"
] | text-classification | false | nateraw | null | nateraw/bert-base-uncased-emotion | 2,827 | 3 | transformers | ---
language:
- en
thumbnail: https://avatars3.githubusercontent.com/u/32437151?s=460&u=4ec59abc8d21d5feea3dab323d23a5860e6996a4&v=4
tags:
- text-classification
- emotion
- pytorch
license: apache-2.0
datasets:
- emotion
metrics:
- accuracy
---
# bert-base-uncased-emotion
## Model description
`bert-base-uncased` fine-tuned on the emotion dataset using PyTorch Lightning. Sequence length 128, learning rate 2e-5, batch size 32, 2 GPUs, 4 epochs.
For more details, please see [the emotion dataset on nlp viewer](https://huggingface.co/nlp/viewer/?dataset=emotion).
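A minimal inference sketch (not part of the original training code) using the `transformers` pipeline; it assumes the uploaded config carries the emotion label names, otherwise the outputs will show generic `LABEL_k` ids:
```python
from transformers import pipeline

classifier = pipeline("text-classification", model="nateraw/bert-base-uncased-emotion")

# The example sentence is illustrative.
print(classifier("I'm thrilled the experiment finally worked!"))
```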
#### Limitations and bias
- Not the best model, but it works in a pinch I guess...
- Code not available as I just hacked this together.
- [Follow me on github](https://github.com/nateraw) to get notified when code is made available.
## Training data
Data came from HuggingFace's `datasets` package. The data can be viewed [on nlp viewer](https://huggingface.co/nlp/viewer/?dataset=emotion).
## Training procedure
...
## Eval results
val_acc - 0.931 (useless, as this should be precision/recall/f1)
The score was calculated using PyTorch Lightning metrics.
| [
-0.11765565723180771,
-0.06850294023752213,
0.03678814694285393,
0.05519430711865425,
0.05081610381603241,
0.021184660494327545,
-0.013830951415002346,
0.025326469913125038,
0.020117860287427902,
-0.07320396602153778,
0.04520837962627411,
-0.03388660401105881,
-0.07622814923524857,
-0.0037789540365338326,
0.05113749951124191,
0.10787850618362427,
0.020535316318273544,
-0.007385019212961197,
-0.061524514108896255,
-0.01344519667327404,
0.03130757808685303,
0.07893149554729462,
0.0037889813538640738,
-0.07118544727563858,
0.06348655372858047,
0.026572223752737045,
-0.015415915288031101,
-0.01109993178397417,
0.05881669372320175,
0.04560127481818199,
0.05002456158399582,
-0.005598612129688263,
0.05731990188360214,
0.11649802327156067,
0.0015561532927677035,
0.08049953728914261,
-0.02839599922299385,
-0.008641740307211876,
-0.0016119404463097453,
0.018777986988425255,
-0.010955982841551304,
0.010728312656283379,
-0.04737841710448265,
0.003154792357236147,
0.0815381407737732,
-0.023320281878113747,
-0.04593448340892792,
-0.027309484779834747,
-0.058684442192316055,
-0.08248256146907806,
-0.08060923963785172,
-0.03997045382857323,
0.02030407078564167,
0.07245247066020966,
-0.0189578328281641,
-0.006912721320986748,
0.003773063886910677,
-0.030669016763567924,
0.02528819441795349,
-0.0404818020761013,
-0.02868044376373291,
-0.04800983518362045,
0.0019359079888090491,
-0.032822299748659134,
-0.08765872567892075,
0.00007975461630849168,
0.0029007585253566504,
-0.01094850990921259,
0.03932657465338707,
-0.014327505603432655,
0.024009456858038902,
-0.009773105382919312,
-0.028881264850497246,
0.019685905426740646,
0.013444628566503525,
-0.005850755609571934,
0.09014233946800232,
-0.01638646423816681,
0.05008121207356453,
-0.08616017550230026,
-0.016844525933265686,
-0.05903679504990578,
0.08074211329221725,
0.038594119250774384,
0.03887958452105522,
-0.0595366433262825,
0.015270271338522434,
0.04367952421307564,
-0.07791244983673096,
0.06853675097227097,
-0.004038519226014614,
-0.10755278915166855,
0.059117089956998825,
-0.014500835910439491,
0.018214425072073936,
0.038253303617239,
0.001881964853964746,
-0.0107183326035738,
-0.11236529052257538,
0.04436318948864937,
0.0053961388766765594,
-0.004474207758903503,
-0.06859400123357773,
-0.057837776839733124,
-0.03290567547082901,
0.02355301007628441,
-0.0351615846157074,
-0.030656753107905388,
0.032229945063591,
-0.08869294822216034,
-0.06666780263185501,
-0.02155560813844204,
-0.037134841084480286,
-0.09157335758209229,
0.06619205325841904,
-0.054781220853328705,
-0.028790703043341637,
0.018371624872088432,
0.10371097922325134,
0.09548872709274292,
-0.0027217972092330456,
0.033771369606256485,
-0.002923060907050967,
0.050356149673461914,
-0.013334576971828938,
0.020745933055877686,
-0.05141238123178482,
6.703002334910729e-33,
0.04599592462182045,
0.0059958635829389095,
-0.01580614596605301,
-0.054596349596977234,
-0.011127143166959286,
-0.052386775612831116,
0.026477720588445663,
-0.03273507207632065,
-0.09032188355922699,
0.0007011967827565968,
-0.0533328540623188,
0.01375107653439045,
-0.036903731524944305,
0.07457039505243301,
-0.05071869492530823,
-0.022737888619303703,
-0.029348099604249,
-0.022933339700102806,
0.05369033291935921,
0.024842780083417892,
-0.006079263053834438,
0.014994091354310513,
-0.017183290794491768,
-0.04065541923046112,
-0.15851379930973053,
0.054753754287958145,
0.07995045930147171,
-0.06143999844789505,
0.01273219846189022,
0.021166687831282616,
-0.12568193674087524,
0.06105610355734825,
-0.0011343307560309768,
-0.005806710571050644,
0.030434325337409973,
0.014011377468705177,
0.0026682436000555754,
-0.024504996836185455,
0.01415987778455019,
-0.04541842266917229,
-0.040255628526210785,
0.08256752043962479,
0.013940693810582161,
-0.04964904114603996,
-0.03727421909570694,
0.06956563144922256,
0.010154904797673225,
0.008614161983132362,
0.03432203084230423,
0.03532910719513893,
0.03187369182705879,
0.020368440076708794,
0.007644102908670902,
0.07069975137710571,
-0.008569097146391869,
-0.0007633554632775486,
0.07547243684530258,
0.07164008170366287,
0.07682936638593674,
0.028317060321569443,
-0.027983905747532845,
0.011376086622476578,
0.07516877353191376,
-0.07768751680850983,
0.042561911046504974,
0.012510095722973347,
-0.0659412145614624,
-0.02224738709628582,
-0.040490131825208664,
0.019312923774123192,
-0.030229300260543823,
0.051529135555028915,
0.028915954753756523,
-0.04115744307637215,
0.006918380968272686,
-0.05656814202666283,
0.03545233607292175,
-0.04271242767572403,
-0.0359182208776474,
-0.017699673771858215,
-0.012716909870505333,
-0.033996060490608215,
0.0032517611980438232,
-0.03930026292800903,
-0.04836791753768921,
-0.08633442968130112,
0.007335517089813948,
-0.08957157284021378,
-0.04312516376376152,
0.08455862104892731,
-0.01436171866953373,
-0.03971216827630997,
0.002886291593313217,
-0.009762277826666832,
-0.01758582331240177,
-6.721857264297919e-33,
0.028267309069633484,
0.03670654073357582,
-0.11088836193084717,
0.057695403695106506,
-0.013467304408550262,
-0.0015237063635140657,
0.02739037573337555,
0.15380598604679108,
0.025833819061517715,
-0.021479029208421707,
0.05714450404047966,
-0.05440356954932213,
0.012470624409615993,
-0.018402373418211937,
0.06937903165817261,
0.026172714307904243,
-0.06201457604765892,
0.04388101026415825,
0.022809334099292755,
0.027998939156532288,
0.0005990657373331487,
0.07130192220211029,
-0.10220272094011307,
0.06927825510501862,
-0.022267723456025124,
0.01966850459575653,
-0.02770720236003399,
0.009800662286579609,
0.02383250743150711,
-0.08000452071428299,
-0.02017371542751789,
0.03533296287059784,
-0.11915871500968933,
0.020041564479470253,
-0.043866898864507675,
0.00847266148775816,
0.036747027188539505,
-0.05171847715973854,
-0.053583648055791855,
0.01246629934757948,
0.14757956564426422,
0.037351857870817184,
-0.0749354138970375,
0.05219264328479767,
0.017894374206662178,
0.021812664344906807,
-0.06011229753494263,
0.01016776729375124,
0.024323606863617897,
0.031699590384960175,
0.021847667172551155,
-0.027763627469539642,
-0.056137293577194214,
0.028020478785037994,
-0.020278604701161385,
-0.1365595906972885,
0.05414566770195961,
-0.0535331591963768,
-0.06103385612368584,
0.020687062293291092,
-0.0718429684638977,
-0.033889107406139374,
0.007858228869736195,
-0.033382512629032135,
0.023959659039974213,
-0.040242407470941544,
-0.013645990751683712,
0.03538614884018898,
-0.04865428805351257,
0.004179754760116339,
0.041186150163412094,
0.028587477281689644,
0.10442691296339035,
0.01610865816473961,
0.02193785458803177,
0.03133665770292282,
0.006478262133896351,
-0.05135723575949669,
-0.060628313571214676,
-0.05643090978264809,
-0.00561773544177413,
0.011999489739537239,
0.03864919766783714,
0.06784132868051529,
0.0344015471637249,
0.0988665372133255,
0.054251592606306076,
0.1372619867324829,
-0.01480750273913145,
-0.012855647131800652,
-0.05226671323180199,
0.08116762340068817,
0.06200788915157318,
0.11102460324764252,
0.09011845290660858,
-5.997183905037673e-8,
-0.09235723316669464,
0.011789540760219097,
-0.042614564299583435,
0.022260254248976707,
-0.03833676874637604,
-0.02428070642054081,
0.03745732083916664,
0.022357869893312454,
-0.07110752165317535,
-0.03316553682088852,
0.05891745537519455,
0.06634005159139633,
-0.09152784198522568,
0.03308490663766861,
-0.03967713192105293,
0.044123921543359756,
0.022994086146354675,
0.01117642130702734,
0.03445134684443474,
-0.03164483979344368,
-0.013900215737521648,
0.029897484928369522,
0.02184979058802128,
-0.05343727394938469,
0.02523544616997242,
-0.04879910871386528,
-0.030312679708003998,
0.06343545019626617,
-0.08183641731739044,
-0.030796462669968605,
0.014048644341528416,
0.013474265113472939,
-0.017246253788471222,
-0.060453590005636215,
0.07237041741609573,
0.04219113662838936,
-0.08380931615829468,
-0.09560076892375946,
0.005827103741466999,
0.0557139553129673,
0.03969508036971092,
0.05849688872694969,
-0.05760258436203003,
-0.03345618396997452,
0.05486603453755379,
-0.014092020690441132,
0.03968780115246773,
-0.13625800609588623,
0.05741743743419647,
0.08131637424230576,
0.03649165853857994,
-0.035804398357868195,
-0.10003210604190826,
0.042168330401182175,
0.04737916216254234,
0.025112031027674675,
-0.08461286872625351,
0.016708428040146828,
0.032445620745420456,
0.014055397361516953,
0.04894595593214035,
0.004790824837982655,
-0.03524738922715187,
0.052424486726522446
] |
m3hrdadfi/wav2vec2-large-xlsr-persian-v3 | f3ceecb54fc81bb796f1565429bcf5599cd0e24d | 2021-11-04T15:22:11.000Z | [
"pytorch",
"tf",
"wav2vec2",
"automatic-speech-recognition",
"fa",
"dataset:common_voice",
"transformers",
"audio",
"speech",
"xlsr-fine-tuning-week",
"model-index"
] | automatic-speech-recognition | false | m3hrdadfi | null | m3hrdadfi/wav2vec2-large-xlsr-persian-v3 | 2,826 | 8 | transformers | ---
language: fa
datasets:
- common_voice
tags:
- audio
- automatic-speech-recognition
- speech
- xlsr-fine-tuning-week
widget:
- example_title: Common Voice sample 1
src: https://huggingface.co/m3hrdadfi/wav2vec2-large-xlsr-persian-v3/resolve/main/sample1.flac
- example_title: Common Voice sample 2978
src: https://huggingface.co/m3hrdadfi/wav2vec2-large-xlsr-persian-v3/resolve/main/sample2978.flac
- example_title: Common Voice sample 5168
src: https://huggingface.co/m3hrdadfi/wav2vec2-large-xlsr-persian-v3/resolve/main/sample5168.flac
model-index:
- name: XLSR Wav2Vec2 Persian (Farsi) V3 by Mehrdad Farahani
results:
- task:
name: Speech Recognition
type: automatic-speech-recognition
dataset:
name: Common Voice fa
type: common_voice
args: fa
metrics:
- name: Test WER
type: wer
value: 10.36
---
# Wav2Vec2-Large-XLSR-53-Persian V3
## Usage
Fine-tuned [facebook/wav2vec2-large-xlsr-53](https://huggingface.co/facebook/wav2vec2-large-xlsr-53) in Persian (Farsi) using [Common Voice](https://huggingface.co/datasets/common_voice). When using this model, make sure that your speech input is sampled at 16kHz.
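Before the full evaluation walkthrough below, here is a minimal single-utterance sketch. The file path is a placeholder, and the audio is assumed to already be 16kHz mono; otherwise resample it first as in the evaluation code.
```python
import torch
import torchaudio
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor

model_name = "m3hrdadfi/wav2vec2-large-xlsr-persian-v3"
processor = Wav2Vec2Processor.from_pretrained(model_name)
model = Wav2Vec2ForCTC.from_pretrained(model_name)

# "sample1.flac" is a placeholder path; any 16kHz mono recording works.
speech_array, sampling_rate = torchaudio.load("sample1.flac")
inputs = processor(speech_array.squeeze().numpy(), sampling_rate=16_000, return_tensors="pt", padding=True)

with torch.no_grad():
    logits = model(inputs.input_values, attention_mask=inputs.attention_mask).logits

pred_ids = torch.argmax(logits, dim=-1)
print(processor.batch_decode(pred_ids)[0])
```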
**Requirements**
```bash
# requirement packages
!pip install git+https://github.com/huggingface/datasets.git
!pip install git+https://github.com/huggingface/transformers.git
!pip install torchaudio
!pip install librosa
!pip install jiwer
!pip install parsivar
!pip install num2fawords
```
**Normalizer**
```bash
# Normalizer
!wget -O dictionary.py https://huggingface.co/m3hrdadfi/wav2vec2-large-xlsr-persian-v3/raw/main/dictionary.py
!wget -O normalizer.py https://huggingface.co/m3hrdadfi/wav2vec2-large-xlsr-persian-v3/raw/main/normalizer.py
```
**Downloading data**
```bash
wget https://voice-prod-bundler-ee1969a6ce8178826482b88e843c335139bd3fb4.s3.amazonaws.com/cv-corpus-6.1-2020-12-11/fa.tar.gz
tar -xzf fa.tar.gz
rm -rf fa.tar.gz
```
**Cleaning**
```python
import os

import pandas as pd

from normalizer import normalizer


def cleaning(text):
    if not isinstance(text, str):
        return None
    return normalizer({"sentence": text}, return_dict=False)


data_dir = "/content/cv-corpus-6.1-2020-12-11/fa"

# test.tsv from Common Voice is tab-separated
test = pd.read_csv(f"{data_dir}/test.tsv", sep="\t")
test["path"] = data_dir + "/clips/" + test["path"]
print(f"Step 0: {len(test)}")

# keep only rows whose audio file actually exists on disk
test["status"] = test["path"].apply(lambda path: True if os.path.exists(path) else None)
test = test.dropna(subset=["status"])
test = test.drop("status", axis=1)
print(f"Step 1: {len(test)}")

test["sentence"] = test["sentence"].apply(lambda t: cleaning(t))
test = test.dropna(subset=["sentence"])
print(f"Step 2: {len(test)}")

test = test.reset_index(drop=True)
print(test.head())

test = test[["path", "sentence"]]
test.to_csv("/content/test.csv", sep="\t", encoding="utf-8", index=False)
```
**Prediction**
```python
import numpy as np
import pandas as pd
import librosa
import torch
import torchaudio
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
from datasets import load_dataset, load_metric
import IPython.display as ipd
model_name_or_path = "m3hrdadfi/wav2vec2-large-xlsr-persian-v3"
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
print(model_name_or_path, device)
processor = Wav2Vec2Processor.from_pretrained(model_name_or_path)
model = Wav2Vec2ForCTC.from_pretrained(model_name_or_path).to(device)
def speech_file_to_array_fn(batch):
speech_array, sampling_rate = torchaudio.load(batch["path"])
speech_array = speech_array.squeeze().numpy()
speech_array = librosa.resample(np.asarray(speech_array), sampling_rate, processor.feature_extractor.sampling_rate)
batch["speech"] = speech_array
return batch
def predict(batch):
features = processor(
batch["speech"],
sampling_rate=processor.feature_extractor.sampling_rate,
return_tensors="pt",
padding=True
)
input_values = features.input_values.to(device)
attention_mask = features.attention_mask.to(device)
with torch.no_grad():
logits = model(input_values, attention_mask=attention_mask).logits
pred_ids = torch.argmax(logits, dim=-1)
batch["predicted"] = processor.batch_decode(pred_ids)
return batch
dataset = load_dataset("csv", data_files={"test": "/content/test.csv"}, delimiter=" ")["test"]
dataset = dataset.map(speech_file_to_array_fn)
result = dataset.map(predict, batched=True, batch_size=4)
```
**WER Score**
```python
wer = load_metric("wer")
print("WER: {:.2f}".format(100 * wer.compute(predictions=result["predicted"], references=result["sentence"])))
```
**Output**
```python
max_items = np.random.randint(0, len(result), 20).tolist()
for i in max_items:
reference, predicted = result["sentence"][i], result["predicted"][i]
print("reference:", reference)
print("predicted:", predicted)
print('---')
```
```text
reference: ماجرا رو براش تعریف کردم اون گفت مریم اگه میدونی پسر خوبیه خب چه اشکالی داره باهاش بیشتر اشنا بشو
predicted: ماجرا رو براش تعریف کردم اون گفت مریم اگه میدونی پسر خوبیه خب چه اشکالی داره باهاش بیشتر اشنا بشو
---
reference: بیا پایین تو اجازه نداری بری اون بالا
predicted: بیا پایین تو اجازه نداری بری اون بالا
---
reference: هر روز یک دو مداد کش می رفتتم تااین که تا پایان ترم از تمامی دوستانم مداد برداشته بودم
predicted: هر روز یک دو مداد کش می رفتم تااین که تا پایین ترم از تمامی دوستان و مداد برداشته بودم
---
reference: فکر میکنی آروم میشینه
predicted: فکر میکنی آروم میشینه
---
reference: هرکسی با گوشی هوشمند خود میتواند با کایلا متصل گردد در یک محدوده مکانی
predicted: هرکسی با گوشی هوشمند خود میتواند با کایلا متصل گردد در یک محدوده مکانی
---
reference: برو از مهرداد بپرس
predicted: برو از مهرداد بپرس
---
reference: می خواهم شما را با این قدمها آشنا کنم
predicted: می خواهم شما را با این قدمها آشنا کنم
---
reference: میدونم یه روز دوباره می تونم تو رو ببینم
predicted: میدونم یه روز دوباره می تونم تو رو ببینم
---
reference: بسیار خوب خواهد بود دعوت او را بپذیری
predicted: بسیار خوب خواهد بود دعوت او را بپذیری
---
reference: بهت بگن آشغالی خوبه
predicted: بهت بگن آشغالی خوبه
---
reference: چرا معاشرت با هم ایمانان ما را محفوظ نگه میدارد
predicted: چرا معاشرت با هم ایمانان آ را م حفوظ نگه میدارد
---
reference: بولیوی پس از گویان فقیرترین کشور آمریکای جنوبی است
predicted: بولیوی پس از گویان فقیرترین کشور آمریکای جنوبی است
---
reference: بعد از مدتی اینکار برایم عادی شد
predicted: بعد از مدتی اینکار برایم عادو شد
---
reference: به نظر اون هم همینطوره
predicted: به نظر اون هم همینطوره
---
reference: هیچ مایونز ی دارید
predicted: هیچ مایونز ی دارید
---
reference: هیچ یک از انان کاری به سنگ نداشتند
predicted: هیچ شک از انان کاری به سنگ نداشتند
---
reference: می خواهم کمی کتاب شعر ببینم
predicted: می خواهم کتاب شعر ببینم
---
reference: همین شوهر فهیمه مگه نمی گفتی فرمانده بوده کو
predicted: همین شوهر فهیمه بینامی گفتی فهمانده بود کو
---
reference: اون جاها کسی رو نمیبینی که تو دستش کتاب نباشه
predicted: اون جاها کسی رو نمیبینی که تو دستش کتاب نباشه
---
reference: زندان رفتن من در این سالهای اخیر برام شانس بزرگی بود که معما و مشکل چندین سالهام را حل کرد
predicted: زندان رفتن من در این سالها اخی براب شانس بزرگی بود که معما و مشکل چندین سالهام را حل کرد
---
```
## Evaluation
**Test Result:**
- WER: 10.36% | [
-0.10966219753026962,
-0.08014152199029922,
-0.051723893731832504,
-0.0305162500590086,
0.08168373256921768,
0.012033766135573387,
-0.03713138774037361,
-0.03820332884788513,
-0.028373461216688156,
-0.09040869027376175,
0.0064581818878650665,
-0.12347524613142014,
-0.05854380875825882,
0.036788199096918106,
-0.00953640230000019,
-0.08241072297096252,
-0.0784742459654808,
-0.005471441429108381,
-0.07104761898517609,
-0.006680821068584919,
0.07113246619701385,
0.05103718116879463,
0.1423010379076004,
-0.0018496767152100801,
0.01435422245413065,
-0.011505643837153912,
-0.06042206659913063,
0.05263611674308777,
0.03058272786438465,
-0.08342702686786652,
0.06974282115697861,
0.10998699814081192,
0.10627566277980804,
0.02430243417620659,
-0.01573670655488968,
0.016178570687770844,
-0.03715919330716133,
-0.04759923368692398,
-0.015427090227603912,
-0.011048223823308945,
-0.04952159523963928,
0.00474840123206377,
0.05696777626872063,
-0.04489720240235329,
0.02612677775323391,
-0.05657694488763809,
-0.09924114495515823,
-0.025726284831762314,
0.01933123916387558,
0.11528977751731873,
-0.10004106163978577,
0.014136940240859985,
-0.055134501308202744,
0.07340279221534729,
-0.039097514003515244,
0.00135996553581208,
-0.010359473526477814,
0.0049767023883759975,
0.020194558426737785,
0.017097225412726402,
-0.032992031425237656,
-0.02238360047340393,
-0.03681590035557747,
-0.007271092850714922,
-0.04921257123351097,
-0.007938679307699203,
-0.03067292831838131,
-0.03562502562999725,
0.014211989939212799,
-0.03342160955071449,
-0.10269593447446823,
0.06662306934595108,
0.032382749021053314,
0.05586962029337883,
0.042940933257341385,
-0.01600828766822815,
-0.01075489167124033,
-0.02398248016834259,
0.0084665073081851,
-0.05293705314397812,
0.007009953260421753,
-0.06510774791240692,
-0.05515434220433235,
0.00540069118142128,
0.09532777965068817,
-0.006444613449275494,
-0.06783465296030045,
-0.04480145871639252,
-0.008150740526616573,
0.01298926305025816,
-0.026924986392259598,
0.009100950323045254,
0.011381261050701141,
0.07209454476833344,
-0.004726174287497997,
0.0732523649930954,
0.051723673939704895,
0.07395023107528687,
-0.033260591328144073,
0.09418344497680664,
0.023168252781033516,
-0.10541720688343048,
0.047418829053640366,
0.047502659261226654,
-0.06511305272579193,
-0.06578002870082855,
-0.016626382246613503,
0.03510057553648949,
-0.06392908841371536,
-0.09427764266729355,
-0.02359198033809662,
-0.03060820885002613,
-0.04169827327132225,
-0.0951765775680542,
-0.023340115323662758,
0.029576236382126808,
-0.030692894011735916,
-0.06760106235742569,
-0.00789792649447918,
-0.008730597794055939,
-0.03667456656694412,
0.009020796045660973,
0.023066507652401924,
-0.03610432893037796,
-0.00482966611161828,
-0.026629818603396416,
-0.023185119032859802,
6.744335655021518e-33,
0.0359422005712986,
0.04096694663167,
0.0000016845898471729015,
0.011375188827514648,
0.005566527601331472,
-0.08734464645385742,
-0.028456391766667366,
0.02783963270485401,
-0.055245090276002884,
0.018187377601861954,
-0.03349422290921211,
0.04641889035701752,
-0.06479936093091965,
-0.06702026724815369,
-0.012034166604280472,
0.0027070618234574795,
0.018604077398777008,
0.006578685715794563,
-0.05973381549119949,
0.014121382497251034,
0.1731923371553421,
0.1170533299446106,
0.05203170329332352,
-0.038843948394060135,
0.10343065112829208,
0.05696212127804756,
0.037572361528873444,
-0.05264651030302048,
-0.016603393480181694,
0.04725246503949165,
-0.022207001224160194,
-0.0627238005399704,
-0.008817989379167557,
-0.009897468611598015,
0.05085277557373047,
0.03318742290139198,
-0.04305241256952286,
-0.04880770295858383,
-0.05798868462443352,
-0.08666883409023285,
0.02537347562611103,
0.020017357543110847,
-0.011758030392229557,
0.002660951344296336,
-0.027943016961216927,
-0.11266466230154037,
-0.0014708776725456119,
0.11546358466148376,
0.04741598665714264,
0.07145139575004578,
-0.019951706752181053,
0.007313096895813942,
-0.0681670606136322,
0.02429952658712864,
-0.044815097004175186,
0.031131263822317123,
0.038517072796821594,
0.06851932406425476,
0.0074139987118542194,
0.03171258792281151,
0.00860758125782013,
-0.03367534652352333,
0.015348440036177635,
-0.00911046378314495,
0.02041545882821083,
-0.013270383700728416,
-0.034300677478313446,
0.004925786517560482,
0.09295280277729034,
0.042844489216804504,
0.013693747110664845,
-0.06352236866950989,
0.10093758255243301,
0.057318538427352905,
0.0892011821269989,
-0.0027127531357109547,
-0.019560500979423523,
-0.02458704635500908,
-0.05555550754070282,
0.038970861583948135,
-0.018545502796769142,
0.08789569139480591,
0.008778583258390427,
0.05138051137328148,
-0.06349167972803116,
0.005666755139827728,
0.04674562066793442,
-0.09882238507270813,
0.030995303764939308,
0.012592093087732792,
-0.008910109288990498,
0.019222648814320564,
0.002423505298793316,
-0.07986148446798325,
-0.03146810829639435,
-6.721431882279715e-33,
0.03015170246362686,
0.06140681356191635,
-0.0016937977634370327,
0.022081123664975166,
0.046537846326828,
-0.03194527328014374,
0.119967982172966,
0.05438705161213875,
0.033325571566820145,
0.002408575266599655,
0.06615862250328064,
-0.06770717352628708,
0.04285122826695442,
-0.10403502732515335,
0.04100075364112854,
0.04508058726787567,
0.03538500517606735,
-0.018755991011857986,
0.04276582598686218,
0.09134113788604736,
-0.03222823888063431,
0.05795789510011673,
-0.0036805763375014067,
0.043213579803705215,
-0.03923134505748749,
-0.03220897540450096,
0.015077056363224983,
0.013040519319474697,
0.01628296636044979,
-0.019639287143945694,
-0.035338517278432846,
0.008424969390034676,
-0.16564399003982544,
0.015030195005238056,
-0.03708501160144806,
-0.03631279990077019,
0.030475793406367302,
0.031861938536167145,
-0.06427430361509323,
0.04781736806035042,
0.08367365598678589,
0.028674716129899025,
-0.09128814935684204,
-0.0393262654542923,
0.011457588523626328,
-0.026387494057416916,
-0.033155638724565506,
0.010108452290296555,
0.022918347269296646,
-0.04235086590051651,
0.049526020884513855,
0.008630616590380669,
-0.03932717815041542,
0.022241367027163506,
0.03815142437815666,
-0.02411905862390995,
0.05869868025183678,
-0.026242032647132874,
-0.06693349033594131,
0.002211147453635931,
0.012703025713562965,
-0.03203447535634041,
-0.089824378490448,
-0.026408983394503593,
0.14142726361751556,
-0.0182902030646801,
-0.03169278800487518,
-0.013356742449104786,
0.04173712432384491,
-0.03193550556898117,
-0.031554628163576126,
-0.05461650714278221,
0.028425658121705055,
-0.021487770602107048,
0.02824477106332779,
0.0035128777381032705,
-0.09583962708711624,
0.00033918576082214713,
0.0033636409789323807,
-0.018227962777018547,
0.0015026411274448037,
0.006433567497879267,
0.07234823703765869,
0.07499012351036072,
0.02096708118915558,
0.0938984751701355,
-0.020762668922543526,
0.045708101242780685,
0.009413577616214752,
0.03979786857962608,
-0.02041238732635975,
0.04561362415552139,
0.005718765780329704,
0.03430123254656792,
-0.0011908254818990827,
-5.1695252523131785e-8,
-0.08665219694375992,
0.0020074688363820314,
-0.014491315931081772,
-0.018402405083179474,
-0.04869232699275017,
-0.024728914722800255,
-0.04328538104891777,
-0.012420293875038624,
-0.007430617231875658,
-0.013580129481852055,
0.05874790996313095,
0.008774013258516788,
-0.008166537620127201,
-0.01962958462536335,
-0.03560016304254532,
-0.026058461517095566,
-0.03547327592968941,
0.1487060934305191,
-0.04826204478740692,
-0.09509448707103729,
0.024957502260804176,
0.038801517337560654,
0.00399924349039793,
-0.017547985538840294,
0.006766976788640022,
0.03216522932052612,
-0.07700642943382263,
0.0912720113992691,
0.017427513375878334,
-0.0036959934514015913,
-0.014532097615301609,
-0.004902807995676994,
0.022598227486014366,
-0.07544703781604767,
0.01932738721370697,
-0.025380782783031464,
-0.030160794034600258,
0.0076026772148907185,
-0.0654769241809845,
0.05233018845319748,
0.10055802762508392,
0.07703743875026703,
-0.09453874826431274,
0.04702158272266388,
0.09025195986032486,
0.0068694818764925,
-0.003805243643000722,
-0.06791957467794418,
0.08713030070066452,
0.00875912792980671,
0.02393447235226631,
-0.024527814239263535,
-0.014884513802826405,
0.014972317032516003,
0.055108826607465744,
0.02856968529522419,
0.008276739157736301,
-0.007853316143155098,
0.05132770165801048,
-0.011635796166956425,
0.07802300900220871,
0.019077008590102196,
-0.03601514548063278,
0.04541649669408798
] |
alvaroalon2/biobert_chemical_ner | a5c41a966542076b2cea6a0ffca62d5610277e6f | 2022-07-11T11:12:51.000Z | [
"pytorch",
"tf",
"bert",
"token-classification",
"English",
"dataset:BC5CDR-chemicals",
"dataset:BC4CHEMD",
"transformers",
"NER",
"Biomedical",
"Chemicals",
"license:apache-2.0",
"autotrain_compatible"
] | token-classification | false | alvaroalon2 | null | alvaroalon2/biobert_chemical_ner | 2,823 | 4 | transformers | ---
language: "English"
tags:
- token-classification
- NER
- Biomedical
- Chemicals
datasets:
- BC5CDR-chemicals
- BC4CHEMD
license: apache-2.0
---
BioBERT model fine-tuned for NER on the BC5CDR-chemicals and BC4CHEMD corpora.
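A minimal usage sketch (not from the original authors) running the checkpoint as a `transformers` token-classification pipeline; the example sentence and the aggregation setting are illustrative:
```python
from transformers import AutoModelForTokenClassification, AutoTokenizer, pipeline

model_name = "alvaroalon2/biobert_chemical_ner"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForTokenClassification.from_pretrained(model_name)

# aggregation_strategy="simple" merges word pieces into whole entity spans.
ner = pipeline("ner", model=model, tokenizer=tokenizer, aggregation_strategy="simple")
print(ner("The patient was treated with aspirin and ibuprofen."))
```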
The model was fine-tuned for use in a BioNER/BioNEN system, which is available at: https://github.com/librairy/bio-ner | [
-0.01089440193027258,
-0.05420708656311035,
-0.02601551078259945,
-0.1296326220035553,
-0.0016834705602377653,
-0.019191769883036613,
-0.004305872600525618,
0.0440291091799736,
-0.03690078482031822,
-0.03560783714056015,
0.0027799715753644705,
-0.11266462504863739,
-0.011536268517374992,
0.020269297063350677,
-0.02473636530339718,
0.044897984713315964,
0.01905643939971924,
0.03431929275393486,
-0.06756219267845154,
-0.05381767824292183,
-0.02070208266377449,
0.08906499296426773,
0.05023092404007912,
0.023578234016895294,
-0.039608508348464966,
-0.006497385445982218,
-0.001975985709577799,
0.00261795986443758,
0.050534095615148544,
-0.030390799045562744,
0.10957314819097519,
0.06612136214971542,
0.022562960162758827,
-0.012804675847291946,
0.010376197285950184,
0.027252860367298126,
-0.059483643621206284,
-0.05553656071424484,
-0.002034342847764492,
0.01409110426902771,
-0.003712557489052415,
-0.01267476286739111,
-0.01864294335246086,
0.05449572950601578,
0.07112037390470505,
0.026218151673674583,
-0.04347683861851692,
-0.06650830805301666,
0.06421572715044022,
-0.025336770340800285,
-0.11237914860248566,
-0.02085699886083603,
0.015393806621432304,
0.06537923216819763,
0.04382103309035301,
-0.04540787264704704,
-0.04420776665210724,
-0.08274321258068085,
-0.019900143146514893,
-0.03688468784093857,
0.02621007151901722,
-0.03371690213680267,
-0.00952242873609066,
0.007229697424918413,
-0.03313635662198067,
0.0550253763794899,
0.0064350455068051815,
-0.010358039289712906,
0.07008958607912064,
-0.040223922580480576,
-0.05838629975914955,
-0.041835810989141464,
0.061241358518600464,
0.05760986730456352,
-0.07926736027002335,
0.05804881080985069,
0.0326005294919014,
0.048569463193416595,
0.06932634115219116,
-0.11684943735599518,
-0.03209701552987099,
-0.027447758242487907,
0.03579229488968849,
0.012716174125671387,
0.11926186084747314,
-0.03459945321083069,
0.02964155748486519,
-0.0001337446883553639,
-0.00325762084685266,
0.049314748495817184,
0.024850713089108467,
-0.04967137798666954,
0.11199092864990234,
-0.06593845784664154,
-0.14999234676361084,
-0.026673698797822,
0.03806948661804199,
0.04388118162751198,
0.03134557232260704,
0.04287844896316528,
-0.04206516221165657,
0.03892283886671066,
-0.06362777948379517,
-0.02241181582212448,
-0.07126487791538239,
-0.05448615550994873,
0.03553464636206627,
0.1291375607252121,
0.12811598181724548,
-0.018800832331180573,
0.04242895916104317,
0.04142839461565018,
-0.036035846918821335,
-0.024890126660466194,
0.024571256712079048,
-0.014778566546738148,
0.010772326029837132,
-0.05578397214412689,
0.07061035186052322,
0.017473438754677773,
-0.11333000659942627,
-0.04836397245526314,
-0.061263956129550934,
-0.14793378114700317,
0.04249844327569008,
0.02281181886792183,
-0.10000539571046829,
1.5038299431451149e-33,
0.05742707476019859,
0.029332272708415985,
0.02811703272163868,
0.024595722556114197,
-0.058625366538763046,
-0.037709128111600876,
-0.08133291453123093,
-0.035955753177404404,
-0.01813320815563202,
-0.07384652644395828,
0.0325436070561409,
-0.010947095230221748,
-0.08322039991617203,
0.06931356340646744,
0.01246583927422762,
-0.01830199360847473,
-0.06910166889429092,
-0.0407017357647419,
0.05800609663128853,
-0.0005007541622035205,
0.03638077899813652,
0.018872233107686043,
-0.01167839951813221,
-0.035392146557569504,
0.0016957075567916036,
0.027626221999526024,
-0.018469510599970818,
-0.06285122036933899,
0.0025915552396327257,
0.011146562173962593,
-0.055029962211847305,
-0.049527864903211594,
-0.0319790244102478,
0.031145500019192696,
0.050338760018348694,
-0.02035548724234104,
0.0012730283197015524,
0.04100461304187775,
0.033314745873212814,
-0.0169022586196661,
-0.01153579168021679,
0.05236227065324783,
0.08713772147893906,
-0.04836420714855194,
0.056509990245103836,
0.02321658283472061,
-0.020156722515821457,
0.02691083960235119,
0.1022464707493782,
0.0020636171102523804,
0.018663153052330017,
-0.054150935262441635,
0.057099804282188416,
-0.032964687794446945,
0.023343617096543312,
0.05849204584956169,
-0.03630223497748375,
-0.013061853125691414,
-0.00010610697063384578,
0.05449630320072174,
-0.0008100245613604784,
0.028391694650053978,
-0.0020269863307476044,
-0.04409593716263771,
0.11980050057172775,
-0.010315630584955215,
-0.023817136883735657,
-0.03586799278855324,
-0.000035341206967132166,
0.0059940870851278305,
-0.048547498881816864,
0.027909858152270317,
0.10543835908174515,
0.06323277950286865,
0.011547557078301907,
0.03137991949915886,
-0.02141539566218853,
-0.1473410725593567,
-0.06553182750940323,
0.05882181227207184,
-0.022937264293432236,
-0.0022151025477796793,
-0.05921720713376999,
0.041573915630578995,
-0.049227163195610046,
-0.040963657200336456,
0.04550071060657501,
-0.04623251408338547,
0.008456401526927948,
-0.03828709200024605,
0.05790838971734047,
-0.012435657903552055,
-0.046597789973020554,
-0.0037447111681103706,
-0.08578397333621979,
-2.7628000049101316e-33,
-0.008325590752065182,
-0.05473928153514862,
0.005471148528158665,
0.07314305007457733,
-0.004088434856384993,
-0.0023386336397379637,
0.08504236489534378,
0.038229890167713165,
0.029150689020752907,
-0.04057767614722252,
0.08108120411634445,
0.004461855161935091,
0.03472474217414856,
0.023363005369901657,
0.01284431666135788,
0.015588418580591679,
-0.11796371638774872,
0.03073042817413807,
0.04177587106823921,
0.08176958560943604,
-0.006946300156414509,
0.07957136631011963,
-0.10833875089883804,
0.0682547315955162,
0.050411373376846313,
0.06200917065143585,
-0.026115428656339645,
0.05880671739578247,
0.0186607725918293,
-0.04976733401417732,
-0.0070739020593464375,
0.02180682122707367,
-0.04606778547167778,
-0.07668661326169968,
-0.03558517247438431,
-0.045602891594171524,
0.015107695944607258,
-0.022575639188289642,
0.03302508220076561,
0.01802457496523857,
0.03916829451918602,
0.09359525144100189,
-0.07107022404670715,
0.02531747706234455,
0.05150506645441055,
-0.03768784552812576,
-0.10122263431549072,
0.026306701824069023,
-0.009011143818497658,
0.03200624883174896,
0.05288837105035782,
-0.030601078644394875,
-0.10652265697717667,
-0.05594826862215996,
0.0010801405878737569,
-0.0666794553399086,
0.02745957300066948,
-0.10961399972438812,
-0.026955602690577507,
0.015989676117897034,
-0.0806170403957367,
-0.01250651478767395,
0.05991161987185478,
0.013086412101984024,
-0.047290269285440445,
0.030978338792920113,
0.009998814202845097,
0.04938654601573944,
-0.030845049768686295,
-0.004830254707485437,
0.057624202221632004,
0.015517959371209145,
0.06002102792263031,
0.01566840335726738,
0.007220805156975985,
-0.06436651945114136,
-0.04225262254476547,
-0.02577625960111618,
-0.034712936729192734,
-0.039585843682289124,
-0.01762992888689041,
0.03167356178164482,
0.026116210967302322,
0.005259743891656399,
0.03520446643233299,
0.060518573969602585,
-0.003994193859398365,
-0.021467890590429306,
0.002226506359875202,
-0.06255115568637848,
0.022705864161252975,
0.043023400008678436,
0.00690868403762579,
0.13466699421405792,
-0.04888806864619255,
-4.453590207731395e-8,
0.02032584883272648,
-0.053078360855579376,
-0.007272444199770689,
-0.013546799309551716,
0.013145724311470985,
0.00565862562507391,
-0.06979938596487045,
-0.01827775314450264,
0.013003157451748848,
0.05023901164531708,
0.059609267860651016,
0.03361879289150238,
-0.1375451534986496,
-0.0022158639039844275,
0.040902115404605865,
0.046043090522289276,
0.017035644501447678,
0.0621829479932785,
-0.0367266908288002,
-0.0614197812974453,
-0.013264809735119343,
0.04362569376826286,
-0.011585786007344723,
-0.034061629325151443,
0.09031777828931808,
-0.044845666736364365,
-0.007630383130162954,
-0.03381789103150368,
0.05035826936364174,
-0.07238331437110901,
0.01639777049422264,
0.0802696943283081,
0.02586882747709751,
0.014565353281795979,
0.09732649475336075,
0.0035011046566069126,
-0.041504599153995514,
-0.04559538885951042,
-0.0476582907140255,
0.056795164942741394,
0.047798965126276016,
0.021505359560251236,
-0.16901545226573944,
-0.0030220099724829197,
0.09356090426445007,
-0.034480709582567215,
-0.03353244811296463,
-0.008980685845017433,
0.027866609394550323,
-0.01593784987926483,
0.043834488838911057,
-0.035875461995601654,
-0.01528371125459671,
0.017287611961364746,
-0.010535885579884052,
0.09352529048919678,
-0.029727227985858917,
-0.021576592698693275,
0.05107041075825691,
-0.028355484828352928,
0.044350966811180115,
0.007279612123966217,
-0.010728050023317337,
-0.02266295626759529
] |
castorini/t5-base-canard | f0f21fc4cae5dc130d97e4fa4dc07d7710875b7b | 2021-06-23T11:56:05.000Z | [
"pytorch",
"jax",
"t5",
"text2text-generation",
"transformers",
"autotrain_compatible"
] | text2text-generation | false | castorini | null | castorini/t5-base-canard | 2,816 | null | transformers | This model is trained for conversational question rewriting.
Usage:
Source text format: ${HISTORY} ||| ${CURRENT_QUESTION}
example from [CANARD](https://sites.google.com/view/qanta/projects/canard):
Frank Zappa ||| Disbandment ||| What group disbanded ||| Zappa and the Mothers of Invention ||| When did they disband?
Target text:
When did Zappa and the Mothers of Invention disband?
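The card stops at the text format, so the following is only a rough sketch of how such a T5 checkpoint is usually queried with `transformers`; the generation settings are assumptions and are not taken from the original card or the training guide.
```python
# Sketch only (generation settings are assumptions): rewrite the follow-up
# question above using the standard T5 classes from transformers.
from transformers import AutoTokenizer, T5ForConditionalGeneration

model_name = "castorini/t5-base-canard"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = T5ForConditionalGeneration.from_pretrained(model_name)

# History turns and the current question, joined with "|||" as described above.
source = (
    "Frank Zappa ||| Disbandment ||| What group disbanded ||| "
    "Zappa and the Mothers of Invention ||| When did they disband?"
)
inputs = tokenizer(source, return_tensors="pt")
outputs = model.generate(**inputs, max_length=64, num_beams=4)  # illustrative settings
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
# Expected to resemble: "When did Zappa and the Mothers of Invention disband?"
```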
You can find our guide to reproduce the training in this [repo](https://github.com/castorini/chatty-goose/blob/c7d0cd8c45354b09b5fb930ab0b5af8be2e5772b/docs/t5_finetuning.md). | [
-0.03612363710999489,
0.05223914980888367,
-0.01490783877670765,
0.04411592334508896,
-0.004205859731882811,
0.06172008439898491,
-0.0067744688130915165,
-0.04232780262827873,
0.054592885076999664,
-0.01891736313700676,
0.0427757129073143,
0.029016930609941483,
0.03576711192727089,
-0.0051223840564489365,
-0.04077782854437828,
0.04022785648703575,
-0.010638467036187649,
-0.036683544516563416,
-0.045859672129154205,
-0.048115793615579605,
0.09399434924125671,
0.09637989103794098,
0.06408761441707611,
0.03282947093248367,
0.03873322531580925,
0.043705061078071594,
-0.0408623181283474,
-0.00276582152582705,
0.051143791526556015,
-0.028188198804855347,
-0.03779320418834686,
0.053983625024557114,
-0.03235406428575516,
0.08916092664003372,
-0.009956026449799538,
0.05673860013484955,
-0.0021086789201945066,
0.056698840111494064,
0.023316701874136925,
-0.0089822206646204,
-0.02580319344997406,
-0.04726497083902359,
-0.08550364524126053,
-0.07619556039571762,
0.09292984008789062,
-0.06511324644088745,
-0.04061571881175041,
0.038580019026994705,
-0.03256255015730858,
0.07529110461473465,
-0.05648336559534073,
-0.09233007580041885,
0.0488833412528038,
0.05597754567861557,
0.06854063272476196,
-0.003123937640339136,
-0.05995135009288788,
0.06424281001091003,
-0.04278931766748428,
-0.02267126366496086,
-0.03512083366513252,
-0.041867759078741074,
-0.08783391863107681,
0.004239900037646294,
0.0018777422374114394,
-0.03684705123305321,
-0.04551497474312782,
0.03523595258593559,
-0.07723987847566605,
-0.026439789682626724,
-0.04979360103607178,
0.00041584434802643955,
-0.04126785695552826,
-0.020665429532527924,
-0.02490573190152645,
0.03504868224263191,
0.011968106962740421,
0.003882134798914194,
-0.03658004477620125,
-0.07510378211736679,
0.03487920016050339,
-0.10744448751211166,
0.057671792805194855,
0.019136331975460052,
0.07985971868038177,
-0.0044757104478776455,
0.06067861244082451,
-0.0056798094883561134,
0.03880735859274864,
-0.028139490634202957,
-0.08892273902893066,
-0.041479870676994324,
0.11459919810295105,
0.00693465443328023,
0.03819525986909866,
0.02099059894680977,
-0.04417061805725098,
0.04005490988492966,
-0.026036575436592102,
0.1069626733660698,
0.04703674837946892,
0.054240189492702484,
-0.047507088631391525,
-0.06567493081092834,
-0.0629926472902298,
-0.04280243441462517,
-0.010110144503414631,
0.024111341685056686,
0.014350983314216137,
-0.04161551594734192,
-0.07361925393342972,
0.029618287459015846,
-0.026017602533102036,
-0.036657217890024185,
0.054406724870204926,
-0.06410329043865204,
0.049662090837955475,
0.0283193476498127,
-0.047154899686574936,
-0.02573188580572605,
0.04152080789208412,
-0.03552226349711418,
-0.05216122046113014,
0.028423424810171127,
-0.04964448884129524,
-0.013241764158010483,
-0.07751231640577316,
7.303391952064967e-33,
0.1133103296160698,
0.09179455041885376,
-0.04131494462490082,
0.08641336113214493,
0.06407396495342255,
0.04553426057100296,
-0.040170926600694656,
0.07041306793689728,
0.04181975498795509,
-0.02918066270649433,
0.04573327302932739,
-0.04417422413825989,
-0.03342539072036743,
-0.05312652513384819,
-0.012564151547849178,
-0.04981417581439018,
-0.1283833384513855,
0.019848762080073357,
0.020673707127571106,
0.014328325167298317,
0.04881563410162926,
0.14724457263946533,
-0.02789955772459507,
-0.018640244379639626,
0.05653546005487442,
0.05017898604273796,
0.006774380337446928,
-0.02755679190158844,
-0.044104017317295074,
0.0039147124625742435,
-0.017082830891013145,
-0.042088285088539124,
-0.07495826482772827,
-0.03327740356326103,
0.061777837574481964,
0.013946378603577614,
0.07545993477106094,
-0.09579522907733917,
-0.022610707208514214,
-0.07352463901042938,
0.01859784498810768,
-0.011445390991866589,
0.031692031770944595,
-0.09026230126619339,
-0.037086691707372665,
-0.0065522342920303345,
-0.006961571052670479,
-0.06017469987273216,
0.04055161029100418,
-0.03191891312599182,
0.01799117960035801,
0.05506342649459839,
-0.04468628019094467,
-0.05508894473314285,
0.0028895391151309013,
-0.01829204149544239,
-0.07406900823116302,
-0.0077176387421786785,
0.020104192197322845,
0.06835384666919708,
0.06605733931064606,
0.07239261269569397,
0.03967789560556412,
0.07420778274536133,
-0.007548131048679352,
0.06811095774173737,
-0.07755953818559647,
-0.0007151197060011327,
0.08307483792304993,
0.02129698358476162,
-0.010794980451464653,
0.020002955570816994,
-0.07385648041963577,
-0.0438094399869442,
-0.03664681315422058,
-0.06182228773832321,
-0.031091494485735893,
0.013604855164885521,
0.04727499559521675,
-0.025447649881243706,
0.02207580953836441,
-0.024072343483567238,
0.04469306766986847,
-0.05129071697592735,
-0.018078692257404327,
-0.08845025300979614,
0.0595078282058239,
-0.09551984816789627,
0.0027221804484725,
-0.046333834528923035,
-0.0032368600368499756,
0.021255452185869217,
-0.02292490005493164,
-0.03834177553653717,
0.04576887935400009,
-7.969060443940212e-33,
0.038884278386831284,
0.009038328193128109,
-0.04482985660433769,
0.04515300691127777,
0.05757277086377144,
-0.10791277885437012,
-0.030916661024093628,
0.10395707190036774,
0.031964801251888275,
-0.04763749986886978,
-0.018840570002794266,
-0.07540260255336761,
0.02760227769613266,
0.015353338792920113,
-0.036145687103271484,
-0.058481279760599136,
-0.0016298367409035563,
-0.0865519791841507,
-0.01568678952753544,
-0.03311159461736679,
-0.022820523008704185,
0.05628414824604988,
-0.08365055173635483,
0.009580091573297977,
-0.013290248811244965,
0.04095156490802765,
0.0614604577422142,
0.02401958778500557,
0.021077660843729973,
-0.0341014564037323,
-0.057417456060647964,
-0.10108095407485962,
-0.04227015748620033,
0.0042741065844893456,
-0.06668348610401154,
0.06347592920064926,
-0.03278611972928047,
0.006133559625595808,
0.00026377852191217244,
-0.014009554870426655,
0.043630048632621765,
0.033512163907289505,
-0.11274772882461548,
0.03399904817342758,
-0.03777617961168289,
-0.024594122543931007,
-0.05746692791581154,
0.0030486180912703276,
0.08895238488912582,
-0.02061854861676693,
0.05168765410780907,
-0.007951741106808186,
-0.061451785266399384,
-0.02983279339969158,
-0.03647398203611374,
-0.006365278735756874,
0.021991102024912834,
-0.04125334322452545,
-0.021830664947628975,
-0.009587939828634262,
-0.07940392941236496,
0.06328202039003372,
0.001835281727835536,
-0.02434871718287468,
0.10215768218040466,
-0.099507175385952,
0.029509786516427994,
0.07166825234889984,
0.010976368561387062,
-0.036714330315589905,
0.026621125638484955,
-0.045322395861148834,
0.008542574010789394,
0.04491240531206131,
0.0471033975481987,
0.02820393443107605,
-0.052211664617061615,
-0.022127851843833923,
-0.022040309384465218,
-0.044391125440597534,
-0.02824167162179947,
-0.067674420773983,
-0.015701182186603546,
0.1634945571422577,
0.03059234470129013,
0.050561413168907166,
0.022948995232582092,
0.1188361719250679,
0.020384829491376877,
0.027756119146943092,
0.0407026931643486,
-0.11293287575244904,
0.05054478347301483,
0.09644049406051636,
-0.09471487998962402,
-6.451234213500356e-8,
-0.0705321654677391,
0.04279111698269844,
-0.032510511577129364,
0.06048499792814255,
0.027629289776086807,
-0.03766978904604912,
0.05518954619765282,
-0.028572211042046547,
-0.022002648562192917,
-0.06499596685171127,
-0.04424290731549263,
0.08455540239810944,
-0.07554733753204346,
-0.03332484886050224,
0.018535586073994637,
0.027095720171928406,
0.017576154321432114,
0.03329116106033325,
-0.017054768279194832,
-0.06076301261782646,
0.04375293850898743,
0.0009803901193663478,
-0.036711808294057846,
0.05750982463359833,
-0.021258056163787842,
0.0753205418586731,
-0.09152857959270477,
0.10520483553409576,
-0.03397524729371071,
0.006981242448091507,
-0.0030504423193633556,
0.043827421963214874,
-0.014639942906796932,
0.010157963261008263,
-0.004752499982714653,
0.003450757125392556,
-0.02322191558778286,
0.012049279175698757,
0.0047673992812633514,
-0.0054709650576114655,
0.046065136790275574,
0.03611592575907707,
-0.017177164554595947,
0.04819019138813019,
0.021159764379262924,
-0.016612958163022995,
-0.06786222010850906,
-0.12225436419248581,
-0.013826102949678898,
-0.06472530961036682,
-0.022954441606998444,
0.017409665510058403,
0.017867881804704666,
-0.009478827938437462,
-0.0070669627748429775,
0.007414591033011675,
0.06862431764602661,
0.03896886110305786,
-0.044495388865470886,
-0.02835325337946415,
0.1158338189125061,
0.04278377443552017,
0.0690426230430603,
0.022713666781783104
] |
julien-c/hotdog-not-hotdog | e268d30900a9e75185eb7543bd2ffceb80686cde | 2021-07-02T12:13:28.000Z | [
"pytorch",
"tensorboard",
"vit",
"image-classification",
"transformers",
"huggingpics",
"model-index"
] | image-classification | false | julien-c | null | julien-c/hotdog-not-hotdog | 2,816 | 1 | transformers | ---
tags:
- image-classification
- huggingpics
metrics:
- accuracy
model-index:
- name: hotdog-not-hotdog
results:
- task:
name: Image Classification
type: image-classification
metrics:
- name: Accuracy
type: accuracy
value: 0.824999988079071
---
# hotdog-not-hotdog
Autogenerated by HuggingPics🤗🖼️
Create your own image classifier for **anything** by running [the demo on Google Colab](https://colab.research.google.com/github/nateraw/huggingpics/blob/main/HuggingPics.ipynb).
Report any issues with the demo at the [github repo](https://github.com/nateraw/huggingpics).
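The autogenerated card does not include an inference snippet; below is a minimal sketch using the `image-classification` pipeline, where the image path is a placeholder and not part of the original card.
```python
# Minimal sketch (image path is a placeholder): classify an image with this checkpoint.
from transformers import pipeline

classifier = pipeline("image-classification", model="julien-c/hotdog-not-hotdog")
predictions = classifier("path/to/some_food_photo.jpg")  # any local path or URL
for p in predictions:
    print(f"{p['label']}: {p['score']:.3f}")
```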
## Example Images
#### hot dog

#### not hot dog
 | [
-0.09999463707208633,
-0.048530738800764084,
0.04527292773127556,
0.06872501224279404,
0.05842870846390724,
-0.0507686547935009,
-0.013281834311783314,
-0.1009514108300209,
-0.04607687145471573,
-0.05774803087115288,
0.03640129044651985,
-0.09652531147003174,
0.061382830142974854,
0.07255293428897858,
-0.04656083881855011,
0.014228046871721745,
0.01436483021825552,
-0.002632583724334836,
-0.04568378999829292,
-0.010947898030281067,
-0.0986570492386818,
-0.005352015607059002,
0.09983015060424805,
-0.016664013266563416,
-0.05381530895829201,
-0.01452126819640398,
-0.03412625938653946,
-0.028070691972970963,
0.007082086056470871,
-0.079135000705719,
-0.011837393045425415,
-0.012400684878230095,
-0.030485408380627632,
0.0028000439051538706,
0.019135668873786926,
0.10615525394678116,
-0.008446971885859966,
0.02729666605591774,
-0.03440485894680023,
-0.007651719730347395,
0.06061721593141556,
-0.06580569595098495,
0.03719620406627655,
-0.026663485914468765,
0.06116862967610359,
0.076539047062397,
-0.09673460572957993,
-0.062250882387161255,
-0.02484738826751709,
-0.03684886544942856,
-0.09458048641681671,
-0.06900312006473541,
0.014956888742744923,
0.009145718067884445,
-0.02006944641470909,
0.003981668036431074,
-0.0032367028761655092,
-0.05305676907300949,
0.008507009595632553,
-0.0010681847343221307,
0.07032863050699234,
0.02679975889623165,
-0.06459479033946991,
-0.03347121924161911,
0.04087546840310097,
0.03413401544094086,
0.010376309975981712,
0.010753471404314041,
0.055267319083213806,
-0.004398635122925043,
0.08178505301475525,
0.04360643029212952,
0.04014980420470238,
0.014938022941350937,
0.026090877130627632,
-0.02018195018172264,
-0.05875891447067261,
0.027416963130235672,
0.1094188317656517,
-0.07953553646802902,
-0.05582059547305107,
-0.04320552572607994,
0.02704191952943802,
0.018845655024051666,
0.09847618639469147,
0.019781598821282387,
-0.044077396392822266,
0.0005430780001915991,
-0.07408437132835388,
-0.012420100159943104,
-0.006697395816445351,
-0.03714878112077713,
-0.0349874347448349,
-0.015736274421215057,
-0.015172750689089298,
-0.000637623539660126,
-0.05273761972784996,
0.0015353575581684709,
-0.08584904670715332,
0.05878501757979393,
-0.1059270054101944,
-0.04004378616809845,
-0.03381695598363876,
0.06989597529172897,
0.14491938054561615,
-0.008650466799736023,
-0.035699959844350815,
-0.004283849615603685,
0.10020734369754791,
0.0036042623687535524,
-0.0288079846650362,
-0.01304903905838728,
-0.03204739838838577,
-0.008815103210508823,
-0.018221097066998482,
-0.011627033352851868,
-0.03445408120751381,
-0.05269470065832138,
0.06542927771806717,
-0.011427633464336395,
0.018214339390397072,
0.042323797941207886,
-0.012792513705790043,
-0.056266870349645615,
0.010281099937856197,
-0.0679503083229065,
-0.1057063564658165,
5.426330613919275e-33,
0.02912077121436596,
0.005276985466480255,
0.062104567885398865,
-0.03443654254078865,
0.02558118849992752,
-0.058913446962833405,
-0.06505263596773148,
-0.04894133657217026,
-0.01206789817661047,
0.02991667576134205,
-0.055899638682603836,
0.04043092578649521,
-0.013628573156893253,
0.07877123355865479,
0.0697002187371254,
-0.032935094088315964,
-0.0582461841404438,
0.045971907675266266,
-0.01830190233886242,
0.047622714191675186,
-0.029783405363559723,
0.02369256317615509,
0.015056256204843521,
-0.017895903438329697,
-0.06814352422952652,
0.09282469749450684,
0.03879113495349884,
-0.01960463635623455,
-0.010556712746620178,
0.039226505905389786,
-0.02331603318452835,
-0.04993617162108421,
0.09687944501638412,
0.020576849579811096,
-0.0351564846932888,
-0.0326327383518219,
-0.03903915733098984,
0.03215278312563896,
-0.058999378234148026,
0.011909144930541515,
0.04858333617448807,
0.018066992983222008,
0.016201097518205643,
0.002530527301132679,
-0.0137971555814147,
0.08413922041654587,
0.05942355841398239,
-0.016344180330634117,
-0.033847082406282425,
0.009523343294858932,
0.05763707682490349,
-0.030722105875611305,
0.0006119420286267996,
-0.030400268733501434,
-0.10905390232801437,
-0.0038400657940655947,
0.07475464046001434,
0.051864322274923325,
-0.00981083232909441,
-0.029083730652928352,
0.04041231423616409,
0.046065930277109146,
0.027639402076601982,
-0.0407065823674202,
0.03383001685142517,
-0.014815271832048893,
0.004515043925493956,
0.027868010103702545,
-0.017297429963946342,
0.03169278800487518,
0.0003585749363992363,
0.08011647313833237,
-0.02881000004708767,
-0.038691163063049316,
0.08546919375658035,
-0.07161486148834229,
0.07999222725629807,
-0.004420078359544277,
-0.03425069898366928,
0.00022451252152677625,
-0.016922781243920326,
0.02071473002433777,
0.03674604371190071,
-0.060088615864515305,
-0.109312042593956,
-0.058980923146009445,
0.04596490412950516,
0.0171799473464489,
-0.04261664301156998,
0.07231800258159637,
-0.008543274365365505,
0.06453775614500046,
-0.007646644022315741,
0.04279324412345886,
-0.059118177741765976,
-5.176606022636696e-33,
0.024690285325050354,
0.007325130049139261,
-0.009832832962274551,
0.05850822851061821,
0.000041500945371808484,
0.04703734070062637,
0.012418746948242188,
0.10340901464223862,
0.018558116629719734,
-0.018619559705257416,
0.07684077322483063,
0.008948566392064095,
-0.05783720314502716,
-0.05002401024103165,
0.0066278791055083275,
0.021508246660232544,
-0.07726366817951202,
-0.011247263289988041,
-0.06050968915224075,
0.023173097521066666,
0.02641785331070423,
0.072768434882164,
-0.04831743985414505,
0.0682884082198143,
-0.060700997710227966,
0.108576200902462,
0.041562579572200775,
0.019244462251663208,
0.06578219681978226,
-0.06554325670003891,
0.0006844597519375384,
-0.08028952777385712,
-0.07971180975437164,
0.01884978450834751,
0.01888039894402027,
0.0037566612008959055,
-0.010051465593278408,
-0.009652771055698395,
-0.0027542533352971077,
0.006819147150963545,
0.10203677415847778,
-0.007872718386352062,
-0.15007923543453217,
0.04006727784872055,
0.02458573691546917,
0.0158340223133564,
-0.017013264819979668,
-0.028914548456668854,
-0.010143772698938847,
-0.0022581827361136675,
0.01877221278846264,
-0.04434538632631302,
-0.11337244510650635,
0.03538456931710243,
-0.025543274357914925,
0.0063989595510065556,
-0.006034729070961475,
-0.031074954196810722,
0.006091950926929712,
0.06305736303329468,
-0.04912751540541649,
-0.03497280180454254,
-0.01613137125968933,
0.032087329775094986,
-0.008090587332844734,
-0.03336653858423233,
-0.06307173520326614,
0.015520129352807999,
0.016781320795416832,
0.00850134901702404,
0.020812394097447395,
0.10967297852039337,
0.01922902651131153,
-0.022825881838798523,
-0.04095534235239029,
0.018081236630678177,
0.04121806100010872,
-0.022553004324436188,
0.06631936877965927,
-0.07672237604856491,
-0.09460730850696564,
-0.013592137955129147,
0.03416525200009346,
0.1396525800228119,
0.05478021502494812,
0.10211719572544098,
0.04506983608007431,
0.043652426451444626,
0.035392049700021744,
-0.015198937617242336,
0.015199627727270126,
0.0659634917974472,
0.06474313139915466,
0.10604248940944672,
-0.0013285605236887932,
-5.290565141535808e-8,
-0.007601309102028608,
0.006886358838528395,
-0.010004599578678608,
0.0063178762793540955,
-0.01941712386906147,
-0.018337951973080635,
-0.041163451969623566,
-0.024059321731328964,
-0.016462532803416252,
0.027355117723345757,
0.039552804082632065,
0.031160367652773857,
-0.0759962871670723,
0.03313332796096802,
-0.043721091002225876,
-0.06476347148418427,
0.01928205043077469,
0.1592196524143219,
-0.02291395142674446,
0.017174137756228447,
-0.06378995627164841,
-0.01031012088060379,
0.034912195056676865,
-0.10988931357860565,
0.0025453693233430386,
-0.07853008061647415,
-0.08623046427965164,
0.018136629834771156,
-0.007165323942899704,
-0.0348554365336895,
0.007134090643376112,
0.03821864724159241,
-0.02513819932937622,
-0.01563960686326027,
0.06937462091445923,
0.05487872660160065,
-0.03699036315083504,
-0.08419813215732574,
-0.028240351006388664,
-0.02017231099307537,
0.01100347749888897,
0.03213838115334511,
-0.0670061856508255,
-0.01715940423309803,
0.050951890647411346,
-0.017404397949576378,
0.07718174904584885,
-0.08839546889066696,
0.006716595496982336,
0.11717259883880615,
-0.029958870261907578,
-0.020418792963027954,
-0.06626825779676437,
0.03850112110376358,
0.03485763072967529,
-0.060700930655002594,
0.022843124344944954,
-0.10513614118099213,
0.10814882069826126,
0.06874145567417145,
0.09853152185678482,
-0.06895699352025986,
-0.04182364046573639,
0.08118350803852081
] |
csebuetnlp/banglabert | 7bed1e381af5564564faadc9718f25c6116491e0 | 2022-05-10T05:17:06.000Z | [
"pytorch",
"electra",
"pretraining",
"bn",
"arxiv:2101.00204",
"transformers"
] | null | false | csebuetnlp | null | csebuetnlp/banglabert | 2,812 | 2 | transformers | ---
language:
- bn
license: cc-by-nc-sa-4.0
---
# BanglaBERT
This repository contains the pretrained discriminator checkpoint of the model **BanglaBERT**. This is an [ELECTRA](https://openreview.net/pdf?id=r1xMH1BtvB) discriminator model pretrained with the Replaced Token Detection (RTD) objective. Finetuned models using this checkpoint achieve state-of-the-art results on many of the NLP tasks in Bengali.
For finetuning on different downstream tasks such as `Sentiment classification`, `Named Entity Recognition`, `Natural Language Inference` etc., refer to the scripts in the official GitHub [repository](https://github.com/csebuetnlp/banglabert).
**Note**: This model was pretrained using a specific normalization pipeline available [here](https://github.com/csebuetnlp/normalizer). All finetuning scripts in the official GitHub repository use this normalization by default. If you need to adapt the pretrained model for a different task, make sure the text units are normalized using this pipeline before tokenizing to get the best results. A basic example is given below:
## Using this model as a discriminator in `transformers` (tested on 4.11.0.dev0)
```python
from transformers import AutoModelForPreTraining, AutoTokenizer
from normalizer import normalize # pip install git+https://github.com/csebuetnlp/normalizer
import torch
model = AutoModelForPreTraining.from_pretrained("csebuetnlp/banglabert")
tokenizer = AutoTokenizer.from_pretrained("csebuetnlp/banglabert")
original_sentence = "আমি কৃতজ্ঞ কারণ আপনি আমার জন্য অনেক কিছু করেছেন।"
fake_sentence = "আমি হতাশ কারণ আপনি আমার জন্য অনেক কিছু করেছেন।"
fake_sentence = normalize(fake_sentence) # this normalization step is required before tokenizing the text
fake_tokens = tokenizer.tokenize(fake_sentence)
fake_inputs = tokenizer.encode(fake_sentence, return_tensors="pt")
discriminator_outputs = model(fake_inputs).logits
predictions = torch.round((torch.sign(discriminator_outputs) + 1) / 2)
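# A rounded prediction of 1 flags a token the discriminator judges to be replaced (fake); 0 means it looks original.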
[print("%7s" % token, end="") for token in fake_tokens]
print("\n" + "-" * 50)
[print("%7s" % int(prediction), end="") for prediction in predictions.squeeze().tolist()[1:-1]]
print("\n" + "-" * 50)
```
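The official fine-tuning scripts in the repository linked above should be preferred; purely as an illustration, the checkpoint can also be loaded directly with a fresh classification head. The `num_labels` value and the reuse of the sentence above are assumptions for the sketch, not part of the official setup.
```python
# Illustration only (assumptions noted above): load the discriminator with a newly
# initialised sequence-classification head; it must be fine-tuned before use.
from transformers import AutoModelForSequenceClassification, AutoTokenizer
from normalizer import normalize  # same normalization pipeline as above

model = AutoModelForSequenceClassification.from_pretrained(
    "csebuetnlp/banglabert", num_labels=2  # num_labels depends on the downstream task
)
tokenizer = AutoTokenizer.from_pretrained("csebuetnlp/banglabert")

text = normalize("আমি কৃতজ্ঞ কারণ আপনি আমার জন্য অনেক কিছু করেছেন।")
inputs = tokenizer(text, return_tensors="pt")
logits = model(**inputs).logits  # untrained head: scores are meaningful only after fine-tuning
```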
## Benchmarks
* Zero-shot cross-lingual transfer-learning
| Model | Params | SC (macro-F1) | NLI (accuracy) | NER (micro-F1) | QA (EM/F1) | BangLUE score |
|----------------|-----------|-----------|-----------|-----------|-----------|-----------|
|[mBERT](https://huggingface.co/bert-base-multilingual-cased) | 180M | 27.05 | 62.22 | 39.27 | 59.01/64.18 | 50.35 |
|[XLM-R (base)](https://huggingface.co/xlm-roberta-base) | 270M | 42.03 | 72.18 | 45.37 | 55.03/61.83 | 55.29 |
|[XLM-R (large)](https://huggingface.co/xlm-roberta-large) | 550M | 49.49 | 78.13 | 56.48 | 71.13/77.70 | 66.59 |
|[BanglishBERT](https://huggingface.co/csebuetnlp/banglishbert) | 110M | 48.39 | 75.26 | 55.56 | 72.87/78.63 | 66.14 |
* Supervised fine-tuning
| Model | Params | SC (macro-F1) | NLI (accuracy) | NER (micro-F1) | QA (EM/F1) | BangLUE score |
|----------------|-----------|-----------|-----------|-----------|-----------|-----------|
|[mBERT](https://huggingface.co/bert-base-multilingual-cased) | 180M | 67.59 | 75.13 | 68.97 | 67.12/72.64 | 70.29 |
|[XLM-R (base)](https://huggingface.co/xlm-roberta-base) | 270M | 69.54 | 78.46 | 73.32 | 68.09/74.27 | 72.82 |
|[XLM-R (large)](https://huggingface.co/xlm-roberta-large) | 550M | 70.97 | 82.40 | 78.39 | 73.15/79.06 | 76.79 |
|[sahajBERT](https://huggingface.co/neuropark/sahajBERT) | 18M | 71.12 | 76.92 | 70.94 | 65.48/70.69 | 71.03 |
|[BanglishBERT](https://huggingface.co/csebuetnlp/banglishbert) | 110M | 70.61 | 80.95 | 76.28 | 72.43/78.40 | 75.73 |
|[BanglaBERT](https://huggingface.co/csebuetnlp/banglabert) | 110M | 72.89 | 82.80 | 77.78 | 72.63/79.34 | **77.09** |
The benchmarking datasets are as follows:
* **SC:** **[Sentiment Classification](https://aclanthology.org/2021.findings-emnlp.278)**
* **NER:** **[Named Entity Recognition](https://multiconer.github.io/competition)**
* **NLI:** **[Natural Language Inference](https://github.com/csebuetnlp/banglabert/#datasets)**
* **QA:** **[Question Answering](https://github.com/csebuetnlp/banglabert/#datasets)**
## Citation
If you use this model, please cite the following paper:
```
@inproceedings{bhattacharjee-etal-2022-banglabert,
title = {BanglaBERT: Language Model Pretraining and Benchmarks for Low-Resource Language Understanding Evaluation in Bangla},
author = "Bhattacharjee, Abhik and
Hasan, Tahmid and
Mubasshir, Kazi and
Islam, Md. Saiful and
Uddin, Wasi Ahmad and
Iqbal, Anindya and
Rahman, M. Sohel and
Shahriyar, Rifat",
booktitle = "Findings of the North American Chapter of the Association for Computational Linguistics: NAACL 2022",
month = jul,
year = {2022},
url = {https://arxiv.org/abs/2101.00204},
eprinttype = {arXiv},
eprint = {2101.00204}
}
```
If you use the normalization module, please cite the following paper:
```
@inproceedings{hasan-etal-2020-low,
title = "Not Low-Resource Anymore: Aligner Ensembling, Batch Filtering, and New Datasets for {B}engali-{E}nglish Machine Translation",
author = "Hasan, Tahmid and
Bhattacharjee, Abhik and
Samin, Kazi and
Hasan, Masum and
Basak, Madhusudan and
Rahman, M. Sohel and
Shahriyar, Rifat",
booktitle = "Proceedings of the 2020 Conference on Empirical Methods in Natural Language Processing (EMNLP)",
month = nov,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://www.aclweb.org/anthology/2020.emnlp-main.207",
doi = "10.18653/v1/2020.emnlp-main.207",
pages = "2612--2623",
abstract = "Despite being the seventh most widely spoken language in the world, Bengali has received much less attention in machine translation literature due to being low in resources. Most publicly available parallel corpora for Bengali are not large enough; and have rather poor quality, mostly because of incorrect sentence alignments resulting from erroneous sentence segmentation, and also because of a high volume of noise present in them. In this work, we build a customized sentence segmenter for Bengali and propose two novel methods for parallel corpus creation on low-resource setups: aligner ensembling and batch filtering. With the segmenter and the two methods combined, we compile a high-quality Bengali-English parallel corpus comprising of 2.75 million sentence pairs, more than 2 million of which were not available before. Training on neural models, we achieve an improvement of more than 9 BLEU score over previous approaches to Bengali-English machine translation. We also evaluate on a new test set of 1000 pairs made with extensive quality control. We release the segmenter, parallel corpus, and the evaluation set, thus elevating Bengali from its low-resource status. To the best of our knowledge, this is the first ever large scale study on Bengali-English machine translation. We believe our study will pave the way for future research on Bengali-English machine translation as well as other low-resource languages. Our data and code are available at https://github.com/csebuetnlp/banglanmt.",
}
```
| [
-0.057380154728889465,
-0.060250818729400635,
-0.023454023525118828,
0.013787070289254189,
-0.03129493072628975,
0.08502672612667084,
0.012080545537173748,
-0.01737048104405403,
-0.029612688347697258,
0.011899353936314583,
-0.017538808286190033,
-0.0446692518889904,
-0.016265861690044403,
0.02511834353208542,
0.039793625473976135,
0.014798668213188648,
0.06581450253725052,
0.019735313951969147,
-0.10574249178171158,
-0.15355344116687775,
-0.050820812582969666,
0.08640553802251816,
-0.009735130704939365,
0.012961686588823795,
0.033846959471702576,
-0.044958874583244324,
-0.026203736662864685,
-0.026734229177236557,
0.09223300218582153,
-0.04281150549650192,
0.01354539580643177,
0.07397238910198212,
0.015007237903773785,
0.07499661296606064,
0.011225047521293163,
0.009881197474896908,
0.006314326077699661,
0.0077726771123707294,
0.06167984753847122,
-0.06091216579079628,
0.008525785990059376,
0.018620088696479797,
-0.06668101251125336,
0.00015499834262300283,
0.09709928929805756,
-0.04432341456413269,
-0.07759062200784683,
0.05359863117337227,
-0.059754181653261185,
-0.07367296516895294,
-0.07702816277742386,
-0.027008287608623505,
0.059481892734766006,
0.08520495891571045,
-0.035624582320451736,
-0.07440822571516037,
0.07753269374370575,
-0.004349736031144857,
0.04092710465192795,
-0.03505028411746025,
-0.09789080172777176,
-0.01830293796956539,
-0.05969640985131264,
-0.002514689229428768,
0.048560529947280884,
-0.019273746758699417,
-0.004207500256597996,
0.021121514961123466,
0.10375747084617615,
0.05045737698674202,
-0.058638203889131546,
0.0033913443330675364,
0.010530207306146622,
0.04349091276526451,
-0.06092539057135582,
-0.004618879873305559,
0.05608014017343521,
0.03580227494239807,
-0.01571277342736721,
-0.11785996705293655,
-0.020904541015625,
0.00439459877088666,
0.1357356160879135,
0.012851396575570107,
0.08332127332687378,
-0.013342812657356262,
-0.04576287046074867,
0.016400203108787537,
0.04991796240210533,
-0.0009870283538475633,
-0.012431249022483826,
-0.04924978315830231,
0.07208134979009628,
-0.0644574910402298,
-0.05504986643791199,
0.002818361623212695,
-0.003988738637417555,
-0.05705653876066208,
0.05342091992497444,
0.06755544990301132,
0.009271016344428062,
0.0006981660844758153,
-0.005411675665527582,
-0.0969611257314682,
-0.04968286678195,
0.021577488631010056,
0.03363574296236038,
-0.031421661376953125,
0.0431387722492218,
-0.08447711914777756,
-0.008320393040776253,
0.06670288741588593,
-0.019989516586065292,
-0.02182442508637905,
0.0019554616883397102,
-0.018529271706938744,
-0.019886810332536697,
-0.02731029875576496,
0.022345690056681633,
0.04263998195528984,
-0.12157507985830307,
-0.007482328452169895,
0.03902371972799301,
0.005402704700827599,
0.007754255551844835,
-0.032925575971603394,
0.013830704614520073,
3.292555268549412e-33,
0.050902485847473145,
0.015402920544147491,
-0.025668397545814514,
-0.05759856849908829,
0.021863918751478195,
-0.06582610309123993,
-0.02496800385415554,
-0.0165296271443367,
-0.10462899506092072,
-0.011974595487117767,
0.04157622531056404,
0.018657919019460678,
-0.041805535554885864,
0.009216316044330597,
-0.00928476918488741,
-0.0323612205684185,
-0.09008266776800156,
-0.005373408552259207,
-0.007241991814225912,
0.018025226891040802,
0.04397840052843094,
0.03169437497854233,
0.028004944324493408,
0.004491393454372883,
0.01628231629729271,
0.034045543521642685,
0.04929398000240326,
-0.03251531347632408,
-0.000046203742385841906,
0.07388249039649963,
-0.04316852614283562,
0.030040772631764412,
-0.0505976639688015,
0.039603546261787415,
0.00041689217323437333,
-0.008329889737069607,
-0.04351846128702164,
0.018544277176260948,
-0.02986839786171913,
-0.06768044829368591,
0.04773417115211487,
0.013040735386312008,
-0.018683452159166336,
-0.018283559009432793,
-0.020437020808458328,
0.011123646050691605,
-0.029960310086607933,
-0.010089101269841194,
0.0344167985022068,
0.05278535187244415,
0.027565347030758858,
0.00567296938970685,
-0.02168858051300049,
-0.016851644963026047,
0.001352483406662941,
0.027355417609214783,
0.06480100005865097,
-0.027363095432519913,
0.08467914909124374,
0.02581109292805195,
0.041548606008291245,
-0.04304984584450722,
-0.04943159222602844,
-0.006894876714795828,
0.06276161223649979,
-0.022121179848909378,
-0.016366319730877876,
0.003585645230486989,
0.05480215325951576,
-0.024123337119817734,
-0.06239252910017967,
-0.034338608384132385,
0.003755218582227826,
0.08890431374311447,
0.008724018931388855,
0.004764506593346596,
0.017155488952994347,
-0.040802113711833954,
0.00433548865839839,
0.017675120383501053,
-0.023581665009260178,
-0.03258252143859863,
-0.011211216449737549,
-0.11690974980592728,
0.03320970758795738,
-0.08679812401533127,
-0.0009731511818245053,
-0.06873877346515656,
-0.010302637703716755,
0.0031397072598338127,
0.06846120953559875,
0.06350544840097427,
0.036103446036577225,
0.08597398549318314,
-0.011780942790210247,
-4.3468399887545976e-33,
0.01926097273826599,
0.03676006942987442,
-0.09967588633298874,
0.08164665848016739,
-0.09266386926174164,
-0.06695394963026047,
0.01704658567905426,
0.03344154357910156,
-0.014654934406280518,
-0.04809502884745598,
0.004032219760119915,
-0.039737917482852936,
0.019131837412714958,
0.052970435470342636,
0.04891670122742653,
-0.014458825811743736,
0.029880866408348083,
0.08022985607385635,
0.057180531322956085,
0.10810983180999756,
0.018830077722668648,
0.03081316314637661,
-0.14353898167610168,
0.039244383573532104,
0.007042207755148411,
0.058975812047719955,
-0.05865572392940521,
0.06610695272684097,
0.01425341609865427,
0.005797399673610926,
-0.04580867290496826,
0.018554100766777992,
-0.10049240291118622,
-0.05412065237760544,
-0.08976519852876663,
-0.07672690600156784,
0.014358876273036003,
-0.03197610378265381,
0.028101719915866852,
0.12253022938966751,
0.053907740861177444,
0.10587328672409058,
-0.11775153130292892,
0.012036829255521297,
-0.004320994485169649,
0.032474130392074585,
-0.08638878911733627,
-0.027936222031712532,
-0.021373670548200607,
-0.06808273494243622,
0.06262427568435669,
-0.01261355821043253,
0.0190113615244627,
0.004485504701733589,
0.017237460240721703,
-0.07518818229436874,
0.0713374987244606,
-0.07894939184188843,
-0.04799111932516098,
0.012754217721521854,
-0.11770901083946228,
0.036651208996772766,
0.0745525062084198,
-0.05439840629696846,
0.03588062524795532,
-0.035491254180669785,
0.04099057614803314,
0.0413537360727787,
0.010816402733325958,
-0.05766010656952858,
0.02811525948345661,
0.023984991014003754,
-0.009027574211359024,
0.027391040697693825,
-0.11061061173677444,
-0.033880140632390976,
-0.0186906885355711,
-0.04310716316103935,
-0.04959771782159805,
-0.023410528898239136,
-0.0019151878077536821,
-0.002787293866276741,
0.01197835337370634,
0.03677253797650337,
0.00386831839568913,
0.08305837213993073,
0.002750835847109556,
0.02209337428212166,
0.10157027095556259,
0.06298486888408661,
-0.005693931132555008,
0.00929147657006979,
0.024920733645558357,
0.11591809242963791,
0.031270187348127365,
-5.7613696924363467e-8,
-0.10272090882062912,
-0.06352347135543823,
-0.11012300103902817,
0.07675289362668991,
0.02546638250350952,
-0.038107212632894516,
-0.06389833986759186,
0.008046897128224373,
-0.03957103192806244,
-0.06186043471097946,
0.020688772201538086,
0.05708041414618492,
-0.0838979110121727,
0.0019991931039839983,
0.002999431686475873,
0.048367783427238464,
0.049878865480422974,
-0.005543319042772055,
-0.013210306875407696,
-0.015267415903508663,
0.06974425911903381,
0.04304811358451843,
0.04308022931218147,
0.01811297982931137,
-0.003006987739354372,
-0.019822224974632263,
-0.037044938653707504,
0.027274658903479576,
-0.013707444071769714,
-0.06981860846281052,
-0.01219682302325964,
0.08105944097042084,
-0.03273889049887657,
-0.029809975996613503,
0.03909529745578766,
0.06843791157007217,
0.019310280680656433,
-0.0834214836359024,
0.02092921733856201,
0.08587019145488739,
0.08322305232286453,
0.08666379004716873,
-0.11727346479892731,
-0.023375751450657845,
0.03189849853515625,
-0.0035306981299072504,
-0.0553552471101284,
-0.08269494026899338,
-0.03305850550532341,
-0.09269920736551285,
0.029843296855688095,
-0.031250037252902985,
-0.043287139385938644,
0.09426933526992798,
0.015964828431606293,
0.07025613635778427,
-0.049076542258262634,
-0.02015869878232479,
0.016443684697151184,
0.04712015762925148,
0.07339880615472794,
0.025321543216705322,
0.06285899132490158,
-0.02866826206445694
] |
hf-internal-testing/tiny-random-beit-pipeline | 2c1f7ac7d33f3ad4f7b9f06aa045175423689ee2 | 2022-02-14T17:42:35.000Z | [
"pytorch",
"beit",
"transformers",
"image-segmentation"
] | image-segmentation | false | hf-internal-testing | null | hf-internal-testing/tiny-random-beit-pipeline | 2,811 | null | transformers | ---
pipeline_tag: image-segmentation
---
Make the feature_extractor and model config agree.
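For context only, here is a sketch of how a tiny random checkpoint like this is typically exercised through its declared pipeline; the image path is a placeholder and the outputs are meaningless by design.
```python
# Sketch only (placeholder image path): instantiate the declared image-segmentation
# pipeline with this tiny random checkpoint.
from transformers import pipeline

segmenter = pipeline(
    "image-segmentation", model="hf-internal-testing/tiny-random-beit-pipeline"
)
outputs = segmenter("path/to/any_image.png")  # random weights, so outputs carry no meaning
```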
| [
-0.0018517638090997934,
0.004275538958609104,
0.011652423068881035,
0.014703059569001198,
0.11108927428722382,
-0.0706043466925621,
-0.02356758899986744,
0.013573753647506237,
-0.08979233354330063,
-0.060086339712142944,
-0.006312366109341383,
-0.05642683431506157,
-0.029284726828336716,
0.086056187748909,
0.005883653648197651,
0.033163975924253464,
-0.06206584349274635,
0.04598291590809822,
-0.0218104999512434,
-0.03157403692603111,
0.0235209409147501,
0.032470304518938065,
-0.01343856193125248,
-0.017296331003308296,
0.005714111961424351,
0.01878250762820244,
-0.03682180121541023,
0.013994463719427586,
0.03044792078435421,
-0.011803695932030678,
0.05189131572842598,
0.04411150515079498,
0.01568252220749855,
0.0248991958796978,
0.08490673452615738,
0.04993055388331413,
0.08088052272796631,
-0.06724328547716141,
0.028041109442710876,
-0.004986682441085577,
0.06815434992313385,
-0.0669870674610138,
-0.032262422144412994,
-0.10106980800628662,
0.030534809455275536,
-0.0032433676533401012,
-0.042358167469501495,
-0.07760054618120193,
0.01223944965749979,
-0.002467063255608082,
-0.099066361784935,
-0.03849029541015625,
-0.09187805652618408,
0.0857529491186142,
0.002325332025066018,
0.04871109127998352,
0.0014611766673624516,
-0.08230392634868622,
0.033541761338710785,
0.028571270406246185,
-0.03645049035549164,
-0.019319705665111542,
-0.03046516515314579,
0.10990232229232788,
-0.01748872548341751,
0.013242166489362717,
-0.06336596608161926,
-0.06747237592935562,
0.09352648258209229,
-0.05044057220220566,
-0.0022444771602749825,
0.007322086952626705,
-0.010046305134892464,
-0.09765035659074783,
-0.026187429204583168,
-0.01682915911078453,
0.039487071335315704,
0.10937457531690598,
0.019169514998793602,
-0.09601262211799622,
-0.019138623028993607,
-0.015813546255230904,
0.05382101237773895,
-0.0057302978821098804,
0.027423443272709846,
0.015337220393121243,
-0.0697152316570282,
-0.04782377555966377,
-0.013796991668641567,
0.02440510131418705,
-0.05334306135773659,
-0.07452716678380966,
-0.0500832200050354,
0.030699705705046654,
-0.02504599466919899,
-0.03437420353293419,
-0.012384600937366486,
-0.05701543390750885,
-0.008066643960773945,
0.031384099274873734,
-0.11585229635238647,
-0.029374541714787483,
0.09780699759721756,
0.0031785129103809595,
0.03296540305018425,
0.0375588983297348,
0.015184124000370502,
0.06893313676118851,
-0.01734413579106331,
-0.008192015811800957,
0.06118623539805412,
0.018098730593919754,
0.05753576382994652,
-0.047540709376335144,
0.08179832994937897,
0.023841742426156998,
-0.02464209869503975,
-0.0094226635992527,
0.03442230820655823,
0.048114631325006485,
-0.03504509851336479,
0.0010744382161647081,
-0.04286941513419151,
-0.0023496865760535,
0.04591021314263344,
-0.027859307825565338,
-0.09148912876844406,
-2.842506787090537e-33,
0.024020126089453697,
-0.07751047611236572,
0.03818962723016739,
-0.04020324721932411,
0.020120127126574516,
0.05381928011775017,
-0.007834331132471561,
-0.07360304892063141,
-0.027188431471586227,
0.0010823692427948117,
-0.01831054501235485,
-0.05544044077396393,
-0.07064110785722733,
0.11790534853935242,
0.007394321728497744,
-0.030444283038377762,
0.0011864975094795227,
0.0711025595664978,
-0.00039294498856179416,
0.027232732623815536,
0.03478183224797249,
-0.01631332002580166,
-0.09809291362762451,
-0.0022583373356610537,
0.007940810173749924,
0.02601500041782856,
-0.03044532798230648,
-0.04619389772415161,
-0.04604018107056618,
0.026993731036782265,
-0.013225364498794079,
0.026694253087043762,
0.10458970814943314,
0.06027848273515701,
-0.0676824077963829,
-0.011100885458290577,
0.019217416644096375,
0.03538667410612106,
-0.03536486625671387,
0.01722744293510914,
0.06839203089475632,
0.027080951258540154,
-0.03250128775835037,
-0.0850197896361351,
-0.017837919294834137,
0.03550044074654579,
0.028541777282953262,
0.003720065811648965,
-0.03240131214261055,
-0.020838715136051178,
0.05592762306332588,
0.0037877897266298532,
0.07269767671823502,
-0.025210117921233177,
-0.06393454968929291,
-0.06488156318664551,
0.00756209809333086,
0.029952291399240494,
0.05641414597630501,
-0.03919919207692146,
-0.02403554879128933,
0.063511922955513,
0.005312575027346611,
-0.02222614921629429,
0.013301219791173935,
-0.039541471749544144,
0.05102602392435074,
0.06256218999624252,
-0.026495948433876038,
0.06402808427810669,
-0.09689000993967056,
0.08931190520524979,
-0.06199146434664726,
0.005543455481529236,
0.10519994050264359,
-0.06319082528352737,
-0.028501765802502632,
0.024244796484708786,
-0.005639947950839996,
0.06805042177438736,
-0.1142527312040329,
0.05683431401848793,
0.00856046937406063,
-0.023501893505454063,
0.007980143651366234,
0.08135741204023361,
0.05840342491865158,
-0.05000491440296173,
-0.01645619422197342,
-0.033583883196115494,
-0.014977009035646915,
-0.013571099378168583,
-0.04875693470239639,
0.023539820685982704,
0.0029281163588166237,
3.971044914043274e-34,
0.0939447209239006,
0.02982083521783352,
0.00023200432769954205,
0.014715887606143951,
-0.02027842402458191,
-0.03542773798108101,
0.06183125823736191,
0.07279577851295471,
-0.002735759597271681,
-0.07351408153772354,
0.05708938091993332,
0.012789200991392136,
-0.10581915080547333,
-0.061809271574020386,
0.014058337546885014,
-0.08195043355226517,
-0.07911348342895508,
-0.11478818207979202,
0.02552019990980625,
0.0566011443734169,
-0.04229667782783508,
0.03371446579694748,
-0.009610920213162899,
0.06967797875404358,
-0.05720024183392525,
0.01748625375330448,
-0.008531911298632622,
-0.014693202450871468,
0.025961173698306084,
0.007227616384625435,
-0.08282910287380219,
0.04468511417508125,
-0.07127285748720169,
-0.030320698395371437,
-0.06138095259666443,
-0.010121745057404041,
-0.061376992613077164,
0.029936980456113815,
0.09310367703437805,
0.058859288692474365,
0.014397356659173965,
0.04831767827272415,
-0.08568830043077469,
0.03190011531114578,
-0.043118175119161606,
-0.003422258421778679,
0.18513180315494537,
-0.005265498999506235,
-0.10110567510128021,
0.0006290986202657223,
-0.041819360107183456,
0.03886577859520912,
-0.10007157176733017,
0.010596020147204399,
-0.04172844812273979,
0.04379654303193092,
0.021131202578544617,
-0.03653497248888016,
-0.05107726529240608,
-0.024165090173482895,
-0.015613354742527008,
0.014778388664126396,
-0.02892249822616577,
-0.06438461691141129,
0.049778081476688385,
-0.008686025626957417,
-0.052613724023103714,
0.036456506699323654,
-0.04938780516386032,
-0.0023416222538799047,
0.007573142647743225,
-0.01853124611079693,
0.05484699830412865,
0.13515354692935944,
0.02178531512618065,
-0.07384497672319412,
-0.033416785299777985,
0.009347491897642612,
0.07851411402225494,
0.025132447481155396,
-0.0633184090256691,
-0.024706291034817696,
-0.0019323453307151794,
0.07577233016490936,
0.0827791765332222,
0.019428124651312828,
0.0005204564076848328,
-0.03531303629279137,
0.025467678904533386,
-0.07426106184720993,
-0.010507924482226372,
-0.02674323134124279,
0.012829530984163284,
0.10778150707483292,
0.00890432670712471,
-2.637566431928917e-8,
-0.0346621572971344,
0.005403639283031225,
0.013579360209405422,
0.0010516702895984054,
-0.009090659208595753,
0.017252594232559204,
0.08234500885009766,
0.0820244699716568,
0.023153966292738914,
-0.027141686528921127,
0.03641714155673981,
0.020432259887456894,
-0.12096677720546722,
0.0490887351334095,
-0.010014764964580536,
0.014064143411815166,
0.02194245532155037,
0.11388952285051346,
-0.000014776355783396866,
-0.0733785554766655,
-0.03801038861274719,
-0.0704435184597969,
0.00435370858758688,
-0.021247349679470062,
0.06876936554908752,
-0.03515497222542763,
-0.04741036146879196,
0.059282973408699036,
-0.015322904102504253,
-0.02324739657342434,
0.022275811061263084,
-0.0018003175500780344,
-0.018287474289536476,
0.05654777213931084,
0.12611393630504608,
0.03829822316765785,
-0.029642952606081963,
-0.037993356585502625,
-0.016433484852313995,
-0.06952450424432755,
0.040721915662288666,
0.036218930035829544,
0.0379762202501297,
-0.08544416725635529,
-0.03520866855978966,
0.06609630584716797,
0.10023301839828491,
0.008694635704159737,
-0.0009561622282490134,
0.08664517104625702,
0.05531409755349159,
-0.031434495002031326,
-0.003855656133964658,
0.08108893781900406,
0.025639859959483147,
-0.06832663714885712,
0.07522108405828476,
-0.04793339595198631,
0.03464892879128456,
0.033686425536870956,
-0.00488666445016861,
-0.018814781680703163,
0.014619506895542145,
-0.06661773473024368
] |
kyriinx/DialoGPT-small-glyph | 7ad4861bfe3bc8469bb6b89d18648d73dccb22a2 | 2022-04-27T16:35:54.000Z | [
"pytorch",
"gpt2",
"text-generation",
"transformers",
"conversational"
] | conversational | false | kyriinx | null | kyriinx/DialoGPT-small-glyph | 2,803 | null | transformers | ---
tags:
- conversational
---
# Glyph DialoGPT model | [
-0.021028872579336166,
-0.048823002725839615,
0.05880364030599594,
-0.02266448177397251,
-0.03793533518910408,
-0.06103490665555,
0.1132744625210762,
-0.0027484979946166277,
0.06442465633153915,
-0.04379365220665932,
0.0015470505459234118,
0.006422173231840134,
-0.005384101998060942,
0.015861274674534798,
-0.0010629805037751794,
-0.012872361578047276,
0.0024231451097875834,
-0.058299340307712555,
-0.07884690165519714,
0.060563795268535614,
0.03282953053712845,
0.1062905564904213,
0.047178637236356735,
0.010000495240092278,
0.018492504954338074,
0.022708486765623093,
-0.09142853319644928,
-0.011525575071573257,
0.08951593190431595,
-0.007922638207674026,
-0.01585223153233528,
0.028987959027290344,
0.044111452996730804,
0.06401737779378891,
-0.07325976341962814,
0.0745796263217926,
0.05477142333984375,
0.027932634577155113,
-0.02623249590396881,
-0.0010773150715976954,
-0.08680734038352966,
-0.002532519865781069,
-0.050377849489450455,
0.006687060464173555,
0.035082340240478516,
-0.011486418545246124,
-0.1069401353597641,
-0.03424688056111336,
-0.06170099601149559,
0.062355756759643555,
-0.07569875568151474,
-0.06838656961917877,
0.001428448362275958,
0.09442120790481567,
0.003461429849267006,
0.06186990439891815,
-0.061177946627140045,
-0.022867459803819656,
0.03571506589651108,
0.007697077002376318,
-0.06941772252321243,
-0.03826165571808815,
-0.05384257808327675,
0.03295229747891426,
-0.011904479935765266,
0.062280476093292236,
0.007079031318426132,
-0.010250492952764034,
-0.050580039620399475,
0.08552142232656479,
0.001490068738348782,
-0.006082616746425629,
-0.026822078973054886,
-0.06705790758132935,
-0.04459771513938904,
0.07030397653579712,
-0.019304843619465828,
-0.044426966458559036,
0.06352733820676804,
-0.008056983351707458,
0.04979265481233597,
-0.04767505079507828,
0.05751718580722809,
0.01984136924147606,
-0.002672659931704402,
-0.041813869029283524,
-0.02070099487900734,
-0.028804687783122063,
-0.04721846058964729,
0.00266505335457623,
-0.03574617579579353,
-0.090229332447052,
0.10427236557006836,
0.018660930916666985,
-0.021434804424643517,
0.03325662389397621,
0.011784601025283337,
-0.1217048317193985,
-0.0698280930519104,
0.09497637301683426,
-0.02521228790283203,
0.028354009613394737,
0.07166986167430878,
-0.10766737163066864,
-0.035147834569215775,
0.04668364301323891,
-0.013687457889318466,
-0.01742774248123169,
0.011120655573904514,
-0.022934984415769577,
-0.04513927549123764,
-0.0015611996641382575,
0.020245922729372978,
-0.036456868052482605,
0.05349487066268921,
-0.05690712109208107,
0.03059384413063526,
-0.05632510781288147,
0.0810663104057312,
-0.02225310169160366,
-0.03946049511432648,
0.008353080600500107,
-0.08561037480831146,
-0.0014409126015380025,
-0.003660057671368122,
0.01186713669449091,
-0.05176765099167824,
-1.7386702045523914e-33,
0.1255492866039276,
0.05294455215334892,
0.06093449890613556,
0.07243741303682327,
0.027984704822301865,
0.08054361492395401,
-0.08751685917377472,
-0.04903923720121384,
-0.012613801285624504,
-0.03865210711956024,
0.02223317325115204,
-0.062113407999277115,
-0.09765326231718063,
0.08399556577205658,
0.02496674284338951,
-0.0013290763599798083,
-0.06734377890825272,
0.031321004033088684,
-0.02057719975709915,
-0.06966947764158249,
0.015581842511892319,
0.021783549338579178,
0.015075867995619774,
0.024803917855024338,
0.08135411888360977,
0.045543964952230453,
0.04434769228100777,
-0.08951921761035919,
-0.01807512529194355,
0.05392559617757797,
-0.04990845173597336,
-0.005472405347973108,
-0.048439621925354004,
-0.001515531912446022,
-0.003262227401137352,
-0.022152205929160118,
-0.011265059933066368,
-0.06779736280441284,
0.015005549415946007,
-0.059110067784786224,
-0.03872539475560188,
-0.04291491582989693,
-0.011103353463113308,
-0.05700482428073883,
-0.00662102410569787,
0.08242370933294296,
-0.05398271977901459,
0.004081272054463625,
-0.03361939266324043,
-0.010941347107291222,
0.00561251025646925,
0.03204772248864174,
-0.03162631019949913,
-0.02681432105600834,
-0.017510419711470604,
-0.04684620723128319,
-0.032584287226200104,
-0.023750105872750282,
-0.01280513871461153,
0.0108485771343112,
0.01207764819264412,
0.06101397052407265,
0.10302285104990005,
-0.08118536323308945,
0.10048379004001617,
0.006731967907398939,
-0.12176422774791718,
-0.015005185268819332,
0.017129993066191673,
-0.005940638482570648,
-0.017795270308852196,
-0.018744392320513725,
0.02232622355222702,
0.07581721991300583,
-0.06139057129621506,
0.010369589552283287,
0.0026464492548257113,
-0.07568424195051193,
0.07888438552618027,
0.07795784622430801,
-0.04580351337790489,
-0.06998651474714279,
-0.06633787602186203,
-0.033070970326662064,
-0.009224713779985905,
-0.06399983912706375,
0.051110561937093735,
-0.14119166135787964,
-0.00307562667876482,
0.015615561045706272,
0.008996124379336834,
0.04347163066267967,
-0.029359493404626846,
-0.03184410557150841,
-0.13358333706855774,
-9.781147984883133e-34,
0.02214055322110653,
-0.00389998871833086,
-0.07920199632644653,
0.1140458732843399,
0.013835209421813488,
-0.01971271075308323,
0.0444856621325016,
0.09032192081212997,
0.05066291242837906,
-0.0053505501709878445,
-0.0057052625343203545,
0.08129168301820755,
0.014218179509043694,
-0.028430206701159477,
0.15105362236499786,
0.00922546349465847,
0.056054405868053436,
-0.037556685507297516,
0.03678440302610397,
0.02152489311993122,
0.07938403636217117,
-0.057428453117609024,
-0.13816897571086884,
0.04036317765712738,
0.019324837252497673,
-0.015398235060274601,
-0.012923155911266804,
0.030893782153725624,
0.07465054094791412,
-0.03613819554448128,
-0.061005085706710815,
0.0725618377327919,
-0.04488319158554077,
-0.04823292791843414,
0.0232771597802639,
0.013981564901769161,
-0.0021003223955631256,
-0.022043947130441666,
0.023680154234170914,
0.004771154839545488,
0.06373763829469681,
-0.04884978011250496,
0.03574654832482338,
0.0056287930347025394,
0.012360680848360062,
-0.057577140629291534,
-0.05187135562300682,
-0.04485670477151871,
-0.029580194503068924,
-0.003045481164008379,
0.004114366136491299,
-0.015478950925171375,
-0.04703628644347191,
-0.012315994128584862,
-0.07127361744642258,
-0.027437401935458183,
0.007405370473861694,
-0.03825564682483673,
-0.022539455443620682,
0.025587325915694237,
-0.06467657536268234,
-0.03444432467222214,
0.06266824156045914,
-0.03575523942708969,
0.02913232147693634,
-0.043292153626680374,
-0.0038225268945097923,
-0.042299531400203705,
0.03700309991836548,
-0.04563785344362259,
0.10856995731592178,
-0.013919719494879246,
0.03214757889509201,
0.05907094106078148,
0.04353925213217735,
-0.013897128403186798,
0.014631143771111965,
-0.018244963139295578,
0.01942250318825245,
-0.10679518431425095,
0.04263616353273392,
0.03014303371310234,
0.045255232602357864,
0.08748763799667358,
0.05213717743754387,
-0.025515297427773476,
0.003936914261430502,
0.12722735106945038,
-0.005344539415091276,
0.04881930723786354,
-0.020684441551566124,
0.03157314658164978,
0.02045956254005432,
0.0784277692437172,
0.015834039077162743,
-2.470591020653501e-8,
-0.07141368836164474,
-0.03494824469089508,
0.01608218625187874,
0.007465212605893612,
0.013363635167479515,
-0.002256508683785796,
0.06089930236339569,
-0.0021961124148219824,
-0.07629086822271347,
-0.04453394189476967,
0.058605507016181946,
0.055364057421684265,
0.00015057540440466255,
-0.019669516012072563,
-0.04236476868391037,
0.08147349208593369,
-0.08547982573509216,
0.05546879023313522,
-0.02488507516682148,
-0.03468439355492592,
0.05590398609638214,
0.01648925058543682,
-0.07705257087945938,
0.11267641186714172,
-0.014051659032702446,
-0.0016477752942591906,
-0.06977031379938126,
0.0805385634303093,
-0.05581850931048393,
0.06446555256843567,
0.06506910175085068,
0.07327782362699509,
-0.10511846095323563,
-0.0013785563642159104,
-0.04618542268872261,
-0.01282845064997673,
-0.015307819470763206,
-0.039060354232788086,
0.01355750672519207,
-0.061438608914613724,
0.02922956645488739,
0.00438606645911932,
-0.0449923537671566,
-0.013209749013185501,
0.06726720929145813,
0.021397368982434273,
0.02053324319422245,
-0.0993841290473938,
-0.02578783966600895,
0.040390968322753906,
-0.005995620973408222,
0.01823352836072445,
0.015181348659098148,
0.041971076279878616,
0.0024072828236967325,
-0.02999911457300186,
0.02719571441411972,
0.05143685266375542,
0.042193274945020676,
0.01627500168979168,
0.04896881431341171,
0.08503600209951401,
-0.005710279103368521,
-0.025001289322972298
] |
stas/tiny-wmt19-en-ru | cad41949841fed75b823799992d79dd7a35698c5 | 2021-05-03T01:47:47.000Z | [
"pytorch",
"fsmt",
"text2text-generation",
"en",
"ru",
"dataset:wmt19",
"transformers",
"wmt19",
"testing",
"license:apache-2.0",
"autotrain_compatible"
] | text2text-generation | false | stas | null | stas/tiny-wmt19-en-ru | 2,797 | null | transformers | ---
language:
- en
- ru
thumbnail:
tags:
- wmt19
- testing
license: apache-2.0
datasets:
- wmt19
metrics:
- bleu
---
# Tiny FSMT en-ru
This is a tiny model that is used in the `transformers` test suite. It doesn't do anything useful, other than testing that `modeling_fsmt.py` is functional.
Do not try to use it for anything that requires quality.
The model is indeed 30KB in size.
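As a quick smoke test only (this snippet is not from the card and the input sentence is arbitrary), the checkpoint loads through the regular FSMT classes:
```python
# Smoke-test sketch: load the tiny checkpoint and run one generate() call.
# The translation it produces is meaningless by design.
from transformers import FSMTForConditionalGeneration, FSMTTokenizer

mname = "stas/tiny-wmt19-en-ru"
tokenizer = FSMTTokenizer.from_pretrained(mname)
model = FSMTForConditionalGeneration.from_pretrained(mname)

inputs = tokenizer("Machine learning is great", return_tensors="pt")
outputs = model.generate(**inputs, max_length=16)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```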
You can see how it was created [here](https://huggingface.co/stas/tiny-wmt19-en-ru/blob/main/fsmt-make-super-tiny-model.py).
If you're looking for the real model, please go to [https://huggingface.co/facebook/wmt19-en-ru](https://huggingface.co/facebook/wmt19-en-ru).
| [
-0.06982538849115372,
0.025074055418372154,
-0.054299429059028625,
0.07121098786592484,
0.028842143714427948,
-0.08647470921278,
-0.05029912665486336,
0.1439344435930252,
-0.08323586732149124,
-0.001435029786080122,
0.09161300212144852,
-0.033217187970876694,
0.008099931292235851,
0.01230110228061676,
-0.033453866839408875,
-0.006443431135267019,
0.03465753421187401,
-0.040314093232154846,
-0.0718214213848114,
0.014845429919660091,
0.011284620501101017,
0.0007961532101035118,
0.02913837879896164,
-0.020234284922480583,
-0.0005310800042934716,
-0.026415782049298286,
-0.014988123439252377,
0.030688976868987083,
0.04350294917821884,
-0.11624579131603241,
0.05516023933887482,
0.07964474707841873,
0.0061933742836117744,
0.020626390352845192,
0.09038429707288742,
0.06189572066068649,
0.03966272994875908,
-0.09371926635503769,
-0.036706481128931046,
-0.05072306841611862,
0.04637144133448601,
0.0006930699455551803,
0.027449557557702065,
-0.03626205772161484,
-0.008552806451916695,
-0.034822847694158554,
0.05334660783410072,
-0.01684374362230301,
-0.04080786928534508,
-0.05020624399185181,
-0.026526235044002533,
-0.0424702949821949,
0.003057624213397503,
0.08733462542295456,
0.025102976709604263,
-0.06977257877588272,
-0.02004186436533928,
-0.05177086591720581,
-0.0596659854054451,
0.03943229466676712,
-0.03915098309516907,
0.0094514861702919,
-0.10337012261152267,
-0.03662509098649025,
-0.02302512340247631,
0.03187814727425575,
0.021443702280521393,
-0.1034214198589325,
0.036654554307460785,
-0.05549351125955582,
-0.006458754651248455,
0.0071124969981610775,
-0.05083245411515236,
0.0680113211274147,
0.02230108343064785,
-0.04715092107653618,
0.09538384526968002,
0.022907547652721405,
0.018626417964696884,
-0.03745849058032036,
-0.01530531607568264,
-0.05392470955848694,
0.024581648409366608,
-0.06215919926762581,
0.027738027274608612,
0.009381597861647606,
0.030506914481520653,
0.056985609233379364,
0.0007726231124252081,
-0.02328992635011673,
-0.030610568821430206,
0.03910062462091446,
-0.09805954992771149,
0.07248040288686752,
-0.025726724416017532,
0.04284023120999336,
0.037808384746313095,
0.0022438829764723778,
-0.07837294042110443,
0.12541168928146362,
0.056835051625967026,
0.022739224135875702,
0.0998978391289711,
0.05075389891862869,
-0.04486352577805519,
-0.018497051671147346,
0.018000531941652298,
0.1272599995136261,
-0.006519715767353773,
-0.030789056792855263,
0.03279176354408264,
0.038712576031684875,
-0.05303307622671127,
-0.03316637501120567,
0.05234775319695473,
-0.06152794137597084,
-0.01457263994961977,
-0.012582503259181976,
-0.06404197961091995,
-0.008010657504200935,
0.04002393037080765,
0.05493852496147156,
-0.04573052003979683,
-0.035290058702230453,
-0.02203420363366604,
0.03563802316784859,
-0.09670257568359375,
2.7167132795182053e-33,
0.06816717237234116,
0.09672900289297104,
-0.012308108620345592,
0.02355867438018322,
-0.016738224774599075,
0.06130426749587059,
0.03909086808562279,
0.024823928251862526,
-0.07264826446771622,
0.009627709165215492,
-0.03965580463409424,
0.024707170203328133,
-0.03914778679609299,
-0.015000655315816402,
0.06030648946762085,
-0.04855504631996155,
-0.035966090857982635,
0.026835313066840172,
0.013128780759871006,
0.037537556141614914,
0.08226878941059113,
-0.000028125747121521272,
-0.010712150484323502,
-0.07089108973741531,
-0.03671969845890999,
0.00025457169977016747,
0.036704301834106445,
0.020265739411115646,
-0.05447130277752876,
0.04468873143196106,
-0.05796165391802788,
0.04031152278184891,
0.014565368182957172,
-0.04489289969205856,
0.010868389159440994,
-0.02556932158768177,
-0.07195962220430374,
-0.11621371656656265,
-0.06551238894462585,
-0.12294050306081772,
0.056989796459674835,
0.03937728330492973,
-0.07425357401371002,
-0.017292624339461327,
-0.033232904970645905,
-0.03118837997317314,
0.05173901841044426,
0.008285487070679665,
0.034764364361763,
0.031210416927933693,
0.050557684153318405,
0.05589171499013901,
-0.0519634485244751,
0.005894262809306383,
-0.019866565242409706,
0.0702054426074028,
0.11759805679321289,
-0.03341855853796005,
0.05621076747775078,
0.034202441573143005,
0.012729528360068798,
-0.03363766521215439,
0.04953661561012268,
0.05521964654326439,
0.04586431384086609,
0.00027186761144548655,
0.028281424194574356,
0.04141302406787872,
-0.019623393192887306,
0.09281767904758453,
-0.022732993587851524,
-0.024140695109963417,
0.0344519317150116,
0.004156153183430433,
0.048079848289489746,
-0.09845899045467377,
0.09804604202508926,
-0.05520116537809372,
-0.04717006906867027,
0.05685710906982422,
0.013278774917125702,
0.025650914758443832,
0.05201799422502518,
-0.08361133188009262,
-0.03669523075222969,
-0.050322845578193665,
0.03335769101977348,
-0.007012969348579645,
-0.043170955032110214,
-0.07605575770139694,
0.018919669091701508,
0.012646334245800972,
-0.02565639652311802,
-0.045420996844768524,
-0.051585614681243896,
-2.0455596371284354e-33,
0.008959518745541573,
0.053059667348861694,
-0.005187536124140024,
0.10200827568769455,
0.0031529210973531008,
-0.07621438801288605,
0.03819965198636055,
0.15935634076595306,
-0.011634571477770805,
0.013666315004229546,
0.13299161195755005,
-0.05655288323760033,
0.020832331851124763,
-0.11219149827957153,
0.10006796568632126,
0.007143883965909481,
0.009340882301330566,
-0.1562010496854782,
0.06250390410423279,
0.03511824831366539,
-0.00441882386803627,
0.1018250361084938,
-0.038859762251377106,
0.05054425448179245,
-0.11633459478616714,
0.036897506564855576,
-0.03388616815209389,
-0.004305690992623568,
-0.008706213906407356,
-0.02427075058221817,
-0.025923846289515495,
-0.05089129880070686,
-0.05949362367391586,
-0.0035217839758843184,
-0.002203294076025486,
-0.05031783878803253,
0.004941513761878014,
0.007278060540556908,
0.004460762720555067,
0.0072730123065412045,
0.027892660349607468,
0.08155401796102524,
-0.0413203164935112,
0.0322330966591835,
-0.05092032626271248,
-0.02943568490445614,
0.020800350233912468,
-0.06760121136903763,
0.07583283632993698,
-0.03122316300868988,
0.04262328892946243,
0.013514804653823376,
-0.050047218799591064,
0.020773712545633316,
-0.041961900889873505,
-0.0807463675737381,
-0.042998723685741425,
0.05925525724887848,
-0.021941564977169037,
0.010558762587606907,
-0.03689345717430115,
-0.04916919767856598,
-0.04410966858267784,
-0.028911462053656578,
-0.03349009528756142,
-0.053155090659856796,
-0.028156615793704987,
-0.006038502790033817,
0.03177643567323685,
0.0983150452375412,
0.04364491626620293,
0.01596667617559433,
0.07760528475046158,
0.03166535124182701,
-0.014930048026144505,
-0.034101054072380066,
-0.005059143528342247,
0.06334628164768219,
0.12032491713762283,
-0.01073654368519783,
0.009997944347560406,
0.05954798683524132,
0.016642102971673012,
-0.01056417915970087,
0.09499511122703552,
-0.0337212048470974,
-0.03550117090344429,
0.06952877342700958,
-0.0035708057694137096,
0.03818526118993759,
-0.06340984255075455,
0.05669134855270386,
0.038926638662815094,
0.0621236152946949,
0.05205667018890381,
-5.42975904238574e-8,
0.0010416648583486676,
0.033280931413173676,
-0.06887932866811752,
0.046959735453128815,
-0.0823562815785408,
-0.05551014840602875,
0.005002652294933796,
0.023843618109822273,
-0.012587185017764568,
0.07959453016519547,
0.01829960010945797,
0.0013015150325372815,
-0.0993887186050415,
0.03586035594344139,
-0.04374300315976143,
-0.026323938742280006,
-0.03397953882813454,
0.048069316893815994,
-0.0434587188065052,
-0.05607585608959198,
-0.0380060039460659,
0.0428091362118721,
0.07458078116178513,
-0.026305966079235077,
0.02977791056036949,
0.036152344197034836,
-0.02401925064623356,
0.07251241058111191,
-0.033764470368623734,
-0.06025460734963417,
-0.0021841914858669043,
-0.007541824597865343,
-0.04725399240851402,
-0.02091670036315918,
0.007577619981020689,
0.08703014999628067,
-0.07016078382730484,
0.045983485877513885,
-0.015580225735902786,
0.007615839596837759,
0.06724472343921661,
-0.022052569314837456,
-0.03311699628829956,
0.005675154738128185,
-0.02054816484451294,
0.01564447022974491,
-0.04894689843058586,
-0.06218034029006958,
-0.025433171540498734,
0.026550639420747757,
0.049170542508363724,
-0.011129477992653847,
-0.055902354419231415,
-0.01818900741636753,
-0.06874129921197891,
0.034555479884147644,
0.009463711641728878,
-0.02051004022359848,
-0.0405702069401741,
0.009317640215158463,
0.053782809525728226,
-0.04147905111312866,
-0.04981677606701851,
0.03171149268746376
] |
Helsinki-NLP/opus-mt-fr-es | 4bd0d3d212940704145e6a2699f4b93e6cfe8b61 | 2021-09-09T21:53:46.000Z | [
"pytorch",
"marian",
"text2text-generation",
"fr",
"es",
"transformers",
"translation",
"license:apache-2.0",
"autotrain_compatible"
] | translation | false | Helsinki-NLP | null | Helsinki-NLP/opus-mt-fr-es | 2,792 | null | transformers | ---
tags:
- translation
license: apache-2.0
---
### opus-mt-fr-es
* source languages: fr
* target languages: es
* OPUS readme: [fr-es](https://github.com/Helsinki-NLP/OPUS-MT-train/blob/master/models/fr-es/README.md)
* dataset: opus
* model: transformer-align
* pre-processing: normalization + SentencePiece
* download original weights: [opus-2020-01-09.zip](https://object.pouta.csc.fi/OPUS-MT-models/fr-es/opus-2020-01-09.zip)
* test set translations: [opus-2020-01-09.test.txt](https://object.pouta.csc.fi/OPUS-MT-models/fr-es/opus-2020-01-09.test.txt)
* test set scores: [opus-2020-01-09.eval.txt](https://object.pouta.csc.fi/OPUS-MT-models/fr-es/opus-2020-01-09.eval.txt)
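The model can be loaded through the standard `transformers` Marian classes. The sketch below is illustrative only; the example sentence and generation settings are assumptions, not part of the original evaluation setup.
```python
from transformers import MarianMTModel, MarianTokenizer

model_name = "Helsinki-NLP/opus-mt-fr-es"
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)

# Translate a French sentence into Spanish (the sentence is an arbitrary example).
batch = tokenizer(["Le chat dort sur le canapé."], return_tensors="pt", padding=True)
generated = model.generate(**batch)
print(tokenizer.batch_decode(generated, skip_special_tokens=True))
```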
## Benchmarks
| testset | BLEU | chr-F |
|-----------------------|-------|-------|
| newssyscomb2009.fr.es | 34.3 | 0.601 |
| news-test2008.fr.es | 32.5 | 0.583 |
| newstest2009.fr.es | 31.6 | 0.586 |
| newstest2010.fr.es | 36.5 | 0.616 |
| newstest2011.fr.es | 38.3 | 0.622 |
| newstest2012.fr.es | 38.1 | 0.619 |
| newstest2013.fr.es | 34.0 | 0.587 |
| Tatoeba.fr.es | 53.2 | 0.709 |
| [
-0.062440793961286545,
-0.025204412639141083,
0.023285361006855965,
-0.006202352233231068,
0.018268492072820663,
0.09799963235855103,
-0.05648607388138771,
0.03955678269267082,
0.03456101194024086,
-0.008148597553372383,
0.0040783025324344635,
-0.044940412044525146,
-0.07507983595132828,
-0.023671496659517288,
-0.026364237070083618,
-0.01062558963894844,
-0.03782614320516586,
0.0887444019317627,
-0.0783781036734581,
-0.026562796905636787,
0.04948318749666214,
0.026745866984128952,
0.030047377571463585,
-0.022150473669171333,
0.09890078753232956,
0.08129063993692398,
-0.10444149374961853,
0.003608357859775424,
0.09855837374925613,
-0.04556342214345932,
-0.012716862373054028,
0.014154129661619663,
0.05072227492928505,
0.08197340369224548,
0.03446195274591446,
0.07199827581644058,
-0.009248977527022362,
-0.07922978699207306,
-0.03243988752365112,
0.04370032995939255,
0.030794722959399223,
0.05876680091023445,
-0.034673601388931274,
-0.010947441682219505,
0.047370802611112595,
0.002898323815315962,
-0.08201964944601059,
0.03226526081562042,
0.014019397087395191,
-0.001407154486514628,
-0.11691103130578995,
-0.01581074297428131,
0.007924634031951427,
0.0773700401186943,
-0.07071759551763535,
0.04815462976694107,
0.05128873512148857,
-0.022871021181344986,
0.07339378446340561,
-0.026041219010949135,
-0.12062962353229523,
-0.03240673243999481,
-0.09753405302762985,
-0.0009914390975609422,
-0.01174580492079258,
-0.011676214635372162,
0.008759310469031334,
0.0618099682033062,
-0.058241479098796844,
0.0635782927274704,
-0.023898202925920486,
0.001746824593283236,
0.011662397533655167,
0.06040586158633232,
-0.006330158561468124,
0.04799888655543327,
-0.006192556582391262,
-0.06040084734559059,
-0.00391464913263917,
-0.06796955317258835,
0.0014015933265909553,
-0.0602039210498333,
0.06587312370538712,
-0.011778540909290314,
0.08479129523038864,
-0.002810211619362235,
0.03170298412442207,
0.002700382610782981,
-0.016551531851291656,
0.045817237347364426,
-0.06487292796373367,
-0.032979775220155716,
0.004840996116399765,
0.02319989912211895,
-0.00037782458821311593,
0.06002194806933403,
0.013428930193185806,
0.05399733781814575,
0.01646731048822403,
0.06871574372053146,
0.021096741780638695,
0.019853292033076286,
0.07915324717760086,
-0.04048139601945877,
-0.10865870118141174,
-0.023018693551421165,
0.05734771862626076,
0.049107279628515244,
-0.005279453005641699,
-0.09236126393079758,
0.02564391680061817,
-0.027478763833642006,
-0.022596178576350212,
-0.08783110976219177,
0.02578405849635601,
-0.05407257750630379,
-0.0019597443751990795,
-0.024121999740600586,
-0.01359351072460413,
0.0481477789580822,
-0.02940150536596775,
-0.007666796911507845,
-0.03530559316277504,
0.003747181035578251,
-0.04958002269268036,
-0.05642097443342209,
0.0329107828438282,
1.5545350756388265e-33,
0.06284525990486145,
-0.011491678655147552,
-0.014451192691922188,
-0.013804549351334572,
-0.05446682870388031,
-0.007290413603186607,
-0.032082267105579376,
0.03904681280255318,
-0.10712109506130219,
0.00009246203262591735,
-0.01371247973293066,
-0.007100497838109732,
-0.08636824041604996,
0.012172795832157135,
-0.02509162202477455,
0.006538794841617346,
0.07720859348773956,
0.0127768749371171,
0.04093524441123009,
0.030956175178289413,
0.0811699703335762,
0.044024545699357986,
-0.0005302242934703827,
-0.04003816470503807,
-0.050757866352796555,
0.05625567212700844,
0.016689231619238853,
-0.11475684493780136,
-0.11539240926504135,
0.024461250752210617,
-0.10089228302240372,
0.02346123196184635,
-0.01692039892077446,
0.0076126111671328545,
-0.013012386858463287,
-0.02512962929904461,
-0.007940027862787247,
-0.012965594418346882,
-0.036929450929164886,
-0.08286382257938385,
0.004913437180221081,
0.013814201578497887,
-0.013394873589277267,
-0.05516565963625908,
0.026063844561576843,
0.012256097048521042,
0.002259145025163889,
0.010492641478776932,
0.11310131102800369,
0.019239667803049088,
0.011000148952007294,
0.0494995154440403,
-0.06862648576498032,
0.00773521838709712,
0.029928000643849373,
0.10843028128147125,
0.06218511611223221,
0.010102478787302971,
0.022071894258260727,
0.03343122452497482,
0.07102545350790024,
0.023694174364209175,
0.019439131021499634,
0.021984169259667397,
0.09938494861125946,
-0.0058518582955002785,
-0.040309030562639236,
-0.07312839478254318,
0.07949252426624298,
0.042949289083480835,
-0.1433757096529007,
-0.04646427929401398,
0.06270781904459,
0.07709494978189468,
0.06373089551925659,
-0.02088758535683155,
-0.024843472987413406,
-0.02699453756213188,
-0.027166152372956276,
-0.026310238987207413,
-0.06634537130594254,
0.02609359100461006,
-0.0058358823880553246,
-0.02385123260319233,
-0.03626859560608864,
0.006607256829738617,
0.04673825576901436,
-0.06064330041408539,
-0.034214701503515244,
0.0026859864592552185,
0.036206137388944626,
0.05015555024147034,
-0.08977221697568893,
-0.019252069294452667,
-0.0018113615224137902,
-1.7708835252107223e-33,
0.09621290862560272,
0.016510361805558205,
-0.040310222655534744,
0.07165651768445969,
-0.025971658527851105,
-0.07141856849193573,
0.0029191637877374887,
0.10755569487810135,
0.06211079657077789,
0.038630906492471695,
0.06427469849586487,
-0.14796848595142365,
0.03542746976017952,
-0.08545167744159698,
0.06984752416610718,
-0.04592764750123024,
-0.007104065734893084,
0.038068074733018875,
0.02861585095524788,
0.03355005010962486,
0.004725235048681498,
0.07584958523511887,
-0.021269584074616432,
0.09180306643247604,
-0.0026428750716149807,
-0.016742467880249023,
-0.019989117980003357,
0.06530670076608658,
0.0016610522288829088,
-0.003621291136369109,
0.00852146279066801,
-0.0015681823715567589,
-0.1090657114982605,
-0.01909664459526539,
-0.08384179323911667,
0.04164307564496994,
0.031042583286762238,
0.044939473271369934,
0.04045751318335533,
0.0707915648818016,
0.0658414289355278,
0.07264712452888489,
-0.042227040976285934,
-0.04210449010133743,
0.01598876155912876,
-0.027815846726298332,
0.012436735443770885,
-0.0017134188674390316,
0.007726674433797598,
-0.08540792018175125,
0.017788100987672806,
0.006463412661105394,
-0.09295625239610672,
-0.03259579464793205,
-0.01602904684841633,
-0.07966459542512894,
-0.01360951829701662,
-0.1449328511953354,
-0.06377768516540527,
-0.022098829969763756,
-0.008200903423130512,
0.02787470445036888,
-0.044341448694467545,
-0.07806243747472763,
0.04344535619020462,
-0.004906368441879749,
0.04298160597681999,
0.013939711265265942,
0.01929621212184429,
0.06327586621046066,
-0.013963723555207253,
-0.061049118638038635,
0.06816928833723068,
0.09611623734235764,
0.006573855876922607,
-0.042579781264066696,
-0.04530956223607063,
0.03531757742166519,
0.05283629521727562,
-0.06573357433080673,
-0.02290002629160881,
0.017715681344270706,
0.007144581992179155,
0.027718517929315567,
0.09853998571634293,
0.1045852079987526,
0.023818977177143097,
-0.002578370738774538,
-0.001671972800977528,
0.06078619509935379,
0.012715878896415234,
0.0180338304489851,
0.02303706854581833,
0.10425420105457306,
0.007760742213577032,
-4.914072704309547e-8,
-0.10056822746992111,
0.0050312126986682415,
-0.10328544676303864,
0.04953533038496971,
-0.04542197287082672,
-0.07007813453674316,
-0.059293344616889954,
-0.029389847069978714,
-0.0373966209590435,
-0.0379832461476326,
0.0018607595702633262,
0.01746355928480625,
-0.07703667879104614,
-0.007494312711060047,
-0.04666990786790848,
0.02201765961945057,
-0.018293514847755432,
0.08945491164922714,
-0.025556648150086403,
-0.03539000451564789,
0.052427589893341064,
0.05070209130644798,
0.04559832438826561,
-0.07533769309520721,
-0.002642574952915311,
0.006716975476592779,
-0.03981316462159157,
0.030150743201375008,
0.010487953200936317,
0.017427444458007812,
0.03742023557424545,
0.031453292816877365,
-0.005500443279743195,
-0.09271061420440674,
0.045157112181186676,
0.059553876519203186,
0.01087950263172388,
-0.030847107991576195,
-0.021595286205410957,
0.06798975169658661,
0.10376568138599396,
0.04817008972167969,
-0.11579129099845886,
0.02099461667239666,
0.030559051781892776,
-0.029710326343774796,
-0.04048923775553703,
-0.028966808691620827,
0.03881533071398735,
-0.06508946418762207,
0.0782240703701973,
-0.06674547493457794,
-0.06821799278259277,
0.022090816870331764,
0.029858149588108063,
0.00662760017439723,
0.0593193843960762,
-0.007158446125686169,
0.00001616620647837408,
-0.026753172278404236,
0.04312968626618385,
-0.02559608593583107,
-0.013797528110444546,
-0.01158073078840971
] |
sgugger/tiny-distilbert-classification | a30e0f7dc9dc24b0dacce98fd144e0a7ffb70a1a | 2021-07-29T17:12:02.000Z | [
"pytorch",
"tf",
"distilbert",
"text-classification",
"transformers"
] | text-classification | false | sgugger | null | sgugger/tiny-distilbert-classification | 2,783 | null | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
DeepESP/gpt2-spanish | 1b935e39cf9893108bd2f4fb5317f48ae1c3ab5e | 2021-10-19T08:52:48.000Z | [
"pytorch",
"tf",
"jax",
"gpt2",
"text-generation",
"es",
"dataset:ebooks",
"transformers",
"GPT-2",
"Spanish",
"ebooks",
"nlg",
"license:mit"
] | text-generation | false | DeepESP | null | DeepESP/gpt2-spanish | 2,774 | 9 | transformers | ---
language: es
tags:
- GPT-2
- Spanish
- ebooks
- nlg
datasets:
- ebooks
widget:
- text: "Quisiera saber que va a suceder"
license: mit
---
# GPT2-Spanish
GPT2-Spanish is a language generation model trained from scratch on 11.5GB of Spanish text, using a Byte Pair Encoding (BPE) tokenizer trained for this purpose. The model configuration is the same as that of the small version of the original OpenAI GPT-2 model.
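A minimal generation sketch with the Hugging Face `pipeline` API is shown below; the prompt is the widget example above, and the sampling settings are illustrative assumptions rather than recommended values.
```python
from transformers import pipeline

# Text-generation pipeline built on this model and its Spanish BPE tokenizer.
generator = pipeline("text-generation", model="DeepESP/gpt2-spanish")

outputs = generator(
    "Quisiera saber que va a suceder",
    max_length=50,   # illustrative sampling settings, not tuned values
    do_sample=True,
    top_k=50,
)
print(outputs[0]["generated_text"])
```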
## Corpus
This model was trained on a corpus of 11.5GB of text: 3.5GB of Wikipedia articles and 8GB of books (narrative, short stories, theater, poetry, essays, and popular science).
## Tokenizer
The texts are tokenized using a byte-level version of Byte Pair Encoding (BPE) (for Unicode characters) and a vocabulary size of 50257. The inputs are sequences of 1024 consecutive tokens.
The tokenizer was trained from scratch on the Spanish corpus, because the tokenizer of the English models proved limited in capturing the semantic relations of Spanish, owing to the morphosyntactic differences between the two languages.
In addition to the special token "<|endoftext|>" used to mark the end of a text in the OpenAI GPT-2 models, the tokens "<|talk|>" and "<|ax1|>" through "<|ax9|>" were included so that they can serve as prompts in future training.
## Training
The model and tokenizer were trained using the Hugging Face libraries with an Nvidia Tesla V100 GPU with 16GB memory on Google Colab servers.
## Authors
The model was trained by Alejandro Oñate Latorre (Spain) and Jorge Ortiz Fuentes (Chile), members of -Deep ESP-, an open-source community on Natural Language Processing in Spanish (https://t.me/joinchat/VoEp1bPrDYEexc6h).
Thanks to the members of the community who collaborated with funding for the initial tests.
## Cautions
The model generates text according to the patterns learned from the training corpus. Since these data were not filtered, the model may generate offensive or discriminatory content.
| [
-0.026082636788487434,
-0.10646127909421921,
0.014615953899919987,
0.02526036649942398,
0.04547739773988724,
-0.000042489493353059515,
0.012740319594740868,
0.042632270604372025,
0.09319906681776047,
-0.018599100410938263,
0.04684177786111832,
0.007829070091247559,
0.014014819636940956,
0.003192880656570196,
0.05427471548318863,
0.0022987443953752518,
-0.01666463166475296,
0.01578441821038723,
-0.1035141870379448,
-0.04052111878991127,
0.11944513022899628,
0.06579238921403885,
0.015450908802449703,
0.014627208933234215,
0.02358238771557808,
0.012391941621899605,
0.01580672897398472,
-0.05761672928929329,
0.034097034484148026,
-0.0015843327855691314,
0.02548632211983204,
0.023113276809453964,
0.002105784835293889,
0.035408638417720795,
-0.035268623381853104,
0.03967266157269478,
0.04420137405395508,
-0.055712901055812836,
0.01576627418398857,
-0.02210926078259945,
-0.027362758293747902,
0.009876633062958717,
0.005198008380830288,
0.0854201540350914,
0.10712479799985886,
0.0294407457113266,
-0.022680802270770073,
0.03343089669942856,
-0.055728524923324585,
0.00932501070201397,
-0.10112199187278748,
0.007999420166015625,
-0.014184446074068546,
0.03771475329995155,
0.013581868261098862,
0.010129452683031559,
-0.028811616823077202,
0.04260851815342903,
0.033336833119392395,
-0.007130684331059456,
-0.08462637662887573,
-0.05973878130316734,
-0.06022557243704796,
0.0017880778759717941,
-0.07103470712900162,
-0.07784311473369598,
0.053951360285282135,
0.024547843262553215,
-0.005432952661067247,
-0.018094008788466454,
-0.06333480030298233,
0.08708608895540237,
0.0016001228941604495,
0.09533938765525818,
-0.002102666301652789,
0.0629534125328064,
0.006436711642891169,
0.009008705615997314,
-0.01948339119553566,
-0.10709241032600403,
0.06003013998270035,
-0.010246740654110909,
0.10509287565946579,
-0.010250277817249298,
-0.018434492871165276,
-0.036645177751779556,
0.03783074766397476,
0.07114522159099579,
0.012270170263946056,
0.11030525714159012,
-0.03225528448820114,
-0.014084032736718655,
0.12688076496124268,
0.018406519666314125,
-0.0424543097615242,
0.005184623412787914,
-0.0027546717319637537,
-0.03969277814030647,
0.009308451786637306,
0.10300706326961517,
0.05258210375905037,
0.06158385053277016,
0.0166629645973444,
-0.017888864502310753,
-0.07480832189321518,
-0.05212099477648735,
-0.010334602557122707,
0.010694330558180809,
0.05428881570696831,
-0.02362910844385624,
0.07988610118627548,
0.046418286859989166,
-0.03494713827967644,
-0.005800458136945963,
-0.0022914400324225426,
0.0351945236325264,
-0.031220829114317894,
-0.023851875215768814,
0.08436527103185654,
0.06825771182775497,
-0.06885965168476105,
0.030873281881213188,
-0.024790339171886444,
-0.0706019327044487,
-0.06413429230451584,
-0.02590770274400711,
-0.03049449808895588,
2.7014441589140008e-33,
0.04804453253746033,
0.060444097965955734,
-0.02914697863161564,
0.03921781852841377,
-0.0400388166308403,
0.024382228031754494,
-0.025596052408218384,
-0.006778344977647066,
-0.042858611792325974,
-0.09763173013925552,
-0.05246787145733833,
0.011414455249905586,
-0.09396164119243622,
0.10618633031845093,
0.05060604587197304,
0.020071357488632202,
-0.0544101782143116,
0.03876989334821701,
0.010713337920606136,
-0.0012688596034422517,
0.04380926862359047,
0.05034356936812401,
0.06226590648293495,
-0.021249961107969284,
-0.06685949862003326,
0.042840681970119476,
0.004192693624645472,
-0.1365301012992859,
0.004069341812282801,
0.047687824815511703,
-0.10185978561639786,
-0.047393690794706345,
0.04057624191045761,
0.04665578901767731,
0.02521476149559021,
-0.06332185864448547,
0.09086958318948746,
-0.11490420997142792,
0.025201210752129555,
-0.06961239874362946,
-0.015608205460011959,
0.06677138060331345,
0.09069664031267166,
-0.03661659359931946,
-0.05484504625201225,
-0.05349460989236832,
0.05224712938070297,
-0.00942278653383255,
-0.008327445946633816,
0.051673807203769684,
-0.018034419044852257,
-0.0008894525817595422,
-0.04378034546971321,
0.001077154534868896,
0.012389355339109898,
0.060460858047008514,
-0.006931266747415066,
0.013295517303049564,
0.051952071487903595,
0.037119876593351364,
0.04817991331219673,
0.05785844847559929,
0.13763713836669922,
0.04370575770735741,
0.05007404834032059,
0.0756249949336052,
-0.06477620452642441,
-0.0065363612957298756,
0.08865532279014587,
0.018942346796393394,
-0.05910586565732956,
-0.039156969636678696,
0.022036762908101082,
-0.020219145342707634,
-0.009334259666502476,
-0.018053550273180008,
0.05346530303359032,
-0.09377791732549667,
-0.06465089321136475,
0.07788529247045517,
-0.06288040429353714,
-0.0072088297456502914,
-0.002106619765982032,
-0.07458257675170898,
-0.07877760380506516,
0.017397047951817513,
0.036532074213027954,
-0.043542977422475815,
0.015381133183836937,
-0.00461967708542943,
-0.012703120708465576,
-0.013936235569417477,
-0.08079426735639572,
-0.043044865131378174,
0.008866663090884686,
-2.6965588778603803e-33,
-0.041615989059209824,
-0.0028345317114144564,
-0.019760524854063988,
0.06509126722812653,
-0.0728575587272644,
-0.11435678601264954,
-0.008717479184269905,
0.06620444357395172,
-0.05306398868560791,
-0.10027577728033066,
-0.012434282340109348,
-0.052106887102127075,
0.10541334748268127,
-0.02399156615138054,
0.014872612431645393,
-0.08552204072475433,
0.015388567000627518,
-0.008511019870638847,
0.023340122774243355,
0.10218403488397598,
0.0020451608579605818,
0.00584468012675643,
-0.08372224867343903,
0.052542250603437424,
0.0691479742527008,
0.007013786118477583,
-0.030186301097273827,
0.04456702619791031,
0.028425373136997223,
-0.0001609803002793342,
0.020236525684595108,
0.019357332959771156,
-0.03957321122288704,
-0.022129885852336884,
-0.09407877177000046,
-0.026444358751177788,
0.07896313816308975,
0.04922166466712952,
-0.016031483188271523,
0.09063109755516052,
0.06469692289829254,
0.006080301944166422,
-0.043694064021110535,
-0.020424818620085716,
-0.08736507594585419,
0.0541272796690464,
-0.05467753857374191,
-0.005575002636760473,
0.03664877265691757,
-0.06079597771167755,
0.06483843922615051,
0.010159577243030071,
-0.062467750161886215,
-0.038624875247478485,
-0.0023561299312859774,
-0.13442014157772064,
0.0039189658127725124,
-0.03024563379585743,
-0.07736948132514954,
-0.03732181340456009,
-0.01894194260239601,
-0.008712765760719776,
0.031907692551612854,
-0.043697018176317215,
0.009920613840222359,
-0.027581162750720978,
-0.031188225373625755,
0.06502455472946167,
-0.02205204963684082,
0.0054099420085549355,
0.010061237029731274,
-0.04915205016732216,
0.03629318252205849,
0.030875686556100845,
-0.07103648036718369,
-0.004846465308219194,
-0.05680015683174133,
-0.08560900390148163,
0.005804131738841534,
-0.04248795285820961,
-0.02596914954483509,
0.036272212862968445,
0.046199776232242584,
-0.017840590327978134,
0.08026659488677979,
0.004191526211798191,
0.008582019247114658,
0.06860306113958359,
-0.023203309625387192,
0.06397923827171326,
-0.01750396378338337,
0.046996358782052994,
0.016088644042611122,
0.1092238575220108,
-0.023268673568964005,
-4.928585539687447e-8,
-0.10036026686429977,
-0.08543693274259567,
-0.05667533352971077,
0.043692369014024734,
-0.0632617175579071,
-0.05135186389088631,
-0.044563859701156616,
0.05795900896191597,
-0.004856989253312349,
-0.04537979140877724,
0.026169562712311745,
-0.0192934051156044,
-0.10812611132860184,
-0.03243906795978546,
-0.023265480995178223,
0.0900031328201294,
0.0730813518166542,
0.033177152276039124,
-0.006827827077358961,
-0.007914028130471706,
0.035199277102947235,
0.02132420241832733,
-0.03570567071437836,
-0.05455496907234192,
-0.0036273349542170763,
0.0026706706266850233,
-0.06009562313556671,
0.042920585721731186,
0.026588737964630127,
-0.0936935544013977,
-0.023826679214835167,
-0.00777853699401021,
-0.05805236101150513,
-0.08848080039024353,
0.0193537138402462,
0.05582818016409874,
-0.04051310941576958,
-0.03991267457604408,
-0.015325214713811874,
0.0030240679625421762,
0.1387616991996765,
0.007900134660303593,
-0.07168209552764893,
0.001889705192297697,
0.05936175957322121,
-0.016901392489671707,
-0.06355007737874985,
-0.034098852425813675,
0.03696804866194725,
-0.029944967478513718,
0.04081444814801216,
0.0010607787407934666,
-0.013259132392704487,
0.026782726868987083,
0.028269195929169655,
0.012518307194113731,
-0.04283798113465309,
0.001366466167382896,
-0.005438411142677069,
0.053662557154893875,
-0.0039635226130485535,
0.03480786830186844,
0.05947698652744293,
-0.041941940784454346
] |
Ilyes/wav2vec2-large-xlsr-53-french | a3233bc9949d6da07e5e18660b004a6c120dc135 | 2022-02-09T08:28:27.000Z | [
"pytorch",
"wav2vec2",
"automatic-speech-recognition",
"fr",
"dataset:common_voice",
"transformers",
"audio",
"speech",
"xlsr-fine-tuning-week",
"license:apache-2.0",
"model-index"
] | automatic-speech-recognition | false | Ilyes | null | Ilyes/wav2vec2-large-xlsr-53-french | 2,774 | 1 | transformers | ---
language: fr
datasets:
- common_voice
tags:
- audio
- automatic-speech-recognition
- speech
- xlsr-fine-tuning-week
license: apache-2.0
model-index:
- name: wav2vec2-large-xlsr-53-French by Ilyes Rebai
results:
- task:
name: Speech Recognition
type: automatic-speech-recognition
dataset:
name: Common Voice fr
type: common_voice
args: fr
metrics:
- name: Test WER
type: wer
value: 12.82
---
## Evaluation on Common Voice FR Test
The script used for training and evaluation can be found here: https://github.com/irebai/wav2vec2
```python
import torch
import torchaudio
from datasets import load_dataset, load_metric
from transformers import (
Wav2Vec2ForCTC,
Wav2Vec2Processor,
)
import re
model_name = "Ilyes/wav2vec2-large-xlsr-53-french"
device = "cuda" if torch.cuda.is_available() else "cpu"
model = Wav2Vec2ForCTC.from_pretrained(model_name).to(device)
processor = Wav2Vec2Processor.from_pretrained(model_name)

ds = load_dataset("common_voice", "fr", split="test", cache_dir="./data/fr")

chars_to_ignore_regex = '[\,\?\.\!\;\:\"\“\%\‘\”\�\‘\’\’\’\‘\…\·\!\ǃ\?\«\‹\»\›“\”\\ʿ\ʾ\„\∞\\|\.\,\;\:\*\—\–\─\―\_\/\:\ː\;\,\=\«\»\→]'

# Common Voice audio is 48 kHz; the model expects 16 kHz.
resampler = torchaudio.transforms.Resample(48_000, 16_000)

def map_to_array(batch):
    speech, _ = torchaudio.load(batch["path"])
    batch["speech"] = resampler.forward(speech.squeeze(0)).numpy()
    batch["sampling_rate"] = resampler.new_freq
    batch["sentence"] = re.sub(chars_to_ignore_regex, '', batch["sentence"]).lower().replace("’", "'")
    return batch

ds = ds.map(map_to_array)

def map_to_pred(batch):
    features = processor(batch["speech"], sampling_rate=batch["sampling_rate"][0], padding=True, return_tensors="pt")
    input_values = features.input_values.to(device)
    attention_mask = features.attention_mask.to(device)
    with torch.no_grad():
        logits = model(input_values, attention_mask=attention_mask).logits
    pred_ids = torch.argmax(logits, dim=-1)
    batch["predicted"] = processor.batch_decode(pred_ids)
    batch["target"] = batch["sentence"]
    return batch
result = ds.map(map_to_pred, batched=True, batch_size=16, remove_columns=list(ds.features.keys()))
wer = load_metric("wer")
print(wer.compute(predictions=result["predicted"], references=result["target"]))
```
## Results
WER=12.82%
CER=4.40%
| [
-0.10427672415971756,
-0.07789427787065506,
-0.06257486343383789,
-0.05701051652431488,
0.007487901486456394,
0.031637392938137054,
-0.01099720411002636,
0.013164527714252472,
-0.022206496447324753,
-0.07619675993919373,
-0.013932320289313793,
-0.11351020634174347,
-0.041835494339466095,
-0.014770581386983395,
-0.02912643365561962,
-0.08062663674354553,
-0.017324671149253845,
-0.011199799366295338,
-0.05888839066028595,
-0.11542852222919464,
0.10717026144266129,
0.07443837821483612,
0.09617389738559723,
-0.06799381226301193,
0.037443291395902634,
-0.00533986184746027,
-0.08480978012084961,
0.05116195231676102,
0.06455423682928085,
-0.01594349928200245,
0.10835319012403488,
0.07109024375677109,
0.06667893379926682,
0.04679929092526436,
-0.022731000557541847,
0.023406801745295525,
-0.001143097528256476,
-0.06402834504842758,
-0.022968897596001625,
-0.020279739052057266,
-0.03173762187361717,
-0.010438581928610802,
-0.005826783366501331,
-0.07614254206418991,
-0.01142231933772564,
-0.05727344751358032,
-0.1040753647685051,
0.0033929143100976944,
-0.029052987694740295,
0.01617666706442833,
-0.053620900958776474,
-0.03665148466825485,
0.03802601620554924,
0.06499174982309341,
-0.05586947128176689,
0.02338593080639839,
0.020697733387351036,
0.014822731725871563,
0.05327823385596275,
-0.028492819517850876,
-0.0571962334215641,
-0.05592112988233566,
-0.02076748013496399,
-0.0022698615211993456,
-0.06024487316608429,
-0.01371361780911684,
-0.04619020223617554,
-0.025008387863636017,
0.0076524242758750916,
-0.004731339868158102,
-0.15337112545967102,
0.05905837193131447,
0.04334352910518646,
0.03173811733722687,
0.06249654293060303,
0.0007348828366957605,
0.041927993297576904,
-0.029151564463973045,
0.04361428692936897,
-0.09587433189153671,
-0.018124472349882126,
-0.06707581877708435,
0.034142132848501205,
0.008940394967794418,
0.12161523848772049,
-0.001722589135169983,
0.06203225627541542,
-0.0021565991919487715,
0.005808803718537092,
-0.014414002187550068,
-0.06741056591272354,
-0.03558631241321564,
-0.011253220029175282,
0.06239151582121849,
-0.020676255226135254,
0.07915493845939636,
0.0161789208650589,
0.0664324015378952,
-0.04934446141123772,
0.08850044757127762,
-0.02356976829469204,
-0.06552039086818695,
0.03894485533237457,
-0.02277805097401142,
-0.05027257651090622,
-0.027824632823467255,
-0.012505356222391129,
0.03988918662071228,
0.03840342536568642,
-0.08169925957918167,
-0.015147347003221512,
0.004497657995671034,
-0.01782212220132351,
-0.03575485572218895,
0.07440345734357834,
0.024943852797150612,
-0.040897224098443985,
-0.0647566094994545,
-0.01047610118985176,
0.04524385556578636,
-0.03970028832554817,
-0.020794911310076714,
-0.05192764475941658,
0.0038103158585727215,
0.02561328001320362,
-0.019441066309809685,
-0.019578387960791588,
7.349735082162166e-33,
0.00041856756433844566,
0.05483470484614372,
0.017502037808299065,
-0.0359540730714798,
-0.021692533046007156,
-0.05402090772986412,
0.0024879348929971457,
0.047044627368450165,
-0.01696513406932354,
-0.010769815184175968,
-0.04095037654042244,
0.04118970409035683,
-0.07027759402990341,
0.049618929624557495,
-0.046332865953445435,
0.022303901612758636,
0.018645422533154488,
0.009113926440477371,
0.015009059570729733,
-0.013863048516213894,
0.1891242116689682,
0.035978011786937714,
0.04968404024839401,
-0.0218791700899601,
0.0775059163570404,
0.02422206662595272,
0.06040683388710022,
-0.06188971921801567,
0.0007594830822199583,
0.03993166610598564,
-0.057679228484630585,
-0.041844166815280914,
0.025010615587234497,
0.014059578999876976,
0.016017472371459007,
-0.005748085677623749,
0.007965309545397758,
0.07229791581630707,
-0.025491639971733093,
-0.0345756858587265,
0.06019442155957222,
0.006844639778137207,
-0.012019529938697815,
-0.04675090312957764,
-0.03547609969973564,
-0.06514657288789749,
-0.03977822884917259,
0.05558360740542412,
0.06241552159190178,
0.04847114905714989,
-0.02424202859401703,
-0.0001413780264556408,
-0.026145251467823982,
0.04835693910717964,
-0.00946824997663498,
-0.01047044713050127,
0.0421409010887146,
0.0495884083211422,
0.03475750610232353,
-0.054560884833335876,
0.017932284623384476,
-0.013893038034439087,
0.0266868993639946,
0.016803737729787827,
0.009890733286738396,
-0.06062954664230347,
-0.02895171381533146,
-0.027311250567436218,
0.0212746262550354,
0.011225209571421146,
-0.038670867681503296,
-0.020091470330953598,
0.05058157071471214,
0.03699229657649994,
0.0578923299908638,
0.005668159574270248,
0.009006248787045479,
-0.0688425824046135,
-0.03233702853322029,
0.04348445683717728,
-0.02037474326789379,
0.07807647436857224,
-0.022110654041171074,
-0.0708402544260025,
-0.02745130844414234,
-0.055808667093515396,
0.025215743109583855,
-0.11949019134044647,
-0.03583724424242973,
-0.04299090802669525,
-0.07889307290315628,
0.020552432164549828,
-0.03529876098036766,
-0.03830220550298691,
-0.021478749811649323,
-9.00407954641939e-33,
0.03357204422354698,
0.15748271346092224,
-0.015578295104205608,
0.10245849192142487,
0.010855800472199917,
-0.004708050284534693,
0.12228426337242126,
0.09281373769044876,
-0.011842029169201851,
-0.05834878236055374,
0.0814988762140274,
-0.10147354006767273,
0.05449005216360092,
-0.03986291587352753,
0.08672290295362473,
-0.018427478149533272,
-0.06200790777802467,
0.028926685452461243,
0.06366918981075287,
0.07029663771390915,
-0.020311851054430008,
0.07956528663635254,
-0.038799624890089035,
0.05020105838775635,
-0.09461921453475952,
-0.032651230692863464,
-0.013465024530887604,
0.02539583295583725,
-0.019960874691605568,
-0.017979957163333893,
-0.04797576367855072,
0.05055071786046028,
-0.1261005401611328,
0.09685034304857254,
-0.032032113522291183,
-0.0058523607440292835,
0.06681448966264725,
-0.039767082780599594,
-0.01935100555419922,
0.09158067405223846,
0.1018764078617096,
0.0503242090344429,
-0.1061166301369667,
-0.04139687120914459,
-0.02603483758866787,
-0.01003417931497097,
-0.020494509488344193,
-0.003904067212715745,
-0.022404540330171585,
-0.08226435631513596,
0.04569626599550247,
-0.010224183090031147,
-0.04864403232932091,
0.01624596305191517,
-0.03138778731226921,
-0.016126487404108047,
0.0400797575712204,
-0.06185312569141388,
-0.09089300781488419,
0.005628557875752449,
-0.034983713179826736,
-0.016376785933971405,
-0.040883585810661316,
-0.052022840827703476,
0.029154624789953232,
0.01804223656654358,
-0.044739075005054474,
0.013505887240171432,
0.07067857682704926,
-0.008540482260286808,
0.003914121072739363,
0.021800467744469643,
0.01644311286509037,
-0.008811812847852707,
-0.05190904811024666,
0.011771482415497303,
-0.0958516076207161,
-0.0944756418466568,
-0.03851054608821869,
-0.056589141488075256,
-0.030112754553556442,
0.004456731025129557,
0.02889912761747837,
0.01298094168305397,
0.01794852502644062,
0.12132523953914642,
0.02304716780781746,
0.048145368695259094,
-0.0002100559213431552,
0.01149112731218338,
-0.03162534907460213,
0.07070320099592209,
0.011777915060520172,
0.06397653371095657,
0.05066172406077385,
-5.5450882996410655e-8,
-0.05735481530427933,
0.009464832954108715,
0.024350248277187347,
0.0013918994227424264,
-0.07846792787313461,
-0.0896957740187645,
-0.010135950520634651,
0.007710770238190889,
-0.015428115613758564,
-0.011548482812941074,
0.027017148211598396,
-0.007997940294444561,
0.00391922565177083,
0.03981829434633255,
-0.0033858749084174633,
0.038181986659765244,
-0.012274042703211308,
0.1475866734981537,
-0.020514804869890213,
-0.10344072431325912,
0.031984034925699234,
0.013154124841094017,
0.06507323682308197,
0.008243399672210217,
0.027064422145485878,
-0.040086306631565094,
-0.03698156028985977,
0.042053814977407455,
-0.03388724848628044,
0.006847916636615992,
-0.015752248466014862,
0.035968467593193054,
0.037321776151657104,
-0.05588338151574135,
0.03789716586470604,
0.05641643702983856,
-0.04568525031208992,
-0.010535847395658493,
-0.021298237144947052,
0.06690894067287445,
0.05223732069134712,
0.11279744654893875,
-0.12583695352077484,
-0.016627904027700424,
0.06980708241462708,
-0.0013031598646193743,
-0.019019395112991333,
-0.06983274966478348,
0.04033910483121872,
-0.013823219574987888,
0.028655430302023888,
0.07226637750864029,
-0.04563586041331291,
-0.0008473100606352091,
0.053067490458488464,
0.03574046865105629,
-0.05198618769645691,
-0.0003030335938092321,
0.022228144109249115,
0.0047628069296479225,
0.009856952354311943,
0.04681108891963959,
-0.031505655497312546,
-0.07211870700120926
] |
juliensimon/reviews-sentiment-analysis | 7086631c39dcbb051d17ad01d07d747073383882 | 2022-05-03T09:25:01.000Z | [
"pytorch",
"distilbert",
"text-classification",
"en",
"dataset:generated_reviews_enth",
"transformers",
"sentiment-analysis"
] | text-classification | false | juliensimon | null | juliensimon/reviews-sentiment-analysis | 2,773 | 1 | transformers | ---
language:
- en
tags:
- distilbert
- sentiment-analysis
datasets:
- generated_reviews_enth
---
DistilBERT model fine-tuned on English-language product reviews.
A notebook for Amazon SageMaker is available in the 'code' subfolder.
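As a rough usage sketch (the example review is made up, and the returned label names depend on the fine-tuning setup), the model can be queried with the `pipeline` API:
```python
from transformers import pipeline

classifier = pipeline(
    "text-classification",
    model="juliensimon/reviews-sentiment-analysis",
)

# Example review; the label vocabulary comes from the fine-tuning configuration.
print(classifier("Great sound quality, but the battery died after a week."))
```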
| [
-0.10655969381332397,
-0.08960875868797302,
-0.07410435378551483,
0.023430628702044487,
0.07335308194160461,
-0.008552580140531063,
0.018859682604670525,
0.033899858593940735,
-0.02217845432460308,
-0.010008522309362888,
0.05062231793999672,
0.026879969984292984,
0.018879512324929237,
-0.03864666074514389,
0.05776691064238548,
0.10051534324884415,
-0.029338104650378227,
-0.020942779257893562,
-0.08330544829368591,
-0.0877644419670105,
0.008181571029126644,
0.03917056322097778,
0.005866515450179577,
0.11057277023792267,
0.012087909504771233,
-0.011014893651008606,
-0.032238785177469254,
0.05399954319000244,
0.018610868602991104,
-0.06114698573946953,
0.06669269502162933,
-0.00034693864290602505,
0.052850112318992615,
0.07745356112718582,
0.009399657137691975,
0.031852006912231445,
-0.0009836818790063262,
-0.06723558902740479,
0.029180267825722694,
0.002103232778608799,
-0.07257430255413055,
0.03719683736562729,
-0.07722325623035431,
0.014238500036299229,
0.07483803480863571,
-0.016016867011785507,
-0.04816348850727081,
0.0062353904359042645,
-0.03517564758658409,
-0.007398281712085009,
-0.0894927978515625,
-0.00003618189657572657,
0.006816535256803036,
-0.011083169840276241,
0.021790826693177223,
-0.01741975173354149,
-0.01170680858194828,
-0.03749755397439003,
0.05154522508382797,
-0.05114646628499031,
0.060200098901987076,
-0.06600338965654373,
0.007581774145364761,
0.05048416927456856,
-0.06931625306606293,
0.0396554060280323,
-0.05054696276783943,
0.056037597358226776,
-0.02229505218565464,
-0.04603878781199455,
-0.01596754789352417,
-0.043989747762680054,
0.01884605549275875,
0.10358654707670212,
0.007507129106670618,
0.015114150941371918,
0.047365203499794006,
-0.02800580859184265,
0.0016938531771302223,
-0.07056812196969986,
-0.05041424185037613,
-0.005926674231886864,
-0.03309224173426628,
0.059466343373060226,
0.015493601560592651,
-0.1021292433142662,
0.10743244737386703,
0.03330007568001747,
0.053161755204200745,
0.009020867757499218,
-0.0019250219920650125,
-0.08476033806800842,
0.053016506135463715,
0.01156823243945837,
-0.08144158124923706,
0.040781136602163315,
0.029410438612103462,
-0.06179165840148926,
-0.007099297363311052,
0.10018026828765869,
0.0035797273740172386,
0.035257723182439804,
0.024240346625447273,
-0.06845329701900482,
-0.08446504920721054,
-0.006939406506717205,
0.02034466154873371,
0.07571146637201309,
0.05071862414479256,
-0.05836211144924164,
-0.00371980806812644,
0.00831913948059082,
-0.0012295391643419862,
-0.057965219020843506,
0.03743860125541687,
-0.05216292291879654,
0.010950278490781784,
-0.007699201814830303,
0.08298973739147186,
0.10542111098766327,
0.014353590086102486,
0.06016800180077553,
0.06582451611757278,
-0.06377368420362473,
-0.07461007684469223,
0.017598723992705345,
-0.05303365737199783,
-5.510009464979506e-34,
-0.00047486653784289956,
-0.036202989518642426,
-0.039468709379434586,
-0.009234166704118252,
-0.021715376526117325,
-0.004736391827464104,
-0.02079654484987259,
0.007550003007054329,
-0.09746795147657394,
-0.05732960253953934,
0.03193599730730057,
0.06346394866704941,
-0.05477098748087883,
0.12442389875650406,
-0.03667347878217697,
0.0036811213940382004,
0.031082630157470703,
0.022921357303857803,
0.019152626395225525,
-0.010919995605945587,
0.034559037536382675,
0.006039596162736416,
0.016179855912923813,
0.021111726760864258,
-0.011488320305943489,
-0.02958633564412594,
0.061869196593761444,
-0.021954476833343506,
0.07028725743293762,
0.04691256582736969,
-0.005513689015060663,
0.012867811135947704,
0.006385236047208309,
-0.0282759852707386,
-0.0214859452098608,
-0.005918622948229313,
-0.10877607017755508,
0.011583122424781322,
0.08809308707714081,
-0.045051209628582,
0.016122328117489815,
0.058334287256002426,
-0.0483953095972538,
0.024106021970510483,
0.008146327920258045,
0.06189633160829544,
0.03905734419822693,
0.044336266815662384,
0.08709922432899475,
-0.045749738812446594,
-0.07417842000722885,
-0.03434706851840019,
0.08572101593017578,
-0.04390290379524231,
-0.014168559573590755,
-0.016048895195126534,
0.03041796013712883,
-0.02435428276658058,
0.05411364138126373,
-0.04481692612171173,
-0.040928635746240616,
0.09903973340988159,
0.03168325498700142,
-0.1409095972776413,
-0.03125058859586716,
-0.005073781590908766,
0.028551725670695305,
0.06399817019701004,
0.010867628268897533,
0.03429502621293068,
-0.11183036863803864,
0.06331723928451538,
-0.006173432804644108,
0.055382922291755676,
-0.03654538467526436,
-0.02747572399675846,
-0.052394431084394455,
-0.029085684567689896,
-0.010505836457014084,
-0.024869035929441452,
-0.12044207006692886,
-0.052880559116601944,
-0.014863355085253716,
0.055812690407037735,
-0.06143423914909363,
-0.04801718518137932,
-0.028111005201935768,
0.003312736516818404,
0.018600497394800186,
-0.030188733711838722,
-0.05455274507403374,
0.06492564082145691,
0.014264354482293129,
-0.0730505958199501,
0.0045834253542125225,
-1.2388930774304529e-33,
-0.06824839115142822,
-0.08064820617437363,
-0.09227566421031952,
0.10057460516691208,
-0.03006768599152565,
-0.0025838189758360386,
-0.039315707981586456,
0.07101111114025116,
0.07229022681713104,
0.030100002884864807,
0.03065507672727108,
-0.1003991886973381,
-0.03806697204709053,
-0.007234659511595964,
0.08682646602392197,
0.023012148216366768,
-0.04555043578147888,
-0.006917452439665794,
0.0020573448855429888,
0.01834038458764553,
-0.062120579183101654,
0.029288550838828087,
-0.026032762601971626,
0.018343061208724976,
0.05229726433753967,
0.05097781866788864,
-0.005368335638195276,
0.024089928716421127,
0.024124784395098686,
-0.02648822031915188,
0.025738826021552086,
-0.012708812020719051,
-0.06635307520627975,
0.07411732524633408,
-0.08524487912654877,
-0.04519939050078392,
0.03292158618569374,
-0.08467604219913483,
0.009258273988962173,
0.046750571578741074,
0.07326997816562653,
0.07247746735811234,
0.04257978871464729,
0.014148459769785404,
-0.03312547504901886,
0.0019926256500184536,
-0.05726886913180351,
-0.01898612640798092,
-0.044575102627277374,
-0.05602771416306496,
0.020861079916357994,
0.01559226680546999,
-0.07605408877134323,
-0.05365622416138649,
-0.056439924985170364,
-0.04256276786327362,
-0.0007881829515099525,
0.008471010252833366,
-0.010547517798841,
0.012933247722685337,
-0.0860857367515564,
-0.0008899294189177454,
0.07075803726911545,
-0.013919596560299397,
-0.06385935097932816,
-0.06517089903354645,
-0.022927070036530495,
0.029647311195731163,
-0.02622784674167633,
-0.030676566064357758,
0.11056873202323914,
0.021868040785193443,
-0.03253497555851936,
-0.01669211871922016,
-0.023882122710347176,
0.08528844267129898,
0.013929495587944984,
-0.024837035685777664,
0.0016937582986429334,
-0.09147283434867859,
0.060461319983005524,
-0.010033277794718742,
0.05365598201751709,
0.02965925633907318,
0.019687602296471596,
-0.028273819014430046,
-0.06559870392084122,
0.035568445920944214,
-0.03399716317653656,
-0.026796186342835426,
-0.01749034970998764,
-0.03899829089641571,
0.018615098670125008,
0.09942552447319031,
0.03954208269715309,
-3.1253790666596615e-8,
-0.04635726287961006,
-0.08532444387674332,
0.0005029167514294386,
0.128260537981987,
0.009648939594626427,
-0.021669737994670868,
0.051343392580747604,
0.09615380316972733,
-0.009509646333754063,
0.0734410285949707,
0.07331017404794693,
0.015772143378853798,
-0.1411408931016922,
-0.0007440587505698204,
-0.0403461828827858,
0.05507310479879379,
0.010000975802540779,
0.12343193590641022,
0.037310320883989334,
0.005116741172969341,
0.08069097995758057,
0.03599948436021805,
0.021201293915510178,
-0.017268028110265732,
0.05712644010782242,
-0.030809784308075905,
-0.040697529911994934,
-0.005206031259149313,
-0.014859311282634735,
-0.05666784197092056,
0.09119506180286407,
0.009100613184273243,
-0.029969699680805206,
-0.07318483293056488,
0.05511702597141266,
0.04566957429051399,
-0.06463918834924698,
-0.017247097566723824,
-0.05083387345075607,
0.031477198004722595,
0.033244308084249496,
0.04035031050443649,
-0.09792948514223099,
-0.0675964206457138,
0.02123342454433441,
-0.036665432155132294,
-0.004876782186329365,
0.008005371317267418,
-0.011418038979172707,
0.0014371799770742655,
0.008179105818271637,
-0.07554926723241806,
0.04124156013131142,
-0.0021770752500742674,
-0.038525696843862534,
0.06285545229911804,
-0.05637957900762558,
-0.1267043799161911,
0.02543879672884941,
0.002213369822129607,
0.038481321185827255,
0.025465210899710655,
0.09392551332712173,
0.02250414341688156
] |
microsoft/layoutlm-large-uncased | 1e7d50dced3cdfea3a3d63c610e2aab36933dbef | 2021-08-11T05:28:26.000Z | [
"pytorch",
"tf",
"layoutlm",
"arxiv:1912.13318",
"transformers"
] | null | false | microsoft | null | microsoft/layoutlm-large-uncased | 2,773 | 4 | transformers | # LayoutLM
Multimodal (text + layout/format + image) pre-training for document AI
[Microsoft Document AI](https://www.microsoft.com/en-us/research/project/document-ai/) | [GitHub](https://aka.ms/layoutlm)
## Model description
LayoutLM is a simple but effective pre-training method of text and layout for document image understanding and information extraction tasks, such as form understanding and receipt understanding. LayoutLM achieves state-of-the-art (SOTA) results on multiple datasets. For more details, please refer to our paper:
[LayoutLM: Pre-training of Text and Layout for Document Image Understanding](https://arxiv.org/abs/1912.13318)
Yiheng Xu, Minghao Li, Lei Cui, Shaohan Huang, Furu Wei, Ming Zhou, [KDD 2020](https://www.kdd.org/kdd2020/accepted-papers)
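A minimal usage sketch with the `transformers` LayoutLM classes is shown below. The words and bounding boxes are made-up placeholders; LayoutLM expects word boxes normalized to a 0-1000 scale.
```python
import torch
from transformers import LayoutLMTokenizer, LayoutLMModel

tokenizer = LayoutLMTokenizer.from_pretrained("microsoft/layoutlm-large-uncased")
model = LayoutLMModel.from_pretrained("microsoft/layoutlm-large-uncased")

words = ["Hello", "world"]           # placeholder OCR tokens
word_boxes = [[637, 773, 693, 782],  # placeholder boxes, 0-1000 normalized
              [698, 773, 733, 782]]

# Repeat each word's box for its sub-word pieces, then add [CLS]/[SEP] boxes.
token_boxes = []
for word, box in zip(words, word_boxes):
    token_boxes.extend([box] * len(tokenizer.tokenize(word)))
token_boxes = [[0, 0, 0, 0]] + token_boxes + [[1000, 1000, 1000, 1000]]

encoding = tokenizer(" ".join(words), return_tensors="pt")
outputs = model(
    input_ids=encoding["input_ids"],
    attention_mask=encoding["attention_mask"],
    bbox=torch.tensor([token_boxes]),
)
print(outputs.last_hidden_state.shape)  # (1, sequence_length, 1024)
```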
## Training data
We pre-train LayoutLM on the IIT-CDIP Test Collection 1.0\* dataset in two settings.
* LayoutLM-Base, Uncased (11M documents, 2 epochs): 12-layer, 768-hidden, 12-heads, 113M parameters
* LayoutLM-Large, Uncased (11M documents, 2 epochs): 24-layer, 1024-hidden, 16-heads, 343M parameters **(This Model)**
## Citation
If you find LayoutLM useful in your research, please cite the following paper:
``` latex
@misc{xu2019layoutlm,
title={LayoutLM: Pre-training of Text and Layout for Document Image Understanding},
author={Yiheng Xu and Minghao Li and Lei Cui and Shaohan Huang and Furu Wei and Ming Zhou},
year={2019},
eprint={1912.13318},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
```
| [
-0.03517979383468628,
0.04170338064432144,
0.011754426173865795,
0.01659846119582653,
0.02433745376765728,
0.0198415108025074,
-0.05277504771947861,
0.011934496462345123,
-0.003785250708460808,
-0.025287169963121414,
0.05691036954522133,
0.03305622562766075,
0.04870953783392906,
0.029094738885760307,
-0.02296903170645237,
-0.027341173961758614,
0.04557483643293381,
0.021809183061122894,
-0.10870909690856934,
0.0382225476205349,
0.04910099506378174,
0.013605069369077682,
0.09633424133062363,
-0.06020805984735489,
0.025403093546628952,
0.1010117307305336,
-0.03130302578210831,
-0.04074963554739952,
0.04773438349366188,
-0.05781271681189537,
0.051281172782182693,
0.0703294649720192,
0.08265305310487747,
0.12215334177017212,
0.024390650913119316,
0.057156797498464584,
-0.02199549414217472,
0.04349563643336296,
0.05324561893939972,
-0.009701097384095192,
-0.0633632242679596,
0.017815856263041496,
-0.010886458680033684,
-0.025284506380558014,
0.13310065865516663,
0.03805244341492653,
-0.08044510334730148,
-0.04653402790427208,
-0.02752823755145073,
0.02014285698533058,
-0.09401978552341461,
0.05594273656606674,
-0.000523704627994448,
0.06249966099858284,
-0.0026654351968318224,
0.02478121966123581,
0.07066584378480911,
0.03748219460248947,
-0.04521038755774498,
0.02611670270562172,
-0.030625194311141968,
-0.08446338027715683,
-0.048590321093797684,
0.00605727219954133,
0.03454206883907318,
0.06325071305036545,
-0.025773627683520317,
-0.039214327931404114,
0.003923378884792328,
-0.08439148217439651,
-0.06936018913984299,
0.05330698937177658,
0.006501161493360996,
0.0011803882662206888,
0.0166208166629076,
0.023201165720820427,
0.021353911608457565,
0.034802597016096115,
0.02562812902033329,
-0.19641448557376862,
0.053999993950128555,
0.05181111767888069,
0.10292534530162811,
-0.011019358411431313,
0.0327008031308651,
-0.03898049145936966,
-0.04224126785993576,
0.058007847517728806,
-0.03312039375305176,
0.03988855332136154,
0.05454019829630852,
-0.06583425402641296,
-0.05311799794435501,
0.05773498862981796,
-0.06191382184624672,
0.01805051416158676,
0.035070087760686874,
-0.06529207527637482,
-0.024383237585425377,
0.01294990535825491,
0.05767298489809036,
-0.003824744140729308,
-0.01218443363904953,
0.01670820638537407,
0.008791202679276466,
-0.06718366593122482,
0.09669411927461624,
-0.06675413995981216,
0.03357016295194626,
-0.06947973370552063,
0.047416869550943375,
-0.04923621937632561,
-0.14192621409893036,
-0.026555698364973068,
-0.02633742243051529,
-0.022124022245407104,
0.024576425552368164,
0.024028541520237923,
0.02055039256811142,
-0.0064522637985646725,
-0.06338028609752655,
-0.028830457478761673,
0.013273132033646107,
0.0036365329287946224,
-0.02767452411353588,
-0.0963444635272026,
-0.04484408348798752,
4.532641197239461e-33,
0.030292708426713943,
-0.012095794081687927,
-0.06537355482578278,
0.026907846331596375,
0.033536024391651154,
-0.023925840854644775,
-0.007109840866178274,
-0.027750670909881592,
-0.009655661880970001,
-0.0481855571269989,
-0.03386989235877991,
-0.024887217208743095,
-0.04282281547784805,
0.07825987040996552,
-0.02334168367087841,
0.002228829078376293,
-0.10325738787651062,
0.07084471732378006,
0.011483118869364262,
0.041196271777153015,
0.07717501372098923,
-0.030587436631321907,
0.052164677530527115,
-0.031245678663253784,
0.04746087267994881,
0.0174504816532135,
0.030246131122112274,
-0.07694222778081894,
-0.06120957061648369,
0.00857812725007534,
-0.08448459208011627,
-0.031899306923151016,
0.020028801634907722,
0.0038696429692208767,
0.03159451484680176,
-0.0012120382161810994,
0.012655870988965034,
-0.038836732506752014,
-0.006506503559648991,
-0.01407180167734623,
-0.022770902141928673,
0.030844470486044884,
0.08644084632396698,
-0.1291586309671402,
-0.060410719364881516,
0.003910872153937817,
-0.02382919378578663,
0.0616774782538414,
0.013466293923556805,
-0.0275358185172081,
-0.015222628600895405,
0.05944697931408882,
-0.038941364735364914,
-0.08249640464782715,
0.004925816785544157,
-0.00782768428325653,
0.029475919902324677,
0.015043778344988823,
0.09455213695764542,
0.04793039709329605,
0.02036806382238865,
0.0548614002764225,
-0.037811946123838425,
0.08217668533325195,
-0.025509700179100037,
-0.01327546127140522,
-0.048323553055524826,
-0.04240715876221657,
0.05442153662443161,
-0.06619704514741898,
-0.027073673903942108,
-0.0010862174676731229,
-0.025176791474223137,
-0.07613898813724518,
0.04230111464858055,
0.0001229985209647566,
-0.0014711320400238037,
-0.031043581664562225,
-0.021294046193361282,
0.033285945653915405,
-0.03412679582834244,
0.0699756070971489,
0.0274818055331707,
-0.1120811402797699,
-0.07646481692790985,
0.04574394226074219,
0.080960214138031,
-0.13425599038600922,
-0.01869809255003929,
-0.01548229530453682,
0.03419317677617073,
0.0009437809349037707,
-0.060014694929122925,
-0.008637881837785244,
0.059451084583997726,
-4.1701216781817444e-33,
0.07323626428842545,
0.05124581977725029,
-0.03941469267010689,
-0.017731325700879097,
0.005638111848384142,
0.010028420947492123,
0.05640716850757599,
0.10476382076740265,
0.01100020669400692,
-0.05267736315727234,
0.02684294804930687,
-0.006482643075287342,
-0.07829287648200989,
-0.037891022861003876,
-0.03915846720337868,
0.0052187819965183735,
0.024998437613248825,
0.02023327723145485,
0.07962876558303833,
0.04239770397543907,
0.011758249253034592,
0.05773632973432541,
-0.11071044951677322,
0.07467016577720642,
-0.023655058816075325,
0.05212916433811188,
-0.020754970610141754,
0.044058386236429214,
-0.01013868860900402,
0.03983351215720177,
0.0018406390445306897,
-0.04025479778647423,
-0.026903318241238594,
0.025051439180970192,
-0.022174721583724022,
0.013338402844965458,
-0.0075983707793056965,
0.012018480338156223,
-0.062215838581323624,
0.09891221672296524,
0.06786885112524033,
0.03036271594464779,
-0.11553850769996643,
-0.004389723762869835,
-0.037311773747205734,
-0.03196154907345772,
-0.05455492436885834,
0.04598772153258324,
0.04807839170098305,
0.0591341108083725,
0.015799835324287415,
0.017933713272213936,
-0.014827395789325237,
-0.04027971252799034,
-0.032672811299562454,
-0.0362418107688427,
-0.008235129527747631,
-0.07296302914619446,
-0.05827149376273155,
-0.039874784648418427,
-0.001529479632154107,
0.019379297271370888,
-0.055438827723264694,
0.013389740139245987,
0.05534469336271286,
-0.05823676288127899,
-0.03487321734428406,
-0.10576832294464111,
-0.06068693846464157,
0.020889900624752045,
-0.057242363691329956,
-0.07555390149354935,
0.025566529482603073,
0.003276980947703123,
0.08978051692247391,
0.031792473047971725,
-0.006599455140531063,
0.006907060742378235,
-0.013700331561267376,
-0.030279776081442833,
-0.08661229908466339,
0.02849769964814186,
-0.00988907739520073,
0.09479973465204239,
0.08129347860813141,
0.12387535721063614,
-0.01296412292867899,
-0.006017983425408602,
0.0408107154071331,
0.03225356340408325,
-0.04665059968829155,
0.03280184045433998,
0.054196104407310486,
0.071769580245018,
0.0012329051969572902,
-5.12540658803573e-8,
-0.07575272768735886,
-0.060958411544561386,
-0.008728938177227974,
-0.03864531219005585,
-0.02820807695388794,
-0.0157448910176754,
0.033885274082422256,
0.09785833209753036,
-0.047780591994524,
-0.09023236483335495,
0.07514144480228424,
0.011248395778238773,
-0.12335343658924103,
-0.04140466824173927,
-0.00980229303240776,
0.03345536068081856,
0.0449257455766201,
0.0012594959698617458,
-0.030591150745749474,
-0.047621142119169235,
0.08503574877977371,
-0.07429668307304382,
0.040661316365003586,
-0.03847438842058182,
0.018934287130832672,
0.03087630495429039,
-0.03218533471226692,
0.0535423643887043,
-0.007674666587263346,
-0.05987754091620445,
0.030044805258512497,
-0.02913631685078144,
0.057272378355264664,
0.008146969601511955,
0.05728033930063248,
0.03419939801096916,
0.08324321359395981,
-0.010144737549126148,
0.003552877577021718,
0.01921749860048294,
-0.008116164244711399,
-0.02790595218539238,
-0.04261881113052368,
0.009642419405281544,
0.12879572808742523,
-0.014840913005173206,
0.005376122891902924,
0.007272558286786079,
0.0028604217804968357,
-0.06654676795005798,
-0.03769967332482338,
-0.02045562118291855,
-0.03843165189027786,
0.08371768146753311,
0.057443730533123016,
-0.007943497970700264,
0.07853515446186066,
0.053236063569784164,
0.09555607289075851,
0.06602537631988525,
0.010955754667520523,
0.025056609883904457,
-0.04055795446038246,
-0.009780936874449253
] |
KoboldAI/fairseq-dense-13B-Janeway | da54db082f7cab156e6c7f69aaab6c048a834286 | 2022-04-07T10:51:39.000Z | [
"pytorch",
"xglm",
"text-generation",
"en",
"transformers",
"license:mit"
] | text-generation | false | KoboldAI | null | KoboldAI/fairseq-dense-13B-Janeway | 2,766 | 1 | transformers | ---
language: en
license: mit
---
# Fairseq-dense 13B - Janeway
## Model Description
Fairseq-dense 13B-Janeway is a finetune created using the dense 13B model from Fairseq's MoE LM release.
## Training data
The training data contains around 2210 ebooks, mostly in the sci-fi and fantasy genres. The dataset is identical to the dataset used by GPT-Neo-2.7B-Janeway.
Some parts of the dataset have been prepended using the following text: `[Genre: <genre1>,<genre2>]`
### How to use
You can use this model directly with a pipeline for text generation. This example generates a different sequence each time it's run:
```py
>>> from transformers import pipeline
>>> generator = pipeline('text-generation', model='KoboldAI/fairseq-dense-13B-Janeway')
>>> generator("Welcome Captain Janeway, I apologize for the delay.", do_sample=True, min_length=50)
[{'generated_text': 'Welcome Captain Janeway, I apologize for the delay."\nIt\'s all right," Janeway said. "I\'m certain that you\'re doing your best to keep me informed of what\'s going on."'}]
```
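Because parts of the training data were prepended with genre tags (see above), you can also include such a tag in the prompt to nudge generation toward a genre. This is an illustrative sketch rather than an example from the original card:
```py
>>> generator("[Genre: science fiction] Welcome aboard Voyager,", do_sample=True, min_length=50)
```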
### Limitations and Biases
Based on known problems with NLP technology, potential relevant factors include bias (gender, profession, race and religion).
### BibTeX entry and citation info
```
Artetxe et al. (2021): Efficient Large Scale Language Modeling with Mixtures of Experts
``` | [
-0.08571798354387283,
-0.01852951943874359,
-0.02684442140161991,
0.050123684108257294,
-0.005334177054464817,
0.02366558462381363,
-0.015058512799441814,
-0.01396127138286829,
-0.002692029345780611,
-0.0771937221288681,
-0.006644063629209995,
-0.07611359655857086,
0.0029199812561273575,
-0.06881463527679443,
0.023839054629206657,
0.026944000273942947,
0.009231042116880417,
-0.004400294739753008,
-0.06542472541332245,
-0.08820898085832596,
0.09712228178977966,
0.13850852847099304,
0.09197846800088882,
0.0682351291179657,
-0.012973441742360592,
-0.050233934074640274,
0.01830073446035385,
0.04978835955262184,
0.02637695148587227,
-0.019143864512443542,
0.08858145773410797,
0.07156944274902344,
-0.027774637565016747,
0.053247660398483276,
0.03777458891272545,
0.07205451279878616,
-0.03501816466450691,
-0.0411841981112957,
0.01916075125336647,
0.00958565715700388,
0.0750666931271553,
-0.03746459260582924,
-0.027347277849912643,
-0.00048599790898151696,
0.06674855202436447,
-0.09270545840263367,
-0.09795413911342621,
-0.06077411398291588,
-0.009755500592291355,
0.04938548058271408,
-0.06011427193880081,
-0.04949931800365448,
-0.02595060132443905,
0.05502413585782051,
0.019702114164829254,
0.03147106617689133,
0.036372847855091095,
-0.007684259209781885,
0.03594203665852547,
-0.11109365522861481,
-0.06475697457790375,
-0.11637186259031296,
-0.026741689071059227,
-0.03361612185835838,
-0.05870584025979042,
-0.036591969430446625,
-0.023599855601787567,
0.09838873147964478,
0.016193600371479988,
-0.022028081119060516,
-0.07512687891721725,
0.06175043061375618,
-0.06917588412761688,
0.08310060203075409,
-0.023236604407429695,
0.07548335939645767,
0.0382893867790699,
-0.04328225553035736,
0.011668364517390728,
-0.039961572736501694,
-0.026535792276263237,
-0.057870056480169296,
0.06798814237117767,
0.02044621855020523,
-0.05948714166879654,
0.0065938918851315975,
0.04050527140498161,
0.06894209235906601,
-0.02896631695330143,
0.017807316035032272,
-0.05021928995847702,
-0.02143472246825695,
0.058207932859659195,
0.02776210941374302,
-0.08451273292303085,
0.07610121369361877,
0.027459727600216866,
-0.025775592774152756,
0.08840189129114151,
0.06786248087882996,
0.023636844009160995,
0.06056596711277962,
0.041246332228183746,
0.025651291012763977,
-0.09243673086166382,
-0.11914913356304169,
0.04582725092768669,
0.014246165752410889,
-0.0519346222281456,
-0.04568767547607422,
0.06763305515050888,
-0.07062412053346634,
0.04876198619604111,
-0.06114276498556137,
0.047484368085861206,
0.07306195050477982,
-0.06412184238433838,
0.005611616652458906,
-0.03615483641624451,
0.08164102584123611,
-0.027163686230778694,
0.021385978907346725,
-0.06559618562459946,
0.051918815821409225,
-0.036129653453826904,
-0.02068750560283661,
0.008972319774329662,
5.2919146716126696e-33,
0.01348987314850092,
0.001971956342458725,
0.038167163729667664,
0.029569726437330246,
0.0893481895327568,
0.030300704762339592,
0.02472744509577751,
-0.028685113415122032,
-0.07958661764860153,
-0.012220991775393486,
-0.05687245354056358,
0.012591921724379063,
-0.09669514000415802,
-0.008684059605002403,
-0.06274576485157013,
-0.06582663208246231,
-0.048849526792764664,
0.026275865733623505,
-0.03644262254238129,
0.044456493109464645,
0.04305153340101242,
0.009352611377835274,
-0.018701177090406418,
-0.06838347762823105,
-0.05313323810696602,
-0.004897196777164936,
0.022320225834846497,
-0.045846350491046906,
-0.05155996233224869,
0.010168024338781834,
-0.10824426263570786,
0.031146008521318436,
0.071971096098423,
0.03520122542977333,
-0.027717038989067078,
-0.014874928630888462,
0.005896618124097586,
-0.04169005528092384,
-0.020577864721417427,
-0.0007368258666247129,
0.016193697229027748,
0.025449011474847794,
0.06511379033327103,
-0.034837037324905396,
-0.09208875894546509,
0.01449815183877945,
0.03567078337073326,
0.025681277737021446,
0.037592340260744095,
0.029826130717992783,
0.015392442233860493,
0.03659098222851753,
-0.034078989177942276,
0.007257625460624695,
0.035580214112997055,
0.01600757986307144,
0.025857942178845406,
0.020136239007115364,
0.11014522612094879,
0.0013776748673990369,
0.05127016827464104,
0.0796913281083107,
0.07383354008197784,
0.06423436850309372,
0.09430789202451706,
0.051743436604738235,
0.023819858208298683,
0.025385210290551186,
0.08932922780513763,
0.015765583142638206,
0.012706934474408627,
-0.032694123685359955,
-0.07386364042758942,
-0.032213080674409866,
0.055897168815135956,
-0.0650048479437828,
0.007504752837121487,
-0.025445550680160522,
-0.05952270328998566,
0.026610735803842545,
-0.02256447821855545,
0.006408246699720621,
-0.0327850766479969,
-0.04740294814109802,
-0.033812373876571655,
-0.014922237023711205,
-0.005168997682631016,
-0.052966147661209106,
0.004883065354079008,
-0.0633426308631897,
-0.06480041146278381,
-0.036576174199581146,
-0.04822416231036186,
-0.056350987404584885,
-0.005931253544986248,
-5.7079757542984334e-33,
0.05313925817608833,
0.04501251131296158,
-0.019816337153315544,
0.0845191478729248,
0.054030247032642365,
-0.022159691900014877,
0.029652362689375877,
0.05476969853043556,
0.030262595042586327,
-0.06852485984563828,
0.00273461383767426,
-0.036248546093702316,
0.0362699031829834,
-0.10951127856969833,
0.09249681979417801,
-0.051253415644168854,
0.002774403430521488,
-0.05226635932922363,
0.022671988233923912,
0.06693431735038757,
-0.058315638452768326,
-0.0071996464394032955,
-0.15779328346252441,
0.0643395185470581,
0.017999159172177315,
-0.0036115453112870455,
-0.02180526778101921,
0.04546922445297241,
0.02576330676674843,
-0.009329183958470821,
-0.0058482796885073185,
0.00864742323756218,
-0.04555933550000191,
-0.0030163016635924578,
-0.11384066939353943,
0.0027906049508601427,
0.12152790278196335,
0.03574322164058685,
-0.014608154073357582,
0.08527091890573502,
0.03159484639763832,
0.03975602611899376,
-0.09032626450061798,
0.05180762708187103,
-0.030395178124308586,
0.053056538105010986,
-0.02002391219139099,
-0.03780154511332512,
0.05223321169614792,
-0.006298241205513477,
0.018863501027226448,
0.055459629744291306,
-0.0602150559425354,
0.020399056375026703,
-0.037349633872509,
-0.08365088701248169,
-0.016139907762408257,
-0.029472339898347855,
-0.07299110293388367,
-0.045954685658216476,
-0.0752132311463356,
-0.016452424228191376,
0.03240356966853142,
-0.08544135838747025,
0.011724856682121754,
-0.10605102777481079,
-0.04585416615009308,
-0.08101492375135422,
0.03526980057358742,
0.009938539005815983,
0.018413307145237923,
-0.04367605969309807,
0.03052433766424656,
0.00640875706449151,
-0.012396037578582764,
0.021108152344822884,
-0.07811444997787476,
0.0037667262367904186,
-0.022447986528277397,
0.036968689411878586,
-0.016043009236454964,
0.08748140186071396,
0.05659855902194977,
0.0905768871307373,
0.057338640093803406,
-0.012751158326864243,
0.013617448508739471,
0.04903914034366608,
-0.033626340329647064,
0.043653495609760284,
-0.008941855281591415,
0.020978176966309547,
-0.010682293213903904,
0.10079187899827957,
-0.06188104674220085,
-5.44192069185101e-8,
-0.025228258222341537,
-0.049137044697999954,
-0.10127148777246475,
0.10225623846054077,
-0.03234969452023506,
0.0006384229636751115,
-0.004826119635254145,
0.020209264010190964,
0.007587498985230923,
-0.04191132262349129,
0.019350318238139153,
-0.028783243149518967,
-0.03351260721683502,
0.00392788415774703,
0.017506921663880348,
0.056881919503211975,
0.07404694706201553,
0.04961203783750534,
-0.019659409299492836,
0.011742099188268185,
0.03995241969823837,
0.03733345866203308,
0.004164876416325569,
-0.05018269270658493,
0.002082457998767495,
0.03637506440281868,
-0.0350223146378994,
0.016361569985747337,
-0.00386846368201077,
-0.038914188742637634,
0.024247784167528152,
-0.06664988398551941,
-0.08859502524137497,
0.028001070022583008,
0.037954192608594894,
0.028230642899870872,
0.0007310420623980463,
-0.03892567753791809,
0.041422098875045776,
0.04446825757622719,
0.051298778504133224,
0.035727035254240036,
-0.08598753809928894,
-0.0337129682302475,
-0.006744035519659519,
-0.03031902387738228,
-0.04106603562831879,
-0.08028591424226761,
0.07344066351652145,
0.0664275661110878,
-0.010714820586144924,
-0.06402751058340073,
-0.02770170383155346,
-0.03861934691667557,
0.08018235117197037,
0.03187168389558792,
0.04440981522202492,
0.034710418432950974,
-0.05208761617541313,
0.028824135661125183,
0.03754448518157005,
0.01120270136743784,
0.10248303413391113,
-0.07746676355600357
] |
philschmid/tiny-bert-sst2-distilled | 874eb28543ea7a7df80b6158bbf772d203efcab6 | 2022-01-31T18:50:41.000Z | [
"pytorch",
"bert",
"text-classification",
"dataset:glue",
"transformers",
"generated_from_trainer",
"license:apache-2.0",
"model-index"
] | text-classification | false | philschmid | null | philschmid/tiny-bert-sst2-distilled | 2,763 | null | transformers | ---
license: apache-2.0
tags:
- generated_from_trainer
datasets:
- glue
metrics:
- accuracy
model-index:
- name: tiny-bert-sst2-distilled
results:
- task:
name: Text Classification
type: text-classification
dataset:
name: glue
type: glue
args: sst2
metrics:
- name: Accuracy
type: accuracy
value: 0.8325688073394495
---
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# tiny-bert-sst2-distilled
This model is a fine-tuned version of [google/bert_uncased_L-2_H-128_A-2](https://huggingface.co/google/bert_uncased_L-2_H-128_A-2) on the GLUE SST-2 dataset.
It achieves the following results on the evaluation set:
- Loss: 1.7305
- Accuracy: 0.8326
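As a hedged usage sketch (not part of the generated card), the model can be queried through the standard text-classification pipeline; the example sentence is made up and the label names depend on the checkpoint's config:
```python
from transformers import pipeline

classifier = pipeline("text-classification", model="philschmid/tiny-bert-sst2-distilled")
# Label names come from the model config (e.g. "positive"/"negative" or "LABEL_0"/"LABEL_1")
print(classifier("This movie was surprisingly good!"))
```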
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 0.0007199555649276667
- train_batch_size: 1024
- eval_batch_size: 1024
- seed: 33
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 7
- mixed_precision_training: Native AMP
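A sketch of how these values map onto `transformers` `TrainingArguments` is shown below; it is illustrative only (the distillation `Trainer`, teacher model, and dataset loading are omitted, and the per-device batch size assumes a single device):
```python
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="tiny-bert-sst2-distilled",
    learning_rate=0.0007199555649276667,
    per_device_train_batch_size=1024,  # assuming one device; the card lists a total batch size of 1024
    per_device_eval_batch_size=1024,
    seed=33,
    num_train_epochs=7,
    lr_scheduler_type="linear",
    fp16=True,  # Native AMP mixed precision
)
```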
### Training results
| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 1.77 | 1.0 | 66 | 1.6939 | 0.8165 |
| 0.729 | 2.0 | 132 | 1.5090 | 0.8326 |
| 0.5242 | 3.0 | 198 | 1.5369 | 0.8257 |
| 0.4017 | 4.0 | 264 | 1.7025 | 0.8326 |
| 0.327 | 5.0 | 330 | 1.6743 | 0.8245 |
| 0.2749 | 6.0 | 396 | 1.7305 | 0.8337 |
| 0.2521 | 7.0 | 462 | 1.7305 | 0.8326 |
### Framework versions
- Transformers 4.12.3
- Pytorch 1.9.1
- Datasets 1.15.1
- Tokenizers 0.10.3
| [
-0.12650729715824127,
-0.039381399750709534,
0.05404707416892052,
0.05799023061990738,
0.0231813732534647,
0.03403216600418091,
0.009173563681542873,
0.0455574207007885,
-0.049630504101514816,
-0.09282848984003067,
0.061060503125190735,
-0.06735484302043915,
0.01141077745705843,
-0.001093745231628418,
-0.08711663633584976,
0.04206616431474686,
0.081838458776474,
-0.0755336657166481,
-0.10971855372190475,
0.023019131273031235,
0.02615479566156864,
0.09190146625041962,
0.03908534348011017,
0.014423901215195656,
0.01621859148144722,
0.010068581439554691,
-0.09563490748405457,
-0.0030040948186069727,
0.07518313080072403,
0.02654392644762993,
0.011100288480520248,
0.02199135161936283,
-0.015105389058589935,
0.05429542437195778,
0.02883997932076454,
0.07549096643924713,
-0.016331704333424568,
-0.03827898949384689,
0.009787309914827347,
0.00031738707912154496,
0.03667192533612251,
-0.057777710258960724,
-0.05757199227809906,
0.020378820598125458,
0.03366066515445709,
0.0034436944406479597,
-0.03574969619512558,
-0.08348095417022705,
-0.07714413106441498,
-0.03296015039086342,
-0.11721286922693253,
-0.02302689664065838,
0.07951405644416809,
-0.05866074189543724,
-0.059186529368162155,
0.06259521842002869,
-0.009002142585814,
0.003092060564085841,
-0.04404476657509804,
-0.034765273332595825,
0.011575342155992985,
-0.0026145013980567455,
-0.04988043010234833,
-0.008955867029726505,
-0.06831201910972595,
-0.005061766132712364,
0.002965543419122696,
-0.012471156194806099,
0.05153999105095863,
-0.0022482960484921932,
0.03483875095844269,
0.08124317228794098,
-0.0007030353299342096,
0.05681296065449715,
0.06249351054430008,
-0.03385394811630249,
-0.004589123651385307,
-0.007969841361045837,
0.06221151351928711,
-0.061021123081445694,
-0.06260347366333008,
-0.08062482625246048,
0.022272957488894463,
0.024347685277462006,
0.06494168192148209,
0.011072954162955284,
0.05530422180891037,
0.0017941354308277369,
0.0019157828064635396,
-0.007437502965331078,
0.011716455221176147,
-0.06042170897126198,
0.024263817816972733,
0.029964029788970947,
0.014806278981268406,
0.03845897689461708,
0.0044044130481779575,
0.022168274968862534,
-0.08621381968259811,
0.1111525371670723,
-0.0329425148665905,
0.09411478787660599,
-0.015787504613399506,
-0.041314102709293365,
0.058174025267362595,
0.036388855427503586,
-0.025957323610782623,
0.020896153524518013,
0.09130770713090897,
-0.07796355336904526,
0.06903987377882004,
-0.027839573100209236,
-0.020292993634939194,
-0.01445093099027872,
-0.03925551474094391,
0.08731059730052948,
-0.06502331793308258,
-0.006032055709511042,
-0.02200913429260254,
0.0633665919303894,
-0.01485843863338232,
-0.009793531149625778,
-0.01643401011824608,
0.0016986350528895855,
-0.07626009732484818,
-0.0413508266210556,
-0.07009518891572952,
7.146188776261183e-34,
0.021860918030142784,
0.03415960446000099,
0.003977171611040831,
-0.01105761993676424,
0.007111104670912027,
-0.034137021750211716,
-0.02633151412010193,
-0.0048154545947909355,
-0.0007938015623949468,
-0.023516075685620308,
-0.023223670199513435,
-0.018568357452750206,
-0.07716202735900879,
0.050953708589076996,
0.005756005644798279,
0.020341625437140465,
0.004705765750259161,
0.08615238964557648,
0.07844876497983932,
0.005240757018327713,
0.12844476103782654,
0.056835971772670746,
-0.00031493723508901894,
-0.13032448291778564,
-0.050646208226680756,
0.06769992411136627,
0.043266888707876205,
-0.0147069301456213,
0.0017937173834070563,
0.05809032917022705,
-0.08175696432590485,
-0.04847455024719238,
0.006992894224822521,
-0.0012965166242793202,
0.025472834706306458,
-0.040014930069446564,
0.030329912900924683,
-0.02811245433986187,
-0.025796636939048767,
-0.047568146139383316,
0.033003926277160645,
0.06101781502366066,
0.02130935899913311,
-0.07608328759670258,
0.017573336139321327,
-0.019775886088609695,
0.09281838685274124,
0.0400206483900547,
0.028889968991279602,
0.028440361842513084,
-0.008572696708142757,
-0.0003136426967103034,
0.02192305400967598,
0.006589300464838743,
-0.07323164492845535,
-0.02472595125436783,
0.0625026673078537,
0.06887248158454895,
0.024167761206626892,
-0.014044252224266529,
-0.022341154515743256,
0.021443801000714302,
0.012158947065472603,
0.024844655767083168,
0.015700209885835648,
-0.03968251496553421,
-0.018429448828101158,
-0.01940489560365677,
-0.015623475424945354,
0.017127465456724167,
-0.025328177958726883,
-0.003552882233634591,
0.018864048644900322,
-0.00027765563572756946,
0.04910682141780853,
-0.11234335601329803,
0.027093688026070595,
-0.06006675213575363,
-0.045723747462034225,
-0.013503413647413254,
0.01600910909473896,
0.04167437180876732,
-0.027914181351661682,
-0.09917628020048141,
-0.12464959174394608,
-0.07151248306035995,
0.031125212088227272,
-0.05648164823651314,
-0.015346313826739788,
0.065938800573349,
-0.002690979279577732,
0.004824443254619837,
-0.05293285846710205,
-0.010614637285470963,
-0.06326918303966522,
-2.10686185119481e-33,
-0.0588499940931797,
0.012166468426585197,
-0.039068784564733505,
0.06598184257745743,
-0.028462601825594902,
-0.04317127540707588,
0.0029874679166823626,
0.17754490673542023,
0.0125814750790596,
-0.028112266212701797,
0.09532815963029861,
0.021179309114813805,
-0.046412404626607895,
-0.07311008125543594,
0.028883954510092735,
0.07270969450473785,
-0.04551169276237488,
0.026764053851366043,
0.015486471354961395,
0.031160106882452965,
0.08473759144544601,
0.06746715307235718,
-0.04658224806189537,
0.11563326418399811,
0.024201104417443275,
0.05706508457660675,
0.03730085492134094,
0.07076920568943024,
0.02356039732694626,
-0.022501448169350624,
0.038487114012241364,
-0.017138918861746788,
-0.07896682620048523,
0.040525034070014954,
-0.05143394693732262,
-0.003692029044032097,
0.03772157058119774,
-0.02679041586816311,
-0.011772570200264454,
0.016019552946090698,
0.056919146329164505,
0.009135633707046509,
-0.0756014883518219,
0.06266028434038162,
0.0019680585246533155,
-0.021398546174168587,
-0.00591242266818881,
-0.059583842754364014,
0.06776920706033707,
-0.05818728357553482,
0.04459088295698166,
-0.07317164540290833,
-0.05847890302538872,
-0.017519811168313026,
-0.046019766479730606,
-0.020495083183050156,
0.026882944628596306,
-0.015520951710641384,
-0.12609276175498962,
0.01527788583189249,
-0.018982458859682083,
0.010027080774307251,
-0.04757121950387955,
0.0012208889238536358,
0.04591875523328781,
-0.02991069294512272,
-0.06745351105928421,
0.049580980092287064,
0.004376251716166735,
-0.008734818547964096,
-0.048373959958553314,
0.0508885458111763,
0.048273082822561264,
-0.037334032356739044,
-0.004709551110863686,
0.016656117513775826,
-0.028184643015265465,
-0.022971851751208305,
-0.0006778316455893219,
-0.050838544964790344,
-0.05527596175670624,
-0.03289808705449104,
0.07484849542379379,
0.03239274024963379,
0.06863158941268921,
0.007247679401189089,
0.04737243056297302,
0.10205669701099396,
-0.04483427479863167,
0.032536737620830536,
-0.018667571246623993,
0.050127990543842316,
0.0030800404492765665,
0.14813153445720673,
-0.020914822816848755,
-5.6015082350313605e-8,
0.000795772997662425,
-0.011572766117751598,
-0.09506651759147644,
0.06357859820127487,
-0.06757581233978271,
-0.07183434069156647,
-0.08024342358112335,
0.008172603324055672,
-0.060764629393815994,
-0.007339318748563528,
0.023083528503775597,
0.031587786972522736,
-0.1446400135755539,
0.0289352685213089,
0.028817495331168175,
-0.04803813621401787,
-0.056334175169467926,
0.08639483153820038,
-0.05354210361838341,
-0.10410968959331512,
0.05367265269160271,
-0.025678178295493126,
0.05804594233632088,
-0.042315706610679626,
0.009245079010725021,
-0.07696419209241867,
-0.03648542985320091,
0.11584540456533432,
-0.02456388622522354,
0.02518659085035324,
-0.0042800577357411385,
0.009814536198973656,
-0.06972285360097885,
0.012420771643519402,
0.0571536049246788,
0.059025783091783524,
-0.04299779236316681,
-0.018845785409212112,
0.033948563039302826,
0.07994654774665833,
0.023856285959482193,
0.09860967844724655,
-0.09587610512971878,
0.024740522727370262,
0.08166194707155228,
-0.02974768728017807,
0.028209328651428223,
-0.06794822961091995,
0.07122048735618591,
0.05046007037162781,
0.026360705494880676,
-0.024091409519314766,
-0.07281480729579926,
0.020738190039992332,
-0.037473831325769424,
0.01779532991349697,
-0.049233317375183105,
-0.013660769909620285,
-0.015800872817635536,
-0.04655538126826286,
0.012400423176586628,
-0.012434784322977066,
0.03889910504221916,
0.07611716538667679
] |
google/tapas-large-finetuned-sqa | f214f24bdb51550ced615bac82668a1bc0e26806 | 2021-11-29T13:03:46.000Z | [
"pytorch",
"tf",
"tapas",
"table-question-answering",
"en",
"dataset:msr_sqa",
"arxiv:2004.02349",
"arxiv:2010.00571",
"transformers",
"license:apache-2.0"
] | table-question-answering | false | google | null | google/tapas-large-finetuned-sqa | 2,757 | 1 | transformers | ---
language: en
tags:
- tapas
license: apache-2.0
datasets:
- msr_sqa
---
# TAPAS large model fine-tuned on Sequential Question Answering (SQA)
This model has 2 versions which can be used. The default version corresponds to the `tapas_sqa_inter_masklm_large_reset` checkpoint of the [original Github repository](https://github.com/google-research/tapas).
This model was pre-trained with a masked language modeling (MLM) objective and an additional step which the authors call intermediate pre-training, and then fine-tuned on [SQA](https://www.microsoft.com/en-us/download/details.aspx?id=54253). It uses relative position embeddings (i.e. resetting the position index at every cell of the table).
The other (non-default) version which can be used is:
- `no_reset`, which corresponds to `tapas_sqa_inter_masklm_large` (intermediate pre-training, absolute position embeddings).
Disclaimer: The team releasing TAPAS did not write a model card for this model so this model card has been written by
the Hugging Face team and contributors.
## Results on SQA - Dev Accuracy
Size | Reset | Dev Accuracy | Link
-------- | --------| -------- | ----
**LARGE** | **noreset** | **0.7223** | [tapas-large-finetuned-sqa (absolute pos embeddings)](https://huggingface.co/google/tapas-large-finetuned-sqa/tree/no_reset)
**LARGE** | **reset** | **0.7289** | [tapas-large-finetuned-sqa](https://huggingface.co/google/tapas-large-finetuned-sqa/tree/main)
BASE | noreset | 0.6737 | [tapas-base-finetuned-sqa (absolute pos embeddings)](https://huggingface.co/google/tapas-base-finetuned-sqa/tree/no_reset)
BASE | reset | 0.6874 | [tapas-base-finetuned-sqa](https://huggingface.co/google/tapas-base-finetuned-sqa/tree/main)
MEDIUM | noreset | 0.6464 | [tapas-medium-finetuned-sqa (absolute pos embeddings)](https://huggingface.co/google/tapas-medium-finetuned-sqa/tree/no_reset)
MEDIUM | reset | 0.6561 | [tapas-medium-finetuned-sqa](https://huggingface.co/google/tapas-medium-finetuned-sqa/tree/main)
SMALL | noreset | 0.5876 | [tapas-small-finetuned-sqa (absolute pos embeddings)](https://huggingface.co/google/tapas-small-finetuned-sqa/tree/no_reset)
SMALL | reset | 0.6155 | [tapas-small-finetuned-sqa](https://huggingface.co/google/tapas-small-finetuned-sqa/tree/main)
MINI | noreset | 0.4574 | [tapas-mini-finetuned-sqa (absolute pos embeddings)](https://huggingface.co/google/tapas-mini-finetuned-sqa/tree/no_reset)
MINI | reset | 0.5148 | [tapas-mini-finetuned-sqa](https://huggingface.co/google/tapas-mini-finetuned-sqa/tree/main)
TINY | noreset | 0.2004 | [tapas-tiny-finetuned-sqa (absolute pos embeddings)](https://huggingface.co/google/tapas-tiny-finetuned-sqa/tree/no_reset)
TINY | reset | 0.2375 | [tapas-tiny-finetuned-sqa](https://huggingface.co/google/tapas-tiny-finetuned-sqa/tree/main)
## Model description
TAPAS is a BERT-like transformers model pretrained on a large corpus of English data from Wikipedia in a self-supervised fashion.
This means it was pretrained on the raw tables and associated texts only, with no humans labelling them in any way (which is why it
can use lots of publicly available data) with an automatic process to generate inputs and labels from those texts. More precisely, it
was pretrained with two objectives:
- Masked language modeling (MLM): taking a (flattened) table and associated context, the model randomly masks 15% of the words in
the input, then runs the entire (partially masked) sequence through the model. The model then has to predict the masked words.
This is different from traditional recurrent neural networks (RNNs) that usually see the words one after the other,
or from autoregressive models like GPT which internally mask the future tokens. It allows the model to learn a bidirectional
representation of a table and associated text.
- Intermediate pre-training: to encourage numerical reasoning on tables, the authors additionally pre-trained the model by creating
a balanced dataset of millions of syntactically created training examples. Here, the model must predict (classify) whether a sentence
is supported or refuted by the contents of a table. The training examples are created based on synthetic as well as counterfactual statements.
This way, the model learns an inner representation of the English language used in tables and associated texts, which can then be used
to extract features useful for downstream tasks such as answering questions about a table, or determining whether a sentence is entailed
or refuted by the contents of a table. Fine-tuning is done by adding a cell selection head on top of the pre-trained model, and then jointly
training this randomly initialized classification head with the base model on SQA.
## Intended uses & limitations
You can use this model for answering questions related to a table in a conversational set-up.
For code examples, we refer to the documentation of TAPAS on the HuggingFace website.
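As a quick, non-authoritative sketch, the `table-question-answering` pipeline can be used roughly as follows; the table and questions are invented, and depending on your `transformers` version TAPAS may additionally require the `torch-scatter` package:
```python
import pandas as pd
from transformers import pipeline

tqa = pipeline("table-question-answering", model="google/tapas-large-finetuned-sqa")

table = pd.DataFrame(
    {
        "Actor": ["Brad Pitt", "Leonardo Di Caprio", "George Clooney"],
        "Number of movies": ["87", "53", "69"],
    }
)
# SQA is conversational: later questions may refer back to earlier ones
queries = ["How many movies has George Clooney played in?", "And Leonardo Di Caprio?"]

for result in tqa(table=table, query=queries, sequential=True):
    print(result["answer"])
```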
## Training procedure
### Preprocessing
The texts are lowercased and tokenized using WordPiece and a vocabulary size of 30,000. The inputs of the model are
then of the form:
```
[CLS] Question [SEP] Flattened table [SEP]
```
### Fine-tuning
The model was fine-tuned on 32 Cloud TPU v3 cores for 200,000 steps with maximum sequence length 512 and batch size of 128.
In this setup, fine-tuning takes around 20 hours. The optimizer used is Adam with a learning rate of 1.25e-5, and a warmup ratio
of 0.2. An inductive bias is added such that the model only selects cells of the same column. This is reflected by the
`select_one_column` parameter of `TapasConfig`. See also table 12 of the [original paper](https://arxiv.org/abs/2004.02349).
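As a small, hedged illustration, the column-restricted cell selection described above can be inspected directly on the released checkpoint's configuration:
```python
from transformers import TapasConfig

config = TapasConfig.from_pretrained("google/tapas-large-finetuned-sqa")
print(config.select_one_column)  # expected to be True for the SQA-style checkpoints
```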
### BibTeX entry and citation info
```bibtex
@misc{herzig2020tapas,
title={TAPAS: Weakly Supervised Table Parsing via Pre-training},
author={Jonathan Herzig and Paweł Krzysztof Nowak and Thomas Müller and Francesco Piccinno and Julian Martin Eisenschlos},
year={2020},
eprint={2004.02349},
archivePrefix={arXiv},
primaryClass={cs.IR}
}
```
```bibtex
@misc{eisenschlos2020understanding,
title={Understanding tables with intermediate pre-training},
author={Julian Martin Eisenschlos and Syrine Krichene and Thomas Müller},
year={2020},
eprint={2010.00571},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
```
```bibtex
@InProceedings{iyyer2017search-based,
author = {Iyyer, Mohit and Yih, Scott Wen-tau and Chang, Ming-Wei},
title = {Search-based Neural Structured Learning for Sequential Question Answering},
booktitle = {Proceedings of the 55th Annual Meeting of the Association for Computational Linguistics},
year = {2017},
month = {July},
abstract = {Recent work in semantic parsing for question answering has focused on long and complicated questions, many of which would seem unnatural if asked in a normal conversation between two humans. In an effort to explore a conversational QA setting, we present a more realistic task: answering sequences of simple but inter-related questions. We collect a dataset of 6,066 question sequences that inquire about semi-structured tables from Wikipedia, with 17,553 question-answer pairs in total. To solve this sequential question answering task, we propose a novel dynamic neural semantic parsing framework trained using a weakly supervised reward-guided search. Our model effectively leverages the sequential context to outperform state-of-the-art QA systems that are designed to answer highly complex questions.},
publisher = {Association for Computational Linguistics},
url = {https://www.microsoft.com/en-us/research/publication/search-based-neural-structured-learning-sequential-question-answering/},
}
``` | [
-0.05262656882405281,
-0.15270277857780457,
0.004848074167966843,
-0.005159073509275913,
-0.09047112613916397,
0.03620181232690811,
-0.05659937486052513,
-0.0352727472782135,
0.0075284154154360294,
0.0007312156958505511,
0.05080278962850571,
0.056864045560359955,
-0.009098918177187443,
0.006142688449472189,
-0.11987791210412979,
0.06515559554100037,
0.0913730189204216,
-0.005239757243543863,
-0.09968693554401398,
-0.014484794810414314,
0.04484064504504204,
0.020215317606925964,
0.06673790514469147,
-0.0049316794611513615,
0.04908362030982971,
-0.05006025731563568,
-0.01751820184290409,
0.05006769299507141,
0.1145382970571518,
-0.061903052031993866,
0.034288324415683746,
0.07924976944923401,
0.016176564618945122,
0.07223019748926163,
-0.06587813794612885,
0.05618847906589508,
-0.06207061558961868,
-0.06221212446689606,
0.044336315244436264,
0.017089329659938812,
-0.008409452624619007,
-0.019561873748898506,
-0.005457940511405468,
-0.027284255251288414,
0.09678981453180313,
0.011845319531857967,
-0.04192481189966202,
0.012369017116725445,
0.010686407797038555,
-0.0015396742383018136,
-0.07115667313337326,
-0.014806780032813549,
0.007796512451022863,
0.07443869113922119,
0.03078600950539112,
-0.012073147110641003,
-0.013658473268151283,
-0.027003705501556396,
0.04755821079015732,
0.05794794484972954,
-0.014599356800317764,
-0.026735441759228706,
-0.01989278756082058,
-0.008577198721468449,
-0.002113267546519637,
0.02766282670199871,
-0.024224644526839256,
-0.014472274109721184,
0.027086462825536728,
0.02039777673780918,
-0.015921620652079582,
0.01653514802455902,
-0.015523332171142101,
0.03369676694273949,
0.0011480096727609634,
0.020909268409013748,
0.02924908511340618,
0.05013391375541687,
0.02788468450307846,
-0.01941358484327793,
0.052788734436035156,
-0.06452599167823792,
0.03177127614617348,
-0.025437194854021072,
0.05831150710582733,
-0.017817100510001183,
0.04744262993335724,
0.03142206370830536,
0.07688390463590622,
-0.011318746022880077,
-0.012573782354593277,
0.034168537706136703,
0.00548965809866786,
0.008859334513545036,
0.01776236854493618,
0.030027935281395912,
0.0657413974404335,
-0.0026559883262962103,
-0.053787533193826675,
0.06099553778767586,
0.062009721994400024,
-0.002544874558225274,
0.018026387318968773,
-0.0025245677679777145,
-0.038520559668540955,
-0.07645468413829803,
0.01175693143159151,
0.09784882515668869,
0.007689756341278553,
-0.04905177280306816,
0.03203964605927467,
0.041969191282987595,
-0.01017572171986103,
-0.015572176314890385,
-0.07797075062990189,
-0.00021477582049556077,
-0.013360061682760715,
-0.052425578236579895,
0.03909585624933243,
-0.030986106023192406,
-0.052641090005636215,
-0.006994147319346666,
-0.0635170042514801,
-0.01893746852874756,
0.03931647539138794,
0.046160291880369186,
-0.10629641264677048,
9.023061576133262e-34,
0.06616056710481644,
-0.012225421145558357,
0.00857649464160204,
-0.08907193690538406,
0.034616295248270035,
-0.004916079808026552,
0.03091505542397499,
0.007108999881893396,
0.02715376950800419,
0.071632020175457,
-0.013370639644563198,
-0.008334383368492126,
-0.07121522724628448,
-0.013825317844748497,
-0.0023183703888207674,
-0.006266267504543066,
-0.018303269520401955,
0.021300792694091797,
-0.061447277665138245,
-0.01818421110510826,
0.12308334559202194,
-0.035591304302215576,
-0.004510355647653341,
-0.051704179495573044,
0.03387310728430748,
0.03802161291241646,
0.06459787487983704,
-0.09006360918283463,
-0.010236602276563644,
0.023126035928726196,
-0.11088690161705017,
-0.00948333740234375,
-0.05061263591051102,
-0.0821729376912117,
0.03119547665119171,
-0.04244939237833023,
0.005199565086513758,
-0.0886702910065651,
0.02873842418193817,
-0.047978416085243225,
0.06268025189638138,
0.08664434403181076,
0.07034993171691895,
-0.051035307347774506,
-0.04136771336197853,
-0.10694339126348495,
-0.05678531527519226,
0.008647902868688107,
-0.019045645371079445,
0.026656728237867355,
0.0192380603402853,
-0.0006612287252210081,
-0.06953515112400055,
-0.05642588436603546,
0.008878923021256924,
-0.05641254410147667,
0.02378673292696476,
0.02865133062005043,
0.0034922081977128983,
0.08253416419029236,
-0.03729545325040817,
-0.09656766802072525,
-0.04432205110788345,
0.046548500657081604,
0.05191696807742119,
-0.0148019315674901,
-0.08811728656291962,
-0.034578002989292145,
0.07464731484651566,
0.008078353479504585,
-0.05609233304858208,
-0.0639505684375763,
-0.021523380652070045,
0.0282778050750494,
-0.016575315967202187,
-0.09445797652006149,
0.015123995020985603,
-0.04260959476232529,
-0.004683864302933216,
-0.00028117457986809313,
0.03995347023010254,
0.09699840098619461,
-0.03803643956780434,
-0.05003939941525459,
-0.07053187489509583,
-0.13495737314224243,
0.06363517791032791,
-0.09573156386613846,
-0.006972445175051689,
-0.0640091672539711,
-0.0017142678843811154,
0.031209200620651245,
0.047072429209947586,
-0.0024307130370289087,
-0.04963646084070206,
-9.079466019518539e-34,
0.02101152390241623,
0.0185770895332098,
-0.015420951880514622,
0.10840896517038345,
0.025227518752217293,
-0.04047592356801033,
0.06348443776369095,
0.09686976671218872,
-0.021565785631537437,
-0.07133528590202332,
0.002976688090711832,
-0.02231917902827263,
0.04077025502920151,
-0.024147454649209976,
0.050689052790403366,
0.06574703007936478,
0.014902176335453987,
-0.02399517223238945,
-0.03845397010445595,
0.0785391628742218,
-0.023121776059269905,
0.018567463383078575,
0.0341440811753273,
0.06544207781553268,
0.030029043555259705,
-0.005768836010247469,
0.027167707681655884,
0.08676812052726746,
0.0015010908246040344,
-0.0017110771732404828,
-0.013242670334875584,
-0.011286815628409386,
-0.02542116492986679,
-0.013048965483903885,
-0.07248198240995407,
0.017134010791778564,
0.04826819896697998,
-0.09488267451524734,
0.07050103694200516,
0.10540961474180222,
0.1517239809036255,
0.013303915038704872,
-0.07046658545732498,
0.0912189781665802,
-0.028074918314814568,
0.0276448093354702,
-0.05712156742811203,
-0.0014683420304208994,
-0.07789657264947891,
-0.03560633957386017,
-0.030171087011694908,
-0.006238511297851801,
0.004891066811978817,
0.01962871290743351,
0.04317246749997139,
-0.0017320850165560842,
0.007372256834059954,
-0.0902799442410469,
-0.026788931339979172,
-0.045088332146406174,
0.05506807938218117,
0.02784716710448265,
0.028073851019144058,
-0.09889591485261917,
0.028331806883215904,
0.07694950699806213,
0.02793678268790245,
0.0015171068953350186,
-0.07838965952396393,
-0.014534367248415947,
-0.038561757653951645,
-0.02666996791958809,
0.010698072612285614,
0.01178787462413311,
0.011953184381127357,
-0.03505076840519905,
-0.07094375044107437,
-0.002636062214151025,
0.0064321113750338554,
-0.10473082214593887,
-0.08290823549032211,
0.05639687925577164,
0.02785838209092617,
0.030496029183268547,
0.10123317688703537,
0.1498032957315445,
0.04809083417057991,
0.03367334231734276,
-0.017100706696510315,
0.007515265140682459,
-0.03259605914354324,
-0.037922270596027374,
-0.006140032317489386,
0.1136002317070961,
0.02451847866177559,
-5.641415512513959e-8,
-0.12830689549446106,
0.04641580209136009,
-0.06974023580551147,
0.009253321215510368,
0.0475243404507637,
-0.09161558002233505,
-0.011823891662061214,
0.1043681800365448,
-0.002390343463048339,
-0.03333123028278351,
0.046330925077199936,
0.04864216595888138,
-0.07667192816734314,
-0.04373783990740776,
0.040967851877212524,
0.08489370346069336,
0.0007899667834863067,
-0.016064058989286423,
-0.05395003780722618,
0.020417168736457825,
0.022804483771324158,
0.062272172421216965,
0.034671518951654434,
0.020130975171923637,
0.02747250907123089,
-0.007888222113251686,
-0.054475750774145126,
0.1749313473701477,
0.037232473492622375,
-0.01625625602900982,
0.033029790967702866,
0.01793111115694046,
0.04534558951854706,
-0.04055900126695633,
-0.032559413462877274,
0.03916897997260094,
-0.01333814486861229,
0.02671741135418415,
0.008369380608201027,
0.020199237391352654,
0.021390823647379875,
0.013966411352157593,
-0.06360625475645065,
-0.012589891441166401,
0.020938042551279068,
0.0006551053957082331,
-0.0688977763056755,
-0.09585387259721756,
0.03322768583893776,
-0.014937478117644787,
-0.009653884917497635,
-0.014826425351202488,
-0.017476731911301613,
0.07634621858596802,
0.05074068158864975,
0.03063533641397953,
-0.017932888120412827,
-0.0450119748711586,
0.052948493510484695,
0.037686120718717575,
0.05873553827404976,
-0.01173796784132719,
-0.07075239717960358,
0.0329735241830349
] |
tunib/electra-ko-en-small | ac899d8d102ccec10ad2a0ee6a1ab12b5f7eac41 | 2021-09-17T08:59:47.000Z | [
"pytorch",
"electra",
"pretraining",
"arxiv:2003.10555",
"transformers"
] | null | false | tunib | null | tunib/electra-ko-en-small | 2,752 | 4 | transformers | # TUNiB-Electra
We release several new versions of the [ELECTRA](https://arxiv.org/abs/2003.10555) model, which we name TUNiB-Electra. There are two motivations. First, all the existing pre-trained Korean encoder models are monolingual, that is, they have knowledge about Korean only. Our bilingual models are based on the balanced corpora of Korean and English. Second, we want new off-the-shelf models trained on much more texts. To this end, we collected a large amount of Korean text from various sources such as blog posts, comments, news, web novels, etc., which sum up to 100 GB in total.
## How to use
You can use this model directly with [transformers](https://github.com/huggingface/transformers) library:
```python
from transformers import AutoModel, AutoTokenizer
# Small Model (Korean-English bilingual model)
tokenizer = AutoTokenizer.from_pretrained('tunib/electra-ko-en-small')
model = AutoModel.from_pretrained('tunib/electra-ko-en-small')
```
### Tokenizer example
```python
>>> from transformers import AutoTokenizer
>>> tokenizer = AutoTokenizer.from_pretrained('tunib/electra-ko-en-small')
>>> tokenizer.tokenize("tunib is a natural language processing tech startup.")
['tun', '##ib', 'is', 'a', 'natural', 'language', 'processing', 'tech', 'startup', '.']
>>> tokenizer.tokenize("튜닙은 자연어처리 테크 스타트업입니다.")
['튜', '##닙', '##은', '자연', '##어', '##처리', '테크', '스타트업', '##입니다', '.']
```
## Results on Korean downstream tasks
| |**# Params** |**Avg.**| **NSMC**<br/>(acc) | **Naver NER**<br/>(F1) | **PAWS**<br/>(acc) | **KorNLI**<br/>(acc) | **KorSTS**<br/>(spearman) | **Question Pair**<br/>(acc) | **KorQuaD (Dev)**<br/>(EM/F1) |**Korean-Hate-Speech (Dev)**<br/>(F1)|
| :----------------:| :----------------: | :--------------------: | :----------------: | :------------------: | :-----------------------: | :-------------------------: | :---------------------------: | :---------------------------: | :---------------------------: | :----------------: |
|***TUNiB-Electra-ko-small*** | 14M | 81.29| **89.56** | 84.98 | 72.85 | 77.08 | 78.76 | **94.98** | 61.17 / 87.64 | **64.50** |
|***TUNiB-Electra-ko-en-small*** | 18M | 81.44 | 89.28 | 85.15 | 75.75 | 77.06 | 77.61 | 93.79 | 80.55 / 89.77 |63.13 |
| [KoELECTRA-small-v3](https://github.com/monologg/KoELECTRA) | 14M | **82.58** | 89.36 | **85.40** | **77.45** | **78.60** | **80.79** | 94.85 | **82.11 / 91.13** | 63.07 |
## Results on English downstream tasks
| |**# Params** | **Avg.** |**CoLA**<br/>(MCC) | **SST**<br/>(Acc) |MRPC<br/>(Acc)| **STS**<br/>(Spearman) | **QQP**<br/>(Acc) | **MNLI**<br/>(Acc) | **QNLI**<br/>(Acc) | **RTE**<br/>(Acc) |
| :----------------:| :----------------: | :--------------------: | :----------------: | :------------------: | :-----------------------: | :-------------------------: | :---------------------------: | :---------------------------: | :---------------------------: | :---------------------------: |
|***TUNiB-Electra-ko-en-small*** | 18M | **80.44** | **56.76** | 88.76 | **88.73** | **86.12** | **88.66** | 79.03 | 87.26 |**68.23** |
|[ELECTRA-small](https://github.com/google-research/electra) | 13M | 79.71 | 55.6 | **91.1** | 84.9| 84.6 | 88.0 | **81.6** | **88.3** | 63.6 |
|[BERT-small](https://github.com/google-research/bert) | 13M | 74.06| 27.8 | 89.7 | 83.4| 78.8 | 87.0 | 77.6 | 86.4 | 61.8 |
| [
-0.0890570729970932,
-0.03428719937801361,
-0.02316685952246189,
0.022549355402588844,
0.0004963329993188381,
0.01371857151389122,
-0.026692721992731094,
-0.0067553143016994,
0.0027414385695010424,
-0.024283697828650475,
0.031424395740032196,
-0.035893943160772324,
0.025975177064538002,
0.005563844460994005,
0.040454450994729996,
-0.0028659054078161716,
0.012581114657223225,
0.028776468709111214,
-0.06755988299846649,
-0.1251477301120758,
0.06391908973455429,
-0.015547927469015121,
0.037447839975357056,
-0.013174835592508316,
0.047337453812360764,
-0.06119486317038536,
-0.0030048456974327564,
-0.003798306919634342,
0.005767665337771177,
-0.049289435148239136,
0.014480061829090118,
0.06634019315242767,
0.003882133634760976,
0.06547632813453674,
-0.005159800406545401,
0.04201536625623703,
-0.06754611432552338,
0.0058312127366662025,
-0.06173445284366608,
-0.021361196413636208,
-0.023920275270938873,
-0.046455372124910355,
0.0174589604139328,
-0.004956468939781189,
0.11857883632183075,
-0.022846398875117302,
-0.02528698556125164,
-0.026908280327916145,
0.011829917319118977,
-0.023663852363824844,
-0.008150513283908367,
-0.0006331691984087229,
0.03675810620188713,
0.07160454243421555,
-0.03878898173570633,
-0.05553105100989342,
-0.0030780562665313482,
0.011709686368703842,
0.03594246134161949,
-0.005821419879794121,
-0.07721851766109467,
-0.01331231277436018,
-0.06542117148637772,
0.009562315419316292,
-0.09713047742843628,
-0.01819094456732273,
0.08619198948144913,
0.04616302624344826,
-0.03616388514637947,
0.012968934141099453,
-0.005586640909314156,
0.04183657094836235,
0.021259183064103127,
0.08328238129615784,
-0.033976808190345764,
-0.03674071282148361,
0.11979489773511887,
-0.004211978521198034,
0.028318550437688828,
-0.03994655981659889,
-0.00020833959570154548,
0.014207967557013035,
0.03516281023621559,
-0.016586219891905785,
-0.005523383617401123,
-0.04858793690800667,
0.005746598355472088,
-0.005909985862672329,
0.006093024741858244,
0.029153630137443542,
0.03688972815871239,
-0.009202967397868633,
0.0980021059513092,
-0.0030315604526549578,
-0.05092679709196091,
0.0551033653318882,
-0.05361687391996384,
0.01645720936357975,
-0.012030530720949173,
0.04621544107794762,
0.01228682603687048,
0.04703814908862114,
0.040717385709285736,
-0.020765390247106552,
-0.11848867684602737,
-0.10818472504615784,
0.0862208679318428,
0.05453082174062729,
0.013703207485377789,
-0.0009448400815017521,
0.013416358269751072,
0.022148372605443,
-0.054239191114902496,
0.009255938231945038,
0.03929372504353523,
-0.0025203891564160585,
0.039712902158498764,
-0.02810746058821678,
0.03794737905263901,
0.0987715795636177,
-0.031221924349665642,
0.010087931528687477,
-0.04786284640431404,
-0.026908518746495247,
-0.019081898033618927,
0.010868102312088013,
0.03871428593993187,
4.362315371883173e-33,
0.04296351224184036,
0.10474886745214462,
0.0021117806900292635,
0.039103396236896515,
-0.05276838317513466,
0.03477592021226883,
0.011746624484658241,
0.05040080100297928,
-0.08914957195520401,
-0.03305746242403984,
-0.11181218177080154,
0.15518462657928467,
-0.05929161235690117,
0.07434011995792389,
-0.028615016490221024,
-0.018460841849446297,
-0.0600733608007431,
0.03261709585785866,
0.027796121314167976,
0.03870386630296707,
0.12158933281898499,
0.022411687299609184,
0.049840718507766724,
-0.04626272991299629,
-0.03071345016360283,
-0.02450324222445488,
0.06790371239185333,
-0.09491948038339615,
-0.04245065152645111,
0.029841236770153046,
-0.07913987338542938,
-0.02089831605553627,
0.03127271309494972,
0.019315671175718307,
-0.0167325958609581,
-0.03504582494497299,
-0.019444921985268593,
-0.02032601274549961,
-0.02560018189251423,
-0.092436783015728,
0.053731124848127365,
0.048507288098335266,
-0.06151451915502548,
-0.00818893127143383,
-0.050479017198085785,
0.0301554873585701,
0.05884212628006935,
-0.03388078138232231,
0.036950889974832535,
0.021877257153391838,
0.04862504079937935,
-0.026043731719255447,
-0.09913871437311172,
-0.005341034382581711,
0.07255107164382935,
0.09101561456918716,
0.06850986182689667,
-0.006862976588308811,
0.10471641272306442,
-0.044595737010240555,
-0.04636222496628761,
0.02410333976149559,
0.06940484046936035,
0.028145257383584976,
0.1369769424200058,
0.02211301214993,
0.00160064862575382,
-0.03552725538611412,
-0.009406360797584057,
-0.0725526213645935,
-0.03564159944653511,
-0.08722151070833206,
0.016485802829265594,
-0.021266980096697807,
-0.0034097209572792053,
-0.03741522505879402,
-0.030361728742718697,
-0.05559955909848213,
-0.01901550032198429,
0.033915530890226364,
-0.04330325499176979,
-0.05150733143091202,
0.04553195461630821,
-0.02142081968486309,
0.039863213896751404,
0.002779636299237609,
0.05098127946257591,
-0.038742419332265854,
-0.00844690389931202,
0.014127952978014946,
-0.014174303971230984,
0.04693951457738876,
-0.03354749083518982,
-0.07143968343734741,
0.02659199759364128,
-4.1661580581675655e-33,
0.024001609534025192,
0.0021714293397963047,
-0.039304427802562714,
0.06915352493524551,
-0.0457034558057785,
-0.049987029284238815,
0.029552409425377846,
0.1610998958349228,
-0.0045725442469120026,
-0.031839389353990555,
0.005676780361682177,
-0.07699146866798401,
0.1595715880393982,
-0.041022706776857376,
0.051878467202186584,
-0.07562136650085449,
0.02745825983583927,
0.08150597661733627,
0.06219170242547989,
0.09080614149570465,
-0.09267473965883255,
0.03662921488285065,
-0.11958446353673935,
0.010091084986925125,
0.003544541308656335,
0.015052278526127338,
-0.03710811212658882,
0.07426179200410843,
-0.028255710378289223,
-0.006049918010830879,
-0.026798190549016,
-0.03658140078186989,
-0.03393830731511116,
-0.006699312012642622,
-0.06214779242873192,
-0.006115504074841738,
0.016916867345571518,
-0.0001678336557233706,
-0.01156411413103342,
0.049855343997478485,
0.04070413485169411,
-0.000409054133342579,
-0.08310891687870026,
-0.02559819631278515,
-0.019976353272795677,
-0.07945138216018677,
-0.11179685592651367,
-0.025123247876763344,
0.005299444776028395,
-0.08427190035581589,
0.06678854674100876,
-0.022523170337080956,
-0.10165704041719437,
-0.025144806131720543,
0.0016435344005003572,
-0.06993190944194794,
0.031830377876758575,
-0.08997603505849838,
0.005369293037801981,
-0.052245818078517914,
-0.058193765580654144,
-0.046686604619026184,
0.0744151696562767,
-0.04752446711063385,
-0.005208740476518869,
-0.08230762928724289,
0.13190366327762604,
-0.026644259691238403,
0.06793612241744995,
0.015044697560369968,
0.04410219192504883,
-0.006619416642934084,
0.02106606401503086,
-0.002386414911597967,
-0.04491669312119484,
0.045221805572509766,
-0.04362010583281517,
0.024624116718769073,
0.05784828960895538,
-0.084865041077137,
-0.048397667706012726,
0.06492950022220612,
0.04296491667628288,
0.001730345655232668,
0.04467666149139404,
0.024159802123904228,
-0.020622428506612778,
0.05132964625954628,
0.06299737095832825,
0.005703502334654331,
-0.029638083651661873,
0.050257254391908646,
-0.010503995232284069,
0.0946260318160057,
-0.021904228255152702,
-4.8630525384396606e-8,
-0.010338976979255676,
0.0028357463888823986,
-0.015278659760951996,
0.07353460788726807,
-0.06064000725746155,
-0.04038909822702408,
-0.029514404013752937,
0.007579046301543713,
0.0007510334835387766,
0.0002583985624369234,
0.07617735117673874,
0.0035055982880294323,
-0.03005596436560154,
0.004840126261115074,
-0.004472764674574137,
0.05896629020571709,
0.11182570457458496,
0.07870771735906601,
-0.016967806965112686,
0.0659056007862091,
0.06045705825090408,
0.03853580728173256,
0.020991291850805283,
-0.06531587243080139,
0.05311012268066406,
0.01547317299991846,
-0.08298908919095993,
0.03228071331977844,
0.0078058685176074505,
-0.12783731520175934,
-0.020137660205364227,
-0.03331401199102402,
-0.05420601740479469,
0.01349077932536602,
0.01289050281047821,
0.03754488751292229,
-0.02234639786183834,
-0.04557699337601662,
0.012976175174117088,
0.035788316279649734,
0.09132382273674011,
-0.013264695182442665,
-0.06498249620199203,
-0.026673266664147377,
0.0007856183801777661,
-0.04143768921494484,
-0.05297926440834999,
-0.11400129646062851,
-0.014989347197115421,
0.06421922892332077,
0.01479482464492321,
-0.08606777340173721,
-0.10814084112644196,
0.038351450115442276,
0.04950516298413277,
0.020053721964359283,
-0.0028080318588763475,
-0.022107381373643875,
0.01299657579511404,
0.06416479498147964,
-0.024175062775611877,
0.012025435455143452,
-0.05842336639761925,
0.003104863688349724
] |
etalab-ia/dpr-ctx_encoder-fr_qa-camembert | a0bc241d0c8011d1d72c02487b3ff3e326a2e59c | 2021-06-16T11:22:59.000Z | [
"pytorch",
"camembert",
"fr",
"dataset:piaf",
"dataset:FQuAD",
"dataset:SQuAD-FR",
"arxiv:2004.04906",
"arxiv:1911.03894",
"transformers"
] | null | false | etalab-ia | null | etalab-ia/dpr-ctx_encoder-fr_qa-camembert | 2,751 | 3 | transformers | ---
language: fr
datasets:
- piaf
- FQuAD
- SQuAD-FR
---
# dpr-ctx_encoder-fr_qa-camembert
## Description
French [DPR model](https://arxiv.org/abs/2004.04906) using [CamemBERT](https://arxiv.org/abs/1911.03894) as base and then fine-tuned on a combination of three French Q&A datasets
## Data
### French Q&A
We use a combination of three French Q&A datasets:
1. [PIAFv1.1](https://www.data.gouv.fr/en/datasets/piaf-le-dataset-francophone-de-questions-reponses/)
2. [FQuADv1.0](https://fquad.illuin.tech/)
3. [SQuAD-FR (SQuAD automatically translated to French)](https://github.com/Alikabbadj/French-SQuAD)
### Training
We are using 90 562 random questions for `train` and 22 391 for `dev`. No question in `train` exists in `dev`. For each question, we have a single `positive_context` (the paragraph where the answer to this question is found) and around 30 `hard_negative_contexts`. Hard negative contexts are found by querying an Elasticsearch instance (via BM25 retrieval) and getting the top-k candidates **that do not contain the answer**.
The files are over [here](https://drive.google.com/file/d/1W5Jm3sqqWlsWsx2sFpA39Ewn33PaLQ7U/view?usp=sharing).
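As an illustration, hard negatives of this kind can be mined with a few lines of code. The sketch below uses the Elasticsearch Python client; the index name, field name, and client version are assumptions made for illustration, not the exact setup used here.
```python
# Hypothetical sketch of BM25 hard-negative mining (index and field names are assumed).
from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")  # assumed local Elasticsearch instance

def mine_hard_negatives(question, answer, index="wiki_paragraphs", k=30):
    # A plain match query is ranked with BM25 by default in Elasticsearch.
    hits = es.search(index=index, query={"match": {"text": question}}, size=k * 2)
    negatives = []
    for hit in hits["hits"]["hits"]:
        passage = hit["_source"]["text"]
        # Keep only candidates that do NOT contain the gold answer.
        if answer.lower() not in passage.lower():
            negatives.append(passage)
        if len(negatives) == k:
            break
    return negatives
```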
### Evaluation
We use FQuADv1.0 and French-SQuAD evaluation sets.
## Training Script
We use the official [Facebook DPR implementation](https://github.com/facebookresearch/DPR) with a slight modification: by default, the code works with RoBERTa models, so we changed a single line to make it work with CamemBERT. The modification can be found [over here](https://github.com/psorianom/DPR).
### Hyperparameters
```shell
python -m torch.distributed.launch --nproc_per_node=8 train_dense_encoder.py \
--max_grad_norm 2.0 \
--encoder_model_type fairseq_roberta \
--pretrained_file data/camembert-base \
--seed 12345 \
--sequence_length 256 \
--warmup_steps 1237 \
--batch_size 16 \
--do_lower_case \
--train_file ./data/DPR_FR_train.json \
--dev_file ./data/DPR_FR_dev.json \
--output_dir ./output/ \
--learning_rate 2e-05 \
--num_train_epochs 35 \
--dev_batch_size 16 \
--val_av_rank_start_epoch 30 \
--pretrained_model_cfg ./data/camembert-base/
```
## Evaluation results
We obtain the following results on the FQuAD and SQuAD-FR evaluation (or validation) sets. To compute them, we use [haystack's evaluation script](https://github.com/deepset-ai/haystack/blob/db4151bbc026f27c6d709fefef1088cd3f1e18b9/tutorials/Tutorial5_Evaluation.py) (**we report Retrieval results only**).
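For clarity, the two retrieval metrics reported below can be computed from ranked candidate lists as in the minimal sketch that follows; this is an illustration of the metric definitions, not haystack's exact evaluation code.
```python
# Illustrative recall@k / mean-average-precision computation (not haystack's code).
def retrieval_metrics(ranked_lists, k=20):
    # ranked_lists: one list per question; each entry is True if that ranked
    # candidate passage contains the answer, False otherwise.
    hits, ap_sum = 0, 0.0
    for ranked in ranked_lists:
        topk = ranked[:k]
        if any(topk):
            hits += 1
        num_rel, precisions = 0, []
        for i, rel in enumerate(topk, start=1):
            if rel:
                num_rel += 1
                precisions.append(num_rel / i)
        ap_sum += sum(precisions) / num_rel if num_rel else 0.0
    n = len(ranked_lists)
    return {"recall@k": hits / n, "mean_avg_precision": ap_sum / n}
```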
### DPR
#### FQuAD v1.0 Evaluation
```shell
For 2764 out of 3184 questions (86.81%), the answer was in the top-20 candidate passages selected by the retriever.
Retriever Recall: 0.87
Retriever Mean Avg Precision: 0.57
```
#### SQuAD-FR Evaluation
```shell
For 8945 out of 10018 questions (89.29%), the answer was in the top-20 candidate passages selected by the retriever.
Retriever Recall: 0.89
Retriever Mean Avg Precision: 0.63
```
### BM25
For reference, BM25 obtains the results shown below. As in the original paper, DPR is consistently outperformed by BM25 on SQuAD-like datasets.
#### FQuAD v1.0 Evaluation
```shell
For 2966 out of 3184 questions (93.15%), the answer was in the top-20 candidate passages selected by the retriever.
Retriever Recall: 0.93
Retriever Mean Avg Precision: 0.74
```
#### SQuAD-FR Evaluation
```shell
For 9353 out of 10018 questions (93.36%), the answer was in the top-20 candidate passages selected by the retriever.
Retriever Recall: 0.93
Retriever Mean Avg Precision: 0.77
```
## Usage
The results reported here are obtained with the `haystack` library. To obtain similar embeddings using only the HF `transformers` library, you can do the following:
```python
from transformers import AutoTokenizer, AutoModel

query = "Salut, mon chien est-il mignon ?"

# Load the tokenizer and the context encoder
tokenizer = AutoTokenizer.from_pretrained("etalab-ia/dpr-ctx_encoder-fr_qa-camembert", do_lower_case=True)
model = AutoModel.from_pretrained("etalab-ia/dpr-ctx_encoder-fr_qa-camembert", return_dict=True)

# Tokenize the query and take the pooled output as its embedding
input_ids = tokenizer(query, return_tensors="pt")["input_ids"]
embeddings = model(input_ids).pooler_output
print(embeddings)
```
And with `haystack`, we use it as a retriever:
```python
retriever = DensePassageRetriever(
document_store=document_store,
query_embedding_model="etalab-ia/dpr-question_encoder-fr_qa-camembert",
passage_embedding_model="etalab-ia/dpr-ctx_encoder-fr_qa-camembert",
model_version=dpr_model_tag,
infer_tokenizer_classes=True,
)
```
## Acknowledgments
This work was performed using HPC resources from GENCI–IDRIS (Grant 2020-AD011011224).
## Citations
### Datasets
#### PIAF
```
@inproceedings{KeraronLBAMSSS20,
author = {Rachel Keraron and
Guillaume Lancrenon and
Mathilde Bras and
Fr{\'{e}}d{\'{e}}ric Allary and
Gilles Moyse and
Thomas Scialom and
Edmundo{-}Pavel Soriano{-}Morales and
Jacopo Staiano},
title = {Project {PIAF:} Building a Native French Question-Answering Dataset},
booktitle = {{LREC}},
pages = {5481--5490},
publisher = {European Language Resources Association},
year = {2020}
}
```
#### FQuAD
```
@article{dHoffschmidt2020FQuADFQ,
title={FQuAD: French Question Answering Dataset},
author={Martin d'Hoffschmidt and Maxime Vidal and Wacim Belblidia and Tom Brendl'e and Quentin Heinrich},
journal={ArXiv},
year={2020},
volume={abs/2002.06071}
}
```
#### SQuAD-FR
```
@MISC{kabbadj2018,
author = "Kabbadj, Ali",
title = "Something new in French Text Mining and Information Extraction (Universal Chatbot): Largest Q&A French training dataset (110 000+) ",
editor = "linkedin.com",
month = "November",
year = "2018",
url = "\url{https://www.linkedin.com/pulse/something-new-french-text-mining-information-chatbot-largest-kabbadj/}",
note = "[Online; posted 11-November-2018]",
}
```
### Models
#### CamemBERT
HF model card : [https://huggingface.co/camembert-base](https://huggingface.co/camembert-base)
```
@inproceedings{martin2020camembert,
title={CamemBERT: a Tasty French Language Model},
author={Martin, Louis and Muller, Benjamin and Su{\'a}rez, Pedro Javier Ortiz and Dupont, Yoann and Romary, Laurent and de la Clergerie, {\'E}ric Villemonte and Seddah, Djam{\'e} and Sagot, Beno{\^\i}t},
booktitle={Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics},
year={2020}
}
```
#### DPR
```
@misc{karpukhin2020dense,
title={Dense Passage Retrieval for Open-Domain Question Answering},
author={Vladimir Karpukhin and Barlas Oğuz and Sewon Min and Patrick Lewis and Ledell Wu and Sergey Edunov and Danqi Chen and Wen-tau Yih},
year={2020},
eprint={2004.04906},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
```
| [
-0.09758041054010391,
-0.05847803130745888,
-0.02466079220175743,
-0.07649730890989304,
0.02477359026670456,
0.05156439542770386,
0.012393553741276264,
0.03220145404338837,
0.06553865969181061,
-0.03282203525304794,
0.02006671018898487,
-0.0777493417263031,
0.02413932792842388,
0.009842921979725361,
0.0067879511043429375,
-0.04747995361685753,
-0.04629410803318024,
0.023173131048679352,
-0.04910656064748764,
-0.0580819733440876,
0.014670905657112598,
-0.028236230835318565,
0.09966952353715897,
-0.028750527650117874,
0.05350302904844284,
-0.04639561474323273,
-0.02307351492345333,
0.07280892878770828,
0.028192907571792603,
-0.05876418948173523,
0.018492499366402626,
0.15593861043453217,
0.007834674790501595,
0.038062576204538345,
-0.023704474791884422,
0.055955030024051666,
-0.00019037610036320984,
-0.050319135189056396,
-0.05100481957197189,
0.08820740878582001,
-0.05038634315133095,
0.020833240821957588,
0.04150785505771637,
-0.019992781803011894,
0.07013994455337524,
0.042439136654138565,
-0.0676279291510582,
0.06625983864068985,
-0.0025067240931093693,
-0.04909287393093109,
-0.06247184798121452,
0.022031519562005997,
0.012985587120056152,
0.04935874044895172,
-0.004812643397599459,
0.001385313575156033,
0.059977658092975616,
-0.025662293657660484,
0.013210881501436234,
0.02573106251657009,
-0.04101216420531273,
-0.03956000506877899,
-0.033232901245355606,
-0.0022375243715941906,
-0.03783964738249779,
-0.021578926593065262,
-0.008281093090772629,
0.030615366995334625,
-0.03251998871564865,
0.02855103090405464,
-0.10754922777414322,
0.01751471310853958,
-0.03508557006716728,
0.04497937858104706,
0.05008688569068909,
0.09664955735206604,
-0.002804342657327652,
-0.02727792225778103,
0.05286005511879921,
-0.1531074047088623,
0.014235909096896648,
-0.018755808472633362,
0.06675524264574051,
-0.0013773067621514201,
0.12428967654705048,
-0.02212403155863285,
0.05652298778295517,
0.03583255782723427,
0.02318299002945423,
0.016457151621580124,
-0.060949571430683136,
0.011515721678733826,
0.05760173499584198,
0.07041915506124496,
-0.004476006142795086,
0.067043237388134,
0.06114327535033226,
0.038280241191387177,
-0.017510918900370598,
0.03805379942059517,
0.009250979870557785,
-0.008637688122689724,
0.056832119822502136,
0.005617931485176086,
-0.12406449019908905,
-0.020530100911855698,
0.05004077032208443,
0.048161257058382034,
0.019904695451259613,
-0.13729430735111237,
-0.03728203475475311,
-0.006201122421771288,
-0.04400060698390007,
-0.06677816063165665,
-0.047494396567344666,
0.03829626739025116,
0.030243776738643646,
-0.0525376982986927,
-0.012005177326500416,
-0.02395252324640751,
-0.01180250197649002,
-0.0351870097219944,
-0.03322521224617958,
0.01638900302350521,
0.013522283174097538,
-0.011533915996551514,
-0.038923949003219604,
3.1080346142229756e-33,
0.11126263439655304,
0.11923485994338989,
0.018626021221280098,
-0.0008652512915432453,
-0.012402190826833248,
-0.027740349993109703,
-0.0389622300863266,
0.02194357104599476,
-0.08208879083395004,
0.026810772716999054,
-0.07169985771179199,
0.014146566390991211,
-0.06192075088620186,
0.040270816534757614,
0.018062112852931023,
-0.06571035832166672,
0.023025132715702057,
-0.001013113302178681,
-0.014762550592422485,
0.007835187949240208,
0.1506834626197815,
0.04655693098902702,
0.005566758569329977,
-0.03356310725212097,
0.0935259610414505,
0.0526561364531517,
0.004059122409671545,
-0.016077550128102303,
-0.07483040541410446,
0.022986749187111855,
-0.09272792190313339,
-0.0509798638522625,
-0.030302872881293297,
0.04699623957276344,
-0.037596169859170914,
-0.06664447486400604,
0.0016186059219762683,
-0.02319878339767456,
-0.005041039083153009,
-0.0018345197895541787,
0.08401142060756683,
-0.007947755046188831,
0.04530719667673111,
-0.022066134959459305,
-0.04859267175197601,
-0.050720810890197754,
0.009517413564026356,
-0.011265956796705723,
0.05858554691076279,
0.004265297669917345,
-0.007684154901653528,
-0.03211077302694321,
-0.049306679517030716,
-0.05848844721913338,
-0.014692545868456364,
0.06965561956167221,
-0.005802207160741091,
0.005622573662549257,
-0.013516793958842754,
0.00776085676625371,
0.01669491082429886,
0.008839467540383339,
-0.006700079422444105,
0.036155764013528824,
0.011534246616065502,
-0.02656884491443634,
0.0015800439286977053,
-0.029712194576859474,
0.09048085659742355,
-0.017545320093631744,
-0.05739707872271538,
0.009242822416126728,
0.07688935101032257,
-0.021598421037197113,
0.10540089756250381,
0.03909740969538689,
-0.009375869296491146,
-0.030632179230451584,
-0.021377811208367348,
-0.07442767918109894,
-0.04071551188826561,
0.03771745413541794,
-0.07198229432106018,
0.02018660120666027,
-0.024534350261092186,
0.03284447267651558,
0.08214136958122253,
-0.06887859851121902,
-0.04125816747546196,
-0.013723145239055157,
-0.07177787274122238,
-0.04966866970062256,
0.011175629682838917,
-0.05592630058526993,
0.004681430757045746,
-3.725953423541866e-33,
0.0334547683596611,
0.08750581741333008,
-0.013992234133183956,
0.031075315549969673,
0.06085701659321785,
-0.03677688166499138,
0.06368017941713333,
0.06315134465694427,
0.06046285107731819,
-0.04698750004172325,
-0.010894495993852615,
-0.11006486415863037,
0.016291139647364616,
-0.07747524976730347,
0.01428084634244442,
-0.02027585171163082,
-0.04419706016778946,
-0.03373685106635094,
0.06325287371873856,
0.09724155068397522,
-0.03133751079440117,
-0.013865413144230843,
-0.08428257703781128,
0.0165665615350008,
-0.036259911954402924,
0.03830407187342644,
0.020791135728359222,
0.01788569800555706,
-0.024107789620757103,
0.0007903213263489306,
-0.035096071660518646,
-0.09635622799396515,
-0.029739344492554665,
0.021626057103276253,
-0.07707050442695618,
0.030109828338027,
0.06400547921657562,
0.03191326558589935,
-0.05498061701655388,
0.11751367151737213,
0.03181798756122589,
0.017902707681059837,
-0.05748485401272774,
-0.01483133528381586,
0.001859751995652914,
-0.01498361211270094,
-0.03907762095332146,
-0.05109165608882904,
-0.05350908264517784,
-0.034211065620183945,
0.03871080279350281,
0.05721592530608177,
-0.0932774469256401,
0.017745042219758034,
-0.03478509932756424,
-0.034547097980976105,
0.04301832988858223,
-0.036254897713661194,
-0.05131356045603752,
-0.034846555441617966,
-0.04580632969737053,
-0.01007916685193777,
0.0008639017469249666,
-0.020801061764359474,
0.0569717139005661,
-0.04328013211488724,
-0.07696564495563507,
0.04733520746231079,
0.018487518653273582,
-0.020576493814587593,
0.049149543046951294,
-0.05909405276179314,
0.047113314270973206,
0.05803411826491356,
-0.030164970085024834,
-0.0900188460946083,
-0.1142469272017479,
-0.0379946194589138,
0.07883692532777786,
0.045411285012960434,
-0.06498999148607254,
-0.0657905712723732,
0.009120594710111618,
0.08704298734664917,
0.0011468013981357217,
0.06946123391389847,
0.03941013291478157,
0.047644197940826416,
0.08022578805685043,
-0.0498572513461113,
0.028482459485530853,
0.051338132470846176,
0.07343091070652008,
0.09581177681684494,
0.03663739562034607,
-4.921354701536984e-8,
0.013056560419499874,
0.005273299291729927,
-0.07286429405212402,
0.03792676702141762,
-0.022908154875040054,
-0.1235138401389122,
-0.08369261771440506,
0.002239751862362027,
0.007303968537598848,
-0.011271357536315918,
-0.0038570479955524206,
0.04901912063360214,
-0.01938471570611,
-0.05463269725441933,
-0.023808034136891365,
0.05408867448568344,
0.000513885635882616,
0.07399995625019073,
-0.025592627003788948,
0.033483605831861496,
0.003087308257818222,
0.08283114433288574,
-0.03329676762223244,
-0.053348809480667114,
0.03832681104540825,
-0.0065119401551783085,
-0.07432568818330765,
-0.03620879352092743,
0.034664127975702286,
0.02666122280061245,
-0.027226705104112625,
0.010165824554860592,
0.0062479074113070965,
-0.06316959112882614,
0.005646892357617617,
0.023108970373868942,
-0.05111849308013916,
-0.05873982980847359,
0.034652229398489,
0.036848053336143494,
0.13270780444145203,
0.013376169838011265,
-0.13947591185569763,
0.032579220831394196,
0.08792538940906525,
-0.017978183925151825,
-0.03187170997262001,
-0.13516107201576233,
0.06338690966367722,
-0.026246564462780952,
0.03100053034722805,
0.018709475174546242,
-0.045831505209207535,
0.052227895706892014,
0.037415761500597,
0.041656218469142914,
-0.07800343632698059,
-0.01786605641245842,
0.07638510316610336,
0.0009475210681557655,
-0.055766232311725616,
0.04767969250679016,
-0.004023435991257429,
0.003956903237849474
] |
hetpandya/t5-base-tapaco | 374f3753409f0a3aca1d69f8af2cee358b02daea | 2021-06-29T11:19:06.000Z | [
"pytorch",
"t5",
"text2text-generation",
"en",
"dataset:tapaco",
"transformers",
"autotrain_compatible"
] | text2text-generation | false | hetpandya | null | hetpandya/t5-base-tapaco | 2,737 | null | transformers | ---
language: en
datasets:
- tapaco
---
# T5-base for paraphrase generation
Google's T5-base fine-tuned on the [TaPaCo](https://huggingface.co/datasets/tapaco) dataset for paraphrase generation.
<!-- ## Model fine-tuning -->
<!-- The training script is a slightly modified version of [this Colab Notebook](https://github.com/patil-suraj/exploring-T5/blob/master/t5_fine_tuning.ipynb) created by [Suraj Patil](https://github.com/patil-suraj), so all credits to him! -->
## Model in Action 🚀
```python
from transformers import T5ForConditionalGeneration, T5Tokenizer
tokenizer = T5Tokenizer.from_pretrained("hetpandya/t5-base-tapaco")
model = T5ForConditionalGeneration.from_pretrained("hetpandya/t5-base-tapaco")
def get_paraphrases(sentence, prefix="paraphrase: ", n_predictions=5, top_k=120, max_length=256, device="cpu"):
    # Prepend the task prefix and append the end-of-sequence token
    text = prefix + sentence + " </s>"
    encoding = tokenizer.encode_plus(
        text, pad_to_max_length=True, return_tensors="pt"
    )
    input_ids, attention_masks = encoding["input_ids"].to(device), encoding[
        "attention_mask"
    ].to(device)
    # Sample several candidate paraphrases
    model_output = model.generate(
        input_ids=input_ids,
        attention_mask=attention_masks,
        do_sample=True,
        max_length=max_length,
        top_k=top_k,
        top_p=0.98,
        early_stopping=True,
        num_return_sequences=n_predictions,
    )
    outputs = []
    for output in model_output:
        generated_sent = tokenizer.decode(
            output, skip_special_tokens=True, clean_up_tokenization_spaces=True
        )
        # Keep only paraphrases that differ from the input and are not duplicates
        if (
            generated_sent.lower() != sentence.lower()
            and generated_sent not in outputs
        ):
            outputs.append(generated_sent)
    return outputs

paraphrases = get_paraphrases("The house will be cleaned by me every Saturday.")
for sent in paraphrases:
    print(sent)
```
## Output
```
The house will get cleaned for a whole week.
The house is cleaning by me every weekend.
What was going to do not get do with the house from me every Thursday.
The house should be cleaned on Sunday--durse.
It's time that I would be cleaning her house in tomorrow.
```
Created by [Het Pandya/@hetpandya](https://github.com/hetpandya) | [LinkedIn](https://www.linkedin.com/in/het-pandya)
Made with <span style="color: red;">♥</span> in India | [
-0.12788687646389008,
-0.05737802013754845,
0.05320509523153305,
0.0017571470234543085,
-0.027493270114064217,
-0.020548487082123756,
0.004073997028172016,
0.01961081102490425,
-0.03867259621620178,
-0.0525115467607975,
0.015077066607773304,
-0.02969738282263279,
-0.008014379069209099,
-0.01976362057030201,
0.008460788056254387,
0.043449319899082184,
0.009993384592235088,
0.013228349387645721,
-0.13117626309394836,
-0.15670831501483917,
0.11282533407211304,
0.07271113991737366,
0.059916168451309204,
-0.03191521391272545,
0.030515242367982864,
0.005245908163487911,
-0.04714124649763107,
-0.015139947645366192,
0.05439997836947441,
-0.009446173906326294,
-0.02350369095802307,
0.0889061987400055,
-0.06843739748001099,
0.04138197377324104,
-0.008839732967317104,
0.09322824329137802,
-0.08105836063623428,
0.037191517651081085,
0.020453324541449547,
0.002436523325741291,
0.04203816503286362,
-0.036471642553806305,
-0.04456429183483124,
0.0290316604077816,
0.11410520225763321,
0.004282613284885883,
-0.04357088357210159,
0.003780327271670103,
-0.034837014973163605,
-0.01058744452893734,
-0.06957466155290604,
-0.04694650322198868,
0.01976947672665119,
0.05450064316391945,
-0.006732108537107706,
0.008831582963466644,
0.01547032967209816,
0.0010961164953187108,
0.052557263523340225,
-0.08009406924247742,
-0.06125130504369736,
-0.04744446277618408,
-0.048420824110507965,
-0.041035041213035583,
-0.010232370346784592,
0.007558641955256462,
0.06374020129442215,
0.03262355923652649,
0.021132096648216248,
0.048011794686317444,
-0.08650533109903336,
0.0005252339178696275,
0.07183751463890076,
0.01665639504790306,
-0.02434195950627327,
0.03449277952313423,
0.10536957532167435,
-0.051847800612449646,
0.035787101835012436,
-0.0804314911365509,
-0.006268939469009638,
-0.05577702820301056,
0.10210210084915161,
0.0721435621380806,
0.0017850120784714818,
-0.04150599241256714,
0.016957642510533333,
-0.004139314871281385,
0.04930620267987251,
-0.0085507957264781,
0.026504142209887505,
-0.04561883583664894,
0.003805607557296753,
-0.03799085691571236,
-0.078755684196949,
0.08127183467149734,
-0.0788395032286644,
-0.0765451043844223,
-0.07432857155799866,
0.07577460259199142,
0.016053790226578712,
-0.00862320140004158,
0.03586481884121895,
-0.024649158120155334,
-0.03417429327964783,
-0.029797380790114403,
0.06262128055095673,
-0.0024517581332474947,
0.027478495612740517,
-0.014724915847182274,
-0.027479786425828934,
0.042738039046525955,
-0.02605562098324299,
-0.0249954741448164,
0.02269018068909645,
-0.0074450355023145676,
-0.056307289749383926,
-0.04837528616189957,
0.044862233102321625,
0.00742217805236578,
-0.006928207818418741,
0.04102542996406555,
-0.05434883013367653,
0.0459708571434021,
-0.06240188702940941,
-0.046605512499809265,
-0.056573349982500076,
5.927004147319212e-33,
0.06412291526794434,
0.04286731034517288,
0.05472288280725479,
-0.007966610603034496,
-0.046906325966119766,
0.0005673763807862997,
0.03494658321142197,
0.01744202710688114,
-0.08373115956783295,
0.06003006175160408,
-0.10579100996255875,
-0.035583559423685074,
-0.09574178606271744,
-0.011240676045417786,
0.007251570001244545,
-0.0622333325445652,
-0.13034631311893463,
0.06481026858091354,
0.005745291244238615,
0.0360807403922081,
0.08067221194505692,
0.09104558825492859,
-0.04011048749089241,
-0.05989896133542061,
-0.059526022523641586,
0.029203638434410095,
0.053244318813085556,
-0.09242603927850723,
-0.04020626097917557,
0.031215891242027283,
-0.07934106886386871,
0.029239550232887268,
-0.004324492998421192,
-0.016709771007299423,
0.004668206907808781,
-0.06825947761535645,
-0.004247589968144894,
-0.0278007835149765,
0.01488081831485033,
-0.062413912266492844,
0.011046767234802246,
0.036258723586797714,
0.05479663982987404,
0.005181021522730589,
-0.050369054079055786,
-0.03459721803665161,
-0.03425468131899834,
0.0043427832424640656,
0.015125537291169167,
0.044065702706575394,
-0.0266563780605793,
0.00033837827504612505,
0.023757925257086754,
-0.06333708018064499,
0.027750365436077118,
0.04088606685400009,
0.023541763424873352,
0.04004630818963051,
0.08503472805023193,
0.022013748064637184,
0.01975163072347641,
-0.013382872566580772,
-0.021339552477002144,
0.04573657736182213,
0.06650125235319138,
0.061943452805280685,
-0.060099489986896515,
-0.01528739370405674,
0.06812544912099838,
0.021172454580664635,
-0.06199701502919197,
-0.018135499209165573,
-0.046570952981710434,
0.010376694612205029,
0.032924506813287735,
-0.07302369922399521,
0.03384363278746605,
-0.0020718835294246674,
-0.08679711073637009,
-0.013383091427385807,
-0.001562767312861979,
-0.035408977419137955,
0.011282717809081078,
-0.000845931121148169,
0.00592229375615716,
-0.024036308750510216,
0.07182223349809647,
-0.12729023396968842,
-0.01706964336335659,
-0.0025974763557314873,
0.020801588892936707,
0.017719648778438568,
-0.03339371085166931,
-0.03055604360997677,
-0.043977633118629456,
-5.46732818345611e-33,
0.09087983518838882,
0.021778369322419167,
-0.044875361025333405,
0.10163760930299759,
0.016686156392097473,
-0.034822314977645874,
0.029721617698669434,
0.023198330774903297,
0.007462476380169392,
-0.005129351746290922,
0.036837607622146606,
-0.014574676752090454,
0.017978308722376823,
-0.06353772431612015,
0.026723330840468407,
-0.020307905972003937,
-0.04045502096414566,
0.04332996904850006,
0.002868430223315954,
0.03469335287809372,
-0.03641020879149437,
0.052377767860889435,
-0.09862134605646133,
0.05431215837597847,
-0.038985565304756165,
0.04405588284134865,
-0.011337053030729294,
0.061993904411792755,
0.029231853783130646,
0.014329571276903152,
-0.08897320926189423,
0.02806183509528637,
-0.03940454125404358,
0.10644426196813583,
-0.04526568576693535,
0.04862518608570099,
0.046365391463041306,
-0.03748641535639763,
-0.013000608421862125,
0.06286022067070007,
0.049527592957019806,
0.06489001214504242,
-0.03643769398331642,
0.03809293732047081,
-0.07401526719331741,
-0.007254945579916239,
-0.1064777597784996,
-0.007764865178614855,
-0.036809246987104416,
-0.06929334253072739,
0.07074172049760818,
0.008804026059806347,
-0.07577627897262573,
0.0035495914053171873,
-0.02229907549917698,
-0.05945366993546486,
0.11195482313632965,
-0.1590535193681717,
-0.04047829285264015,
-0.01079975813627243,
-0.06558134406805038,
-0.016392113640904427,
0.03714001178741455,
-0.09642180055379868,
0.03147058188915253,
-0.03442150726914406,
-0.01305965892970562,
0.004640776198357344,
0.06154906377196312,
0.011678009293973446,
-0.01122627779841423,
-0.015360924415290356,
0.07575316727161407,
-0.02761712297797203,
-0.010191185399889946,
-0.016809822991490364,
0.004551074001938105,
-0.038491617888212204,
0.004493859130889177,
-0.10265473276376724,
-0.018763946369290352,
0.026048870757222176,
0.03135089948773384,
0.07234730571508408,
-0.011795083992183208,
0.04332496598362923,
0.06858864426612854,
0.15245644748210907,
0.06948516517877579,
0.08256097137928009,
-0.03539760410785675,
0.008837741799652576,
0.0010029816767200828,
0.16594211757183075,
0.03662349283695221,
-6.091868698376857e-8,
-0.02754024602472782,
0.078748419880867,
-0.047183141112327576,
0.10005603730678558,
-0.02776719070971012,
-0.013919438235461712,
-0.01589258201420307,
0.030217070132493973,
0.041536517441272736,
0.017469534650444984,
0.04060961678624153,
0.0326913483440876,
-0.024650171399116516,
0.0286090224981308,
-0.018330013379454613,
0.06595198810100555,
-0.03180879354476929,
0.08454149216413498,
-0.02766694314777851,
0.020764173939824104,
-0.01275726780295372,
0.025478584691882133,
-0.021588057279586792,
-0.031633321195840836,
0.06197212636470795,
-0.015494931489229202,
-0.053067613393068314,
0.052247464656829834,
0.009666476398706436,
-0.023810992017388344,
-0.011568928137421608,
0.02619386836886406,
-0.020380858331918716,
-0.07392112165689468,
-0.00211178045719862,
0.14170247316360474,
0.001884161145426333,
-0.04451817274093628,
0.013070753775537014,
0.07185429334640503,
-0.024258896708488464,
0.05744413658976555,
-0.12160712480545044,
0.006365948356688023,
0.03126287832856178,
-0.06788139790296555,
-0.01691897213459015,
-0.059862397611141205,
0.027044719085097313,
0.02995762787759304,
0.03279495611786842,
0.06270486861467361,
-0.05018245428800583,
0.014240573160350323,
0.012374065816402435,
0.041303589940071106,
-0.008384152315557003,
-0.053738053888082504,
0.012079921551048756,
0.02150094509124756,
0.034486789256334305,
0.05620330572128296,
0.019985057413578033,
-0.05893753841519356
] |
monologg/koelectra-base-v3-finetuned-korquad | ea97b35e21bfd7f2524b5697931ae3db0394af9f | 2020-10-14T01:43:31.000Z | [
"pytorch",
"electra",
"question-answering",
"transformers",
"autotrain_compatible"
] | question-answering | false | monologg | null | monologg/koelectra-base-v3-finetuned-korquad | 2,735 | 3 | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
rinna/japanese-gpt2-small | d35a68cf1fea74b71708ce898b351471b5c698ce | 2021-08-23T03:19:56.000Z | [
"pytorch",
"tf",
"gpt2",
"text-generation",
"ja",
"dataset:cc100",
"dataset:wikipedia",
"transformers",
"japanese",
"lm",
"nlp",
"license:mit"
] | text-generation | false | rinna | null | rinna/japanese-gpt2-small | 2,731 | 4 | transformers | ---
language: ja
thumbnail: https://github.com/rinnakk/japanese-gpt2/blob/master/rinna.png
tags:
- ja
- japanese
- gpt2
- text-generation
- lm
- nlp
license: mit
datasets:
- cc100
- wikipedia
widget:
- text: "生命、宇宙、そして万物についての究極の疑問の答えは"
---
# japanese-gpt2-small

This repository provides a small-sized Japanese GPT-2 model. The model was trained using code from the GitHub repository [rinnakk/japanese-pretrained-models](https://github.com/rinnakk/japanese-pretrained-models) by [rinna Co., Ltd.](https://corp.rinna.co.jp/).
# How to use the model
*NOTE:* Use `T5Tokenizer` to instantiate the tokenizer.
~~~~
from transformers import T5Tokenizer, GPT2LMHeadModel
tokenizer = T5Tokenizer.from_pretrained("rinna/japanese-gpt2-small")
tokenizer.do_lower_case = True  # due to a bug in the tokenizer config loading
model = GPT2LMHeadModel.from_pretrained("rinna/japanese-gpt2-small")
~~~~
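Once the tokenizer and model are loaded, text can be generated with the standard `generate` API. The snippet below is a minimal sketch; the prompt and sampling settings are illustrative choices, not recommendations from the model authors.
~~~~
# Minimal generation sketch (prompt and sampling settings are illustrative).
input_ids = tokenizer.encode("生命、宇宙、そして万物についての究極の疑問の答えは", return_tensors="pt")
output_ids = model.generate(
    input_ids,
    max_length=50,
    do_sample=True,
    top_p=0.95,
    pad_token_id=tokenizer.pad_token_id,
)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
~~~~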
# Model architecture
A 12-layer, 768-hidden-size transformer-based language model.
# Training
The model was trained on [Japanese CC-100](http://data.statmt.org/cc-100/ja.txt.xz) and [Japanese Wikipedia](https://dumps.wikimedia.org/other/cirrussearch) to optimize a traditional language modelling objective on 8\\*V100 GPUs for around 15 days. It reaches around 21 perplexity on a chosen validation set from CC-100.
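As a rough illustration of what the reported perplexity means, it can be approximated for a single text as the exponential of the mean token cross-entropy; the snippet below is a sketch (example sentence chosen arbitrarily) and not the exact evaluation setup behind the figure above.
~~~~
# Illustrative perplexity on one sentence (not the official evaluation setup).
import torch

text = "吾輩は猫である。名前はまだ無い。"
ids = tokenizer.encode(text, return_tensors="pt")
with torch.no_grad():
    loss = model(ids, labels=ids).loss  # mean cross-entropy per predicted token
print(torch.exp(loss).item())
~~~~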
# Tokenization
The model uses a [sentencepiece](https://github.com/google/sentencepiece)-based tokenizer; the vocabulary was trained on Japanese Wikipedia using the official sentencepiece training script.
# License
[The MIT license](https://opensource.org/licenses/MIT)
| [
-0.13551969826221466,
-0.02166745997965336,
-0.015506036579608917,
0.024269575253129005,
0.03047218732535839,
-0.03153308108448982,
0.01752023957669735,
0.06925510615110397,
-0.012377876788377762,
-0.051026832312345505,
0.10484521090984344,
-0.03001861646771431,
0.028687944635748863,
0.03156008943915367,
0.0636579617857933,
-0.0010517046321183443,
-0.010749991051852703,
0.029736200347542763,
-0.08882163465023041,
-0.09770943969488144,
0.034470636397600174,
-0.022221606224775314,
0.06662993133068085,
0.009697673842310905,
0.07317682355642319,
0.031719453632831573,
0.09016986936330795,
0.023011144250631332,
0.04083351418375969,
0.030634868890047073,
-0.04543213173747063,
0.02486286498606205,
-0.030474020168185234,
0.04994847998023033,
-0.05115792155265808,
0.08711358904838562,
-0.037583883851766586,
-0.025025049224495888,
-0.012017629109323025,
-0.0020389121491461992,
-0.0010166147258132696,
-0.0007591975736431777,
0.019209006801247597,
-0.012307317927479744,
0.12382637709379196,
-0.028295498341321945,
-0.041719384491443634,
-0.06267298012971878,
-0.0874461829662323,
-0.04298684746026993,
-0.025259051471948624,
-0.028433550149202347,
0.019768178462982178,
0.008020455949008465,
0.027151793241500854,
-0.03325411677360535,
-0.018445080146193504,
-0.02549337036907673,
0.0342816561460495,
-0.027060791850090027,
0.0014738712925463915,
0.0031837201677262783,
-0.0401131734251976,
-0.039195023477077484,
-0.06402721256017685,
-0.016416633501648903,
-0.005283383186906576,
0.0068409317173063755,
0.061152756214141846,
-0.07939991354942322,
-0.02572764828801155,
0.030377618968486786,
-0.030591780319809914,
0.02954351156949997,
-0.013971862383186817,
-0.04208218306303024,
0.04984092712402344,
0.07769326865673065,
0.010836472734808922,
-0.06349252909421921,
0.03204626590013504,
-0.0016519080381840467,
0.14535997807979584,
0.01241098903119564,
0.04281231388449669,
0.026533303782343864,
0.023205900564789772,
0.03626713529229164,
0.03197362646460533,
0.04444511979818344,
-0.0030702517833560705,
0.01671714335680008,
0.0383615605533123,
0.06734740734100342,
-0.0541548915207386,
-0.01432520616799593,
-0.0648830384016037,
0.0050146798603236675,
-0.11468548327684402,
0.08645397424697876,
-0.017700057476758957,
0.022001685574650764,
0.042485788464546204,
0.05054968222975731,
-0.05712945759296417,
-0.025055568665266037,
-0.016044700518250465,
0.010318317450582981,
0.022893384099006653,
-0.010896672494709492,
0.0617268905043602,
-0.02500416897237301,
-0.0707106664776802,
-0.03044120781123638,
-0.03333807736635208,
0.04156392067670822,
-0.04316054284572601,
-0.010355295613408089,
-0.026713818311691284,
0.0760277658700943,
-0.02981683984398842,
0.011497306637465954,
-0.09172295033931732,
-0.05267781391739845,
-0.05696137994527817,
0.07729421555995941,
-0.07416106760501862,
7.026069324818973e-33,
0.06885246932506561,
0.06155059114098549,
-0.0034636047203093767,
0.03650662675499916,
-0.040111932903528214,
0.0013875714503228664,
-0.03360319510102272,
-0.0665775015950203,
-0.03316248953342438,
-0.054580241441726685,
-0.0419955812394619,
0.032003432512283325,
-0.15994957089424133,
-0.005983909126371145,
-0.004028319846838713,
-0.08908265084028244,
-0.031114382669329643,
0.0268691498786211,
0.04882059991359711,
0.019067781046032906,
0.09879764914512634,
0.08006845414638519,
-0.0050222198478877544,
-0.07650250941514969,
-0.05711282417178154,
0.09344547986984253,
0.07793667167425156,
-0.08670958131551743,
0.02261144109070301,
0.08255891501903534,
-0.0596124604344368,
-0.041552431881427765,
0.0024807804729789495,
0.03276457265019417,
-0.03736972436308861,
-0.06805117428302765,
0.01504150964319706,
-0.06945016980171204,
0.028284214437007904,
-0.08574522286653519,
0.03554566204547882,
-0.010020416229963303,
0.021833643317222595,
-0.039319854229688644,
-0.023297568783164024,
-0.019016703590750694,
0.06336674839258194,
0.008703834377229214,
0.04165451228618622,
0.053364962339401245,
-0.06802265346050262,
0.046633608639240265,
-0.08008606731891632,
0.025127911940217018,
-0.010780397802591324,
0.03605741634964943,
0.08980678766965866,
0.007662918418645859,
0.030264481902122498,
0.029608234763145447,
-0.03981301560997963,
0.01292258407920599,
0.02025970257818699,
0.06832072138786316,
0.05431446060538292,
0.07689289003610611,
-0.07493088394403458,
-0.09595954418182373,
0.027215100824832916,
0.04469071328639984,
-0.03013053722679615,
-0.04369739070534706,
0.046669505536556244,
-0.012056984007358551,
0.058390066027641296,
-0.10499011725187302,
0.05005030333995819,
-0.024046631529927254,
-0.07041989266872406,
0.02206316590309143,
-0.08236786723136902,
-0.012960030697286129,
-0.006906213238835335,
-0.008327298797667027,
-0.006280913483351469,
-0.0406201109290123,
0.07694576680660248,
-0.05176596716046333,
-0.029872165992856026,
-0.04186149314045906,
0.020416393876075745,
-0.0010264109587296844,
-0.02106105349957943,
-0.01414298266172409,
-0.02222619391977787,
-6.432487289323935e-33,
0.07276760786771774,
0.0418185219168663,
-0.015468427911400795,
0.05897846445441246,
-0.034181445837020874,
-0.10621411353349686,
0.031168436631560326,
0.07044285535812378,
-0.025550726801156998,
0.0146158616989851,
0.05661334469914436,
-0.01755177602171898,
-0.037194449454545975,
-0.019260011613368988,
0.08201368898153305,
-0.04196736961603165,
0.0556425005197525,
-0.003435632912442088,
0.023372692987322807,
0.02238219976425171,
0.05496683344244957,
0.018028734251856804,
-0.12651364505290985,
0.07710564136505127,
-0.0395263247191906,
0.03594670072197914,
0.061916280537843704,
0.03873211890459061,
0.03432747721672058,
0.029928546398878098,
-0.0615132711827755,
-0.006604956462979317,
-0.041565410792827606,
0.0075929719023406506,
-0.08862469345331192,
-0.05134084075689316,
0.0504220612347126,
0.05367507413029671,
-0.07180534303188324,
0.06822306662797928,
0.026405394077301025,
-0.04466882348060608,
-0.08667653799057007,
0.06852876394987106,
-0.08256970345973969,
0.02740776352584362,
-0.009032092057168484,
0.04628770798444748,
-0.0014554314548149705,
-0.07219430804252625,
0.04959786310791969,
-0.039128974080085754,
-0.006009657867252827,
-0.019143911078572273,
-0.11525415629148483,
-0.0652150884270668,
0.060419633984565735,
-0.017561955377459526,
-0.06042473763227463,
-0.006987651344388723,
-0.003923236392438412,
-0.11607277393341064,
0.07452675700187683,
-0.044936902821063995,
-0.03414689004421234,
-0.06052788347005844,
0.04853246733546257,
0.01801452971994877,
0.06200385466217995,
0.02197657898068428,
0.01000429131090641,
0.01603073813021183,
0.10710200667381287,
-0.02631504461169243,
0.003749751253053546,
-0.022318044677376747,
-0.07465146481990814,
-0.007339904084801674,
0.10547159612178802,
-0.04734944552183151,
-0.04557075724005699,
0.049363259226083755,
0.03829361870884895,
0.010457263328135014,
0.07931042462587357,
-0.058810219168663025,
-0.0010692519135773182,
0.09395740926265717,
0.04400167986750603,
0.013466627337038517,
-0.04583749547600746,
0.10028231143951416,
0.04755299538373947,
0.09369523078203201,
0.01159058790653944,
-5.897775423591156e-8,
-0.07761344313621521,
-0.09009314328432083,
-0.04755081236362457,
0.005053464323282242,
-0.06053759902715683,
-0.05783342942595482,
-0.045316994190216064,
0.03150707855820656,
0.020962268114089966,
-0.030428307130932808,
-0.0043369499035179615,
0.060903582721948624,
-0.1115521565079689,
-0.0031574657186865807,
0.004881788045167923,
0.039645612239837646,
0.07253727316856384,
0.12485770881175995,
-0.0331445150077343,
-0.0271852258592844,
-0.026323983445763588,
0.004479273688048124,
0.03706490248441696,
-0.07668863236904144,
-0.04736046493053436,
0.013513751327991486,
-0.09240119159221649,
0.07380226999521255,
-0.0031470407266169786,
-0.01678371988236904,
0.019732778891921043,
-0.0077142901718616486,
0.0020866382401436567,
0.027097178623080254,
-0.02721342444419861,
0.041896138340234756,
0.014578156173229218,
-0.053641896694898605,
0.014596722088754177,
-0.019497938454151154,
0.07451887428760529,
-0.005850295070558786,
-0.07787343114614487,
0.02362421154975891,
0.02973046898841858,
0.03363247215747833,
-0.005508135538548231,
-0.07307545095682144,
0.020480047911405563,
0.02341560646891594,
0.04597581923007965,
-0.0009540001046843827,
-0.09361551702022552,
-0.006758911535143852,
-0.03972136974334717,
0.04062314331531525,
-0.021583450958132744,
-0.05845721811056137,
0.013942130841314793,
0.05447010695934296,
-0.003241931553930044,
0.009821875020861626,
0.010768614709377289,
0.0009422330185770988
] |
Jonesy/HomersNightOut | 3b14400af228e5e589bdff6d4333a9645869e220 | 2022-04-28T21:08:05.000Z | [
"pytorch",
"gpt2",
"text-generation",
"transformers",
"conversational"
] | conversational | false | Jonesy | null | Jonesy/HomersNightOut | 2,729 | null | transformers | ---
tags:
- conversational
---
# DialoGPT-medium model of The Simpsons episode s1e10, "Homer's Night Out"
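A minimal chat sketch, assuming the standard DialoGPT usage pattern from the `transformers` library; this snippet is not part of the original card, and the prompt is just an example.
```python
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("Jonesy/HomersNightOut")
model = AutoModelForCausalLM.from_pretrained("Jonesy/HomersNightOut")

# Single-turn exchange; DialoGPT expects the EOS token appended to each user turn.
prompt_ids = tokenizer.encode("Hello, Homer!" + tokenizer.eos_token, return_tensors="pt")
reply_ids = model.generate(prompt_ids, max_length=100, pad_token_id=tokenizer.eos_token_id)
print(tokenizer.decode(reply_ids[:, prompt_ids.shape[-1]:][0], skip_special_tokens=True))
```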
| [
-0.03787623718380928,
-0.026492537930607796,
0.0001718651328701526,
-0.04339809715747833,
0.027409322559833527,
-0.0010801675962284207,
0.08986509591341019,
0.07419411838054657,
0.015904191881418228,
-0.09661661088466644,
-0.02897590398788452,
0.019834183156490326,
-0.024674048647284508,
-0.014990576542913914,
-0.04193992540240288,
-0.05010071024298668,
0.028874319046735764,
-0.08552734553813934,
0.03359094262123108,
0.07174739986658096,
0.03621944412589073,
0.04583873972296715,
0.03056010976433754,
0.0048506311140954494,
0.07195905596017838,
0.07656517624855042,
-0.07880228012800217,
-0.08112242817878723,
-0.007058837451040745,
-0.013445953838527203,
-0.00638468936085701,
0.07786372303962708,
0.06738027930259705,
0.03672143071889877,
-0.004665588494390249,
-0.03591100126504898,
0.09353144466876984,
0.0679234191775322,
-0.04693310707807541,
0.09453027695417404,
-0.001178471720777452,
-0.008839566260576248,
-0.0772346705198288,
-0.02398693934082985,
-0.06454803794622421,
0.029763301834464073,
-0.0700145736336708,
-0.09661790728569031,
-0.016774145886301994,
0.08347290009260178,
-0.08080701529979706,
0.04632095620036125,
-0.02538335509598255,
0.07132275402545929,
-0.007593884132802486,
0.014416448771953583,
0.03605294227600098,
0.006335783284157515,
0.04924479126930237,
0.03767776861786842,
-0.03915812075138092,
-0.040139198303222656,
-0.05271918699145317,
0.0750756785273552,
0.01738373190164566,
0.0037031732499599457,
-0.08626043796539307,
0.05500335991382599,
-0.0579366497695446,
0.04465430602431297,
-0.029254227876663208,
0.04570719972252846,
0.0038517501670867205,
-0.048604998737573624,
-0.06632168591022491,
0.023509323596954346,
0.06179017201066017,
-0.0821775570511818,
0.06374800950288773,
0.00657086493447423,
0.01719515770673752,
-0.09235147386789322,
-0.057922277599573135,
-0.015028436668217182,
0.0009785388829186559,
-0.06031552329659462,
0.029845181852579117,
-0.06914699822664261,
-0.02220560610294342,
0.014334267005324364,
-0.08787114173173904,
-0.08955119550228119,
0.017478305846452713,
-0.009711208753287792,
-0.04335283115506172,
0.0089398929849267,
-0.05781577527523041,
-0.1260070949792862,
-0.032964419573545456,
0.11138270795345306,
-0.02226051315665245,
0.028080767020583153,
0.06763649731874466,
-0.10223006457090378,
0.015606031753122807,
0.05187245085835457,
-0.07674605399370193,
-0.013565908186137676,
0.06633058190345764,
-0.0012575979344546795,
0.013410932384431362,
-0.03853113204240799,
0.04723481461405754,
-0.024113288149237633,
0.13919000327587128,
-0.03298026695847511,
0.06329908967018127,
-0.023820627480745316,
0.05796158313751221,
-0.05136421322822571,
0.004368431866168976,
-0.019317183643579483,
-0.01339693833142519,
-0.0276910699903965,
-0.028850318863987923,
-0.021490683779120445,
-0.026144247502088547,
-1.9470170273440415e-33,
0.017102880403399467,
0.012846131809055805,
-0.0401114895939827,
0.06339843571186066,
0.0344998724758625,
0.0625418871641159,
-0.0868036225438118,
-0.004072663839906454,
0.03776174783706665,
0.009420676156878471,
0.03291095048189163,
-0.06737295538187027,
-0.06071875989437103,
0.04165148735046387,
0.027445267885923386,
0.011248698458075523,
-0.029052676633000374,
0.06519194692373276,
-0.02084415778517723,
-0.051677457988262177,
0.02222820371389389,
0.06690236181020737,
0.017445744946599007,
0.007896743714809418,
0.03263545781373978,
-0.047070253640413284,
0.007572708185762167,
-0.09992243349552155,
0.0033386158756911755,
0.031956806778907776,
-0.019809184595942497,
0.016962066292762756,
0.018249178305268288,
0.03600568696856499,
-0.0022145970724523067,
0.0044487593695521355,
-0.01058285217732191,
-0.048748474568128586,
-0.010256871581077576,
-0.1221928745508194,
-0.07963857054710388,
-0.030062193050980568,
-0.03308577835559845,
0.0013944287784397602,
0.0034473459236323833,
0.00930559542030096,
0.014544930309057236,
0.04133910313248634,
-0.014700223691761494,
0.008550243452191353,
-0.032922446727752686,
-0.001314825494773686,
0.06528401374816895,
-0.09151112288236618,
0.004686441272497177,
-0.030409935861825943,
-0.011259409599006176,
-0.04503941163420677,
0.02680942602455616,
0.004564571660012007,
0.029623717069625854,
0.026294201612472534,
0.07403073459863663,
-0.09611406922340393,
0.08483316749334335,
0.0291108638048172,
0.011722201481461525,
0.0011649879161268473,
0.005951562896370888,
-0.0122337955981493,
-0.08290717005729675,
-0.01530130859464407,
0.012000144459307194,
0.028243109583854675,
-0.03082665242254734,
0.034089215099811554,
-0.0016243006102740765,
-0.03183240070939064,
0.00910154078155756,
0.09597355872392654,
-0.00039115906110964715,
-0.08877799659967422,
-0.0385836660861969,
-0.05103360489010811,
0.03306884691119194,
-0.02912140265107155,
0.07321268320083618,
-0.07680685073137283,
-0.030329281464219093,
0.01690460555255413,
0.02620142698287964,
-0.021672798320651054,
-0.02814512886106968,
0.016758864745497704,
-0.025656001642346382,
-8.389299307017195e-34,
-0.028948452323675156,
-0.01429680548608303,
-0.11341579258441925,
0.02820398099720478,
0.027488185092806816,
0.021161099895834923,
0.011654067784547806,
0.12924964725971222,
-0.014231625944375992,
-0.01702696457505226,
-0.03298104926943779,
-0.008993741124868393,
-0.042041126638650894,
-0.061627015471458435,
0.12435304373502731,
-0.06697376072406769,
0.018728194758296013,
0.012796199880540371,
-0.05641589313745499,
0.035828929394483566,
0.07121086120605469,
-0.031743668019771576,
-0.12579017877578735,
0.11043812334537506,
0.06816036254167557,
-0.03338110074400902,
-0.002571261487901211,
0.044963207095861435,
-0.005919449497014284,
-0.05536213144659996,
-0.11275359988212585,
0.0162814874202013,
-0.011799327097833157,
0.015336734242737293,
0.01916392520070076,
0.07038048654794693,
0.05574681609869003,
-0.019303174689412117,
-0.04228714480996132,
0.014469983987510204,
0.05176214873790741,
0.004967504646629095,
0.023174742236733437,
0.05942877382040024,
-0.03888991102576256,
-0.03161057084798813,
-0.0974467471241951,
-0.011021086946129799,
-0.05167975276708603,
0.05924860015511513,
-0.012316497042775154,
-0.012265371158719063,
-0.08848217129707336,
-0.01567191258072853,
-0.10407997667789459,
-0.035949915647506714,
0.004143841564655304,
0.018732160329818726,
-0.08830684423446655,
0.030671099200844765,
-0.020333735272288322,
-0.08986496925354004,
0.02713608182966709,
-0.0191337987780571,
0.037692319601774216,
-0.06424489617347717,
-0.00959701742976904,
-0.04046763479709625,
-0.03350170701742172,
0.003930720966309309,
0.08652214705944061,
-0.016620682552456856,
0.02317623980343342,
0.034956201910972595,
0.12474554032087326,
-0.022444814443588257,
0.003917104098945856,
-0.000418117648223415,
-0.028227465227246284,
-0.09805460274219513,
0.0013687609462067485,
-0.006304634269326925,
0.03267193213105202,
0.09648274630308151,
0.10781430453062057,
0.011660689488053322,
0.011466138064861298,
0.07967069745063782,
0.012490217573940754,
0.01548521127551794,
0.04520034417510033,
0.02683129347860813,
0.014498298987746239,
0.04337950423359871,
0.014020146802067757,
-3.1732039218468344e-8,
-0.022761652246117592,
-0.05680851265788078,
0.03477473929524422,
-0.03136245161294937,
0.08293437212705612,
-0.008572530001401901,
0.024915508925914764,
-0.011724748648703098,
-0.07728084921836853,
0.0049008033238351345,
0.04727477952837944,
0.09562400728464127,
0.0013858653837814927,
0.02418862283229828,
-0.007507484406232834,
0.01996053196489811,
-0.039859671145677567,
0.020964495837688446,
-0.026720188558101654,
-0.006113956682384014,
0.052586786448955536,
-0.062236037105321884,
-0.02126123197376728,
0.036760587245225906,
0.05348134785890579,
0.03511520102620125,
-0.05763300508260727,
0.06977114826440811,
-0.014522961340844631,
0.03218016400933266,
0.07495222240686417,
0.048656921833753586,
-0.18213236331939697,
-0.054347142577171326,
-0.047812145203351974,
0.06749502569437027,
0.04853297397494316,
-0.018160151317715645,
0.00989667046815157,
-0.04699811339378357,
0.007363787852227688,
-0.00918623898178339,
-0.008560189045965672,
-0.008800727315247059,
0.057777270674705505,
0.08068155497312546,
0.010414453223347664,
-0.04426741227507591,
-0.017787735909223557,
0.016494618728756905,
-0.07061595469713211,
-0.01413710881024599,
0.055764611810445786,
-0.05135707929730415,
-0.008508438244462013,
-0.06293078511953354,
0.00044720876030623913,
0.03480875492095947,
0.07842361927032471,
0.01877160184085369,
0.0827895849943161,
0.15074066817760468,
0.0013154603075236082,
-0.020507002249360085
] |
sberbank-ai/sbert_large_mt_nlu_ru | 4b9767cce506403f64e69309eab741263479b099 | 2021-09-21T19:47:13.000Z | [
"pytorch",
"tf",
"jax",
"bert",
"feature-extraction",
"ru",
"transformers",
"PyTorch",
"Transformers"
] | feature-extraction | false | sberbank-ai | null | sberbank-ai/sbert_large_mt_nlu_ru | 2,720 | 2 | transformers | ---
language:
- ru
tags:
- PyTorch
- Transformers
---
# BERT large multitask model (cased) for sentence embeddings in the Russian language.
The model is described [in this article](https://habr.com/ru/company/sberdevices/blog/560748/).
Russian SuperGLUE [metrics](https://russiansuperglue.com/login/submit_info/944)
For better quality, use mean token embeddings.
## Usage (HuggingFace Models Repository)
You can use the model directly from the model repository to compute sentence embeddings:
```python
from transformers import AutoTokenizer, AutoModel
import torch
#Mean Pooling - Take attention mask into account for correct averaging
def mean_pooling(model_output, attention_mask):
    token_embeddings = model_output[0]  # First element of model_output contains all token embeddings
    input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
    sum_embeddings = torch.sum(token_embeddings * input_mask_expanded, 1)
    sum_mask = torch.clamp(input_mask_expanded.sum(1), min=1e-9)
    return sum_embeddings / sum_mask
#Sentences we want sentence embeddings for
sentences = ['Привет! Как твои дела?',
             'А правда, что 42 твое любимое число?']
#Load AutoModel from huggingface model repository
tokenizer = AutoTokenizer.from_pretrained("sberbank-ai/sbert_large_mt_nlu_ru")
model = AutoModel.from_pretrained("sberbank-ai/sbert_large_mt_nlu_ru")
#Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, max_length=24, return_tensors='pt')
#Compute token embeddings
with torch.no_grad():
    model_output = model(**encoded_input)
#Perform pooling. In this case, mean pooling
sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
``` | [
-0.05673832446336746,
-0.0912284404039383,
0.001779627869836986,
0.03071325644850731,
-0.008437956683337688,
0.0817280188202858,
0.01519913598895073,
0.08348936587572098,
0.07117880880832672,
-0.07368389517068863,
-0.06392824649810791,
-0.03327194228768349,
0.019874820485711098,
0.12573179602622986,
-0.010759403929114342,
0.037867434322834015,
0.062648706138134,
0.057874538004398346,
-0.11296507716178894,
-0.1257457286119461,
0.12904244661331177,
0.015097618103027344,
0.11522267758846283,
-0.024915851652622223,
0.06718805432319641,
-0.0017357683973386884,
-0.06255345791578293,
-0.05892704799771309,
0.09562015533447266,
0.08012504875659943,
0.01411200501024723,
-0.050026632845401764,
0.016753526404500008,
0.09681250154972076,
0.028754601255059242,
0.056352514773607254,
-0.06759823858737946,
-0.05200020596385002,
0.000983947771601379,
0.004821400158107281,
-0.011407515965402126,
-0.01959744654595852,
-0.04069766774773598,
-0.003607574850320816,
0.08372070640325546,
0.0005672925035469234,
-0.04301231727004051,
-0.0038446709513664246,
-0.028374582529067993,
-0.023404108360409737,
-0.029045525938272476,
-0.012566793709993362,
-0.0008129634661599994,
0.07545031607151031,
0.022561797872185707,
-0.013972815126180649,
0.0008786764810793102,
-0.0756806805729866,
-0.007777527906000614,
-0.10327962785959244,
-0.07767664641141891,
-0.08629832416772842,
0.06304673850536346,
-0.01247837208211422,
-0.05940505489706993,
0.011751766316592693,
-0.03215321525931358,
0.08605124056339264,
-0.0671781450510025,
0.10431011021137238,
-0.043201349675655365,
0.05599786341190338,
-0.05334297567605972,
-0.022164909169077873,
-0.006139383185654879,
-0.025322772562503815,
0.08681944757699966,
-0.061709024012088776,
0.03484358265995979,
-0.055756792426109314,
0.09352495521306992,
-0.029100481420755386,
0.06978891044855118,
0.011319062672555447,
0.0577886700630188,
-0.05401826649904251,
0.08052382618188858,
0.006887997500598431,
0.001761946128681302,
0.006632930599153042,
-0.038351915776729584,
-0.10253815352916718,
0.029843704774975777,
-0.022195888683199883,
0.02204354666173458,
-0.002378798322752118,
-0.000814159691799432,
-0.01701965183019638,
-0.030755313113331795,
0.02013532631099224,
0.013102082535624504,
0.013464776799082756,
0.06997035443782806,
-0.06897515803575516,
-0.017671968787908554,
0.020526299253106117,
-0.021171528846025467,
0.05985018238425255,
-0.019685979932546616,
-0.11776711791753769,
-0.014464030042290688,
-0.00027575032436288893,
0.006044995039701462,
0.020767005160450935,
0.03972754627466202,
-0.003757441183552146,
0.02421504631638527,
0.004893605597317219,
0.03110443241894245,
0.08708803355693817,
0.014758321456611156,
0.06399472802877426,
-0.003510212292894721,
0.05765779688954353,
0.0005344059900380671,
0.0012965953210368752,
0.01621260866522789,
4.3110425123522126e-33,
-0.0004641303385142237,
0.04020030051469803,
-0.01621805876493454,
-0.026375941932201385,
-0.06449074298143387,
0.04899271950125694,
-0.0001551663299323991,
0.009229304268956184,
-0.03984908387064934,
0.013782463036477566,
-0.03995424136519432,
0.03801944479346275,
-0.0454016737639904,
0.022610167041420937,
-0.016251185908913612,
0.02365194447338581,
-0.004078808706253767,
-0.005359706003218889,
0.027586281299591064,
0.07149626314640045,
0.07969485223293304,
0.044784098863601685,
-0.014037134125828743,
-0.041504405438899994,
-0.09169366955757141,
-0.0028892161790281534,
0.07568089663982391,
-0.10142448544502258,
-0.08421649783849716,
-0.003311297157779336,
-0.09712373465299606,
0.00295422226190567,
-0.050894077867269516,
0.05600262060761452,
-0.020677238702774048,
-0.03711255267262459,
0.0055900332517921925,
0.022046754136681557,
0.01815768890082836,
-0.04178792983293533,
0.007450147531926632,
0.017254101112484932,
-0.05266940966248512,
-0.052583977580070496,
-0.033508703112602234,
0.00772485276684165,
0.04013880714774132,
0.022166699171066284,
0.02767271362245083,
0.02406446449458599,
0.04848341643810272,
-0.0007503408123739064,
-0.041944850236177444,
0.048171382397413254,
0.020244982093572617,
0.01106848381459713,
0.058953870087862015,
0.012036650441586971,
0.09155111014842987,
-0.037438321858644485,
-0.018602682277560234,
-0.04892304912209511,
0.0637536272406578,
0.0031506677623838186,
0.0858030766248703,
-0.02594490349292755,
-0.030895167961716652,
0.06292609870433807,
-0.00695063779130578,
0.07409199327230453,
-0.036162640899419785,
0.02875276654958725,
-0.07649409770965576,
0.05119021236896515,
-0.036144863814115524,
-0.047895658761262894,
0.024805191904306412,
-0.07049793004989624,
-0.041134435683488846,
0.09947685152292252,
-0.02079780399799347,
-0.004745298530906439,
0.035279206931591034,
-0.05967902019619942,
-0.0779636949300766,
-0.01821948029100895,
0.03614911809563637,
-0.08352863043546677,
-0.022087212651968002,
-0.04461241140961647,
-0.06493009626865387,
-0.07666247338056564,
0.05241277068853378,
0.00702013960108161,
-0.05254841968417168,
-4.7620495727199845e-33,
0.04193999990820885,
0.03183843567967415,
-0.03917580097913742,
0.09197328239679337,
-0.03852266073226929,
-0.04541614651679993,
0.04172862321138382,
0.11560046672821045,
-0.024116873741149902,
0.003636060981079936,
-0.04249703139066696,
-0.05598566681146622,
0.020789844915270805,
-0.01735265552997589,
0.10540767014026642,
-0.010410218499600887,
0.02510114014148712,
0.06388645619153976,
-0.014540077187120914,
0.04072226956486702,
-0.0007209490868262947,
0.020293258130550385,
-0.05656032636761665,
0.05401216447353363,
-0.03438461199402809,
0.0640416070818901,
0.021286549046635628,
-0.04022587835788727,
-0.03865228593349457,
-0.022192461416125298,
0.008135990239679813,
-0.01722642220556736,
-0.05157850682735443,
0.06694076210260391,
-0.10049854218959808,
0.005160350818186998,
0.06985892355442047,
-0.040727484971284866,
-0.07727383077144623,
0.021742451936006546,
0.08026549965143204,
0.018896158784627914,
-0.07237343490123749,
0.07793432474136353,
-0.024187779054045677,
0.03673899918794632,
-0.14109350740909576,
-0.06388556957244873,
0.0007473118603229523,
-0.05642630159854889,
-0.06455275416374207,
-0.025657275691628456,
-0.13145118951797485,
0.012992982752621174,
-0.08481723815202713,
-0.0834278091788292,
0.019809916615486145,
-0.07719489932060242,
-0.03446098417043686,
-0.05115312710404396,
-0.02885536476969719,
-0.046910759061574936,
0.030538322404026985,
-0.10440399497747421,
0.01124955527484417,
-0.06885728985071182,
0.030521266162395477,
0.0022503265645354986,
0.02920050546526909,
-0.02426689676940441,
0.041353777050971985,
-0.015509523451328278,
0.008379409089684486,
0.06882815062999725,
0.01145500410348177,
0.08075004070997238,
0.01852971315383911,
-0.03740531578660011,
0.03219825401902199,
-0.053822316229343414,
-0.038989026099443436,
-0.05747290328145027,
0.0301983542740345,
0.017565060406923294,
0.0049330140464007854,
0.03462918847799301,
0.08183304965496063,
0.08322647958993912,
0.016507744789123535,
-0.010626641102135181,
-0.046850379556417465,
0.04854103550314903,
-0.012114464305341244,
0.1192285344004631,
0.04074195772409439,
-5.080760701048348e-8,
-0.039995692670345306,
-0.015256224200129509,
-0.016163887456059456,
0.04302435740828514,
-0.08969976752996445,
-0.07225269824266434,
-0.024232491850852966,
0.02137329801917076,
-0.059768449515104294,
-0.0486781969666481,
-0.02588299661874771,
-0.03250659257173538,
-0.06993153691291809,
-0.0188148096203804,
-0.015383879654109478,
0.06447042524814606,
-0.02480398491024971,
0.04674834758043289,
0.024148114025592804,
-0.003373226383700967,
-0.014237457886338234,
0.025670628994703293,
0.035460010170936584,
-0.05062005668878555,
-0.044587068259716034,
-0.0008930828771553934,
0.004581237677484751,
0.028618546202778816,
0.004534335806965828,
-0.018428543582558632,
0.06310147792100906,
0.004381268750876188,
-0.035267312079668045,
-0.010590339079499245,
0.04446592554450035,
0.06933723390102386,
0.02118605747818947,
-0.05417107045650482,
0.047572001814842224,
0.07386882603168488,
0.051688577979803085,
0.04243519529700279,
-0.03888663649559021,
-0.002231970429420471,
0.07485964894294739,
0.02684619091451168,
-0.04197898134589195,
-0.12546148896217346,
0.031725551933050156,
0.03947697952389717,
0.09240264445543289,
-0.045821137726306915,
-0.10359787195920944,
0.08198445290327072,
-0.021203847602009773,
0.02756332792341709,
0.014687229879200459,
-0.021398484706878662,
0.028839709237217903,
-0.012440179474651814,
0.003136757528409362,
0.03174148127436638,
-0.03551945090293884,
-0.010265743359923363
] |
deep-learning-analytics/GrammarCorrector | 6ca90bd771c373a0542d4257a5c34d26cd0d3c59 | 2021-12-23T02:51:34.000Z | [
"pytorch",
"tf",
"t5",
"text2text-generation",
"transformers",
"autotrain_compatible"
] | text2text-generation | false | deep-learning-analytics | null | deep-learning-analytics/GrammarCorrector | 2,719 | 3 | transformers | ## Model description
A T5 model trained for grammar correction. This model corrects grammatical mistakes in input sentences.
### Dataset Description
The T5-base model has been trained on the C4_200M dataset.
### Model in Action 🚀
```
import torch
from transformers import T5Tokenizer, T5ForConditionalGeneration
model_name = 'deep-learning-analytics/GrammarCorrector'
torch_device = 'cuda' if torch.cuda.is_available() else 'cpu'
tokenizer = T5Tokenizer.from_pretrained(model_name)
model = T5ForConditionalGeneration.from_pretrained(model_name).to(torch_device)
def correct_grammar(input_text, num_return_sequences, num_beams=2):
    # num_beams must be >= num_return_sequences for beam search to return that many candidates
    batch = tokenizer([input_text], truncation=True, padding='max_length', max_length=64, return_tensors="pt").to(torch_device)
    translated = model.generate(**batch, max_length=64, num_beams=num_beams, num_return_sequences=num_return_sequences, temperature=1.5)
    tgt_text = tokenizer.batch_decode(translated, skip_special_tokens=True)
    return tgt_text
```
### Example Usage
```
text = 'He are moving here.'
print(correct_grammar(text, num_return_sequences=2))
['He is moving here.', 'He is moving here now.']
```
Another example
```
text = 'Cat drinked milk'
print(correct_grammar(text, num_return_sequences=2))
['Cat drank milk.', 'Cat drink milk.']
```
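Note (a usage sketch that is not part of the original card): with beam search, `num_return_sequences` must not exceed `num_beams`, so pass a larger `num_beams` when requesting more candidates. The input sentence below is a made-up example:
```
text = 'She no went to the market.'
print(correct_grammar(text, num_return_sequences=3, num_beams=5))
```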
Model Developed by [Priya-Dwivedi](https://www.linkedin.com/in/priyanka-dwivedi-6864362) | [
-0.04643706977367401,
-0.05102677643299103,
0.07697544246912003,
0.02398204430937767,
-0.033256106078624725,
-0.011778013780713081,
-0.005936147645115852,
0.028353901579976082,
-0.051870837807655334,
-0.050397079437971115,
0.009023472666740417,
-0.057301104068756104,
-0.024853765964508057,
-0.038100264966487885,
-0.022001653909683228,
0.021749675273895264,
-0.0031797478441148996,
0.04774913564324379,
-0.12200218439102173,
-0.16496706008911133,
0.06947799772024155,
0.0957205593585968,
0.02218322455883026,
0.03625712916254997,
0.03005114570260048,
0.02182384394109249,
-0.04396824166178703,
-0.022950904443860054,
0.03613133728504181,
0.014682549051940441,
-0.04834512621164322,
0.0340178906917572,
-0.06921100616455078,
0.0918792337179184,
0.03428005799651146,
0.0562848225235939,
-0.11349377781152725,
-0.03476131334900856,
0.0026282593607902527,
-0.03500648960471153,
0.017116941511631012,
-0.07975498586893082,
-0.016297638416290283,
0.009323661215603352,
0.06364943832159042,
0.012548469007015228,
0.023395271971821785,
-0.006292637437582016,
-0.027397748082876205,
-0.040798839181661606,
-0.04784134030342102,
0.012843732722103596,
0.03541073203086853,
0.05808921530842781,
-0.04672658443450928,
-0.022762304171919823,
0.06372509151697159,
0.0018424888839945197,
0.05766666308045387,
-0.10597359389066696,
-0.015587075613439083,
-0.051306258887052536,
-0.032549843192100525,
-0.049737751483917236,
-0.0186912938952446,
-0.0013312383089214563,
0.01582559198141098,
-0.002291952958330512,
0.0036345887929201126,
0.03753431513905525,
-0.025844188407063484,
0.052968163043260574,
0.03923298791050911,
0.1179267093539238,
-0.06730271130800247,
0.021589679643511772,
0.13566836714744568,
0.0018840287812054157,
0.10518334805965424,
-0.0680047795176506,
0.03232373297214508,
-0.018118416890501976,
0.06269823014736176,
0.02573259174823761,
0.07713396102190018,
-0.07853864133358002,
0.022089002653956413,
0.05644703283905983,
0.06067109480500221,
-0.013454808853566647,
-0.024620110169053078,
-0.0888199508190155,
0.054534658789634705,
0.011749066412448883,
-0.05758606269955635,
0.02255987375974655,
-0.050716497004032135,
-0.0014737577876076102,
-0.0582149438560009,
0.03337043151259422,
0.0037430792581290007,
0.023110026493668556,
-0.0221558790653944,
0.005348174832761288,
-0.0943884626030922,
0.01756500080227852,
0.026568422093987465,
0.07347280532121658,
0.03891218453645706,
-0.08116303384304047,
0.0341789536178112,
0.02121909335255623,
-0.03390783816576004,
-0.0005750591517426074,
0.08707022666931152,
-0.03162234276533127,
-0.07468842715024948,
0.008468054234981537,
-0.014390590600669384,
0.10342040657997131,
-0.04527166113257408,
0.0466553196310997,
-0.04718971624970436,
0.07079937309026718,
-0.0444549061357975,
-0.09398889541625977,
0.005200190469622612,
6.627318865817005e-33,
-0.022464994341135025,
0.08589746803045273,
-0.023623250424861908,
0.002701597288250923,
-0.003822804195806384,
0.036313168704509735,
-0.011358118616044521,
0.03230892866849899,
-0.03917887061834335,
-0.0006127061787992716,
-0.011932366527616978,
-0.04159440100193024,
-0.049477629363536835,
0.03295448049902916,
-0.050857510417699814,
-0.014538652263581753,
0.01795014925301075,
0.02091035433113575,
0.02651948481798172,
0.03102896735072136,
0.06980597972869873,
0.09783771634101868,
-0.044320784509181976,
-0.07330770045518875,
-0.08063209801912308,
0.043741315603256226,
0.02425859123468399,
-0.060155462473630905,
0.009624859318137169,
0.013338142074644566,
-0.13474105298519135,
-0.035261526703834534,
0.07909714430570602,
0.00446478184312582,
0.03745370730757713,
-0.04717252776026726,
0.0963374599814415,
-0.01123913936316967,
-0.0004891516291536391,
-0.08634483069181442,
0.007893023081123829,
0.0702359601855278,
-0.01839539222419262,
0.006583721376955509,
-0.029510095715522766,
0.0136411739513278,
-0.05013703554868698,
0.01303618960082531,
0.01698271743953228,
0.0019559210631996393,
-0.01181070227175951,
0.041774682700634,
-0.019908791407942772,
-0.02444731630384922,
0.05747018754482269,
0.01981038972735405,
0.04728984832763672,
0.04019290953874588,
0.11843588203191757,
-0.03988520801067352,
0.07230008393526077,
0.018634868785738945,
-0.0014301835326477885,
0.0348852202296257,
0.038200270384550095,
0.03684466704726219,
0.004579092375934124,
0.027201658114790916,
0.05457361787557602,
-0.017309486865997314,
-0.13562847673892975,
-0.004580471199005842,
-0.061898842453956604,
0.06317229568958282,
0.03395536541938782,
-0.06968587636947632,
0.04298980534076691,
-0.08088210225105286,
-0.10387098789215088,
0.022085681557655334,
-0.07925613969564438,
-0.005646614823490381,
0.000769500678870827,
0.008164997212588787,
-0.0399465449154377,
-0.05747586861252785,
0.00528366444632411,
-0.015640461817383766,
-0.004350149538367987,
-0.034697506576776505,
0.028950491920113564,
-0.0833301842212677,
0.006753015797585249,
0.03316068649291992,
-0.04916731268167496,
-7.472613180073466e-33,
-0.003099138615652919,
0.06672916561365128,
-0.07671629637479782,
0.07825170457363129,
-0.0663537010550499,
-0.06324077397584915,
-0.004290123004466295,
0.013432689942419529,
-0.017170405015349388,
-0.06045350432395935,
0.04623633623123169,
-0.0689430758357048,
-0.04834755137562752,
-0.026569973677396774,
0.04925958812236786,
-0.0015208690892904997,
-0.0277375690639019,
0.040511198341846466,
0.019332993775606155,
0.054775480180978775,
0.005380768328905106,
0.07119245827198029,
-0.1616174429655075,
0.06823752820491791,
-0.09323295950889587,
0.0621219128370285,
-0.039344239979982376,
0.03483168035745621,
0.01973727159202099,
0.0033259489573538303,
-0.01621219329535961,
0.00999455712735653,
-0.01741996966302395,
0.0697464570403099,
-0.05989113822579384,
0.004725911188870668,
0.053153619170188904,
-0.05299931764602661,
-0.030636390671133995,
0.06162932142615318,
0.11920510232448578,
0.04546723887324333,
-0.05408322811126709,
0.0833081305027008,
-0.09805203229188919,
0.017837734892964363,
-0.02165648527443409,
-0.018569087609648705,
0.031842559576034546,
0.009071112610399723,
0.06082095205783844,
-0.05020113289356232,
-0.11365815252065659,
-0.052393827587366104,
-0.04676297679543495,
-0.03991081565618515,
0.0843573734164238,
-0.11773190647363663,
-0.04351932555437088,
-0.017622360959649086,
-0.0592668317258358,
-0.01007179543375969,
0.0891665518283844,
-0.0956764966249466,
0.031142927706241608,
-0.004838069900870323,
-0.018949463963508606,
0.09645842760801315,
0.07688009738922119,
-0.05603128671646118,
0.005494433920830488,
0.022221269086003304,
0.04067150130867958,
-0.0019171020248904824,
0.005755539517849684,
0.018618077039718628,
-0.021919086575508118,
-0.03306391090154648,
0.03607086092233658,
-0.02463349886238575,
-0.06132752448320389,
0.028818046674132347,
0.026899250224232674,
0.07033456116914749,
0.005613191984593868,
0.02075899764895439,
0.08051811903715134,
0.0992845967411995,
0.017733687534928322,
0.026733314618468285,
0.006164755206555128,
0.014021608047187328,
0.08444087952375412,
0.06984367966651917,
-0.01761109009385109,
-5.276585213209728e-8,
-0.05400693789124489,
0.029772629961371422,
-0.024009864777326584,
0.07514958083629608,
-0.020462514832615852,
-0.05514601618051529,
-0.016552435234189034,
0.047309596091508865,
0.037351351231336594,
-0.04844781756401062,
-0.016135303303599358,
0.006271883379667997,
-0.04852301999926567,
-0.03566274791955948,
-0.049753595143556595,
0.07929553091526031,
-0.04009658843278885,
0.016319144517183304,
0.004909822717308998,
-0.02494589425623417,
0.006996470503509045,
-0.011499892920255661,
-0.005350829567760229,
-0.038678184151649475,
-0.008275979198515415,
-0.022893117740750313,
-0.04877293109893799,
0.0822291299700737,
-0.03397258371114731,
-0.03317653387784958,
-0.003221397753804922,
-0.015934884548187256,
0.012287952937185764,
0.019813844934105873,
0.0033609529491513968,
0.07876858860254288,
0.04873563349246979,
-0.05743040889501572,
0.03995381295681,
0.027997564524412155,
0.01693040318787098,
0.02803237922489643,
-0.09620351344347,
-0.005652629304677248,
0.011661680415272713,
-0.016126103699207306,
-0.02419699914753437,
-0.10409602522850037,
0.008468548767268658,
-0.04175369441509247,
-0.021794995293021202,
0.056794341653585434,
-0.06359422206878662,
0.0636838972568512,
0.04597153887152672,
-0.030007027089595795,
-0.02541760727763176,
-0.028698714450001717,
-0.013853841461241245,
-0.004231792874634266,
-0.022379130125045776,
0.06735654920339584,
-0.017107075080275536,
-0.10173963755369186
] |
uclanlp/visualbert-vqa-coco-pre | 884aaef1fb6bed1429cae8c3abc314011a3a429f | 2021-05-31T11:34:13.000Z | [
"pytorch",
"visual_bert",
"pretraining",
"transformers"
] | null | false | uclanlp | null | uclanlp/visualbert-vqa-coco-pre | 2,718 | null | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
aware-ai/roberta-large-squadv2 | 59a93e1104aa42295190ecec42bf829fbc83b0bb | 2021-05-20T12:37:36.000Z | [
"pytorch",
"jax",
"roberta",
"question-answering",
"transformers",
"autotrain_compatible"
] | question-answering | false | aware-ai | null | aware-ai/roberta-large-squadv2 | 2,709 | null | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
sentence-transformers/nli-distilroberta-base-v2 | ee9754ad61d9164d693c8e4c458238433037023f | 2022-06-15T21:56:58.000Z | [
"pytorch",
"tf",
"jax",
"roberta",
"feature-extraction",
"arxiv:1908.10084",
"sentence-transformers",
"sentence-similarity",
"transformers",
"license:apache-2.0"
] | sentence-similarity | false | sentence-transformers | null | sentence-transformers/nli-distilroberta-base-v2 | 2,695 | null | sentence-transformers | ---
pipeline_tag: sentence-similarity
license: apache-2.0
tags:
- sentence-transformers
- feature-extraction
- sentence-similarity
- transformers
---
# sentence-transformers/nli-distilroberta-base-v2
This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for tasks like clustering or semantic search.
## Usage (Sentence-Transformers)
Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed:
```
pip install -U sentence-transformers
```
Then you can use the model like this:
```python
from sentence_transformers import SentenceTransformer
sentences = ["This is an example sentence", "Each sentence is converted"]
model = SentenceTransformer('sentence-transformers/nli-distilroberta-base-v2')
embeddings = model.encode(sentences)
print(embeddings)
```
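As an illustrative follow-up (not part of the original card) to the clustering and semantic-search use cases mentioned above, here is a minimal sketch that compares the two example sentences by cosine similarity using plain PyTorch:
```python
from sentence_transformers import SentenceTransformer
import torch

model = SentenceTransformer('sentence-transformers/nli-distilroberta-base-v2')
sentences = ["This is an example sentence", "Each sentence is converted"]

# Encode directly to a torch tensor of shape (2, 768)
embeddings = model.encode(sentences, convert_to_tensor=True)

# Cosine similarity between the two sentence embeddings
similarity = torch.nn.functional.cosine_similarity(embeddings[0], embeddings[1], dim=0)
print("Cosine similarity:", similarity.item())
```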
## Usage (HuggingFace Transformers)
Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling operation on top of the contextualized word embeddings.
```python
from transformers import AutoTokenizer, AutoModel
import torch
#Mean Pooling - Take attention mask into account for correct averaging
def mean_pooling(model_output, attention_mask):
token_embeddings = model_output[0] #First element of model_output contains all token embeddings
input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9)
# Sentences we want sentence embeddings for
sentences = ['This is an example sentence', 'Each sentence is converted']
# Load model from HuggingFace Hub
tokenizer = AutoTokenizer.from_pretrained('sentence-transformers/nli-distilroberta-base-v2')
model = AutoModel.from_pretrained('sentence-transformers/nli-distilroberta-base-v2')
# Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
# Compute token embeddings
with torch.no_grad():
model_output = model(**encoded_input)
# Perform pooling. In this case, mean pooling.
sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
print("Sentence embeddings:")
print(sentence_embeddings)
```
## Evaluation Results
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name=sentence-transformers/nli-distilroberta-base-v2)
## Full Model Architecture
```
SentenceTransformer(
(0): Transformer({'max_seq_length': 75, 'do_lower_case': False}) with Transformer model: RobertaModel
(1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
)
```
## Citing & Authors
This model was trained by [sentence-transformers](https://www.sbert.net/).
If you find this model helpful, feel free to cite our publication [Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks](https://arxiv.org/abs/1908.10084):
```bibtex
@inproceedings{reimers-2019-sentence-bert,
title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
author = "Reimers, Nils and Gurevych, Iryna",
booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
month = "11",
year = "2019",
publisher = "Association for Computational Linguistics",
url = "http://arxiv.org/abs/1908.10084",
}
``` | [
-0.03952135518193245,
-0.06605999171733856,
0.01391987781971693,
0.03528258576989174,
0.017574207857251167,
0.048294275999069214,
-0.05239424481987953,
0.02167707122862339,
0.010287443175911903,
-0.07410968095064163,
0.055216964334249496,
-0.019395560026168823,
0.04224833846092224,
0.04476742073893547,
0.07108130306005478,
0.043563712388277054,
0.011842058971524239,
0.0866447389125824,
-0.077902652323246,
-0.09630604088306427,
0.13731855154037476,
0.1307350993156433,
0.013170056976377964,
0.020401330664753914,
-0.033742647618055344,
0.08935828506946564,
-0.04006404057145119,
-0.0072182295843958855,
0.019646087661385536,
-0.01865938864648342,
0.0464591309428215,
-0.00654616067185998,
-0.03933195397257805,
0.08383375406265259,
0.03628844767808914,
0.07313127815723419,
0.0016974483150988817,
0.018135538324713707,
-0.0021079625003039837,
-0.08399615436792374,
0.000821727211587131,
-0.02578563801944256,
-0.05572818964719772,
-0.0010177892399951816,
0.04034554213285446,
-0.08445016294717789,
-0.11582483351230621,
-0.015263681299984455,
-0.005469662137329578,
-0.01862211525440216,
-0.11125478148460388,
0.031058859080076218,
0.042040519416332245,
0.07651104778051376,
0.009843960404396057,
0.04224708676338196,
0.044990766793489456,
0.01901295594871044,
0.011578352190554142,
-0.1401662528514862,
-0.05974679812788963,
-0.005891023203730583,
-0.00447550555691123,
-0.011633845046162605,
-0.04716765135526657,
-0.025982314720749855,
0.054430652409791946,
0.0009853950468823314,
0.03750348836183548,
-0.0023300109896808863,
-0.08908875286579132,
0.05612954869866371,
-0.05057495832443237,
-0.02909570373594761,
-0.06283006072044373,
0.03199555724859238,
0.09370829164981842,
0.003491716692224145,
0.05142725259065628,
0.026169676333665848,
-0.004670038819313049,
-0.06902678310871124,
0.0614049956202507,
0.09458640217781067,
0.032565150409936905,
-0.06967639923095703,
0.00017211816157214344,
-0.04678454250097275,
-0.01150167640298605,
-0.015773804858326912,
-0.0649656280875206,
-0.11942128092050552,
0.017094815149903297,
-0.05030151829123497,
0.029714791104197502,
0.026420941576361656,
-0.0319029875099659,
-0.011186471208930016,
0.04203988239169121,
0.052967097610235214,
0.03147794306278229,
0.020223893225193024,
0.03353818506002426,
-0.09791003167629242,
-0.060411933809518814,
0.026677925139665604,
-0.03631724789738655,
-0.024222085252404213,
0.05981644615530968,
-0.11848647147417068,
0.014034325256943703,
0.0019872453995049,
-0.016496213153004646,
-0.010407842695713043,
0.06387537717819214,
-0.039313312619924545,
0.03256329149007797,
-0.01944820024073124,
0.008612803183495998,
0.08800140023231506,
-0.025169534608721733,
0.056786756962537766,
-0.016307752579450607,
0.03472025319933891,
-0.01543030422180891,
-0.0369269885122776,
0.017015384510159492,
5.345538979022755e-34,
-0.021867504343390465,
0.0033156343270093203,
-0.017647862434387207,
-0.008553553372621536,
0.02988285757601261,
0.009742781519889832,
0.01593896560370922,
0.06788326054811478,
-0.1081150621175766,
-0.04812933877110481,
-0.05690383166074753,
0.02906685136258602,
-0.025591909885406494,
0.07044854015111923,
0.02316824346780777,
-0.0047302101738750935,
-0.03029473125934601,
-0.041132088750600815,
0.06497599184513092,
0.0030181475449353456,
0.028605693951249123,
0.0369679257273674,
0.014584950171411037,
-0.029241617769002914,
-0.10686367750167847,
-0.023780381307005882,
0.05856483429670334,
-0.08118384331464767,
-0.04879570007324219,
0.005738993175327778,
-0.0590188167989254,
0.022018129006028175,
-0.003555122995749116,
0.00767935998737812,
-0.016190260648727417,
-0.014912557788193226,
0.027080928906798363,
-0.02565646916627884,
-0.04186349734663963,
-0.08173228800296783,
-0.036328643560409546,
0.03243991732597351,
-0.015604446642100811,
-0.0730493813753128,
0.009587862528860569,
0.005287138745188713,
0.02599308267235756,
-0.003619920928031206,
0.09878590703010559,
0.0014234173577278852,
0.08538854867219925,
0.007182209752500057,
-0.0035010077990591526,
-0.04840666800737381,
0.03439551591873169,
0.0023891720920801163,
0.062402255833148956,
0.03217346966266632,
0.12110091745853424,
-0.008450298570096493,
0.024621134623885155,
-0.023859187960624695,
0.04939338192343712,
0.03547064587473869,
0.09213544428348541,
-0.015028001740574837,
0.05304764583706856,
0.029448585584759712,
0.009595328941941261,
0.08013717085123062,
-0.047124966979026794,
0.014599074609577656,
-0.0557619072496891,
0.03501969203352928,
0.029315829277038574,
-0.02251758612692356,
-0.009166844189167023,
-0.08469603955745697,
-0.026411080732941628,
0.08162272721529007,
-0.04754307121038437,
-0.03817069157958031,
0.07210372388362885,
-0.05709932744503021,
-0.002380234422162175,
-0.05142781883478165,
0.0015212454600259662,
-0.030766166746616364,
0.06269652396440506,
-0.04736054316163063,
0.047130219638347626,
-0.00027294663595966995,
0.006560962647199631,
0.033264875411987305,
0.07672043144702911,
-2.4802453257770245e-33,
0.006184906233102083,
0.033245280385017395,
-0.07684178650379181,
0.029584651812911034,
-0.02069205977022648,
-0.05221676453948021,
0.01126489695161581,
0.06105353683233261,
0.005329571198672056,
-0.023748980835080147,
-0.04744822531938553,
-0.025246722623705864,
0.09473369270563126,
-0.06533622741699219,
0.08319451659917831,
0.07918020337820053,
-0.022459521889686584,
0.05896876007318497,
0.015924090519547462,
0.06873665750026703,
0.021671341732144356,
0.07010304927825928,
-0.115235336124897,
0.05829327180981636,
-0.015242449007928371,
-0.026534374803304672,
-0.021375218406319618,
-0.010818731971085072,
-0.025294268503785133,
-0.062282856553792953,
-0.015497838146984577,
-0.0031557476613670588,
-0.05214396119117737,
-0.04111745208501816,
-0.11852377653121948,
0.008152306079864502,
-0.039585769176483154,
-0.042908329516649246,
0.026504570618271828,
0.04604872316122055,
0.022395119071006775,
0.0842503011226654,
-0.0256896261125803,
-0.00013065598614048213,
-0.02329368144273758,
-0.007350931875407696,
-0.07585997879505157,
-0.0831093043088913,
0.03172781690955162,
0.0016452888958156109,
-0.020319795235991478,
0.04107674956321716,
-0.12652292847633362,
0.025467926636338234,
-0.03533687815070152,
-0.06883490830659866,
-0.03901851177215576,
-0.012149017304182053,
-0.10880067199468613,
-0.05910202115774155,
-0.06224154680967331,
-0.018696408718824387,
0.010503659024834633,
-0.0738213062286377,
0.07167435437440872,
-0.0478387251496315,
-0.010665716603398323,
0.04676662012934685,
-0.04229942709207535,
-0.03929821401834488,
-0.013937363401055336,
-0.03147619590163231,
0.02352520264685154,
0.07354854792356491,
0.034241072833538055,
-0.05209009721875191,
-0.007257942575961351,
0.003224965650588274,
-0.03387768194079399,
-0.05673564597964287,
0.04093535244464874,
-0.02867843583226204,
0.016665911301970482,
-0.04444345831871033,
0.03963260352611542,
-0.01720551960170269,
0.026697920635342598,
0.07931564003229141,
-0.01743205077946186,
0.0485745407640934,
0.009346994571387768,
-0.02793685905635357,
-0.01240041945129633,
0.0674782246351242,
0.06053083762526512,
-4.989390589571485e-8,
-0.07877795398235321,
-0.04122622683644295,
-0.0819745659828186,
0.058347828686237335,
-0.09393226355314255,
-0.04670962691307068,
0.07152697443962097,
0.07218937575817108,
-0.0700523853302002,
-0.02642449550330639,
0.023515673354268074,
0.021994179114699364,
-0.09079214930534363,
0.012203073129057884,
-0.02863001450896263,
0.12419690191745758,
-0.01812315359711647,
0.04409477114677429,
0.029508616775274277,
-0.017708860337734222,
0.02539222687482834,
-0.011378944851458073,
-0.022779470309615135,
0.04809587076306343,
-0.010723400861024857,
0.016362862661480904,
-0.03916962072253227,
0.015506284311413765,
-0.0018176068551838398,
-0.00956010352820158,
0.0023829296696931124,
0.02431412972509861,
-0.032158758491277695,
-0.055985189974308014,
0.001170819508843124,
0.0430624783039093,
0.05106455460190773,
-0.05897269770503044,
0.022079631686210632,
0.06550652533769608,
0.05971873179078102,
0.04924032464623451,
-0.12873558700084686,
-0.012373273260891438,
0.11559519171714783,
0.028754115104675293,
-0.0015891720540821552,
-0.05535402521491051,
0.044945575296878815,
0.012420052662491798,
0.0854586511850357,
-0.06847944855690002,
-0.013719924725592136,
-0.013879185542464256,
0.029636293649673462,
0.04189843684434891,
0.010971632786095142,
-0.011550143361091614,
0.06827112287282944,
-0.08296281844377518,
0.07250415533781052,
0.076749287545681,
0.1199251040816307,
-0.10040467977523804
] |
google/multiberts-seed_0 | 1d4bb03ab3a40f4c935a4efbd57917eb9e8d74d5 | 2021-11-05T22:01:32.000Z | [
"pytorch",
"tf",
"bert",
"pretraining",
"en",
"arxiv:2106.16163",
"arxiv:1908.08962",
"transformers",
"multiberts",
"multiberts-seed_0",
"license:apache-2.0"
] | null | false | google | null | google/multiberts-seed_0 | 2,682 | null | transformers | ---
language: en
tags:
- multiberts
- multiberts-seed_0
license: apache-2.0
---
# MultiBERTs - Seed 0
MultiBERTs is a collection of checkpoints and a statistical library to support
robust research on BERT. We provide 25 BERT-base models trained with
similar hyper-parameters as
[the original BERT model](https://github.com/google-research/bert) but
with different random seeds, which causes variations in the initial weights and order of
training instances. The aim is to distinguish findings that apply to a specific
artifact (i.e., a particular instance of the model) from those that apply to the
more general procedure.
We also provide 140 intermediate checkpoints captured
during the course of pre-training (we saved 28 checkpoints for the first 5 runs).
The models were originally released through
[http://goo.gle/multiberts](http://goo.gle/multiberts). We describe them in our
paper
[The MultiBERTs: BERT Reproductions for Robustness Analysis](https://arxiv.org/abs/2106.16163).
This is model #0.
## Model Description
This model is a reproduction of
[BERT-base uncased](https://github.com/google-research/bert), for English: it
is a Transformers model pretrained on a large corpus of English data, using the
Masked Language Modelling (MLM) and the Next Sentence Prediction (NSP)
objectives.
The intended uses, limitations, training data and training procedure are similar
to [BERT-base uncased](https://github.com/google-research/bert). Two major
differences with the original model:
* We pre-trained the MultiBERTs models for 2 million steps using sequence
length 512 (instead of 1 million steps using sequence length 128 then 512).
* We used an alternative version of Wikipedia and Books Corpus, initially
collected for [Turc et al., 2019](https://arxiv.org/abs/1908.08962).
This is a best-effort reproduction, so it is probable that some differences from
the original model have gone unnoticed. The performance of MultiBERTs on GLUE is often comparable to that of the original
BERT, but we found significant differences on the SQuAD dev set (MultiBERTs outperforms the original BERT).
See our [technical report](https://arxiv.org/abs/2106.16163) for more details.
### How to use
Using code from
[BERT-base uncased](https://huggingface.co/bert-base-uncased), here is an example based on
TensorFlow:
```
from transformers import BertTokenizer, TFBertModel
tokenizer = BertTokenizer.from_pretrained('google/multiberts-seed_0')
model = TFBertModel.from_pretrained("google/multiberts-seed_0")
text = "Replace me by any text you'd like."
encoded_input = tokenizer(text, return_tensors='tf')
output = model(encoded_input)
```
PyTorch version:
```
from transformers import BertTokenizer, BertModel
tokenizer = BertTokenizer.from_pretrained('google/multiberts-seed_0')
model = BertModel.from_pretrained("google/multiberts-seed_0")
text = "Replace me by any text you'd like."
encoded_input = tokenizer(text, return_tensors='pt')
output = model(**encoded_input)
```
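Because MultiBERTs is intended for comparisons across seeds, a natural follow-up is to load several of the 25 releases in a loop. The sketch below assumes the sibling checkpoints follow the same naming pattern (`google/multiberts-seed_1`, `google/multiberts-seed_2`, ...), which is an assumption rather than something this card states:
```
from transformers import BertTokenizer, BertModel

text = "Replace me by any text you'd like."
for seed in range(3):
    # Assumed naming pattern for the other seeds; only seed_0 is documented here
    name = f"google/multiberts-seed_{seed}"
    tokenizer = BertTokenizer.from_pretrained(name)
    model = BertModel.from_pretrained(name)
    encoded_input = tokenizer(text, return_tensors='pt')
    output = model(**encoded_input)
    print(name, output.last_hidden_state.shape)
```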
## Citation info
```bibtex
@article{sellam2021multiberts,
title={The MultiBERTs: BERT Reproductions for Robustness Analysis},
author={Thibault Sellam and Steve Yadlowsky and Jason Wei and Naomi Saphra and Alexander D'Amour and Tal Linzen and Jasmijn Bastings and Iulia Turc and Jacob Eisenstein and Dipanjan Das and Ian Tenney and Ellie Pavlick},
journal={arXiv preprint arXiv:2106.16163},
year={2021}
}
```
| [
-0.139724999666214,
-0.06939509510993958,
0.0446694940328598,
0.004350858274847269,
0.05141816288232803,
0.022631969302892685,
0.021370628848671913,
0.019349686801433563,
-0.0030406846199184656,
-0.03055230900645256,
-0.01476133894175291,
0.0068193962797522545,
0.0680687353014946,
-0.02071780152618885,
-0.035873714834451675,
-0.00045752216828987,
0.06423687189817429,
0.02332114428281784,
-0.07087516039609909,
-0.014065024442970753,
0.024756573140621185,
0.008890228345990181,
0.03355111554265022,
-0.01977105438709259,
0.004014437552541494,
-0.06691576540470123,
-0.08772137016057968,
-0.03855792433023453,
0.0951196476817131,
-0.011874619871377945,
0.0497361496090889,
0.00016193624469451606,
-0.07917854189872742,
0.06028635427355766,
0.09486928582191467,
0.06418517976999283,
-0.06514450162649155,
-0.016975462436676025,
0.08058971166610718,
0.03303616866469383,
-0.017540792003273964,
0.014233339577913284,
-0.039548780769109726,
-0.036697085946798325,
0.04183308035135269,
-0.05940241739153862,
-0.03879941254854202,
-0.03979363292455673,
-0.023831738159060478,
-0.050824325531721115,
-0.05194436013698578,
-0.03250034898519516,
0.06467961519956589,
0.041088398545980453,
0.030986860394477844,
-0.015497579239308834,
0.018856268376111984,
-0.07349824905395508,
-0.0038971598260104656,
-0.06635642796754837,
-0.07316827028989792,
-0.0463448129594326,
-0.05133029818534851,
-0.0365210697054863,
-0.031376779079437256,
0.10047493129968643,
0.004170384258031845,
0.040735676884651184,
0.05606687068939209,
0.07317686825990677,
-0.030905133113265038,
0.08319609612226486,
-0.07555664330720901,
0.03855957090854645,
0.004243122413754463,
0.02897929586470127,
0.01759745180606842,
-0.01593230850994587,
-0.03628076612949371,
-0.10608470439910889,
0.00038377009332180023,
-0.04073859751224518,
0.0542619451880455,
-0.012542796321213245,
0.0745331346988678,
-0.03183550015091896,
0.03376166149973869,
0.043615277856588364,
0.10685677081346512,
0.04036381095647812,
-0.05176861584186554,
-0.01922592893242836,
0.09414283186197281,
0.030683478340506554,
0.03738431632518768,
0.04385954514145851,
0.09864357858896255,
-0.06812472641468048,
0.05392800271511078,
0.10112176090478897,
0.026514729484915733,
0.040831658989191055,
0.12212667614221573,
-0.04347091540694237,
0.0836111456155777,
0.022317664697766304,
0.037139274179935455,
-0.0432608388364315,
0.03023890219628811,
-0.08138436079025269,
0.0305978674441576,
0.0032610928174108267,
0.04497808963060379,
-0.012831420637667179,
0.016281690448522568,
-0.011809526942670345,
-0.009932738728821278,
-0.017131196334958076,
-0.017633721232414246,
0.059605374932289124,
0.03362917900085449,
-0.023008806630969048,
0.12385658919811249,
0.018887599930167198,
-0.03559093177318573,
0.022801849991083145,
-0.001287891180254519,
-3.8236746566486226e-34,
0.06765822321176529,
0.025642497465014458,
0.01811622455716133,
0.00022530651767738163,
0.03431843966245651,
0.004541168455034494,
-0.03644575923681259,
-0.03962983936071396,
-0.018109822645783424,
-0.08495447039604187,
-0.033407941460609436,
-0.00211068126372993,
-0.0813618153333664,
0.041459985077381134,
-0.011984606273472309,
0.047263164073228836,
-0.024863827973604202,
0.10494989156723022,
0.018949417397379875,
0.05537491291761398,
0.04455222934484482,
-0.03222509101033211,
-0.045541808009147644,
-0.08684167265892029,
-0.017729735001921654,
0.05389023944735527,
0.07899253815412521,
-0.030416786670684814,
-0.056631844490766525,
0.06618909537792206,
-0.15698477625846863,
0.054759882390499115,
-0.007330979220569134,
0.04976804926991463,
0.0038117067888379097,
0.030321570113301277,
-0.03163031488656998,
-0.01972457952797413,
0.014269113540649414,
-0.02807530201971531,
-0.020107343792915344,
-0.0016786608612164855,
0.03134334832429886,
-0.08701497316360474,
-0.012147365137934685,
-0.06338377296924591,
-0.00869990885257721,
0.009329761378467083,
-0.0005513177020475268,
-0.09809667617082596,
-0.02768637239933014,
0.04154098778963089,
-0.02215149626135826,
-0.06257271766662598,
-0.043304692953825,
-0.02259579859673977,
0.02803737297654152,
0.018330536782741547,
0.03160685673356056,
0.07206191122531891,
0.05649860203266144,
-0.011547732166945934,
-0.05719730257987976,
0.025381555780768394,
0.03399287536740303,
-0.023127200081944466,
-0.05935664102435112,
0.004266512114554644,
0.06683319061994553,
0.03168174624443054,
-0.061237938702106476,
-0.06591444462537766,
0.023448005318641663,
-0.001352011924609542,
0.06569922715425491,
-0.023133883252739906,
0.05933097377419472,
-0.019430354237556458,
-0.06358374655246735,
0.00004431907655089162,
-0.01177315041422844,
-0.018413519486784935,
-0.06270468980073929,
-0.03649928420782089,
-0.11105222254991531,
-0.030613742768764496,
0.03631289303302765,
-0.07903692126274109,
-0.06468355655670166,
-0.04103850573301315,
0.027479322627186775,
-0.05951466038823128,
0.006046222522854805,
-0.008457653224468231,
-0.09193585813045502,
-6.129231879166359e-34,
-0.03828252851963043,
0.0371200330555439,
-0.03674601390957832,
0.0698828175663948,
-0.028363117948174477,
-0.07777800410985947,
0.02285955846309662,
0.10564377158880234,
-0.07300224155187607,
-0.06603823602199554,
-0.06003469601273537,
-0.049459513276815414,
-0.00039733690209686756,
-0.05569752678275108,
0.03527390584349632,
-0.007549795787781477,
0.010264666751027107,
0.012952943332493305,
0.03233085945248604,
0.03825516253709793,
0.1120295375585556,
-0.06106385588645935,
-0.06442804634571075,
0.0775560513138771,
-0.010131546296179295,
0.10085255652666092,
-0.06030958518385887,
0.05186094343662262,
-0.034417685121297836,
-0.012802617624402046,
-0.009169721975922585,
0.03801591694355011,
-0.0033351562451571226,
0.026295136660337448,
-0.005564140155911446,
0.048946548253297806,
0.03262731060385704,
0.009631616063416004,
0.0062014334835112095,
0.04056670889258385,
0.08103121072053909,
0.00043705260031856596,
-0.001615761430002749,
0.01976137049496174,
0.010284646414220333,
0.029992777854204178,
-0.08123389631509781,
-0.036272451281547546,
0.0060634673573076725,
-0.07454215735197067,
-0.03253151848912239,
0.006158201023936272,
-0.08564386516809464,
0.02294168621301651,
-0.049757711589336395,
-0.11001601070165634,
0.0225763451308012,
-0.05073567107319832,
-0.03318404406309128,
0.05405008792877197,
-0.09884333610534668,
0.006055798847228289,
-0.020789919421076775,
0.03601323440670967,
0.009931826964020729,
-0.019542524591088295,
-0.013568934053182602,
-0.005196158774197102,
-0.07512230426073074,
0.011680562980473042,
-0.001195673132315278,
-0.03740844875574112,
0.011057427152991295,
0.013908470049500465,
0.02793479524552822,
0.00010536747868172824,
-0.009126029908657074,
-0.09298434853553772,
-0.010289006866514683,
-0.06527465581893921,
-0.0899207815527916,
-0.058698683977127075,
0.04303745552897453,
0.08985044807195663,
-0.04745751991868019,
0.07545004785060883,
0.06005197390913963,
0.0412466898560524,
0.04557442292571068,
-0.00632956949993968,
-0.029169293120503426,
0.02380593866109848,
-0.07775963097810745,
0.05661843344569206,
0.006245265249162912,
-5.2080434187473656e-8,
-0.03577441722154617,
0.04989481344819069,
-0.025153612717986107,
0.05724523961544037,
0.03639686480164528,
-0.025927532464265823,
-0.07857081294059753,
0.0849781408905983,
-0.01665230095386505,
-0.031038010492920876,
0.06997432559728622,
0.001757152727805078,
-0.047288499772548676,
-0.002327438211068511,
-0.020469307899475098,
0.07658910006284714,
-0.07667038589715958,
0.05725201591849327,
-0.05467695742845535,
-0.01985412836074829,
-0.0006511171231977642,
0.07576986402273178,
0.057053010910749435,
-0.05617401748895645,
0.02897610329091549,
-0.013639167882502079,
0.01718517579138279,
0.09590809047222137,
0.023981058970093727,
0.0190285537391901,
-0.015444576740264893,
0.01615653559565544,
-0.06043890118598938,
-0.008291967213153839,
0.004817035980522633,
0.14428925514221191,
-0.0408344566822052,
-0.0425565242767334,
0.032403212040662766,
0.02159074880182743,
0.049202702939510345,
0.07020870596170425,
-0.08607400208711624,
0.004547369200736284,
0.10750426352024078,
-0.01285285223275423,
-0.08447682857513428,
-0.05523383617401123,
0.017126688733696938,
-0.026036754250526428,
0.010172976180911064,
-0.06339667737483978,
0.011171368882060051,
0.11671920120716095,
-0.04201008379459381,
0.004662587773054838,
-0.0893954187631607,
-0.0584757998585701,
0.07183205336332321,
0.004618267994374037,
0.07429897040128708,
0.018140804022550583,
0.009410819970071316,
0.06652619689702988
] |
taeminlee/kogpt2 | 629b33aaaa679f16abd284f703c650c6f71bc802 | 2021-05-23T13:04:34.000Z | [
"pytorch",
"jax",
"gpt2",
"text-generation",
"transformers"
] | text-generation | false | taeminlee | null | taeminlee/kogpt2 | 2,679 | 1 | transformers | # KoGPT2-Transformers
KoGPT2 on Huggingface Transformers
### KoGPT2-Transformers
- This makes [KoGPT2 (ver 1.0), released by SKT-AI](https://github.com/SKT-AI/KoGPT2), available through [Transformers](https://github.com/huggingface/transformers).
- **SKT-AI has released KoGPT2 2.0: https://huggingface.co/skt/kogpt2-base-v2/**
### Demo
- Everyday conversation chatbot: http://demo.tmkor.com:36200/dialo
- Cosmetics review generation: http://demo.tmkor.com:36200/ctrl
### Example
```python
from transformers import GPT2LMHeadModel, PreTrainedTokenizerFast
model = GPT2LMHeadModel.from_pretrained("taeminlee/kogpt2")
tokenizer = PreTrainedTokenizerFast.from_pretrained("taeminlee/kogpt2")
input_ids = tokenizer.encode("안녕", add_special_tokens=False, return_tensors="pt")
output_sequences = model.generate(input_ids=input_ids, do_sample=True, max_length=100, num_return_sequences=3)
for generated_sequence in output_sequences:
    generated_sequence = generated_sequence.tolist()
    print("GENERATED SEQUENCE : {0}".format(tokenizer.decode(generated_sequence, clean_up_tokenization_spaces=True)))
``` | [
-0.21660123765468597,
-0.0007740537985228002,
0.03466164320707321,
-0.06705863773822784,
0.028753604739904404,
-0.08673252165317535,
-0.05757914111018181,
0.12908117473125458,
-0.06231696158647537,
-0.04151160269975662,
0.06764063984155655,
-0.0005556419491767883,
0.02781246043741703,
-0.013902392238378525,
0.044032882899045944,
0.007479495368897915,
-0.017990369349718094,
0.039159756153821945,
-0.02924155816435814,
-0.018575163558125496,
0.09071951359510422,
-0.0021114072296768427,
0.02069510892033577,
0.010610922239720821,
0.028337284922599792,
-0.0005248918896540999,
0.0007504544919356704,
0.03193104267120361,
-0.010892566293478012,
-0.07078630477190018,
-0.05327609181404114,
0.09623225033283234,
-0.06565605103969574,
-0.0656905397772789,
-0.07467459887266159,
0.0664210394024849,
-0.035891834646463394,
0.016247086226940155,
-0.06392860412597656,
-0.008289992809295654,
-0.010995753109455109,
-0.049771662801504135,
0.01180003210902214,
-0.09281041473150253,
0.04745086655020714,
0.08044110238552094,
-0.10219938308000565,
-0.0849602222442627,
-0.08891088515520096,
-0.07374519854784012,
-0.02202460914850235,
-0.046936459839344025,
0.021734386682510376,
0.02609066851437092,
-0.008632627315819263,
0.016872065141797066,
-0.023792674764990807,
0.011283895000815392,
0.039661820977926254,
-0.0323805958032608,
-0.02468070387840271,
0.038697950541973114,
-0.07722249627113342,
0.028674084693193436,
-0.07411520183086395,
0.012661063112318516,
0.0850914716720581,
0.0039778100326657295,
0.08332207798957825,
-0.021411065012216568,
-0.04807862266898155,
-0.012107420712709427,
-0.0426713302731514,
0.014354594983160496,
0.004671795293688774,
-0.03660197556018829,
0.06706415861845016,
-0.02551007643342018,
-0.06913731247186661,
-0.01976485550403595,
0.05570577457547188,
-0.0050107152201235294,
-0.03831968829035759,
-0.023686131462454796,
-0.055118758231401443,
0.004329955205321312,
-0.030848560854792595,
0.035144250839948654,
-0.03274141997098923,
0.08425168693065643,
0.06701106578111649,
0.07197155058383942,
-0.0001405428774887696,
0.033191826194524765,
-0.04483639448881149,
0.0450308658182621,
-0.05355999618768692,
0.018035639077425003,
-0.04155351594090462,
0.010400594212114811,
-0.02476513758301735,
-0.06306087970733643,
0.005321397911757231,
0.012192126363515854,
-0.01567036844789982,
-0.0605560764670372,
-0.01950998604297638,
-0.07570982724428177,
0.03739310801029205,
0.04295746982097626,
-0.006186546757817268,
-0.06137096509337425,
0.011482839472591877,
-0.08918888121843338,
0.025074169039726257,
-0.003928697668015957,
0.0005131458165124059,
-0.011897648684680462,
-0.024824252352118492,
0.04336169362068176,
0.08591607958078384,
-0.06501761823892593,
-0.03672458231449127,
-0.01216175127774477,
0.01776834949851036,
-0.0085661131888628,
-0.008954946883022785,
7.003845134748739e-33,
0.06522362679243088,
0.010400624014437199,
0.01735251024365425,
0.043116357177495956,
-0.05085663124918938,
0.01315074972808361,
-0.019153688102960587,
0.009194131009280682,
-0.007447684183716774,
0.024606764316558838,
-0.1418832689523697,
0.04952996224164963,
-0.09172815084457397,
0.029049206525087357,
-0.07312469929456711,
-0.0801810473203659,
-0.0017365344101563096,
0.035066328942775726,
-0.054285865277051926,
0.056843869388103485,
0.045562099665403366,
0.049875885248184204,
-0.02059033513069153,
0.019325273111462593,
0.04980187490582466,
0.01303025335073471,
-0.013647973537445068,
-0.11022104322910309,
-0.0038631854113191366,
0.036937810480594635,
-0.021352030336856842,
-0.019293246790766716,
-0.06846053153276443,
0.038957156240940094,
-0.08159779012203217,
-0.023404378443956375,
-0.039349209517240524,
-0.08228381723165512,
-0.06346073001623154,
-0.03686154633760452,
-0.0013412891421467066,
0.027268992736935616,
-0.032528989017009735,
0.01684439741075039,
0.03987123817205429,
-0.0446091927587986,
-0.012655433267354965,
0.038662515580654144,
0.08029724657535553,
0.0355861522257328,
-0.0015274889301508665,
0.03255562484264374,
-0.012289307080209255,
0.05024794861674309,
0.03989916294813156,
0.09317599982023239,
0.09084952622652054,
-0.02467426471412182,
0.026617610827088356,
0.008132697083055973,
-0.0545504130423069,
0.032257769256830215,
0.02947569452226162,
0.020375097170472145,
0.07140173763036728,
-0.0038381803315132856,
-0.06279570609331131,
-0.03340163826942444,
-0.010316182859241962,
0.0005084060248918831,
-0.08776555210351944,
-0.05625765398144722,
-0.013946528546512127,
0.021609382703900337,
0.031566377729177475,
-0.059245530515909195,
0.042742080986499786,
0.043307747691869736,
0.004221596289426088,
-0.012015355750918388,
-0.00888263713568449,
0.0658280998468399,
0.04371399059891701,
-0.0534307099878788,
0.12629464268684387,
-0.0758962407708168,
0.05260920897126198,
-0.03265455737709999,
-0.046260423958301544,
0.04707992821931839,
-0.06645485013723373,
-0.046088747680187225,
-0.06927279382944107,
0.029802855104207993,
0.03917434439063072,
-8.937722155631502e-33,
0.06396423280239105,
0.09513425081968307,
-0.00786434207111597,
0.07729028165340424,
0.013842419721186161,
0.04324411600828171,
0.04958978667855263,
0.08814680576324463,
0.024690907448530197,
0.012309085577726364,
0.06977859884500504,
0.00040271636680699885,
0.017939722165465355,
-0.05315367132425308,
0.12375734746456146,
-0.05820510536432266,
-0.014917059801518917,
-0.03691727668046951,
-0.04322325065732002,
0.05619245395064354,
0.025124985724687576,
0.07401270419359207,
-0.1090233325958252,
-0.005185737274587154,
-0.05484464019536972,
0.008771338500082493,
-0.06095307320356369,
0.06427091360092163,
0.048464756458997726,
0.05156200751662254,
0.03131524473428726,
0.013280654326081276,
-0.0426611490547657,
0.11908333748579025,
0.009404436685144901,
0.005068277940154076,
0.05778088420629501,
0.07577379792928696,
-0.0377936027944088,
-0.02311127260327339,
0.059896405786275864,
0.016484372317790985,
0.042049676179885864,
0.011847470887005329,
-0.02643466554582119,
-0.05446968600153923,
0.010618429630994797,
-0.07695266604423523,
-0.026481036096811295,
-0.043147534132003784,
0.04669537767767906,
0.025470683351159096,
-0.044077467173337936,
-0.00006019787542754784,
0.039823103696107864,
-0.06350483745336533,
-0.003612808184698224,
-0.0355352982878685,
-0.05358748137950897,
-0.03603307530283928,
0.06601127237081528,
-0.09401905536651611,
0.018896130844950676,
-0.10355792194604874,
-0.06773054599761963,
0.003067794255912304,
0.053510405123233795,
-0.009461363777518272,
0.059211429208517075,
-0.004327199887484312,
-0.06990563869476318,
0.01531107909977436,
0.08757752925157547,
-0.08790893852710724,
0.050349075347185135,
-0.016164224594831467,
-0.10120336711406708,
0.024713482707738876,
0.05683482438325882,
-0.02598673291504383,
-0.06313925981521606,
-0.0004368386580608785,
0.06516740471124649,
0.0015849045012146235,
0.000780027883592993,
-0.007599673233926296,
-0.032338958233594894,
0.08715812861919403,
0.07003261893987656,
-0.045829880982637405,
-0.06162489205598831,
0.10293785482645035,
0.05809301137924194,
0.11157836765050888,
0.04701371118426323,
-5.614976572587693e-8,
0.04445565491914749,
0.011579726822674274,
0.03754095733165741,
0.015821706503629684,
-0.07885181903839111,
-0.016178173944354057,
0.021404800936579704,
-0.0024770614691078663,
-0.07749227434396744,
-0.013355366885662079,
0.019212406128644943,
0.05193915218114853,
0.010441080667078495,
0.013453898020088673,
0.027470696717500687,
0.008656823076307774,
-0.05105375871062279,
0.06489371508359909,
-0.09451518207788467,
-0.017153827473521233,
0.011212565004825592,
-0.05325455963611603,
0.00935311894863844,
-0.037667520344257355,
-0.0025554210878908634,
0.020580263808369637,
-0.06879667192697525,
0.02290244773030281,
-0.02324678748846054,
-0.007083327509462833,
-0.03177391365170479,
-0.037425484508275986,
-0.013613426126539707,
0.01697133481502533,
0.006609198171645403,
-0.00235894788056612,
0.03735663741827011,
-0.0075990185141563416,
0.011460638605058193,
0.0571923702955246,
0.06108363717794418,
-0.09445372968912125,
-0.10454102605581284,
0.0801071897149086,
0.06637047231197357,
-0.02099461294710636,
0.0939193144440651,
-0.08957643061876297,
-0.03691234439611435,
0.032297633588314056,
-0.007840624079108238,
0.02384772151708603,
-0.07324407994747162,
-0.022373484447598457,
0.012159807607531548,
0.08496401458978653,
-0.019251439720392227,
-0.03012930601835251,
0.021253811195492744,
-0.01718882843852043,
-0.06120771914720535,
0.026390260085463524,
0.010990474373102188,
0.04174517095088959
] |
ThomasSimonini/t5-end2end-question-generation | 1dda3f93db6cfa1e7fc84e1208d0a49febb5fb5c | 2021-10-10T08:30:38.000Z | [
"pytorch",
"t5",
"text2text-generation",
"dataset:squad",
"transformers",
"generated_from_trainer",
"license:apache-2.0",
"model-index",
"autotrain_compatible"
] | text2text-generation | false | ThomasSimonini | null | ThomasSimonini/t5-end2end-question-generation | 2,676 | 2 | transformers | ---
license: apache-2.0
tags:
- generated_from_trainer
datasets:
- squad
model-index:
- name: t5-end2end-question-generation
results:
- task:
name: Sequence-to-sequence Language Modeling
type: text2text-generation
dataset:
name: squad
type: squad
args: plain_text
---
# t5-end2end-question-generation
This model is a fine-tuned version of [t5-base](https://huggingface.co/t5-base) on the squad dataset to generate questions based on a context.
👉 If you want to learn how to fine-tune the t5 model to do the same, you can follow this [tutorial](https://colab.research.google.com/drive/1z-Zl2hftMrFXabYfmz8o9YZpgYx6sGeW?usp=sharing)
For instance:
```
Context: "Python is an interpreted, high-level, general-purpose programming language. Created by Guido van Rossum and first released in 1991, Python's design philosophy emphasizes code readability with its notable use of significant whitespace."
```
```
Questions:
Who created Python?,
When was Python first released?
What is Python's design philosophy?
```
It achieves the following results on the evaluation set:
- Loss: 1.5691
## Use the Model
```
from transformers import T5ForConditionalGeneration, T5TokenizerFast
hfmodel = T5ForConditionalGeneration.from_pretrained("ThomasSimonini/t5-end2end-question-generation")
tokenizer = T5TokenizerFast.from_pretrained("ThomasSimonini/t5-end2end-question-generation")
text= "The abolition of feudal privileges by the National Constituent Assembly on 4 August 1789 and the Declaration \\nof the Rights of Man and of the Citizen (La Déclaration des Droits de l'Homme et du Citoyen), drafted by Lafayette \\nwith the help of Thomas Jefferson and adopted on 26 August, paved the way to a Constitutional Monarchy \\n(4 September 1791 – 21 September 1792). Despite these dramatic changes, life at the court continued, while the situation \\nin Paris was becoming critical because of bread shortages in September. On 5 October 1789, a crowd from Paris descended upon Versailles \\nand forced the royal family to move to the Tuileries Palace in Paris, where they lived under a form of house arrest under \\nthe watch of Lafayette's Garde Nationale, while the Comte de Provence and his wife were allowed to reside in the \\nPetit Luxembourg, where they remained until they went into exile on 20 June 1791."
def run_model(input_string, **generator_args):
    # Default generation settings; any keyword arguments passed in override them.
    generator_args = {
        "max_length": 256,
        "num_beams": 4,
        "length_penalty": 1.5,
        "no_repeat_ngram_size": 3,
        "early_stopping": True,
        **generator_args,
    }
    input_string = "generate questions: " + input_string + " </s>"
    input_ids = tokenizer.encode(input_string, return_tensors="pt")
    res = hfmodel.generate(input_ids, **generator_args)
    output = tokenizer.batch_decode(res, skip_special_tokens=True)
    output = [item.split("<sep>") for item in output]
    return output
run_model(text)
=> [['When did the National Constituent Assembly abolish feudal privileges?',
' Who drafted the Declaration of the Rights of Man and of the Citizen?',
' When was the Constitutional Monarchy established?',
' What was the name of the Declaration that paved the way to a constitutional monarchy?',
'']]
```
### Training hyperparameters
The following hyperparameters were used during training (a hedged `Seq2SeqTrainingArguments` sketch follows the list):
- learning_rate: 0.0001
- train_batch_size: 4
- eval_batch_size: 4
- seed: 42
- gradient_accumulation_steps: 16
- total_train_batch_size: 64
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 7
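For reference, the set-up above maps onto standard `Seq2SeqTrainingArguments` fields of the Transformers `Trainer` API. The sketch below is a hedged reconstruction of that configuration, not the exact training script used for this model; the `output_dir` value is an assumption, while the 100-step evaluation cadence matches the validation intervals in the results table below.
```
from transformers import Seq2SeqTrainingArguments

# Hedged reconstruction of the hyperparameters listed above
# (output_dir is an assumption, not taken from the original run).
training_args = Seq2SeqTrainingArguments(
    output_dir="t5-end2end-question-generation",
    learning_rate=1e-4,
    per_device_train_batch_size=4,
    per_device_eval_batch_size=4,
    gradient_accumulation_steps=16,  # 4 x 16 = total train batch size of 64
    num_train_epochs=7,
    lr_scheduler_type="linear",
    seed=42,
    evaluation_strategy="steps",
    eval_steps=100,
)
# These arguments would then be handed to a Seq2SeqTrainer together with the
# preprocessed SQuAD train/validation splits and a t5-base model + tokenizer.
```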
### Training results
| Training Loss | Epoch | Step | Validation Loss |
|:-------------:|:-----:|:----:|:---------------:|
| 2.5834 | 0.34 | 100 | 1.9107 |
| 1.9642 | 0.68 | 200 | 1.7227 |
| 1.8526 | 1.02 | 300 | 1.6627 |
| 1.7383 | 1.36 | 400 | 1.6354 |
| 1.7223 | 1.69 | 500 | 1.6154 |
| 1.6871 | 2.03 | 600 | 1.6096 |
| 1.6309 | 2.37 | 700 | 1.6048 |
| 1.6242 | 2.71 | 800 | 1.5923 |
| 1.6226 | 3.05 | 900 | 1.5855 |
| 1.5645 | 3.39 | 1000 | 1.5874 |
| 1.5705 | 3.73 | 1100 | 1.5822 |
| 1.5543 | 4.07 | 1200 | 1.5817 |
| 1.5284 | 4.41 | 1300 | 1.5841 |
| 1.5275 | 4.75 | 1400 | 1.5741 |
| 1.5269 | 5.08 | 1500 | 1.5715 |
| 1.5079 | 5.42 | 1600 | 1.5701 |
| 1.4876 | 5.76 | 1700 | 1.5754 |
| 1.498 | 6.1 | 1800 | 1.5699 |
| 1.4852 | 6.44 | 1900 | 1.5693 |
| 1.4776 | 6.78 | 2000 | 1.5691 |
### Framework versions
- Transformers 4.10.3
- Pytorch 1.9.0+cu102
- Datasets 1.12.1
- Tokenizers 0.10.3
| [
-0.10845770686864853,
-0.01846165768802166,
-0.006255442276597023,
-0.01143213827162981,
0.0044549270533025265,
-0.05255178362131119,
0.0037577457260340452,
0.0010077179176732898,
-0.023738177493214607,
-0.034376129508018494,
0.026906106621026993,
-0.05918915569782257,
0.05425082892179489,
0.006074103992432356,
0.05754746496677399,
0.006476071197539568,
0.09499816596508026,
-0.05930172652006149,
-0.045091163367033005,
-0.08432981371879578,
0.032831694930791855,
0.08602862805128098,
0.06458346545696259,
0.0398104228079319,
-0.06214011088013649,
-0.02634025178849697,
-0.04619714245200157,
0.08338125795125961,
0.047105614095926285,
0.030267978087067604,
-0.017806053161621094,
0.12036309391260147,
-0.026857636868953705,
0.0064145480282604694,
-0.04184906929731369,
0.10466274619102478,
-0.005982490722090006,
0.005441430490463972,
-0.0308122169226408,
0.02833418734371662,
0.006692508701235056,
-0.04839639365673065,
-0.016690390184521675,
0.032555583864450455,
0.038673654198646545,
-0.054027047008275986,
-0.0022911790292710066,
-0.013950907625257969,
0.0034377719275653362,
-0.04849531501531601,
-0.14219586551189423,
-0.07626672089099884,
-0.0028158242348581553,
-0.05192653462290764,
-0.04416288435459137,
-0.008895386941730976,
0.025202898308634758,
-0.003279098542407155,
-0.05199173092842102,
-0.01977389119565487,
-0.055041275918483734,
-0.03258369863033295,
-0.0678853839635849,
0.015174495987594128,
-0.03141402080655098,
-0.014956731349229813,
0.031151169911026955,
0.04542115703225136,
0.06301413476467133,
0.022730406373739243,
-0.1147129014134407,
-0.02813400886952877,
0.012950779870152473,
0.031983163207769394,
-0.03049110434949398,
0.03950057178735733,
0.08749561011791229,
-0.02133401669561863,
0.08008252084255219,
-0.08885347843170166,
-0.0636749416589737,
-0.036937881261110306,
0.029171781614422798,
0.0324346087872982,
0.010187559761106968,
-0.016926689073443413,
0.04780583828687668,
0.0843334048986435,
-0.0012878559064120054,
0.07295937091112137,
-0.019378820434212685,
-0.017871158197522163,
0.047525323927402496,
0.033723149448633194,
-0.04873999208211899,
0.1257402002811432,
0.021551398560404778,
0.008205113001167774,
-0.032767318189144135,
0.04400790110230446,
0.025980781763792038,
-0.033380430191755295,
0.03954235464334488,
0.0019675251096487045,
-0.02976047247648239,
-0.015216678380966187,
0.002223921474069357,
-0.0063744718208909035,
0.04993763566017151,
-0.13117171823978424,
-0.00868704542517662,
0.04378604516386986,
-0.07845371216535568,
-0.05671217292547226,
0.018277235329151154,
-0.02142026275396347,
-0.0345357283949852,
0.03719925135374069,
0.04083987697958946,
0.0932081788778305,
-0.00027002705610357225,
-0.02505117654800415,
-0.03609292581677437,
-0.03914180397987366,
0.006864023860543966,
-0.09031505137681961,
-0.06898621469736099,
2.2049849564327543e-33,
0.0983111783862114,
0.002070738235488534,
0.05660908669233322,
0.08984015882015228,
0.04683653265237808,
0.02628079615533352,
0.004652099218219519,
0.03239614516496658,
-0.061491210013628006,
0.04196245223283768,
0.006297689396888018,
0.006975687574595213,
-0.05124477669596672,
0.01526698935776949,
0.08114643394947052,
-0.02069742977619171,
-0.08723178505897522,
0.04837579280138016,
0.011808170937001705,
0.05900225415825844,
0.02419542521238327,
0.08886134624481201,
-0.03132529556751251,
-0.06851265579462051,
0.05380924418568611,
0.10598070919513702,
-0.01671067625284195,
-0.07125325500965118,
-0.017903033643960953,
0.046941179782152176,
-0.144049733877182,
-0.034404054284095764,
-0.018049420788884163,
0.016072334721684456,
0.028666144236922264,
-0.0032224957831203938,
0.008684893138706684,
0.0027470621280372143,
-0.0014878419460728765,
-0.0695859044790268,
0.048809971660375595,
0.05079056695103645,
0.04573358967900276,
0.020526913926005363,
-0.054129328578710556,
-0.028677860274910927,
0.008617029525339603,
-0.004451016895473003,
-0.0307534858584404,
0.020413151010870934,
0.023299193009734154,
-0.0354396216571331,
0.08202797919511795,
-0.07455377280712128,
-0.0031881858594715595,
0.0263581071048975,
0.019718226045370102,
0.09060052782297134,
0.01059720478951931,
0.02582504227757454,
-0.006899278610944748,
0.006914135534316301,
0.07388784736394882,
0.04358207806944847,
0.09873697906732559,
0.041330114006996155,
-0.0370427742600441,
-0.026198968291282654,
0.1014510840177536,
-0.025958245620131493,
-0.026018373668193817,
0.05346675217151642,
-0.06572599709033966,
0.07833240926265717,
0.0260295607149601,
-0.034601058810949326,
0.029122548177838326,
-0.08360700309276581,
-0.035765111446380615,
0.03468317911028862,
-0.01107574813067913,
0.010611719451844692,
0.022144678980112076,
-0.038713932037353516,
-0.05861081928014755,
0.04536828026175499,
0.06157157942652702,
-0.03331826627254486,
0.0351974293589592,
-0.08119034022092819,
-0.03296414390206337,
-0.06772161275148392,
0.008792830631136894,
-0.03060157224535942,
-0.007307492662221193,
-4.7122005307214576e-33,
0.06827675551176071,
-0.05300582945346832,
-0.08673761039972305,
0.02655666694045067,
0.0010551242157816887,
-0.07495000958442688,
0.040885407477617264,
0.034883394837379456,
-0.026812629774212837,
-0.011937846429646015,
0.059676799923181534,
-0.04440972954034805,
0.013729282654821873,
-0.024815697222948074,
0.042926572263240814,
-0.050157178193330765,
-0.08928540349006653,
-0.10372824221849442,
0.04910911247134209,
0.008881998248398304,
-0.012374857440590858,
0.0024335188791155815,
-0.10373927652835846,
0.007598748430609703,
0.02137383632361889,
-0.0038984010461717844,
-0.11436164379119873,
0.011552771553397179,
0.04487842693924904,
0.010441550984978676,
-0.044544994831085205,
-0.04947775602340698,
0.02234269306063652,
0.010315059684216976,
-0.04011795297265053,
0.056182242929935455,
-0.00016107472765725106,
-0.008154409006237984,
0.0004472954315133393,
0.11485616117715836,
0.08617190271615982,
0.07418768107891083,
0.010310575366020203,
0.04302111640572548,
-0.08196809887886047,
0.02029425837099552,
-0.06981732696294785,
-0.029864951968193054,
-0.001105081057175994,
-0.025133041664958,
0.025215445086359978,
-0.022965598851442337,
-0.10912942886352539,
-0.07823533564805984,
-0.0984305813908577,
-0.06050784885883331,
0.054638639092445374,
-0.009561466984450817,
-0.06439025700092316,
-0.016839969903230667,
-0.04106806963682175,
0.00783727876842022,
-0.017704740166664124,
-0.017888566479086876,
-0.01185007207095623,
-0.0035001107025891542,
-0.11231888085603714,
-0.020189277827739716,
-0.01743662916123867,
-0.07826364785432816,
-0.05368337407708168,
-0.05272963270545006,
0.022813741117715836,
0.03818541392683983,
0.001249266555532813,
-0.021020477637648582,
-0.004981272388249636,
-0.018364397808909416,
0.043953172862529755,
0.013773204758763313,
-0.009183821268379688,
0.0009118104935623705,
0.0035972807090729475,
0.11260132491588593,
-0.03426435589790344,
0.010778052732348442,
0.03410453349351883,
0.16538622975349426,
0.04028148949146271,
0.00002160765325243119,
0.05332256481051445,
0.03138148784637451,
-0.03532986715435982,
0.16568449139595032,
-0.012981795705854893,
-6.058146340137682e-8,
-0.038064371794462204,
0.05443203076720238,
-0.012869740836322308,
0.09633138030767441,
-0.034246936440467834,
0.04894668236374855,
-0.037865251302719116,
-0.013223228044807911,
0.0714130625128746,
0.043538887053728104,
-0.0010849647223949432,
-0.007878860458731651,
-0.061111029237508774,
-0.0006033796817064285,
-0.06342753767967224,
0.08170295506715775,
-0.026128800585865974,
0.01073384378105402,
-0.001832223148085177,
-0.039366237819194794,
0.013281675055623055,
-0.020236246287822723,
-0.0797160416841507,
-0.006566412281244993,
0.04914721101522446,
-0.010180577635765076,
-0.09580483287572861,
0.037718407809734344,
-0.0038424150552600622,
-0.03401537239551544,
-0.01551130972802639,
-0.01266141515225172,
-0.08955421298742294,
0.028305012732744217,
0.04528447240591049,
0.08488266915082932,
-0.08622659742832184,
-0.09558749943971634,
0.02441614866256714,
0.03439202159643173,
0.03954561427235603,
0.012580117210745811,
-0.09253739565610886,
-0.016046511009335518,
0.06281513720750809,
0.010372989811003208,
0.021555209532380104,
-0.0736638605594635,
-0.014759589917957783,
-0.024311568588018417,
-0.03416648507118225,
0.009714381769299507,
-0.060139887034893036,
-0.003640517359599471,
0.07076063752174377,
0.08244775980710983,
0.019201789051294327,
-0.009486441500484943,
0.00038482819218188524,
-0.014603596180677414,
-0.007121359463781118,
0.07033535093069077,
0.03088478557765484,
-0.04458852857351303
] |
dkleczek/bert-base-polish-uncased-v1 | 62be9821055981deafb23f217b68cc41f38cdb76 | 2021-05-19T15:55:32.000Z | [
"pytorch",
"jax",
"bert",
"fill-mask",
"pl",
"transformers",
"autotrain_compatible"
] | fill-mask | false | dkleczek | null | dkleczek/bert-base-polish-uncased-v1 | 2,671 | 2 | transformers | ---
language: pl
thumbnail: https://raw.githubusercontent.com/kldarek/polbert/master/img/polbert.png
---
# Polbert - Polish BERT
The Polish version of the BERT language model is here! It is now available in two variants, cased and uncased, both of which can be downloaded and used via the HuggingFace Transformers library. I recommend using the cased model; more info on the differences and benchmark results is below.

## Cased and uncased variants
* I initially trained the uncased model; the corpus and training details are referenced below. Here are some issues I found after publishing the uncased model:
* Some Polish characters and accents are not tokenized correctly through the BERT tokenizer when applying lowercase. This doesn't impact sequence classification much, but may influence token classification tasks significantly (see the tokenizer comparison sketch after this list).
* I noticed a lot of duplicates in the Open Subtitles dataset, which dominates the training corpus.
* I didn't use Whole Word Masking.
* The cased model improves on the uncased model in the following ways:
* All Polish characters and accents should now be tokenized correctly.
* I removed duplicates from Open Subtitles dataset. The corpus is smaller, but more balanced now.
* The model is trained with Whole Word Masking.
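A quick way to see the tokenization issue described in the list above is to run both tokenizers on text containing Polish diacritics. This is only an illustrative sketch with an arbitrary example sentence, not part of the original training or evaluation code:
```python
from transformers import BertTokenizer

uncased_tokenizer = BertTokenizer.from_pretrained("dkleczek/bert-base-polish-uncased-v1")
cased_tokenizer = BertTokenizer.from_pretrained("dkleczek/bert-base-polish-cased-v1")

text = "Zażółć gęślą jaźń"  # short phrase packed with Polish diacritics
# Per the note above, lowercasing may strip or split some accented characters,
# while the cased vocabulary is expected to keep them intact.
print(uncased_tokenizer.tokenize(text))
print(cased_tokenizer.tokenize(text))
```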
## Pre-training corpora
Below is the list of corpora used, along with the output of the `wc` command (counting lines, words and characters). These corpora were divided into sentences with srxsegmenter (see references), concatenated and tokenized with the HuggingFace BERT Tokenizer.
### Uncased
| Tables | Lines | Words | Characters |
| ------------- |--------------:| -----:| -----:|
| [Polish subset of Open Subtitles](http://opus.nlpl.eu/OpenSubtitles-v2018.php) | 236635408| 1431199601 | 7628097730 |
| [Polish subset of ParaCrawl](http://opus.nlpl.eu/ParaCrawl.php) | 8470950 | 176670885 | 1163505275 |
| [Polish Parliamentary Corpus](http://clip.ipipan.waw.pl/PPC) | 9799859 | 121154785 | 938896963 |
| [Polish Wikipedia - Feb 2020](https://dumps.wikimedia.org/plwiki/latest/plwiki-latest-pages-articles.xml.bz2) | 8014206 | 132067986 | 1015849191 |
| Total | 262920423 | 1861093257 | 10746349159 |
### Cased
| Tables | Lines | Words | Characters |
| ------------- |--------------:| -----:| -----:|
| [Polish subset of Open Subtitles (Deduplicated) ](http://opus.nlpl.eu/OpenSubtitles-v2018.php) | 41998942| 213590656 | 1424873235 |
| [Polish subset of ParaCrawl](http://opus.nlpl.eu/ParaCrawl.php) | 8470950 | 176670885 | 1163505275 |
| [Polish Parliamentary Corpus](http://clip.ipipan.waw.pl/PPC) | 9799859 | 121154785 | 938896963 |
| [Polish Wikipedia - Feb 2020](https://dumps.wikimedia.org/plwiki/latest/plwiki-latest-pages-articles.xml.bz2) | 8014206 | 132067986 | 1015849191 |
| Total | 68283960 | 646479197 | 4543124667 |
## Pre-training details
### Uncased
* Polbert was trained with code provided in Google BERT's github repository (https://github.com/google-research/bert)
* Currently released model follows bert-base-uncased model architecture (12-layer, 768-hidden, 12-heads, 110M parameters)
* Training set-up: in total 1 million training steps:
* 100.000 steps - 128 sequence length, batch size 512, learning rate 1e-4 (10.000 steps warmup)
* 800.000 steps - 128 sequence length, batch size 512, learning rate 5e-5
* 100.000 steps - 512 sequence length, batch size 256, learning rate 2e-5
* The model was trained on a single Google Cloud TPU v3-8
### Cased
* Same approach as uncased model, with the following differences:
* Whole Word Masking
* Training set-up:
* 100.000 steps - 128 sequence length, batch size 2048, learning rate 1e-4 (10.000 steps warmup)
* 100.000 steps - 128 sequence length, batch size 2048, learning rate 5e-5
* 100.000 steps - 512 sequence length, batch size 256, learning rate 2e-5
## Usage
Polbert is released via [HuggingFace Transformers library](https://huggingface.co/transformers/).
For an example of its use as a language model, see [this notebook](/LM_testing.ipynb).
### Uncased
```python
from transformers import *
model = BertForMaskedLM.from_pretrained("dkleczek/bert-base-polish-uncased-v1")
tokenizer = BertTokenizer.from_pretrained("dkleczek/bert-base-polish-uncased-v1")
nlp = pipeline('fill-mask', model=model, tokenizer=tokenizer)
for pred in nlp(f"Adam Mickiewicz wielkim polskim {nlp.tokenizer.mask_token} był."):
    print(pred)
# Output:
# {'sequence': '[CLS] adam mickiewicz wielkim polskim poeta był. [SEP]', 'score': 0.47196975350379944, 'token': 26596}
# {'sequence': '[CLS] adam mickiewicz wielkim polskim bohaterem był. [SEP]', 'score': 0.09127858281135559, 'token': 10953}
# {'sequence': '[CLS] adam mickiewicz wielkim polskim człowiekiem był. [SEP]', 'score': 0.0647173821926117, 'token': 5182}
# {'sequence': '[CLS] adam mickiewicz wielkim polskim pisarzem był. [SEP]', 'score': 0.05232388526201248, 'token': 24293}
# {'sequence': '[CLS] adam mickiewicz wielkim polskim politykiem był. [SEP]', 'score': 0.04554257541894913, 'token': 44095}
```
### Cased
```python
from transformers import *
model = BertForMaskedLM.from_pretrained("dkleczek/bert-base-polish-cased-v1")
tokenizer = BertTokenizer.from_pretrained("dkleczek/bert-base-polish-cased-v1")
nlp = pipeline('fill-mask', model=model, tokenizer=tokenizer)
for pred in nlp(f"Adam Mickiewicz wielkim polskim {nlp.tokenizer.mask_token} był."):
    print(pred)
# Output:
# {'sequence': '[CLS] Adam Mickiewicz wielkim polskim pisarzem był. [SEP]', 'score': 0.5391148328781128, 'token': 37120}
# {'sequence': '[CLS] Adam Mickiewicz wielkim polskim człowiekiem był. [SEP]', 'score': 0.11683262139558792, 'token': 6810}
# {'sequence': '[CLS] Adam Mickiewicz wielkim polskim bohaterem był. [SEP]', 'score': 0.06021466106176376, 'token': 17709}
# {'sequence': '[CLS] Adam Mickiewicz wielkim polskim mistrzem był. [SEP]', 'score': 0.051870670169591904, 'token': 14652}
# {'sequence': '[CLS] Adam Mickiewicz wielkim polskim artystą był. [SEP]', 'score': 0.031787533313035965, 'token': 35680}
```
See the next section for an example usage of Polbert in downstream tasks.
## Evaluation
Thanks to Allegro, we now have the [KLEJ benchmark](https://klejbenchmark.com/leaderboard/), a set of nine evaluation tasks for Polish language understanding. The following results are achieved by running the standard set of evaluation scripts (no tricks!) utilizing both the cased and uncased variants of Polbert.
| Model | Average | NKJP-NER | CDSC-E | CDSC-R | CBD | PolEmo2.0-IN | PolEmo2.0-OUT | DYK | PSC | AR |
| ------------- |--------------:|--------------:|--------------:|--------------:|--------------:|--------------:|--------------:|--------------:|--------------:|--------------:|
| Polbert cased | 81.7 | 93.6 | 93.4 | 93.8 | 52.7 | 87.4 | 71.1 | 59.1 | 98.6 | 85.2 |
| Polbert uncased | 81.4 | 90.1 | 93.9 | 93.5 | 55.0 | 88.1 | 68.8 | 59.4 | 98.8 | 85.4 |
Note how the uncased model performs better than the cased one on some tasks? My guess is that this is because of the oversampling of the Open Subtitles dataset and its similarity to the data in some of these tasks. All these benchmark tasks are sequence classification, so the relative strength of the cased model is not so visible here.
## Bias
The data used to train the model is biased. It may reflect stereotypes related to gender, ethnicity etc. Please be careful when using the model for downstream tasks, and consider these biases and ways to mitigate them.
## Acknowledgements
* I'd like to express my gratitude to Google [TensorFlow Research Cloud (TFRC)](https://www.tensorflow.org/tfrc) for providing the free TPU credits - thank you!
* Also appreciate the help from Timo Möller from [deepset](https://deepset.ai) for sharing tips and scripts based on their experience training German BERT model.
* Big thanks to Allegro for releasing KLEJ Benchmark and specifically to Piotr Rybak for help with the evaluation and pointing out some issues with the tokenization.
* Finally, thanks to Rachel Thomas, Jeremy Howard and Sylvain Gugger from [fastai](https://www.fast.ai) for their NLP and Deep Learning courses!
## Author
Darek Kłeczek - contact me on Twitter [@dk21](https://twitter.com/dk21)
## References
* https://github.com/google-research/bert
* https://github.com/narusemotoki/srx_segmenter
* SRX rules file for sentence splitting in Polish, written by Marcin Miłkowski: https://raw.githubusercontent.com/languagetool-org/languagetool/master/languagetool-core/src/main/resources/org/languagetool/resource/segment.srx
* [KLEJ benchmark](https://klejbenchmark.com/leaderboard/) | [
-0.161208376288414,
-0.05478787049651146,
0.06631886214017868,
-0.04026399180293083,
0.01315255742520094,
0.04397734999656677,
0.015606059692800045,
0.029957957565784454,
-0.011135702952742577,
-0.026144275441765785,
0.01582634262740612,
0.009150180034339428,
0.029164224863052368,
0.05484208092093468,
0.03167887404561043,
0.02562440000474453,
0.05507156625390053,
0.0073635936714708805,
-0.06720786541700363,
-0.00977412611246109,
0.0243659857660532,
0.03909344598650932,
0.0732809528708458,
-0.04241395369172096,
0.04670143499970436,
0.007770105730742216,
0.014183915220201015,
-0.03940412402153015,
0.0751856118440628,
0.03801838308572769,
-0.039266858249902725,
-0.009006764739751816,
0.007765964604914188,
0.03223419561982155,
0.06013648957014084,
0.05857815966010094,
-0.002305729081854224,
-0.006944142747670412,
0.034112587571144104,
0.05040867626667023,
-0.017573483288288116,
-0.02607056498527527,
-0.020581476390361786,
0.03689293563365936,
0.060748666524887085,
0.00788837019354105,
-0.052327267825603485,
0.01647719368338585,
-0.04486074298620224,
-0.04020833969116211,
-0.08724801242351532,
-0.04160910099744797,
0.07392818480730057,
0.022618884220719337,
0.008196271024644375,
-0.013918336480855942,
0.023287028074264526,
-0.012806743383407593,
-0.0024988139048218727,
-0.11149826645851135,
-0.11880658566951752,
-0.06779272109270096,
-0.03129347786307335,
-0.03572309762239456,
-0.02421477809548378,
0.05280967429280281,
0.03151491656899452,
0.0052372757345438,
0.02197670005261898,
0.019644780084490776,
0.0475316196680069,
0.05112294480204582,
-0.03787042945623398,
0.04601462557911873,
-0.07020757347345352,
-0.03597087040543556,
0.076656274497509,
-0.03965252265334129,
0.03552211448550224,
-0.08755577355623245,
0.04040520265698433,
-0.04401027038693428,
0.029206659644842148,
-0.013320312835276127,
0.05667416751384735,
-0.03167035058140755,
0.07506732642650604,
-0.039857201278209686,
-0.03908447176218033,
0.06286350637674332,
0.031596507877111435,
-0.015101758763194084,
0.05962219089269638,
-0.033583469688892365,
-0.04134970158338547,
-0.01368663925677538,
0.05268881097435951,
0.08835650235414505,
-0.009143352508544922,
0.08676359057426453,
-0.015178270637989044,
-0.03098730742931366,
0.04567832872271538,
-0.07369278371334076,
0.0151359923183918,
-0.008580440655350685,
-0.00550851272419095,
-0.042263127863407135,
0.030192911624908447,
-0.08301808685064316,
-0.016223249956965446,
-0.029500076547265053,
-0.07022691518068314,
-0.0016311851795762777,
0.012990768067538738,
-0.055452555418014526,
0.06656332314014435,
-0.014751999638974667,
-0.006445246282964945,
0.0659310594201088,
-0.0025854106061160564,
-0.005310002714395523,
0.011829471215605736,
0.05268903821706772,
-0.04432934895157814,
-0.001671677571721375,
-0.05442158132791519,
4.472708250420802e-33,
0.004743744153529406,
0.030330399051308632,
-0.04748137295246124,
-0.025205139070749283,
-0.020677270367741585,
-0.009810887277126312,
-0.009325406514108181,
0.03131703659892082,
-0.08230756968259811,
-0.03768536075949669,
-0.042793381959199905,
0.035334523767232895,
-0.0630762055516243,
0.03542965278029442,
-0.09029757976531982,
0.007297034841030836,
0.0057015493512153625,
0.02339518629014492,
-0.01786060445010662,
0.032466690987348557,
0.1024022176861763,
0.09840955585241318,
0.0465896874666214,
-0.009040738455951214,
-0.09212075918912888,
0.020319703966379166,
0.11521666496992111,
-0.09939713776111603,
0.035756949335336685,
0.03717390447854996,
-0.09052087366580963,
0.015566002577543259,
-0.007145894691348076,
0.08063239604234695,
0.02636413276195526,
0.025380996987223625,
0.020484071224927902,
-0.05235493928194046,
0.021659404039382935,
-0.04536987841129303,
-0.02875290811061859,
0.014773881062865257,
-0.0022489032708108425,
-0.04192923754453659,
-0.004402365535497665,
0.007078788708895445,
0.013942885212600231,
-0.03943890705704689,
0.007745719514787197,
-0.030935615301132202,
0.07710495591163635,
0.048861533403396606,
-0.06183398887515068,
0.03180154412984848,
-0.04022755101323128,
-0.020081456750631332,
0.07202312350273132,
0.0009526984649710357,
0.027290422469377518,
-0.013407581485807896,
0.02292233146727085,
0.013646178878843784,
0.11014102399349213,
0.01068547461181879,
0.0637519359588623,
-0.04247266799211502,
-0.09719575196504593,
0.03788501024246216,
0.008355990052223206,
-0.04771758243441582,
0.0291197057813406,
-0.030928557738661766,
-0.014220643788576126,
0.10616471618413925,
0.007771109696477652,
-0.048700056970119476,
0.0734558030962944,
-0.04071830213069916,
-0.054835833609104156,
-0.02821553871035576,
-0.0675797164440155,
0.04909505322575569,
-0.02043818309903145,
-0.06217779591679573,
-0.10502663999795914,
-0.04151015728712082,
0.0680084079504013,
-0.06218233332037926,
0.011034827679395676,
-0.000831186305731535,
0.02231215126812458,
-0.08438967913389206,
-0.013987247832119465,
0.025536395609378815,
-0.0449022576212883,
-4.944591723117193e-33,
0.00020307594968471676,
-0.01835745945572853,
-0.0804704874753952,
-0.01825454644858837,
-0.0889275074005127,
-0.06617908924818039,
0.03865091875195503,
0.20751436054706573,
0.007784973829984665,
-0.01723441854119301,
0.038879506289958954,
-0.067324697971344,
-0.013454343192279339,
-0.026513852179050446,
0.02200099267065525,
0.009021810255944729,
-0.017998026683926582,
0.07413040101528168,
0.014772938564419746,
0.02630619704723358,
0.03235188126564026,
0.007249788846820593,
-0.09766889363527298,
0.10728228837251663,
-0.05813691392540932,
0.0697675570845604,
-0.041815657168626785,
0.006439042743295431,
0.05013427138328552,
0.04018400236964226,
-0.0019053969299420714,
0.03122984617948532,
-0.06534449011087418,
0.03481912240386009,
-0.06107759848237038,
0.046037834137678146,
0.03921019658446312,
-0.02434745989739895,
-0.001063661533407867,
0.06530313938856125,
0.006362696178257465,
-0.033441465348005295,
-0.052847933024168015,
0.0737692192196846,
0.05410642921924591,
-0.06557860225439072,
-0.045584678649902344,
-0.03251136094331741,
-0.012843787670135498,
-0.049584731459617615,
0.015052598901093006,
0.004428518004715443,
-0.05267585441470146,
-0.01842951588332653,
-0.062357328832149506,
-0.09850236028432846,
0.028899231925606728,
-0.07459437102079391,
-0.042546339333057404,
0.01593438908457756,
0.02497565932571888,
0.0018585172947496176,
-0.02719481848180294,
-0.058328282088041306,
-0.026276152580976486,
-0.0847691148519516,
0.02472568117082119,
0.030024686828255653,
0.03843049332499504,
-0.056902673095464706,
-0.028505835682153702,
-0.03839410841464996,
0.03270314261317253,
0.026811828836798668,
0.03194880485534668,
0.023043353110551834,
0.03533526510000229,
-0.015117577277123928,
-0.017912624403834343,
-0.02601127326488495,
-0.06551649421453476,
-0.06104937568306923,
0.023656688630580902,
0.09131453186273575,
-0.019488414749503136,
0.11499852687120438,
0.07701002806425095,
0.03352275863289833,
0.0069388290867209435,
0.013828977011144161,
-0.018848685547709465,
0.07422477751970291,
0.017234401777386665,
0.14145232737064362,
0.07428596913814545,
-5.969307892428333e-8,
-0.0068131061270833015,
0.013295616954565048,
-0.010110524483025074,
0.008695971220731735,
-0.07477376610040665,
-0.07683315873146057,
-0.13864752650260925,
-0.03695681318640709,
-0.035735711455345154,
-0.0463755764067173,
-0.03831152990460396,
0.04888720437884331,
-0.14229711890220642,
-0.05717720463871956,
-0.0566520020365715,
0.10971549898386002,
-0.03663583844900131,
0.08525435626506805,
-0.013795061968266964,
0.026500774547457695,
-0.02427602745592594,
0.046253349632024765,
0.023140691220760345,
-0.04853134974837303,
-0.05150627717375755,
-0.02077532932162285,
-0.03614094853401184,
0.032820507884025574,
-0.057132329791784286,
-0.04026065021753311,
-0.015092547982931137,
0.04311777651309967,
-0.04903167113661766,
-0.042862724512815475,
0.06064838543534279,
0.08988889306783676,
0.020129980519413948,
-0.059218358248472214,
-0.023388361558318138,
0.0864352360367775,
0.08170567452907562,
-0.04133250191807747,
-0.04490767791867256,
-0.00770601537078619,
0.06371001899242401,
0.019386880099773407,
0.01876404508948326,
-0.07795371860265732,
0.05046786367893219,
0.05996367335319519,
0.043609701097011566,
-0.05665937438607216,
-0.12265179306268692,
0.05457497760653496,
-0.011973432265222073,
0.05207597464323044,
-0.06914042681455612,
0.045515093952417374,
0.066350057721138,
0.045407604426145554,
0.008384830318391323,
0.03058009408414364,
0.023494284600019455,
0.04243197292089462
] |
tuner007/t5_abs_qa | c896608015dba727b3fe0ae8a397fa1a4286c72e | 2020-12-11T22:02:51.000Z | [
"pytorch",
"t5",
"text2text-generation",
"transformers",
"autotrain_compatible"
] | text2text-generation | false | tuner007 | null | tuner007/t5_abs_qa | 2,664 | 1 | transformers | # T5 for abstractive question-answering
This is a T5-base model fine-tuned for abstractive QA using a text-to-text approach.
## Model training
This model was trained on a Colab TPU with 35GB RAM for 2 epochs.
## Model in Action 🚀
```
import torch
from transformers import AutoModelWithLMHead, AutoTokenizer
tokenizer = AutoTokenizer.from_pretrained("tuner007/t5_abs_qa")
model = AutoModelWithLMHead.from_pretrained("tuner007/t5_abs_qa")
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model = model.to(device)
def get_answer(question, context):
    input_text = "context: %s <question for context: %s </s>" % (context, question)
    features = tokenizer([input_text], return_tensors='pt')
    out = model.generate(input_ids=features['input_ids'].to(device), attention_mask=features['attention_mask'].to(device))
    return tokenizer.decode(out[0])
```
#### Example 1: Answer available
```
context = "In Norse mythology, Valhalla is a majestic, enormous hall located in Asgard, ruled over by the god Odin."
question = "What is Valhalla?"
get_answer(question, context)
# output: 'It is a hall of worship ruled by Odin.'
```
#### Example 2: Answer not available
```
context = "In Norse mythology, Valhalla is a majestic, enormous hall located in Asgard, ruled over by the god Odin."
question = "What is Asgard?"
get_answer(question, context)
# output: 'No answer available in context.'
```
> Created by Arpit Rajauria
[](https://twitter.com/arpit_rajauria)
| [
-0.041961297392845154,
0.006734407506883144,
-0.02675347588956356,
0.06953374296426773,
-0.021325424313545227,
-0.007930264808237553,
0.040392570197582245,
0.08854028582572937,
0.00019136437913402915,
-0.08316104859113693,
0.0015916485572233796,
-0.10041571408510208,
-0.04735827073454857,
0.025709738954901695,
0.014772188849747181,
0.05674653872847557,
0.052075035870075226,
-0.07345450669527054,
-0.13043484091758728,
-0.1504364162683487,
0.08310317993164062,
0.055136993527412415,
0.08192183077335358,
0.06570471078157425,
-0.02567438594996929,
-0.002602300839498639,
-0.017324170097708702,
-0.009832990355789661,
0.04145749285817146,
0.03623358905315399,
-0.016715435311198235,
0.03816010057926178,
-0.03611062094569206,
0.08438180387020111,
0.060453902930021286,
0.032929759472608566,
-0.12106341123580933,
-0.026357386261224747,
-0.05057643726468086,
-0.042740605771541595,
0.012311380356550217,
-0.09001970291137695,
-0.05468720942735672,
0.012008379213511944,
0.09899462759494781,
-0.03263533487915993,
0.006325494963675737,
0.03149859607219696,
0.02557074837386608,
-0.09840045124292374,
-0.04732290282845497,
-0.0045031653717160225,
-0.015486925840377808,
0.03710529953241348,
-0.00871011707931757,
0.006112528033554554,
0.027433115988969803,
-0.08011674135923386,
0.02251100353896618,
-0.0780116468667984,
-0.03708159551024437,
-0.04639322683215141,
-0.025597253814339638,
-0.0017011633608490229,
-0.011150436475872993,
-0.02868194691836834,
-0.005956222768872976,
0.008448771201074123,
-0.006865928880870342,
-0.0028841954190284014,
-0.03237801417708397,
0.03666922077536583,
0.02379772625863552,
0.07068782299757004,
0.004773357417434454,
-0.010019821114838123,
0.05949883535504341,
-0.019641151651740074,
0.03523413464426994,
-0.03450017422437668,
0.027989191934466362,
-0.08475734293460846,
0.07563993334770203,
0.07020855695009232,
0.09677872806787491,
-0.03438004478812218,
0.03473925217986107,
0.07941482961177826,
0.011477778665721416,
-0.04010031372308731,
-0.06354346871376038,
-0.06308728456497192,
0.036568429321050644,
-0.024650247767567635,
0.027483247220516205,
0.047757163643836975,
0.022351739928126335,
-0.06639140099287033,
-0.08224691450595856,
0.05746980756521225,
0.0554690808057785,
0.022117218002676964,
-0.04864879325032234,
0.018353793770074844,
-0.03809332102537155,
0.04060589149594307,
0.007584983482956886,
0.0708058699965477,
0.013454644940793514,
-0.053408440202474594,
0.03840787708759308,
0.023788312450051308,
0.014210252091288567,
-0.012415280565619469,
0.05269486829638481,
-0.051180996000766754,
-0.011725385673344135,
0.08349836617708206,
-0.011987625621259212,
0.09047481417655945,
0.00026548156165517867,
0.03403670713305473,
-0.07358615845441818,
0.04872516170144081,
0.002329619135707617,
-0.040472693741321564,
-0.024960026144981384,
9.62826116923427e-33,
0.0023229222279042006,
0.06693586707115173,
-0.00016631559992674738,
0.02853057160973549,
-0.03364938870072365,
0.05437619611620903,
0.014516518451273441,
0.06771703064441681,
0.014012595638632774,
0.03003217838704586,
-0.01723315939307213,
0.018064063042402267,
-0.04785885289311409,
-0.020067712292075157,
-0.023740718141198158,
-0.03344688564538956,
-0.04514216259121895,
0.007371462881565094,
0.013565081171691418,
-0.0237076785415411,
0.07808566838502884,
0.03754789009690285,
-0.07317399978637695,
0.010564973577857018,
-0.05033761262893677,
0.020925134420394897,
-0.0020567059982568026,
-0.0692734643816948,
-0.06756937503814697,
-0.005849510431289673,
-0.16267184913158417,
-0.040999945253133774,
-0.02715335600078106,
-0.046159952878952026,
0.018376588821411133,
-0.014239521697163582,
0.06410441547632217,
-0.0086907260119915,
-0.03632504492998123,
-0.0892585963010788,
0.030343616381287575,
0.07096781581640244,
0.037077646702528,
-0.07603045552968979,
-0.0738871842622757,
0.018019553273916245,
-0.07265225797891617,
0.0036921517457813025,
-0.03184375539422035,
-0.00022394639381673187,
-0.014685324393212795,
-0.021097393706440926,
-0.037005193531513214,
-0.03483663499355316,
0.07759507745504379,
-0.012242734432220459,
0.05271408334374428,
0.05434953421354294,
0.11808755248785019,
0.01771697774529457,
0.023620784282684326,
-0.0064704641699790955,
-0.024736016988754272,
0.05553675442934036,
0.00721337553113699,
0.01296776719391346,
-0.008875075727701187,
0.04585891216993332,
0.02901054173707962,
0.036528125405311584,
-0.07872127741575241,
0.004479263909161091,
-0.0434018112719059,
0.000950802699662745,
0.025721194222569466,
-0.06107930466532707,
-0.006892715115100145,
-0.10986149311065674,
-0.07567018270492554,
0.021409738808870316,
-0.005632203537970781,
-0.005269620101898909,
0.029703006148338318,
-0.05367334932088852,
-0.013784457929432392,
-0.04362819343805313,
0.037068791687488556,
-0.05060499906539917,
-0.029189033433794975,
-0.06538453698158264,
-0.07738251984119415,
-0.024645181372761726,
0.054374080151319504,
0.00514816353097558,
-0.010198499076068401,
-9.331499538845522e-33,
0.02147955633699894,
-0.0013807447394356132,
-0.051982905715703964,
0.044211000204086304,
0.04290517792105675,
-0.04081743210554123,
0.012666490860283375,
0.01928899623453617,
0.004836370702832937,
-0.044384025037288666,
0.01396376732736826,
-0.02761872299015522,
-0.0019278789404779673,
-0.0036449148319661617,
0.051409415900707245,
0.010839898139238358,
-0.0633895993232727,
-0.0016826859209686518,
0.015061750076711178,
0.10206961631774902,
-0.12812650203704834,
0.08934555947780609,
-0.13617897033691406,
-0.024345966055989265,
-0.08461997658014297,
0.05636889860033989,
-0.013208471238613129,
0.06640602648258209,
0.014853060245513916,
-0.01252695731818676,
-0.016108769923448563,
-0.05474025011062622,
-0.028345517814159393,
0.04882343113422394,
-0.09164490550756454,
0.07965047657489777,
0.0763457641005516,
-0.07444416731595993,
-0.021973375231027603,
0.10703030228614807,
0.15766441822052002,
0.01951676234602928,
-0.06923806667327881,
0.060199759900569916,
-0.11064605414867401,
0.027886729687452316,
-0.042763348668813705,
-0.05845949426293373,
-0.008604001253843307,
0.03088238835334778,
0.06495324522256851,
-0.06689183413982391,
-0.07823637127876282,
-0.01316576823592186,
-0.07853787392377853,
-0.011575454846024513,
0.05610065534710884,
-0.0296737402677536,
-0.029911059886217117,
-0.018234489485621452,
0.027985135093331337,
-0.03602626919746399,
0.07032155990600586,
-0.04012368246912956,
0.0401216521859169,
-0.03932172432541847,
-0.02386685274541378,
0.08195800334215164,
0.05545920878648758,
-0.012935703620314598,
-0.03029741160571575,
0.049249134957790375,
0.07377537339925766,
0.02369830384850502,
0.038584593683481216,
0.07545318454504013,
-0.03951452672481537,
-0.04821697995066643,
0.046515293419361115,
-0.05496969446539879,
-0.06401377171278,
0.00243474286980927,
0.005287951789796352,
0.08943919837474823,
-0.028635017573833466,
-0.017105476930737495,
0.07799387723207474,
0.11564189940690994,
0.017879178747534752,
0.0006533995037898421,
0.0373358279466629,
-0.021631289273500443,
0.057891596108675,
0.1298300176858902,
-0.021283715963363647,
-5.714494832886885e-8,
-0.028673656284809113,
0.04246547818183899,
-0.03825046494603157,
0.11481077969074249,
-0.014149020425975323,
-0.030467012897133827,
0.0015445968601852655,
-0.01822831481695175,
0.016176678240299225,
-0.012953215278685093,
-0.014057462103664875,
-0.019394978880882263,
-0.0024120754096657038,
-0.00222231587395072,
0.006009561941027641,
0.10079266130924225,
-0.002506100106984377,
0.01453709788620472,
0.007720933761447668,
-0.11568412184715271,
0.0698162093758583,
0.01656518504023552,
-0.02520308457314968,
0.00903500709682703,
-0.03643501549959183,
0.020744482055306435,
-0.06181520223617554,
0.09538454562425613,
-0.014520464465022087,
-0.0228037778288126,
0.03395901620388031,
-0.03206411004066467,
-0.04080716893076897,
0.0224399883300066,
0.03589167818427086,
0.032333891838788986,
0.03404918685555458,
-0.05074609816074371,
0.04179654270410538,
0.0470941886305809,
0.003610480111092329,
0.03124496340751648,
-0.129177525639534,
-0.014634491875767708,
0.014174890704452991,
0.007227088790386915,
-0.06078460067510605,
-0.1061621829867363,
-0.001743694068863988,
0.04731407389044762,
-0.02643376775085926,
0.005416153930127621,
-0.043591804802417755,
0.05462373048067093,
0.03317658603191376,
-0.04158863052725792,
0.024764416739344597,
0.017938906326889992,
-0.07293228805065155,
-0.0246609877794981,
0.028913158923387527,
0.03641355410218239,
-0.021747248247265816,
-0.0400991253554821
] |
monologg/koelectra-base-v3-generator | 502d48ff8cac576e1324c8c2ce51ab2c866417c5 | 2021-10-20T16:53:23.000Z | [
"pytorch",
"electra",
"fill-mask",
"ko",
"transformers",
"korean",
"license:apache-2.0",
"autotrain_compatible"
] | fill-mask | false | monologg | null | monologg/koelectra-base-v3-generator | 2,649 | 1 | transformers | ---
language: ko
license: apache-2.0
tags:
- korean
---
# KoELECTRA v3 (Base Generator)
Pretrained ELECTRA Language Model for Korean (`koelectra-base-v3-generator`)
For more detail, please see [original repository](https://github.com/monologg/KoELECTRA/blob/master/README_EN.md).
## Usage
### Load model and tokenizer
```python
>>> from transformers import ElectraModel, ElectraTokenizer
>>> model = ElectraModel.from_pretrained("monologg/koelectra-base-v3-generator")
>>> tokenizer = ElectraTokenizer.from_pretrained("monologg/koelectra-base-v3-generator")
```
### Tokenizer example
```python
>>> from transformers import ElectraTokenizer
>>> tokenizer = ElectraTokenizer.from_pretrained("monologg/koelectra-base-v3-generator")
>>> tokenizer.tokenize("[CLS] 한국어 ELECTRA를 공유합니다. [SEP]")
['[CLS]', '한국어', 'EL', '##EC', '##TRA', '##를', '공유', '##합니다', '.', '[SEP]']
>>> tokenizer.convert_tokens_to_ids(['[CLS]', '한국어', 'EL', '##EC', '##TRA', '##를', '공유', '##합니다', '.', '[SEP]'])
[2, 11229, 29173, 13352, 25541, 4110, 7824, 17788, 18, 3]
```
## Example using ElectraForMaskedLM
```python
from transformers import pipeline
fill_mask = pipeline(
"fill-mask",
model="monologg/koelectra-base-v3-generator",
tokenizer="monologg/koelectra-base-v3-generator"
)
print(fill_mask("나는 {} 밥을 먹었다.".format(fill_mask.tokenizer.mask_token)))
```
| [
-0.1014108881354332,
0.025687290355563164,
-0.037777144461870193,
-0.018212297931313515,
0.002907725516706705,
0.02412722446024418,
0.009197589010000229,
-0.018571972846984863,
-0.02801261469721794,
-0.037904709577560425,
0.03836403787136078,
-0.1168820783495903,
0.034940700978040695,
-0.034250058233737946,
0.058777809143066406,
0.026615116745233536,
-0.04847380891442299,
0.05374655872583389,
-0.05492371320724487,
-0.022254984825849533,
0.1801615059375763,
-0.031955819576978683,
0.031511735171079636,
-0.027851564809679985,
0.06097400560975075,
0.017656611278653145,
0.012512912042438984,
0.04000179469585419,
0.047972455620765686,
-0.05089614540338516,
0.0674673542380333,
0.06173926591873169,
0.035732101649045944,
-0.027893269434571266,
-0.0005526847089640796,
0.07327619194984436,
-0.07198011875152588,
-0.03788548335433006,
-0.0277697890996933,
0.012031934224069118,
0.0431523360311985,
0.01120915450155735,
-0.008123633451759815,
-0.05239975452423096,
0.011904898099601269,
-0.06807270646095276,
-0.04409319907426834,
-0.05963612720370293,
-0.03957608714699745,
-0.07038778066635132,
0.02416229248046875,
-0.0837307795882225,
0.127254456281662,
0.025746803730726242,
-0.08943880349397659,
-0.015873847529292107,
-0.029617153108119965,
0.037900667637586594,
0.08739293366670609,
-0.040284935384988785,
-0.05654733255505562,
0.029684016481041908,
-0.015129185281693935,
0.01862339675426483,
-0.09191786497831345,
-0.05335463955998421,
0.07036982476711273,
-0.027729572728276253,
0.014321370050311089,
-0.025059713050723076,
-0.04339904710650444,
-0.055287040770053864,
0.075533926486969,
0.04431626573204994,
0.002304289722815156,
0.0019012659322470427,
0.1523141711950302,
0.044255323708057404,
0.006871217396110296,
-0.09575224667787552,
0.01304828841239214,
-0.057833705097436905,
-0.07801658660173416,
0.019242841750383377,
0.05572796240448952,
0.016747750341892242,
-0.04509659484028816,
-0.051139019429683685,
0.015734579414129257,
0.07963407039642334,
-0.01576833985745907,
0.007872291840612888,
0.04780165106058121,
-0.020250685513019562,
-0.030652303248643875,
0.052045032382011414,
-0.01841105706989765,
0.07253774255514145,
-0.0009242039523087442,
0.0985473170876503,
0.01677151396870613,
0.03610360249876976,
-0.006199294701218605,
-0.001356456894427538,
-0.11821848899126053,
-0.12067405879497528,
-0.027521103620529175,
0.05132625997066498,
-0.03314764425158501,
-0.0012343386188149452,
0.028101012110710144,
-0.012225921265780926,
0.03143094852566719,
-0.014257251285016537,
0.036537256091833115,
0.03260938450694084,
0.03507842496037483,
-0.04535144940018654,
0.06437306106090546,
0.0376356802880764,
0.0324600413441658,
-0.09107474982738495,
-0.024004289880394936,
-0.004404251929372549,
-0.03465220332145691,
0.024339301511645317,
-0.02227972075343132,
4.914800655505525e-33,
0.07260359078645706,
0.01128997839987278,
0.023127757012844086,
-0.022272322326898575,
-0.04755458980798721,
0.00395976472645998,
0.008516840636730194,
0.038167353719472885,
-0.08876054733991623,
-0.06136072054505348,
-0.07119883596897125,
0.10549557954072952,
-0.0361989364027977,
0.029677102342247963,
-0.058557234704494476,
-0.035428620874881744,
-0.0813293531537056,
0.002402989659458399,
0.02644195593893528,
-0.0031433922704309225,
0.0831747055053711,
-0.008059663698077202,
-0.008526640012860298,
0.06936793774366379,
-0.0192799661308527,
0.009059340693056583,
0.03030095063149929,
-0.10568464547395706,
-0.06432611495256424,
0.04759513586759567,
0.03476179018616676,
-0.006808385718613863,
0.012648941949009895,
0.05150911957025528,
-0.10732721537351608,
0.011936891824007034,
-0.02418646216392517,
0.0057168323546648026,
-0.0581032857298851,
-0.08545804768800735,
0.02249082364141941,
0.0107737947255373,
-0.02512427419424057,
0.046400412917137146,
-0.0036944583989679813,
0.017665229737758636,
-0.02211196906864643,
0.03401489928364754,
0.13986599445343018,
-0.004494100343436003,
0.009921886026859283,
-0.024068912491202354,
0.023724962025880814,
0.0417620949447155,
0.027719097211956978,
0.16507306694984436,
0.01823551207780838,
-0.0021553528495132923,
0.05920204147696495,
-0.09589599817991257,
-0.10336750000715256,
0.12700863182544708,
0.022116513922810555,
0.0018230744171887636,
0.05593153089284897,
-0.0009874235838651657,
0.01223225612193346,
-0.08711604028940201,
-0.013890105299651623,
-0.0662510097026825,
-0.04979194700717926,
-0.05605379119515419,
-0.0336415097117424,
0.04417845606803894,
0.01971443183720112,
-0.035935308784246445,
-0.016447491943836212,
-0.03947625681757927,
-0.07104609906673431,
-0.007668123580515385,
-0.0024468256160616875,
-0.034313563257455826,
0.02664763666689396,
-0.020526118576526642,
0.08411288261413574,
-0.06454169750213623,
0.006850950885564089,
-0.012434386648237705,
-0.025362670421600342,
-0.0022596053313463926,
0.04319586604833603,
-0.030421387404203415,
-0.039241641759872437,
-0.035026952624320984,
-0.038646288216114044,
-5.285787774650993e-33,
0.05037433281540871,
0.05031156539916992,
-0.004913759883493185,
0.057402417063713074,
-0.016607433557510376,
-0.01309739239513874,
0.03693012520670891,
0.08772148936986923,
-0.038264643400907516,
-0.0350072979927063,
0.0552208386361599,
-0.05326555296778679,
0.05082699656486511,
-0.023851899430155754,
0.09114640951156616,
-0.016500145196914673,
-0.019233549013733864,
0.06526508182287216,
0.05959123373031616,
0.05967709794640541,
-0.009276327677071095,
0.07319941371679306,
-0.11379362642765045,
0.025361377745866776,
-0.01533849909901619,
-0.012253810651600361,
-0.02701772190630436,
0.0779447853565216,
0.05156085267663002,
0.0024983766488730907,
-0.027413073927164078,
0.02771580033004284,
-0.04825973883271217,
0.048248372972011566,
-0.026508769020438194,
-0.06268887221813202,
-0.024744268506765366,
-0.003819586941972375,
-0.004619025159627199,
0.04668484628200531,
-0.03390265628695488,
0.03378430753946304,
-0.03292850777506828,
0.027555949985980988,
0.011583114974200726,
-0.03340369835495949,
-0.012639728374779224,
-0.04121573269367218,
0.023251276463270187,
-0.04414171352982521,
0.028738301247358322,
-0.021817687898874283,
-0.08966661244630814,
0.020718278363347054,
-0.002563211601227522,
-0.0385773740708828,
0.041907552629709244,
-0.00756471324712038,
-0.0419364795088768,
-0.034715376794338226,
-0.014036190696060658,
-0.11617552489042282,
0.1084313616156578,
-0.06059173867106438,
-0.00875265896320343,
-0.05154133588075638,
0.08607742935419083,
0.032897308468818665,
0.05027465894818306,
-0.016775665804743767,
-0.03105493076145649,
-0.021564556285738945,
0.021947918459773064,
-0.027075758203864098,
0.016601555049419403,
-0.006325462367385626,
-0.1421704739332199,
0.04312390834093094,
0.057108208537101746,
-0.0902920737862587,
-0.037672776728868484,
0.06062023341655731,
0.0664994940161705,
-0.01722770370543003,
0.04386523365974426,
-0.007846815511584282,
-0.0003700472298078239,
0.11771618574857712,
0.07518985867500305,
-0.022348204627633095,
-0.03640284389257431,
0.05064082145690918,
0.029790213331580162,
0.08846381306648254,
0.010569022037088871,
-4.5741025189727225e-8,
0.02025042288005352,
-0.03182867169380188,
-0.0008885732968337834,
0.016806308180093765,
0.028316142037510872,
-0.008822808042168617,
-0.02383423037827015,
-0.09687653183937073,
-0.024338549003005028,
-0.12973053753376007,
0.030698716640472412,
0.026532670482993126,
-0.00667770579457283,
-0.003892756998538971,
0.004471263848245144,
0.001154155470430851,
0.01485749427229166,
0.17068524658679962,
-0.03468595817685127,
0.017097333446145058,
-0.013078619726002216,
0.008054862730205059,
-0.01524400245398283,
-0.0262133851647377,
0.057092659175395966,
0.04821455478668213,
-0.05456400290131569,
0.041480518877506256,
0.0354723185300827,
-0.05035821348428726,
-0.05745377764105797,
0.01863626390695572,
0.02616274170577526,
-0.015023180283606052,
-0.047538015991449356,
0.07486099749803543,
-0.03518490493297577,
-0.07710271328687668,
-0.004378039389848709,
0.014894969761371613,
0.0487482026219368,
-0.06935686618089676,
-0.07898467779159546,
-0.003661506110802293,
0.013643045909702778,
0.011792460456490517,
-0.002482661744579673,
-0.02367098443210125,
-0.006980906706303358,
0.02714536339044571,
-0.026017582044005394,
-0.06154194101691246,
-0.11453108489513397,
-0.015213732607662678,
-0.027809806168079376,
-0.016599107533693314,
-0.052680402994155884,
-0.02827618457376957,
0.0266277976334095,
-0.019817817956209183,
0.014128054492175579,
0.03684353455901146,
0.027217788621783257,
0.023879477754235268
] |
AI-Growth-Lab/PatentSBERTa | 7550939f981e2236a4cabfe3bd6cb6996d317a63 | 2022-05-04T11:45:01.000Z | [
"pytorch",
"mpnet",
"feature-extraction",
"arxiv:2103.11933",
"sentence-transformers",
"sentence-similarity",
"transformers"
] | sentence-similarity | false | AI-Growth-Lab | null | AI-Growth-Lab/PatentSBERTa | 2,643 | 7 | sentence-transformers | ---
pipeline_tag: sentence-similarity
tags:
- sentence-transformers
- feature-extraction
- sentence-similarity
- transformers
---
# PatentSBERTa
## PatentSBERTa: A Deep NLP based Hybrid Model for Patent Distance and Classification using Augmented SBERT
### Aalborg University Business School, AI: Growth-Lab
https://arxiv.org/abs/2103.11933
https://github.com/AI-Growth-Lab/PatentSBERTa
This is a [sentence-transformers](https://www.SBERT.net) model: it maps sentences and paragraphs to a 768-dimensional dense vector space and can be used for tasks like clustering or semantic search.
## Usage (Sentence-Transformers)
Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed:
```
pip install -U sentence-transformers
```
Then you can use the model like this:
```python
from sentence_transformers import SentenceTransformer
sentences = ["This is an example sentence", "Each sentence is converted"]
model = SentenceTransformer('AI-Growth-Lab/PatentSBERTa')
embeddings = model.encode(sentences)
print(embeddings)
```
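Because the model targets patent distance, a common follow-up is to compare the encoded texts with cosine similarity. The sketch below is only an illustration: the claim texts are made-up examples, not data from the paper.
```python
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer('AI-Growth-Lab/PatentSBERTa')

# Hypothetical claim texts, used purely for illustration
claims = [
    "A rechargeable lithium-ion battery cell with a solid electrolyte layer.",
    "An energy storage device comprising a solid-state electrolyte.",
    "A method for brewing coffee using a pressurized water chamber.",
]
embeddings = model.encode(claims, convert_to_tensor=True)

# Pairwise cosine similarities; higher values indicate closer patent texts
scores = util.cos_sim(embeddings, embeddings)
print(scores)
```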
## Usage (HuggingFace Transformers)
Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: first, pass your input through the transformer model, then apply the right pooling operation on top of the contextualized word embeddings.
```python
from transformers import AutoTokenizer, AutoModel
import torch
def cls_pooling(model_output, attention_mask):
return model_output[0][:,0]
# Sentences we want sentence embeddings for
sentences = ['This is an example sentence', 'Each sentence is converted']
# Load model from HuggingFace Hub
tokenizer = AutoTokenizer.from_pretrained('AI-Growth-Lab/PatentSBERTa')
model = AutoModel.from_pretrained('AI-Growth-Lab/PatentSBERTa')
# Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
# Compute token embeddings
with torch.no_grad():
model_output = model(**encoded_input)
# Perform pooling. In this case, cls pooling.
sentence_embeddings = cls_pooling(model_output, encoded_input['attention_mask'])
print("Sentence embeddings:")
print(sentence_embeddings)
```
## Evaluation Results
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name=AI-Growth-Lab/PatentSBERTa)
## Training
The model was trained with the parameters:
**DataLoader**:
`torch.utils.data.dataloader.DataLoader` of length 5 with parameters:
```
{'batch_size': 16, 'sampler': 'torch.utils.data.sampler.RandomSampler', 'batch_sampler': 'torch.utils.data.sampler.BatchSampler'}
```
**Loss**:
`sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss`
Parameters of the `fit()` method:
```
{
"epochs": 1,
"evaluation_steps": 0,
"evaluator": "NoneType",
"max_grad_norm": 1,
"optimizer_class": "<class 'transformers.optimization.AdamW'>",
"optimizer_params": {
"lr": 2e-05
},
"scheduler": "WarmupLinear",
"steps_per_epoch": null,
"warmup_steps": 100,
"weight_decay": 0.01
}
```
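Putting these parameters together, a fine-tuning run with sentence-transformers would look roughly like the sketch below. The training pairs and the base checkpoint are placeholders, since the card does not list the actual training data or starting model.
```python
from sentence_transformers import SentenceTransformer, InputExample, losses
from torch.utils.data import DataLoader

# Base checkpoint is an assumption; the card does not state which model training started from
model = SentenceTransformer('AI-Growth-Lab/PatentSBERTa')

# Placeholder sentence pairs with similarity labels in [0, 1]
train_examples = [
    InputExample(texts=['claim text A', 'claim text B'], label=0.9),
    InputExample(texts=['claim text A', 'claim text C'], label=0.1),
]
train_dataloader = DataLoader(train_examples, shuffle=True, batch_size=16)
train_loss = losses.CosineSimilarityLoss(model)

# Mirrors the fit() parameters listed above
model.fit(
    train_objectives=[(train_dataloader, train_loss)],
    epochs=1,
    warmup_steps=100,
    optimizer_params={'lr': 2e-05},
    weight_decay=0.01,
    max_grad_norm=1,
)
```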
## Full Model Architecture
```
SentenceTransformer(
(0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: MPNetModel
(1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
)
```
## Citing & Authors
```
@article{bekamiri2021patentsberta,
  title={PatentSBERTa: A Deep NLP based Hybrid Model for Patent Distance and Classification using Augmented SBERT},
  author={Bekamiri, Hamid and Hain, Daniel S and Jurowetzki, Roman},
  journal={arXiv preprint arXiv:2103.11933},
  year={2021}
}
``` | [
-0.06242523342370987,
-0.03177584707736969,
-0.024601802229881287,
-0.0008892971673049033,
0.013952627778053284,
0.05880836397409439,
-0.02642732858657837,
0.05801553279161453,
0.0033708831761032343,
-0.04608780890703201,
0.058790162205696106,
0.026409480720758438,
0.01265102531760931,
0.03856831043958664,
0.03542480245232582,
0.09184521436691284,
0.04197936877608299,
0.049532148987054825,
-0.11127360165119171,
-0.110699862241745,
0.14626404643058777,
0.11944562196731567,
0.0011269249953329563,
0.0203904677182436,
-0.05602908506989479,
0.11786920577287674,
0.006296161562204361,
-0.03362610191106796,
0.007292864378541708,
0.013858761638402939,
0.002853234764188528,
0.007644956931471825,
-0.02328900247812271,
0.09592919796705246,
0.038561999797821045,
0.0641966387629509,
-0.05490122362971306,
0.04627663269639015,
0.0050334567204117775,
-0.07496458292007446,
0.01758231595158577,
-0.061909161508083344,
-0.05820215865969658,
-0.008610452525317669,
0.07777965813875198,
-0.036101702600717545,
-0.11485005170106888,
0.0020473310723900795,
0.02342268079519272,
0.002648727735504508,
-0.13298630714416504,
0.013680118136107922,
0.02556493878364563,
0.09967588633298874,
-0.03372492268681526,
0.034014634788036346,
0.033765994012355804,
-0.05112459510564804,
0.02687130682170391,
-0.11949573457241058,
-0.02034124545753002,
-0.028910627588629723,
0.02294645644724369,
-0.019936757162213326,
-0.027810128405690193,
-0.020124126225709915,
-0.015664415434002876,
0.014335528016090393,
0.013752679340541363,
0.0013911647256463766,
-0.016250399872660637,
0.0936291292309761,
-0.06444934755563736,
-0.015827221795916557,
-0.0823107585310936,
0.0004900440108031034,
0.050978485494852066,
-0.012002186849713326,
0.07003430277109146,
-0.01925019547343254,
-0.03405587002635002,
-0.04500772804021835,
0.027441909536719322,
0.09630125015974045,
0.0009169761324301362,
-0.05437878891825676,
0.014452044852077961,
-0.03954824432730675,
0.032473355531692505,
-0.03312714397907257,
-0.03659781441092491,
-0.12084954231977463,
0.024158135056495667,
-0.07013104110956192,
-0.004035893362015486,
0.009649215266108513,
-0.03750426694750786,
-0.10021790117025375,
0.032818034291267395,
0.03695059195160866,
0.02094973996281624,
0.046740077435970306,
0.02034812979400158,
-0.12675084173679352,
-0.093414306640625,
0.05216512829065323,
-0.024794679135084152,
-0.04827699437737465,
0.08775722980499268,
-0.07110154628753662,
-0.0022905708756297827,
-0.0064367675222456455,
-0.050920519977808,
-0.018907319754362106,
0.06964313238859177,
-0.041682418435811996,
0.007107748184353113,
-0.019259009510278702,
-0.00784810446202755,
0.09812156111001968,
-0.034805551171302795,
0.07794900983572006,
-0.04160706326365471,
0.0042033507488667965,
-0.02012297883629799,
-0.020010756328701973,
-0.009433388710021973,
2.2352541655551686e-34,
-0.04216553643345833,
0.03710602596402168,
-0.012937196530401707,
0.04536701738834381,
0.017471767961978912,
-0.016947243362665176,
0.04419441148638725,
0.07911214977502823,
-0.1197730228304863,
-0.01765541173517704,
-0.07617444545030594,
0.044630251824855804,
-0.03775358200073242,
0.057650305330753326,
0.023583153262734413,
-0.021406732499599457,
-0.022661039605736732,
-0.007061876356601715,
0.0764995589852333,
0.013252855278551579,
0.024324147030711174,
0.007456427440047264,
0.04371879622340202,
0.0005280814366415143,
-0.04195694997906685,
-0.056942522525787354,
0.030420392751693726,
-0.08321059495210648,
-0.03548557683825493,
0.014688525348901749,
-0.059525467455387115,
0.0696047991514206,
-0.00948375090956688,
-0.021063264459371567,
0.006296338047832251,
-0.00197936431504786,
-0.0051803793758153915,
-0.03815882280468941,
0.00920027494430542,
-0.06202630698680878,
-0.04794081673026085,
0.004002678673714399,
0.024757103994488716,
-0.06889334321022034,
-0.0029069872107356787,
0.025562724098563194,
-0.020745115354657173,
-0.002999904565513134,
0.1313374787569046,
0.018150079995393753,
0.082036592066288,
-0.0134660042822361,
-0.025870217010378838,
-0.0684303343296051,
0.048978038132190704,
0.006658468395471573,
-0.0047953762114048,
0.06329663842916489,
0.12796244025230408,
0.013705359771847725,
0.049379862844944,
0.009563764557242393,
0.024784373119473457,
0.062249332666397095,
0.04512252286076546,
0.010582358576357365,
0.06562025845050812,
0.06435886025428772,
0.05745190382003784,
0.05972056835889816,
-0.00521476287394762,
0.006136956159025431,
-0.051600050181150436,
0.01447619590908289,
0.020646659657359123,
0.005437587853521109,
-0.0012370861368253827,
-0.08534528315067291,
-0.054024942219257355,
0.08675001561641693,
-0.06620337069034576,
-0.05968400090932846,
0.0752430185675621,
-0.01628016121685505,
-0.043099287897348404,
-0.04304273799061775,
0.030712483450770378,
-0.039380669593811035,
0.05054997652769089,
-0.07088841497898102,
0.024386653676629066,
0.02856450341641903,
-0.03709927573800087,
0.06596241146326065,
0.0528605617582798,
-2.756709107462949e-33,
0.025488046929240227,
0.004115236923098564,
-0.03180447593331337,
0.011878058314323425,
0.012679637409746647,
-0.06465107947587967,
-0.03291945904493332,
0.08191690593957901,
-0.004920241888612509,
0.002033547731116414,
-0.042595088481903076,
-0.01954740658402443,
0.053141795098781586,
-0.0499422624707222,
0.05799800530076027,
0.08115270733833313,
0.001037892885506153,
0.027897588908672333,
0.03802450746297836,
0.10370507836341858,
0.04333186894655228,
0.05477210506796837,
-0.15494586527347565,
0.05111343786120415,
-0.03558145835995674,
-0.029859168455004692,
-0.03702414408326149,
-0.004044493660330772,
0.0072571593336761,
-0.03729008138179779,
-0.03192863613367081,
0.014102080836892128,
-0.04674656316637993,
-0.015690231695771217,
-0.099217988550663,
0.028720475733280182,
-0.006428058259189129,
-0.0724564865231514,
0.018760057166218758,
-0.019359752535820007,
0.0038531296886503696,
0.07909277826547623,
-0.03751716390252113,
-0.018747787922620773,
-0.0346573069691658,
-0.042866699397563934,
-0.06776630878448486,
-0.0675787553191185,
0.03154394403100014,
-0.02088302932679653,
-0.015240783803164959,
0.03316080942749977,
-0.10055215656757355,
0.010437418706715107,
-0.09167852252721786,
-0.0575086772441864,
-0.001812764792703092,
-0.05818318948149681,
-0.07855880260467529,
-0.05122163146734238,
-0.06278523802757263,
-0.007923955097794533,
0.04383638873696327,
-0.03219153732061386,
0.03390706330537796,
-0.08930766582489014,
0.00987387727946043,
0.026036499068140984,
-0.06458711624145508,
-0.06335146725177765,
0.026187164708971977,
-0.00346502591855824,
0.038173455744981766,
0.05672726780176163,
-0.027416838333010674,
-0.03896598517894745,
0.04247365891933441,
-0.034383948892354965,
-0.05180459842085838,
-0.038835495710372925,
0.03208516910672188,
0.013573802076280117,
0.0240784864872694,
0.018228748813271523,
-0.0043426575139164925,
0.04034814611077309,
0.02748551033437252,
0.0506170354783535,
0.004333520773798227,
0.04087298363447189,
-0.026114942505955696,
-0.03999869152903557,
-0.015081017278134823,
0.058743514120578766,
-0.0036694793961942196,
-4.984693546816743e-8,
-0.07220926880836487,
-0.00856650248169899,
-0.040104858577251434,
0.04393375292420387,
-0.06846725195646286,
-0.05861307308077812,
0.047017619013786316,
0.07724086940288544,
-0.05849746614694595,
-0.06675458699464798,
0.007537210360169411,
0.020463839173316956,
-0.07027518004179001,
0.014479830861091614,
-0.030068084597587585,
0.12379853427410126,
-0.011147192679345608,
0.01616211235523224,
0.05026254430413246,
-0.021521301940083504,
0.05177515000104904,
0.015535763464868069,
-0.006449766457080841,
0.044822368770837784,
0.031068943440914154,
-0.026299161836504936,
-0.021023673936724663,
0.0001977107604034245,
-0.02811114490032196,
0.021069739013910294,
0.035631779581308365,
0.02297467738389969,
-0.03416894003748894,
0.0008155335672199726,
0.05385203659534454,
0.04299607500433922,
0.07451315224170685,
-0.055744633078575134,
-0.023328252136707306,
0.02216048166155815,
0.025686785578727722,
0.05471885949373245,
-0.12765462696552277,
-0.02385869063436985,
0.12692242860794067,
0.005894669331610203,
0.007212773431092501,
-0.09921809285879135,
0.07081791013479233,
0.034760236740112305,
0.07074441015720367,
-0.05213676765561104,
-0.028598302975296974,
-0.040243420749902725,
0.03749953955411911,
0.039523933082818985,
0.001119699445553124,
-0.04773476719856262,
0.02827974408864975,
-0.10040505975484848,
0.0794353112578392,
0.0646015852689743,
0.07667277753353119,
-0.014803140424191952
] |
p-christ/12412fsasf | 73cb7588db19266796e71e3f3bbfe03d98baa9ec | 2022-05-18T11:14:34.000Z | [
"pytorch",
"t5",
"text2text-generation",
"generic"
] | text2text-generation | false | p-christ | null | p-christ/12412fsasf | 2,641 | null | generic | ---
tags:
- text2text-generation
library_name: generic
---
random test repo | [
-0.06363589316606522,
-0.0274987630546093,
-0.0379350371658802,
0.0028069557156413794,
0.06966287642717361,
-0.038727402687072754,
0.07074781507253647,
-0.011820419691503048,
0.021122507750988007,
-0.0221461970359087,
0.07594586908817291,
-0.07059419900178909,
0.04556693881750107,
-0.009943380951881409,
0.029781412333250046,
0.06333059072494507,
-0.00017728334933053702,
-0.06592637300491333,
0.0267898328602314,
0.018121633678674698,
0.009974740445613861,
0.06376950442790985,
0.0652465671300888,
0.05541115254163742,
0.03775618225336075,
0.029055431485176086,
-0.07973214983940125,
0.013894096948206425,
0.004817434120923281,
0.008221454918384552,
0.11805477738380432,
0.05988135188817978,
-0.033488448709249496,
-0.021765025332570076,
0.10946615785360336,
0.04870101436972618,
-0.009571254253387451,
-0.009281988255679607,
-0.028674883767962456,
0.014226841740310192,
0.01733173429965973,
0.023774711415171623,
0.010963279753923416,
0.06549897789955139,
-0.03215459734201431,
-0.06325019896030426,
0.03368588164448738,
-0.030587323009967804,
-0.008441191166639328,
0.004589874297380447,
-0.036369629204273224,
-0.07802408188581467,
-0.02311675250530243,
-0.07477235794067383,
0.024254877120256424,
0.04802626371383667,
-0.053057655692100525,
0.020498136058449745,
0.03198084235191345,
0.04929704591631889,
0.06912317126989365,
-0.04362450912594795,
-0.0012972623808309436,
0.020725859329104424,
-0.03384698927402496,
0.058346617966890335,
0.08792933821678162,
0.009919332340359688,
-0.008006202057003975,
-0.02607923001050949,
-0.05459616705775261,
-0.0017019495135173202,
0.07728507369756699,
0.0647604763507843,
0.03960457816720009,
0.011292613111436367,
-0.0390465073287487,
0.0025301554705947638,
0.04067312926054001,
-0.059960123151540756,
-0.023592250421643257,
-0.04979638755321503,
0.03353666886687279,
0.014033290557563305,
-0.046852998435497284,
0.041395872831344604,
0.10653983801603317,
0.021830910816788673,
0.02268359437584877,
0.08449088782072067,
0.01899443380534649,
-0.09431719779968262,
0.12497878819704056,
0.03667348250746727,
-0.12955647706985474,
0.044875651597976685,
0.02070006914436817,
-0.021493962034583092,
0.03147464990615845,
0.060833897441625595,
-0.009120545350015163,
-0.04977460578083992,
0.05941268801689148,
0.0057953642681241035,
-0.01651459001004696,
-0.035516832023859024,
-0.003927771933376789,
-0.01659662462770939,
-0.021771874278783798,
-0.027441848069429398,
0.07155891507863998,
-0.0034026941284537315,
-0.020035505294799805,
-0.05125230923295021,
-0.042600516229867935,
0.032797180116176605,
0.005467591807246208,
-0.053031500428915024,
0.013241306878626347,
0.0531558096408844,
-0.038802847266197205,
0.010828058235347271,
-0.05936644598841667,
-0.09231852740049362,
-0.15122397243976593,
0.04576154425740242,
0.028551224619150162,
-2.5244266482946648e-33,
0.04337703436613083,
-0.005497373174875975,
0.03579846769571304,
0.13755589723587036,
0.007358253467828035,
-0.037556082010269165,
-0.026666881516575813,
0.010216030292212963,
-0.05991566181182861,
-0.0808551087975502,
0.0664488896727562,
-0.12257909774780273,
-0.07270435988903046,
0.06192243471741676,
0.03049287013709545,
-0.05350619554519653,
0.006169931031763554,
0.018035899847745895,
0.013171332888305187,
0.04222065582871437,
-0.053504399955272675,
0.06090647354722023,
0.02245018631219864,
-0.08001821488142014,
-0.03097144514322281,
0.10680846124887466,
0.07603129744529724,
-0.08945845812559128,
0.04745689406991005,
0.018205750733613968,
-0.027617312967777252,
-0.025777971372008324,
0.05983523651957512,
0.12099169194698334,
-0.03008139692246914,
-0.010454952716827393,
-0.056853506714105606,
-0.0030202313791960478,
-0.07330409437417984,
-0.00489836186170578,
0.0030043465085327625,
0.06010347604751587,
0.004910698160529137,
-0.049304839223623276,
0.13350661098957062,
-0.04479747265577316,
0.024570828303694725,
-0.0030181771144270897,
0.003205769695341587,
-0.06771213561296463,
-0.0076326183043420315,
0.116928830742836,
0.006095401477068663,
0.023261047899723053,
0.01861140877008438,
-0.009749536402523518,
-0.03655947744846344,
0.058885347098112106,
0.06994745135307312,
-0.02573494426906109,
-0.03392656147480011,
0.02888810820877552,
0.02247682213783264,
-0.04245295003056526,
0.0769980251789093,
0.04094059392809868,
-0.02534247748553753,
-0.06780759245157242,
0.06341059505939484,
0.05549953505396843,
0.06474070996046066,
-0.0555008240044117,
-0.05505134165287018,
0.10621435195207596,
0.05020826309919357,
0.010450211353600025,
-0.010870923288166523,
0.01643507182598114,
0.004109922796487808,
0.007626183796674013,
-0.050960417836904526,
-0.054362110793590546,
0.04599063843488693,
0.00303196394816041,
-0.07317423820495605,
-0.037979841232299805,
-0.009436581283807755,
-0.11537928134202957,
0.013931091874837875,
0.03019814006984234,
0.0150472242385149,
0.01898675225675106,
-0.08893375843763351,
-0.08213341981172562,
0.007057536859065294,
-4.896456055970486e-34,
0.021281642839312553,
-0.019303059205412865,
-0.058760352432727814,
0.02037268877029419,
0.018859561532735825,
-0.03158024325966835,
-0.046001821756362915,
0.05160640925168991,
0.009718596003949642,
0.030139852315187454,
0.003006062237545848,
0.013427169993519783,
-0.0033382216934114695,
-0.01691361702978611,
0.07678798586130142,
-0.027147579938173294,
0.05944795161485672,
-0.052402228116989136,
0.023034779354929924,
-0.009219801053404808,
0.037717677652835846,
0.04797658696770668,
0.004156591836363077,
0.030331583693623543,
0.04693884029984474,
-0.017872873693704605,
0.016241909936070442,
0.03465348109602928,
0.03546774759888649,
-0.019151251763105392,
-0.016228530555963516,
0.12140998244285583,
-0.09176388382911682,
-0.0067656380124390125,
-0.019613174721598625,
-0.044752709567546844,
0.015415582805871964,
0.0008757302421145141,
-0.001367122051306069,
0.05522614344954491,
0.015116785652935505,
0.031500738114118576,
0.010982590727508068,
-0.01643778197467327,
-0.07173912972211838,
-0.038762181997299194,
-0.02414017915725708,
-0.0360066220164299,
0.03284763544797897,
-0.07207194715738297,
-0.02836582250893116,
-0.051065634936094284,
-0.06662045419216156,
-0.08971305191516876,
-0.05738299712538719,
-0.05167507007718086,
0.008173310197889805,
0.0785185694694519,
-0.01918516866862774,
0.01702088676393032,
-0.034101780503988266,
-0.01614813320338726,
-0.03431900218129158,
-0.0014071821933612227,
0.007459684740751982,
-0.046438440680503845,
-0.11510515958070755,
-0.020329829305410385,
-0.022212795913219452,
-0.0005968916229903698,
0.05041021108627319,
0.013948570936918259,
-0.062125589698553085,
-0.032015372067689896,
-0.0304946880787611,
-0.02956482023000717,
0.016929075121879578,
0.04536328837275505,
0.030920211225748062,
-0.07288236916065216,
0.04891625791788101,
0.045675814151763916,
0.09169114381074905,
0.054981574416160583,
0.027817536145448685,
-0.08310991525650024,
0.00945592112839222,
0.022879360243678093,
-0.06196931004524231,
0.018538741394877434,
0.05118821561336517,
0.008192826993763447,
-0.09641295671463013,
0.07727756351232529,
0.07846596091985703,
-2.5587993945919152e-8,
-0.036153800785541534,
-0.0667629987001419,
-0.10672593116760254,
0.05070841684937477,
-0.007565166335552931,
0.050927042961120605,
-0.08340221643447876,
0.0007013588328845799,
0.007611268665641546,
-0.03351663053035736,
-0.04842487350106239,
0.014659524895250797,
-0.06417393684387207,
0.046552129089832306,
-0.04775971546769142,
0.021805457770824432,
-0.06981980055570602,
0.05193186178803444,
-0.030150627717375755,
-0.03661461919546127,
-0.014372644014656544,
-0.0032651943620294333,
-0.000562406494282186,
-0.05587877333164215,
-0.024514123797416687,
0.017401235178112984,
0.06071317568421364,
0.05928519368171692,
0.015842966735363007,
-0.02620091289281845,
0.07337324321269989,
-0.01875564083456993,
-0.17258432507514954,
-0.10080549865961075,
0.01986285299062729,
0.02532920055091381,
0.0015879884595051408,
-0.10347346216440201,
0.08098632097244263,
-0.0205267071723938,
-0.04003673419356346,
0.03505853936076164,
-0.019438013434410095,
-0.0074327015317976475,
-0.03583982214331627,
-0.07665243744850159,
-0.06867541372776031,
0.0196559838950634,
0.026296377182006836,
-0.06866703927516937,
0.030380716547369957,
-0.047198861837387085,
-0.02488894760608673,
-0.04292319715023041,
-0.03207272291183472,
0.02074703760445118,
0.0599929578602314,
0.009482256136834621,
-0.0365728996694088,
-0.074436254799366,
0.12186921387910843,
0.0023315181024372578,
0.08079418540000916,
-0.05427908897399902
] |
cointegrated/rubert-base-cased-nli-threeway | 982964680ac0044ca95f3b5bb930b9514e0ee895 | 2021-10-10T11:09:27.000Z | [
"pytorch",
"bert",
"text-classification",
"ru",
"transformers",
"rubert",
"russian",
"nli",
"rte",
"zero-shot-classification"
] | zero-shot-classification | false | cointegrated | null | cointegrated/rubert-base-cased-nli-threeway | 2,638 | 5 | transformers | ---
language: ru
pipeline_tag: zero-shot-classification
tags:
- rubert
- russian
- nli
- rte
- zero-shot-classification
widget:
- text: "Я хочу поехать в Австралию"
candidate_labels: "спорт,путешествия,музыка,кино,книги,наука,политика"
hypothesis_template: "Тема текста - {}."
---
# RuBERT for NLI (natural language inference)
This is the [DeepPavlov/rubert-base-cased](https://huggingface.co/DeepPavlov/rubert-base-cased) model fine-tuned to predict the logical relationship between two short texts: entailment, contradiction, or neutral.
## Usage
How to run the model for NLI:
```python
# !pip install transformers sentencepiece --quiet
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification
model_checkpoint = 'cointegrated/rubert-base-cased-nli-threeway'
tokenizer = AutoTokenizer.from_pretrained(model_checkpoint)
model = AutoModelForSequenceClassification.from_pretrained(model_checkpoint)
if torch.cuda.is_available():
model.cuda()
text1 = 'Сократ - человек, а все люди смертны.'
text2 = 'Сократ никогда не умрёт.'
with torch.inference_mode():
out = model(**tokenizer(text1, text2, return_tensors='pt').to(model.device))
proba = torch.softmax(out.logits, -1).cpu().numpy()[0]
print({v: proba[k] for k, v in model.config.id2label.items()})
# {'entailment': 0.009525929, 'contradiction': 0.9332064, 'neutral': 0.05726764}
```
You can also use this model for zero-shot short text classification (by labels only), e.g. for sentiment analysis:
```python
def predict_zero_shot(text, label_texts, model, tokenizer, label='entailment', normalize=True):
tokens = tokenizer([text] * len(label_texts), label_texts, truncation=True, return_tensors='pt', padding=True)
with torch.inference_mode():
result = torch.softmax(model(**tokens.to(model.device)).logits, -1)
proba = result[:, model.config.label2id[label]].cpu().numpy()
if normalize:
proba /= sum(proba)
return proba
classes = ['Я доволен', 'Я недоволен']
predict_zero_shot('Какая гадость эта ваша заливная рыба!', classes, model, tokenizer)
# array([0.05609814, 0.9439019 ], dtype=float32)
predict_zero_shot('Какая вкусная эта ваша заливная рыба!', classes, model, tokenizer)
# array([0.9059292 , 0.09407079], dtype=float32)
```
Alternatively, you can use [Huggingface pipelines](https://huggingface.co/transformers/main_classes/pipelines.html) for inference.
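For example, a zero-shot classification pipeline can be set up roughly as in the sketch below; the text and candidate labels simply mirror the widget example above.
```python
from transformers import pipeline

classifier = pipeline(
    'zero-shot-classification',
    model='cointegrated/rubert-base-cased-nli-threeway',
)
result = classifier(
    'Я хочу поехать в Австралию',
    candidate_labels=['спорт', 'путешествия', 'музыка', 'кино', 'книги', 'наука', 'политика'],
    hypothesis_template='Тема текста - {}.',
)
# The top-ranked label and its score
print(result['labels'][0], result['scores'][0])
```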
## Sources
The model has been trained on a series of NLI datasets automatically translated to Russian from English.
Most datasets were taken [from the repo of Felipe Salvatore](https://github.com/felipessalvatore/NLI_datasets):
[JOCI](https://github.com/sheng-z/JOCI),
[MNLI](https://cims.nyu.edu/~sbowman/multinli/),
[MPE](https://aclanthology.org/I17-1011/),
[SICK](http://www.lrec-conf.org/proceedings/lrec2014/pdf/363_Paper.pdf),
[SNLI](https://nlp.stanford.edu/projects/snli/).
Some datasets obtained from the original sources:
[ANLI](https://github.com/facebookresearch/anli),
[NLI-style FEVER](https://github.com/easonnie/combine-FEVER-NSMN/blob/master/other_resources/nli_fever.md),
[IMPPRES](https://github.com/facebookresearch/Imppres).
## Performance
The table below shows ROC AUC (one class vs rest) for five models on the corresponding *dev* sets:
- [tiny](https://huggingface.co/cointegrated/rubert-tiny-bilingual-nli): a small BERT predicting entailment vs not_entailment
- [twoway](https://huggingface.co/cointegrated/rubert-base-cased-nli-twoway): a base-sized BERT predicting entailment vs not_entailment
- [threeway](https://huggingface.co/cointegrated/rubert-base-cased-nli-threeway) (**this model**): a base-sized BERT predicting entailment vs contradiction vs neutral
- [vicgalle-xlm](https://huggingface.co/vicgalle/xlm-roberta-large-xnli-anli): a large multilingual NLI model
- [facebook-bart](https://huggingface.co/facebook/bart-large-mnli): a large multilingual NLI model
|model |add_one_rte|anli_r1|anli_r2|anli_r3|copa|fever|help|iie |imppres|joci|mnli |monli|mpe |scitail|sick|snli|terra|total |
|------------------------|-----------|-------|-------|-------|----|-----|----|-----|-------|----|-----|-----|----|-------|----|----|-----|------|
|n_observations |387 |1000 |1000 |1200 |200 |20474|3355|31232|7661 |939 |19647|269 |1000|2126 |500 |9831|307 |101128|
|tiny/entailment |0.77 |0.59 |0.52 |0.53 |0.53|0.90 |0.81|0.78 |0.93 |0.81|0.82 |0.91 |0.81|0.78 |0.93|0.95|0.67 |0.77 |
|twoway/entailment |0.89 |0.73 |0.61 |0.62 |0.58|0.96 |0.92|0.87 |0.99 |0.90|0.90 |0.99 |0.91|0.96 |0.97|0.97|0.87 |0.86 |
|threeway/entailment |0.91 |0.75 |0.61 |0.61 |0.57|0.96 |0.56|0.61 |0.99 |0.90|0.91 |0.67 |0.92|0.84 |0.98|0.98|0.90 |0.80 |
|vicgalle-xlm/entailment |0.88 |0.79 |0.63 |0.66 |0.57|0.93 |0.56|0.62 |0.77 |0.80|0.90 |0.70 |0.83|0.84 |0.91|0.93|0.93 |0.78 |
|facebook-bart/entailment|0.51 |0.41 |0.43 |0.47 |0.50|0.74 |0.55|0.57 |0.60 |0.63|0.70 |0.52 |0.56|0.68 |0.67|0.72|0.64 |0.58 |
|threeway/contradiction | |0.71 |0.64 |0.61 | |0.97 | | |1.00 |0.77|0.92 | |0.89| |0.99|0.98| |0.85 |
|threeway/neutral | |0.79 |0.70 |0.62 | |0.91 | | |0.99 |0.68|0.86 | |0.79| |0.96|0.96| |0.83 |
For evaluation (and for training of the [tiny](https://huggingface.co/cointegrated/rubert-tiny-bilingual-nli) and [twoway](https://huggingface.co/cointegrated/rubert-base-cased-nli-twoway) models), some extra datasets were used:
[Add-one RTE](https://cs.brown.edu/people/epavlick/papers/ans.pdf),
[CoPA](https://people.ict.usc.edu/~gordon/copa.html),
[IIE](https://aclanthology.org/I17-1100), and
[SCITAIL](https://allenai.org/data/scitail) taken from [the repo of Felipe Salvatore](https://github.com/felipessalvatore/NLI_datasets) and translated,
[HELP](https://github.com/verypluming/HELP) and [MoNLI](https://github.com/atticusg/MoNLI) taken from the original sources and translated,
and Russian [TERRa](https://russiansuperglue.com/ru/tasks/task_info/TERRa).
| [
-0.04571026563644409,
-0.039892394095659256,
-0.0036129935178905725,
0.008560040965676308,
0.04648280516266823,
0.04915259778499603,
-0.0030769382137805223,
0.05452689900994301,
-0.010778489522635937,
-0.0768294632434845,
-0.00849726889282465,
-0.06503335386514664,
-0.02644917741417885,
0.06324781477451324,
0.0358104482293129,
0.02803172916173935,
-0.0008902226109057665,
-0.03049740567803383,
-0.09567410498857498,
-0.10189308226108551,
0.08656086772680283,
0.04067651182413101,
0.08969654887914658,
0.002475143177434802,
-0.019061867147684097,
0.017554644495248795,
-0.02141880989074707,
0.021331854164600372,
0.018610943108797073,
0.024240821599960327,
0.014798210933804512,
0.054613012820482254,
-0.03623711317777634,
0.07044211775064468,
0.057034723460674286,
0.03968795761466026,
-0.05115525424480438,
0.0003855392278637737,
0.042026665061712265,
0.0719841718673706,
-0.004973174072802067,
-0.07494118809700012,
-0.08003055304288864,
0.04810164123773575,
0.0802026018500328,
0.019096739590168,
-0.09762100130319595,
0.014249352738261223,
-0.02608347125351429,
0.019540509209036827,
-0.15405499935150146,
-0.007308681961148977,
0.00337940058670938,
0.06883418560028076,
0.006277525797486305,
-0.03364650160074234,
0.04554509371519089,
-0.018227724358439445,
-0.00350496475584805,
-0.07630941271781921,
-0.03189840912818909,
-0.05871891975402832,
-0.045794811099767685,
-0.021728571504354477,
-0.01296630222350359,
0.008338149636983871,
-0.03828652575612068,
0.03297958895564079,
-0.0036067303735762835,
0.11475536972284317,
0.0004173702618572861,
0.06993836164474487,
-0.06827488541603088,
0.06155526265501976,
-0.0580315962433815,
-0.02768985740840435,
0.05759882554411888,
0.006168945226818323,
0.009687226265668869,
-0.03118498995900154,
-0.024615218862891197,
0.02025040239095688,
0.04270593822002411,
0.003743694396689534,
0.028129208832979202,
-0.005756007041782141,
0.025466760620474815,
-0.009693418629467487,
-0.018504569306969643,
0.01582358591258526,
-0.055013805627822876,
-0.08705151081085205,
0.05443894490599632,
-0.0007173257181420922,
0.033603303134441376,
0.0452137291431427,
-0.029162589460611343,
-0.011807054281234741,
-0.0247908066958189,
0.06041482463479042,
-0.04929215461015701,
-0.034280821681022644,
0.004110611043870449,
-0.035940371453762054,
-0.06542859226465225,
0.017477573826909065,
-0.0266241654753685,
-0.08511554449796677,
0.030607251450419426,
-0.11448748409748077,
0.017243102192878723,
-0.03333672136068344,
-0.01877438835799694,
-0.01822071336209774,
0.01040578167885542,
-0.012163328938186169,
0.012560373172163963,
0.021286750212311745,
-0.014642506837844849,
0.0075825355015695095,
-0.02717062085866928,
0.04750337451696396,
-0.07567236572504044,
0.05473159998655319,
0.00041415749001316726,
-0.05870460346341133,
-0.016903145238757133,
7.837527034187014e-33,
0.0745725929737091,
0.03497263789176941,
-0.02271977812051773,
0.019615398719906807,
-0.05388064309954643,
-0.023398956283926964,
-0.06334824860095978,
0.006983795203268528,
-0.11444690078496933,
0.10059478878974915,
-0.02447415143251419,
0.0358484610915184,
-0.06892551481723785,
-0.011635547503829002,
-0.0009595078299753368,
0.01161110494285822,
-0.007668784353882074,
0.018506763502955437,
-0.030042115598917007,
0.0676352009177208,
0.07986781746149063,
0.0517667755484581,
-0.032481204718351364,
-0.03167036175727844,
-0.05222154036164284,
0.043838515877723694,
0.07064394652843475,
-0.10626579821109772,
-0.05768391489982605,
-0.01246003620326519,
-0.10137496888637543,
-0.016780730336904526,
0.048497579991817474,
0.041423771530389786,
-0.03767770901322365,
-0.11778993904590607,
-0.07820840924978256,
-0.020297029986977577,
-0.008206745609641075,
-0.042659223079681396,
-0.07219177484512329,
0.024806374683976173,
0.008952224627137184,
-0.045863278210163116,
0.012186238542199135,
-0.05226900056004524,
-0.03309207037091255,
-0.006230877712368965,
0.04672423005104065,
0.05219704657793045,
0.03048449568450451,
-0.018729250878095627,
0.027298901230096817,
0.05441982299089432,
0.005702952854335308,
-0.01796414889395237,
0.04774460569024086,
0.05734248086810112,
0.07124678790569305,
-0.011167152784764767,
0.02796279266476631,
-0.026789477095007896,
0.03148217126727104,
-0.013731017708778381,
0.047775864601135254,
0.04451990872621536,
-0.04800019785761833,
-0.004615974612534046,
0.09725764393806458,
0.04001021012663841,
-0.05966143310070038,
0.008525298908352852,
-0.048517100512981415,
0.06601127982139587,
0.08294188231229782,
-0.02196640521287918,
-0.01530092116445303,
-0.08222010731697083,
-0.06517960131168365,
0.06974126398563385,
-0.0927594006061554,
-0.00222934246994555,
0.036459967494010925,
0.01355043426156044,
-0.04309040307998657,
-0.015172834508121014,
0.02164652943611145,
-0.08968614041805267,
-0.042269546538591385,
-0.018702566623687744,
-0.061463505029678345,
0.017763519659638405,
-0.06575562059879303,
0.028275927528738976,
0.05236773565411568,
-8.670283231824016e-33,
0.08380551636219025,
-0.006495547015219927,
-0.057825058698654175,
0.029949845746159554,
-0.03343288600444794,
-0.05510643869638443,
0.036013126373291016,
0.06196226924657822,
0.10649622976779938,
0.023118669167160988,
0.05037352442741394,
-0.06222284212708473,
-0.008087670430541039,
0.041418809443712234,
0.08475495129823685,
0.00868528988212347,
-0.02542342245578766,
0.02618071623146534,
-0.01567838154733181,
0.09529703855514526,
0.033806122839450836,
0.08260465413331985,
-0.14574061334133148,
0.03309875726699829,
-0.00936486292630434,
0.015246601775288582,
0.09463194757699966,
0.0006092461408115923,
-0.062382061034440994,
-0.041512567549943924,
-0.023692641407251358,
-0.04524417594075203,
-0.12682312726974487,
0.00609594164416194,
-0.016866888850927353,
0.01936110481619835,
0.010203985497355461,
-0.06606107205152512,
-0.039745643734931946,
0.08593576401472092,
0.06349637359380722,
0.05921078473329544,
-0.046116188168525696,
0.015453541651368141,
-0.0835118368268013,
-0.05441562831401825,
-0.053118184208869934,
0.011785781010985374,
0.05135264992713928,
-0.032331433147192,
0.016065794974565506,
0.030777383595705032,
-0.09745250642299652,
0.04364414140582085,
-0.03005853295326233,
-0.12529639899730682,
0.010488690808415413,
-0.08985971659421921,
-0.06020274758338928,
0.005844527389854193,
-0.028964409604668617,
0.04014933109283447,
0.046090271323919296,
-0.04613847658038139,
-0.019313666969537735,
-0.056512631475925446,
-0.045018505305051804,
0.040357351303100586,
0.08563138544559479,
-0.02637956663966179,
0.06418653577566147,
0.019511401653289795,
0.039463724941015244,
0.07273134589195251,
-0.001072201645001769,
0.026735682040452957,
-0.004313683602958918,
0.04138045012950897,
0.050127897411584854,
-0.012988926842808723,
0.007495010271668434,
-0.029155651107430458,
-0.0058731334283947945,
0.05520661920309067,
0.06932961195707321,
0.12377701699733734,
0.010007279925048351,
0.10063356906175613,
0.05096680670976639,
0.010259370319545269,
0.0304725281894207,
0.011799373663961887,
0.03202992305159569,
0.11083924025297165,
-0.035902801901102066,
-5.616715270662098e-8,
-0.042137548327445984,
-0.004869662690907717,
-0.0805600956082344,
0.05597104877233505,
-0.07476523518562317,
-0.05362695828080177,
0.018031951040029526,
-0.023975741118192673,
-0.07368126511573792,
-0.02947186306118965,
0.05633706599473953,
0.08757200092077255,
-0.11397147923707962,
-0.04710932448506355,
-0.04761770740151405,
0.0982167199254036,
-0.01815592311322689,
0.06726229935884476,
0.01861686073243618,
0.026989690959453583,
0.05797193571925163,
-0.016739198938012123,
-0.05838501825928688,
0.012680407613515854,
-0.03874649107456207,
-0.019494712352752686,
0.01350548304617405,
0.01448331493884325,
0.0003324977878946811,
-0.018654797226190567,
0.07492213696241379,
0.03365642949938774,
-0.08909671753644943,
-0.02838277444243431,
0.05738912150263786,
0.09556186944246292,
-0.00007845200889278203,
0.011786775663495064,
0.04437829926609993,
0.013433554209768772,
0.0035370883997529745,
0.01602267473936081,
-0.13839666545391083,
-0.03939064219594002,
0.02055404894053936,
0.01739192008972168,
-0.04281912371516228,
-0.12416829913854599,
0.03213507682085037,
0.02192254737019539,
-0.014909821562469006,
-0.013632378540933132,
-0.035278789699077606,
0.05615720525383949,
-0.01262294314801693,
0.043409448117017746,
0.04704011231660843,
-0.02148386649787426,
-0.04637819901108742,
-0.02810988761484623,
0.04971957206726074,
0.012716241180896759,
0.06759791821241379,
-0.04519665613770485
] |
facebook/dpr-ctx_encoder-multiset-base | 6c01adf9e9e7c812c0fa998fed97eec3262c2cf4 | 2020-11-25T16:58:57.000Z | [
"pytorch",
"tf",
"dpr",
"transformers"
] | null | false | facebook | null | facebook/dpr-ctx_encoder-multiset-base | 2,636 | null | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
debyve/dumbbot | 04edc136f4f0028b01c2dc18d4ec0e4423441f7f | 2022-07-15T07:17:37.000Z | [
"pytorch",
"gpt2",
"text-generation",
"transformers",
"conversational"
] | conversational | false | debyve | null | debyve/dumbbot | 2,636 | null | transformers | ---
tags:
- conversational
---
# debyve/tobbmud Model | [
-0.06998490542173386,
-0.06302614510059357,
0.00997486524283886,
-0.013300815597176552,
0.021166140213608742,
-0.03511122614145279,
0.05950848385691643,
-0.008696346543729305,
0.07581824064254761,
-0.0013395952992141247,
-0.00336818746291101,
-0.04189386963844299,
0.016979800537228584,
0.011734687723219395,
0.003173263743519783,
0.06731520593166351,
0.07674895972013474,
-0.019333576783537865,
-0.011015347205102444,
0.02518230862915516,
-0.010436727665364742,
0.0916433036327362,
-0.042889587581157684,
0.033808592706918716,
0.030095066875219345,
0.0509711429476738,
-0.01257425919175148,
0.0083992388099432,
-0.004555917344987392,
-0.015795137733221054,
0.0694630965590477,
0.06695833057165146,
0.015145559795200825,
0.09017717093229294,
-0.04368166625499725,
0.06566048413515091,
0.028926189988851547,
0.0029716885183006525,
0.0316704660654068,
0.003577176947146654,
-0.05087587237358093,
0.06001349166035652,
-0.06169665977358818,
0.0026890281587839127,
0.05764982849359512,
-0.05422800034284592,
-0.11234962195158005,
0.015463072806596756,
-0.046498823910951614,
-0.004011358134448528,
-0.05160580202937126,
0.005208397284150124,
0.02013544924557209,
0.11915332078933716,
0.006033735349774361,
0.06608853489160538,
-0.0694407969713211,
-0.06255465000867844,
-0.01820722036063671,
-0.01571699231863022,
-0.027632733806967735,
-0.031140446662902832,
-0.05092281475663185,
0.06276635825634003,
0.005082447547465563,
0.04818824306130409,
-0.06421709060668945,
0.05262039229273796,
-0.03031218610703945,
0.13471795618534088,
0.047256190329790115,
-0.009352197870612144,
0.018688132986426353,
-0.01810797117650509,
-0.021277297288179398,
0.0408075675368309,
0.018035199493169785,
-0.02889077365398407,
0.03681235760450363,
-0.05910352244973183,
-0.03179667517542839,
-0.06346047669649124,
0.050146762281656265,
-0.0754753053188324,
0.014291866682469845,
-0.04249364137649536,
-0.030743001028895378,
-0.07846847921609879,
0.006973871495574713,
-0.007916211150586605,
-0.05688820779323578,
-0.03839947283267975,
0.05521615967154503,
0.06490909308195114,
-0.0292289350181818,
0.012520192191004753,
0.03856731206178665,
0.005157523788511753,
-0.030083181336522102,
0.11539153009653091,
-0.042086414992809296,
0.04443821683526039,
-0.009607136249542236,
-0.057775840163230896,
-0.025533799082040787,
-0.03247583657503128,
0.025335030630230904,
0.00724015012383461,
0.002996262861415744,
-0.03742499649524689,
-0.05255994200706482,
-0.0040517752058804035,
0.014336192049086094,
-0.014435051009058952,
0.050258442759513855,
-0.061739481985569,
0.07208842039108276,
-0.023693304508924484,
0.07608836144208908,
-0.0648687481880188,
0.01578141190111637,
0.022969329729676247,
-0.038115452975034714,
-0.03901103883981705,
-0.05616199970245361,
0.026565562933683395,
-0.024155179038643837,
-1.1494504469603543e-33,
0.1384362280368805,
-0.02031891606748104,
0.05362527072429657,
0.0975182056427002,
0.016703080385923386,
0.06289799511432648,
-0.06280133873224258,
0.0037410701625049114,
-0.009390980936586857,
-0.005136782303452492,
0.008405166678130627,
0.00004872518184129149,
-0.03943072259426117,
0.04961298406124115,
0.026371486485004425,
-0.06574641168117523,
-0.08825043588876724,
-0.01478424109518528,
-0.020005380734801292,
-0.010497813113033772,
-0.006309142801910639,
0.02594601735472679,
-0.04721563681960106,
0.03694651275873184,
0.06970687210559845,
0.05455239117145538,
0.042645324021577835,
-0.07718147337436676,
-0.04411961883306503,
0.0769261047244072,
-0.07160016894340515,
-0.04714324697852135,
-0.03240036219358444,
0.01677550934255123,
-0.011693395674228668,
0.0479569211602211,
-0.03243173286318779,
0.007541181985288858,
0.002223752671852708,
-0.12163539230823517,
0.010536114685237408,
-0.06540510058403015,
-0.0714908242225647,
-0.09509379416704178,
-0.05076838284730911,
0.06140263378620148,
0.03957676514983177,
-0.0016829747473821044,
-0.06743641942739487,
-0.012018847279250622,
0.015662385150790215,
-0.005040754098445177,
-0.023925555869936943,
-0.05698699504137039,
-0.045056212693452835,
-0.04489899054169655,
0.017243575304746628,
0.037351734936237335,
0.05357785150408745,
0.055834122002124786,
0.026264850050210953,
0.013447407633066177,
0.08511493355035782,
-0.12363774329423904,
0.1116657629609108,
0.012250425294041634,
-0.07666076719760895,
-0.03059004247188568,
0.021354902535676956,
-0.09866608679294586,
-0.07410221546888351,
0.06356044858694077,
-0.02088993787765503,
0.042785778641700745,
0.011387895792722702,
0.01273056399077177,
-0.018195487558841705,
-0.0593426413834095,
0.04173174127936363,
0.015232543461024761,
-0.037893787026405334,
-0.1240343302488327,
-0.0693817287683487,
-0.009528353810310364,
0.011757719330489635,
-0.02794751524925232,
0.024329306557774544,
-0.11535428464412689,
-0.013539399951696396,
0.012822763063013554,
-0.004002859815955162,
0.05415209010243416,
-0.048233721405267715,
-0.0587986521422863,
0.02472570538520813,
-1.5862129953545563e-33,
-0.032385535538196564,
-0.02144269272685051,
-0.12815596163272858,
0.08757753670215607,
0.01507613342255354,
-0.04200371727347374,
0.02871154248714447,
0.09432127326726913,
0.011456512846052647,
-0.0443120151758194,
-0.03274746239185333,
-0.04367704316973686,
-0.052125029265880585,
-0.007460415828973055,
0.11612282693386078,
0.05293397232890129,
0.030546629801392555,
-0.11225432902574539,
0.014338635839521885,
0.0026797568425536156,
0.07131018489599228,
0.050387684255838394,
-0.11800985783338547,
-0.004571536555886269,
0.01953398808836937,
0.010746849700808525,
0.0015767838340252638,
0.06146247684955597,
0.09653899073600769,
-0.03538678586483002,
-0.035328783094882965,
0.00029158592224121094,
0.014253063127398491,
-0.025564439594745636,
-0.08803687989711761,
0.04586908221244812,
-0.011981401592493057,
-0.006672970484942198,
0.0005692303529940546,
0.04003419354557991,
-0.015921836718916893,
-0.025110946968197823,
-0.059806276112794876,
0.03140809014439583,
0.0259231049567461,
-0.039349477738142014,
-0.06845250725746155,
-0.03241861239075661,
-0.010895557701587677,
0.05040648952126503,
0.06380468606948853,
-0.03158291429281235,
-0.07983997464179993,
-0.02908417396247387,
-0.08052203059196472,
-0.06364012509584427,
0.03038826212286949,
-0.06439545005559921,
-0.010723575949668884,
0.023478731513023376,
-0.0519927516579628,
0.0011099058901891112,
0.03821977972984314,
-0.0161829125136137,
0.0163838192820549,
-0.084539994597435,
-0.04507732018828392,
-0.048142608255147934,
-0.013729166239500046,
-0.0640501007437706,
0.12451314926147461,
0.006275740452110767,
-0.07596705108880997,
0.0575852133333683,
0.04736679792404175,
-0.04868915304541588,
-0.01440654881298542,
-0.01987479068338871,
0.031663600355386734,
-0.10353434830904007,
-0.10122177004814148,
-0.020440971478819847,
0.07833622395992279,
0.12701785564422607,
0.07076439261436462,
-0.005034660920500755,
0.014132113195955753,
0.03044099174439907,
-0.06907918304204941,
0.0009751720353960991,
-0.015969183295965195,
-0.011239617131650448,
-0.014714246615767479,
0.11970414966344833,
-0.015373088419437408,
-2.8580551258983178e-8,
-0.10481307655572891,
-0.07213712483644485,
-0.017863614484667778,
0.049800630658864975,
0.035891734063625336,
-0.0043761152774095535,
0.09896013140678406,
0.029659219086170197,
0.005123875103890896,
-0.05183624103665352,
0.06709408760070801,
0.09743602573871613,
-0.05662669986486435,
0.06690237671136856,
0.012732787057757378,
0.04719403386116028,
0.010082253254950047,
0.025600196793675423,
-0.03672624006867409,
-0.010229094885289669,
0.05821346864104271,
0.011658894829452038,
-0.08051785081624985,
0.032630886882543564,
0.0941891297698021,
-0.00485198013484478,
-0.044943954795598984,
0.06010352820158005,
0.04945693910121918,
0.04081089049577713,
0.03274097293615341,
0.047536592930555344,
-0.10151274502277374,
0.017011919990181923,
0.014754444360733032,
0.013775347732007504,
-0.052110154181718826,
-0.04974720627069473,
-0.0003134553844574839,
0.01623619720339775,
0.012690509669482708,
0.029184099286794662,
-0.07805419713258743,
0.033252373337745667,
0.06767134368419647,
0.02819347195327282,
-0.016345307230949402,
-0.07451888173818588,
0.027226127684116364,
-0.004447573330253363,
-0.06336601078510284,
-0.03562716394662857,
0.026418648660182953,
0.022931015118956566,
-0.037311527878046036,
0.04255162924528122,
0.020353956148028374,
0.027583014219999313,
0.04727958142757416,
0.01292849238961935,
0.05017303302884102,
0.06791427731513977,
-0.026385091245174408,
0.021853020414710045
] |
uclanlp/plbart-java-cs | 0426c742606ceb3c2e12de0ae9c46a969bba6023 | 2021-11-09T17:08:40.000Z | [
"pytorch",
"plbart",
"text2text-generation",
"transformers",
"autotrain_compatible"
] | text2text-generation | false | uclanlp | null | uclanlp/plbart-java-cs | 2,625 | null | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
tli8hf/unqover-roberta-large-newsqa | 8e7427744cb23cd65a671630a85537824dc4216e | 2021-05-20T22:36:39.000Z | [
"pytorch",
"jax",
"roberta",
"question-answering",
"transformers",
"autotrain_compatible"
] | question-answering | false | tli8hf | null | tli8hf/unqover-roberta-large-newsqa | 2,613 | null | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
moussaKam/barthez | 1ad22b19fab9b29d16d53872717e40a5b7758dd1 | 2021-11-15T12:59:17.000Z | [
"pytorch",
"mbart",
"text2text-generation",
"fr",
"arxiv:2010.12321",
"transformers",
"summarization",
"bart",
"license:apache-2.0",
"fill-mask",
"autotrain_compatible"
] | fill-mask | false | moussaKam | null | moussaKam/barthez | 2,610 | 3 | transformers | ---
tags:
- summarization
- bart
language:
- fr
widget:
- text: Barthez est le meilleur <mask> du monde.
license: apache-2.0
pipeline_tag: "fill-mask"
---
A French sequence-to-sequence pretrained model based on [BART](https://huggingface.co/facebook/bart-large). <br>
BARThez is pretrained by learning to reconstruct a corrupted input sentence. A 66GB corpus of raw French text is used to carry out the pretraining. <br>
Unlike existing BERT-based French language models such as CamemBERT and FlauBERT, BARThez is particularly well-suited for generative tasks (such as abstractive summarization), since not only its encoder but also its decoder is pretrained.
In addition to BARThez, which is pretrained from scratch, we continue the pretraining of a multilingual BART, [mBART](https://huggingface.co/facebook/mbart-large-cc25), which boosted its performance in both discriminative and generative tasks. We call the French-adapted version [mBARThez](https://huggingface.co/moussaKam/mbarthez).
| Model | Architecture | #layers | #params |
| ------------- |:-------------:| :-----:|:-----:|
| [BARThez](https://huggingface.co/moussaKam/barthez) | BASE | 12 | 165M |
| [mBARThez](https://huggingface.co/moussaKam/mbarthez) | LARGE | 24 | 458M |
<br>
paper: https://arxiv.org/abs/2010.12321 \
github: https://github.com/moussaKam/BARThez
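A minimal usage sketch (assuming the standard `transformers` fill-mask pipeline; the example sentence is the widget sentence above, and the snippet is illustrative rather than an official example):
```python
from transformers import pipeline

# Fill the masked token with BARThez; <mask> is the model's mask token.
fill_mask = pipeline("fill-mask", model="moussaKam/barthez")
print(fill_mask("Barthez est le meilleur <mask> du monde."))
```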
```
@article{eddine2020barthez,
title={BARThez: a Skilled Pretrained French Sequence-to-Sequence Model},
author={Eddine, Moussa Kamal and Tixier, Antoine J-P and Vazirgiannis, Michalis},
journal={arXiv preprint arXiv:2010.12321},
year={2020}
}
```
| [
-0.08240929991006851,
-0.033741291612386703,
0.04310769587755203,
-0.02391718700528145,
0.012750690802931786,
0.030433380976319313,
-0.03185059130191803,
0.04668458551168442,
0.08894018828868866,
-0.012212513014674187,
-0.038837824016809464,
-0.020367901772260666,
0.007192298304289579,
0.039990782737731934,
0.010327450931072235,
-0.002694702474400401,
0.07035684585571289,
0.06775861233472824,
-0.004335709381848574,
-0.07236051559448242,
0.07115308940410614,
0.013457413762807846,
0.04325764626264572,
-0.018960876390337944,
0.03392138332128525,
-0.06012373045086861,
-0.06492295861244202,
-0.056392449885606766,
0.10293391346931458,
-0.012517242692410946,
0.05761921778321266,
0.07438285648822784,
0.03431594371795654,
0.1052660346031189,
-0.12506626546382904,
0.09367751330137253,
-0.011728824116289616,
0.015167920850217342,
0.01612871326506138,
0.0341557115316391,
-0.0438305027782917,
0.05378662422299385,
-0.07562588900327682,
0.009703784249722958,
0.07580239325761795,
-0.07408734411001205,
-0.035282548516988754,
-0.014530771411955357,
-0.030170472338795662,
-0.0020974231883883476,
-0.12260550260543823,
0.04425078257918358,
0.0016346285119652748,
0.06375566869974136,
0.035654742270708084,
-0.018055621534585953,
-0.009722838178277016,
-0.06249736621975899,
0.05302856117486954,
-0.08536654710769653,
-0.06964407861232758,
-0.06159190088510513,
-0.009862714447081089,
-0.007812058087438345,
-0.011609538458287716,
-0.04026099294424057,
-0.012964700348675251,
0.07072395831346512,
-0.02972560003399849,
0.0918322503566742,
-0.021020332351326942,
0.04422809183597565,
-0.02209034189581871,
0.04459507390856743,
0.003913793712854385,
0.032510656863451004,
0.03271216154098511,
-0.04312172904610634,
0.018494319170713425,
-0.0808597132563591,
0.007169427815824747,
0.0023034089244902134,
0.03629095107316971,
0.012560955248773098,
0.06459555774927139,
0.008028765209019184,
0.062341801822185516,
0.01885467953979969,
0.0067483242601156235,
-0.0006165368249639869,
-0.06277081370353699,
-0.06932540237903595,
0.030464310199022293,
0.03654581680893898,
-0.059137534350156784,
0.01685890182852745,
0.038352616131305695,
-0.019257891923189163,
0.0024427948519587517,
0.11161871999502182,
0.0771944597363472,
0.06739293783903122,
0.039403028786182404,
-0.13177500665187836,
-0.04310981556773186,
-0.023350616917014122,
0.006623107474297285,
0.03294994309544563,
0.029285673052072525,
-0.10233128070831299,
0.061705611646175385,
-0.008082575164735317,
-0.04376571998000145,
-0.027077389881014824,
-0.0347910113632679,
-0.0045976596884429455,
0.03177864849567413,
-0.06927244365215302,
0.1041097491979599,
0.05998640134930611,
0.02451581135392189,
0.07067982107400894,
0.002612011507153511,
-0.015575182624161243,
-0.04670311138033867,
-0.04674306884407997,
0.028329191729426384,
2.17135259368779e-33,
-0.008369721472263336,
0.01981574483215809,
-0.012065164744853973,
0.05090278014540672,
-0.028381692245602608,
-0.023706480860710144,
-0.028769319877028465,
0.039506252855062485,
-0.024924153462052345,
-0.03502710908651352,
0.05899357795715332,
0.04798736795783043,
-0.09819246828556061,
0.12759490311145782,
0.02120058797299862,
-0.03820609301328659,
-0.005207792390137911,
0.009604358114302158,
0.026191290467977524,
-0.022883541882038116,
0.0596497543156147,
-0.004945504944771528,
0.041811294853687286,
-0.051051896065473557,
0.022728292271494865,
0.03772619366645813,
0.09358368813991547,
-0.10456455498933792,
-0.013896284624934196,
0.05929519608616829,
-0.12087001651525497,
-0.033283036202192307,
0.013346402905881405,
0.023433776572346687,
-0.013239878229796886,
0.002610534429550171,
-0.01697424054145813,
-0.07037052512168884,
0.02507513388991356,
-0.09459564834833145,
-0.028831781819462776,
-0.010453008115291595,
-0.03752700239419937,
-0.05998630449175835,
-0.05652398243546486,
-0.05143894627690315,
0.00014543425641022623,
-0.0026521391700953245,
0.02164875902235508,
0.00698190089315176,
0.0266753938049078,
-0.01320571918040514,
-0.006295100785791874,
-0.04608670994639397,
-0.010898560285568237,
0.03816741332411766,
0.02178117074072361,
-0.0035714921541512012,
0.04391099140048027,
-0.0028234091587364674,
-0.04460519552230835,
0.02242962270975113,
0.06009823828935623,
0.09336462616920471,
0.03415204957127571,
0.01543007418513298,
0.0037364016752690077,
0.09169725328683853,
0.034039873629808426,
-0.047771651297807693,
-0.0903875008225441,
0.01511190365999937,
-0.003169124945998192,
-0.004325494170188904,
0.06869509816169739,
0.04213283956050873,
0.0033665895462036133,
-0.10859741270542145,
-0.09882381558418274,
0.010705639608204365,
-0.050616249442100525,
-0.023326698690652847,
-0.023750385269522667,
0.011558052152395248,
-0.0731731727719307,
0.01100634504109621,
0.09600590914487839,
-0.01976689137518406,
0.024999968707561493,
-0.02844415232539177,
0.010961131192743778,
-0.022259702906012535,
0.019930969923734665,
0.009914524853229523,
-0.0033540825825184584,
-4.0734321250388264e-33,
-0.02658672071993351,
0.035588402301073074,
-0.09930281341075897,
0.02480820193886757,
-0.024118144065141678,
-0.054887622594833374,
0.027323836460709572,
0.09237358719110489,
-0.0736604779958725,
-0.08000773936510086,
-0.061531275510787964,
-0.10060366243124008,
-0.028504008427262306,
-0.017024457454681396,
0.051210131496191025,
-0.008528470061719418,
0.010972975753247738,
0.03383063152432442,
-0.007563858758658171,
0.03486877307295799,
0.016630541533231735,
0.03035375475883484,
-0.07786501944065094,
0.09071233868598938,
0.025390582159161568,
0.10017018765211105,
-0.023341313004493713,
0.059431292116642,
-0.04264847934246063,
-0.02327544055879116,
-0.020804865285754204,
0.007576394360512495,
0.014765055850148201,
0.002226593205705285,
-0.10344859957695007,
0.036983489990234375,
0.03515929356217384,
0.019390327855944633,
-0.029448676854372025,
0.08012163639068604,
0.07123755663633347,
-0.021972933784127235,
-0.008811128325760365,
-0.0008746602688916028,
-0.025761742144823074,
-0.05017717927694321,
-0.08759256452322006,
-0.11646848917007446,
0.07709932327270508,
0.03215010091662407,
0.025188380852341652,
0.042370378971099854,
-0.12469492852687836,
-0.05090497434139252,
-0.07057610154151917,
-0.06857403367757797,
0.0024760731030255556,
-0.06838610023260117,
-0.054660167545080185,
-0.07574662566184998,
-0.10924402624368668,
0.0072923386469483376,
0.07449359446763992,
-0.014864504337310791,
0.0682426318526268,
-0.1191684752702713,
-0.05633968114852905,
-0.022548332810401917,
-0.052134983241558075,
-0.029764430597424507,
0.09828262776136398,
0.030066946521401405,
0.008842889219522476,
0.07538709044456482,
0.030184991657733917,
0.02419101446866989,
0.06124194338917732,
-0.024493150413036346,
-0.07975445687770844,
-0.047469571232795715,
-0.017693012952804565,
-0.06774307787418365,
0.04091598093509674,
0.05663704499602318,
0.0060676513239741325,
0.04763668030500412,
-0.017550723627209663,
0.012264542281627655,
0.007967451587319374,
0.0011196064297109842,
-0.007731548976153135,
0.03393074497580528,
-0.046296264976263046,
0.040332820266485214,
0.009398994036018848,
-4.9343469754603575e-8,
-0.11846641451120377,
0.011213455349206924,
-0.07585154473781586,
0.03417209908366203,
-0.05516846105456352,
-0.12686829268932343,
-0.02106355130672455,
0.02606595680117607,
-0.05779203027486801,
-0.06867258995771408,
0.027557507157325745,
0.053211864084005356,
-0.059188809245824814,
-0.018187571316957474,
-0.025482473894953728,
0.07095177471637726,
0.01587687060236931,
-0.013369552791118622,
-0.006196647882461548,
-0.028860561549663544,
0.008362989872694016,
0.012600391171872616,
-0.033668678253889084,
-0.02050858549773693,
0.0324026420712471,
-0.05333929881453514,
-0.08562637865543365,
0.04390056058764458,
0.027010837569832802,
-0.03520810231566429,
0.018683720380067825,
0.08376512676477432,
-0.057992689311504364,
-0.009111526422202587,
0.041287392377853394,
0.07474572211503983,
-0.0012025822652503848,
-0.05152234807610512,
-0.0185294970870018,
0.04722563922405243,
0.08316583186388016,
0.05743078514933586,
-0.07854925096035004,
-0.017168138176202774,
0.04932025820016861,
-0.023564811795949936,
-0.039335981011390686,
-0.04004109278321266,
0.057893864810466766,
0.0059736305847764015,
0.0005123879527673125,
-0.037673745304346085,
0.006089122034609318,
-0.01672198437154293,
0.04049072787165642,
0.08062559366226196,
-0.061090435832738876,
-0.021089505404233932,
0.06330593675374985,
0.002592109376564622,
0.06275179237127304,
0.0664670541882515,
0.031750742346048355,
0.02156207524240017
] |
indobenchmark/indobert-large-p1 | ee2669aee95421008ad3833c3866c57a006e662d | 2021-05-19T20:26:01.000Z | [
"pytorch",
"tf",
"jax",
"bert",
"feature-extraction",
"id",
"dataset:Indo4B",
"arxiv:2009.05387",
"transformers",
"indobert",
"indobenchmark",
"indonlu",
"license:mit"
] | feature-extraction | false | indobenchmark | null | indobenchmark/indobert-large-p1 | 2,607 | null | transformers | ---
language: id
tags:
- indobert
- indobenchmark
- indonlu
license: mit
inference: false
datasets:
- Indo4B
---
# IndoBERT Large Model (phase1 - uncased)
[IndoBERT](https://arxiv.org/abs/2009.05387) is a state-of-the-art language model for Indonesian based on the BERT model. The pretrained model is trained using a masked language modeling (MLM) objective and a next sentence prediction (NSP) objective.
## All Pre-trained Models
| Model | #params | Arch. | Training data |
|--------------------------------|--------------------------------|-------|-----------------------------------|
| `indobenchmark/indobert-base-p1` | 124.5M | Base | Indo4B (23.43 GB of text) |
| `indobenchmark/indobert-base-p2` | 124.5M | Base | Indo4B (23.43 GB of text) |
| `indobenchmark/indobert-large-p1` | 335.2M | Large | Indo4B (23.43 GB of text) |
| `indobenchmark/indobert-large-p2` | 335.2M | Large | Indo4B (23.43 GB of text) |
| `indobenchmark/indobert-lite-base-p1` | 11.7M | Base | Indo4B (23.43 GB of text) |
| `indobenchmark/indobert-lite-base-p2` | 11.7M | Base | Indo4B (23.43 GB of text) |
| `indobenchmark/indobert-lite-large-p1` | 17.7M | Large | Indo4B (23.43 GB of text) |
| `indobenchmark/indobert-lite-large-p2` | 17.7M | Large | Indo4B (23.43 GB of text) |
## How to use
### Load model and tokenizer
```python
from transformers import BertTokenizer, AutoModel
tokenizer = BertTokenizer.from_pretrained("indobenchmark/indobert-large-p1")
model = AutoModel.from_pretrained("indobenchmark/indobert-large-p1")
```
### Extract contextual representation
```python
import torch  # needed for LongTensor below

x = torch.LongTensor(tokenizer.encode('aku adalah anak [MASK]')).view(1, -1)
print(x, model(x)[0].sum())
```
## Authors
<b>IndoBERT</b> was trained and evaluated by Bryan Wilie\*, Karissa Vincentio\*, Genta Indra Winata\*, Samuel Cahyawijaya\*, Xiaohong Li, Zhi Yuan Lim, Sidik Soleman, Rahmad Mahendra, Pascale Fung, Syafri Bahar, Ayu Purwarianti.
## Citation
If you use our work, please cite:
```bibtex
@inproceedings{wilie2020indonlu,
title={IndoNLU: Benchmark and Resources for Evaluating Indonesian Natural Language Understanding},
author={Bryan Wilie and Karissa Vincentio and Genta Indra Winata and Samuel Cahyawijaya and X. Li and Zhi Yuan Lim and S. Soleman and R. Mahendra and Pascale Fung and Syafri Bahar and A. Purwarianti},
booktitle={Proceedings of the 1st Conference of the Asia-Pacific Chapter of the Association for Computational Linguistics and the 10th International Joint Conference on Natural Language Processing},
year={2020}
}
```
| [
-0.055342353880405426,
-0.07549097388982773,
0.04854533076286316,
-0.004957129247486591,
-0.03858387842774391,
0.115557961165905,
0.004609860945492983,
-0.007012510672211647,
0.05067291855812073,
0.016636110842227936,
0.04763784632086754,
-0.0876988023519516,
-0.0438600592315197,
0.0022234206553548574,
0.01837686076760292,
0.028128577396273613,
0.08398758620023727,
-0.008369294926524162,
-0.04483155161142349,
-0.15514181554317474,
0.06799456477165222,
0.0982184186577797,
0.04565649852156639,
-0.05433523654937744,
0.03000115416944027,
0.0035857544280588627,
0.019629377871751785,
-0.06684307754039764,
0.049015019088983536,
0.03855450078845024,
-0.008259763941168785,
0.0003008101775776595,
0.006642034277319908,
0.09248948842287064,
-0.006306278053671122,
0.015391557477414608,
-0.05951422080397606,
-0.020190009847283363,
0.08319191634654999,
0.0031489378307014704,
-0.05624164640903473,
-0.042852457612752914,
-0.039828550070524216,
-0.06645944714546204,
0.1359713077545166,
-0.04930102080106735,
-0.07529844343662262,
0.01204230822622776,
-0.020886553451418877,
-0.05277614668011665,
-0.10918918997049332,
-0.040412697941064835,
0.015216310508549213,
0.027766570448875427,
-0.01865079440176487,
-0.047554340213537216,
0.02257990464568138,
0.019850116223096848,
0.011628019623458385,
-0.0322721041738987,
-0.10392405837774277,
-0.025320911779999733,
-0.03842821717262268,
0.04503336921334267,
-0.059302106499671936,
0.04810929298400879,
-0.060164883732795715,
0.024974901229143143,
-0.005099737551063299,
0.05968833714723587,
-0.029960403218865395,
0.07913246750831604,
0.03799806907773018,
0.018414035439491272,
-0.08006850630044937,
-0.03357408940792084,
0.09983981400728226,
-0.006187304388731718,
0.04933740571141243,
-0.04227044805884361,
0.017796490341424942,
0.024422094225883484,
0.09585250169038773,
0.008674168027937412,
0.0351097509264946,
0.0009992931736633182,
0.012930832803249359,
-0.01550525426864624,
-0.0578620508313179,
-0.026225341483950615,
-0.00849900022149086,
-0.09547805786132812,
0.04738277569413185,
-0.016931181773543358,
-0.00047506490955129266,
0.05071353539824486,
-0.011036665178835392,
-0.007380020339041948,
-0.005896087270230055,
0.0474933385848999,
0.03970130905508995,
0.06881207227706909,
-0.04017651081085205,
-0.07053034007549286,
0.006052285432815552,
-0.03966408967971802,
0.016895025968551636,
-0.06501073390245438,
0.0869813784956932,
-0.04381800442934036,
-0.01171646174043417,
-0.008109399117529392,
-0.010685168206691742,
-0.041086699813604355,
-0.022015448659658432,
-0.054574426263570786,
-0.005080417264252901,
-0.00023509757011197507,
-0.014802303165197372,
0.05284759774804115,
-0.04791935533285141,
0.001443038578145206,
0.03437013179063797,
0.030737636610865593,
-0.06399565935134888,
-0.03257598355412483,
0.006239386275410652,
-5.272634727549261e-33,
0.006837188731878996,
-0.025112953037023544,
0.04424407705664635,
-0.07207826524972916,
-0.013804036192595959,
-0.04341017082333565,
0.0023523655254393816,
-0.05379611253738403,
-0.025319868698716164,
-0.05262616276741028,
-0.04843030869960785,
-0.04134992137551308,
-0.13984380662441254,
0.028329115360975266,
-0.029645908623933792,
0.046899568289518356,
-0.05501898005604744,
0.05022946000099182,
0.007999631576240063,
-0.0070823379792273045,
0.0439959317445755,
0.046940140426158905,
0.02424696274101734,
-0.09052526950836182,
-0.03450079262256622,
0.022372132167220116,
0.08729788661003113,
-0.11478547751903534,
-0.06934405118227005,
0.05331142246723175,
-0.12621575593948364,
0.019115028902888298,
-0.07826732844114304,
0.03298335522413254,
-0.10463717579841614,
-0.03964461013674736,
0.018772397190332413,
-0.07394754141569138,
-0.0043456363491714,
-0.018903886899352074,
-0.023432128131389618,
0.02995784766972065,
0.05391846224665642,
-0.056561268866062164,
0.013597232289612293,
-0.02289854921400547,
-0.0280893687158823,
-0.01821768842637539,
0.010647964663803577,
0.07148328423500061,
-0.01252016332000494,
-0.005387897603213787,
-0.03624877706170082,
-0.0002121042343787849,
0.026707123965024948,
-0.00019703780708368868,
0.04396994039416313,
-0.0285512562841177,
0.02671995759010315,
0.04448267072439194,
0.004882372450083494,
-0.08388330042362213,
0.0038918431382626295,
-0.02148834802210331,
0.031345121562480927,
-0.01423425879329443,
-0.03332029655575752,
-0.06254859268665314,
0.0269778985530138,
-0.01910751312971115,
-0.025714386254549026,
-0.0528610460460186,
0.06343919783830643,
0.0780334621667862,
-0.0036294476594775915,
-0.016756750643253326,
0.046985626220703125,
-0.04500485211610794,
-0.029920458793640137,
0.02577182464301586,
-0.003927094861865044,
-0.021699706092476845,
-0.01757836528122425,
-0.06205444410443306,
-0.012943471781909466,
-0.02005707286298275,
0.03701169788837433,
-0.0343131348490715,
0.015224289149045944,
-0.016197584569454193,
0.026815684512257576,
-0.024205094203352928,
-0.01836238242685795,
0.05822446569800377,
-0.023045960813760757,
-2.2593019789903598e-34,
-0.011189590208232403,
0.07756923139095306,
-0.1303521990776062,
0.012048141099512577,
-0.0775856152176857,
-0.08386478573083878,
0.017540758475661278,
0.1470913589000702,
-0.00011005714623024687,
-0.008555985055863857,
-0.004113079514354467,
-0.07184461504220963,
0.07530433684587479,
0.009283790364861488,
0.053525660187006,
0.004325781483203173,
0.020524142310023308,
0.0714636892080307,
0.019867470487952232,
0.051359258592128754,
0.023697789758443832,
0.025134021416306496,
-0.10517636686563492,
0.09080641716718674,
0.014108075760304928,
0.022356176748871803,
-0.017234714701771736,
0.07277817279100418,
-0.0850108340382576,
0.051297642290592194,
0.0011272223200649023,
-0.01732793264091015,
-0.07932732999324799,
0.05528842285275459,
-0.07596878707408905,
-0.03778934106230736,
0.036976706236600876,
-0.06340167671442032,
-0.029739635065197945,
0.0651577040553093,
0.05784258991479874,
0.03748709335923195,
-0.0846812054514885,
-0.0024919298011809587,
-0.03982311114668846,
-0.0157125573605299,
-0.08599965274333954,
-0.015105970203876495,
0.07454976439476013,
-0.11002010107040405,
-0.03245534002780914,
0.023211712017655373,
-0.034144721925258636,
-0.025783447548747063,
-0.09671079367399216,
-0.04303828626871109,
-0.0041281613521277905,
-0.06868726760149002,
-0.03652182221412659,
-0.015249088406562805,
-0.07002395391464233,
-0.028874952346086502,
0.09502403438091278,
-0.004869763273745775,
0.041650645434856415,
0.006113692652434111,
0.052092041820287704,
0.12360448390245438,
0.027590051293373108,
-0.12741895020008087,
0.01168652344495058,
-0.04864209145307541,
0.007877159863710403,
0.07365568727254868,
0.015035328455269337,
-0.005080157890915871,
-0.011900321580469608,
-0.09124726802110672,
-0.004649537615478039,
-0.06409349292516708,
-0.004201545380055904,
-0.042240340262651443,
0.018581978976726532,
-0.019416317343711853,
0.03247304633259773,
0.08331181108951569,
0.01967628486454487,
0.017916902899742126,
-0.013894681818783283,
0.044629912823438644,
0.0067990561947226524,
0.0382949523627758,
0.0005427980213426054,
0.08272043615579605,
-0.01204012893140316,
-4.871676040352213e-8,
-0.02852700464427471,
-0.02645678073167801,
0.03731952980160713,
0.0374288409948349,
-0.042339179664850235,
-0.05653581768274307,
-0.028521833941340446,
0.012144932523369789,
-0.06900311261415482,
-0.08690287917852402,
-0.010102824307978153,
0.05487634241580963,
-0.07637137919664383,
0.049402132630348206,
-0.021549178287386894,
0.05229060351848602,
0.004441964440047741,
0.0807822123169899,
0.022850023582577705,
-0.08861236274242401,
0.044278789311647415,
0.01924837753176689,
0.05172697827219963,
-0.024830244481563568,
0.013780093751847744,
-0.0004816674627363682,
-0.013057749718427658,
0.09557229280471802,
0.03239991143345833,
0.044079091399908066,
-0.05468196049332619,
0.05730374529957771,
-0.07844153046607971,
0.04976202920079231,
0.03389600291848183,
0.08639923483133316,
0.02306271716952324,
-0.044863827526569366,
-0.0383775494992733,
0.06410679221153259,
0.050627175718545914,
-0.011006961576640606,
-0.07080961763858795,
-0.001869953703135252,
0.10889379680156708,
0.05247711390256882,
0.04577126353979111,
-0.12240895628929138,
0.04859349504113197,
-0.06050770729780197,
0.005837978795170784,
-0.01269331481307745,
-0.016447056084871292,
0.009477704763412476,
-0.012917897664010525,
0.03063253127038479,
-0.10537232458591461,
0.02738368511199951,
0.04044993594288826,
-0.018219588324427605,
0.05966084823012352,
0.049389567226171494,
0.011281673796474934,
0.06487315893173218
] |
castorini/monot5-base-med-msmarco | 7a4324f2785ab5f1dea00e7a39d6f81f3e2d273f | 2021-06-23T11:40:06.000Z | [
"pytorch",
"jax",
"t5",
"feature-extraction",
"transformers"
] | feature-extraction | false | castorini | null | castorini/monot5-base-med-msmarco | 2,603 | null | transformers | This model is a T5-base reranker fine-tuned on the MS MARCO passage dataset for 10k steps (or 1 epoch) and then fine-tuned again on MedMARCO (from the [Sledge-Z paper](https://www.aclweb.org/anthology/2020.emnlp-main.341.pdf)) for 1k steps.
For more details on how to use it, check [pygaggle.ai](http://pygaggle.ai).
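The sketch below illustrates the true/false scoring scheme described in the paper linked below. It is only an assumption-laden example — the prompt wording, example query/document, and the use of plain `transformers` instead of pygaggle are illustrative, not the reference implementation:
```python
import torch
from transformers import T5Tokenizer, T5ForConditionalGeneration

tokenizer = T5Tokenizer.from_pretrained("castorini/monot5-base-med-msmarco")
model = T5ForConditionalGeneration.from_pretrained("castorini/monot5-base-med-msmarco")

# Hypothetical query/document pair for illustration only.
query = "what causes high blood pressure"
doc = "Hypertension is often linked to diet, obesity, and genetic factors."
inputs = tokenizer(f"Query: {query} Document: {doc} Relevant:", return_tensors="pt")

with torch.no_grad():
    # Decoder starts from the decoder start token; only the first-step logits are needed.
    out = model(**inputs,
                decoder_input_ids=torch.tensor([[model.config.decoder_start_token_id]]))

true_id = tokenizer.encode("true")[0]
false_id = tokenizer.encode("false")[0]
# Relevance score: log-probability of "true" renormalized over {true, false}.
score = torch.log_softmax(out.logits[0, 0, [false_id, true_id]], dim=-1)[1].item()
print(score)
```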
Paper describing the model: [Document Ranking with a Pretrained Sequence-to-Sequence Model](https://www.aclweb.org/anthology/2020.findings-emnlp.63/) | [
-0.10545282810926437,
-0.06650065630674362,
0.028889667242765427,
-0.006616164930164814,
-0.017721518874168396,
0.045337095856666565,
-0.09542584419250488,
0.05743452161550522,
-0.028010772541165352,
-0.04267255216836929,
-0.046818703413009644,
0.08185259997844696,
0.015109661035239697,
-0.009319649077951908,
-0.10988821089267731,
0.005593643523752689,
0.022662339732050896,
0.017911570146679878,
-0.020250104367733,
-0.0829797089099884,
-0.013807485811412334,
0.021129032596945763,
0.04491715878248215,
0.002829022239893675,
0.049860190600156784,
-0.07862470299005508,
-0.03960127383470535,
0.05409238487482071,
0.056719545274972916,
-0.04056579992175102,
0.05190463364124298,
0.1017511710524559,
0.03725326061248779,
0.05429929494857788,
-0.031662121415138245,
0.04493154585361481,
-0.03645830228924751,
-0.10675176978111267,
-0.0034106511157006025,
0.013822603970766068,
0.07060902565717697,
0.009168501943349838,
-0.00043268842273391783,
0.029894204810261726,
0.11129551380872726,
-0.045141689479351044,
-0.05232780799269676,
-0.019421691074967384,
-0.04305609315633774,
-0.001792554627172649,
-0.11372075974941254,
0.019915198907256126,
-0.05403565615415573,
0.12920986115932465,
-0.01833968795835972,
0.04041244462132454,
-0.00030275937751866877,
-0.07068359851837158,
-0.007017364259809256,
-0.10177010297775269,
-0.01768764853477478,
-0.04041536524891853,
-0.06608767062425613,
-0.06855994462966919,
0.035583142191171646,
0.00040897243889048696,
-0.008173220790922642,
0.006731424480676651,
0.05266888812184334,
0.04791365563869476,
0.001646713586524129,
0.00028826171183027327,
0.04812140762805939,
0.030273422598838806,
-0.02591659501194954,
0.022117553278803825,
0.07448716461658478,
-0.0056625488214194775,
0.0004464029334485531,
-0.08651024848222733,
-0.04188045859336853,
-0.04708339646458626,
0.004188300110399723,
-0.023153219372034073,
-0.011352292262017727,
-0.1312321126461029,
0.07265017181634903,
-0.03044790029525757,
0.06923463195562363,
0.026839392259716988,
0.0481514111161232,
-0.010650708340108395,
0.015792442485690117,
-0.03802214190363884,
-0.021388789638876915,
0.09117459505796432,
0.014925082214176655,
-0.0273131150752306,
-0.011674902401864529,
0.08546502888202667,
0.015448685735464096,
0.09814925491809845,
0.06852807849645615,
-0.03870904818177223,
0.032280873507261276,
0.006706973537802696,
0.054867956787347794,
0.09515931457281113,
-0.0008594191749580204,
-0.09131080657243729,
-0.012783008627593517,
0.03257102519273758,
-0.0015077961143106222,
-0.013204225338995457,
-0.013558833859860897,
-0.020403344184160233,
-0.019984805956482887,
0.026667101308703423,
0.02523525059223175,
0.062450893223285675,
0.00008012877515284345,
0.013767682015895844,
0.0026083458214998245,
-0.019096996635198593,
-0.014149977825582027,
0.02062178961932659,
-0.05275878682732582,
1.986997610596423e-33,
0.08477957546710968,
0.008207338862121105,
0.019158346578478813,
-0.06002815440297127,
0.007206337992101908,
0.01778344251215458,
0.019872788339853287,
-0.02066841721534729,
-0.11259593814611435,
-0.048457272350788116,
-0.11035750061273575,
0.0243651382625103,
-0.05249794200062752,
0.01444730069488287,
-0.02957444079220295,
-0.09008678793907166,
-0.05249697342514992,
0.07580031454563141,
-0.062468692660331726,
-0.00019930968119297177,
0.10250424593687057,
-0.049313828349113464,
-0.013476175256073475,
-0.1213802844285965,
-0.014240034855902195,
0.07364920526742935,
-0.04118955880403519,
-0.06196051463484764,
-0.068552166223526,
0.046477410942316055,
-0.0667055994272232,
0.06325690448284149,
-0.0540056973695755,
-0.0193177480250597,
0.0456937812268734,
0.023529887199401855,
-0.013652944937348366,
-0.010858474299311638,
0.10775334388017654,
-0.11104654520750046,
0.0028327268082648516,
0.0289655439555645,
0.055775709450244904,
-0.068069227039814,
-0.0895005613565445,
-0.007190210744738579,
-0.052892059087753296,
0.06786973774433136,
0.07275279611349106,
0.008427510969340801,
0.016408920288085938,
-0.011786479502916336,
-0.020866991952061653,
-0.013376058079302311,
-0.02352645993232727,
-0.03879089280962944,
0.06982163339853287,
0.12614211440086365,
0.03397431597113609,
0.05853139981627464,
0.08772672712802887,
0.04902542009949684,
-0.0007743585738353431,
0.06437897682189941,
0.0999310240149498,
0.020775122568011284,
-0.06415864080190659,
0.03239698335528374,
0.0800471380352974,
0.04703059792518616,
0.00939465407282114,
0.058926910161972046,
-0.014980797655880451,
-0.010292253457009792,
0.04874006658792496,
-0.05649217963218689,
0.06748535484075546,
-0.09052250534296036,
-0.005727019626647234,
0.0019079542253166437,
-0.054414425045251846,
-0.03880052641034126,
-0.022246062755584717,
-0.09609337151050568,
-0.0032060430385172367,
-0.02792411670088768,
0.020138558000326157,
-0.03308909013867378,
-0.07497149705886841,
-0.0357840396463871,
0.02266857773065567,
0.017089074477553368,
0.05986078083515167,
0.04327036067843437,
-0.020709093660116196,
-1.295629689805333e-33,
0.021212056279182434,
-0.017657240852713585,
0.06633444130420685,
0.08665633946657181,
0.002972797956317663,
-0.0352264903485775,
-0.0266294926404953,
0.08997705578804016,
-0.05445173382759094,
0.035933542996644974,
0.046616967767477036,
-0.029685167595744133,
0.09059459716081619,
-0.060555506497621536,
0.04540727287530899,
-0.037135425955057144,
0.04850930720567703,
-0.05058671161532402,
0.025052636861801147,
0.046927258372306824,
0.011685075238347054,
0.013305594213306904,
-0.13979309797286987,
0.06057976558804512,
0.025679856538772583,
0.035975441336631775,
0.03969297185540199,
0.052294738590717316,
-0.09045054018497467,
-0.06675311923027039,
-0.001218288904055953,
-0.0336807556450367,
-0.04102521389722824,
-0.019578732550144196,
-0.01719924993813038,
0.05693252012133598,
0.026957988739013672,
-0.01451621949672699,
-0.03900344669818878,
0.08931177854537964,
0.08122340589761734,
0.08705907315015793,
-0.06956370919942856,
0.00915560033172369,
0.0011904699495062232,
0.02461407519876957,
-0.11807674914598465,
0.07272011041641235,
-0.003132142825052142,
-0.05575239285826683,
0.0203237384557724,
0.06458111107349396,
-0.01765662617981434,
0.01210024394094944,
-0.038757242262363434,
0.03732030466198921,
-0.0694059431552887,
-0.044681500643491745,
-0.06629372388124466,
0.005648219492286444,
-0.015518531203269958,
0.017161915078759193,
-0.039328623563051224,
0.004525148309767246,
0.07799655944108963,
-0.022105686366558075,
-0.030964722856879234,
-0.04002014547586441,
-0.07687430083751678,
0.008149951696395874,
0.02661214955151081,
0.004492956679314375,
0.04703323543071747,
-0.01826207898557186,
-0.014907521195709705,
-0.05660971999168396,
-0.023166047409176826,
0.002685720566660166,
-0.030205022543668747,
-0.08674595504999161,
-0.007607627660036087,
0.004724273923784494,
0.019726263359189034,
0.04169087111949921,
-0.014948616735637188,
0.03397981449961662,
0.062187209725379944,
0.03915832191705704,
0.04655226692557335,
0.02133123017847538,
0.005932495463639498,
-0.04418158531188965,
-0.0026901436503976583,
0.026693759486079216,
0.0026516802608966827,
-5.160230287515333e-8,
-0.05548539757728577,
0.04491396248340607,
-0.0475168451666832,
0.11337342113256454,
0.06487616151571274,
0.014325222931802273,
0.01069878414273262,
0.11437541991472244,
-0.01660957746207714,
0.007756234146654606,
0.0672520101070404,
-0.01630041003227234,
-0.05044778063893318,
0.013009490445256233,
0.04913075268268585,
0.021287983283400536,
0.0012572857085615396,
0.04873015731573105,
-0.058988384902477264,
-0.026071276515722275,
0.04448137432336807,
0.02700578048825264,
0.10127734392881393,
-0.0194901991635561,
-0.011184066534042358,
-0.013484042137861252,
0.015169930644333363,
0.06638005375862122,
0.04340318217873573,
-0.04946872964501381,
0.017615901306271553,
-0.0013396325521171093,
-0.031115185469388962,
0.04413526505231857,
-0.008818085305392742,
0.12895220518112183,
-0.05756106600165367,
-0.004607159178704023,
-0.049133822321891785,
0.031157005578279495,
0.04526686668395996,
0.0575033500790596,
-0.038130030035972595,
0.008408835157752037,
-0.008241860195994377,
-0.03618527576327324,
-0.06977739185094833,
-0.056862469762563705,
0.06571458280086517,
-0.008824487216770649,
0.024375122040510178,
-0.028041254729032516,
-0.04851827770471573,
-0.05137104168534279,
0.04558153823018074,
0.0701327845454216,
0.017395393922924995,
0.00018117106810677797,
-0.03307095542550087,
-0.01581623964011669,
0.050054971128702164,
-0.06903692334890366,
-0.02252379059791565,
0.03628412261605263
] |
sshleifer/distilbart-xsum-12-1 | e85cfe19c276077efa4389e576f99d456a45755b | 2021-06-14T07:56:06.000Z | [
"pytorch",
"jax",
"bart",
"text2text-generation",
"en",
"dataset:cnn_dailymail",
"dataset:xsum",
"transformers",
"summarization",
"license:apache-2.0",
"autotrain_compatible"
] | summarization | false | sshleifer | null | sshleifer/distilbart-xsum-12-1 | 2,601 | 1 | transformers | ---
language: en
tags:
- summarization
license: apache-2.0
datasets:
- cnn_dailymail
- xsum
thumbnail: https://huggingface.co/front/thumbnails/distilbart_medium.png
---
### Usage
This checkpoint should be loaded into `BartForConditionalGeneration.from_pretrained`. See the [BART docs](https://huggingface.co/transformers/model_doc/bart.html?#transformers.BartForConditionalGeneration) for more information.
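As a quick illustration, the checkpoint can be used for summarization as sketched below (a minimal example with illustrative input text and generation settings, not a prescribed configuration):
```python
from transformers import BartForConditionalGeneration, BartTokenizer

model_name = "sshleifer/distilbart-xsum-12-1"
tokenizer = BartTokenizer.from_pretrained(model_name)
model = BartForConditionalGeneration.from_pretrained(model_name)

# Example article is made up; beam size and length limits are illustrative defaults.
article = "PG&E scheduled the blackouts in response to forecasts for high winds amid dry conditions."
inputs = tokenizer(article, return_tensors="pt", truncation=True, max_length=1024)
summary_ids = model.generate(inputs["input_ids"], num_beams=4, max_length=60, early_stopping=True)
print(tokenizer.decode(summary_ids[0], skip_special_tokens=True))
```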
### Metrics for DistilBART models
| Model Name | MM Params | Inference Time (ms) | Speedup | Rouge-2 | Rouge-L |
|:---------------------------|------------:|----------------------:|----------:|----------:|----------:|
| distilbart-xsum-12-1 | 222 | 90 | 2.54 | 18.31 | 33.37 |
| distilbart-xsum-6-6 | 230 | 132 | 1.73 | 20.92 | 35.73 |
| distilbart-xsum-12-3 | 255 | 106 | 2.16 | 21.37 | 36.39 |
| distilbart-xsum-9-6 | 268 | 136 | 1.68 | 21.72 | 36.61 |
| bart-large-xsum (baseline) | 406 | 229 | 1 | 21.85 | 36.50 |
| distilbart-xsum-12-6 | 306 | 137 | 1.68 | 22.12 | 36.99 |
| bart-large-cnn (baseline) | 406 | 381 | 1 | 21.06 | 30.63 |
| distilbart-12-3-cnn | 255 | 214 | 1.78 | 20.57 | 30.00 |
| distilbart-12-6-cnn | 306 | 307 | 1.24 | 21.26 | 30.59 |
| distilbart-6-6-cnn | 230 | 182 | 2.09 | 20.17 | 29.70 |
| [
-0.10242787003517151,
-0.08055665343999863,
0.058801449835300446,
0.001729630515910685,
-0.01020839437842369,
-0.020107010379433632,
-0.09146951138973236,
0.055804893374443054,
-0.03275766223669052,
-0.0814155638217926,
0.047091152518987656,
-0.024555958807468414,
0.02256167307496071,
-0.06064292788505554,
-0.07659092545509338,
0.011284125037491322,
0.03928124159574509,
-0.0036749355494976044,
-0.08591760694980621,
-0.05708478391170502,
0.052746228873729706,
-0.040730513632297516,
-0.02270055003464222,
-0.05265913903713226,
0.08935019373893738,
0.0025974700693041086,
-0.03890107572078705,
0.012740290723741055,
0.0906219556927681,
-0.04279907047748566,
-0.004099974874407053,
0.04570003226399422,
-0.07850365340709686,
0.018990738317370415,
-0.01020409632474184,
0.08468351513147354,
0.006208515260368586,
-0.007276525720953941,
0.02783900499343872,
0.03190219774842262,
0.04338159039616585,
-0.002901187865063548,
-0.030336445197463036,
-0.02716916799545288,
0.04014960676431656,
-0.058358509093523026,
0.013790902681648731,
-0.017771178856492043,
0.01821342296898365,
-0.02310691587626934,
-0.049805302172899246,
-0.00339138088747859,
-0.005217964760959148,
0.061979468911886215,
-0.018776532262563705,
-0.02264600805938244,
0.039845243096351624,
-0.019266774877905846,
0.021008379757404327,
-0.0679703801870346,
-0.09366007894277573,
-0.0025608239229768515,
-0.0858554095029831,
-0.027356861159205437,
-0.02404315024614334,
-0.004478763323277235,
0.03164851665496826,
0.01056541409343481,
0.0023012161254882812,
0.014064272865653038,
-0.06235840544104576,
0.028858404606580734,
0.03378373011946678,
0.004465024918317795,
0.07923771440982819,
-0.02148638851940632,
0.11610335111618042,
0.0398324579000473,
0.06239284574985504,
-0.17593292891979218,
0.04951082542538643,
-0.022424643859267235,
0.0025099257472902536,
0.021336156874895096,
0.042870569974184036,
-0.02120130881667137,
-0.0128794489428401,
-0.004414125811308622,
0.07955142110586166,
-0.06996607035398483,
-0.05295264348387718,
0.016240382567048073,
-0.11916537582874298,
0.008669688366353512,
-0.028409382328391075,
0.08660409599542618,
-0.015133734792470932,
0.042395152151584625,
-0.020870883017778397,
0.110101617872715,
-0.031033620238304138,
-0.06374292075634003,
0.038810137659311295,
0.0009930998785421252,
-0.0503569021821022,
-0.058442480862140656,
0.0753164291381836,
0.1175476536154747,
-0.032253846526145935,
-0.014065131545066833,
0.14000876247882843,
0.021389251574873924,
0.036053162068128586,
-0.01974489726126194,
0.030817579478025436,
0.0031923118513077497,
-0.030438171699643135,
-0.05716529116034508,
-0.010146639309823513,
-0.039032574743032455,
-0.0023189843632280827,
0.025894414633512497,
0.0314161442220211,
0.03209250792860985,
-0.049785830080509186,
-0.043941400945186615,
0.04122624173760414,
5.461544774208938e-35,
-0.024408750236034393,
-0.03235231712460518,
0.009395932778716087,
0.0143541619181633,
-0.019927645102143288,
0.011538815684616566,
-0.004154409281909466,
0.004460239317268133,
-0.039520375430583954,
-0.05001825839281082,
-0.027546891942620277,
-0.001932688639499247,
-0.1829814612865448,
0.011275903321802616,
-0.05464207008481026,
-0.009606490842998028,
-0.010763490572571754,
0.05929546430706978,
-0.017166277393698692,
0.009892466478049755,
0.08686050772666931,
-0.052564844489097595,
-0.0730401873588562,
-0.012235877104103565,
-0.05119817703962326,
0.05293474346399307,
0.047535110265016556,
-0.024820925667881966,
-0.051516685634851456,
0.038043733686208725,
-0.0038780367467552423,
0.04933198168873787,
0.02613821066915989,
-0.034857045859098434,
-0.05000770092010498,
0.012604543007910252,
-0.08287376910448074,
0.0021607009693980217,
-0.04153299331665039,
-0.0480438731610775,
-0.023325279355049133,
0.06455279886722565,
-0.03618278354406357,
-0.10329894721508026,
-0.08560588955879211,
-0.012695742771029472,
0.06148916110396385,
-0.0014318762114271522,
0.012406377121806145,
-0.0063721551559865475,
0.010638606734573841,
0.03645683452486992,
-0.059038691222667694,
-0.05735098943114281,
-0.022052470594644547,
0.043612636625766754,
0.06302247941493988,
0.04900328069925308,
0.03978622704744339,
0.031266748905181885,
0.014886674471199512,
0.04121227562427521,
-0.025450652465224266,
0.03354961797595024,
0.06778407096862793,
0.009504653513431549,
0.010917606763541698,
0.03269919008016586,
0.04048515111207962,
0.0311855711042881,
-0.09321687370538712,
-0.10306260734796524,
0.001251904759556055,
0.019347315654158592,
0.10530905425548553,
-0.05783497914671898,
0.05076569691300392,
0.0006828922196291387,
-0.05174199864268303,
-0.014428242109715939,
-0.08407353609800339,
-0.0058699436485767365,
-0.05062512680888176,
-0.06463600695133209,
-0.01425554696470499,
-0.0345621332526207,
0.07267823815345764,
-0.02438926324248314,
-0.0994235947728157,
-0.08070188015699387,
-0.003353934967890382,
0.02165825664997101,
0.01772112026810646,
-0.02488882653415203,
-0.04665885865688324,
-4.1838000243215e-33,
0.03438884764909744,
0.08637094497680664,
-0.027995970100164413,
0.09967635571956635,
0.03033537231385708,
0.004250023048371077,
0.013211085461080074,
0.14391984045505524,
-0.018230015411973,
0.0008563397568650544,
0.05915218964219093,
-0.08577528595924377,
-0.08741530030965805,
-0.02476281300187111,
0.02089402638375759,
0.009236098267138004,
0.0022220034152269363,
-0.04552528262138367,
-0.04196701943874359,
0.034156158566474915,
0.04021655395627022,
0.11718633025884628,
-0.09590455889701843,
0.08863051980733871,
-0.059591080993413925,
0.012649881653487682,
0.004810936748981476,
0.09402202069759369,
-0.08010457456111908,
-0.03196941688656807,
-0.03389061987400055,
-0.01679762452840805,
-0.030047638341784477,
0.02232223190367222,
-0.08654878288507462,
-0.010027778334915638,
0.021284284070134163,
-0.007165788672864437,
-0.063999705016613,
0.09917820245027542,
0.09497607499361038,
-0.020452525466680527,
-0.11983459442853928,
0.041702888906002045,
-0.07234808802604675,
-0.00285915844142437,
0.00486419815570116,
-0.11438306421041489,
0.06041710078716278,
-0.00478691840544343,
0.08743700385093689,
0.03726685792207718,
-0.05595160648226738,
0.03092111274600029,
-0.03529880940914154,
-0.028061416000127792,
0.05210427567362785,
-0.034117504954338074,
0.02170790731906891,
0.009024888277053833,
-0.08601925522089005,
-0.05220189690589905,
-0.04853806272149086,
-0.11871526390314102,
0.019619103521108627,
-0.020090097561478615,
-0.03150482475757599,
-0.03657294064760208,
-0.008592593483626842,
0.05554542690515518,
0.014322890900075436,
0.026464898139238358,
0.05759655684232712,
-0.038530249148607254,
-0.01761936955153942,
-0.01743265800178051,
0.02374177984893322,
0.006342989392578602,
0.05594303831458092,
-0.06053014099597931,
-0.11985619366168976,
-0.08457670360803604,
0.045173369348049164,
0.05776109918951988,
-0.0056351907551288605,
0.019826533272862434,
-0.014123903587460518,
0.01049207616597414,
0.02468194253742695,
0.030179986730217934,
-0.03690095618367195,
-0.04178742691874504,
-0.0030118089634925127,
0.045938149094581604,
0.014063847251236439,
-5.811313386061556e-8,
-0.053576551377773285,
0.039514027535915375,
-0.06619477272033691,
0.02592553198337555,
-0.01849374733865261,
-0.04122321680188179,
0.022507496178150177,
0.09204138815402985,
-0.05198988318443298,
0.004503786563873291,
0.099793940782547,
-0.00479306373745203,
-0.02616720274090767,
-0.009290494956076145,
0.007529478054493666,
0.03442858159542084,
0.038063887506723404,
0.07039548456668854,
-0.08950836211442947,
-0.060046736150979996,
-0.008433334529399872,
-0.005172078497707844,
-0.0021744512487202883,
-0.04427023604512215,
0.025741849094629288,
-0.0708618089556694,
0.008174183778464794,
0.044057417660951614,
0.019644221290946007,
-0.023653121665120125,
-0.04487171024084091,
0.02623075805604458,
-0.04028204083442688,
-0.05899606645107269,
-0.06468623876571655,
0.0873165875673294,
-0.0000015408877516165376,
-0.02174311690032482,
0.030886435881257057,
0.094451904296875,
0.030695859342813492,
0.06706656515598297,
-0.07480531930923462,
0.002431778237223625,
0.045595936477184296,
0.06469754129648209,
-0.01883578673005104,
-0.03154022619128227,
0.001480140257626772,
0.07079560309648514,
0.027046171948313713,
-0.05088590830564499,
-0.0037020540330559015,
0.0017577537801116705,
-0.005329455714672804,
-0.0219523087143898,
0.0181431882083416,
-0.015050824731588364,
0.010762940160930157,
0.022556515410542488,
0.050926342606544495,
-0.026853499934077263,
0.0590292252600193,
-0.013102889992296696
] |
mental/mental-bert-base-uncased | 93f3ff553a76674e1307d8f01dd2441fd8909284 | 2022-04-05T17:43:03.000Z | [
"pytorch",
"bert",
"fill-mask",
"arxiv:2110.15621",
"transformers",
"autotrain_compatible"
] | fill-mask | false | mental | null | mental/mental-bert-base-uncased | 2,590 | 5 | transformers | # MentalBERT
[MentalBERT](https://arxiv.org/abs/2110.15621) is a model initialized with BERT-Base (`uncased_L-12_H-768_A-12`) and trained with mental health-related posts collected from Reddit.
We follow the standard pretraining protocols of BERT and RoBERTa with [Huggingface’s Transformers library](https://github.com/huggingface/transformers).
We use four NVIDIA Tesla V100 GPUs to train the two language models. We set the batch size to 16 per GPU, evaluate every 1,000 steps, and train for 624,000 iterations. Training with four GPUs takes around eight days.
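For readers who want to set up a comparable run, the sketch below shows how continued masked-language-model pretraining could be configured with the Transformers `Trainer`. It is not the authors' training script: the placeholder corpus file, the tokenization length, and every argument other than the per-GPU batch size and step count reported above are assumptions.
```python
# Minimal sketch of a comparable MLM pretraining setup (not the authors' script).
# "reddit_posts.txt" is a placeholder; supply your own text corpus.
from datasets import load_dataset
from transformers import (AutoTokenizer, AutoModelForMaskedLM,
                          DataCollatorForLanguageModeling, Trainer, TrainingArguments)

tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
model = AutoModelForMaskedLM.from_pretrained("bert-base-uncased")

# Load and tokenize the (placeholder) corpus.
corpus = load_dataset("text", data_files={"train": "reddit_posts.txt"})["train"]
corpus = corpus.map(lambda batch: tokenizer(batch["text"], truncation=True, max_length=128),
                    batched=True, remove_columns=["text"])

args = TrainingArguments(
    output_dir="mentalbert-pretraining",
    per_device_train_batch_size=16,  # batch size 16 per GPU, as described above
    max_steps=624_000,               # total training iterations reported above
    save_steps=1_000,
    logging_steps=1_000,
)
trainer = Trainer(
    model=model,
    args=args,
    train_dataset=corpus,
    data_collator=DataCollatorForLanguageModeling(tokenizer=tokenizer, mlm_probability=0.15),
)
trainer.train()
```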
## Usage
Load the model via [Huggingface’s Transformers library](https://github.com/huggingface/transformers):
```
from transformers import AutoTokenizer, AutoModel
tokenizer = AutoTokenizer.from_pretrained("mental/mental-bert-base-uncased")
model = AutoModel.from_pretrained("mental/mental-bert-base-uncased")
```
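Because the checkpoint is published for the fill-mask task, it can also be queried directly through the high-level pipeline API; the example sentence below is purely illustrative.
```python
from transformers import pipeline

fill_mask = pipeline("fill-mask", model="mental/mental-bert-base-uncased")
# Illustrative input; the predicted tokens depend on the model.
print(fill_mask("I have been feeling very [MASK] lately."))
```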
## Paper
For more details, refer to the paper [MentalBERT: Publicly Available Pretrained Language Models for Mental Healthcare](https://arxiv.org/abs/2110.15621).
```
@inproceedings{ji2022mentalbert,
title = {{MentalBERT: Publicly Available Pretrained Language Models for Mental Healthcare}},
author = {Shaoxiong Ji and Tianlin Zhang and Luna Ansari and Jie Fu and Prayag Tiwari and Erik Cambria},
year = {2022},
booktitle = {Proceedings of LREC}
}
```
## Social Impact
We train and release masked language models for mental health to facilitate the automatic detection of mental disorders in online social content for non-clinical use.
The models may help social workers find potential individuals in need of early prevention.
However, the model predictions are not psychiatric diagnoses.
We recommend that anyone suffering from mental health issues call their local mental health helpline and seek professional help if possible.
Data privacy is an important issue, and we try to minimize the privacy impact when using social posts for model training.
During the data collection process, we only use anonymous posts that are manifestly available to the public.
We do not collect user profiles even though they are also manifestly public online.
We have not attempted to identify the anonymous users or interact with any anonymous users.
The collected data are stored securely with password protection even though they are collected from the open web.
There might also be some bias, fairness, uncertainty, and interpretability issues during the data collection and model training.
Evaluation of those issues is essential in future research. | [
-0.08884184062480927,
-0.05216280370950699,
-0.010745597071945667,
0.0486561581492424,
-0.04808295890688896,
0.008079677820205688,
-0.05764789134263992,
0.040640704333782196,
-0.04294143244624138,
-0.10342282801866531,
-0.02293010987341404,
-0.013511424884200096,
0.013692670501768589,
0.044357575476169586,
0.012425821274518967,
0.03083687089383602,
0.054750919342041016,
-0.02985357865691185,
-0.09674159437417984,
0.0014788322150707245,
0.04404187947511673,
0.024355854839086533,
0.05949696898460388,
-0.02082887291908264,
0.06173177808523178,
0.005693670362234116,
-0.015564344823360443,
-0.095145583152771,
0.1165836974978447,
0.04283866658806801,
0.05513779819011688,
0.015384852886199951,
-0.017870906740427017,
0.07182109355926514,
-0.0036364856641739607,
0.06318985670804977,
-0.08948284387588501,
-0.03878679499030113,
-0.025741666555404663,
-0.04881911724805832,
0.02651236578822136,
-0.028684210032224655,
-0.0286105927079916,
0.021622752770781517,
0.08978098630905151,
-0.017760176211595535,
-0.010669277980923653,
0.008784652687609196,
0.014662792906165123,
-0.11346504092216492,
-0.0632178783416748,
-0.007191051263362169,
0.04369450733065605,
0.029125962406396866,
-0.06753545254468918,
0.043520618230104446,
0.07059496641159058,
-0.06513781100511551,
-0.01357278972864151,
-0.017900723963975906,
-0.054239027202129364,
-0.07755118608474731,
-0.032321829348802567,
-0.019454821944236755,
-0.037378180772066116,
0.008104611188173294,
0.026768825948238373,
-0.02263585850596428,
0.039684392511844635,
0.06814055144786835,
0.011336863040924072,
0.06033650040626526,
-0.011651373468339443,
0.014967289753258228,
0.0340108796954155,
-0.02494385652244091,
0.04066628962755203,
-0.04854089021682739,
0.045771386474370956,
-0.07912678271532059,
0.021310240030288696,
-0.02807827666401863,
0.08862343430519104,
0.03111618384718895,
0.09114845842123032,
-0.020489711314439774,
0.06712798774242401,
0.020472727715969086,
-0.019968222826719284,
-0.0058756619691848755,
-0.012270099483430386,
-0.07136104255914688,
0.0645885020494461,
-0.00003573138019419275,
-0.006971862632781267,
0.033323273062705994,
0.010216125287115574,
-0.04827694967389107,
-0.07349128276109695,
0.055585429072380066,
0.0016178454970940948,
0.01086137630045414,
0.053936317563056946,
0.06918434798717499,
-0.01861616224050522,
0.031595926731824875,
0.047105856239795685,
0.053062353283166885,
-0.052737604826688766,
-0.03739321604371071,
0.026595082134008408,
0.007643839810043573,
-0.005271573550999165,
0.020062066614627838,
-0.016534313559532166,
0.07557258754968643,
-0.02663511037826538,
-0.010679858736693859,
-0.006528133992105722,
0.13210341334342957,
-0.0046691023744642735,
-0.0395173616707325,
0.026944970712065697,
0.025748098269104958,
-0.0029667012859135866,
-0.0024351770989596844,
-0.054868824779987335,
3.8580944169376454e-33,
-0.011203489266335964,
0.0263090617954731,
-0.00992679875344038,
-0.02629866451025009,
0.005881232209503651,
0.044542569667100906,
0.030266735702753067,
0.0349326953291893,
0.06813899427652359,
-0.024273168295621872,
-0.08021807670593262,
0.09749295562505722,
-0.05170420557260513,
0.11373113095760345,
-0.0726696103811264,
-0.05438665300607681,
-0.03681948781013489,
0.08354178071022034,
0.012182901613414288,
0.04312938451766968,
0.1368822157382965,
0.0598461739718914,
0.003234762931242585,
-0.05257394164800644,
-0.09636241942644119,
0.06379378587007523,
0.02661995403468609,
-0.020130429416894913,
-0.01966465264558792,
0.038828689604997635,
-0.07027611881494522,
0.0318111777305603,
-0.08962642401456833,
0.012867685407400131,
0.03521471470594406,
-0.02151363529264927,
0.07305196672677994,
-0.04784088954329491,
0.0388251356780529,
-0.04550265148282051,
-0.009176976978778839,
0.08283940702676773,
0.008435343392193317,
-0.14630275964736938,
-0.05069856718182564,
0.019375940784811974,
0.03150772675871849,
-0.03227424621582031,
-0.030623633414506912,
0.0021292720921337605,
0.025390366092324257,
0.0480414517223835,
-0.10467227548360825,
-0.035038191825151443,
-0.009211455471813679,
-0.04531153663992882,
0.0704847127199173,
0.05546695366501808,
0.11488829553127289,
0.004698935896158218,
0.015834150835871696,
0.0007409556419588625,
-0.006194386165589094,
0.06021547690033913,
0.016316315159201622,
-0.01868888922035694,
-0.029439378529787064,
0.03125939145684242,
-0.0195323396474123,
0.03906052187085152,
-0.06815379112958908,
0.026008952409029007,
-0.017260825261473656,
-0.05162206292152405,
0.08801094442605972,
-0.0675446018576622,
0.03567015379667282,
-0.03410545736551285,
-0.08977590501308441,
-0.038169022649526596,
-0.006566551048308611,
0.08909066021442413,
-0.05412554740905762,
-0.04871407523751259,
0.027406498789787292,
-0.04032287374138832,
-0.0013546223053708673,
-0.07846624404191971,
-0.1064540445804596,
-0.033674273639917374,
-0.03189815953373909,
-0.04393405094742775,
0.056099776178598404,
0.02471821941435337,
-0.044608257710933685,
-4.633822975512443e-33,
0.05347852036356926,
0.010734251700341702,
0.032774776220321655,
0.07545938342809677,
0.0494084507226944,
-0.047563761472702026,
0.02140149287879467,
0.13522912561893463,
0.008970648050308228,
-0.019439300522208214,
0.016360659152269363,
-0.01911068707704544,
0.00037990938290022314,
0.009486963041126728,
0.032385364174842834,
-0.031014801934361458,
-0.0007283652666956186,
0.01360669918358326,
-0.02857830747961998,
-0.011455288156867027,
-0.03263895586133003,
0.06749089807271957,
-0.10714801400899887,
0.009630236774682999,
-0.0494101457297802,
0.05435559153556824,
-0.026136407628655434,
0.005325021222233772,
0.07171767950057983,
-0.0043815504759550095,
-0.045511193573474884,
0.029591331258416176,
-0.0794842392206192,
0.04796605929732323,
-0.018493935465812683,
0.06576115638017654,
0.03831826150417328,
0.04773774370551109,
-0.04951145127415657,
0.04644499719142914,
0.14735840260982513,
-0.012918675318360329,
-0.049742843955755234,
0.0465027391910553,
0.024454066529870033,
0.01768597960472107,
-0.10169656574726105,
-0.08884061872959137,
-0.029123617336153984,
-0.017808876931667328,
0.0398050919175148,
-0.01827455312013626,
-0.08405490219593048,
0.005810616537928581,
-0.0373847596347332,
-0.11327491700649261,
0.040933094918727875,
-0.0655151903629303,
-0.02765611931681633,
-0.02791532129049301,
-0.03773561120033264,
-0.009990901686251163,
-0.02214125543832779,
-0.06765671819448471,
-0.041303135454654694,
-0.058709245175123215,
-0.06952783465385437,
0.06483273953199387,
0.018434060737490654,
0.06197071447968483,
-0.019651923328638077,
0.04173218831419945,
0.01822364702820778,
-0.014373408630490303,
-0.0775325745344162,
0.06238497421145439,
-0.009174885228276253,
-0.1531537026166916,
0.03283804655075073,
-0.08472379297018051,
-0.10816089063882828,
-0.056184183806180954,
0.06031263619661331,
0.03537975996732712,
-0.015705598518252373,
0.07337522506713867,
0.06087057664990425,
0.06263018399477005,
0.09397172182798386,
0.008174948394298553,
-0.02820882573723793,
0.05412726476788521,
0.04828374832868576,
0.05567377433180809,
-0.01834961771965027,
-5.0133859730294716e-8,
0.006746124010533094,
0.017840402200818062,
0.009462788701057434,
0.0795821100473404,
-0.008578624576330185,
-0.020440872758626938,
-0.08192173391580582,
0.03040079027414322,
-0.03511272370815277,
-0.006082725711166859,
0.07835550606250763,
0.003852010704576969,
-0.04032984375953674,
-0.01351314876228571,
-0.03821416199207306,
0.05135587602853775,
-0.009834213182330132,
0.04679013043642044,
-0.0171368345618248,
-0.006700511090457439,
-0.04961610957980156,
0.024161871522665024,
0.02764950506389141,
-0.04512340947985649,
-0.0390937402844429,
-0.09417720884084702,
0.021647989749908447,
0.030371489003300667,
0.0020372578874230385,
-0.0452473983168602,
-0.019221223890781403,
0.023693107068538666,
-0.04637681320309639,
0.017491456121206284,
-0.03196467459201813,
-0.0011423529358580709,
0.02991923689842224,
-0.03233134746551514,
0.04557706043124199,
0.01743233948945999,
0.05453425645828247,
0.048503577709198,
-0.0725957527756691,
-0.029688628390431404,
0.04155915975570679,
-0.005571749992668629,
-0.06498374789953232,
-0.14033424854278564,
0.018879711627960205,
0.06281609833240509,
0.0002102852304233238,
0.03548736497759819,
-0.10828068852424622,
0.07430722564458847,
0.018323585391044617,
0.005112530663609505,
-0.0835576057434082,
-0.05563759803771973,
0.012447521090507507,
0.010624796152114868,
-0.052003130316734314,
-0.015838637948036194,
-0.0021208280231803656,
0.02709163911640644
] |
ixa-ehu/berteus-base-cased | be4efdc31716b33b989efa20ec1e93f404a03fff | 2021-05-19T20:33:41.000Z | [
"pytorch",
"jax",
"bert",
"feature-extraction",
"eu",
"arxiv:2004.00033",
"transformers"
] | feature-extraction | false | ixa-ehu | null | ixa-ehu/berteus-base-cased | 2,589 | 1 | transformers | ---
language: eu
---
# BERTeus base cased
This is the pretrained Basque language model presented in [Give your Text Representation Models some Love: the Case for Basque](https://arxiv.org/pdf/2004.00033.pdf). The model has been trained on a Basque corpus comprising crawled news articles from online newspapers and the Basque Wikipedia. The training corpus contains 224.6 million tokens, of which 35 million come from Wikipedia.
BERTeus has been tested on four downstream tasks for Basque: part-of-speech (POS) tagging, named entity recognition (NER), sentiment analysis and topic classification, improving the state of the art on all four. A summary of the results is shown below:
| Downstream task | BERTeus | mBERT | Previous SOTA |
| --------------- | ------- | ------| ------------- |
| Topic Classification | **76.77** | 68.42 | 63.00 |
| Sentiment | **78.10** | 71.02 | 74.02 |
| POS | **97.76** | 96.37 | 96.10 |
| NER | **87.06** | 81.52 | 76.72 |
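BERTeus can be loaded through the standard Transformers feature-extraction interface. A minimal sketch follows (the Basque example sentence is illustrative and not from the original card):
```python
from transformers import AutoTokenizer, AutoModel

tokenizer = AutoTokenizer.from_pretrained("ixa-ehu/berteus-base-cased")
model = AutoModel.from_pretrained("ixa-ehu/berteus-base-cased")

# Illustrative Basque sentence; any text works.
inputs = tokenizer("Kaixo, mundua!", return_tensors="pt")
outputs = model(**inputs)
print(outputs.last_hidden_state.shape)  # (batch, sequence_length, hidden_size)
```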
If using this model, please cite the following paper:
```
@inproceedings{agerri2020give,
title={Give your Text Representation Models some Love: the Case for Basque},
author={Rodrigo Agerri and I{\~n}aki San Vicente and Jon Ander Campos and Ander Barrena and Xabier Saralegi and Aitor Soroa and Eneko Agirre},
booktitle={Proceedings of the 12th International Conference on Language Resources and Evaluation},
year={2020}
}
```
| [
-0.0764126405119896,
-0.03101978451013565,
0.020400263369083405,
0.011478693224489689,
0.016003338620066643,
0.025687681511044502,
0.005272213835269213,
-0.004965291358530521,
0.06190603971481323,
-0.04536929726600647,
-0.012527958489954472,
-0.021345192566514015,
0.06336509436368942,
0.07629700750112534,
0.02878027968108654,
0.023299941793084145,
0.03513645380735397,
0.01051193755120039,
-0.04805374890565872,
-0.06995329260826111,
0.03589693829417229,
0.06781111657619476,
0.0348895899951458,
-0.009352035820484161,
-0.006652926094830036,
-0.04158662632107735,
0.0010443817591294646,
-0.10326366871595383,
0.020867399871349335,
-0.019400957971811295,
-0.04564560577273369,
0.04031791538000107,
0.008747988380491734,
0.03151306137442589,
-0.03772639483213425,
0.059181563556194305,
-0.04227356240153313,
0.0009610599372535944,
-0.011561439372599125,
0.046335820108652115,
-0.04460630193352699,
-0.05260077118873596,
-0.06702470034360886,
0.012851033359766006,
0.10602099448442459,
0.01196153461933136,
-0.025727014988660812,
0.022759264335036278,
-0.015996236354112625,
0.03563547506928444,
-0.11771447211503983,
-0.03525892645120621,
0.073118656873703,
0.03604699298739433,
-0.051401108503341675,
-0.02626427635550499,
-0.01788596622645855,
0.03498666733503342,
0.016730017960071564,
-0.07022882252931595,
-0.012823425233364105,
-0.0571092925965786,
-0.019736258313059807,
-0.045103903859853745,
-0.027310235425829887,
-0.026134204119443893,
-0.0339910127222538,
0.033130943775177,
-0.05315271019935608,
-0.0057069347240030766,
0.03580795228481293,
0.054209087044000626,
-0.007335430942475796,
0.058916863054037094,
0.00944814458489418,
-0.022567642852663994,
-0.006054024677723646,
-0.016739051789045334,
0.03584125638008118,
-0.05895668640732765,
0.03574518486857414,
0.0247808825224638,
0.038970671594142914,
-0.04748640954494476,
0.08906865864992142,
-0.028984401375055313,
0.055390652269124985,
0.007620449177920818,
-0.0309920497238636,
0.025927066802978516,
-0.03203528746962547,
-0.11162815988063812,
0.11992960423231125,
-0.06349790841341019,
0.013124861754477024,
0.03302983567118645,
0.0010182138066738844,
0.022927571088075638,
0.010544261895120144,
0.08870537579059601,
0.051162708550691605,
0.06079699844121933,
0.0010621558176353574,
-0.0652678981423378,
-0.04170404374599457,
0.014167818240821362,
-0.03716925159096718,
0.00674132164567709,
0.04240971431136131,
-0.04648422822356224,
-0.039228785783052444,
0.039000485092401505,
-0.005024564452469349,
-0.0874832421541214,
0.011225048452615738,
-0.02155771479010582,
0.07567377388477325,
-0.009188232943415642,
0.06338118761777878,
0.10298481583595276,
-0.024061765521764755,
0.04440437629818916,
0.011198184452950954,
0.06546305865049362,
0.011243931949138641,
0.05021478235721588,
-0.02177102118730545,
1.9888572793936223e-33,
0.0006277748616412282,
0.014721361920237541,
-0.0030732066370546818,
0.01298467442393303,
-0.09832264482975006,
-0.04408399388194084,
-0.01167664211243391,
-0.06795158237218857,
-0.10281910747289658,
-0.09759918600320816,
-0.056911155581474304,
0.055499330163002014,
-0.033792268484830856,
0.08787698298692703,
-0.006213288754224777,
-0.052700918167829514,
0.00933387316763401,
-0.009989089332520962,
0.013583255000412464,
-0.0324409157037735,
0.07229432463645935,
0.02400985360145569,
0.03353593125939369,
-0.025702940300107002,
0.029039515182375908,
0.00871277041733265,
0.0317176878452301,
-0.1132422536611557,
-0.031248807907104492,
0.03667691349983215,
-0.08515840023756027,
-0.031968969851732254,
0.004714415408670902,
0.014820088632404804,
0.001821409328840673,
0.020558135583996773,
-0.03801794722676277,
-0.012984721921384335,
-0.02178705856204033,
-0.08706034719944,
-0.04052044078707695,
0.00955719780176878,
0.012736747041344643,
-0.01891092211008072,
-0.06761809438467026,
-0.032028455287218094,
-0.027232401072978973,
-0.06918145716190338,
0.03468484804034233,
-0.03809373825788498,
0.038681577891111374,
-0.03405902534723282,
-0.022570669651031494,
0.05088435858488083,
0.05545409768819809,
0.05122603848576546,
0.011133170686662197,
0.013595310971140862,
0.02802860736846924,
-0.04859770089387894,
0.06540429592132568,
-0.012706203386187553,
0.029504617676138878,
0.028093816712498665,
0.04850860685110092,
0.03075599856674671,
-0.020351143553853035,
0.1129884347319603,
0.02140628546476364,
-0.03025200590491295,
0.01775760017335415,
0.024230193346738815,
0.03387824073433876,
-0.028809748589992523,
-0.02947901003062725,
0.07258317619562149,
-0.008160650730133057,
-0.09725376963615417,
-0.03978665918111801,
0.045014940202236176,
0.004701386671513319,
-0.11543288826942444,
0.04179113358259201,
-0.058905910700559616,
-0.015051125548779964,
0.02861005999147892,
0.054941363632678986,
-0.08209863305091858,
0.0032577503006905317,
0.04822198674082756,
0.007154087536036968,
0.05586346611380577,
-0.02651282586157322,
0.046421438455581665,
-0.021033581346273422,
-3.4680014738294845e-33,
-0.049671877175569534,
0.011862813495099545,
-0.046227797865867615,
0.0007067342521622777,
-0.0796697735786438,
-0.022244460880756378,
-0.01418521162122488,
0.10141445696353912,
-0.0441809818148613,
0.01618259586393833,
-0.03534161299467087,
-0.12284085154533386,
-0.015468208119273186,
-0.0018484077882021666,
-0.0028923882637172937,
0.08524999022483826,
0.02928745374083519,
0.03981159254908562,
0.03576938435435295,
0.13753874599933624,
-0.08138155192136765,
0.05529491603374481,
-0.13273897767066956,
0.0921437218785286,
-0.006154912058264017,
0.02776065655052662,
0.0202780868858099,
0.0198994018137455,
-0.04883870854973793,
-0.04660334438085556,
-0.098983034491539,
-0.03824203088879585,
-0.031947165727615356,
0.019144363701343536,
-0.08728310465812683,
-0.017245149239897728,
-0.012229268439114094,
-0.04801633581519127,
0.04807377979159355,
0.06783483177423477,
0.05235736072063446,
0.07038240879774094,
-0.061686933040618896,
-0.009414545260369778,
-0.07511342316865921,
-0.00043807848123833537,
-0.0698842778801918,
0.008523223921656609,
-0.032384637743234634,
-0.08396042138338089,
0.03266071528196335,
0.010717560537159443,
-0.07594785839319229,
0.04065490514039993,
0.014667876996099949,
-0.0846049040555954,
0.03636297956109047,
-0.045804329216480255,
-0.07365266233682632,
-0.010852592997252941,
-0.06527780741453171,
0.06088593602180481,
0.024624964222311974,
-0.015179276466369629,
0.06133455038070679,
-0.07068531215190887,
-0.08764278888702393,
0.10138072818517685,
-0.07980465888977051,
-0.03826580569148064,
0.06415743380784988,
0.028625840321183205,
-0.006833009421825409,
-0.0058530764654278755,
0.004152711946517229,
-0.007221528794616461,
-0.005644954741001129,
-0.00792963057756424,
-0.005925564561039209,
-0.056813132017850876,
-0.01569974794983864,
-0.06403058767318726,
0.003106953576207161,
-0.011360246688127518,
0.07321694493293762,
0.04048799350857735,
0.028861815109848976,
0.025977957993745804,
0.023819636553525925,
0.01427337247878313,
0.01644245535135269,
-0.011778109706938267,
0.02020549215376377,
0.08470331877470016,
0.07101278007030487,
-5.037278683062141e-8,
-0.09705747663974762,
-0.042377907782793045,
-0.08299306780099869,
0.028425011783838272,
-0.03615826368331909,
-0.06939534842967987,
0.00579498428851366,
0.05019194260239601,
-0.0780184268951416,
-0.01942756585776806,
-0.008715879172086716,
0.04249976947903633,
-0.1169612854719162,
-0.020007222890853882,
0.0017123790457844734,
0.06992842257022858,
0.033853136003017426,
0.08304215967655182,
0.062481749802827835,
-0.036575838923454285,
0.08009768277406693,
0.0444486066699028,
0.013023244217038155,
-0.026474379003047943,
0.02907843142747879,
-0.04005784913897514,
-0.0023768760729581118,
0.02749837562441826,
0.038288820534944534,
-0.11874393373727798,
-0.07957229018211365,
0.03976578265428543,
-0.18443290889263153,
-0.008311926387250423,
0.063319131731987,
0.09989891201257706,
0.008168214000761509,
-0.038740649819374084,
-0.06331970542669296,
0.07768901437520981,
0.07914109528064728,
0.03470364958047867,
-0.09369441121816635,
-0.03497716039419174,
0.09150328487157822,
0.02046826295554638,
-0.04993108659982681,
-0.11669108271598816,
0.06817397475242615,
-0.033015161752700806,
0.029306894168257713,
-0.012081698514521122,
0.05708641931414604,
0.06861753016710281,
0.0602777898311615,
0.025307971984148026,
-0.01361518632620573,
0.0173379797488451,
0.015674250200390816,
0.060865625739097595,
0.03550788015127182,
0.060034289956092834,
0.05806684121489525,
0.01769999787211418
] |
facebook/data2vec-audio-base-960h | 32331f3123e703528918aa688a9a38232d58c872 | 2022-05-24T10:41:22.000Z | [
"pytorch",
"data2vec-audio",
"automatic-speech-recognition",
"en",
"dataset:librispeech_asr",
"arxiv:2202.03555",
"transformers",
"speech",
"hf-asr-leaderboard",
"license:apache-2.0",
"model-index"
] | automatic-speech-recognition | false | facebook | null | facebook/data2vec-audio-base-960h | 2,585 | 4 | transformers | ---
language: en
datasets:
- librispeech_asr
tags:
- speech
- hf-asr-leaderboard
license: apache-2.0
widget:
- example_title: Librispeech sample 1
src: https://cdn-media.huggingface.co/speech_samples/sample1.flac
- example_title: Librispeech sample 2
src: https://cdn-media.huggingface.co/speech_samples/sample2.flac
model-index:
- name: data2vec-audio-base-960h
results:
- task:
name: Automatic Speech Recognition
type: automatic-speech-recognition
dataset:
name: LibriSpeech (clean)
type: librispeech_asr
config: clean
split: test
args:
language: en
metrics:
- name: Test WER
type: wer
value: 2.77
- task:
name: Automatic Speech Recognition
type: automatic-speech-recognition
dataset:
name: LibriSpeech (other)
type: librispeech_asr
config: other
split: test
args:
language: en
metrics:
- name: Test WER
type: wer
value: 7.08
---
# Data2Vec-Audio-Base-960h
[Facebook's Data2Vec](https://ai.facebook.com/research/data2vec-a-general-framework-for-self-supervised-learning-in-speech-vision-and-language/)
This is the base model, pretrained and fine-tuned on 960 hours of Librispeech 16kHz sampled speech audio. When using the model,
make sure that your speech input is also sampled at 16kHz.
[Paper](https://arxiv.org/abs/2202.03555)
Authors: Alexei Baevski, Wei-Ning Hsu, Qiantong Xu, Arun Babu, Jiatao Gu, Michael Auli
**Abstract**
While the general idea of self-supervised learning is identical across modalities, the actual algorithms and objectives differ widely because they were developed with a single modality in mind. To get us closer to general self-supervised learning, we present data2vec, a framework that uses the same learning method for either speech, NLP or computer vision. The core idea is to predict latent representations of the full input data based on a masked view of the input in a self-distillation setup using a standard Transformer architecture. Instead of predicting modality-specific targets such as words, visual tokens or units of human speech which are local in nature, data2vec predicts contextualized latent representations that contain information from the entire input. Experiments on the major benchmarks of speech recognition, image classification, and natural language understanding demonstrate a new state of the art or competitive performance to predominant approaches.
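To make the core idea above concrete, the following is a highly simplified conceptual sketch of the training objective, not the official fairseq implementation: the encoder interface, the masking strategy (the real model uses a learned mask embedding), target normalization, and the student prediction head are all simplified assumptions.
```python
# Highly simplified conceptual sketch of the data2vec objective (not the official
# fairseq implementation). `student(x)` and `teacher(x)` are assumed to return the
# list of per-layer hidden states, each of shape (batch, time, dim), for an input
# of already-embedded features x of shape (batch, time, dim).
import torch
import torch.nn.functional as F

def ema_update(teacher, student, decay=0.999):
    # The teacher's weights track an exponential moving average of the student's.
    with torch.no_grad():
        for t, s in zip(teacher.parameters(), student.parameters()):
            t.mul_(decay).add_(s, alpha=1.0 - decay)

def data2vec_loss(student, teacher, x, mask, top_k=8):
    # The teacher sees the unmasked input; targets average its top-K layer outputs.
    with torch.no_grad():
        targets = torch.stack(teacher(x)[-top_k:]).mean(dim=0)
    # The student sees a masked view (zeroing here stands in for a learned mask embedding).
    masked_x = x.clone()
    masked_x[mask] = 0.0
    preds = student(masked_x)[-1]
    # Regress the contextualized teacher targets at the masked positions.
    return F.smooth_l1_loss(preds[mask], targets[mask])
```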
The original model can be found under https://github.com/pytorch/fairseq/tree/main/examples/data2vec .
# Pre-Training method

For more information, please take a look at the [official paper](https://arxiv.org/abs/2202.03555).
# Usage
To transcribe audio files the model can be used as a standalone acoustic model as follows:
```python
from transformers import Wav2Vec2Processor, Data2VecAudioForCTC
from datasets import load_dataset
import torch
# load model and processor
processor = Wav2Vec2Processor.from_pretrained("facebook/data2vec-audio-base-960h")
model = Data2VecAudioForCTC.from_pretrained("facebook/data2vec-audio-base-960h")
# load dummy dataset and read soundfiles
ds = load_dataset("patrickvonplaten/librispeech_asr_dummy", "clean", split="validation")
# tokenize
input_values = processor(ds[0]["audio"]["array"], return_tensors="pt", padding="longest").input_values  # Batch size 1
# retrieve logits
logits = model(input_values).logits
# take argmax and decode
predicted_ids = torch.argmax(logits, dim=-1)
transcription = processor.batch_decode(predicted_ids)
```
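The high-level pipeline API offers a shorter route to the same result; the audio path below is just a placeholder for a 16 kHz recording.
```python
from transformers import pipeline

asr = pipeline("automatic-speech-recognition", model="facebook/data2vec-audio-base-960h")
print(asr("path/to/your_16khz_audio.wav"))  # placeholder path; prints {'text': ...}
```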
## Evaluation
This code snippet shows how to evaluate **facebook/data2vec-audio-base-960h** on LibriSpeech's "clean" and "other" test data.
```python
from transformers import Wav2Vec2Processor, Data2VecAudioForCTC
from datasets import load_dataset
import torch
from jiwer import wer
# load model and processor
processor = Wav2Vec2Processor.from_pretrained("facebook/data2vec-audio-base-960h")
model = Data2VecAudioForCTC.from_pretrained("facebook/data2vec-audio-base-960h").to("cuda")
librispeech_eval = load_dataset("librispeech_asr", "clean", split="test")
def map_to_pred(batch):
input_values = processor(batch["audio"]["array"], return_tensors="pt", padding="longest").input_values
with torch.no_grad():
logits = model(input_values.to("cuda")).logits
predicted_ids = torch.argmax(logits, dim=-1)
transcription = processor.batch_decode(predicted_ids)
batch["transcription"] = transcription
return batch
result = librispeech_eval.map(map_to_pred, batched=True, batch_size=1, remove_columns=["audio"])
print("WER:", wer(result["text"], result["transcription"]))
```
*Result (WER)*:
| "clean" | "other" |
|---|---|
| 2.77 | 7.08 | | [
-0.10373582690954208,
-0.16067510843276978,
-0.02905401401221752,
-0.028970962390303612,
0.04285217821598053,
0.003429416799917817,
0.01652468554675579,
-0.04338693246245384,
-0.05188877508044243,
-0.09332482516765594,
0.03871799632906914,
-0.11418624967336655,
-0.06788241118192673,
-0.014504456892609596,
0.013844098895788193,
-0.06041858345270157,
-0.015331893227994442,
-0.04861815646290779,
-0.050742316991090775,
-0.044967714697122574,
0.06614916771650314,
0.07899648696184158,
0.06695770472288132,
-0.00005955491724307649,
-0.013112093321979046,
-0.015582217834889889,
-0.04127965122461319,
0.004299168474972248,
0.03583759069442749,
-0.047070879489183426,
0.1515388786792755,
0.04975712299346924,
0.10311175882816315,
0.01702984794974327,
0.003157525323331356,
-0.017079375684261322,
0.014242050237953663,
-0.08064519613981247,
-0.04699668288230896,
-0.0002590919320937246,
-0.028767874464392662,
-0.004289119970053434,
0.036563441157341,
-0.05654989555478096,
-0.008598977699875832,
-0.07347913086414337,
-0.07240408658981323,
-0.035838790237903595,
0.003445590613409877,
0.09642305225133896,
-0.07713033258914948,
0.019027898088097572,
-0.03125080093741417,
0.04915228858590126,
-0.08260194212198257,
0.040346596390008926,
-0.028150124475359917,
0.020957177504897118,
0.0608842670917511,
0.03343609347939491,
-0.02357078529894352,
0.015193729661405087,
-0.006082489155232906,
0.03155524656176567,
-0.0795401930809021,
-0.019096538424491882,
-0.002614184282720089,
-0.02014707773923874,
0.041907988488674164,
-0.0027741703670471907,
-0.10783779621124268,
0.05151833966374397,
0.03843272849917412,
0.08994872123003006,
0.038786258548498154,
-0.030775420367717743,
0.022679969668388367,
0.019358495250344276,
0.0673070028424263,
-0.09295563399791718,
-0.005211822688579559,
-0.06311774253845215,
-0.027878446504473686,
0.042671818286180496,
0.06621046364307404,
-0.0317041277885437,
-0.03199901804327965,
-0.02278580144047737,
-0.01000041514635086,
0.026757067069411278,
-0.03205770254135132,
-0.020997101441025734,
-0.0292686577886343,
0.07274188101291656,
0.016005517914891243,
0.09615376591682434,
0.03365085646510124,
0.08898383378982544,
-0.051121313124895096,
0.1024971455335617,
-0.05184974521398544,
-0.04372302442789078,
0.039310604333877563,
-0.001002417178824544,
-0.07449257373809814,
-0.09550853073596954,
-0.025651710107922554,
0.03307504579424858,
0.014096589758992195,
-0.05926200747489929,
0.006767830811440945,
-0.015303226187825203,
0.018672829493880272,
-0.12079022824764252,
0.023471765220165253,
0.020666079595685005,
-0.07044554501771927,
-0.04897521063685417,
0.02118830382823944,
0.04823832958936691,
-0.006839053239673376,
0.009402947500348091,
0.02954123727977276,
0.00401564734056592,
0.05604616180062294,
0.00008661355241201818,
-0.03310360759496689,
8.039551166739178e-33,
0.06809747964143753,
-0.012548328377306461,
-0.005266414023935795,
0.041884325444698334,
0.03633849322795868,
-0.08885508030653,
-0.014531907625496387,
0.022255724295973778,
-0.08431942015886307,
-0.005109163001179695,
0.042725179344415665,
-0.03405372425913811,
-0.09104053676128387,
-0.013397054746747017,
0.020911768078804016,
0.005533603485673666,
-0.009585542604327202,
0.011376176960766315,
-0.013224618509411812,
-0.03698493167757988,
0.13939714431762695,
0.05826951935887337,
0.06205422431230545,
-0.008370705880224705,
0.08245864510536194,
0.061766915023326874,
0.02401050366461277,
-0.04479076340794563,
-0.011210964061319828,
0.0636134073138237,
-0.023787453770637512,
-0.06700760126113892,
0.004478926304727793,
0.006119620054960251,
0.04925716668367386,
0.0000746766381780617,
-0.028475891798734665,
-0.02137031964957714,
-0.07336533814668655,
-0.08198550343513489,
0.007445756811648607,
0.009404878132045269,
-0.031574174761772156,
-0.03286297246813774,
-0.01631455309689045,
-0.10294771194458008,
-0.02131836488842964,
0.07041112333536148,
0.07621082663536072,
0.02928965352475643,
-0.03235716000199318,
0.01567133329808712,
-0.007919006049633026,
0.06148912012577057,
-0.0007776113343425095,
0.03697199001908302,
0.01494512613862753,
0.07957722246646881,
0.033126652240753174,
-0.021039418876171112,
0.017093554139137268,
0.05420787259936333,
0.005179280880838633,
-0.016374941915273666,
0.061534203588962555,
-0.005942728370428085,
-0.03612712025642395,
-0.01994253881275654,
0.1256732940673828,
-0.0023607106413692236,
0.009249087423086166,
-0.07461702078580856,
0.036932725459337234,
0.08389164507389069,
0.03571830689907074,
-0.01657608523964882,
0.007443028502166271,
-0.05566457659006119,
-0.06177651882171631,
0.025885969400405884,
-0.05283169075846672,
0.009459744207561016,
0.027240345254540443,
0.001013761037029326,
-0.03333492577075958,
-0.017227355390787125,
0.0645451694726944,
-0.04396650940179825,
-0.0164237879216671,
-0.04919697344303131,
0.001491468632593751,
0.029473789036273956,
-0.02968692034482956,
-0.08575362712144852,
-0.0440404936671257,
-9.085577304445868e-33,
-0.04953830689191818,
0.06968852132558823,
-0.05609966814517975,
0.07683365046977997,
0.012228678911924362,
-0.03786493092775345,
0.13374395668506622,
0.048632826656103134,
0.026965366676449776,
-0.02442096173763275,
0.04013415798544884,
-0.06021969020366669,
0.03229549154639244,
-0.0946749672293663,
0.07472556829452515,
0.06515976041555405,
-0.00805190671235323,
-0.05308806896209717,
0.04425961524248123,
0.08518014848232269,
-0.027802493423223495,
0.09318733215332031,
0.04569936916232109,
0.06844507902860641,
-0.07988667488098145,
-0.05599270015954971,
-0.028331156820058823,
0.038385938853025436,
0.03578267991542816,
-0.020150208845734596,
0.015834985300898552,
0.07093654572963715,
-0.14916923642158508,
-0.000871916941832751,
-0.0586906261742115,
-0.06690796464681625,
0.007530826143920422,
-0.006079813465476036,
-0.054549552500247955,
0.0358763225376606,
0.12084679305553436,
0.044332996010780334,
-0.0937323272228241,
-0.0918113961815834,
0.03880432993173599,
-0.03492332249879837,
-0.044813863933086395,
-0.008934344165027142,
-0.009659020230174065,
-0.056766755878925323,
0.02765795588493347,
-0.009346110746264458,
-0.0027942475862801075,
0.011100050993263721,
-0.013324111700057983,
-0.023406527936458588,
0.033382315188646317,
-0.008621147833764553,
-0.07886852324008942,
0.02701684832572937,
0.03930545970797539,
-0.0489349290728569,
-0.05927259474992752,
-0.0346793495118618,
0.10455328971147537,
-0.0030063772574067116,
-0.05523333325982094,
0.04403415694832802,
-0.0013598246732726693,
-0.04591630399227142,
-0.061494119465351105,
-0.034340355545282364,
0.041374459862709045,
0.013741554692387581,
0.02677985653281212,
-0.01147126592695713,
-0.11122466623783112,
-0.02411964349448681,
-0.002277066232636571,
-0.008968761190772057,
-0.044507697224617004,
0.018098851665854454,
0.06686215847730637,
0.005381621886044741,
0.002218382665887475,
0.03586173430085182,
-0.02006002515554428,
0.06651579588651657,
0.02183333970606327,
0.012147415429353714,
-0.034442704170942307,
0.008943495340645313,
0.02101057767868042,
0.05927649140357971,
-0.020973650738596916,
-5.4741498445309844e-8,
-0.09855201840400696,
0.018574994057416916,
-0.00655707623809576,
-0.011762568727135658,
-0.011992629617452621,
-0.0491800382733345,
-0.0050501092337071896,
-0.005469915922731161,
0.000620358856394887,
-0.05300058424472809,
0.05039374157786369,
-0.007664505857974291,
-0.03157466650009155,
0.055145855993032455,
0.019867487251758575,
-0.028262069448828697,
-0.05174047872424126,
0.17199717462062836,
-0.0492195188999176,
-0.15964137017726898,
0.04979716241359711,
0.044717561453580856,
-0.009118231013417244,
0.01280591357499361,
0.022370140999555588,
0.023319212719798088,
-0.038002774119377136,
0.05915317311882973,
-0.009844490326941013,
-0.007488433737307787,
-0.04682521894574165,
0.015318188816308975,
0.00011912500485777855,
-0.05096251145005226,
0.039966411888599396,
0.03861364722251892,
-0.0710451528429985,
-0.029266182333230972,
-0.024320725351572037,
0.07279229909181595,
0.01541841309517622,
0.09508778154850006,
-0.08423087000846863,
0.011112588457763195,
0.07661056518554688,
0.021004432812333107,
-0.016384737566113472,
-0.02311626635491848,
0.05756459757685661,
0.008770334534347057,
0.018719064071774483,
0.02470257878303528,
-0.050194691866636276,
-0.015890181064605713,
0.07306293398141861,
0.032821644097566605,
-0.003929266706109047,
0.04075715318322182,
0.00581342214718461,
-0.051156606525182724,
0.013825088739395142,
-0.0021224399097263813,
-0.017908861860632896,
-0.004179673735052347
] |
lysandre/tiny-tapas-random-sqa | 2174c2e3dd74ba8a3bdaa58a6c566a7898e36cec | 2020-12-14T23:23:58.000Z | [
"pytorch",
"tapas",
"table-question-answering",
"transformers"
] | table-question-answering | false | lysandre | null | lysandre/tiny-tapas-random-sqa | 2,580 | null | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
google/vit-base-patch16-384 | be89a4abf1f427fe502d37f261b8b6d6da7894bc | 2022-01-12T08:05:44.000Z | [
"pytorch",
"tf",
"jax",
"vit",
"image-classification",
"dataset:imagenet",
"dataset:imagenet-21k",
"arxiv:2010.11929",
"arxiv:2006.03677",
"transformers",
"vision",
"license:apache-2.0"
] | image-classification | false | google | null | google/vit-base-patch16-384 | 2,578 | 2 | transformers | ---
license: apache-2.0
tags:
- vision
- image-classification
datasets:
- imagenet
- imagenet-21k
---
# Vision Transformer (base-sized model)
Vision Transformer (ViT) model pre-trained on ImageNet-21k (14 million images, 21,843 classes) at resolution 224x224, and fine-tuned on ImageNet 2012 (1 million images, 1,000 classes) at resolution 384x384. It was introduced in the paper [An Image is Worth 16x16 Words: Transformers for Image Recognition at Scale](https://arxiv.org/abs/2010.11929) by Dosovitskiy et al. and first released in [this repository](https://github.com/google-research/vision_transformer). However, the weights were converted from the [timm repository](https://github.com/rwightman/pytorch-image-models) by Ross Wightman, who already converted the weights from JAX to PyTorch. Credits go to him.
Disclaimer: The team releasing ViT did not write a model card for this model so this model card has been written by the Hugging Face team.
## Model description
The Vision Transformer (ViT) is a transformer encoder model (BERT-like) pretrained on a large collection of images in a supervised fashion, namely ImageNet-21k, at a resolution of 224x224 pixels. Next, the model was fine-tuned on ImageNet (also referred to as ILSVRC2012), a dataset comprising 1 million images and 1,000 classes, at a higher resolution of 384x384.
Images are presented to the model as a sequence of fixed-size patches (resolution 16x16), which are linearly embedded. A [CLS] token is added to the beginning of the sequence for use in classification tasks, and absolute position embeddings are added before feeding the sequence to the layers of the Transformer encoder.
By pre-training the model, it learns an inner representation of images that can then be used to extract features useful for downstream tasks: if you have a dataset of labeled images for instance, you can train a standard classifier by placing a linear layer on top of the pre-trained encoder. One typically places a linear layer on top of the [CLS] token, as the last hidden state of this token can be seen as a representation of an entire image.
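As a concrete illustration of the linear-probe setup just described, the sketch below places a linear layer on top of the [CLS] hidden state of the pre-trained encoder; the number of labels is an arbitrary assumption and not part of the original card.
```python
# Sketch of a linear classifier on top of the pre-trained ViT encoder, as described above.
import torch.nn as nn
from transformers import ViTModel

class ViTLinearClassifier(nn.Module):
    def __init__(self, num_labels=10):  # num_labels is illustrative
        super().__init__()
        self.encoder = ViTModel.from_pretrained("google/vit-base-patch16-384")
        self.classifier = nn.Linear(self.encoder.config.hidden_size, num_labels)

    def forward(self, pixel_values):
        hidden = self.encoder(pixel_values=pixel_values).last_hidden_state
        cls_token = hidden[:, 0]  # final hidden state of the [CLS] token
        return self.classifier(cls_token)
        # For a linear probe, freeze self.encoder and train only self.classifier.
```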
## Intended uses & limitations
You can use the raw model for image classification. See the [model hub](https://huggingface.co/models?search=google/vit) to look for
fine-tuned versions on a task that interests you.
### How to use
Here is how to use this model to classify an image of the COCO 2017 dataset into one of the 1,000 ImageNet classes:
```python
from transformers import ViTFeatureExtractor, ViTForImageClassification
from PIL import Image
import requests
url = 'http://images.cocodataset.org/val2017/000000039769.jpg'
image = Image.open(requests.get(url, stream=True).raw)
feature_extractor = ViTFeatureExtractor.from_pretrained('google/vit-base-patch16-384')
model = ViTForImageClassification.from_pretrained('google/vit-base-patch16-384')
inputs = feature_extractor(images=image, return_tensors="pt")
outputs = model(**inputs)
logits = outputs.logits
# model predicts one of the 1000 ImageNet classes
predicted_class_idx = logits.argmax(-1).item()
print("Predicted class:", model.config.id2label[predicted_class_idx])
```
Currently, both the feature extractor and model support PyTorch. Tensorflow and JAX/FLAX are coming soon, and the API of ViTFeatureExtractor might change.
## Training data
The ViT model was pretrained on [ImageNet-21k](http://www.image-net.org/), a dataset consisting of 14 million images and 21k classes, and fine-tuned on [ImageNet](http://www.image-net.org/challenges/LSVRC/2012/), a dataset consisting of 1 million images and 1k classes.
## Training procedure
### Preprocessing
The exact details of preprocessing of images during training/validation can be found [here](https://github.com/google-research/vision_transformer/blob/master/vit_jax/input_pipeline.py).
Images are resized/rescaled to the same resolution (224x224 during pre-training, 384x384 during fine-tuning) and normalized across the RGB channels with mean (0.5, 0.5, 0.5) and standard deviation (0.5, 0.5, 0.5).
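For reference, the fine-tuning preprocessing can be approximated with torchvision transforms; this is only a sketch, and in practice `ViTFeatureExtractor` applies the equivalent steps for you.
```python
# Rough torchvision equivalent of the preprocessing described above (384x384 fine-tuning resolution).
from torchvision import transforms

preprocess = transforms.Compose([
    transforms.Resize((384, 384)),
    transforms.ToTensor(),  # scales pixel values to [0, 1]
    transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5]),
])
```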
### Pretraining
The model was trained on TPUv3 hardware (8 cores). All model variants are trained with a batch size of 4096 and learning rate warmup of 10k steps. For ImageNet, the authors found it beneficial to additionally apply gradient clipping at global norm 1. Pre-training resolution is 224.
## Evaluation results
For evaluation results on several image classification benchmarks, we refer to tables 2 and 5 of the original paper. Note that for fine-tuning, the best results are obtained with a higher resolution (384x384). Of course, increasing the model size will result in better performance.
### BibTeX entry and citation info
```bibtex
@misc{wu2020visual,
title={Visual Transformers: Token-based Image Representation and Processing for Computer Vision},
author={Bichen Wu and Chenfeng Xu and Xiaoliang Dai and Alvin Wan and Peizhao Zhang and Zhicheng Yan and Masayoshi Tomizuka and Joseph Gonzalez and Kurt Keutzer and Peter Vajda},
year={2020},
eprint={2006.03677},
archivePrefix={arXiv},
primaryClass={cs.CV}
}
```
```bibtex
@inproceedings{deng2009imagenet,
title={Imagenet: A large-scale hierarchical image database},
author={Deng, Jia and Dong, Wei and Socher, Richard and Li, Li-Jia and Li, Kai and Fei-Fei, Li},
booktitle={2009 IEEE conference on computer vision and pattern recognition},
pages={248--255},
year={2009},
organization={Ieee}
}
``` | [
-0.09721251577138901,
-0.029362304136157036,
-0.022420303896069527,
-0.03852984309196472,
0.041249584406614304,
-0.045458488166332245,
-0.016961142420768738,
0.06472743302583694,
-0.03152628615498543,
-0.04276318848133087,
0.02611558511853218,
0.0012826889287680387,
0.07469795644283295,
0.04358575865626335,
-0.05123632401227951,
0.040719132870435715,
0.04134805127978325,
0.10965802520513535,
-0.09626258164644241,
-0.019305527210235596,
0.02036515437066555,
-0.01293032057583332,
0.04005448892712593,
-0.05993042141199112,
0.036723457276821136,
0.003800989594310522,
-0.016427941620349884,
-0.08491842448711395,
0.0036820124369114637,
-0.048183031380176544,
-0.052741944789886475,
0.039843685925006866,
0.0020502437837421894,
0.024215880781412125,
-0.04277776926755905,
0.06777909398078918,
-0.003852756228297949,
-0.01686861366033554,
-0.035374414175748825,
-0.02404116839170456,
0.0008219882729463279,
0.0031954895239323378,
0.0023707200307399035,
-0.02044137567281723,
0.046751223504543304,
0.050445567816495895,
0.08286403119564056,
-0.049828074872493744,
-0.007132955361157656,
-0.05940907076001167,
-0.029311565682291985,
-0.02978779375553131,
-0.030683666467666626,
0.09727001190185547,
-0.054024238139390945,
0.004369498696178198,
-0.013277663849294186,
-0.04604555293917656,
-0.04686199873685837,
0.03396892920136452,
-0.027303045615553856,
0.036115873605012894,
-0.06746332347393036,
0.025335658341646194,
-0.07437098771333694,
0.003984496463090181,
0.04884504899382591,
-0.09784328937530518,
0.0549631342291832,
-0.13957656919956207,
-0.02644573152065277,
0.03176628053188324,
-0.000075157469836995,
0.042815692722797394,
0.027931267395615578,
0.019080914556980133,
0.14920857548713684,
-0.0010067519033327699,
0.07629840821027756,
-0.07269657403230667,
0.045528754591941833,
-0.0008730224799364805,
0.07104302197694778,
-0.030555086210370064,
0.09640929847955704,
0.030890924856066704,
-0.03733179718255997,
0.1077093631029129,
0.014045663177967072,
0.020814070478081703,
-0.03196476027369499,
-0.07801061123609543,
-0.039483603090047836,
0.002958639059215784,
0.0119027029722929,
-0.013427428901195526,
-0.03141854703426361,
-0.06357072293758392,
-0.04259078949689865,
0.07795456796884537,
0.023812495172023773,
-0.05293237417936325,
0.08168568462133408,
-0.01974540390074253,
0.04017200320959091,
0.032202303409576416,
0.02561485581099987,
0.11172933131456375,
0.058616749942302704,
-0.04204445704817772,
0.04875900596380234,
-0.028275221586227417,
-0.04631400480866432,
-0.08298730105161667,
0.060968562960624695,
0.030213337391614914,
-0.030743783339858055,
0.033357229083776474,
0.016666898503899574,
0.03418578580021858,
-0.02785365842282772,
-0.01567932404577732,
-0.028819113969802856,
-0.05122583732008934,
0.031037598848342896,
0.04951246455311775,
-0.1338418424129486,
3.6387023553520815e-33,
-0.02540782280266285,
0.07896443456411362,
0.0622824989259243,
-0.003753121243789792,
0.059890713542699814,
-0.04512181878089905,
0.061871517449617386,
-0.005501054227352142,
-0.04037472605705261,
-0.0625242292881012,
-0.03264417499303818,
-0.004150900989770889,
-0.013090139254927635,
0.11198554933071136,
0.010229462757706642,
-0.10536973178386688,
-0.021497555077075958,
0.025705894455313683,
0.05089562386274338,
0.03601858764886856,
0.038857053965330124,
0.012080933898687363,
0.02616191655397415,
0.005458333995193243,
-0.09282196313142776,
-0.00796216819435358,
0.004197950474917889,
-0.019716953858733177,
0.010058032348752022,
0.010339994914829731,
-0.06863585114479065,
0.02437029965221882,
0.04765937477350235,
0.005712899379432201,
-0.003170786891132593,
-0.05557434633374214,
-0.04137406125664711,
-0.06882771849632263,
0.01410431507974863,
-0.09378986060619354,
-0.0011130146449431777,
0.09089026600122452,
0.01892833784222603,
-0.04486428573727608,
-0.009069247171282768,
-0.004823802039027214,
-0.009073354303836823,
0.07305417209863663,
0.002377510303631425,
-0.015704195946455002,
0.047423359006643295,
0.016676390543580055,
-0.081780344247818,
-0.023478815332055092,
0.0043658604845404625,
0.03552500531077385,
0.07329350709915161,
0.04013707488775253,
0.03793994337320328,
0.02392464131116867,
0.03545403108000755,
0.028357647359371185,
-0.00230911816470325,
0.06732572615146637,
0.029776014387607574,
-0.043484579771757126,
-0.005055889952927828,
-0.024421749636530876,
-0.13266324996948242,
0.04995064064860344,
-0.016001038253307343,
0.03691614791750908,
-0.00016374517872463912,
-0.05962631478905678,
0.10911168903112411,
-0.024001654237508774,
0.05709889531135559,
-0.0530078150331974,
-0.03545766696333885,
0.09682273119688034,
-0.08405414968729019,
0.054698195308446884,
0.050292208790779114,
-0.08260419219732285,
-0.04576919227838516,
0.05509615316987038,
0.04666604846715927,
-0.056623078882694244,
0.05582372844219208,
0.0008060396648943424,
0.09474416077136993,
0.04127988964319229,
-0.03183324262499809,
-0.025023693218827248,
-0.05087300390005112,
-2.0318755971879178e-33,
-0.026079608127474785,
0.08543131500482559,
-0.08155260980129242,
0.06798134744167328,
-0.0021802999544888735,
-0.05132248252630234,
0.02707374095916748,
0.10638570785522461,
-0.020284408703446388,
-0.07513672858476639,
0.0748729258775711,
0.006023853085935116,
-0.06454847007989883,
-0.043065134435892105,
0.036152955144643784,
-0.0727868527173996,
0.029524806886911392,
-0.061170607805252075,
0.0041474709287285805,
0.010935910977423191,
0.05815634876489639,
0.11572792381048203,
-0.06663496047258377,
0.03876509144902229,
-0.0913926362991333,
-0.0013469012919813395,
-0.02963384985923767,
0.047625381499528885,
0.0026919108349829912,
-0.003469314891844988,
-0.014707047492265701,
-0.09111056476831436,
-0.0017105435254052281,
0.012724601663649082,
-0.04333001747727394,
0.013750369660556316,
0.050066471099853516,
-0.01275321189314127,
-0.021601242944598198,
0.0840955525636673,
0.04379677027463913,
-0.030845923349261284,
-0.017985928803682327,
0.03366069495677948,
-0.09201351553201675,
-0.06353212147951126,
-0.020524658262729645,
-0.05438210442662239,
0.04309413209557533,
0.01358349621295929,
-0.008353346958756447,
0.016320044174790382,
-0.08549833297729492,
0.039306316524744034,
-0.029872752726078033,
-0.0466863177716732,
-0.006256651598960161,
0.04026195779442787,
0.06471368670463562,
-0.0027750839944928885,
-0.051563017070293427,
-0.04565409943461418,
-0.07512158155441284,
0.006874930113554001,
-0.026527659967541695,
0.04826544225215912,
-0.08008318394422531,
0.02751925401389599,
-0.04997913911938667,
0.10777834802865982,
0.006714385002851486,
-0.011732341721653938,
0.047744810581207275,
0.021742992103099823,
-0.021787459030747414,
-0.07304833829402924,
0.04015490785241127,
0.056097570806741714,
0.016076216474175453,
-0.06293294578790665,
-0.014362323097884655,
-0.028817549347877502,
0.017591018229722977,
0.08170535415410995,
0.08140294998884201,
0.07260674238204956,
0.062331024557352066,
-0.03442860767245293,
0.024544712156057358,
0.022701038047671318,
-0.012619417160749435,
0.005986046511679888,
0.03222344443202019,
0.02995951659977436,
0.03750507906079292,
-5.3931142218743844e-8,
-0.07123597711324692,
0.08682450652122498,
-0.06897705793380737,
-0.058750081807374954,
0.00741612259298563,
-0.0493321567773819,
0.03115866333246231,
0.12470954656600952,
-0.018113622441887856,
0.07457074522972107,
0.04780422896146774,
0.010761368088424206,
-0.033928435295820236,
0.010239098221063614,
0.009800500236451626,
0.054702356457710266,
0.004004229325801134,
0.041933074593544006,
-0.0029561729170382023,
-0.048250678926706314,
-0.04722857102751732,
-0.005018506199121475,
-0.0019478061003610492,
-0.0387786440551281,
0.015175707638263702,
-0.029025768861174583,
-0.08658172935247421,
0.029817556962370872,
-0.0008946263114921749,
-0.04682840034365654,
-0.05186482146382332,
0.06690903007984161,
-0.032871779054403305,
-0.08789490163326263,
0.11018189042806625,
0.04995102435350418,
-0.08995898067951202,
0.005020748823881149,
-0.051806606352329254,
-0.011423622258007526,
0.019985299557447433,
0.0022365895565599203,
-0.005698021501302719,
0.011156541295349598,
0.1022384837269783,
0.035370342433452606,
-0.02226260118186474,
-0.07607369869947433,
-0.022090932354331017,
0.021711165085434914,
0.08963154256343842,
0.065971739590168,
-0.013836403377354145,
0.055099207907915115,
0.03778572008013725,
-0.020201610401272774,
0.035474490374326706,
-0.11164023727178574,
0.023169364780187607,
0.07747125625610352,
0.024924593046307564,
-0.004157981835305691,
-0.01158243790268898,
-0.009257031604647636
] |
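The closing bracket above ends the row's embedding vector. As a rough illustration of how such a per-model vector column might be consumed downstream, the sketch below loads rows from a hypothetical JSON-lines export and compares two embeddings by cosine similarity. The file name `models.jsonl`, the field names `modelId` and `embedding`, and the use of JSON lines are all assumptions made for this example; none of them are prescribed by the dataset itself.

```python
"""
Minimal, self-contained sketch (assumptions only) for working with a
per-model "embedding" vector like the float list that closes above.
The file name "models.jsonl" and the field names "modelId"/"embedding"
are hypothetical placeholders, not part of the dataset specification.
"""
import json

import numpy as np


def cosine_similarity(a: np.ndarray, b: np.ndarray) -> float:
    """Cosine similarity between two 1-D float vectors; returns 0.0 for zero vectors."""
    denom = float(np.linalg.norm(a) * np.linalg.norm(b))
    return float(a @ b) / denom if denom else 0.0


def load_embeddings(path: str) -> dict[str, np.ndarray]:
    """Read one JSON object per line and map each model id to its embedding vector."""
    table: dict[str, np.ndarray] = {}
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            row = json.loads(line)
            table[row["modelId"]] = np.asarray(row["embedding"], dtype=np.float32)
    return table


if __name__ == "__main__":
    embeddings = load_embeddings("models.jsonl")  # hypothetical local export of the table
    ids = list(embeddings)
    if len(ids) >= 2:
        sim = cosine_similarity(embeddings[ids[0]], embeddings[ids[1]])
        print(f"{ids[0]} vs {ids[1]}: cosine similarity = {sim:.4f}")
```

A nearest-neighbour search over such vectors (for example, ranking all rows by cosine similarity against one query embedding) would follow the same pattern, stacking the vectors into a single `np.ndarray` first.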