modelId (string, 4-112 chars) | sha (string, 40 chars) | lastModified (string, 24 chars) | tags (sequence) | pipeline_tag (string, 29 classes) | private (bool, 1 class) | author (string, 2-38 chars, nullable) | config (null) | id (string, 4-112 chars) | downloads (float64, 0-36.8M, nullable) | likes (float64, 0-712, nullable) | library_name (string, 17 classes) | readme (string, 0-186k chars) | embedding (sequence) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
hyunwoongko/ctrlsum-cnndm | 3f8f0a6caf964a79f13ba9cbb28a25757b72b4cd | 2021-03-21T15:55:50.000Z | [
"pytorch",
"bart",
"text2text-generation",
"transformers",
"autotrain_compatible"
] | text2text-generation | false | hyunwoongko | null | hyunwoongko/ctrlsum-cnndm | 1,307 | 2 | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
dennlinger/roberta-cls-consec | 26d06e22b97525aa959aaa5dfdaf4e3ab8bcd387 | 2021-06-14T13:07:40.000Z | [
"pytorch",
"jax",
"roberta",
"text-classification",
"arxiv:2012.03619",
"transformers"
] | text-classification | false | dennlinger | null | dennlinger/roberta-cls-consec | 1,304 | 1 | transformers | # About this model: Topical Change Detection in Documents
This network has been fine-tuned for the task described in the paper *Topical Change Detection in Documents via Embeddings of Long Sequences* and is our best-performing base-transformer model. You can find more detailed information on the paper's GitHub page [here](https://github.com/dennlinger/TopicalChange), or read the [paper itself](https://arxiv.org/abs/2012.03619). The weights are based on RoBERTa-base.
# Load the model
```python
from transformers import AutoModelForSequenceClassification, AutoTokenizer
tokenizer = AutoTokenizer.from_pretrained('dennlinger/roberta-cls-consec')
model = AutoModelForSequenceClassification.from_pretrained('dennlinger/roberta-cls-consec')
```
# Input Format
The model expects two segments separated by the `[SEP]` token. In our training setup, samples were entire paragraphs (up to 512 tokens across the two paragraphs), and the model was trained specifically on a Terms of Service data set. Note that this might lead to poor performance on "general" topics, such as news articles or Wikipedia.
# Training objective
The training task is to determine whether two text segments (paragraphs) belong to the same topical section or not. This can be utilized to create a topical segmentation of a document by consecutively predicting the "coherence" of two segments.
If you are experimenting via the Hugging Face Model API, the `LABEL`s are interpreted as follows (a usage sketch is shown after this list):
* `LABEL_0`: Two input segments separated by `[SEP]` do *not* belong to the same topic.
* `LABEL_1`: Two input segments separated by `[SEP]` do belong to the same topic.
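Below is a minimal sketch of how consecutive paragraph pairs could be scored with these labels to segment a document. The example paragraphs and the 0.5 decision threshold are illustrative assumptions, and the tokenizer's built-in pair encoding is used here to insert the separator token.
```python
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("dennlinger/roberta-cls-consec")
model = AutoModelForSequenceClassification.from_pretrained("dennlinger/roberta-cls-consec")
model.eval()

# Hypothetical document, represented as a list of consecutive paragraphs.
paragraphs = [
    "The provider grants the user a limited license to use the service.",
    "The license is non-exclusive, non-transferable and may be revoked.",
    "Payments are due within thirty days of the invoice date.",
]

# Score each consecutive pair; index 1 corresponds to LABEL_1 ("same topic").
with torch.no_grad():
    for first, second in zip(paragraphs, paragraphs[1:]):
        inputs = tokenizer(first, second, return_tensors="pt", truncation=True)
        probs = model(**inputs).logits.softmax(dim=-1)[0]
        p_same = probs[1].item()
        boundary = p_same < 0.5  # assumed decision threshold for illustration
        print(f"p(same topic) = {p_same:.2f} -> topic boundary: {boundary}")
```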
# Performance
The results of this model can be found in the paper. We average over models trained with five different random seeds, so the results for this particular checkpoint may differ slightly from the exact values reported in the paper.
Note that this model is *not* trained to classify single texts; it only works with two (separated) inputs. | [
-0.1770334243774414,
-0.06559965759515762,
0.04291548207402229,
0.023299645632505417,
-0.012156950309872627,
0.0013196569634601474,
-0.028945477679371834,
0.0354122556746006,
0.038648586720228195,
-0.0265353936702013,
-0.055899716913700104,
-0.009138931520283222,
0.02125435322523117,
0.01843537762761116,
-0.009690695442259312,
0.04917788505554199,
0.025124091655015945,
0.02366703562438488,
-0.10524115711450577,
-0.0030071951914578676,
0.07790159434080124,
0.14079873263835907,
-0.004583774600178003,
-0.009633667767047882,
0.07830455154180527,
-0.0038222663570195436,
-0.022147400304675102,
-0.03945324942469597,
0.03943432867527008,
0.03145143389701843,
0.010951155796647072,
0.06150398775935173,
-0.04858711361885071,
0.08277760446071625,
0.0038835881277918816,
0.043695591390132904,
-0.0528436079621315,
0.00886469054967165,
0.04131072759628296,
0.010162795893847942,
0.04311371222138405,
-0.03994094207882881,
-0.05008425936102867,
0.021677106618881226,
0.028080841526389122,
-0.032251667231321335,
0.04100758954882622,
0.059290170669555664,
-0.02906307391822338,
-0.01149754412472248,
-0.007390056271106005,
-0.018035436049103737,
-0.002339450176805258,
0.1030132845044136,
-0.07076264172792435,
-0.003657720750197768,
0.06713555008172989,
0.014555760659277439,
0.02057615853846073,
-0.06962659955024719,
-0.04798691347241402,
-0.0723031684756279,
-0.07646278291940689,
-0.060608766973018646,
-0.02278762124478817,
-0.03549126163125038,
0.03132002800703049,
0.027855001389980316,
0.06865102797746658,
0.042893342673778534,
-0.018959006294608116,
0.06185705214738846,
-0.04087052866816521,
0.062126487493515015,
0.06629256159067154,
-0.010877667926251888,
0.08577406406402588,
0.006929602473974228,
0.05811523646116257,
-0.1259281039237976,
0.07121232897043228,
-0.021345945075154305,
0.14109434187412262,
0.0023629406932741404,
0.06575965881347656,
-0.06877224892377853,
0.0019977865740656853,
-0.007268653251230717,
-0.020544657483696938,
-0.0122946472838521,
-0.007415585685521364,
-0.10305920243263245,
0.043164659291505814,
-0.032414861023426056,
-0.03121897764503956,
0.047975219786167145,
-0.03575965017080307,
0.0456436462700367,
0.037006571888923645,
0.0786755308508873,
0.030373936519026756,
0.02140447311103344,
0.01533649954944849,
0.024816585704684258,
-0.01259019784629345,
-0.06187796965241432,
0.02765141986310482,
0.043124113231897354,
-0.014580313116312027,
-0.029161814600229263,
0.04660855233669281,
0.024123936891555786,
-0.0591079406440258,
-0.01834874227643013,
0.02861999161541462,
0.019687624648213387,
0.1177767813205719,
0.016255034133791924,
-0.062260184437036514,
0.10167289525270462,
0.014407218433916569,
-0.011085099540650845,
-0.0735398679971695,
0.018735798075795174,
-0.02685914933681488,
0.045730236917734146,
-0.023491505533456802,
4.9410887499517425e-33,
0.0018360615940764546,
0.018367242068052292,
-0.021237995475530624,
0.041866280138492584,
-0.05429442971944809,
0.03868215158581734,
-0.03101934678852558,
0.011441138572990894,
-0.007306704763323069,
-0.03303249552845955,
-0.05708117038011551,
0.0876123309135437,
-0.004055159632116556,
0.03641139343380928,
-0.012713379226624966,
-0.07579013705253601,
-0.05717068910598755,
0.00463006179779768,
0.06570505350828171,
0.003602751065045595,
0.03528189659118652,
0.003540372010320425,
-0.047961290925741196,
-0.04188886284828186,
-0.09930191934108734,
0.011309145949780941,
0.07818105071783066,
-0.05622822046279907,
-0.034973159432411194,
-0.009635000489652157,
-0.07478685677051544,
0.04915827140212059,
-0.006574473809450865,
-0.04054446518421173,
-0.03543522581458092,
-0.015632890164852142,
0.0398000106215477,
-0.05177031829953194,
-0.005728151183575392,
-0.1426195353269577,
-0.026671210303902626,
0.0555461049079895,
0.02492872253060341,
-0.05001154541969299,
-0.05046875774860382,
-0.014499650336802006,
-0.008513309061527252,
0.017573481425642967,
0.02364891767501831,
-0.020510030910372734,
0.09670677781105042,
0.023341014981269836,
-0.019887303933501244,
0.016915766522288322,
0.06078656017780304,
0.02563321776688099,
0.08070903271436691,
0.04021170735359192,
0.12186352163553238,
-0.007024659775197506,
0.08502313494682312,
-0.008126101456582546,
0.05316106602549553,
0.06087369844317436,
0.06718143075704575,
-0.0014901036629453301,
-0.010269965045154095,
0.02418242022395134,
-0.004612746182829142,
0.05748123303055763,
-0.0651141032576561,
0.0052817403338849545,
-0.039211906492710114,
0.008284577168524265,
-0.005346064455807209,
-0.05087526515126228,
0.02867683582007885,
-0.03020172007381916,
-0.10089747607707977,
0.011100554838776588,
0.013287088833749294,
0.026335466653108597,
0.08143389225006104,
-0.03629637137055397,
-0.08235560357570648,
-0.06322198361158371,
0.06158754974603653,
-0.06227085739374161,
-0.12015844881534576,
-0.08116266876459122,
0.06296392530202866,
0.04657856374979019,
-0.00890305545181036,
0.049550000578165054,
0.016732055693864822,
-4.7855153786982744e-33,
0.013000452890992165,
0.02724124863743782,
-0.04727901890873909,
0.051005978137254715,
0.016817186027765274,
-0.046090878546237946,
-0.013131441548466682,
0.14079241454601288,
-0.011306557804346085,
-0.04561939463019371,
0.06086748465895653,
-0.08668415993452072,
-0.03392220661044121,
-0.030279139056801796,
0.04122448340058327,
-0.022725878283381462,
-0.08955798298120499,
-0.020903032273054123,
0.00591279286891222,
0.0408422015607357,
-0.0026181817520409822,
0.07622162997722626,
-0.18307049572467804,
0.04618543013930321,
-0.05016074702143669,
-0.01057632640004158,
-0.02634703554213047,
0.045412708073854446,
-0.00530764227733016,
-0.013839859515428543,
-0.08978299051523209,
-0.03641318902373314,
-0.053864073008298874,
0.060698773711919785,
-0.07874473929405212,
0.03725332394242287,
0.014297867193818092,
-0.014311284758150578,
-0.03181740269064903,
0.057057395577430725,
0.07374262064695358,
0.06809350848197937,
-0.05025254189968109,
0.08079078048467636,
-0.04145720228552818,
-0.06876688450574875,
-0.06703361123800278,
-0.0027133820112794638,
-0.015520208515226841,
-0.026222940534353256,
0.014985802583396435,
0.017100811004638672,
-0.07256598025560379,
0.045165374875068665,
-0.024708369746804237,
-0.008500851690769196,
0.02215789631009102,
-0.09175094962120056,
-0.04575890675187111,
-0.010903128422796726,
-0.08318311721086502,
0.059270232915878296,
-0.0022727143950760365,
-0.06668844819068909,
0.07081630825996399,
-0.07544872164726257,
-0.007012633141130209,
-0.041861433535814285,
-0.01820974238216877,
0.03663316369056702,
0.02383323758840561,
-0.0012193761067464948,
0.02606804482638836,
-0.04051421210169792,
0.044478658586740494,
-0.04794016852974892,
-0.0033651150297373533,
-0.09002302587032318,
-0.05759501829743385,
-0.0566108413040638,
-0.1380986124277115,
-0.02432909794151783,
0.014767159707844257,
0.07765951752662659,
-0.01395399123430252,
0.11137481033802032,
0.031019479036331177,
0.09580323100090027,
0.020559199154376984,
0.034133899956941605,
0.016348013654351234,
-0.03803659975528717,
0.03690838813781738,
0.028099769726395607,
-0.06055625528097153,
-5.550906223561469e-8,
-0.055780477821826935,
0.05523998290300369,
-0.08569581061601639,
0.04252157732844353,
-0.024495571851730347,
-0.024111364036798477,
-0.012273240834474564,
0.06850043684244156,
-0.025196289643645287,
0.04487956687808037,
-0.010030281729996204,
-0.012081844732165337,
-0.07299812883138657,
-0.004239064175635576,
-0.03845064714550972,
0.07367324084043503,
0.02653585933148861,
0.0335683599114418,
-0.056577395647764206,
-0.028072718530893326,
0.01762101985514164,
0.06670233607292175,
0.0044699180871248245,
0.02467206120491028,
0.020464761182665825,
-0.005980049725621939,
-0.016166534274816513,
0.09519308805465698,
0.03187047690153122,
-0.027504269033670425,
-0.027137337252497673,
0.023017141968011856,
-0.041031111031770706,
-0.012603038921952248,
-0.07831744104623795,
0.09106957167387009,
0.0740307942032814,
-0.027517050504684448,
-0.02480044774711132,
0.05295366421341896,
0.028799107298254967,
-0.029575498774647713,
-0.10731349140405655,
0.01433596108108759,
0.024970263242721558,
-0.01885976269841194,
-0.012787051498889923,
-0.04805755615234375,
0.03837217390537262,
0.03597709909081459,
0.02966281585395336,
-0.044506706297397614,
-0.009869481436908245,
0.023826291784644127,
0.005136503838002682,
-0.0030465296003967524,
-0.017383718863129616,
-0.008982379920780659,
0.0760510116815567,
0.041765548288822174,
0.003109224373474717,
-0.030759725719690323,
-0.019988246262073517,
-0.0037962745409458876
] |
pedropei/sentence-level-certainty | 57bb19e0804a77689ca02f2b1d408d162413cdc2 | 2021-09-29T05:35:19.000Z | [
"pytorch",
"bert",
"text-classification",
"transformers"
] | text-classification | false | pedropei | null | pedropei/sentence-level-certainty | 1,303 | null | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
johngiorgi/declutr-small | d899ea3e95e6a65499184647d080379e6c477208 | 2022-03-11T14:47:48.000Z | [
"pytorch",
"jax",
"roberta",
"fill-mask",
"arxiv:2006.03659",
"transformers",
"autotrain_compatible"
] | fill-mask | false | johngiorgi | null | johngiorgi/declutr-small | 1,302 | 2 | transformers | # DeCLUTR-small
## Model description
The "DeCLUTR-small" model from our paper: [DeCLUTR: Deep Contrastive Learning for Unsupervised Textual Representations](https://arxiv.org/abs/2006.03659).
## Intended uses & limitations
The model is intended to be used as a universal sentence encoder, similar to [Google's Universal Sentence Encoder](https://tfhub.dev/google/universal-sentence-encoder/4) or [Sentence Transformers](https://github.com/UKPLab/sentence-transformers).
#### How to use
Please see [our repo](https://github.com/JohnGiorgi/DeCLUTR) for full details. A simple example is shown below.
##### With [SentenceTransformers](https://www.sbert.net/)
```python
from scipy.spatial.distance import cosine
from sentence_transformers import SentenceTransformer
# Load the model
model = SentenceTransformer("johngiorgi/declutr-small")
# Prepare some text to embed
texts = [
"A smiling costumed woman is holding an umbrella.",
"A happy woman in a fairy costume holds an umbrella.",
]
# Embed the text
embeddings = model.encode(texts)
# Compute a semantic similarity via the cosine distance
semantic_sim = 1 - cosine(embeddings[0], embeddings[1])
```
##### With 🤗 Transformers
```python
import torch
from scipy.spatial.distance import cosine
from transformers import AutoModel, AutoTokenizer
# Load the model
tokenizer = AutoTokenizer.from_pretrained("johngiorgi/declutr-small")
model = AutoModel.from_pretrained("johngiorgi/declutr-small")
# Prepare some text to embed
text = [
"A smiling costumed woman is holding an umbrella.",
"A happy woman in a fairy costume holds an umbrella.",
]
inputs = tokenizer(text, padding=True, truncation=True, return_tensors="pt")
# Embed the text
with torch.no_grad():
sequence_output = model(**inputs)[0]
# Mean pool the token-level embeddings to get sentence-level embeddings
embeddings = torch.sum(
sequence_output * inputs["attention_mask"].unsqueeze(-1), dim=1
) / torch.clamp(torch.sum(inputs["attention_mask"], dim=1, keepdim=True), min=1e-9)
# Compute a semantic similarity via the cosine distance
semantic_sim = 1 - cosine(embeddings[0], embeddings[1])
```
### BibTeX entry and citation info
```bibtex
@article{Giorgi2020DeCLUTRDC,
title={DeCLUTR: Deep Contrastive Learning for Unsupervised Textual Representations},
author={John M Giorgi and Osvald Nitski and Gary D. Bader and Bo Wang},
journal={ArXiv},
year={2020},
volume={abs/2006.03659}
}
``` | [
-0.03045801632106304,
-0.07003544270992279,
0.012529542669653893,
0.00010053622827399522,
-0.021056300029158592,
0.011870446614921093,
-0.1167537048459053,
0.04056871682405472,
0.013162489980459213,
-0.04640069976449013,
0.051908984780311584,
-0.02388315461575985,
0.0010619011009112,
0.07862444221973419,
-0.033395905047655106,
0.008854762651026249,
0.08382044732570648,
0.09339739382266998,
-0.10934381186962128,
-0.13066880404949188,
0.10191426426172256,
0.05462755262851715,
-0.019639136269688606,
0.0021535996347665787,
0.08539439737796783,
0.03651439771056175,
-0.0025775169488042593,
-0.008574789389967918,
0.0438983216881752,
-0.03925352171063423,
0.015574216842651367,
-0.05228303372859955,
0.03825349360704422,
0.09647480398416519,
0.03181460499763489,
0.07260894775390625,
-0.020630979910492897,
-0.021760668605566025,
0.005499576218426228,
0.0031553925946354866,
0.0032796384766697884,
0.00920052919536829,
-0.00006225470133358613,
0.023913025856018066,
0.0726379007101059,
-0.04841509833931923,
-0.11224251985549927,
0.0016495941672474146,
-0.015563678927719593,
-0.07031379640102386,
-0.017336983233690262,
0.09697408974170685,
-0.02297336980700493,
0.0803457498550415,
-0.06641826778650284,
-0.02556537836790085,
-0.001632369589060545,
-0.02635689452290535,
-0.028210707008838654,
-0.10725756734609604,
-0.06342758983373642,
-0.02343158796429634,
-0.04675425589084625,
-0.03599505126476288,
-0.016673583537340164,
0.019766537472605705,
0.00884818285703659,
-0.0038600789848715067,
-0.04360102862119675,
0.05962086468935013,
-0.04565182328224182,
0.031184211373329163,
0.012254554778337479,
0.026934221386909485,
-0.036886442452669144,
-0.00440114876255393,
0.06231863051652908,
0.007521260995417833,
0.04192299768328667,
-0.03554115071892738,
0.05437852069735527,
0.011072040535509586,
0.06709315627813339,
0.05410001799464226,
0.057569488883018494,
-0.030202172696590424,
-0.046790674328804016,
0.006355653051286936,
0.00014027477300260216,
0.025111209601163864,
-0.04384053125977516,
-0.11730827391147614,
0.010743862017989159,
-0.008440209552645683,
-0.015315143391489983,
-0.004734603222459555,
0.003244115272536874,
-0.020502248778939247,
-0.06835798919200897,
0.011621206067502499,
0.043206941336393356,
0.016518304124474525,
0.05937131494283676,
-0.10278104990720749,
-0.05832996219396591,
-0.009351871907711029,
0.055533431470394135,
-0.018978191539645195,
0.11870647966861725,
-0.11267072707414627,
-0.04285589978098869,
0.007601723540574312,
-0.07167212665081024,
0.0025357897393405437,
0.07393750548362732,
-0.008873098529875278,
0.032615020871162415,
-0.025198740884661674,
0.041363779455423355,
-0.005114846397191286,
0.006319456733763218,
0.02282925695180893,
-0.06323453783988953,
0.04757457599043846,
0.012727709487080574,
-0.053405676037073135,
-0.041658010333776474,
5.0005985188041054e-33,
-0.004680193029344082,
0.043611589819192886,
0.01116847898811102,
0.027434587478637695,
0.016795316711068153,
0.050923895090818405,
-0.07007458806037903,
0.04290764778852463,
-0.09198366850614548,
0.019600648432970047,
-0.05452211573719978,
0.08240991830825806,
-0.05360748618841171,
0.0726969912648201,
-0.015221474692225456,
-0.017334280535578728,
0.0013113942695781589,
-0.01277484092861414,
-0.02354000136256218,
-0.010255828499794006,
0.022323161363601685,
0.11154112219810486,
-0.026826145127415657,
-0.04172395169734955,
-0.133356973528862,
-0.02616114728152752,
0.04787130281329155,
-0.04485883191227913,
-0.030952399596571922,
0.023917119950056076,
-0.15247386693954468,
0.0012718045618385077,
0.028010478243231773,
0.03199990466237068,
0.07234086841344833,
-0.03157301992177963,
-0.025678139179944992,
-0.02074812538921833,
0.028796469792723656,
-0.06926087290048599,
-0.002260262379422784,
0.05878276377916336,
-0.05872837081551552,
-0.05934281647205353,
-0.037005212157964706,
0.00911839958280325,
0.05848931521177292,
-0.0436365082859993,
0.020535923540592194,
-0.005666444543749094,
0.06589912623167038,
0.04105110093951225,
-0.09641686826944351,
-0.09998826682567596,
0.02832024171948433,
0.025621920824050903,
0.09235438704490662,
0.04475972801446915,
0.06134023144841194,
-0.04161744937300682,
0.03535822033882141,
0.02437933161854744,
0.06744552403688431,
0.026974966749548912,
0.07567678391933441,
0.024879977107048035,
-0.04986453428864479,
0.015105508267879486,
-0.02104688435792923,
0.041162751615047455,
-0.0799393281340599,
0.0386575348675251,
-0.05787862464785576,
0.030586590990424156,
0.053010523319244385,
-0.01172347366809845,
0.07400555908679962,
-0.019828466698527336,
-0.03500533476471901,
0.0038397074677050114,
-0.01859663799405098,
0.01586390659213066,
0.0032239591237157583,
-0.09744716435670853,
-0.09340594708919525,
-0.008558375760912895,
0.05658883601427078,
-0.06000300869345665,
-0.01168106310069561,
-0.050179045647382736,
-0.00681776599958539,
-0.030585939064621925,
0.0003872110683005303,
-0.023205291479825974,
0.0668848380446434,
-7.218084122973885e-33,
0.008619574829936028,
0.03238248825073242,
-0.047119494527578354,
0.056992508471012115,
-0.05557079613208771,
-0.04330494999885559,
0.048133447766304016,
0.10049277544021606,
0.07647215574979782,
-0.0003138291067443788,
0.012604127638041973,
-0.008131521753966808,
0.046351801604032516,
-0.039320848882198334,
0.10981661826372147,
0.10243389755487442,
0.007032153662294149,
0.06422123312950134,
-0.007742627058178186,
0.010741930454969406,
0.03632591664791107,
0.12395589053630829,
-0.12696805596351624,
0.0011379476636648178,
-0.03132186457514763,
0.02748206816613674,
-0.03313073143362999,
0.015928052365779877,
-0.0009102423791773617,
-0.07327888160943985,
-0.07691838592290878,
0.028146814554929733,
-0.016526980325579643,
0.008532356470823288,
-0.15536262094974518,
0.01589120179414749,
-0.002334435237571597,
-0.06371282041072845,
-0.04909070208668709,
0.007296599447727203,
0.07407192140817642,
0.012755661271512508,
-0.011796538718044758,
0.047397397458553314,
0.007425858173519373,
-0.01091791782528162,
-0.08325343579053879,
-0.06250026077032089,
0.0556269995868206,
0.04433242231607437,
-0.009910938329994678,
0.0026344070211052895,
-0.11765047907829285,
0.019188011065125465,
-0.04806690663099289,
-0.013083189725875854,
-0.00769656291231513,
-0.0410877987742424,
0.00320070399902761,
0.015263854525983334,
-0.06809724867343903,
-0.03581971675157547,
-0.004329728428274393,
-0.07649637758731842,
0.02835105173289776,
-0.05030287057161331,
-0.011609725654125214,
0.02122621238231659,
0.038387928158044815,
-0.06237969547510147,
0.050592854619026184,
-0.023553846403956413,
0.028051743283867836,
0.005340175703167915,
0.014405030757188797,
-0.03140353783965111,
-0.04229522496461868,
0.012830933555960655,
-0.0937991738319397,
0.0030293455347418785,
-0.01007874682545662,
-0.03449084237217903,
-0.0037291485350579023,
0.044867005199193954,
0.052919935435056686,
0.006792351137846708,
0.012152513489127159,
0.08474908769130707,
-0.04032991826534271,
0.053518835455179214,
-0.049687460064888,
-0.006823335774242878,
0.07098154723644257,
0.056466538459062576,
0.011600363068282604,
-5.7278271015093196e-8,
-0.10320325940847397,
-0.014859999530017376,
-0.04052947089076042,
0.018307046964764595,
-0.028197603300213814,
-0.05774709954857826,
-0.014626150019466877,
-0.012560461647808552,
-0.07277275621891022,
0.01169541385024786,
0.04811718687415123,
-0.012389901094138622,
-0.06197924166917801,
-0.011865630745887756,
-0.05254469811916351,
0.1514539271593094,
0.019229695200920105,
-0.02168966643512249,
0.055158358067274094,
0.04880326986312866,
0.04586287587881088,
0.024596311151981354,
-0.007515281438827515,
0.042456019669771194,
-0.035936713218688965,
0.031061790883541107,
-0.0613703615963459,
0.08915678411722183,
-0.02486168034374714,
-0.0061899032443761826,
0.053346168249845505,
0.065901018679142,
-0.003578693838790059,
-0.052122581750154495,
-0.05999469757080078,
0.05112980306148529,
0.017097588628530502,
-0.002813478000462055,
0.0362604595720768,
0.029862355440855026,
0.024459078907966614,
0.02096877619624138,
-0.12460989505052567,
0.07438904047012329,
0.04134641960263252,
0.04312702268362045,
0.06130640581250191,
-0.10580167174339294,
-0.022532403469085693,
0.031329650431871414,
0.0397241935133934,
-0.04835608974099159,
-0.030746670439839363,
0.004714369773864746,
0.06366804987192154,
-0.024455146864056587,
0.03221786022186279,
-0.010412879288196564,
-0.025469867512583733,
-0.0065093575976789,
0.004720527678728104,
0.11427277326583862,
0.04110881686210632,
-0.01081798318773508
] |
monologg/koelectra-base-discriminator | c7005c19e7e523a86c96ad67fbd49c888ebbf287 | 2021-10-20T16:55:57.000Z | [
"pytorch",
"electra",
"pretraining",
"ko",
"transformers",
"korean",
"license:apache-2.0"
] | null | false | monologg | null | monologg/koelectra-base-discriminator | 1,298 | null | transformers | ---
language: ko
license: apache-2.0
tags:
- korean
---
# KoELECTRA (Base Discriminator)
Pretrained ELECTRA Language Model for Korean (`koelectra-base-discriminator`)
For more detail, please see [original repository](https://github.com/monologg/KoELECTRA/blob/master/README_EN.md).
## Usage
### Load model and tokenizer
```python
>>> from transformers import ElectraModel, ElectraTokenizer
>>> model = ElectraModel.from_pretrained("monologg/koelectra-base-discriminator")
>>> tokenizer = ElectraTokenizer.from_pretrained("monologg/koelectra-base-discriminator")
```
### Tokenizer example
```python
>>> from transformers import ElectraTokenizer
>>> tokenizer = ElectraTokenizer.from_pretrained("monologg/koelectra-base-discriminator")
>>> tokenizer.tokenize("[CLS] 한국어 ELECTRA를 공유합니다. [SEP]")
['[CLS]', '한국어', 'E', '##L', '##EC', '##T', '##RA', '##를', '공유', '##합니다', '.', '[SEP]']
>>> tokenizer.convert_tokens_to_ids(['[CLS]', '한국어', 'E', '##L', '##EC', '##T', '##RA', '##를', '공유', '##합니다', '.', '[SEP]'])
[2, 18429, 41, 6240, 15229, 6204, 20894, 5689, 12622, 10690, 18, 3]
```
## Example using ElectraForPreTraining
```python
import torch
from transformers import ElectraForPreTraining, ElectraTokenizer
discriminator = ElectraForPreTraining.from_pretrained("monologg/koelectra-base-discriminator")
tokenizer = ElectraTokenizer.from_pretrained("monologg/koelectra-base-discriminator")
sentence = "나는 방금 밥을 먹었다."
fake_sentence = "나는 내일 밥을 먹었다."
fake_tokens = tokenizer.tokenize(fake_sentence)
fake_inputs = tokenizer.encode(fake_sentence, return_tensors="pt")
discriminator_outputs = discriminator(fake_inputs)
predictions = torch.round((torch.sign(discriminator_outputs[0]) + 1) / 2)
print(list(zip(fake_tokens, predictions[0].tolist()[1:-1])))  # drop the [CLS]/[SEP] positions
```
| [
-0.09200117737054825,
0.01717865653336048,
-0.01671670190989971,
-0.02994278073310852,
0.0005711718695238233,
0.029604090377688408,
0.025805002078413963,
-0.00406632898375392,
-0.027251791208982468,
-0.03274298831820488,
0.04632452875375748,
-0.11095980554819107,
0.04278950020670891,
-0.00884988997131586,
0.043563514947891235,
0.007161008194088936,
-0.029520636424422264,
0.06755893677473068,
-0.05855637416243553,
-0.03766195848584175,
0.16602113842964172,
-0.03054741956293583,
0.026191120967268944,
-0.041508499532938004,
0.05044633150100708,
-0.008082392625510693,
0.02749382145702839,
0.037434641271829605,
0.04446675255894661,
-0.0499420240521431,
0.06044365465641022,
0.08281262964010239,
0.05601806193590164,
-0.01945575140416622,
-0.0061828563921153545,
0.053866561502218246,
-0.06076502799987793,
-0.05706429481506348,
-0.015896234661340714,
-0.007340807933360338,
0.03947991877794266,
0.007427780888974667,
-0.002705851336941123,
-0.053907476365566254,
0.01663825660943985,
-0.07617760449647903,
-0.03999204561114311,
-0.06145821139216423,
-0.057190001010894775,
-0.062462374567985535,
0.03934679180383682,
-0.06069684028625488,
0.11030782014131546,
0.039867136627435684,
-0.0784907191991806,
-0.014463260769844055,
-0.03876887634396553,
0.03891570866107941,
0.08872116357088089,
-0.028330396860837936,
-0.06618472933769226,
0.012713334523141384,
-0.014628635719418526,
0.03357253223657608,
-0.08719813078641891,
-0.039644453674554825,
0.09045446664094925,
-0.03581829369068146,
0.01944473385810852,
-0.0253010131418705,
-0.021019861102104187,
-0.032742954790592194,
0.07713633030653,
0.05459633097052574,
0.013931116089224815,
-0.0069212159141898155,
0.15126845240592957,
0.04334741830825806,
0.004475556313991547,
-0.10021455585956573,
0.004211011342704296,
-0.0494779534637928,
-0.07165387272834778,
0.014796480536460876,
0.08252163231372833,
0.016690079122781754,
-0.06486916542053223,
-0.03874906525015831,
0.020335735753178596,
0.07709921151399612,
-0.005340594332665205,
-0.0008892780169844627,
0.053110521286726,
-0.02788439579308033,
-0.05872686207294464,
0.04191329702734947,
-0.008654103614389896,
0.07829214632511139,
0.010599876753985882,
0.09567738324403763,
-0.0019429787062108517,
0.023858215659856796,
-0.015563765540719032,
-0.0071449582464993,
-0.11495892703533173,
-0.13432888686656952,
-0.007174117956310511,
0.04143286868929863,
-0.0328044556081295,
-0.01923251524567604,
0.022526482120156288,
-0.04256700351834297,
0.018183790147304535,
-0.002387449610978365,
0.04407377913594246,
0.009301610291004181,
0.041904255747795105,
-0.030118456110358238,
0.055192116647958755,
0.025957351550459862,
0.015685411170125008,
-0.09446936100721359,
-0.03812944516539574,
-0.008990795351564884,
-0.019570056349039078,
0.03727060928940773,
-0.044414009898900986,
5.2494958559372934e-33,
0.08853035420179367,
0.016733411699533463,
0.03948570787906647,
-0.04035286605358124,
-0.051821351051330566,
-0.012622419744729996,
-0.004702514037489891,
0.03453152999281883,
-0.09461430460214615,
-0.04798326641321182,
-0.08377528935670853,
0.11269170045852661,
-0.04214710369706154,
0.022847704589366913,
-0.044947706162929535,
-0.01586959883570671,
-0.08093107491731644,
-0.0030230372212827206,
0.013797268271446228,
-0.000028572712835739367,
0.08575942367315292,
0.017824383452534676,
-0.00020294082059990615,
0.07039523869752884,
-0.019160740077495575,
-0.006496873218566179,
0.0134486835449934,
-0.11923360824584961,
-0.04258162900805473,
0.06159926578402519,
0.009126506745815277,
-0.02719688229262829,
0.020286623388528824,
0.06453672051429749,
-0.08238574862480164,
0.010709298774600029,
-0.027548864483833313,
0.01261898037046194,
-0.05470704287290573,
-0.10859205573797226,
0.021494416519999504,
0.011743252165615559,
-0.033009495586156845,
0.035404086112976074,
0.0007562062819488347,
0.005261181388050318,
-0.0180292259901762,
0.018027229234576225,
0.11529519408941269,
0.012091496028006077,
0.0011302478378638625,
-0.008242321200668812,
0.0316779799759388,
0.038258690387010574,
0.015010216273367405,
0.1604931503534317,
0.03336971625685692,
-0.00022392805840354413,
0.05507117137312889,
-0.06579861044883728,
-0.1020355075597763,
0.11597329378128052,
0.025534050539135933,
-0.02156044729053974,
0.06298541277647018,
-0.011644494719803333,
-0.00602938886731863,
-0.09763779491186142,
-0.01978107914328575,
-0.06373412907123566,
-0.04957446828484535,
-0.04639090597629547,
-0.023333314806222916,
0.05525342375040054,
0.014297187328338623,
-0.046196889132261276,
-0.0007533148746006191,
-0.047321584075689316,
-0.06717166304588318,
-0.0033033699728548527,
-0.02141028828918934,
-0.032618436962366104,
0.017596857622265816,
-0.0392073355615139,
0.07927187532186508,
-0.066554494202137,
0.012121560052037239,
-0.01824171468615532,
-0.015940135344862938,
-0.010320942848920822,
0.03324936330318451,
-0.01682095229625702,
-0.04610566049814224,
-0.02674201689660549,
-0.024122020229697227,
-6.005143664922185e-33,
0.023409035056829453,
0.05135044455528259,
-0.018287722021341324,
0.05075865611433983,
-0.034501925110816956,
-0.019648704677820206,
0.042719680815935135,
0.08919856697320938,
-0.028649501502513885,
-0.04115307331085205,
0.05833566188812256,
-0.049621857702732086,
0.05387319624423981,
-0.03579672425985336,
0.09766010195016861,
-0.019230665639042854,
-0.03813490644097328,
0.0784561038017273,
0.06847713887691498,
0.07099983841180801,
-0.01912783831357956,
0.0721123069524765,
-0.11332245916128159,
0.01850479654967785,
-0.022753212600946426,
-0.004409588407725096,
-0.03462060168385506,
0.0802483856678009,
0.051199715584516525,
0.005059461575001478,
-0.03507706895470619,
0.01562977395951748,
-0.07353270798921585,
0.04247539862990379,
-0.0376177653670311,
-0.0846576988697052,
-0.03260597959160805,
-0.0005764741217717528,
-0.013810860924422741,
0.061746615916490555,
-0.03629177436232567,
0.017773883417248726,
-0.03333498537540436,
0.027601011097431183,
0.03648409619927406,
-0.043294068425893784,
-0.01035246904939413,
-0.04021592065691948,
0.03263191878795624,
-0.07512155175209045,
0.03401060402393341,
0.004856814630329609,
-0.07600821554660797,
0.03174220398068428,
-0.0015242447843775153,
-0.04577356204390526,
0.04468996077775955,
-0.011386720463633537,
-0.0282733216881752,
-0.03574009984731674,
-0.027932142838835716,
-0.1004144474864006,
0.08483436703681946,
-0.034225352108478546,
-0.007465009111911058,
-0.07727450877428055,
0.09394197911024094,
0.021760717034339905,
0.03915124014019966,
-0.03364251181483269,
-0.022535167634487152,
-0.02921273186802864,
0.02684086002409458,
-0.023279283195734024,
0.0023650459479540586,
-0.027722662314772606,
-0.14069567620754242,
0.052134960889816284,
0.05218883976340294,
-0.0656355693936348,
-0.0448836088180542,
0.06613579392433167,
0.06128707155585289,
-0.009284550324082375,
0.03950546309351921,
0.014004128985106945,
-0.00816860981285572,
0.10851632803678513,
0.09544514119625092,
-0.026535658165812492,
-0.04172190651297569,
0.05362240970134735,
0.030374690890312195,
0.07496847212314606,
-0.002046620938926935,
-4.648956064556842e-8,
0.013352922163903713,
-0.036060307174921036,
-0.018536359071731567,
0.02332141436636448,
0.027367347851395607,
-0.015026678331196308,
-0.03576425090432167,
-0.07906880974769592,
-0.02041127346456051,
-0.12474274635314941,
0.045392975211143494,
0.02435535565018654,
-0.02068479172885418,
-0.0015537256840616465,
0.009010215289890766,
0.008607710711658001,
0.018398737534880638,
0.1656382828950882,
-0.03966172784566879,
0.03889279067516327,
-0.0012875335523858666,
-0.007400071248412132,
-0.0040609766729176044,
-0.007990028709173203,
0.02109796553850174,
0.048731688410043716,
-0.06589321047067642,
0.027565784752368927,
0.02484368532896042,
-0.04528801888227463,
-0.06456304341554642,
0.0132597666233778,
0.016271045431494713,
-0.01190253160893917,
-0.02374856173992157,
0.08928386867046356,
-0.024940645322203636,
-0.07241726666688919,
-0.013038333505392075,
0.0455324612557888,
0.044339630752801895,
-0.06422647833824158,
-0.08944414556026459,
0.01177692599594593,
0.04512159898877144,
0.010323875583708286,
0.0269467793405056,
-0.012040608562529087,
0.002686187857761979,
0.035981688648462296,
-0.026687853038311005,
-0.06309822201728821,
-0.13703100383281708,
-0.021265041083097458,
-0.03216612711548805,
-0.01299524400383234,
-0.039182089269161224,
-0.03832360357046127,
0.026610106229782104,
-0.007575692143291235,
0.0193029697984457,
0.041331712156534195,
0.020031411200761795,
0.029553471133112907
] |
anton-l/wav2vec2-base-ft-keyword-spotting | 30629617f4408a39489bec210f6b5127b6fbaafc | 2021-10-27T22:16:42.000Z | [
"pytorch",
"tensorboard",
"wav2vec2",
"audio-classification",
"dataset:superb",
"transformers",
"generated_from_trainer",
"license:apache-2.0",
"model-index"
] | audio-classification | false | anton-l | null | anton-l/wav2vec2-base-ft-keyword-spotting | 1,294 | 1 | transformers | ---
license: apache-2.0
tags:
- audio-classification
- generated_from_trainer
datasets:
- superb
metrics:
- accuracy
model-index:
- name: wav2vec2-base-ft-keyword-spotting
results: []
---
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# wav2vec2-base-ft-keyword-spotting
This model is a fine-tuned version of [facebook/wav2vec2-base](https://huggingface.co/facebook/wav2vec2-base) on the superb dataset.
It achieves the following results on the evaluation set:
- Loss: 0.0824
- Accuracy: 0.9826
## Model description
More information needed
## Intended uses & limitations
More information needed
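As a minimal usage sketch (not part of the original card), the checkpoint can be loaded with the `transformers` audio-classification pipeline; the audio file path below is a placeholder assumption.
```python
from transformers import pipeline

# Load the fine-tuned keyword-spotting checkpoint.
classifier = pipeline(
    "audio-classification",
    model="anton-l/wav2vec2-base-ft-keyword-spotting",
)

# "speech.wav" is a placeholder; the model expects 16 kHz mono audio.
for prediction in classifier("speech.wav", top_k=3):
    print(f"{prediction['label']}: {prediction['score']:.3f}")
```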
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training (a `TrainingArguments` sketch follows the list):
- learning_rate: 3e-05
- train_batch_size: 32
- eval_batch_size: 32
- seed: 0
- gradient_accumulation_steps: 4
- total_train_batch_size: 128
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 5.0
- mixed_precision_training: Native AMP
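As a rough illustration only (the original training script is not part of this card), the listed values correspond approximately to a `transformers.TrainingArguments` configuration like the sketch below; the output directory name is an assumption.
```python
from transformers import TrainingArguments

# Illustrative reconstruction of the hyperparameters listed above.
training_args = TrainingArguments(
    output_dir="wav2vec2-base-ft-keyword-spotting",  # assumed output path
    learning_rate=3e-5,
    per_device_train_batch_size=32,
    per_device_eval_batch_size=32,
    gradient_accumulation_steps=4,   # 32 * 4 = 128 total train batch size
    seed=0,
    lr_scheduler_type="linear",
    warmup_ratio=0.1,
    num_train_epochs=5.0,
    fp16=True,                       # native AMP mixed-precision training
)
```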
### Training results
| Training Loss | Epoch | Step | Validation Loss | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:--------:|
| 0.8972 | 1.0 | 399 | 0.7023 | 0.8174 |
| 0.3274 | 2.0 | 798 | 0.1634 | 0.9773 |
| 0.1993 | 3.0 | 1197 | 0.1048 | 0.9788 |
| 0.1777 | 4.0 | 1596 | 0.0824 | 0.9826 |
| 0.1527 | 5.0 | 1995 | 0.0812 | 0.9810 |
### Framework versions
- Transformers 4.12.0.dev0
- Pytorch 1.9.1+cu111
- Datasets 1.14.0
- Tokenizers 0.10.3
| [
-0.045499760657548904,
-0.08692844957113266,
-0.04379597678780556,
0.001070421072654426,
0.04682379588484764,
0.02066580019891262,
-0.05452127382159233,
-0.06722816824913025,
-0.08422106504440308,
-0.09025629609823227,
0.00651346892118454,
-0.09928359091281891,
-0.001714292331598699,
-0.030278895050287247,
-0.09539099782705307,
-0.003306073136627674,
0.07237686216831207,
-0.028762008994817734,
-0.04857184365391731,
0.018182553350925446,
0.02355746179819107,
0.0324547104537487,
0.10180456936359406,
0.028795646503567696,
0.011592294089496136,
0.043601419776678085,
-0.06841418147087097,
0.05331416800618172,
0.03162825480103493,
-0.04405129328370094,
0.1173088550567627,
0.05102883651852608,
0.03322306647896767,
0.00532812625169754,
-0.017328737303614616,
-0.03854627534747124,
-0.017605850473046303,
-0.043737560510635376,
0.0060198502615094185,
-0.05932271108031273,
0.002048708265647292,
-0.02489001303911209,
-0.045964255928993225,
0.06034115329384804,
-0.05016147345304489,
0.006949841510504484,
-0.0653260126709938,
-0.05890186131000519,
-0.05736193433403969,
0.029328538104891777,
-0.06920306384563446,
-0.11953998357057571,
0.04724428430199623,
0.045092131942510605,
-0.09964898973703384,
0.038929350674152374,
-0.007070890627801418,
0.05837917700409889,
0.025445422157645226,
0.01250562071800232,
0.02840169332921505,
-0.029456032440066338,
-0.026864588260650635,
-0.03820627182722092,
-0.03963015601038933,
-0.04428067430853844,
-0.07939376682043076,
0.00008685863576829433,
0.08688529580831528,
-0.03212115541100502,
-0.03293295577168465,
0.0970168188214302,
0.03802808001637459,
0.058899860829114914,
0.06681585311889648,
0.014944413676857948,
0.047506265342235565,
0.028071878477931023,
0.05079613998532295,
-0.0828096941113472,
0.005242691840976477,
-0.09561410546302795,
0.07066191732883453,
-0.05979035049676895,
0.0825272649526596,
0.01851246878504753,
0.023129990324378014,
-0.0632505789399147,
-0.04138766601681709,
-0.005998801905661821,
-0.06496749818325043,
0.004601971246302128,
0.01797928847372532,
0.02422945946455002,
0.007679136469960213,
0.05717352777719498,
-0.014327602460980415,
0.05071980133652687,
-0.019968878477811813,
0.07248549908399582,
-0.012369762174785137,
0.00894191861152649,
-0.01320404838770628,
-0.03533376008272171,
0.035455040633678436,
-0.07846773415803909,
-0.02854432538151741,
0.11482060700654984,
0.1064017191529274,
-0.05283486098051071,
0.07341278344392776,
0.039163075387477875,
-0.06201479211449623,
-0.011564800515770912,
0.061810798943042755,
0.13676461577415466,
-0.048737652599811554,
-0.004221420735120773,
-0.02044208161532879,
0.04272016882896423,
-0.04805203527212143,
0.019348803907632828,
-0.0517021082341671,
-0.012217889539897442,
-0.041054315865039825,
-0.07693109661340714,
-0.07110283523797989,
2.384512331162088e-33,
0.026078609749674797,
0.0511317178606987,
-0.024023020640015602,
-0.025163399055600166,
0.029336990788578987,
-0.09958574920892715,
-0.02746250294148922,
-0.019576942548155785,
0.03477809205651283,
-0.02376537024974823,
-0.05795670300722122,
-0.004506198689341545,
-0.023213116452097893,
0.05655573680996895,
0.030972938984632492,
-0.04635731130838394,
-0.08239852637052536,
0.005625616759061813,
0.022114505991339684,
0.04019128158688545,
0.1265682876110077,
0.002237169537693262,
0.0063493927009403706,
-0.02462702803313732,
0.03732999414205551,
0.05762126296758652,
0.028836432844400406,
0.047594066709280014,
0.05384238436818123,
0.05983729287981987,
-0.07109294831752777,
-0.05791592225432396,
0.00839723739773035,
-0.0426839217543602,
0.05748189613223076,
0.01427883468568325,
0.03308749943971634,
0.05816930904984474,
-0.029990117996931076,
-0.08189825713634491,
0.016662392765283585,
-0.009589754976332188,
-0.0019925294909626245,
-0.051167067140340805,
-0.050137873739004135,
-0.053529735654592514,
0.00974042247980833,
0.06030598655343056,
0.024165693670511246,
0.023331983014941216,
-0.031456395983695984,
-0.02155010960996151,
0.0026596221141517162,
0.08180589973926544,
-0.044676609337329865,
-0.04058987647294998,
0.08127471804618835,
0.06964527815580368,
-0.0175978671759367,
-0.03601524606347084,
0.053097814321517944,
0.020365087315440178,
0.030903169885277748,
-0.023090194910764694,
-0.06207241490483284,
-0.056363485753536224,
-0.0019195745699107647,
0.009849166497588158,
0.03366997838020325,
0.02195306494832039,
-0.036208443343639374,
0.018981872126460075,
0.060238517820835114,
-0.0019033692078664899,
0.023037394508719444,
-0.05096030607819557,
0.010259521193802357,
-0.07958384603261948,
-0.051476363092660904,
0.01424665842205286,
0.017822394147515297,
0.09636656939983368,
0.01284548919647932,
-0.07475627213716507,
-0.0659215897321701,
-0.03808861970901489,
-0.011164351366460323,
-0.1380247175693512,
-0.033538054674863815,
0.023338118568062782,
-0.06312116980552673,
0.09231390804052353,
-0.032632969319820404,
-0.031741611659526825,
-0.047032423317432404,
-4.0472094174658736e-33,
0.006341500673443079,
0.13307014107704163,
0.005186389200389385,
0.06313209235668182,
-0.010501779615879059,
0.052020322531461716,
0.07925347238779068,
0.1196262538433075,
-0.061672233045101166,
-0.03941136226058006,
0.10411321371793747,
-0.036234863102436066,
-0.03062049299478531,
-0.02737901173532009,
0.01058948878198862,
-0.05137354135513306,
-0.057034268975257874,
-0.008987348526716232,
0.038716837763786316,
0.039060838520526886,
0.04364147037267685,
0.07269200682640076,
-0.026422491297125816,
0.05159622058272362,
-0.014335190877318382,
0.0002535103412810713,
0.01922032982110977,
0.09413772076368332,
0.07869041711091995,
-0.033654049038887024,
-0.01866092160344124,
-0.01654093526303768,
-0.041548408567905426,
-0.019794506952166557,
0.004214374348521233,
0.012640869244933128,
0.04311740770936012,
-0.028701573610305786,
-0.030178043991327286,
0.058370351791381836,
0.019860394299030304,
0.0655873641371727,
-0.11511090397834778,
0.017252031713724136,
-0.021677862852811813,
-0.028567301109433174,
0.02308710105717182,
-0.0031180037185549736,
0.00002988959022331983,
-0.02539551444351673,
0.08928396552801132,
-0.062197472900152206,
-0.00978035107254982,
0.025293221697211266,
-0.027135640382766724,
-0.022066548466682434,
0.002758315997198224,
-0.023700863122940063,
-0.0588122196495533,
0.02372829243540764,
0.0005464577116072178,
0.04471859335899353,
-0.10918253660202026,
-0.054061051458120346,
0.01570248045027256,
-0.018781661987304688,
-0.0706639513373375,
0.04721324145793915,
0.016452806070446968,
0.06357770413160324,
-0.02309045009315014,
0.07016019523143768,
-0.009821848943829536,
0.03862306848168373,
-0.05528639256954193,
0.009784978814423084,
-0.062048498541116714,
-0.028264015913009644,
-0.04052164405584335,
-0.08442782610654831,
-0.040672775357961655,
0.027304569259285927,
0.08176740258932114,
0.01698281243443489,
0.12863251566886902,
0.08519255369901657,
0.020280683413147926,
0.03712623566389084,
-0.06139072775840759,
0.02301079034805298,
0.04922497645020485,
0.02841021679341793,
-0.0012555905850604177,
0.10611192137002945,
0.022888703271746635,
-5.243079925776328e-8,
-0.07654684036970139,
0.03247944638133049,
-0.033279068768024445,
0.012022084556519985,
-0.00040939298924058676,
-0.056590888649225235,
-0.005497712641954422,
0.002443690085783601,
0.007959770038723946,
-0.00739726098254323,
0.00853007286787033,
0.0044684866443276405,
-0.05655721202492714,
0.04145023971796036,
-0.0015700885560363531,
0.0066355145536363125,
-0.00994678121060133,
0.12425318360328674,
-0.03404254838824272,
-0.08286753296852112,
0.022706203162670135,
0.001265827682800591,
0.0009060919401235878,
-0.034500084817409515,
0.05608317255973816,
-0.07895608246326447,
-0.0573699064552784,
0.09919065237045288,
-0.05711949244141579,
-0.022538047283887863,
-0.042202726006507874,
0.040832605212926865,
0.01662171259522438,
-0.11442648619413376,
0.010349411517381668,
0.09330170601606369,
-0.07532963156700134,
-0.0426669679582119,
-0.030706824734807014,
0.04735897108912468,
-0.01962961256504059,
0.09602118283510208,
-0.035728078335523605,
0.0040596844628453255,
0.03216549754142761,
-0.057001613080501556,
0.04380897432565689,
-0.06286069005727768,
0.030165405943989754,
0.02248535118997097,
-0.0077290586195886135,
0.055996332317590714,
-0.04585614055395126,
-0.016597118228673935,
0.058725859969854355,
0.04918746277689934,
-0.08455633372068405,
0.017927203327417374,
-0.047796495258808136,
-0.0011362152872607112,
0.018299506977200508,
0.012689926661550999,
-0.015990307554602623,
0.0018261584918946028
] |
moussaKam/barthez-orangesum-abstract | 2f4969c2f16bf27aaddb87bf9b862ccead48135b | 2021-11-15T13:03:03.000Z | [
"pytorch",
"mbart",
"text2text-generation",
"fr",
"arxiv:2010.12321",
"transformers",
"summarization",
"bart",
"license:apache-2.0",
"autotrain_compatible"
] | summarization | false | moussaKam | null | moussaKam/barthez-orangesum-abstract | 1,294 | 1 | transformers | ---
tags:
- summarization
- bart
language:
- fr
license: apache-2.0
widget:
- text: Citant les préoccupations de ses clients dénonçant des cas de censure après la suppression du compte de Trump, un fournisseur d'accès Internet de l'État de l'Idaho a décidé de bloquer Facebook et Twitter. La mesure ne concernera cependant que les clients mécontents de la politique de ces réseaux sociaux.
---
### BARThez model fine-tuned on OrangeSum (abstract generation)
fine-tuning: examples/seq2seq (as of Feb 08 2021)
paper: https://arxiv.org/abs/2010.12321 \
github: https://github.com/moussaKam/BARThez
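A minimal usage sketch (not part of the original card; it assumes the standard `transformers` summarization pipeline works with this checkpoint, and the input article is an arbitrary placeholder):
```python
from transformers import pipeline

# Hedged sketch: load the checkpoint in a standard summarization pipeline.
summarizer = pipeline("summarization", model="moussaKam/barthez-orangesum-abstract")

# Placeholder French article; replace with real input text.
article = "Votre article de presse en français ici."
print(summarizer(article, max_length=64)[0]["summary_text"])
```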
```
@article{eddine2020barthez,
title={BARThez: a Skilled Pretrained French Sequence-to-Sequence Model},
author={Eddine, Moussa Kamal and Tixier, Antoine J-P and Vazirgiannis, Michalis},
journal={arXiv preprint arXiv:2010.12321},
year={2020}
}
```
| [
-0.12664645910263062,
-0.006200073752552271,
0.05424751713871956,
0.005499940365552902,
-0.006751122884452343,
0.05135973542928696,
0.0007872793357819319,
0.04426664113998413,
0.08659489452838898,
-0.054991465061903,
0.03626478090882301,
-0.01099066250026226,
0.07608739286661148,
-0.050176847726106644,
-0.03594595193862915,
0.03804871067404747,
0.020246269181370735,
-0.025912996381521225,
-0.010499892756342888,
-0.04497483745217323,
0.03877639397978783,
-0.009155074134469032,
0.02510213293135166,
0.014497162774205208,
0.014876946806907654,
-0.06366642564535141,
-0.05490531772375107,
0.04929012805223465,
0.08788151293992996,
-0.03679246827960014,
0.040901921689510345,
0.07406765967607498,
0.07515044510364532,
0.05096980929374695,
-0.0019243916030973196,
-0.002983620623126626,
0.04002084583044052,
-0.07664111256599426,
-0.029215509071946144,
0.08313923329114914,
0.02532869018614292,
0.025789201259613037,
-0.07034587115049362,
-0.004913415294140577,
0.04293660819530487,
-0.06352967023849487,
-0.03912802413105965,
-0.008313724771142006,
-0.10697656869888306,
0.004347347654402256,
-0.1315186619758606,
0.037078745663166046,
0.0054950471967458725,
0.023476283997297287,
0.01832699589431286,
-0.0581977553665638,
-0.05490215867757797,
-0.04212454333901405,
0.045469462871551514,
-0.05737221986055374,
0.062124673277139664,
-0.017830507829785347,
-0.0629223883152008,
-0.014162133447825909,
0.03248307481408119,
-0.0006606079405173659,
0.008552134037017822,
0.0759403258562088,
-0.08327417075634003,
0.1047876849770546,
-0.07167718559503555,
0.04803236201405525,
-0.027768682688474655,
0.05062127858400345,
-0.0007495057070627809,
0.06740106642246246,
0.047956280410289764,
0.031050963327288628,
0.002062359359115362,
-0.14267240464687347,
-0.02676445059478283,
-0.027585681527853012,
-0.024815676733851433,
-0.027250191196799278,
-0.037989795207977295,
0.029644068330526352,
0.002332756295800209,
-0.05778293311595917,
0.07945579290390015,
0.030046502128243446,
-0.01970747672021389,
-0.05939177796244621,
0.04419880360364914,
0.02387205697596073,
-0.026579326018691063,
0.052134424448013306,
0.061240363866090775,
0.012424835935235023,
0.022456122562289238,
0.12763871252536774,
0.08266939222812653,
0.07861114293336868,
0.04936138913035393,
-0.11202940344810486,
-0.06917376071214676,
-0.05872589722275734,
-0.049008212983608246,
0.01080105546861887,
-0.0034468320664018393,
-0.023350326344370842,
0.04220950976014137,
-0.03314333036541939,
-0.016054552048444748,
-0.012501920573413372,
-0.03506904095411301,
0.003368066856637597,
0.08582893759012222,
-0.059895049780607224,
0.09416483342647552,
-0.04915149509906769,
0.05026419460773468,
0.008301500231027603,
-0.0675087422132492,
0.013102902099490166,
-0.040335673838853836,
-0.03901097550988197,
-0.0033951241057366133,
1.2506228396456293e-32,
0.07366061955690384,
-0.008995624259114265,
0.018418412655591965,
0.024194182828068733,
0.018045110628008842,
0.01789400912821293,
-0.04705291986465454,
-0.015803739428520203,
-0.10164295881986618,
-0.003865306731313467,
-0.0026547438465058804,
0.01909034326672554,
-0.06688901036977768,
0.04467013105750084,
0.041152145713567734,
-0.06811286509037018,
-0.002833107253536582,
-0.008644613437354565,
0.000030714963941136375,
0.012961294502019882,
0.09760086983442307,
-0.04043329879641533,
-0.01550512108951807,
0.009159167297184467,
0.03079167567193508,
0.11971839517354965,
0.04992905259132385,
-0.05335003882646561,
-0.05212666839361191,
0.05061928182840347,
-0.07443968951702118,
0.05473913252353668,
-0.016520371660590172,
0.0563490092754364,
0.04478272795677185,
0.022845495492219925,
-0.0030559778679162264,
-0.04136114940047264,
0.007904517464339733,
-0.11500869691371918,
-0.028338897973299026,
0.03368459269404411,
0.014602795243263245,
-0.00496660266071558,
-0.06426934897899628,
-0.06117241829633713,
0.030440010130405426,
-0.0011946100275963545,
0.09302590042352676,
-0.02538013830780983,
-0.02196788229048252,
-0.0052276900969445705,
-0.006119946017861366,
-0.04697411507368088,
0.0150690209120512,
-0.012491717003285885,
-0.02117704413831234,
0.07479391247034073,
-0.00041223480366170406,
0.029788408428430557,
-0.021085161715745926,
0.028653787449002266,
0.027867933735251427,
0.0423060804605484,
0.025196263566613197,
0.04076648876070976,
-0.09409380704164505,
0.08598253130912781,
0.07715924084186554,
-0.05218927562236786,
0.018758172169327736,
-0.008060789667069912,
-0.019566573202610016,
0.02633081190288067,
0.03531092777848244,
-0.0480140782892704,
-0.0057955230586230755,
-0.09187255054712296,
-0.02628595195710659,
0.0102598387748003,
-0.08599471300840378,
-0.03519250825047493,
0.004782826639711857,
0.01415614690631628,
-0.09458541870117188,
-0.035247236490249634,
0.1254115104675293,
0.07222139835357666,
-0.02343597821891308,
-0.03639199584722519,
0.027972161769866943,
0.012246906757354736,
0.04473710432648659,
0.028565408661961555,
-0.004861380904912949,
-1.3314227948381008e-32,
-0.04112499579787254,
0.02819899097084999,
-0.07757025957107544,
0.009800194762647152,
0.002093222690746188,
-0.10213685035705566,
0.01385790016502142,
0.12010674178600311,
0.018918011337518692,
-0.06721607595682144,
-0.008968526497483253,
-0.07282284647226334,
0.008171236142516136,
0.04663002863526344,
0.06418823450803757,
-0.08096013963222504,
0.028915632516145706,
-0.04167364537715912,
-0.03254793584346771,
0.0745772123336792,
0.009570158086717129,
0.0035007710102945566,
-0.09354956448078156,
0.051225945353507996,
0.07007749378681183,
0.02626390941441059,
0.0051618050783872604,
0.013105406425893307,
-0.059644706547260284,
-0.00490961829200387,
-0.02685573324561119,
-0.0407937690615654,
-0.035276032984256744,
0.02798798680305481,
-0.06820934265851974,
-0.015974797308444977,
0.07979326695203781,
0.03549621254205704,
-0.02612689882516861,
0.03261890262365341,
0.062385689467191696,
-0.014910595491528511,
-0.046408336609601974,
0.027114031836390495,
0.014757036231458187,
0.03226115182042122,
-0.06512144207954407,
-0.05402294918894768,
0.016734560951590538,
-0.01367766223847866,
0.037020836025476456,
0.05091875046491623,
-0.0634545236825943,
0.02535005286335945,
-0.13252907991409302,
-0.022737544029951096,
0.02893051877617836,
-0.03983743116259575,
-0.06527799367904663,
-0.06309575587511063,
-0.09730849415063858,
0.02724413201212883,
-0.017681729048490524,
-0.04272347316145897,
0.09438426047563553,
-0.13590313494205475,
-0.06080842390656471,
-0.04656459763646126,
0.031321290880441666,
-0.09829346090555191,
0.021534085273742676,
0.022452745586633682,
-0.038885630667209625,
0.008395630866289139,
0.0686529204249382,
-0.05787504464387894,
-0.012173163704574108,
0.036220304667949677,
-0.006749631837010384,
0.0208137147128582,
-0.10034066438674927,
0.008382532745599747,
0.048614587634801865,
0.06128116697072983,
0.018133867532014847,
-0.013160085305571556,
0.008372699841856956,
0.0682559609413147,
-0.0585484653711319,
0.02721140906214714,
0.000996819231659174,
-0.021407829597592354,
-0.03039461001753807,
0.1267915964126587,
-0.002890946576371789,
-6.852846468063944e-8,
-0.07781095802783966,
-0.03815082460641861,
-0.10252329707145691,
0.021160850301384926,
0.01920047588646412,
-0.06500818580389023,
-0.025682669132947922,
-0.02396957203745842,
-0.034602098166942596,
0.018838435411453247,
-0.04497080296278,
0.12122514843940735,
-0.05604308471083641,
-0.013433950953185558,
-0.05531506612896919,
0.05645119026303291,
0.025731204077601433,
0.0015765749849379063,
-0.06324572116136551,
0.006377974525094032,
0.0012662018416449428,
-0.005433950573205948,
-0.06604793667793274,
0.0030696303583681583,
0.03723617643117905,
-0.015976326540112495,
-0.1207963302731514,
0.022680645808577538,
0.027442891150712967,
-0.05027239769697189,
0.037790071219205856,
0.0326104499399662,
-0.043487511575222015,
-0.03302166610956192,
-0.0143198911100626,
0.07264049351215363,
-0.02156379260122776,
-0.023369180038571358,
0.01653246581554413,
0.01161385327577591,
0.06626557558774948,
0.03411512076854706,
-0.09998154640197754,
-0.011188498698174953,
0.07436440885066986,
-0.021458357572555542,
-0.07193612307310104,
-0.0386250838637352,
0.05504098907113075,
0.00747592793777585,
-0.04367139935493469,
-0.05942066013813019,
0.008952135220170021,
-0.04764026030898094,
0.013794544152915478,
0.006635123398154974,
0.02045847289264202,
-0.022886848077178,
0.08106550574302673,
-0.0010748904896900058,
0.08451724052429199,
0.052920181304216385,
0.04371224343776703,
0.01164164673537016
] |
uclanlp/plbart-python-en_XX | 48bf6e4889bdb9bafd12381a4e9a9a1e0fe224eb | 2021-11-09T17:09:27.000Z | [
"pytorch",
"plbart",
"text2text-generation",
"transformers",
"autotrain_compatible"
] | text2text-generation | false | uclanlp | null | uclanlp/plbart-python-en_XX | 1,292 | 1 | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
valhalla/gpt-neo-random-tiny | 6e358e9d007d3bf2f592832a2e1c4dce15fe409a | 2021-04-07T16:38:40.000Z | [
"pytorch",
"gpt_neo",
"feature-extraction",
"transformers"
] | feature-extraction | false | valhalla | null | valhalla/gpt-neo-random-tiny | 1,292 | null | transformers | **This model is uploaded for testing purposes. It is a random model, not trained on anything.** | [
-0.03921431675553322,
-0.03463711962103844,
-0.008921454660594463,
0.03261713683605194,
0.09164424985647202,
-0.022051189094781876,
0.026831915602087975,
-0.08626741170883179,
-0.0856584832072258,
-0.0013496202882379293,
0.07263743132352829,
-0.04978799447417259,
0.030874188989400864,
0.010920241475105286,
-0.11631591618061066,
-0.014878788962960243,
0.0459459125995636,
-0.11561084538698196,
-0.06584278494119644,
0.048123911023139954,
-0.02869522012770176,
0.01910373382270336,
0.035975292325019836,
0.058257605880498886,
-0.0665159597992897,
-0.01139555312693119,
-0.0075302510522305965,
0.011711562052369118,
-0.003792828181758523,
-0.11432693898677826,
0.015842577442526817,
0.03281534090638161,
-0.0006576874293386936,
-0.001671551843173802,
0.05288546159863472,
-0.05441116541624069,
-0.013915158808231354,
-0.07061530649662018,
-0.0012023351155221462,
-0.010951632633805275,
0.0027756853960454464,
-0.14258970320224762,
-0.004181684460490942,
0.05846119299530983,
0.03887341916561127,
0.01418454758822918,
0.0021994311828166246,
-0.0926126167178154,
-0.034534115344285965,
0.0026492346078157425,
-0.14848577976226807,
-0.1306236982345581,
0.031095420941710472,
-0.01191633939743042,
-0.028901850804686546,
-0.02297666296362877,
-0.015587763860821724,
-0.05638357251882553,
-0.008228689432144165,
-0.00038033685996197164,
-0.002686403226107359,
-0.019487598910927773,
-0.0846768319606781,
0.04355407506227493,
-0.004987731110304594,
0.02977611869573593,
-0.07393628358840942,
-0.02369491569697857,
0.03176412731409073,
-0.07777104526758194,
0.02910965122282505,
0.059051115065813065,
-0.030564725399017334,
0.06167614459991455,
0.048282064497470856,
-0.028056176379323006,
0.05108450725674629,
0.022606968879699707,
0.11733096092939377,
-0.06316924095153809,
-0.043072860687971115,
-0.12561912834644318,
0.009560544975101948,
0.0019522380316630006,
0.036386311054229736,
0.06576066464185715,
0.04606669768691063,
0.02585497312247753,
-0.028855497017502785,
-0.013211884535849094,
-0.0008288481622003019,
0.03012828901410103,
-0.01857520453631878,
0.042831484228372574,
-0.08539406210184097,
0.06882122904062271,
0.060641564428806305,
-0.013033500872552395,
-0.10679762065410614,
0.1004486083984375,
0.026995617896318436,
-0.006127545610070229,
0.05736587569117546,
0.09756872057914734,
-0.028650617226958275,
-0.014874056912958622,
0.03097834810614586,
-0.0456680990755558,
0.08481850475072861,
-0.07394738495349884,
0.0023576233070343733,
0.024315575137734413,
-0.049276478588581085,
-0.03751281276345253,
0.008889957331120968,
0.025661852210760117,
-0.10180047154426575,
-0.001333401189185679,
-0.11937408149242401,
0.09444999694824219,
-0.04881434142589569,
-0.028477665036916733,
-0.05009572580456734,
-0.01006770133972168,
0.009869429282844067,
-0.06504815816879272,
-0.029870254918932915,
-2.3492566841870353e-33,
0.03196984529495239,
0.009628933854401112,
0.04612177982926369,
0.0063120462000370026,
0.055120017379522324,
-0.040954895317554474,
0.0019546588882803917,
0.03200256824493408,
-0.01963021419942379,
0.022938968613743782,
-0.05075331777334213,
-0.044133253395557404,
-0.058243051171302795,
0.12990665435791016,
0.0236952006816864,
0.005445168353617191,
0.010508072562515736,
0.012304756790399551,
-0.0014111524214968085,
0.04873870685696602,
0.16338838636875153,
-0.032145529985427856,
-0.05833430215716362,
-0.11139389127492905,
-0.016796207055449486,
0.10109234601259232,
0.004246930126100779,
0.014898432418704033,
0.03953096643090248,
0.09706960618495941,
-0.02481774240732193,
0.04763065651059151,
-0.014671020209789276,
-0.0185806043446064,
-0.029759369790554047,
0.011819177307188511,
0.009407933801412582,
0.014239626936614513,
-0.06673397123813629,
-0.10775092989206314,
0.00873616524040699,
-0.018203649669885635,
-0.013166782446205616,
0.016693824902176857,
-0.004731523338705301,
0.034397874027490616,
0.07260353118181229,
-0.03272683173418045,
0.009327387437224388,
0.059241414070129395,
-0.03351728245615959,
0.022062964737415314,
-0.04490206018090248,
-0.03828195109963417,
-0.008245093747973442,
0.08055730909109116,
0.004114966373890638,
0.01453391183167696,
0.022565031424164772,
0.056544333696365356,
0.05244631692767143,
-0.05481557175517082,
-0.029227809980511665,
-0.007156194653362036,
-0.01287929993122816,
0.003733087796717882,
-0.018626436591148376,
-0.09400435537099838,
0.1241593137383461,
0.03677143529057503,
-0.07089100778102875,
-0.007425177842378616,
-0.03337475284934044,
0.023309940472245216,
-0.028551766648888588,
-0.10679646581411362,
-0.015620455145835876,
0.057156212627887726,
-0.029719248414039612,
0.01895214058458805,
-0.014598268084228039,
-0.00338385789655149,
-0.02139771729707718,
-0.09623996913433075,
-0.0663362443447113,
-0.02225062996149063,
-0.025699926540255547,
-0.08732722699642181,
-0.02696298621594906,
0.051589105278253555,
0.04214482754468918,
0.01722870208323002,
-0.06382511556148529,
0.051911335438489914,
0.029665758833289146,
-9.490545577550708e-34,
-0.07157176733016968,
0.07511025667190552,
-0.07220814377069473,
0.04054791480302811,
0.019498050212860107,
-0.03894009813666344,
0.009122890420258045,
0.0806281790137291,
-0.026300419121980667,
0.07778388261795044,
0.04374517872929573,
0.012080186046659946,
0.002190820872783661,
-0.03524919971823692,
0.05443013086915016,
0.0021567000076174736,
-0.02844453603029251,
-0.034909021109342575,
-0.020796848461031914,
0.03802867233753204,
0.000977418152615428,
0.10695039480924606,
-0.019093232229351997,
0.04552255943417549,
-0.023266276344656944,
0.07574168592691422,
0.014984939247369766,
0.09318216145038605,
0.08914877474308014,
0.018978403881192207,
-0.023141544312238693,
0.01942022517323494,
-0.04578781872987747,
0.02327834628522396,
-0.020945167168974876,
0.009170508943498135,
0.10278645902872086,
-0.02993146888911724,
0.07661651074886322,
0.039685435593128204,
0.03606748580932617,
0.04402342438697815,
-0.09874139726161957,
0.03410632163286209,
-0.016575276851654053,
-0.012317650951445103,
0.04314398765563965,
-0.029413918033242226,
0.01215856708586216,
0.02407914213836193,
0.016988176852464676,
-0.05068254843354225,
-0.037503909319639206,
-0.04648730158805847,
-0.03940650075674057,
0.05595828965306282,
-0.042043522000312805,
0.03039536252617836,
-0.00897987186908722,
0.06386929750442505,
-0.0048088072799146175,
-0.0038003905210644007,
-0.09110479801893234,
-0.013313314877450466,
-0.04434329271316528,
-0.05112939700484276,
-0.01743469014763832,
0.038875117897987366,
0.04212714731693268,
0.030690276995301247,
0.05911897122859955,
0.10898266732692719,
-0.047546468675136566,
-0.00951620377600193,
-0.011358549818396568,
-0.06527683138847351,
-0.05376303195953369,
-0.009610626846551895,
-0.00514017790555954,
-0.04013066738843918,
-0.017488421872258186,
-0.021088212728500366,
-0.02098684199154377,
0.05732960253953934,
0.09168017655611038,
0.013880105689167976,
0.054821740835905075,
0.01121179573237896,
-0.03540685400366783,
0.0027391493786126375,
-0.022711150348186493,
0.07786549627780914,
0.06727591156959534,
0.10334816575050354,
-0.04630541428923607,
-2.670840082430459e-8,
-0.0036052765790373087,
-0.0062420363537967205,
0.056445494294166565,
0.043497130274772644,
0.029255308210849762,
0.021324152126908302,
-0.006503313779830933,
-0.02111385576426983,
-0.0050834775902330875,
-0.04602411761879921,
-0.04033796489238739,
0.01736479066312313,
-0.05914390832185745,
0.05599822849035263,
0.05063356086611748,
-0.03221585601568222,
-0.07834203541278839,
0.13828341662883759,
-0.041030965745449066,
-0.07864419370889664,
0.054611675441265106,
-0.0032172156497836113,
0.031273093074560165,
-0.06064078211784363,
0.04825339466333389,
-0.010974748991429806,
-0.015595645643770695,
0.09084372967481613,
-0.03332040458917618,
0.020999619737267494,
-0.0024058897979557514,
0.007929918356239796,
-0.035521965473890305,
-0.03305725008249283,
-0.0648617148399353,
0.09379291534423828,
0.022975308820605278,
0.01188230887055397,
0.0097776148468256,
-0.026392871513962746,
-0.04247669130563736,
0.03683169186115265,
0.008477514609694481,
-0.015082859434187412,
-0.012637901119887829,
0.0016676902305334806,
-0.0140580665320158,
-0.13906452059745789,
0.02957358956336975,
-0.020114831626415253,
0.019549623131752014,
-0.032044634222984314,
-0.029155613854527473,
0.03980685770511627,
0.01128687895834446,
0.03963175788521767,
0.006664153188467026,
-0.04290459677577019,
-0.0290325079113245,
0.03196628764271736,
0.09671109914779663,
-0.018678616732358932,
-0.02525738626718521,
-0.03737704083323479
] |
Helsinki-NLP/opus-mt-ka-en | f6f4a42415aa81a926f6596654cfcbd37cefc214 | 2020-08-21T14:42:47.000Z | [
"pytorch",
"marian",
"text2text-generation",
"ka",
"en",
"transformers",
"translation",
"license:apache-2.0",
"autotrain_compatible"
] | translation | false | Helsinki-NLP | null | Helsinki-NLP/opus-mt-ka-en | 1,288 | null | transformers | ---
language:
- ka
- en
tags:
- translation
license: apache-2.0
---
### kat-eng
* source group: Georgian
* target group: English
* OPUS readme: [kat-eng](https://github.com/Helsinki-NLP/Tatoeba-Challenge/tree/master/models/kat-eng/README.md)
* model: transformer-align
* source language(s): kat
* target language(s): eng
* model: transformer-align
* pre-processing: normalization + SentencePiece (spm12k,spm12k)
* download original weights: [opus-2020-06-16.zip](https://object.pouta.csc.fi/Tatoeba-MT-models/kat-eng/opus-2020-06-16.zip)
* test set translations: [opus-2020-06-16.test.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/kat-eng/opus-2020-06-16.test.txt)
* test set scores: [opus-2020-06-16.eval.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/kat-eng/opus-2020-06-16.eval.txt)
## Benchmarks
| testset | BLEU | chr-F |
|-----------------------|-------|-------|
| Tatoeba-test.kat.eng | 37.9 | 0.538 |
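As an illustration only (not part of the original card), the checkpoint can presumably be used with the standard MarianMT classes from `transformers`; the Georgian input sentence below is an arbitrary example:
```python
from transformers import MarianMTModel, MarianTokenizer

# Hedged usage sketch for the Georgian-to-English checkpoint.
model_name = "Helsinki-NLP/opus-mt-ka-en"
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)

batch = tokenizer(["გამარჯობა, როგორ ხარ?"], return_tensors="pt", padding=True)
generated = model.generate(**batch)
print(tokenizer.batch_decode(generated, skip_special_tokens=True))
```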
### System Info:
- hf_name: kat-eng
- source_languages: kat
- target_languages: eng
- opus_readme_url: https://github.com/Helsinki-NLP/Tatoeba-Challenge/tree/master/models/kat-eng/README.md
- original_repo: Tatoeba-Challenge
- tags: ['translation']
- languages: ['ka', 'en']
- src_constituents: {'kat'}
- tgt_constituents: {'eng'}
- src_multilingual: False
- tgt_multilingual: False
- prepro: normalization + SentencePiece (spm12k,spm12k)
- url_model: https://object.pouta.csc.fi/Tatoeba-MT-models/kat-eng/opus-2020-06-16.zip
- url_test_set: https://object.pouta.csc.fi/Tatoeba-MT-models/kat-eng/opus-2020-06-16.test.txt
- src_alpha3: kat
- tgt_alpha3: eng
- short_pair: ka-en
- chrF2_score: 0.5379999999999999
- bleu: 37.9
- brevity_penalty: 0.991
- ref_len: 5992.0
- src_name: Georgian
- tgt_name: English
- train_date: 2020-06-16
- src_alpha2: ka
- tgt_alpha2: en
- prefer_old: False
- long_pair: kat-eng
- helsinki_git_sha: 480fcbe0ee1bf4774bcbe6226ad9f58e63f6c535
- transformers_git_sha: 2207e5d8cb224e954a7cba69fa4ac2309e9ff30b
- port_machine: brutasse
- port_time: 2020-08-21-14:41 | [
-0.09256701171398163,
0.009610931389033794,
0.0370548851788044,
-0.022286608815193176,
-0.04742299020290375,
0.018031345680356026,
-0.03954274579882622,
-0.041404809802770615,
0.02178995870053768,
0.0006230054423213005,
0.02941058576107025,
-0.07550972700119019,
-0.018682511523365974,
-0.04291911423206329,
-0.034768249839544296,
0.003684066003188491,
-0.03999960795044899,
0.0872039645910263,
-0.1064484566450119,
-0.055560022592544556,
0.017503270879387856,
0.04986782371997833,
0.018415533006191254,
0.05012118071317673,
0.10630352795124054,
0.06662138551473618,
-0.05724916607141495,
-0.013864273205399513,
0.06231953576207161,
-0.058556485921144485,
-0.011913291178643703,
0.030436499044299126,
0.01728055439889431,
0.0740571841597557,
0.025363262742757797,
0.08819358050823212,
-0.07659297436475754,
-0.08809928596019745,
0.024579321965575218,
0.010772989131510258,
0.002252692123875022,
0.001923486590385437,
-0.015605621039867401,
-0.042493786662817,
0.042446717619895935,
0.028918983414769173,
-0.04480602219700813,
0.01842505857348442,
0.000663481536321342,
-0.010650424286723137,
-0.13775834441184998,
-0.026634808629751205,
-0.014386159367859364,
0.028074992820620537,
-0.05630239471793175,
0.02825232967734337,
0.038435231894254684,
-0.01920253410935402,
0.05945925787091255,
-0.05998840928077698,
-0.09892784804105759,
-0.0031492519192397594,
-0.07341402769088745,
0.029873820021748543,
-0.013617703691124916,
-0.019075358286499977,
0.02844959869980812,
0.040884941816329956,
-0.0799432322382927,
0.05816502869129181,
-0.029014451429247856,
0.008238017559051514,
0.018935440108180046,
0.03533106669783592,
-0.01133174728602171,
0.011914745904505253,
0.03595553711056709,
-0.027040963992476463,
0.0357891209423542,
-0.05878719314932823,
0.02747761644423008,
-0.029207905754446983,
0.06413070857524872,
0.015130957588553429,
0.04102646932005882,
0.004350363276898861,
-0.013394482433795929,
-0.009859410114586353,
0.03230392560362816,
0.03126313537359238,
-0.0004278614651411772,
-0.025483829900622368,
0.048632752150297165,
0.03748349845409393,
-0.004839435685425997,
0.03704032301902771,
0.012638865038752556,
0.04511478543281555,
-0.009534930810332298,
0.10489687323570251,
0.033809058368206024,
0.02945490926504135,
0.09553403407335281,
-0.022489573806524277,
-0.11169840395450592,
-0.07452241331338882,
0.02643752284348011,
0.01649511232972145,
-0.018502118065953255,
-0.07723341882228851,
0.05336347967386246,
-0.03482648357748985,
-0.006661561317741871,
-0.052429378032684326,
0.007894502021372318,
-0.08145388960838318,
0.00524168461561203,
-0.02455158531665802,
0.013658091425895691,
0.030370423570275307,
-0.03766477108001709,
0.017398076131939888,
-0.055436477065086365,
0.022536974400281906,
-0.05491184443235397,
-0.009456438012421131,
-0.012496203184127808,
1.8584181814126425e-33,
0.060676656663417816,
0.02761613018810749,
0.008702170103788376,
0.030428001657128334,
-0.04268362745642662,
-0.006421769503504038,
-0.06136195361614227,
-0.03112618811428547,
-0.10640832781791687,
-0.016233032569289207,
-0.035229653120040894,
-0.0006340927793644369,
-0.11382980644702911,
-0.004662718623876572,
-0.045189786702394485,
-0.0005726250819861889,
0.015577130019664764,
0.03357180953025818,
-0.022187842056155205,
0.05186241865158081,
0.06499026715755463,
0.027822960168123245,
-0.020794149488210678,
-0.06213470920920372,
-0.08808912336826324,
0.07440274953842163,
0.04766269028186798,
-0.14755503833293915,
-0.13178375363349915,
0.018917011097073555,
-0.06427910923957825,
-0.026308095082640648,
-0.03111102245748043,
-0.009129666723310947,
-0.013694696128368378,
-0.06800732016563416,
0.04737755283713341,
-0.05435444042086601,
-0.06508655101060867,
-0.06841457635164261,
-0.020543968304991722,
0.038031041622161865,
0.021102553233504295,
-0.03688422217965126,
0.046690814197063446,
0.021037695929408073,
0.02791375294327736,
0.035461295396089554,
0.10912958532571793,
0.01199551485478878,
0.014059703797101974,
0.022678259760141373,
-0.05419469252228737,
0.0011128640035167336,
0.02177734300494194,
0.11444062739610672,
0.1043185368180275,
0.002526300959289074,
0.03918810933828354,
0.0561346597969532,
0.02363034151494503,
0.007008361630141735,
0.056908685714006424,
-0.015965942293405533,
0.14088311791419983,
-0.02691446989774704,
-0.05194682627916336,
-0.04323156550526619,
0.05129565671086311,
0.027842553332448006,
-0.10420089215040207,
-0.05428624525666237,
0.06662456691265106,
0.11307059973478317,
0.047690317034721375,
-0.03504190593957901,
0.01090840995311737,
-0.053552091121673584,
-0.03889508172869682,
-0.004380388185381889,
-0.08438854664564133,
0.016140462830662727,
0.010335729457437992,
-0.06195242702960968,
-0.024405770003795624,
-0.034779567271471024,
0.057365622371435165,
-0.09059996902942657,
-0.009574010968208313,
0.011307848617434502,
0.0017560324631631374,
0.06065136194229126,
-0.0757884755730629,
-0.011792869307100773,
0.01627134345471859,
-2.7291553362086823e-33,
0.055155232548713684,
0.018960360437631607,
-0.05807536467909813,
0.07697884738445282,
0.0006591990822926164,
-0.08405157178640366,
0.029467863962054253,
0.10623669624328613,
0.06924837827682495,
0.04510939493775368,
0.03099605068564415,
-0.08220425248146057,
0.041399016976356506,
-0.032958224415779114,
0.07482434064149857,
-0.030658181756734848,
0.08469976484775543,
0.041302360594272614,
0.026319608092308044,
0.05915351212024689,
0.005695666652172804,
0.10544222593307495,
-0.09483950585126877,
0.09716267138719559,
0.03496166318655014,
-0.02492169477045536,
-0.035961657762527466,
0.09917432069778442,
0.002381846308708191,
0.007512818556278944,
0.018247978761792183,
-0.05382402241230011,
-0.05665677785873413,
0.02569935843348503,
-0.10718545317649841,
0.010973895899951458,
0.0003555201692506671,
0.050403792411088943,
0.04195030778646469,
0.05513691157102585,
0.04017028212547302,
0.04955508932471275,
-0.04748883098363876,
-0.00037857674760743976,
0.010603184811770916,
-0.014829753898084164,
-0.020439909771084785,
0.0031365167815238237,
-0.017183057963848114,
-0.07652954012155533,
0.014684347435832024,
0.012664835900068283,
-0.039539843797683716,
-0.04013606905937195,
0.0010432726703584194,
-0.032751452177762985,
-0.01206283364444971,
-0.14400643110275269,
-0.04441835731267929,
-0.04268240928649902,
-0.010647016577422619,
0.0190648902207613,
0.030429035425186157,
-0.09895574301481247,
0.03310050070285797,
0.001285601407289505,
0.0722813680768013,
-0.01011411752551794,
0.018694046884775162,
0.021465139463543892,
-0.008427873253822327,
-0.04381535202264786,
0.014104771427810192,
0.037211645394563675,
0.02103518880903721,
-0.046708591282367706,
-0.03649669513106346,
0.06302617490291595,
0.07970132678747177,
-0.0943903997540474,
-0.049814678728580475,
0.00946997944265604,
0.02855103462934494,
0.04119909927248955,
0.0967826321721077,
0.06637614220380783,
0.03518105670809746,
0.02066727727651596,
0.03610818833112717,
0.05220338702201843,
0.004258825443685055,
0.039339322596788406,
0.025825142860412598,
0.10398068279027939,
-0.008175588212907314,
-5.4855817666066287e-8,
-0.09588748216629028,
-0.03216106444597244,
-0.1418714076280594,
0.05160865932703018,
-0.03901607543230057,
-0.0563184879720211,
-0.04828432574868202,
-0.06567200273275375,
-0.044676050543785095,
-0.06106765195727348,
-0.023223765194416046,
-0.015471567399799824,
-0.09109140187501907,
0.0039772591553628445,
-0.014317566528916359,
0.05039038136601448,
-0.020578384399414062,
0.07893946766853333,
-0.0559542216360569,
-0.038674116134643555,
0.054789863526821136,
0.056563373655080795,
0.028821293264627457,
-0.044135477393865585,
-0.002709413878619671,
0.03097633458673954,
-0.05791782960295677,
0.031964510679244995,
0.054887376725673676,
-0.01655578427016735,
0.01547464169561863,
0.02253478206694126,
-0.03244383633136749,
-0.05261482298374176,
0.010896177962422371,
0.059942577034235,
-0.0018678226042538881,
0.0021037315018475056,
0.032930511981248856,
0.07228468358516693,
0.08042845129966736,
0.04857621714472771,
-0.07603046298027039,
0.009725729003548622,
0.0029729024972766638,
-0.019334224984049797,
-0.05563296377658844,
-0.07902269065380096,
0.03309617191553116,
-0.04675041884183884,
0.08539029210805893,
-0.03479410335421562,
-0.05045480281114578,
0.04297121241688728,
0.03224145248532295,
0.03563828021287918,
0.0774368867278099,
-0.006261358503252268,
0.02716251090168953,
-0.001227636355906725,
0.07483668625354767,
-0.001348183723166585,
-0.04018007963895798,
-0.01604842208325863
] |
allegro/plt5-small | 5c65ab3ab269dda279491e7e685f0adf1dadef61 | 2021-08-19T16:59:55.000Z | [
"pytorch",
"t5",
"text2text-generation",
"pl",
"dataset:ccnet",
"dataset:nkjp",
"dataset:wikipedia",
"dataset:open subtitles",
"dataset:free readings",
"transformers",
"T5",
"translation",
"summarization",
"question answering",
"reading comprehension",
"license:cc-by-4.0",
"autotrain_compatible"
] | translation | false | allegro | null | allegro/plt5-small | 1,281 | 2 | transformers | ---
language: pl
tags:
- T5
- translation
- summarization
- question answering
- reading comprehension
datasets:
- ccnet
- nkjp
- wikipedia
- open subtitles
- free readings
license: cc-by-4.0
---
# plT5 Small
**plT5** models are T5-based language models trained on Polish corpora. The models were optimized for the original T5 denoising target.
## Corpus
plT5 was trained on six different corpora available for the Polish language:
| Corpus | Tokens | Documents |
| :------ | ------: | ------: |
| [CCNet Middle](https://github.com/facebookresearch/cc_net) | 3243M | 7.9M |
| [CCNet Head](https://github.com/facebookresearch/cc_net) | 2641M | 7.0M |
| [National Corpus of Polish](http://nkjp.pl/index.php?page=14&lang=1)| 1357M | 3.9M |
| [Open Subtitles](http://opus.nlpl.eu/OpenSubtitles-v2018.php) | 1056M | 1.1M |
| [Wikipedia](https://dumps.wikimedia.org/) | 260M | 1.4M |
| [Wolne Lektury](https://wolnelektury.pl/) | 41M | 5.5k |
## Tokenizer
The training dataset was tokenized into subwords using a SentencePiece unigram model with
a vocabulary size of 50k tokens.
## Usage
Example code:
```python
from transformers import AutoTokenizer, AutoModel
tokenizer = AutoTokenizer.from_pretrained("allegro/plt5-small")
model = AutoModel.from_pretrained("allegro/plt5-small")
```
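For conditional generation (not shown in the original card), a sketch along these lines should work, assuming the checkpoint can be loaded with a seq2seq head; the Polish prompt is an arbitrary placeholder:
```python
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# Hedged sketch: load the checkpoint with a sequence-to-sequence head.
tokenizer = AutoTokenizer.from_pretrained("allegro/plt5-small")
model = AutoModelForSeq2SeqLM.from_pretrained("allegro/plt5-small")

# Arbitrary placeholder input.
inputs = tokenizer("Przykładowy tekst wejściowy.", return_tensors="pt")
outputs = model.generate(**inputs, max_length=50)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```
Note that the base checkpoint is only pretrained with the denoising objective, so meaningful outputs generally require task-specific fine-tuning first.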
## License
CC BY 4.0
## Citation
If you use this model, please cite the following paper:
```
```
## Authors
The model was trained by [**Machine Learning Research Team at Allegro**](https://ml.allegro.tech/) and [**Linguistic Engineering Group at Institute of Computer Science, Polish Academy of Sciences**](http://zil.ipipan.waw.pl/).
You can contact us at: <a href="mailto:[email protected]">[email protected]</a> | [
-0.06414645165205002,
-0.052007779479026794,
0.07386210560798645,
-0.03724508360028267,
0.05881178751587868,
0.021099142730236053,
0.009735790081322193,
0.026378022506833076,
-0.022031249478459358,
0.010322987101972103,
0.004253800958395004,
-0.025457676500082016,
0.008735368959605694,
0.04156138375401497,
-0.0722440704703331,
0.01229714322835207,
0.0180868711322546,
0.09154815226793289,
-0.08323617279529572,
-0.07056164741516113,
0.038878198713064194,
0.0629330724477768,
0.11307387053966522,
0.0035721764434129,
0.07341396808624268,
0.0043879966251552105,
-0.07118870317935944,
-0.023089079186320305,
0.0231021735817194,
-0.019156143069267273,
-0.06677649170160294,
0.06829013675451279,
0.025136975571513176,
0.03694102540612221,
0.05760344862937927,
0.019095556810498238,
0.025233415886759758,
0.008705546148121357,
0.002412442583590746,
0.018145998939871788,
-0.02877340093255043,
0.010999449528753757,
-0.051280491054058075,
0.039247799664735794,
0.12249593436717987,
0.020287662744522095,
-0.04229867830872536,
0.008696277625858784,
-0.06385236978530884,
-0.024643443524837494,
-0.06881967931985855,
-0.0041030049324035645,
0.0346846841275692,
0.04602132365107536,
0.023257795721292496,
-0.09118997305631638,
-0.05038440600037575,
0.024830028414726257,
0.009592637419700623,
-0.09733106940984726,
-0.06671498715877533,
-0.09742862731218338,
-0.11369528621435165,
0.029805054888129234,
0.007468048483133316,
0.0010544833494350314,
0.029071133583784103,
0.052126239985227585,
-0.013838456943631172,
0.023435086011886597,
-0.012033800594508648,
0.0029953306075185537,
-0.049145061522722244,
0.051674213260412216,
-0.06390800327062607,
-0.018204981461167336,
0.05146564543247223,
-0.045047514140605927,
0.04978957027196884,
-0.09697610139846802,
0.06459641456604004,
0.055156342685222626,
0.06312312930822372,
-0.03365888446569443,
0.006808127276599407,
-0.03591194748878479,
0.031043516471982002,
0.03859139233827591,
0.0027309046126902103,
0.0028814675752073526,
-0.00633597606793046,
-0.011890188790857792,
0.04003781080245972,
0.012212336994707584,
-0.05849655345082283,
0.011373821645975113,
0.03740020841360092,
0.03156520053744316,
-0.007602930534631014,
0.13385912775993347,
0.06419127434492111,
0.009157812222838402,
0.02394472248852253,
-0.07222194969654083,
-0.10642371326684952,
-0.02921198308467865,
0.022018590942025185,
0.03837711736559868,
0.02248663455247879,
-0.0812712237238884,
-0.03924525901675224,
0.00008370872819796205,
-0.08326170593500137,
-0.050417836755514145,
0.07341630756855011,
-0.008219539187848568,
0.059868697077035904,
-0.014074353501200676,
0.07354000210762024,
0.015250856056809425,
-0.12180864810943604,
0.024297649040818214,
-0.018494538962841034,
0.019664140418171883,
0.017019586637616158,
0.0019708373583853245,
-0.023196781054139137,
5.14715768509669e-33,
0.05158324912190437,
0.03163689747452736,
-0.019464407116174698,
0.010771266184747219,
-0.006045755930244923,
0.0317213237285614,
0.033621858805418015,
0.0458880215883255,
-0.10128024220466614,
-0.015024918131530285,
-0.03711313009262085,
0.014666501432657242,
-0.07416509836912155,
-0.020978305488824844,
-0.01670239306986332,
-0.06053943186998367,
0.00868457555770874,
0.014158905483782291,
-0.05977999046444893,
0.04664675146341324,
0.08333159983158112,
0.06606949865818024,
0.022584663704037666,
-0.007593183312565088,
0.011943351477384567,
-0.009961800649762154,
0.027673549950122833,
-0.12330501526594162,
0.011094608344137669,
0.031243154779076576,
-0.10606208443641663,
-0.0013891411945223808,
-0.010691551491618156,
-0.008513499051332474,
0.02736807055771351,
-0.025657320395112038,
-0.012098344042897224,
-0.0609419085085392,
0.01004872191697359,
-0.09130053967237473,
0.009407524950802326,
-0.03408494219183922,
-0.010224621742963791,
0.026103300973773003,
-0.01078446488827467,
-0.010367200709879398,
-0.09110397845506668,
-0.036315396428108215,
0.01028587855398655,
-0.01091369055211544,
0.015151561237871647,
0.05557293817400932,
-0.06748012453317642,
-0.02139781042933464,
0.00371378636918962,
0.11258402466773987,
0.015985004603862762,
0.017473815008997917,
0.029668373987078667,
-0.012902638874948025,
0.04571468383073807,
-0.0450160838663578,
0.018819833174347878,
0.0055088018998503685,
0.17202942073345184,
0.03723954036831856,
-0.10434731096029282,
0.019610140472650528,
0.10779941827058792,
0.013904265128076077,
-0.06108125299215317,
0.0001691186334937811,
0.00850269291549921,
0.08373941481113434,
0.0636519119143486,
0.03403134271502495,
0.007143913768231869,
0.004563107155263424,
0.024482155218720436,
0.029889563098549843,
-0.07724428921937943,
-0.019824134185910225,
-0.007745423354208469,
-0.13217653334140778,
-0.05027436092495918,
0.033605385571718216,
0.051419008523225784,
-0.0872781053185463,
0.06636092811822891,
-0.01583123579621315,
0.046255242079496384,
-0.04209280014038086,
-0.005244965199381113,
-0.034322090446949005,
-0.054627325385808945,
-4.6364057570063904e-33,
0.008760987780988216,
0.03637583553791046,
-0.06857866048812866,
0.08054842054843903,
-0.0906561091542244,
-0.05460371449589729,
-0.051272690296173096,
0.08535019308328629,
0.019819028675556183,
0.014329018071293831,
0.03712014853954315,
-0.12579412758350372,
0.07602543383836746,
0.025699174031615257,
0.019362859427928925,
-0.009385214187204838,
0.021409595385193825,
-0.007052052766084671,
0.04489133879542351,
0.11555781215429306,
-0.05691719055175781,
0.03206528723239899,
-0.08910806477069855,
0.08836577087640762,
-0.007018213625997305,
0.054778698831796646,
-0.035388845950365067,
0.02860904671251774,
0.04170829802751541,
-0.037240345031023026,
-0.06896117329597473,
-0.0014944042777642608,
-0.07071717828512192,
-0.036465276032686234,
-0.05411938577890396,
0.0007032896974124014,
0.009915892034769058,
0.043089721351861954,
-0.03337873890995979,
0.09203588217496872,
0.040185801684856415,
0.0672249123454094,
-0.05051108077168465,
-0.02943382039666176,
-0.02023538388311863,
-0.027567150071263313,
-0.09450836479663849,
0.01869325526058674,
-0.016419747844338417,
-0.08561571687459946,
0.03980720415711403,
0.022694628685712814,
-0.10941124707460403,
-0.05958327651023865,
0.03986597806215286,
-0.04485008120536804,
0.03166864067316055,
-0.08616188913583755,
-0.014212222769856453,
-0.037889279425144196,
-0.024618379771709442,
-0.006509988568723202,
-0.05906517803668976,
0.026328304782509804,
0.05479760468006134,
-0.007886896841228008,
-0.06476465612649918,
0.09067659080028534,
-0.024689054116606712,
-0.052805956453084946,
0.004854186903685331,
-0.09714804589748383,
0.07176517695188522,
-0.008473902009427547,
-0.024933932349085808,
-0.044552627950906754,
0.0016584201948717237,
0.015062296763062477,
0.014042994938790798,
-0.04375419020652771,
-0.01167050190269947,
-0.03224286437034607,
0.0024106313940137625,
0.05596349760890007,
0.049803830683231354,
-0.002986522624269128,
0.024955792352557182,
0.010372767224907875,
0.013995692133903503,
0.05884688347578049,
-0.003919454291462898,
0.0019784055184572935,
0.002942797029390931,
0.13821978867053986,
0.007081907242536545,
-5.775946121389097e-8,
-0.003845043247565627,
0.0313178114593029,
-0.0888700857758522,
0.0637170821428299,
-0.013620317913591862,
-0.07755318284034729,
-0.03854638710618019,
-0.013610070571303368,
-0.05236173793673515,
-0.021147416904568672,
0.005417900625616312,
0.023049427196383476,
-0.067758709192276,
-0.014524158090353012,
-0.0069808922708034515,
0.10605545341968536,
-0.019300172105431557,
0.005981938913464546,
0.023311737924814224,
0.005074665881693363,
0.06572654843330383,
0.0343230739235878,
-0.0006163754733279347,
0.00828644074499607,
-0.008116504177451134,
0.056379616260528564,
-0.012144722044467926,
0.10008253902196884,
0.027636693790555,
-0.10593799501657486,
-0.008542688563466072,
0.04935475438833237,
-0.019840452820062637,
-0.005466264206916094,
0.09442324191331863,
0.11421345919370651,
-0.016148870810866356,
-0.03236423805356026,
-0.05672218278050423,
0.11350689828395844,
0.037477392703294754,
-0.04429503157734871,
-0.0033027930185198784,
0.0028235893696546555,
0.04740379750728607,
-0.06338337063789368,
-0.041999831795692444,
-0.09505937993526459,
0.08872751146554947,
-0.0688694640994072,
0.02460208721458912,
0.00811191089451313,
-0.00866054929792881,
0.02882957272231579,
0.053481779992580414,
0.025730684399604797,
0.052767563611269,
0.006154118105769157,
-0.029186347499489784,
0.0498129241168499,
0.015603669919073582,
0.009113729000091553,
0.04830145463347435,
-0.023806877434253693
] |
facebook/wav2vec2-lv-60-espeak-cv-ft | 7718bdd728dde297e1e69d61fc782d147bac21a6 | 2021-12-08T21:03:18.000Z | [
"pytorch",
"wav2vec2",
"automatic-speech-recognition",
"multi-lingual",
"dataset:common_voice",
"arxiv:2109.11680",
"transformers",
"speech",
"audio",
"phoneme-recognition",
"license:apache-2.0"
] | automatic-speech-recognition | false | facebook | null | facebook/wav2vec2-lv-60-espeak-cv-ft | 1,281 | 2 | transformers | ---
language: multi-lingual
datasets:
- common_voice
tags:
- speech
- audio
- automatic-speech-recognition
- phoneme-recognition
widget:
- example_title: Librispeech sample 1
src: https://cdn-media.huggingface.co/speech_samples/sample1.flac
- example_title: Librispeech sample 2
src: https://cdn-media.huggingface.co/speech_samples/sample2.flac
license: apache-2.0
---
# Wav2Vec2-Large-LV60 fine-tuned on multi-lingual Common Voice
This checkpoint leverages the pretrained checkpoint [wav2vec2-large-lv60](https://huggingface.co/facebook/wav2vec2-large-lv60)
and is fine-tuned on [CommonVoice](https://huggingface.co/datasets/common_voice) to recognize phonetic labels in multiple languages.
When using the model, make sure that your speech input is sampled at 16 kHz.
Note that the model outputs a string of phonetic labels; a dictionary mapping phonetic labels to words
has to be applied to turn the phonetic output into a word-level transcription (a minimal sketch of such a mapping is shown below).
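For illustration only (not from the original card), such a mapping could be as simple as a greedy longest-match lookup over a phoneme lexicon; everything in the block below, including the lexicon contents, is hypothetical:
```python
# Hypothetical sketch of mapping a phonetic transcription back to words.
# The lexicon (space-separated phoneme string -> word) has to be supplied by the user.
lexicon = {
    "m ɪ s t ɚ": "mister",
    "k w ɪ l t ɚ": "quilter",
}

def phonemes_to_words(phoneme_string, lexicon):
    """Greedy longest-match decoding of a phoneme string into words."""
    tokens = phoneme_string.split()
    words, i = [], 0
    while i < len(tokens):
        for j in range(len(tokens), i, -1):  # try the longest span first
            candidate = " ".join(tokens[i:j])
            if candidate in lexicon:
                words.append(lexicon[candidate])
                i = j
                break
        else:
            words.append(tokens[i])  # keep unknown phonemes as-is
            i += 1
    return " ".join(words)

print(phonemes_to_words("m ɪ s t ɚ k w ɪ l t ɚ", lexicon))  # -> "mister quilter"
```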
[Paper: Simple and Effective Zero-shot Cross-lingual Phoneme Recognition](https://arxiv.org/abs/2109.11680)
Authors: Qiantong Xu, Alexei Baevski, Michael Auli
**Abstract**
Recent progress in self-training, self-supervised pretraining and unsupervised learning enabled well performing speech recognition systems without any labeled data. However, in many cases there is labeled data available for related languages which is not utilized by these methods. This paper extends previous work on zero-shot cross-lingual transfer learning by fine-tuning a multilingually pretrained wav2vec 2.0 model to transcribe unseen languages. This is done by mapping phonemes of the training languages to the target language using articulatory features. Experiments show that this simple method significantly outperforms prior work which introduced task-specific architectures and used only part of a monolingually pretrained model.
The original model can be found under https://github.com/pytorch/fairseq/tree/master/examples/wav2vec#wav2vec-20.
# Usage
To transcribe audio files the model can be used as a standalone acoustic model as follows:
```python
from transformers import Wav2Vec2Processor, Wav2Vec2ForCTC
from datasets import load_dataset
import torch
# load model and processor
processor = Wav2Vec2Processor.from_pretrained("facebook/wav2vec2-lv-60-espeak-cv-ft")
model = Wav2Vec2ForCTC.from_pretrained("facebook/wav2vec2-lv-60-espeak-cv-ft")
# load dummy dataset and read soundfiles
ds = load_dataset("patrickvonplaten/librispeech_asr_dummy", "clean", split="validation")
# tokenize
input_values = processor(ds[0]["audio"]["array"], return_tensors="pt").input_values
# retrieve logits
with torch.no_grad():
logits = model(input_values).logits
# take argmax and decode
predicted_ids = torch.argmax(logits, dim=-1)
transcription = processor.batch_decode(predicted_ids)
# => should give ['m ɪ s t ɚ k w ɪ l t ɚ ɹ ɪ z ð ɪ ɐ p ɑː s əl ʌ v ð ə m ɪ d əl k l æ s ᵻ z æ n d w iː ɑːɹ ɡ l æ d t ə w ɛ l k ə m h ɪ z ɡ ɑː s p əl']
``` | [
-0.0885884016752243,
-0.13317987322807312,
-0.011039100587368011,
-0.0967567041516304,
0.0209974218159914,
0.011417693458497524,
-0.03603757172822952,
-0.05309564992785454,
-0.010248826816678047,
-0.10429906845092773,
0.0305080134421587,
-0.08656425029039383,
-0.035629719495773315,
-0.03541545569896698,
0.0006481007440015674,
-0.0386354997754097,
0.0209111999720335,
0.0016006474616006017,
-0.07654907554388046,
-0.015420240350067616,
0.020696990191936493,
0.0031980560161173344,
0.07562959939241409,
0.009399275295436382,
0.05110771209001541,
0.0004002891364507377,
-0.05485095456242561,
0.023933900520205498,
0.07213179767131805,
-0.03611021116375923,
0.12024082243442535,
0.10919838398694992,
0.011191798374056816,
0.00899682566523552,
0.007562811952084303,
-0.04430287703871727,
-0.03350483998656273,
-0.03935180976986885,
-0.004549834411591291,
0.008390006609261036,
-0.02347380667924881,
0.005632022861391306,
0.011773665435612202,
-0.023368071764707565,
-0.009074287489056587,
-0.03887102007865906,
-0.033368147909641266,
0.00040847130003385246,
0.005677989684045315,
0.037467703223228455,
-0.03459453210234642,
-0.03166164830327034,
-0.001868068822659552,
0.10448140650987625,
-0.14743202924728394,
-0.03052232600748539,
0.0230445247143507,
0.04883244261145592,
0.040785934776067734,
0.018096011132001877,
-0.12718108296394348,
-0.00011820239160442725,
0.004015786573290825,
0.047136783599853516,
-0.029896071180701256,
-0.01355354581028223,
0.01614305190742016,
0.005854315124452114,
0.011706005781888962,
0.01601901464164257,
-0.06961982697248459,
0.05506085231900215,
0.057748980820178986,
0.07046201825141907,
0.055784471333026886,
0.012192138470709324,
0.052221596240997314,
0.03732050582766533,
0.014687707647681236,
-0.12388056516647339,
0.02328309789299965,
-0.028972821310162544,
-0.02372025139629841,
-0.04460678622126579,
0.06864262372255325,
-0.04279303923249245,
-0.029177989810705185,
-0.004647982306778431,
0.021066535264253616,
-0.021879656240344048,
0.004176852758973837,
0.025915464386343956,
-0.002603475470095873,
0.018410615622997284,
-0.02914273738861084,
0.0941038429737091,
0.07495095580816269,
0.06855215132236481,
0.013891061767935753,
0.08486854285001755,
-0.009204228408634663,
-0.07200533896684647,
0.035028066486120224,
0.024378176778554916,
-0.04363454878330231,
-0.10262958705425262,
0.028154222294688225,
0.037145428359508514,
0.019060689955949783,
-0.0456063486635685,
0.10198496282100677,
0.030564215034246445,
0.0020891858730465174,
-0.02361735701560974,
0.04709334671497345,
0.045519132167100906,
-0.07169393450021744,
-0.09436313062906265,
-0.008414150215685368,
-0.009564477019011974,
-0.13091705739498138,
-0.026046499609947205,
0.027684452012181282,
-0.03210984915494919,
0.017021650448441505,
-0.01683584228157997,
0.014832117594778538,
4.3717439386027216e-33,
0.04688852280378342,
0.030890533700585365,
-0.018271176144480705,
-0.025402363389730453,
0.05555174872279167,
-0.07997337728738785,
-0.0594932995736599,
-0.010583420284092426,
-0.07005840539932251,
-0.024574484676122665,
0.008321144618093967,
-0.016499066725373268,
-0.06545930355787277,
0.0035192060749977827,
-0.03568430244922638,
0.052306704223155975,
-0.04006226733326912,
0.017472608014941216,
0.012811072170734406,
0.020384324714541435,
0.17227298021316528,
0.006810949649661779,
0.08229425549507141,
0.01627420447766781,
0.055552076548337936,
0.0978582426905632,
0.059487421065568924,
-0.01631159521639347,
0.0077202208340168,
0.07486028969287872,
-0.06717182695865631,
-0.027052514255046844,
0.0329400859773159,
-0.025105426087975502,
0.04853537678718567,
0.015621497295796871,
-0.016602301970124245,
-0.054208941757678986,
-0.044270437210798264,
-0.08350814133882523,
0.031179221346974373,
0.009752776473760605,
-0.0109039805829525,
-0.018257619813084602,
-0.044655490666627884,
-0.08420642465353012,
-0.032030850648880005,
0.05945300683379173,
0.01303702499717474,
0.0630505308508873,
-0.030400743708014488,
-0.026418669149279594,
-0.056081611663103104,
0.026612527668476105,
-0.022453410550951958,
0.013576542027294636,
0.04812152683734894,
-0.006634176708757877,
0.047604650259017944,
0.04427779093384743,
0.02334456518292427,
0.013901910744607449,
0.006984276231378317,
0.04801806062459946,
0.07270824909210205,
-0.024864502251148224,
-0.05811434984207153,
-0.0007785425987094641,
0.07650220394134521,
-0.006854802370071411,
-0.02242795005440712,
-0.09693560004234314,
0.09450088441371918,
0.07459091395139694,
0.06065095216035843,
-0.01803104765713215,
0.01783915050327778,
-0.026447903364896774,
-0.03127326816320419,
0.0652293860912323,
-0.03247677534818649,
0.013009194284677505,
-0.05858877673745155,
-0.019725946709513664,
-0.03607023507356644,
-0.022015845403075218,
0.03168478235602379,
-0.10858059674501419,
-0.046435143798589706,
0.008969699963927269,
0.005168881267309189,
0.0271772388368845,
0.0290067195892334,
-0.048174016177654266,
-0.05146542564034462,
-4.737238927735957e-33,
0.008788470178842545,
0.09055925905704498,
0.013644193299114704,
0.06722001731395721,
0.009087789803743362,
-0.04765436053276062,
0.15798556804656982,
0.0789327472448349,
-0.019873542711138725,
-0.040748998522758484,
0.033763375133275986,
-0.07868902385234833,
0.027105633169412613,
-0.04298882558941841,
0.0462108813226223,
-0.03534450754523277,
0.07005702704191208,
0.0017342079663649201,
0.11884742975234985,
0.08950895816087723,
0.02715197764337063,
0.007144555449485779,
-0.03424679487943649,
0.11517605185508728,
-0.06137401983141899,
-0.05493202432990074,
-0.06565345823764801,
0.021719902753829956,
-0.005421506706625223,
-0.026928264647722244,
-0.04242035746574402,
0.03381236642599106,
-0.040608685463666916,
-0.01141279749572277,
-0.03706702962517738,
-0.028497012332081795,
0.03401419520378113,
-0.02998216263949871,
0.0021765052806586027,
0.022222287952899933,
0.08895023912191391,
0.011967220343649387,
-0.1255805939435959,
-0.09423676878213882,
0.00418689101934433,
-0.024462750181555748,
-0.11834007501602173,
0.013701812364161015,
-0.01737583428621292,
-0.04001354053616524,
0.05637607350945473,
-0.00014332603313960135,
0.00641782209277153,
0.034412335604429245,
-0.0004959248472005129,
-0.05343044921755791,
0.005036606453359127,
-0.052492398768663406,
-0.04013383761048317,
0.012614215724170208,
-0.0014915548963472247,
-0.014488155022263527,
-0.06391298025846481,
-0.0771079733967781,
0.09384958446025848,
0.028018701821565628,
0.02575719729065895,
0.02307863160967827,
0.05060520023107529,
-0.0008814047905616462,
-0.015120930038392544,
-0.04957398399710655,
0.036786291748285294,
0.004252555780112743,
-0.04455109313130379,
-0.05509893596172333,
-0.08459591120481491,
-0.057580966502428055,
0.014109169133007526,
-0.03220701217651367,
-0.03897275775671005,
-0.005721087567508221,
0.057297609746456146,
0.036160871386528015,
0.08514677733182907,
0.0849023088812828,
-0.040297143161296844,
0.019805120304226875,
0.03658073768019676,
0.022147182375192642,
-0.026591286063194275,
0.05924811214208603,
-0.005128334276378155,
0.05288488417863846,
0.010124494321644306,
-5.645949130439476e-8,
-0.06561986356973648,
0.03984340652823448,
-0.0017549952026456594,
-0.013393140397965908,
-0.02570679783821106,
-0.12189541012048721,
-0.018190423026680946,
-0.015735795721411705,
0.01319572702050209,
-0.02024875394999981,
0.06700772047042847,
-0.023052982985973358,
-0.047769155353307724,
0.028936346992850304,
0.017812179401516914,
0.01902398094534874,
-0.045290444046258926,
0.11985599994659424,
-0.07336004078388214,
-0.07660973817110062,
0.019773676991462708,
0.045321859419345856,
0.02243424393236637,
0.014556455425918102,
0.018729258328676224,
0.009984997101128101,
-0.024454262107610703,
0.1188027411699295,
-0.006657602731138468,
-0.10883137583732605,
-0.08137309551239014,
0.005223593208938837,
-0.03625893220305443,
-0.009248148649930954,
-0.004200648982077837,
0.013332475908100605,
-0.03548414260149002,
-0.03459794446825981,
0.04550942778587341,
0.10702754557132721,
0.03688483312726021,
0.0763024315237999,
-0.10503029823303223,
-0.04768573120236397,
0.07798969745635986,
0.027684548869729042,
-0.05592207610607147,
-0.04052405804395676,
0.01377915684133768,
0.08247306942939758,
0.02827245183289051,
0.006915691774338484,
-0.03970445692539215,
-0.0042658704333007336,
0.06080532446503639,
-0.005456199869513512,
-0.009565606713294983,
0.011727077886462212,
0.05432863533496857,
0.011347966268658638,
0.06194661557674408,
0.01813819259405136,
0.026899240911006927,
-0.02588006481528282
] |
pdelobelle/robBERT-dutch-books | 04eab2e04d08d4f62df7f769135bcece4f907606 | 2021-05-20T19:17:17.000Z | [
"pytorch",
"jax",
"roberta",
"fill-mask",
"transformers",
"autotrain_compatible"
] | fill-mask | false | pdelobelle | null | pdelobelle/robBERT-dutch-books | 1,281 | null | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
JamesStratford/Pidrow-bot-DialoGPT-Medium-v2 | 0fb0a99a49c249fdaf3335bf14ad62c71709b373 | 2022-06-29T07:02:11.000Z | [
"pytorch",
"gpt2",
"text-generation",
"transformers",
"conversational"
] | conversational | false | JamesStratford | null | JamesStratford/Pidrow-bot-DialoGPT-Medium-v2 | 1,280 | null | transformers | ---
tags:
- conversational
---
# Pidrow bot - medium | [
-0.05453476682305336,
-0.0035541062243282795,
0.013654408045113087,
-0.006258395034819841,
0.07183122634887695,
-0.05576155334711075,
0.09608016163110733,
0.05145232751965523,
-0.0025397385470569134,
0.005794489290565252,
0.013036993332207203,
-0.023125048726797104,
0.008712511509656906,
-0.02045530267059803,
0.03259598836302757,
0.039997827261686325,
0.0005723622161895037,
-0.011835484765470028,
0.006732438690960407,
-0.003882333170622587,
0.002762418007478118,
0.054747581481933594,
0.02050371654331684,
-0.01901480369269848,
-0.001589890569448471,
0.04839194938540459,
-0.05570279806852341,
-0.02726837992668152,
0.039630427956581116,
-0.03280375152826309,
-0.004422706086188555,
0.0877494066953659,
0.048907868564128876,
0.0877336785197258,
0.012131650000810623,
0.05032828077673912,
0.003280283184722066,
-0.04267976060509682,
0.015566207468509674,
0.01979578472673893,
0.008500329218804836,
-0.04258803650736809,
-0.03560226783156395,
-0.015946587547659874,
-0.05204940214753151,
0.07442865520715714,
-0.1083255410194397,
0.022614462301135063,
-0.060936011373996735,
0.017938481643795967,
-0.13784286379814148,
-0.03677462413907051,
0.04788926616311073,
0.09684834629297256,
-0.017676251009106636,
0.0004234497610013932,
-0.016380099579691887,
-0.029504256322979927,
0.07267484068870544,
-0.023586662486195564,
-0.08085189014673233,
-0.02546890638768673,
-0.03688272088766098,
0.017026040703058243,
-0.03526928275823593,
0.01895802654325962,
-0.08629274368286133,
-0.01590636558830738,
-0.050710372626781464,
0.09333472698926926,
0.060117967426776886,
0.03224444389343262,
-0.016055315732955933,
0.07064028829336166,
-0.00032606764580123127,
-0.0026399872731417418,
-0.034105077385902405,
-0.03929021954536438,
-0.05422324314713478,
0.03786364197731018,
-0.020339246839284897,
-0.06549008935689926,
-0.040146611630916595,
0.008100799284875393,
-0.02416091412305832,
-0.00785968080163002,
-0.031391441822052,
0.01504574716091156,
-0.03997324779629707,
0.046987537294626236,
-0.037504978477954865,
-0.04150877147912979,
0.026544436812400818,
0.05053388699889183,
-0.0689762756228447,
0.02902252972126007,
-0.05501534417271614,
-0.062442708760499954,
-0.06082586199045181,
0.11306626349687576,
0.014801914803683758,
0.017513662576675415,
-0.030008021742105484,
-0.10536588728427887,
-0.03777090087532997,
-0.021949924528598785,
0.010302226059138775,
0.0769781693816185,
0.04711088538169861,
0.016115223988890648,
-0.009569160640239716,
-0.0693875327706337,
0.013128979131579399,
-0.06799351423978806,
0.08625161647796631,
0.033771976828575134,
0.09483127295970917,
0.03487543761730194,
0.03953689709305763,
-0.0412137433886528,
0.03270289674401283,
0.004918817896395922,
-0.06309526413679123,
-0.030338449403643608,
0.010161996819078922,
-0.05011489987373352,
-0.03406589478254318,
-3.481905735289789e-33,
0.07072676718235016,
-0.04541181027889252,
-0.01165065634995699,
0.10050899535417557,
0.000007517149697378045,
0.11329902708530426,
-0.06380267441272736,
-0.09913601726293564,
0.007967385463416576,
-0.0699816420674324,
0.03409913182258606,
0.08476416021585464,
-0.09587171673774719,
0.03972337394952774,
-0.004099493380635977,
-0.04234849661588669,
0.03294418379664421,
0.040907252579927444,
-0.005800263024866581,
-0.009309819899499416,
0.02228461392223835,
0.0107469093054533,
0.009905217215418816,
0.0984574481844902,
0.05415860936045647,
0.07936657965183258,
0.0065828184597194195,
-0.1821928471326828,
0.058819517493247986,
0.0423935204744339,
-0.07476107031106949,
0.017518917098641396,
-0.015669656917452812,
0.0427091009914875,
-0.015798095613718033,
0.008298429660499096,
-0.025421690195798874,
-0.04050101712346077,
-0.028178250417113304,
-0.07093273848295212,
-0.061467744410037994,
-0.011477869935333729,
-0.04509526863694191,
-0.07953177392482758,
0.008709545247256756,
0.02347235381603241,
0.004705790895968676,
-0.02561669796705246,
0.01692819595336914,
-0.05085035786032677,
-0.016515633091330528,
0.04440053552389145,
0.06362451612949371,
0.004050110932439566,
0.00786830298602581,
-0.06836549937725067,
-0.0012398315593600273,
-0.00009887709165923297,
-0.04092063382267952,
-0.0418899729847908,
0.0726584941148758,
0.0408506765961647,
0.07248146086931229,
-0.07727524638175964,
0.09833018481731415,
-0.0468403585255146,
-0.004615455865859985,
0.06721044331789017,
-0.007858457043766975,
0.01685512065887451,
-0.05172104761004448,
0.057155560702085495,
0.014132969081401825,
0.07647550106048584,
-0.045183248817920685,
-0.016295162960886955,
-0.0035081421956419945,
-0.05138144642114639,
0.01573948562145233,
0.07952084392309189,
-0.08625882863998413,
-0.054130394011735916,
-0.04190355911850929,
-0.021907703951001167,
-0.04216816648840904,
-0.002711281646043062,
0.04479759931564331,
-0.0902206227183342,
0.018647588789463043,
-0.008443395607173443,
-0.034961700439453125,
0.01545848697423935,
-0.028830712661147118,
0.02865629456937313,
-0.1621890366077423,
3.2274547383619426e-34,
-0.04956500977277756,
-0.018694965168833733,
-0.08672285079956055,
0.057619065046310425,
-0.0355088971555233,
-0.017706209793686867,
0.027234330773353577,
0.08986100554466248,
0.03433344140648842,
0.029757512733340263,
-0.058689672499895096,
-0.022431636229157448,
-0.049072351306676865,
-0.024638643488287926,
0.1408495157957077,
0.035906024277210236,
0.000553877791389823,
-0.022292600944638252,
0.018059052526950836,
0.00819934532046318,
-0.05392863228917122,
0.0061636329628527164,
-0.09240029007196426,
0.12815091013908386,
0.04690651595592499,
0.0027340089436620474,
0.03521629795432091,
0.023779064416885376,
0.023639803752303123,
-0.032418932765722275,
0.007633610628545284,
0.056110233068466187,
0.011681577190756798,
-0.03487147018313408,
0.06113889440894127,
0.015513957478106022,
0.028148217126727104,
0.040043678134679794,
0.02455308474600315,
0.01132915634661913,
0.0725436881184578,
0.0007223639986477792,
-0.016377132385969162,
-0.09067659825086594,
0.003931762184947729,
-0.063324473798275,
-0.05830519646406174,
-0.059275805950164795,
-0.07959558814764023,
0.06628585606813431,
0.0662958025932312,
-0.018223967403173447,
0.024614892899990082,
-0.12005187571048737,
-0.07149828225374222,
-0.04060067981481552,
-0.01038247998803854,
0.034071605652570724,
-0.04560825601220131,
-0.03675919398665428,
-0.0495096817612648,
-0.08387770503759384,
-0.04731900617480278,
0.02346411906182766,
-0.00982777401804924,
0.0035564503632485867,
0.013583811931312084,
-0.04332005977630615,
-0.037838537245988846,
-0.030085692182183266,
0.14190955460071564,
0.0398026667535305,
-0.08605790883302689,
0.05849815905094147,
0.04538992792367935,
0.017777156084775925,
-0.01082130428403616,
0.013525811024010181,
0.012539008632302284,
-0.0032524082344025373,
-0.07084424793720245,
0.03184710815548897,
-0.01263812929391861,
0.08167868852615356,
0.05560600757598877,
-0.020755134522914886,
-0.007125149480998516,
0.013904154300689697,
-0.047183431684970856,
0.013009663671255112,
0.007676080800592899,
-0.007855696603655815,
0.03864672780036926,
0.04845622181892395,
-0.10094330459833145,
-2.6408919495679584e-8,
-0.016088560223579407,
-0.07290326803922653,
-0.03645294904708862,
0.07189147174358368,
0.045066703110933304,
0.02746666967868805,
0.0023994273506104946,
-0.057467829436063766,
-0.060875773429870605,
0.008889510296285152,
0.06504794210195541,
0.021878594532608986,
0.042913395911455154,
0.043630488216876984,
0.09889249503612518,
0.03910304233431816,
-0.06971513479948044,
0.01962246745824814,
0.00596983265131712,
-0.057672157883644104,
0.030294600874185562,
-0.02737094648182392,
-0.1077020913362503,
0.04082903638482094,
-0.01382130105048418,
0.022753197699785233,
-0.00905500166118145,
0.057258278131484985,
-0.0927308201789856,
0.07492787390947342,
0.022694453597068787,
0.02518993616104126,
-0.10020748525857925,
-0.01868085004389286,
-0.03515974059700966,
0.0240655355155468,
-0.029654189944267273,
-0.06414566934108734,
0.01467818208038807,
0.008447084575891495,
-0.015066920779645443,
0.10916851460933685,
0.0024445056915283203,
-0.03871007636189461,
0.0477408692240715,
-0.012394062243402004,
-0.04020601883530617,
-0.10014540702104568,
0.010479395277798176,
0.004445207770913839,
-0.02947501465678215,
-0.0034665227867662907,
0.06039553880691528,
0.0607745423913002,
0.05060144513845444,
-0.0024185944348573685,
0.06089797988533974,
-0.0027814735658466816,
0.017523081973195076,
0.005254521034657955,
0.06945107877254486,
0.10282964259386063,
0.08065564185380936,
0.03573990240693092
] |
dandelin/vilt-b32-mlm-itm | a94469664a838bf855b40144f638ba9b3e791c89 | 2021-11-27T10:13:10.000Z | [
"pytorch",
"vilt",
"arxiv:2102.03334",
"transformers",
"license:apache-2.0"
] | null | false | dandelin | null | dandelin/vilt-b32-mlm-itm | 1,279 | 1 | transformers | ---
license: apache-2.0
tags:
---
# Vision-and-Language Transformer (ViLT), pre-trained only
Vision-and-Language Transformer (ViLT) model pre-trained on GCC+SBU+COCO+VG (200k steps). It was introduced in the paper [ViLT: Vision-and-Language Transformer Without Convolution or Region Supervision](https://arxiv.org/abs/2102.03334) by Kim et al. and first released in [this repository](https://github.com/dandelin/ViLT).
Disclaimer: The team releasing ViLT did not write a model card for this model, so this model card has been written by the Hugging Face team.
## Model description
(to do)
## Intended uses & limitations
You can use the raw model for visual question answering.
### How to use
(to do)
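Pending the official snippet, a minimal sketch of one thing the raw checkpoint can do (masked language modeling conditioned on an image), assuming the weights load into the generic `ViltProcessor`/`ViltForMaskedLM` classes of the Transformers library; the image URL and caption below are only illustrative:
```python
from transformers import ViltProcessor, ViltForMaskedLM
from PIL import Image
import requests
import torch

# an illustrative image-text pair: a COCO image and a caption with [MASK] tokens
url = "http://images.cocodataset.org/val2017/000000039769.jpg"
image = Image.open(requests.get(url, stream=True).raw)
text = "a bunch of [MASK] laying on a [MASK]."

processor = ViltProcessor.from_pretrained("dandelin/vilt-b32-mlm-itm")
model = ViltForMaskedLM.from_pretrained("dandelin/vilt-b32-mlm-itm")

# encode the image-text pair and run a forward pass
encoding = processor(image, text, return_tensors="pt")
with torch.no_grad():
    outputs = model(**encoding)

# fill in the [MASK] positions with the most likely tokens
mask_positions = (encoding.input_ids[0] == processor.tokenizer.mask_token_id).nonzero(as_tuple=True)[0]
predicted_ids = outputs.logits[0, mask_positions].argmax(dim=-1)
print(processor.tokenizer.decode(predicted_ids))
```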
## Training data
(to do)
## Training procedure
### Preprocessing
(to do)
### Pretraining
(to do)
## Evaluation results
(to do)
### BibTeX entry and citation info
```bibtex
@misc{kim2021vilt,
title={ViLT: Vision-and-Language Transformer Without Convolution or Region Supervision},
author={Wonjae Kim and Bokyung Son and Ildoo Kim},
year={2021},
eprint={2102.03334},
archivePrefix={arXiv},
primaryClass={stat.ML}
}
``` | [
-0.048531774431467056,
-0.04296017438173294,
-0.0023779836483299732,
0.003038106020539999,
0.007005148567259312,
0.039982035756111145,
-0.03964078053832054,
0.03885788470506668,
-0.03840811178088188,
-0.08428911119699478,
0.05534873530268669,
-0.11531589180231094,
-0.0315798781812191,
0.047950759530067444,
0.026333658024668694,
-0.03581693768501282,
0.025022724643349648,
0.08821017295122147,
-0.04607310891151428,
-0.022437602281570435,
0.03847401589155197,
0.002039221115410328,
0.04260426387190819,
-0.008515629917383194,
0.016737939789891243,
0.018523570150136948,
-0.03552045300602913,
-0.05940183252096176,
0.07897079735994339,
-0.06416001915931702,
-0.008776921778917313,
0.023624703288078308,
0.041372139006853104,
0.11349627375602722,
-0.04714655131101608,
0.05830240994691849,
-0.013058371841907501,
0.0003473548567853868,
-0.06139815226197243,
-0.07213613390922546,
-0.09559464454650879,
-0.0189187191426754,
0.014142598025500774,
-0.0045580207370221615,
0.08461225777864456,
0.028867030516266823,
0.01789850741624832,
0.01978258788585663,
-0.044777270406484604,
-0.06019320338964462,
-0.11751367896795273,
-0.11695457994937897,
0.04235595092177391,
0.031092865392565727,
-0.0598316416144371,
0.01802070252597332,
0.002831806195899844,
-0.10905633121728897,
0.003673644969239831,
0.009172462858259678,
-0.04195746034383774,
-0.014679387211799622,
-0.022036638110876083,
0.03568778559565544,
-0.11486250907182693,
-0.04808398336172104,
0.05468409135937691,
-0.010570904240012169,
0.05172036588191986,
-0.03988693654537201,
-0.04328148439526558,
0.017249882221221924,
0.03672182932496071,
-0.0014650606317445636,
0.02446526475250721,
0.04954614117741585,
0.0945778414607048,
0.013312937691807747,
0.09544112533330917,
-0.07027970999479294,
0.07199295610189438,
-0.012218011543154716,
0.05455303564667702,
0.007436950691044331,
0.03855888545513153,
0.0016319415299221873,
-0.04013568535447121,
0.04546615853905678,
-0.008132901974022388,
-0.05152910202741623,
-0.030870096758008003,
-0.0034845976624637842,
-0.028418205678462982,
-0.04955711588263512,
-0.013345318846404552,
-0.007834619842469692,
0.002139961114153266,
-0.05049027502536774,
-0.03953617811203003,
0.09353838115930557,
-0.010731318034231663,
-0.06353529542684555,
0.07134442031383514,
-0.0011507050367072225,
-0.012392503209412098,
-0.0441930890083313,
0.0021347098518162966,
0.10805267840623856,
0.034910231828689575,
-0.061451587826013565,
0.03573611378669739,
0.03393802046775818,
-0.013242511078715324,
-0.04094325751066208,
0.032241374254226685,
0.030771899968385696,
-0.004960543476045132,
-0.0014415603363886476,
0.05787352845072746,
0.0002619595325086266,
0.012393695302307606,
-0.013726653531193733,
0.01950083114206791,
-0.02348833903670311,
-0.020025203004479408,
-0.07041644304990768,
-0.0595891997218132,
4.37515397424606e-33,
0.0833023265004158,
0.06693945080041885,
0.030413292348384857,
0.03200585022568703,
0.029824301600456238,
-0.0054430230520665646,
0.0538884662091732,
0.009380195289850235,
-0.074514240026474,
-0.046924907714128494,
-0.021123096346855164,
-0.0551672987639904,
-0.04178065434098244,
0.15764835476875305,
-0.039573315531015396,
-0.04624281823635101,
-0.07845932245254517,
-0.03918322175741196,
-0.010450243018567562,
-0.012784354388713837,
0.07130105048418045,
0.025474635884165764,
0.06866449862718582,
0.01971849799156189,
-0.052667729556560516,
0.07690337300300598,
0.022689685225486755,
-0.0861017107963562,
-0.0030725274700671434,
0.019722087308764458,
-0.04856304079294205,
0.02223155088722706,
-0.011478778906166553,
-0.008780992589890957,
0.0443289689719677,
-0.011759569868445396,
-0.0012578408932313323,
-0.09279240667819977,
-0.04605037719011307,
-0.037940580397844315,
0.036300137639045715,
0.10936456173658371,
-0.004708589520305395,
-0.10118825733661652,
-0.0216426532715559,
0.008181718178093433,
0.03885241970419884,
0.05701572075486183,
-0.032015323638916016,
0.0400395505130291,
0.010344349779188633,
0.03946448862552643,
-0.11963973194360733,
-0.07946152240037918,
0.0057844859547913074,
0.061287347227334976,
0.08556029945611954,
0.08769601583480835,
-0.015198683366179466,
0.014171870425343513,
-0.030721504241228104,
0.022865332663059235,
0.003995934966951609,
0.036136556416749954,
0.026551060378551483,
-0.04316948354244232,
-0.05263889580965042,
-0.029400626197457314,
0.014885980635881424,
0.006679909769445658,
-0.07059726119041443,
0.03490555286407471,
-0.07722283154726028,
-0.05610106885433197,
-0.007275860756635666,
-0.03278909623622894,
0.007009269203990698,
-0.06855050474405289,
0.0028056867886334658,
0.038450952619314194,
-0.08458473533391953,
0.07022006809711456,
0.07125844806432724,
-0.07416039705276489,
0.03199387714266777,
-0.04420362040400505,
0.05720681697130203,
-0.03681931272149086,
0.012566283345222473,
-0.033424876630306244,
0.03006366826593876,
0.039509568363428116,
0.040998850017786026,
-0.0313212051987648,
0.0330299437046051,
-5.041655597742451e-33,
0.02545216865837574,
0.0296070147305727,
-0.04845471680164337,
0.0633644238114357,
-0.02471860684454441,
-0.06313928216695786,
0.08394337445497513,
0.09103401750326157,
0.02507719211280346,
-0.06744968891143799,
0.027398595586419106,
-0.010251768864691257,
0.02244795858860016,
0.023587528616189957,
0.09400515258312225,
-0.04652310162782669,
-0.003406423144042492,
-0.07614109665155411,
-0.025988629087805748,
0.07439064234495163,
-0.011705512180924416,
0.10085190087556839,
-0.06241736188530922,
0.00041276763658970594,
-0.09206724911928177,
-0.008551755920052528,
0.033500220626592636,
0.07328486442565918,
0.007236761506646872,
-0.014070648699998856,
0.056041765958070755,
-0.046897225081920624,
0.007284130435436964,
0.05591806024312973,
-0.028993794694542885,
0.017409643158316612,
-0.0005094957887195051,
-0.02497986890375614,
0.02541830576956272,
0.10782013833522797,
0.0416378527879715,
0.02316936105489731,
-0.0860382542014122,
-0.0032133322674781084,
-0.0629057064652443,
0.0001545573177281767,
-0.030658232048153877,
-0.021659070625901222,
-0.07588130235671997,
-0.04730072617530823,
-0.020091542974114418,
-0.06071717292070389,
-0.11733052134513855,
-0.06170446798205376,
-0.05681747943162918,
-0.056252993643283844,
0.030477866530418396,
-0.0030541950836777687,
0.026006419211626053,
-0.05028381571173668,
-0.03544291853904724,
0.0074637592770159245,
0.009738176129758358,
-0.12842199206352234,
0.02697550505399704,
0.01649986393749714,
-0.03821534663438797,
0.00901523046195507,
0.04234594479203224,
-0.04394828528165817,
0.06386459618806839,
0.06651908904314041,
-0.012864556163549423,
0.018899202346801758,
0.027685249224305153,
-0.0532551184296608,
-0.037027422338724136,
-0.01432209275662899,
0.040101706981658936,
-0.0785035565495491,
-0.041976846754550934,
-0.035746876150369644,
0.03942808136343956,
0.08643773943185806,
0.10652787983417511,
0.07790560275316238,
-0.0020060038659721613,
0.08671467751264572,
0.06450968980789185,
0.019597409293055534,
-0.05508919432759285,
0.06577753275632858,
0.03195532038807869,
0.07136902958154678,
0.023993056267499924,
-5.891940801916462e-8,
-0.09921502321958542,
0.02840089239180088,
-0.07298306375741959,
-0.003963018301874399,
-0.025138448923826218,
-0.03454944118857384,
0.022396158427000046,
-0.00736774830147624,
-0.0419352762401104,
0.05794103443622589,
-0.00017648444918449968,
0.03022090718150139,
-0.02343391813337803,
-0.023144356906414032,
0.004892510361969471,
0.05662752315402031,
0.004483249969780445,
0.12035337090492249,
-0.04544133320450783,
-0.01830589771270752,
0.012751702219247818,
-0.008673192001879215,
0.014432895928621292,
0.037881214171648026,
-0.022170471027493477,
-0.035830121487379074,
-0.0823817327618599,
0.016460008919239044,
0.0015916487900540233,
-0.06554755568504333,
0.02622073143720627,
0.05067818984389305,
0.006759302224963903,
-0.007717597298324108,
0.044877469539642334,
0.04757482185959816,
-0.04185263440012932,
-0.05498857423663139,
0.03582644462585449,
0.06519614905118942,
0.11542881280183792,
0.04760456457734108,
-0.04104326665401459,
-0.0003075118875131011,
0.015073995105922222,
0.06225929036736488,
-0.030819321051239967,
-0.17169125378131866,
-0.024821706116199493,
0.05584977939724922,
0.008937422186136246,
0.011644398793578148,
-0.07460170984268188,
0.132074236869812,
0.012579103000462055,
0.01163046807050705,
0.04457269236445427,
-0.065635547041893,
0.015121936798095703,
0.06678646802902222,
0.03477565199136734,
0.006482437718659639,
0.03388194739818573,
-0.0032977727241814137
] |
speechbrain/asr-crdnn-rnnlm-librispeech | d9760a0bef6c6718d30ad1271f7d05980d435677 | 2021-11-30T00:37:56.000Z | [
"en",
"dataset:librispeech",
"arxiv:2106.04624",
"speechbrain",
"automatic-speech-recognition",
"CTC",
"Attention",
"pytorch",
"license:apache-2.0"
] | automatic-speech-recognition | false | speechbrain | null | speechbrain/asr-crdnn-rnnlm-librispeech | 1,276 | 7 | speechbrain | ---
language: "en"
thumbnail:
tags:
- automatic-speech-recognition
- CTC
- Attention
- pytorch
- speechbrain
license: "apache-2.0"
datasets:
- librispeech
metrics:
- wer
- cer
---
<iframe src="https://ghbtns.com/github-btn.html?user=speechbrain&repo=speechbrain&type=star&count=true&size=large&v=2" frameborder="0" scrolling="0" width="170" height="30" title="GitHub"></iframe>
<br/><br/>
# CRDNN with CTC/Attention and RNNLM trained on LibriSpeech
This repository provides all the necessary tools to perform automatic speech
recognition from an end-to-end system pretrained on LibriSpeech (EN) within
SpeechBrain. For a better experience, we encourage you to learn more about
[SpeechBrain](https://speechbrain.github.io).
The performance of the model is the following:
| Release | Test WER | GPUs |
|:-------------:|:--------------:| :--------:|
| 20-05-22 | 3.09 | 1xV100 32GB |
## Pipeline description
This ASR system is composed of 3 different but linked blocks:
- Tokenizer (unigram) that transforms words into subword units, trained on
the training transcriptions of LibriSpeech.
- Neural language model (RNNLM) trained on the full 10M-word dataset.
- Acoustic model (CRDNN + CTC/Attention). The CRDNN architecture is made of
N blocks of convolutional neural networks with normalisation and pooling in the
frequency domain. Then, a bidirectional LSTM is connected to a final DNN to obtain
the final acoustic representation that is given to the CTC and attention decoders.
The system is trained with recordings sampled at 16kHz (single channel).
The code will automatically normalize your audio (i.e., resampling + mono channel selection) when calling *transcribe_file* if needed.
## Install SpeechBrain
First of all, please install SpeechBrain with the following command:
```
pip install speechbrain
```
Please note that we encourage you to read our tutorials and learn more about
[SpeechBrain](https://speechbrain.github.io).
### Transcribing your own audio files (in English)
```python
from speechbrain.pretrained import EncoderDecoderASR
asr_model = EncoderDecoderASR.from_hparams(source="speechbrain/asr-crdnn-rnnlm-librispeech", savedir="pretrained_models/asr-crdnn-rnnlm-librispeech")
asr_model.transcribe_file('speechbrain/asr-crdnn-rnnlm-librispeech/example.wav')
```
### Inference on GPU
To perform inference on the GPU, add `run_opts={"device":"cuda"}` when calling the `from_hparams` method.
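For example, reusing the call from the snippet above:
```python
from speechbrain.pretrained import EncoderDecoderASR

asr_model = EncoderDecoderASR.from_hparams(
    source="speechbrain/asr-crdnn-rnnlm-librispeech",
    savedir="pretrained_models/asr-crdnn-rnnlm-librispeech",
    run_opts={"device": "cuda"},
)
```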
## Parallel Inference on a Batch
Please [see this Colab notebook](https://colab.research.google.com/drive/1hX5ZI9S4jHIjahFCZnhwwQmFoGAi3tmu?usp=sharing) to figure out how to transcribe a batch of input audio files in parallel using a pre-trained model.
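For a rough sketch of the idea (the wav paths are placeholders, and the helper names used here, `load_audio` and `transcribe_batch`, are assumptions based on recent SpeechBrain releases; the notebook above is the authoritative reference):
```python
import torch
from speechbrain.pretrained import EncoderDecoderASR

asr_model = EncoderDecoderASR.from_hparams(
    source="speechbrain/asr-crdnn-rnnlm-librispeech",
    savedir="pretrained_models/asr-crdnn-rnnlm-librispeech",
)

# load a few recordings (placeholder paths), pad them to the same length,
# and keep their relative lengths so decoding can ignore the padding
sigs = [asr_model.load_audio(path) for path in ["audio1.wav", "audio2.wav"]]
lens = torch.tensor([sig.shape[0] for sig in sigs], dtype=torch.float)
batch = torch.nn.utils.rnn.pad_sequence(sigs, batch_first=True)
predicted_words, predicted_tokens = asr_model.transcribe_batch(batch, lens / lens.max())
print(predicted_words)
```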
### Training
The model was trained with SpeechBrain (Commit hash: '2abd9f01').
To train it from scratch, follow these steps:
1. Clone SpeechBrain:
```bash
git clone https://github.com/speechbrain/speechbrain/
```
2. Install it:
```bash
cd speechbrain
pip install -r requirements.txt
pip install -e .
```
3. Run Training:
```bash
cd recipes/LibriSpeech/ASR/seq2seq/
python train.py hparams/train_BPE_1000.yaml --data_folder=your_data_folder
```
You can find our training results (models, logs, etc.) [here](https://drive.google.com/drive/folders/1SAndjcThdkO-YQF8kvwPOXlQ6LMT71vt?usp=sharing).
### Limitations
The SpeechBrain team does not provide any warranty on the performance achieved by this model when used on other datasets.
# **About SpeechBrain**
- Website: https://speechbrain.github.io/
- Code: https://github.com/speechbrain/speechbrain/
- HuggingFace: https://huggingface.co/speechbrain/
# **Citing SpeechBrain**
Please cite SpeechBrain if you use it for your research or business.
```bibtex
@misc{speechbrain,
title={{SpeechBrain}: A General-Purpose Speech Toolkit},
author={Mirco Ravanelli and Titouan Parcollet and Peter Plantinga and Aku Rouhe and Samuele Cornell and Loren Lugosch and Cem Subakan and Nauman Dawalatabad and Abdelwahab Heba and Jianyuan Zhong and Ju-Chieh Chou and Sung-Lin Yeh and Szu-Wei Fu and Chien-Feng Liao and Elena Rastorgueva and François Grondin and William Aris and Hwidong Na and Yan Gao and Renato De Mori and Yoshua Bengio},
year={2021},
eprint={2106.04624},
archivePrefix={arXiv},
primaryClass={eess.AS},
note={arXiv:2106.04624}
}
``` | [
-0.1341107040643692,
-0.15978728234767914,
-0.0145663321018219,
-0.09611143171787262,
-0.012745773419737816,
0.06049976870417595,
-0.00040567395626567304,
-0.036050617694854736,
-0.08299211412668228,
-0.0641578882932663,
-0.0019767044577747583,
-0.08641182631254196,
-0.05679861828684807,
0.015438560396432877,
-0.0568784698843956,
-0.022313518449664116,
0.04537506401538849,
0.03712044656276703,
-0.003731468692421913,
-0.0782732218503952,
0.045500338077545166,
0.05410442501306534,
0.07836426794528961,
-0.012756369076669216,
0.05148698762059212,
0.03979821130633354,
-0.026333430781960487,
-0.04442871734499931,
0.03720352426171303,
0.014540123753249645,
0.1019027829170227,
-0.048789165914058685,
0.11085920035839081,
0.02623622491955757,
-0.050023678690195084,
0.01941872201859951,
-0.01666872575879097,
-0.04393699765205383,
-0.057696469128131866,
-0.02032485418021679,
-0.027765393257141113,
0.02694137766957283,
-0.023784609511494637,
0.002657255157828331,
0.025386761873960495,
-0.0517132468521595,
-0.020478172227740288,
-0.039887405931949615,
0.032534852623939514,
-0.019201163202524185,
-0.05441896244883537,
-0.11169790476560593,
0.05136513710021973,
0.045203398913145065,
-0.07063508778810501,
0.04939389228820801,
0.035813894122838974,
0.00045132730156183243,
0.025393636897206306,
0.003308720886707306,
-0.018914425745606422,
-0.002915957011282444,
0.004106725566089153,
0.013309248723089695,
-0.06831914931535721,
0.003747879294678569,
0.023389600217342377,
0.02123168669641018,
0.03739476203918457,
-0.04175744205713272,
-0.1248665526509285,
0.07737623155117035,
0.005536343436688185,
0.029200036078691483,
0.0034082592464983463,
-0.001559200114570558,
0.06908577680587769,
-0.0046629211865365505,
0.07313273847103119,
-0.07652068138122559,
0.024572646245360374,
-0.05880654230713844,
0.04618178308010101,
0.011996551416814327,
0.08459898829460144,
-0.032793596386909485,
0.009128320962190628,
0.06363099068403244,
0.009337401017546654,
-0.020784558728337288,
-0.027043012902140617,
-0.029277021065354347,
-0.05105281621217728,
-0.013589812442660332,
0.01829603873193264,
0.06317707151174545,
0.001890338258817792,
-0.0009723814437165856,
-0.04682553559541702,
0.0811232179403305,
-0.013891994953155518,
-0.02242700383067131,
0.00836451817303896,
-0.09119558334350586,
-0.03371020779013634,
-0.009654694236814976,
-0.046837855130434036,
0.0705091655254364,
0.07667099684476852,
-0.04691970348358154,
0.030616622418165207,
0.008947275578975677,
0.01708623580634594,
-0.06908795982599258,
0.07433146238327026,
0.033596426248550415,
-0.05336001515388489,
-0.01373685896396637,
0.12021075189113617,
0.012269985862076283,
-0.06586720049381256,
0.00428555766120553,
-0.055804651230573654,
0.03952448070049286,
0.09192848950624466,
-0.04405055567622185,
-0.04790221527218819,
6.918122943898993e-33,
0.09004910290241241,
-0.0008527708705514669,
-0.006027857307344675,
0.02645360864698887,
-0.02674180641770363,
-0.08046349883079529,
-0.0631435438990593,
-0.005978457164019346,
-0.08175387978553772,
-0.05675193667411804,
0.048577070236206055,
-0.013945738784968853,
-0.05893418565392494,
0.04033366218209267,
-0.018029171973466873,
-0.0020969524048268795,
-0.04115917161107063,
-0.012591277249157429,
0.040789056569337845,
-0.05988307669758797,
0.02011929452419281,
0.010170631110668182,
0.0355784110724926,
0.05825275927782059,
0.049243029206991196,
0.049618788063526154,
0.05074721947312355,
-0.012699363753199577,
-0.008756470866501331,
0.0423586331307888,
-0.025301922112703323,
-0.05595369637012482,
0.003952678292989731,
0.027267687022686005,
0.032063793390989304,
-0.06093214452266693,
-0.024175286293029785,
0.0011366737307980657,
-0.045736540108919144,
-0.060260917991399765,
0.011516359634697437,
-0.001133115729317069,
0.0010289252968505025,
-0.02438192442059517,
-0.05762684345245361,
-0.06694188714027405,
0.03858306258916855,
0.06644643098115921,
0.11231499910354614,
-0.014422687701880932,
0.006627839524298906,
0.005186986178159714,
-0.04080375283956528,
0.002515046391636133,
-0.05597653239965439,
0.020883213728666306,
0.06660086661577225,
0.08825702220201492,
0.06399547308683395,
-0.03157316893339157,
0.007582385092973709,
-0.00793414656072855,
0.058956123888492584,
-0.032809071242809296,
0.04348357021808624,
-0.03806091099977493,
-0.08604775369167328,
-0.001799202524125576,
0.021567946299910545,
0.029214872047305107,
-0.04809512570500374,
-0.037794336676597595,
0.08191655576229095,
0.07432745397090912,
0.01999637484550476,
0.04075828567147255,
0.07503499835729599,
-0.08628112822771072,
-0.04983186721801758,
0.05247330293059349,
-0.042966172099113464,
0.00878186896443367,
0.03911006078124046,
-0.07704977691173553,
0.03252263739705086,
-0.05279775336384773,
0.04414188116788864,
-0.08101695030927658,
-0.0193194467574358,
0.020229240879416466,
0.01138625293970108,
0.021678464487195015,
-0.04625162482261658,
0.016159584745764732,
-0.033358823508024216,
-7.560465954434078e-33,
-0.026998290792107582,
0.12693671882152557,
-0.05302437022328377,
0.10915910452604294,
-0.020378025248646736,
0.027801254764199257,
0.035946283489465714,
0.06855365633964539,
-0.006390849128365517,
-0.06441207975149155,
0.02331581711769104,
-0.023308223113417625,
0.06947678327560425,
-0.0622398741543293,
0.0760495588183403,
0.03470258414745331,
0.028706876561045647,
0.0022627736907452345,
-0.03812365233898163,
0.06666981428861618,
0.030213220044970512,
0.08633654564619064,
-0.027773113921284676,
0.043970249593257904,
-0.03689708560705185,
-0.029462577775120735,
-0.04865608364343643,
0.04873650148510933,
-0.008055374026298523,
-0.026127135381102562,
-0.019208624958992004,
0.011739159002900124,
-0.09452375769615173,
0.014243456535041332,
-0.09191951900720596,
0.011703304015100002,
0.03245149180293083,
-0.045353420078754425,
0.04726636782288551,
0.06820448487997055,
0.1529669314622879,
0.03656008094549179,
-0.07131416350603104,
-0.06986571848392487,
0.016909465193748474,
-0.01322983019053936,
-0.10917235910892487,
0.0547533743083477,
-0.041944485157728195,
0.005260842386633158,
-0.05319046974182129,
0.014327209442853928,
0.03711751475930214,
-0.016602057963609695,
0.017711453139781952,
-0.01747772842645645,
0.025469541549682617,
0.005960430949926376,
0.02684035338461399,
0.005789975635707378,
-0.07361844182014465,
-0.06837736070156097,
-0.03860590234398842,
-0.032002080231904984,
-0.006251813843846321,
0.013783673755824566,
-0.03394652530550957,
0.0765872672200203,
-0.01838047057390213,
-0.0279424786567688,
0.029195772483944893,
0.014679303392767906,
0.025722123682498932,
-0.0034829287324100733,
-0.018443038687109947,
-0.011034159921109676,
-0.05585530027747154,
-0.011859487742185593,
-0.01865328662097454,
-0.09629184007644653,
0.03260192647576332,
0.05860171839594841,
0.08092010021209717,
-0.03416501358151436,
0.0588713176548481,
0.104792021214962,
0.012934474274516106,
0.0077280751429498196,
0.07642323523759842,
-0.035478025674819946,
-0.04460487887263298,
0.09479784220457077,
0.018615256994962692,
0.02671099454164505,
0.06231382489204407,
-5.669478042591436e-8,
-0.05248142406344414,
-0.006553165148943663,
-0.009605561383068562,
-0.05022832378745079,
-0.013349107466638088,
-0.09367837011814117,
0.029124321416020393,
0.08738125115633011,
-0.03681325539946556,
-0.0074566686525940895,
0.05986887961626053,
-0.03227824717760086,
-0.054018981754779816,
0.05309579148888588,
-0.04121086001396179,
0.0831015333533287,
-0.03573203086853027,
0.08128365874290466,
-0.025376925244927406,
-0.1264640986919403,
0.05772200599312782,
0.00660800002515316,
0.04805661365389824,
0.0687779113650322,
0.003082167124375701,
-0.026445429772138596,
-0.021907636895775795,
0.07245334982872009,
-0.04857384413480759,
-0.05492645129561424,
-0.0326380580663681,
0.03138943389058113,
-0.01763305254280567,
-0.10021377354860306,
0.07619910687208176,
0.10345577448606491,
-0.09068142622709274,
-0.04631733521819115,
-0.0032930560410022736,
0.0961199402809143,
0.032413311302661896,
0.04836760833859444,
-0.04281264916062355,
0.006151105742901564,
0.0029600660782307386,
0.030185891315340996,
-0.011674760840833187,
-0.08044514805078506,
0.018911991268396378,
-0.022694263607263565,
0.10131272673606873,
0.0039796363562345505,
-0.02254820615053177,
-0.03451572358608246,
0.02211173065006733,
0.054624661803245544,
-0.010631715878844261,
0.0059087336994707584,
0.07090732455253601,
0.02816750295460224,
-0.03194762021303177,
0.05637337639927864,
-0.07457030564546585,
-0.07394081354141235
] |
DaisyMak/bert-finetuned-squad-accelerate-10epoch_transformerfrozen | 38a0fbdddcb26bedfc182590a24ebc9a843832c3 | 2022-02-02T21:30:47.000Z | [
"pytorch",
"bert",
"question-answering",
"transformers",
"autotrain_compatible"
] | question-answering | false | DaisyMak | null | DaisyMak/bert-finetuned-squad-accelerate-10epoch_transformerfrozen | 1,275 | null | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
facebook/wav2vec2-base-100h | 9c1fef36b62a428a658e5b022ef9f21b38f47e0b | 2022-05-27T16:32:50.000Z | [
"pytorch",
"wav2vec2",
"automatic-speech-recognition",
"en",
"dataset:librispeech_asr",
"arxiv:2006.11477",
"transformers",
"audio",
"license:apache-2.0"
] | automatic-speech-recognition | false | facebook | null | facebook/wav2vec2-base-100h | 1,268 | 1 | transformers | ---
language: en
datasets:
- librispeech_asr
tags:
- audio
- automatic-speech-recognition
license: apache-2.0
---
# Wav2Vec2-Base-100h
[Facebook's Wav2Vec2](https://ai.facebook.com/blog/wav2vec-20-learning-the-structure-of-speech-from-raw-audio/)
This is the base model, pretrained on and fine-tuned with 100 hours of Librispeech 16 kHz sampled speech audio. When using the model,
make sure that your speech input is also sampled at 16 kHz.
[Paper](https://arxiv.org/abs/2006.11477)
Authors: Alexei Baevski, Henry Zhou, Abdelrahman Mohamed, Michael Auli
**Abstract**
We show for the first time that learning powerful representations from speech audio alone followed by fine-tuning on transcribed speech can outperform the best semi-supervised methods while being conceptually simpler. wav2vec 2.0 masks the speech input in the latent space and solves a contrastive task defined over a quantization of the latent representations which are jointly learned. Experiments using all labeled data of Librispeech achieve 1.8/3.3 WER on the clean/other test sets. When lowering the amount of labeled data to one hour, wav2vec 2.0 outperforms the previous state of the art on the 100 hour subset while using 100 times less labeled data. Using just ten minutes of labeled data and pre-training on 53k hours of unlabeled data still achieves 4.8/8.2 WER. This demonstrates the feasibility of speech recognition with limited amounts of labeled data.
The original model can be found under https://github.com/pytorch/fairseq/tree/master/examples/wav2vec#wav2vec-20.
# Usage
To transcribe audio files the model can be used as a standalone acoustic model as follows:
```python
from transformers import Wav2Vec2Processor, Wav2Vec2ForCTC
from datasets import load_dataset
import soundfile as sf
import torch
# load model and processor
processor = Wav2Vec2Processor.from_pretrained("facebook/wav2vec2-base-100h")
model = Wav2Vec2ForCTC.from_pretrained("facebook/wav2vec2-base-100h")
# define function to read in sound file
def map_to_array(batch):
speech, _ = sf.read(batch["file"])
batch["speech"] = speech
return batch
# load dummy dataset and read soundfiles
ds = load_dataset("patrickvonplaten/librispeech_asr_dummy", "clean", split="validation")
ds = ds.map(map_to_array)
# tokenize
input_values = processor(ds[0]["speech"], sampling_rate=16_000, return_tensors="pt", padding="longest").input_values  # Batch size 1
# retrieve logits
logits = model(input_values).logits
# take argmax and decode
predicted_ids = torch.argmax(logits, dim=-1)
transcription = processor.batch_decode(predicted_ids)
```
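If your audio is not already sampled at 16 kHz, resample it before calling the processor. Below is a minimal sketch using `torchaudio` (an assumption — any resampling library works); the file name is a placeholder, and `processor`/`model` are the objects loaded above:
```python
import torchaudio

# load a local file (placeholder path) and resample to 16 kHz if needed
waveform, sample_rate = torchaudio.load("my_audio.wav")
if sample_rate != 16_000:
    waveform = torchaudio.functional.resample(waveform, orig_freq=sample_rate, new_freq=16_000)

# take the first channel so the feature extractor receives a 1-D array
input_values = processor(waveform[0].numpy(), sampling_rate=16_000,
                         return_tensors="pt", padding="longest").input_values
logits = model(input_values).logits
```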
## Evaluation
This code snippet shows how to evaluate **facebook/wav2vec2-base-100h** on LibriSpeech's "clean" and "other" test data.
```python
from datasets import load_dataset
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
import soundfile as sf
import torch
from jiwer import wer
librispeech_eval = load_dataset("librispeech_asr", "clean", split="test")
model = Wav2Vec2ForCTC.from_pretrained("facebook/wav2vec2-base-100h").to("cuda")
processor = Wav2Vec2Processor.from_pretrained("facebook/wav2vec2-base-100h")
def map_to_pred(batch):
    # batched=True with batch_size=1, so batch["audio"] is a list holding a single example
    input_values = processor(batch["audio"][0]["array"], sampling_rate=16_000, return_tensors="pt", padding="longest").input_values
    with torch.no_grad():
        logits = model(input_values.to("cuda")).logits
    predicted_ids = torch.argmax(logits, dim=-1)
    transcription = processor.batch_decode(predicted_ids)
    batch["transcription"] = transcription
    return batch
result = librispeech_eval.map(map_to_pred, batched=True, batch_size=1, remove_columns=["audio"])
print("WER:", wer(result["text"], result["transcription"]))
```
*Result (WER)*:
| "clean" | "other" |
|---|---|
| 6.1 | 13.5 |
| [
-0.0784335732460022,
-0.12503460049629211,
-0.009250065311789513,
-0.04507138952612877,
0.01946551911532879,
0.02893015556037426,
-0.045234717428684235,
-0.07532016187906265,
-0.057044874876737595,
-0.04094746336340904,
-0.0750444233417511,
-0.07954437285661697,
-0.022203583270311356,
0.03422686830163002,
-0.03787141293287277,
-0.04641837254166603,
0.10429814457893372,
0.01875521056354046,
-0.07448900490999222,
-0.06156984344124794,
0.05114908516407013,
0.03194200247526169,
0.07044503837823868,
-0.021684667095541954,
0.08447320014238358,
0.04957069084048271,
-0.002860765438526869,
-0.009599435143172741,
0.04046785458922386,
-0.023639250546693802,
0.12063382565975189,
0.03653035685420036,
0.07583153992891312,
-0.03113778866827488,
-0.058982767164707184,
-0.027258463203907013,
-0.003698687069118023,
-0.020017780363559723,
-0.006643988192081451,
-0.030105087906122208,
0.02269320748746395,
0.04692983254790306,
-0.06262736022472382,
0.0033789225853979588,
-0.0067676156759262085,
-0.02803611382842064,
-0.012192519381642342,
-0.027048293501138687,
-0.014081988483667374,
0.08082473278045654,
-0.05305173993110657,
-0.01729019172489643,
0.005358655471354723,
0.1019546166062355,
-0.13189689815044403,
0.02349231392145157,
0.01982009969651699,
0.04957348108291626,
0.06832244247198105,
0.0052292668260633945,
-0.044476330280303955,
-0.0712590366601944,
0.009094303473830223,
-0.017277633771300316,
-0.04203511402010918,
-0.056307122111320496,
-0.018684759736061096,
0.00007992557220859453,
0.030432408675551414,
-0.0037866313941776752,
-0.09817762672901154,
0.10882416367530823,
0.030090129002928734,
0.09250180423259735,
0.028471417725086212,
-0.031434565782547,
0.12058607488870621,
0.014872883446514606,
0.08369582146406174,
-0.10045459866523743,
0.04974031075835228,
-0.03963325917720795,
0.06778354197740555,
-0.0211970042437315,
0.04129014164209366,
0.004343539942055941,
0.00005221748142503202,
-0.012532182969152927,
-0.01070092897862196,
-0.03250231593847275,
-0.035331957042217255,
-0.023957572877407074,
-0.02941879816353321,
0.01639453135430813,
0.03653183579444885,
0.06267698109149933,
0.045970410108566284,
0.04788867011666298,
0.03977620229125023,
0.07003016769886017,
0.009684070013463497,
0.02401006408035755,
-0.007446350995451212,
-0.06820371001958847,
-0.012749788351356983,
-0.06791725754737854,
-0.020187603309750557,
0.04150326922535896,
0.04181896150112152,
-0.04010654613375664,
0.04560942202806473,
0.0416567400097847,
0.011333568021655083,
0.007162386551499367,
0.11652126163244247,
0.030719252303242683,
-0.00386353419162333,
-0.07551680505275726,
0.007434159051626921,
0.029076602309942245,
-0.07059840857982635,
0.012430663220584393,
-0.02033522166311741,
0.00742277642711997,
0.0511600635945797,
-0.03529797121882439,
-0.02269761450588703,
2.403434300461473e-33,
-0.01131067518144846,
0.056683510541915894,
-0.0011002093087881804,
0.007174170576035976,
-0.0142232496291399,
-0.09406273812055588,
-0.04038005322217941,
0.0682031586766243,
-0.02663993090391159,
-0.00820000097155571,
0.016455259174108505,
-0.04660388082265854,
-0.03298991546034813,
0.07421577721834183,
0.0036595764104276896,
0.020241111516952515,
-0.060849279165267944,
-0.002887370064854622,
-0.01643328368663788,
-0.029938871040940285,
0.08975786715745926,
0.012787030078470707,
0.06032135710120201,
0.02022923156619072,
0.06026136502623558,
0.01143824402242899,
0.05846206098794937,
-0.03316177800297737,
0.03203367814421654,
0.04440085589885712,
-0.06376466900110245,
-0.08799286931753159,
-0.004575709346681833,
-0.023965075612068176,
0.023969998583197594,
-0.0002063772117253393,
0.03751309588551521,
-0.010613150894641876,
-0.05134263262152672,
-0.11815661191940308,
0.013003875501453876,
0.013185598887503147,
0.05807410925626755,
-0.05551043152809143,
-0.07246360927820206,
-0.1123887151479721,
-0.00004470213389140554,
0.021507224068045616,
-0.004507942590862513,
0.005061564967036247,
0.015874598175287247,
0.019466016441583633,
-0.07711703330278397,
0.026100607588887215,
-0.04713928699493408,
0.014387923292815685,
0.10786787420511246,
0.036705248057842255,
0.06731623411178589,
0.024156490340828896,
0.019249822944402695,
-0.0005551492213271558,
0.006252074148505926,
0.02653225138783455,
0.0416380874812603,
-0.05725329741835594,
-0.010941061191260815,
0.05982166528701782,
-0.009311683475971222,
-0.016183802857995033,
0.010784563608467579,
-0.026592427864670753,
0.06466389447450638,
0.01146706286817789,
-0.0009954646229743958,
-0.012013250961899757,
0.06853727251291275,
-0.06708627194166183,
-0.04597163572907448,
0.06865903735160828,
-0.012064401991665363,
0.039823539555072784,
-0.006520165130496025,
-0.08273594081401825,
-0.047991544008255005,
-0.08613904565572739,
0.02253764122724533,
-0.1507454663515091,
-0.056155577301979065,
-0.02252676896750927,
-0.07747410237789154,
0.03682008013129234,
-0.01112416572868824,
-0.024054396897554398,
-0.02462312951683998,
-3.918353930780642e-33,
0.023749858140945435,
0.09996973723173141,
-0.01423583459109068,
0.11382883042097092,
-0.025273175910115242,
-0.011274771764874458,
0.09544021636247635,
0.0681089386343956,
-0.038177892565727234,
-0.06250043958425522,
0.06550154089927673,
-0.06908556073904037,
0.10752733796834946,
-0.027682757005095482,
0.07328092306852341,
-0.03831425681710243,
0.007623143959790468,
0.0308273583650589,
0.06219446286559105,
0.07575187087059021,
0.0384354293346405,
0.07291337847709656,
0.011836372315883636,
0.06611373275518417,
-0.01926971971988678,
-0.028178982436656952,
-0.029154138639569283,
0.07908426970243454,
-0.03190336003899574,
0.030037065967917442,
-0.04938703030347824,
-0.011238626204431057,
-0.08122007548809052,
-0.010902647860348225,
-0.04706123098731041,
0.026449458673596382,
-0.004389434587210417,
-0.032637037336826324,
0.0050622159615159035,
0.033630888909101486,
0.07408912479877472,
0.09136475622653961,
-0.07714183628559113,
-0.09862937778234482,
-0.021320316940546036,
-0.07084745913743973,
-0.12479079514741898,
0.019520755857229233,
0.02481580339372158,
-0.03228368982672691,
0.04713773354887962,
0.01205865852534771,
0.0309035312384367,
0.03890244662761688,
-0.012976057827472687,
-0.011178579181432724,
-0.012727160006761551,
-0.03904930502176285,
0.03278383985161781,
0.007650141138583422,
-0.04787778481841087,
0.010875251144170761,
-0.05449085682630539,
-0.07837602496147156,
0.01168906781822443,
-0.008629578165709972,
-0.03837181255221367,
0.03156207874417305,
0.058000825345516205,
-0.017648542299866676,
-0.014737667515873909,
-0.006181109230965376,
0.008768603205680847,
0.07945078611373901,
-0.08378306776285172,
-0.03887476399540901,
-0.03649219870567322,
-0.02946697175502777,
-0.06222183629870415,
-0.07212574779987335,
-0.01321684755384922,
0.03237813711166382,
0.044932495802640915,
-0.013074840418994427,
0.07099071890115738,
0.08261174708604813,
-0.04564324766397476,
-0.0605856254696846,
-0.010268772020936012,
0.002444703131914139,
0.004006830044090748,
0.022839408367872238,
0.019091444090008736,
0.06173764914274216,
0.012184126302599907,
-5.5711609547870466e-8,
-0.09442111849784851,
0.038384851068258286,
0.05182543024420738,
-0.06535301357507706,
-0.01595502346754074,
-0.14495116472244263,
-0.016495106741786003,
0.0056466953828930855,
0.017484214156866074,
-0.019073452800512314,
0.08126331120729446,
-0.06475791335105896,
-0.00409049354493618,
0.03627907112240791,
0.01949138008058071,
0.059760406613349915,
0.04667678475379944,
0.04289783537387848,
-0.031405139714479446,
-0.08666335046291351,
0.0726463571190834,
0.03832513466477394,
0.018851321190595627,
0.033379945904016495,
0.08874402940273285,
0.0031005998607724905,
-0.0022759544663131237,
0.05745052173733711,
0.01734134368598461,
-0.025346077978610992,
-0.07566990703344345,
0.04480668529868126,
-0.02953246608376503,
-0.06648114323616028,
0.06114144250750542,
0.03719157725572586,
-0.05971083790063858,
-0.0699707567691803,
-0.07170011848211288,
0.07196681946516037,
0.022871142253279686,
0.1177884042263031,
-0.08262848854064941,
-0.056906793266534805,
0.014545255340635777,
0.0029659937135875225,
-0.024672195315361023,
-0.0687776431441307,
0.023614292964339256,
0.0341653935611248,
0.031125163659453392,
0.058870695531368256,
-0.002238156972452998,
-0.046201858669519424,
0.0656801238656044,
0.07126074284315109,
-0.04332844167947769,
0.0005308649851940572,
0.010121291503310204,
-0.004256731364876032,
-0.00727841816842556,
0.07690206915140152,
-0.054638080298900604,
-0.026039687916636467
] |
pritamdeka/S-BioBert-snli-multinli-stsb | 3ab11e57f285f37c31648373a5cb6bf0da5c7362 | 2022-03-11T12:35:08.000Z | [
"pytorch",
"bert",
"feature-extraction",
"sentence-transformers",
"sentence-similarity",
"transformers"
] | sentence-similarity | false | pritamdeka | null | pritamdeka/S-BioBert-snli-multinli-stsb | 1,268 | 0 | sentence-transformers | ---
pipeline_tag: sentence-similarity
tags:
- sentence-transformers
- feature-extraction
- sentence-similarity
- transformers
---
# S-BioBert-snli-multinli-stsb
This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
<!--- Describe your model here -->
## Usage (Sentence-Transformers)
Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed:
```
pip install -U sentence-transformers
```
Then you can use the model like this:
```python
from sentence_transformers import SentenceTransformer
sentences = ["This is an example sentence", "Each sentence is converted"]
model = SentenceTransformer('pritamdeka/S-BioBert-snli-multinli-stsb')
embeddings = model.encode(sentences)
print(embeddings)
```
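The embeddings can then be compared directly, for example for the clustering or semantic-search use cases mentioned above. A minimal sketch (the query and corpus sentences are made-up examples; `util.cos_sim` ships with recent sentence-transformers releases, older ones expose it as `util.pytorch_cos_sim`):
```python
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer('pritamdeka/S-BioBert-snli-multinli-stsb')

query = "Aspirin reduces the risk of heart attack"  # example query
corpus = [
    "Acetylsalicylic acid lowers the probability of myocardial infarction",
    "The weather is sunny today",
]

# encode query and corpus, then rank the corpus sentences by cosine similarity
query_emb = model.encode(query, convert_to_tensor=True)
corpus_emb = model.encode(corpus, convert_to_tensor=True)
scores = util.cos_sim(query_emb, corpus_emb)[0]
for sentence, score in zip(corpus, scores):
    print(f"{score.item():.3f}\t{sentence}")
```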
## Usage (HuggingFace Transformers)
Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling operation on top of the contextualized word embeddings.
```python
from transformers import AutoTokenizer, AutoModel
import torch
#Mean Pooling - Take attention mask into account for correct averaging
def mean_pooling(model_output, attention_mask):
token_embeddings = model_output[0] #First element of model_output contains all token embeddings
input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9)
# Sentences we want sentence embeddings for
sentences = ['This is an example sentence', 'Each sentence is converted']
# Load model from HuggingFace Hub
tokenizer = AutoTokenizer.from_pretrained('pritamdeka/S-BioBert-snli-multinli-stsb')
model = AutoModel.from_pretrained('pritamdeka/S-BioBert-snli-multinli-stsb')
# Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
# Compute token embeddings
with torch.no_grad():
model_output = model(**encoded_input)
# Perform pooling. In this case, mean pooling.
sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
print("Sentence embeddings:")
print(sentence_embeddings)
```
## Evaluation Results
<!--- Describe how your model was evaluated -->
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name=pritamdeka/S-BioBert-snli-multinli-stsb)
## Training
The model was trained with the parameters:
**DataLoader**:
`torch.utils.data.dataloader.DataLoader` of length 90 with parameters:
```
{'batch_size': 64, 'sampler': 'torch.utils.data.sampler.RandomSampler', 'batch_sampler': 'torch.utils.data.sampler.BatchSampler'}
```
**Loss**:
`sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss`
Parameters of the fit()-Method:
```
{
"callback": null,
"epochs": 4,
"evaluation_steps": 1000,
"evaluator": "sentence_transformers.evaluation.EmbeddingSimilarityEvaluator.EmbeddingSimilarityEvaluator",
"max_grad_norm": 1,
"optimizer_class": "<class 'transformers.optimization.AdamW'>",
"optimizer_params": {
"lr": 2e-05
},
"scheduler": "WarmupLinear",
"steps_per_epoch": null,
"warmup_steps": 36,
"weight_decay": 0.01
}
```
## Full Model Architecture
```
SentenceTransformer(
(0): Transformer({'max_seq_length': 75, 'do_lower_case': False}) with Transformer model: BertModel
(1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
)
```
## Citing & Authors
<!--- Describe where people can find more information --> | [
-0.04462115839123726,
-0.045864902436733246,
-0.04525943472981453,
0.001018201350234449,
0.0012780779507011175,
0.08573930710554123,
-0.05083615332841873,
0.03235311061143875,
0.018527882173657417,
-0.09303642064332962,
0.05125213786959648,
-0.027080396190285683,
0.03210661560297012,
0.04211435467004776,
0.06591925770044327,
0.017475785687565804,
0.01956169307231903,
0.09015625715255737,
-0.06768959760665894,
-0.10960092395544052,
0.14002804458141327,
0.10526864975690842,
0.010539940558373928,
0.035323090851306915,
-0.04485396668314934,
0.060705870389938354,
-0.025796009227633476,
-0.023088309913873672,
-0.001570596476085484,
0.011859764344990253,
0.06606791913509369,
0.004822604823857546,
-0.03847759589552879,
0.06066690385341644,
0.07196665555238724,
0.08817575871944427,
-0.03490070253610611,
0.018434036523103714,
-0.007994739338755608,
-0.07332529872655869,
0.0003843044105451554,
-0.05248664692044258,
-0.037780530750751495,
-0.0044699907302856445,
0.07580193877220154,
-0.09483730047941208,
-0.12759137153625488,
-0.034899547696113586,
0.03802810609340668,
-0.06198799982666969,
-0.12871570885181427,
0.014988791197538376,
0.023630892857909203,
0.07591377198696136,
0.03322222828865051,
0.05066341534256935,
0.04351293668150902,
-0.04254181310534477,
0.0017844696994870901,
-0.178816556930542,
-0.06303135305643082,
-0.021207356825470924,
0.02124079503118992,
-0.005823166109621525,
-0.01361902616918087,
0.005726144649088383,
0.006251147948205471,
-0.009045667946338654,
0.035082943737506866,
0.015699248760938644,
-0.08067020773887634,
0.04261600226163864,
-0.08304218202829361,
-0.0022290118504315615,
-0.09430594742298126,
-0.003426307812333107,
0.08138403296470642,
0.03411654382944107,
0.05798421800136566,
0.020492378622293472,
-0.011618702672421932,
-0.09670428186655045,
0.061882924288511276,
0.08284582197666168,
0.037435293197631836,
-0.05030946806073189,
-0.02397042140364647,
-0.07209561765193939,
0.011744524352252483,
0.002148689702153206,
-0.039731405675411224,
-0.10388224571943283,
0.05634542182087898,
-0.046501874923706055,
0.005515667609870434,
0.016537396237254143,
-0.05673030763864517,
-0.02046111784875393,
0.045568328350782394,
0.04337498918175697,
0.01456486340612173,
0.031093627214431763,
0.025658845901489258,
-0.08187434077262878,
-0.05411345884203911,
0.022500846534967422,
-0.007230801973491907,
-0.012591022998094559,
0.06916853040456772,
-0.09356288611888885,
-0.0073203593492507935,
0.006716988980770111,
-0.02002127468585968,
-0.0020947081502527,
0.07089199125766754,
-0.03766554594039917,
0.01866309531033039,
-0.01700001396238804,
0.021342618390917778,
0.10371802002191544,
-0.054177138954401016,
0.0740877315402031,
-0.022870929911732674,
0.028238167986273766,
-0.008061670698225498,
-0.02395615354180336,
0.000984427286311984,
2.5922793505487447e-34,
-0.010116981342434883,
-0.01085547637194395,
-0.005449099000543356,
0.01679011806845665,
0.014677039347589016,
0.03931160643696785,
-0.014023602940142155,
0.038398876786231995,
-0.1033857986330986,
-0.04046463966369629,
-0.09208635985851288,
0.02362188696861267,
-0.055259983986616135,
0.07753804326057434,
-0.011497053317725658,
0.015713714063167572,
-0.03899890556931496,
-0.04763336479663849,
0.08179674297571182,
0.04335298389196396,
0.05844098702073097,
0.04104667901992798,
0.0423043891787529,
-0.034066092222929,
-0.07264331728219986,
-0.02818767912685871,
0.0784493237733841,
-0.08066204935312271,
-0.060720182955265045,
0.012621655128896236,
-0.07436985522508621,
0.021599790081381798,
-0.01507424097508192,
-0.0009319677483290434,
0.021862810477614403,
-0.013886821456253529,
0.030680997297167778,
-0.007112007588148117,
-0.02909533865749836,
-0.03031701035797596,
-0.059481870383024216,
0.020476074889302254,
-0.01939793862402439,
-0.0644465982913971,
0.02023732289671898,
0.03706270083785057,
0.026987286284565926,
0.03614981472492218,
0.09875376522541046,
0.005973827559500933,
0.07155304402112961,
-0.009180898778140545,
-0.023395895957946777,
-0.023352952674031258,
0.0019990243017673492,
0.00663696089759469,
0.054527051746845245,
0.03217335790395737,
0.12394805997610092,
0.0006381099228747189,
0.045190855860710144,
0.007273837458342314,
0.07646673917770386,
0.014432534575462341,
0.13322727382183075,
0.01258724182844162,
0.03578374907374382,
0.05695983022451401,
0.025804640725255013,
0.07012926042079926,
-0.048179496079683304,
0.009003951214253902,
-0.03730395808815956,
0.04012670740485191,
0.03235846012830734,
-0.03301870822906494,
-0.03772853687405586,
-0.08304716646671295,
-0.03953693434596062,
0.09026458859443665,
-0.047222159802913666,
-0.030954862013459206,
0.039841797202825546,
-0.04065384343266487,
0.0026602258440107107,
-0.0647590160369873,
0.00848350953310728,
-0.04490102827548981,
0.05425942689180374,
-0.05251673609018326,
0.05758003517985344,
-0.015162121504545212,
0.004306337796151638,
0.05473717674612999,
0.039632413536310196,
-2.309881847867312e-33,
0.014321492053568363,
0.0017992501379922032,
-0.06273586302995682,
0.01673157326877117,
-0.012311792932450771,
-0.07360619306564331,
0.0016470850678160787,
0.058352597057819366,
0.008857007138431072,
-0.006144436076283455,
-0.04212125763297081,
-0.01767709106206894,
0.09200385212898254,
-0.07385469973087311,
0.0672682449221611,
0.08056644350290298,
-0.017890965566039085,
0.07191174477338791,
0.006380316335707903,
0.05362696945667267,
0.0010621908586472273,
0.0573364719748497,
-0.12149140983819962,
0.0635087639093399,
-0.0150319654494524,
0.009512592107057571,
0.013058868236839771,
-0.0033849889878183603,
-0.02916024811565876,
-0.05223418399691582,
-0.010542837902903557,
0.0023284992203116417,
-0.08348102867603302,
-0.025519469752907753,
-0.11997600644826889,
0.02429630421102047,
-0.040250424295663834,
-0.016435489058494568,
0.05808097496628761,
0.007298820652067661,
0.02848898433148861,
0.08591821789741516,
-0.049939144402742386,
0.00618929835036397,
0.0044037951156497,
-0.016443584114313126,
-0.1262582242488861,
-0.06444550305604935,
0.034990087151527405,
-0.004446454346179962,
-0.026128172874450684,
-0.003744238056242466,
-0.10301163792610168,
0.02083994634449482,
-0.04362105950713158,
-0.10452494025230408,
-0.02716044709086418,
-0.04717554152011871,
-0.09518449008464813,
-0.05603828281164169,
-0.04728205129504204,
-0.02314077503979206,
0.0354156419634819,
-0.042104482650756836,
0.06436445564031601,
-0.047142039984464645,
0.009682471863925457,
0.022356770932674408,
-0.03690790757536888,
-0.04732660949230194,
0.000699652184266597,
-0.028435397893190384,
0.02277824841439724,
0.07403092086315155,
0.03031078539788723,
-0.02565721981227398,
-0.03597773611545563,
0.004742236342281103,
-0.012696150690317154,
-0.059021949768066406,
-0.002146972343325615,
-0.051165223121643066,
0.02393881231546402,
-0.018737206235527992,
-0.0014748056419193745,
-0.037614885717630386,
0.0379500687122345,
0.08407037705183029,
0.022975485771894455,
0.012458551675081253,
0.0025876422878354788,
0.0019651444163173437,
-0.01863418146967888,
0.07069556415081024,
0.047303780913352966,
-4.843737499982126e-8,
-0.06554719805717468,
-0.0561482198536396,
-0.06196638196706772,
0.06700758635997772,
-0.0971289873123169,
-0.04987788945436478,
0.04737038165330887,
0.04033917561173439,
-0.04954073578119278,
-0.04344016686081886,
0.006702821236103773,
0.0354609340429306,
-0.08443887531757355,
0.008448663167655468,
-0.00665469653904438,
0.09908083081245422,
-0.01176433265209198,
0.050432413816452026,
0.045420609414577484,
-0.018070928752422333,
0.044046834111213684,
-0.02597302757203579,
0.01033695973455906,
0.03677741065621376,
0.01708880066871643,
-0.013859570026397705,
-0.041672609746456146,
0.037090931087732315,
0.010016489773988724,
-0.004434746224433184,
0.027960574254393578,
-0.011393786408007145,
-0.03123537264764309,
-0.014695857651531696,
0.008995793759822845,
0.032988328486680984,
0.05550846830010414,
-0.06416845321655273,
0.07399195432662964,
0.05497312173247337,
0.03773944452404976,
0.02521984465420246,
-0.11674375087022781,
-0.014243637211620808,
0.12174762785434723,
0.01785363256931305,
0.015617134980857372,
-0.01645573601126671,
0.03600035235285759,
0.03536592796444893,
0.056057192385196686,
-0.055761586874723434,
-0.018202580511569977,
0.004504833836108446,
0.021905528381466866,
0.03191971778869629,
0.007340831216424704,
-0.015726912766695023,
0.07502534240484238,
-0.0712108388543129,
0.06004200503230095,
0.05734388157725334,
0.0680813193321228,
-0.10567572712898254
] |
google/bert_uncased_L-8_H-768_A-12 | 3f3d093c8dd66e4776c0286f0b52b8dea5865ece | 2021-05-19T17:36:32.000Z | [
"pytorch",
"jax",
"bert",
"arxiv:1908.08962",
"transformers",
"license:apache-2.0"
] | null | false | google | null | google/bert_uncased_L-8_H-768_A-12 | 1,266 | null | transformers | ---
thumbnail: https://huggingface.co/front/thumbnails/google.png
license: apache-2.0
---
BERT Miniatures
===
This is the set of 24 BERT models referenced in [Well-Read Students Learn Better: On the Importance of Pre-training Compact Models](https://arxiv.org/abs/1908.08962) (English only, uncased, trained with WordPiece masking).
We have shown that the standard BERT recipe (including model architecture and training objective) is effective on a wide range of model sizes, beyond BERT-Base and BERT-Large. The smaller BERT models are intended for environments with restricted computational resources. They can be fine-tuned in the same manner as the original BERT models. However, they are most effective in the context of knowledge distillation, where the fine-tuning labels are produced by a larger and more accurate teacher.
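In the distillation setting, the fine-tuning labels are the soft predictions produced by the teacher. A generic sketch of such a soft-label loss in PyTorch is shown below purely as an illustration — it is not the exact recipe used in the paper:
```python
import torch.nn.functional as F

def soft_label_loss(student_logits, teacher_logits, temperature=1.0):
    """Cross-entropy of the student's predictions against the teacher's soft labels."""
    teacher_probs = F.softmax(teacher_logits / temperature, dim=-1)
    student_log_probs = F.log_softmax(student_logits / temperature, dim=-1)
    return -(teacher_probs * student_log_probs).sum(dim=-1).mean()
```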
Our goal is to enable research in institutions with fewer computational resources and encourage the community to seek directions of innovation alternative to increasing model capacity.
You can download the 24 BERT miniatures either from the [official BERT Github page](https://github.com/google-research/bert/), or via HuggingFace from the links below:
| |H=128|H=256|H=512|H=768|
|---|:---:|:---:|:---:|:---:|
| **L=2** |[**2/128 (BERT-Tiny)**][2_128]|[2/256][2_256]|[2/512][2_512]|[2/768][2_768]|
| **L=4** |[4/128][4_128]|[**4/256 (BERT-Mini)**][4_256]|[**4/512 (BERT-Small)**][4_512]|[4/768][4_768]|
| **L=6** |[6/128][6_128]|[6/256][6_256]|[6/512][6_512]|[6/768][6_768]|
| **L=8** |[8/128][8_128]|[8/256][8_256]|[**8/512 (BERT-Medium)**][8_512]|[8/768][8_768]|
| **L=10** |[10/128][10_128]|[10/256][10_256]|[10/512][10_512]|[10/768][10_768]|
| **L=12** |[12/128][12_128]|[12/256][12_256]|[12/512][12_512]|[**12/768 (BERT-Base)**][12_768]|
Note that the BERT-Base model in this release is included for completeness only; it was re-trained under the same regime as the original model.
Here are the corresponding GLUE scores on the test set:
|Model|Score|CoLA|SST-2|MRPC|STS-B|QQP|MNLI-m|MNLI-mm|QNLI(v2)|RTE|WNLI|AX|
|---|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|:---:|
|BERT-Tiny|64.2|0.0|83.2|81.1/71.1|74.3/73.6|62.2/83.4|70.2|70.3|81.5|57.2|62.3|21.0|
|BERT-Mini|65.8|0.0|85.9|81.1/71.8|75.4/73.3|66.4/86.2|74.8|74.3|84.1|57.9|62.3|26.1|
|BERT-Small|71.2|27.8|89.7|83.4/76.2|78.8/77.0|68.1/87.0|77.6|77.0|86.4|61.8|62.3|28.6|
|BERT-Medium|73.5|38.0|89.6|86.6/81.6|80.4/78.4|69.6/87.9|80.0|79.1|87.7|62.2|62.3|30.5|
For each task, we selected the best fine-tuning hyperparameters from the lists below, and trained for 4 epochs:
- batch sizes: 8, 16, 32, 64, 128
- learning rates: 3e-4, 1e-4, 5e-5, 3e-5
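As a rough illustration, this sweep can be reproduced with the 🤗 Transformers `Trainer` API. The sketch below is a minimal approximation, not the exact (TensorFlow-based) script used for the paper; the GLUE task, output paths, and model-selection metric are placeholder choices:
```python
from datasets import load_dataset
from transformers import (AutoModelForSequenceClassification, AutoTokenizer,
                          Trainer, TrainingArguments)

model_name = "google/bert_uncased_L-8_H-768_A-12"
tokenizer = AutoTokenizer.from_pretrained(model_name)

# MRPC is used here purely as an example GLUE task
raw = load_dataset("glue", "mrpc")
encoded = raw.map(
    lambda ex: tokenizer(ex["sentence1"], ex["sentence2"],
                         truncation=True, padding="max_length", max_length=128),
    batched=True,
)

best = None
for batch_size in (8, 16, 32, 64, 128):
    for lr in (3e-4, 1e-4, 5e-5, 3e-5):
        model = AutoModelForSequenceClassification.from_pretrained(model_name, num_labels=2)
        args = TrainingArguments(
            output_dir=f"out/bs{batch_size}_lr{lr}",  # placeholder path
            per_device_train_batch_size=batch_size,
            learning_rate=lr,
            num_train_epochs=4,
        )
        trainer = Trainer(model=model, args=args,
                          train_dataset=encoded["train"],
                          eval_dataset=encoded["validation"])
        trainer.train()
        eval_loss = trainer.evaluate()["eval_loss"]
        # keep the configuration with the lowest validation loss
        if best is None or eval_loss < best[0]:
            best = (eval_loss, batch_size, lr)

print("best (eval_loss, batch_size, learning_rate):", best)
```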
If you use these models, please cite the following paper:
```
@article{turc2019,
title={Well-Read Students Learn Better: On the Importance of Pre-training Compact Models},
author={Turc, Iulia and Chang, Ming-Wei and Lee, Kenton and Toutanova, Kristina},
journal={arXiv preprint arXiv:1908.08962v2 },
year={2019}
}
```
[2_128]: https://huggingface.co/google/bert_uncased_L-2_H-128_A-2
[2_256]: https://huggingface.co/google/bert_uncased_L-2_H-256_A-4
[2_512]: https://huggingface.co/google/bert_uncased_L-2_H-512_A-8
[2_768]: https://huggingface.co/google/bert_uncased_L-2_H-768_A-12
[4_128]: https://huggingface.co/google/bert_uncased_L-4_H-128_A-2
[4_256]: https://huggingface.co/google/bert_uncased_L-4_H-256_A-4
[4_512]: https://huggingface.co/google/bert_uncased_L-4_H-512_A-8
[4_768]: https://huggingface.co/google/bert_uncased_L-4_H-768_A-12
[6_128]: https://huggingface.co/google/bert_uncased_L-6_H-128_A-2
[6_256]: https://huggingface.co/google/bert_uncased_L-6_H-256_A-4
[6_512]: https://huggingface.co/google/bert_uncased_L-6_H-512_A-8
[6_768]: https://huggingface.co/google/bert_uncased_L-6_H-768_A-12
[8_128]: https://huggingface.co/google/bert_uncased_L-8_H-128_A-2
[8_256]: https://huggingface.co/google/bert_uncased_L-8_H-256_A-4
[8_512]: https://huggingface.co/google/bert_uncased_L-8_H-512_A-8
[8_768]: https://huggingface.co/google/bert_uncased_L-8_H-768_A-12
[10_128]: https://huggingface.co/google/bert_uncased_L-10_H-128_A-2
[10_256]: https://huggingface.co/google/bert_uncased_L-10_H-256_A-4
[10_512]: https://huggingface.co/google/bert_uncased_L-10_H-512_A-8
[10_768]: https://huggingface.co/google/bert_uncased_L-10_H-768_A-12
[12_128]: https://huggingface.co/google/bert_uncased_L-12_H-128_A-2
[12_256]: https://huggingface.co/google/bert_uncased_L-12_H-256_A-4
[12_512]: https://huggingface.co/google/bert_uncased_L-12_H-512_A-8
[12_768]: https://huggingface.co/google/bert_uncased_L-12_H-768_A-12
| [
-0.02777470275759697,
-0.02693094126880169,
0.07438826560974121,
0.03228488564491272,
-0.0023304771166294813,
0.018128493800759315,
-0.06253628432750702,
0.0994548574090004,
-0.014644814655184746,
0.018868697807192802,
-0.015814494341611862,
0.03585591912269592,
0.03645862638950348,
0.04551365599036217,
-0.014007769525051117,
0.02179890125989914,
0.07545263320207596,
0.024630775675177574,
-0.08102796226739883,
-0.038678135722875595,
0.04442288726568222,
0.004127463325858116,
0.035637278109788895,
-0.06602323800325394,
-0.0036878888495266438,
-0.04231955111026764,
-0.10835908353328705,
-0.10179445147514343,
0.1127767413854599,
0.017838995903730392,
0.01322801411151886,
-0.0231491569429636,
0.060954611748456955,
0.10242877900600433,
0.0375167578458786,
0.07160431146621704,
-0.007885153405368328,
0.06596683710813522,
0.08308044821023941,
0.037167150527238846,
-0.012698134407401085,
0.05730978772044182,
-0.046946585178375244,
-0.020251978188753128,
0.08908210694789886,
-0.059475671499967575,
-0.03805408999323845,
-0.05272062495350838,
-0.04246129095554352,
-0.06516197323799133,
-0.08722401410341263,
-0.0465037040412426,
-0.00350557011552155,
-0.006868511438369751,
-0.012093286029994488,
-0.017656998708844185,
-0.018602291122078896,
-0.08509580790996552,
-0.048703644424676895,
-0.05522743612527847,
-0.1006460189819336,
-0.05546271428465843,
-0.03855401650071144,
-0.02299017831683159,
-0.08375518023967743,
0.010514002293348312,
-0.0332985445857048,
0.020559493452310562,
0.02245338261127472,
0.017550311982631683,
0.02086251601576805,
0.07695921510457993,
-0.002593731041997671,
0.04768828675150871,
0.0177034679800272,
-0.08130199462175369,
0.08254873752593994,
0.01259934064000845,
0.05082662031054497,
-0.056801896542310715,
0.003977705724537373,
-0.011792338453233242,
0.061928100883960724,
-0.027844129130244255,
0.03977213054895401,
-0.01979219727218151,
0.050365421921014786,
-0.03929493576288223,
0.0031530733685940504,
-0.041712965816259384,
-0.025899091735482216,
-0.02879168465733528,
0.0234839990735054,
0.01508942898362875,
0.041859906166791916,
-0.013815062120556831,
0.07762707024812698,
-0.06824886798858643,
-0.035266585648059845,
0.06303618848323822,
0.08460132032632828,
0.05870901793241501,
0.11230025440454483,
-0.0903414711356163,
0.07434411346912384,
0.05187731981277466,
0.025597769767045975,
0.017762847244739532,
0.06019540876150131,
-0.07116957008838654,
0.025501219555735588,
0.0264898668974638,
-0.03993377089500427,
-0.02484058029949665,
0.033353839069604874,
-0.04111992195248604,
-0.012459754012525082,
-0.032413944602012634,
0.04432254657149315,
0.08561859279870987,
0.0311464574187994,
0.010137348435819149,
0.009034326300024986,
-0.013844281435012817,
-0.037362899631261826,
0.022949982434511185,
-0.04159504920244217,
3.0798436882963647e-33,
0.010033472441136837,
0.08980696648359299,
-0.015826981514692307,
0.0021228354889899492,
0.04828347638249397,
-0.012724562548100948,
0.07859385013580322,
0.013289345428347588,
-0.04710506275296211,
0.0008750183042138815,
-0.024205293506383896,
0.040203407406806946,
-0.08776650577783585,
0.1084313839673996,
0.05108625441789627,
-0.0076477923430502415,
-0.03032587841153145,
0.09285354614257812,
0.04229235649108887,
0.02342383936047554,
0.012891994789242744,
-0.03050696663558483,
0.021354084834456444,
-0.08490459620952606,
-0.04626283422112465,
-0.004968647845089436,
0.06569510698318481,
0.006347084417939186,
-0.05621005594730377,
0.04938972741365433,
-0.09828261286020279,
0.04791073501110077,
0.005325495731085539,
0.0073667350225150585,
-0.009293892420828342,
-0.030588563531637192,
-0.025204559788107872,
-0.03599413484334946,
0.06201314181089401,
-0.055159613490104675,
0.015916872769594193,
0.08668506890535355,
0.01913357712328434,
-0.03226336091756821,
0.019701041281223297,
0.016111237928271294,
0.07878092676401138,
0.027088068425655365,
-0.03437655791640282,
-0.04213705286383629,
0.038557808846235275,
0.018548857420682907,
-0.09642824530601501,
-0.02115079015493393,
0.014828594401478767,
-0.014169528149068356,
0.052391670644283295,
-0.021084407344460487,
0.018860751762986183,
0.0188959501683712,
-0.018108483403921127,
-0.017935508862137794,
-0.0007771972450427711,
0.0875239372253418,
0.05831224471330643,
-0.01666453666985035,
-0.03579762578010559,
0.019875947386026382,
-0.03154779225587845,
0.024714933708310127,
-0.04408795386552811,
-0.017733389511704445,
0.031613849103450775,
-0.034551091492176056,
0.019006161019206047,
-0.09389360249042511,
0.0749051496386528,
-0.06782030314207077,
-0.060423046350479126,
-0.0027907630428671837,
0.036781832575798035,
0.03104851022362709,
-0.06610022485256195,
-0.07133632153272629,
-0.09378468245267868,
-0.05997026711702347,
0.06689010560512543,
-0.027257995679974556,
0.019673382863402367,
0.02110666036605835,
0.0042736465111374855,
-0.07312818616628647,
0.004901031032204628,
0.009528765454888344,
-0.08911892771720886,
-2.745649909673619e-33,
0.0021529693622142076,
0.03855104371905327,
-0.10308390855789185,
0.050320789217948914,
-0.04681287705898285,
-0.04624652862548828,
0.04134273901581764,
0.15953823924064636,
-0.05114345625042915,
-0.06880908459424973,
-0.03467176482081413,
-0.01697215437889099,
-0.02391764335334301,
-0.08151818066835403,
-0.013180517591536045,
0.008677455596625805,
-0.00866649392992258,
0.0117244403809309,
0.06523464620113373,
-0.031274884939193726,
0.06625952571630478,
-0.050342388451099396,
-0.05482276901602745,
0.08445682376623154,
-0.0037109581753611565,
0.08581460267305374,
-0.1056312620639801,
-0.006267915479838848,
0.0016805074410513043,
0.03180089220404625,
-0.037861187011003494,
-0.026890192180871964,
0.029224365949630737,
0.041481297463178635,
-0.05287330225110054,
0.028274059295654297,
-0.004168998915702105,
-0.04711843654513359,
0.028253236785531044,
0.026713063940405846,
0.05356067046523094,
-0.07454729825258255,
0.01215335913002491,
0.008674802258610725,
0.002732679480686784,
-0.005528884474188089,
-0.1011095717549324,
-0.08269007503986359,
-0.00893216859549284,
-0.028915394097566605,
0.01280263438820839,
-0.03088524378836155,
-0.10103844851255417,
-0.027487996965646744,
-0.09202675521373749,
-0.08071903884410858,
-0.011788311414420605,
-0.010570026002824306,
0.040800344198942184,
0.03534208983182907,
-0.03600774705410004,
-0.08346249163150787,
-0.04663081839680672,
0.0144363883882761,
-0.0611286535859108,
-0.01945393905043602,
-0.0429740846157074,
0.06830962002277374,
-0.04516363888978958,
0.03358118236064911,
-0.04700200632214546,
-0.03670932725071907,
0.06817365437746048,
0.030344508588314056,
-0.10013546049594879,
0.05196927860379219,
-0.004978442098945379,
-0.04802384972572327,
-0.029270552098751068,
0.011249368079006672,
-0.035611048340797424,
-0.04569050669670105,
-0.007384720258414745,
0.06185262277722359,
-0.003068223362788558,
0.07179275900125504,
0.042144566774368286,
0.042808420956134796,
-0.043737392872571945,
0.1017121970653534,
-0.03529709577560425,
0.015136893838644028,
0.06037892401218414,
0.0446556992828846,
0.020039809867739677,
-5.7391801533412945e-8,
-0.020838076248764992,
0.05167875811457634,
-0.0003159099433105439,
0.032759685069322586,
-0.08053361624479294,
-0.07808814197778702,
-0.0645233765244484,
0.073664091527462,
-0.03812188282608986,
0.0739324614405632,
0.05438229441642761,
0.0640188530087471,
-0.051926061511039734,
0.03982805460691452,
0.06603474169969559,
0.08508943021297455,
-0.04874661564826965,
-0.007028104271739721,
-0.0013886261731386185,
-0.043596457690000534,
0.01172784436494112,
0.03845464810729027,
0.012406852096319199,
-0.03461853042244911,
0.06254647672176361,
-0.07115825265645981,
-0.016401374712586403,
0.15517796576023102,
-0.07044593244791031,
0.03150911629199982,
-0.028944045305252075,
0.0592564232647419,
-0.0842917189002037,
0.004482691176235676,
0.12364226579666138,
0.051830366253852844,
-0.1016145721077919,
-0.02944220043718815,
-0.0042844912968575954,
0.026145359501242638,
0.04261724650859833,
-0.0030251643620431423,
-0.05400453135371208,
-0.009814517572522163,
0.12240474671125412,
0.01839965581893921,
-0.012614627368748188,
-0.005961736664175987,
0.022503379732370377,
0.0739760547876358,
0.024917954578995705,
-0.027219194918870926,
-0.0398184210062027,
0.008865961804986,
-0.036761652678251266,
0.03012857772409916,
-0.07172215729951859,
-0.008826298639178276,
0.015618893317878246,
0.011758017353713512,
-0.004138866905122995,
0.05558526888489723,
-0.027862677350640297,
0.07714439183473587
] |
hf-internal-testing/tiny-random-imagegpt | 8291cd3a0461602decb3fa68263f4ca3b278c8f9 | 2021-12-24T10:48:44.000Z | [
"pytorch",
"imagegpt",
"transformers"
] | null | false | hf-internal-testing | null | hf-internal-testing/tiny-random-imagegpt | 1,266 | null | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
ainize/kobart-news | 4b95cf0288646bf92bcdf7429b6f462b71db5eeb | 2021-06-29T02:51:15.000Z | [
"pytorch",
"bart",
"text2text-generation",
"ko",
"transformers",
"summarization",
"license:mit",
"autotrain_compatible"
] | summarization | false | ainize | null | ainize/kobart-news | 1,265 | 2 | transformers | ---
language: ko
license: mit
tags:
- summarization
- bart
---
# kobart-news
- This model is a [KoBART](https://huggingface.co/hyunwoongko/kobart) model fine-tuned on the [문서요약 텍스트/신문기사 (AI Hub document-summarization / news-article) dataset](https://aihub.or.kr/aidata/8054) using [Ainize Teachable-NLP](https://ainize.ai/teachable-nlp).
## Usage
### Python Code
```python
from transformers import PreTrainedTokenizerFast, BartForConditionalGeneration
# Load Model and Tokenizer
tokenizer = PreTrainedTokenizerFast.from_pretrained("ainize/kobart-news")
model = BartForConditionalGeneration.from_pretrained("ainize/kobart-news")
# Encode Input Text
input_text = '국내 전반적인 경기침체로 상가 건물주의 수익도 전국적인 감소세를 보이고 있는 것으로 나타났다. 수익형 부동산 연구개발기업 상가정보연구소는 한국감정원 통계를 분석한 결과 전국 중대형 상가 순영업소득(부동산에서 발생하는 임대수입, 기타수입에서 제반 경비를 공제한 순소득)이 1분기 ㎡당 3만4200원에서 3분기 2만5800원으로 감소했다고 17일 밝혔다. 수도권, 세종시, 지방광역시에서 순영업소득이 가장 많이 감소한 지역은 3분기 1만3100원을 기록한 울산으로, 1분기 1만9100원 대비 31.4% 감소했다. 이어 대구(-27.7%), 서울(-26.9%), 광주(-24.9%), 부산(-23.5%), 세종(-23.4%), 대전(-21%), 경기(-19.2%), 인천(-18.5%) 순으로 감소했다. 지방 도시의 경우도 비슷했다. 경남의 3분기 순영업소득은 1만2800원으로 1분기 1만7400원 대비 26.4% 감소했으며 제주(-25.1%), 경북(-24.1%), 충남(-20.9%), 강원(-20.9%), 전남(-20.1%), 전북(-17%), 충북(-15.3%) 등도 감소세를 보였다. 조현택 상가정보연구소 연구원은 "올해 내수 경기의 침체된 분위기가 유지되며 상가, 오피스 등을 비롯한 수익형 부동산 시장의 분위기도 경직된 모습을 보였고 오피스텔, 지식산업센터 등의 수익형 부동산 공급도 증가해 공실의 위험도 늘었다"며 "실제 올 3분기 전국 중대형 상가 공실률은 11.5%를 기록하며 1분기 11.3% 대비 0.2% 포인트 증가했다"고 말했다. 그는 "최근 소셜커머스(SNS를 통한 전자상거래), 음식 배달 중개 애플리케이션, 중고 물품 거래 애플리케이션 등의 사용 증가로 오프라인 매장에 영향을 미쳤다"며 "향후 지역, 콘텐츠에 따른 상권 양극화 현상은 심화될 것으로 보인다"고 덧붙였다.'
input_ids = tokenizer.encode(input_text, return_tensors="pt")
# Generate Summary Text Ids
summary_text_ids = model.generate(
input_ids=input_ids,
bos_token_id=model.config.bos_token_id,
eos_token_id=model.config.eos_token_id,
length_penalty=2.0,
max_length=142,
min_length=56,
num_beams=4,
)
# Decoding Text
print(tokenizer.decode(summary_text_ids[0], skip_special_tokens=True))
```
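The same checkpoint can also be wrapped in the `summarization` pipeline. This is a brief sketch only: the tokenizer is passed explicitly because the checkpoint uses `PreTrainedTokenizerFast`, and `input_text` stands for any Korean article such as the example above:
```python
from transformers import BartForConditionalGeneration, PreTrainedTokenizerFast, pipeline

tokenizer = PreTrainedTokenizerFast.from_pretrained("ainize/kobart-news")
model = BartForConditionalGeneration.from_pretrained("ainize/kobart-news")

summarizer = pipeline("summarization", model=model, tokenizer=tokenizer)

input_text = "..."  # placeholder: any Korean news article, e.g. the one shown above
print(summarizer(input_text, max_length=142, min_length=56,
                 num_beams=4, length_penalty=2.0))
```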
### API and Demo
You can experience this model through [ainize-api](https://ainize.ai/gkswjdzz/summarize-torchserve?branch=main) and [ainize-demo](https://main-summarize-torchserve-gkswjdzz.endpoint.ainize.ai/).
| [
-0.10427828133106232,
0.02535543590784073,
0.052255235612392426,
0.02787598967552185,
-0.0013280597049742937,
-0.02597055584192276,
-0.006717091891914606,
0.05753716826438904,
-0.007845100946724415,
-0.0691143199801445,
0.054690442979335785,
-0.030059363692998886,
0.06178659200668335,
-0.004734253045171499,
0.04165033996105194,
0.019938739016652107,
0.007799228653311729,
0.027838751673698425,
-0.10206873714923859,
-0.08137592673301697,
0.1013801097869873,
0.060535233467817307,
0.016807807609438896,
0.008442775346338749,
0.075712651014328,
0.007355343084782362,
-0.03047352097928524,
-0.005635929759591818,
0.049475256353616714,
-0.006840970832854509,
-0.03838145360350609,
0.03366224840283394,
0.01438943762332201,
0.04308030381798744,
-0.02612556517124176,
0.04932694882154465,
-0.0421282984316349,
-0.037812914699316025,
0.0021788848098367453,
0.013380536809563637,
0.01589125022292137,
-0.013346066698431969,
-0.056764282286167145,
-0.06628693640232086,
0.042317282408475876,
0.04554608836770058,
-0.08377867192029953,
-0.055485352873802185,
-0.043826956301927567,
-0.019702037796378136,
-0.07519319653511047,
0.0021611745469272137,
0.04746376723051071,
0.05218977481126785,
-0.01326453685760498,
-0.006235433276742697,
-0.006226785480976105,
-0.029262099415063858,
0.08457955718040466,
-0.1173461452126503,
-0.08019676804542542,
0.02885882556438446,
0.0008660982712171972,
0.025596382096409798,
-0.041236549615859985,
-0.04987292364239693,
0.03086886927485466,
0.0865883007645607,
-0.0068634492345154285,
0.0000055937334764166735,
-0.018244832754135132,
0.0040166983380913734,
0.031326647847890854,
0.07395923882722855,
0.04762270301580429,
-0.06573230773210526,
0.12444200366735458,
-0.015413346700370312,
0.04113875329494476,
-0.05962483957409859,
-0.027768289670348167,
-0.05619579181075096,
0.024113649502396584,
0.03306707739830017,
-0.005981950554996729,
-0.05360044166445732,
-0.01036790944635868,
-0.02312423661351204,
0.07666878402233124,
0.018082374706864357,
-0.0406840443611145,
-0.04787436127662659,
-0.006758235860615969,
-0.008610524237155914,
-0.02140921913087368,
0.06982781738042831,
-0.0640060231089592,
0.05904453247785568,
-0.09839443117380142,
0.0864950567483902,
0.022200819104909897,
0.00887003168463707,
0.041417159140110016,
0.012022845447063446,
-0.04938745126128197,
-0.09329769760370255,
0.028038369491696358,
-0.009900067001581192,
0.04603240266442299,
0.009654833935201168,
0.034376442432403564,
-0.070923812687397,
0.011622270569205284,
-0.032268550246953964,
0.03232153505086899,
0.011925030499696732,
-0.005059900227934122,
0.0013137394562363625,
0.013976537622511387,
0.09533660858869553,
0.06746721267700195,
0.00574831198900938,
-0.05103108659386635,
0.05209188908338547,
-0.033057987689971924,
-0.015126397833228111,
0.0639895349740982,
4.4789166973030667e-33,
-0.02299422025680542,
0.014261290431022644,
0.04072271287441254,
0.014330272562801838,
-0.042255181819200516,
-0.024640120565891266,
-0.03329597786068916,
0.013023982755839825,
-0.05753033235669136,
-0.038746487349271774,
-0.07018139213323593,
0.08185260742902756,
-0.08236190676689148,
0.03891843557357788,
-0.03705136850476265,
-0.022412674501538277,
-0.061157092452049255,
0.017320558428764343,
0.008891346864402294,
0.026636507362127304,
0.09510694444179535,
0.012839756906032562,
-0.011090770363807678,
-0.040043335407972336,
-0.08616578578948975,
-0.02868226356804371,
0.06200098991394043,
-0.06482400000095367,
-0.05225555971264839,
0.03500773012638092,
-0.034101780503988266,
-0.027504563331604004,
-0.012022236362099648,
-0.007612427696585655,
-0.03591740503907204,
-0.03380929306149483,
0.022703152149915695,
-0.04574969410896301,
-0.07241193950176239,
-0.12442507594823837,
-0.04711339250206947,
0.02687891386449337,
-0.02484000474214554,
-0.05367468297481537,
-0.03846599534153938,
0.04415000602602959,
-0.012536142021417618,
0.011410156264901161,
0.04884444549679756,
0.0422622486948967,
0.01666317693889141,
-0.012948372401297092,
-0.027917275205254555,
0.028896406292915344,
0.04411955550312996,
0.08161548525094986,
0.08456595987081528,
0.01186679769307375,
0.039297062903642654,
-0.052276361733675,
-0.02615802362561226,
0.013729138299822807,
0.1321764439344406,
0.015485215000808239,
0.09339190274477005,
0.009931475855410099,
-0.002978383330628276,
0.03549910709261894,
0.02871655859053135,
-0.04414282366633415,
-0.09018851816654205,
-0.052832212299108505,
-0.024651680141687393,
0.04020719602704048,
-0.008286497555673122,
-0.05201820284128189,
0.021719779819250107,
-0.09913095831871033,
-0.04459289833903313,
0.011748256161808968,
0.008247274905443192,
-0.06075767055153847,
-0.006503986660391092,
-0.010852133855223656,
0.04074975475668907,
-0.07019690424203873,
0.022991811856627464,
-0.04725122079253197,
-0.020932326093316078,
0.002765237120911479,
-0.0232660174369812,
0.016489455476403236,
-0.04725055396556854,
-0.012755265459418297,
-0.03592824190855026,
-6.789144030306925e-33,
0.03399144858121872,
0.08805882930755615,
-0.049811623990535736,
0.02664485201239586,
0.0002557795087341219,
-0.030888676643371582,
0.03824322298169136,
0.1621091067790985,
0.025999801233410835,
-0.005029664374887943,
0.028550272807478905,
-0.10144402086734772,
-0.018095621839165688,
-0.03115563467144966,
0.07301130145788193,
0.02799743227660656,
-0.02618713304400444,
0.0658176988363266,
0.038177698850631714,
0.08842054754495621,
-0.015631260350346565,
0.03546953573822975,
-0.19276075065135956,
0.10305628180503845,
-0.040968697518110275,
0.049094777554273605,
-0.015470060519874096,
0.09528942406177521,
0.03639558330178261,
-0.02070973999798298,
0.013511056080460548,
-0.01251041516661644,
-0.05671799182891846,
0.10836784541606903,
-0.07302740961313248,
0.01091735064983368,
0.06460868567228317,
-0.031396057456731796,
-0.06404666602611542,
0.013477038592100143,
0.11233123391866684,
0.05587977543473244,
-0.08679208159446716,
0.01733812876045704,
-0.03625239059329033,
-0.02184012159705162,
-0.010116799734532833,
-0.06855212897062302,
0.05867402255535126,
-0.06679771840572357,
0.06916501373052597,
-0.02601618692278862,
-0.08841698616743088,
0.0006152992718853056,
-0.09015345573425293,
-0.024321312084794044,
0.09125393629074097,
-0.04889577627182007,
-0.07809795439243317,
-0.0611267164349556,
-0.057215601205825806,
-0.0643610879778862,
0.05591928958892822,
-0.1154877319931984,
-0.03500362113118172,
-0.08592480421066284,
0.026929739862680435,
-0.0065214005298912525,
0.02255731262266636,
-0.05409132316708565,
0.004727956373244524,
0.03311294689774513,
0.057967282831668854,
-0.07642120122909546,
0.003970255609601736,
0.03309602662920952,
0.0020331768319010735,
-0.03257863223552704,
-0.0048734042793512344,
-0.06296147406101227,
-0.08867009729146957,
-0.012887340039014816,
0.055066198110580444,
0.07230661809444427,
0.0029669804498553276,
0.045568227767944336,
0.02012128010392189,
0.1242406964302063,
0.061362795531749725,
-0.005173667334020138,
-0.01070878654718399,
0.04823041707277298,
0.0323844738304615,
0.10590893030166626,
-0.03709710016846657,
-5.634078803495868e-8,
-0.02234269492328167,
-0.06480077654123306,
-0.06369633227586746,
0.06237322837114334,
-0.048652708530426025,
-0.012470926158130169,
-0.022237209603190422,
-0.05780116096138954,
-0.06741383671760559,
-0.03743763640522957,
0.005583553109318018,
0.0435015931725502,
-0.10494617372751236,
-0.009289812296628952,
0.024503272026777267,
0.045816343277692795,
-0.04818741977214813,
0.07512976974248886,
-0.02264566905796528,
-0.03381611034274101,
0.058779653161764145,
-0.021502239629626274,
-0.020421557128429413,
-0.011535074561834335,
0.007202823180705309,
0.008416571654379368,
-0.08489231020212173,
0.06720955669879913,
-0.01740206964313984,
-0.0418061800301075,
0.007872444577515125,
0.0247147586196661,
-0.031068086624145508,
-0.021888135001063347,
-0.013138005509972572,
0.03571433573961258,
0.051851436495780945,
-0.06693737953901291,
0.009193282574415207,
0.06563592702150345,
0.024851735681295395,
0.033690955489873886,
-0.12476971745491028,
-0.004244379233568907,
0.08759920299053192,
0.0386970192193985,
0.0053072539158165455,
-0.07020602375268936,
0.039437782019376755,
0.03242005407810211,
-0.026759028434753418,
-0.02674068883061409,
-0.06430550664663315,
-0.024608099833130836,
0.01461702585220337,
0.015160678885877132,
-0.041423555463552475,
-0.043068163096904755,
-0.00286257010884583,
0.03489784896373749,
0.028312962502241135,
0.052456457167863846,
0.03599632531404495,
0.001684257760643959
] |
hf-internal-testing/tiny-random-speech_to_text | 0edd349ecdb54044ad27ca4cde3136252e3503c1 | 2021-09-17T19:26:03.000Z | [
"pytorch",
"speech_to_text",
"transformers"
] | null | false | hf-internal-testing | null | hf-internal-testing/tiny-random-speech_to_text | 1,260 | null | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
hf-internal-testing/tiny-random-vision-encoder-decoder | 2b34c3c71aa6c25134e293c502f172ee7368eb67 | 2021-12-15T17:14:55.000Z | [
"pytorch",
"vision-encoder-decoder",
"transformers"
] | null | false | hf-internal-testing | null | hf-internal-testing/tiny-random-vision-encoder-decoder | 1,258 | null | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
lgrobol/roberta-minuscule | 3ec7286af3b51b67bef74c29a8b9195205b532c4 | 2021-08-17T13:38:29.000Z | [
"pytorch",
"roberta",
"fill-mask",
"transformers",
"autotrain_compatible"
] | fill-mask | false | lgrobol | null | lgrobol/roberta-minuscule | 1,256 | 1 | transformers | RoBERTa-minuscule
==================
A ridiculously small model for testing purposes. | [
-0.06243985891342163,
-0.06771157681941986,
-0.07250465452671051,
0.06631236523389816,
-0.044766247272491455,
0.006072840187698603,
0.010920976288616657,
0.07705207169055939,
-0.0066064209677278996,
0.04413164034485817,
0.041306789964437485,
-0.0204787515103817,
0.008035294711589813,
0.0002067737514153123,
-0.07051307708024979,
0.02961158938705921,
0.07274017482995987,
-0.01647498831152916,
-0.12369775772094727,
0.058911051601171494,
-0.06971403956413269,
0.009755455888807774,
0.052399955689907074,
0.01921253092586994,
0.028762241825461388,
-0.03572158142924309,
-0.0669553279876709,
-0.018471315503120422,
0.03111821599304676,
0.024561015889048576,
-0.053439825773239136,
0.07813195139169693,
0.029909413307905197,
0.0398278571665287,
0.08119449764490128,
0.05252738669514656,
0.04339737817645073,
0.04296988993883133,
0.002728621941059828,
-0.025318488478660583,
-0.005903454031795263,
-0.010629083029925823,
-0.0135786272585392,
-0.0000863123350427486,
-0.08939994871616364,
-0.15471230447292328,
0.029723018407821655,
0.003549334593117237,
-0.03846915811300278,
-0.05447123199701309,
-0.11334331333637238,
-0.0378267727792263,
-0.09688899666070938,
-0.05369209498167038,
-0.018196946009993553,
-0.04609290510416031,
-0.01543040294200182,
-0.04360990598797798,
-0.04620986059308052,
0.022676566615700722,
-0.0207924023270607,
-0.05269059166312218,
-0.07575833797454834,
0.021051594987511635,
0.0750703439116478,
0.03395228460431099,
-0.10479388386011124,
0.01615794375538826,
-0.00720958411693573,
0.0008414724143221974,
-0.04433099180459976,
-0.0518731027841568,
-0.06452436745166779,
0.10281702876091003,
0.06418922543525696,
-0.04420273378491402,
0.037705209106206894,
0.033384233713150024,
0.08253461122512817,
0.05147598683834076,
-0.05082005262374878,
-0.05313941836357117,
-0.013288489542901516,
-0.002904788823798299,
-0.015490831807255745,
0.04012685641646385,
0.03463319316506386,
0.02664824016392231,
0.011098076589405537,
-0.09177009016275406,
-0.08863364905118942,
-0.0158674493432045,
0.0302277822047472,
-0.002655168529599905,
-0.09025563299655914,
-0.004272907972335815,
0.002133993897587061,
-0.04117188602685928,
-0.0700780376791954,
0.09568949043750763,
0.01227132510393858,
0.11912360042333603,
0.09099598228931427,
0.01640305295586586,
-0.00851875264197588,
-0.04984978958964348,
0.06399252265691757,
0.0003501938481349498,
0.023675983771681786,
-0.048570770770311356,
0.024806825444102287,
0.009872621856629848,
-0.03655947372317314,
0.03839634731411934,
0.0995769128203392,
-0.04477527365088463,
-0.008066067472100258,
0.02358752302825451,
0.007746804505586624,
-0.004562576301395893,
0.06851791590452194,
0.007966465316712856,
-0.04999576136469841,
0.04609706625342369,
0.0011852957541123033,
0.051796384155750275,
-0.051435861736536026,
4.585597952427974e-34,
-0.006982778664678335,
0.10613518953323364,
0.10169073194265366,
0.08542536199092865,
0.013869003392755985,
0.06062779203057289,
-0.07867255061864853,
0.04576234146952629,
0.02733984775841236,
0.07012418657541275,
0.008081221021711826,
0.008001045323908329,
-0.013599715195596218,
0.021466659381985664,
-0.022748559713363647,
0.06637755036354065,
-0.08177162706851959,
0.030917279422283173,
-0.007659689523279667,
0.04089788720011711,
0.04932413995265961,
0.05907629802823067,
-0.043257977813482285,
-0.026878485456109047,
-0.01767190918326378,
-0.06048104539513588,
0.0630524531006813,
-0.012739044614136219,
-0.028347207233309746,
0.015577924437820911,
-0.03871629387140274,
0.06640350073575974,
-0.05688948556780815,
-0.030269265174865723,
-0.005876509938389063,
-0.02493778057396412,
-0.030919717624783516,
-0.07133158296346664,
0.026228737086057663,
-0.016763396561145782,
-0.08525968343019485,
-0.00996579322963953,
0.020973842591047287,
-0.002531416015699506,
-0.007436784449964762,
0.006267978809773922,
0.07225055992603302,
0.004957236349582672,
0.027510948479175568,
-0.023515623062849045,
-0.012983967550098896,
0.027301348745822906,
0.07684703916311264,
0.06376290321350098,
-0.047193218022584915,
0.035568851977586746,
0.09919782727956772,
0.011384529061615467,
-0.04031014442443848,
0.03958038240671158,
-0.016743021085858345,
-0.05454331263899803,
0.010657481849193573,
-0.04751316457986832,
0.03887855261564255,
-0.015973906964063644,
-0.06198515370488167,
-0.106786347925663,
0.06938488036394119,
0.14168459177017212,
-0.074564628303051,
-0.009665872901678085,
0.012752754613757133,
0.022363699972629547,
-0.03197299689054489,
-0.039478473365306854,
0.07109683007001877,
-0.019381074234843254,
0.003886505961418152,
-0.03941204026341438,
-0.0007154184859246016,
0.03346991166472435,
-0.01237793080508709,
-0.06541711091995239,
-0.1029859408736229,
-0.1182839423418045,
0.008923366665840149,
-0.09713717550039291,
-0.14388792216777802,
-0.06346539407968521,
0.01445791032165289,
0.09946320205926895,
0.021826408803462982,
0.00467687426134944,
-0.08673696219921112,
-4.5489328142578884e-33,
-0.0393051914870739,
-0.019627366214990616,
0.017444849014282227,
0.07920020818710327,
0.06696324050426483,
-0.056033145636320114,
0.03947390988469124,
0.061098068952560425,
0.0151522783562541,
0.017058158293366432,
0.1099669486284256,
-0.07924451678991318,
0.019697466865181923,
-0.043663278222084045,
0.07392527908086777,
0.03470868244767189,
-0.0003795935772359371,
-0.16001614928245544,
0.04317630082368851,
-0.055602509528398514,
-0.088909812271595,
0.08283110707998276,
0.002524483483284712,
-0.0033773300237953663,
-0.06462851166725159,
-0.023653004318475723,
0.048617810010910034,
-0.04546113684773445,
-0.03821109980344772,
-0.018900224938988686,
-0.031139912083745003,
-0.0530313178896904,
-0.056340157985687256,
-0.062420278787612915,
-0.08093788474798203,
0.053051915019750595,
-0.004211904481053352,
-0.06269494444131851,
-0.025700414553284645,
-0.03167306259274483,
0.008678370155394077,
0.032795730978250504,
-0.01874667778611183,
0.06353893131017685,
0.011640448123216629,
-0.007364960853010416,
-0.010588499717414379,
0.002896708669140935,
-0.02478947676718235,
-0.011429846286773682,
-0.0022529340349137783,
-0.030113015323877335,
-0.048825088888406754,
0.015628540888428688,
-0.03723740577697754,
-0.02288242243230343,
0.05526396259665489,
-0.0031894908752292395,
0.0018152233678847551,
0.08258254826068878,
-0.05602740868926048,
-0.0012537621660158038,
-0.06093964725732803,
0.06891561299562454,
-0.016794148832559586,
-0.08334042131900787,
0.0003172309370711446,
0.027408279478549957,
-0.028969481587409973,
0.05105502903461456,
0.07838411629199982,
-0.024128444492816925,
0.01605876348912716,
0.022732315585017204,
0.037332624197006226,
0.012905288487672806,
-0.08363812416791916,
-0.08028406649827957,
-0.03771529346704483,
-0.0511745810508728,
-0.08046112954616547,
-0.0423472635447979,
-0.04025620222091675,
0.04690422862768173,
0.0034999377094209194,
0.027558812871575356,
0.005966934375464916,
0.08826038241386414,
-0.027837524190545082,
0.11650166660547256,
0.00022670376347377896,
0.025936760008335114,
0.010062001645565033,
-0.0009805732406675816,
-0.015761103481054306,
-3.684529659153668e-8,
-0.00917893834412098,
0.012188994325697422,
-0.03606521710753441,
-0.04784589633345604,
-0.01720096729695797,
0.02489960379898548,
-0.005368613637983799,
-0.06066286191344261,
-0.06147097423672676,
0.14284175634384155,
-0.003321918426081538,
0.003086097538471222,
-0.0632091611623764,
0.07216101884841919,
0.004057055804878473,
0.0009897372219711542,
-0.002777290530502796,
0.053498897701501846,
-0.04583628103137016,
-0.001374672050587833,
0.07342837750911713,
0.03825228661298752,
-0.02966420352458954,
-0.006150263361632824,
0.014166177250444889,
0.0008953094366006553,
-0.06323375552892685,
0.08455004543066025,
-0.012706497684121132,
-0.007565073203295469,
0.04077231511473656,
0.06533940136432648,
0.01746254786849022,
-0.0268174447119236,
-0.07923886179924011,
0.05038842931389809,
0.012340272776782513,
-0.0013519383501261473,
-0.047606199979782104,
0.03486577421426773,
-0.027586253359913826,
-0.02204759605228901,
-0.044222861528396606,
0.014728501439094543,
0.00691570108756423,
-0.02651912346482277,
0.0684182420372963,
-0.07515905052423477,
0.04656458646059036,
0.04069165140390396,
0.03092888742685318,
-0.0011807649862021208,
-0.04110507667064667,
-0.0486818365752697,
-0.05199168622493744,
-0.0346570760011673,
-0.07434383779764175,
0.03856132552027702,
-0.0016777036944404244,
-0.02869005873799324,
0.035267025232315063,
-0.016606464982032776,
-0.025830762460827827,
-0.009694349020719528
] |
izumi-lab/bert-small-japanese | 7472b8975446df577a1820d559197075ab05f2e1 | 2022-03-19T09:37:46.000Z | [
"pytorch",
"bert",
"fill-mask",
"ja",
"dataset:wikipedia",
"arxiv:2003.10555",
"transformers",
"license:cc-by-sa-4.0",
"autotrain_compatible"
] | fill-mask | false | izumi-lab | null | izumi-lab/bert-small-japanese | 1,252 | null | transformers | ---
language: ja
license: cc-by-sa-4.0
datasets:
- wikipedia
widget:
- text: 東京大学で[MASK]の研究をしています。
---
# BERT small Japanese
This is a [BERT](https://github.com/google-research/bert) model pretrained on texts in the Japanese language.
The code for the pretraining is available at [retarfi/language-pretraining](https://github.com/retarfi/language-pretraining/tree/v1.0).
## Model architecture
The model architecture is the same as BERT small in the [original ELECTRA paper](https://arxiv.org/abs/2003.10555); 12 layers, 256 dimensions of hidden states, and 4 attention heads.
## Training Data
The models are trained on the Japanese version of Wikipedia.
The training corpus is generated from the Japanese version of Wikipedia, using the Wikipedia dump file as of June 1, 2021.
The corpus file is 2.9 GB, consisting of approximately 20M sentences.
## Tokenization
The texts are first tokenized by MeCab with the IPA dictionary and then split into subwords by the WordPiece algorithm.
The vocabulary size is 32768.
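In practice, this means a MeCab backend must be available when the released tokenizer is loaded. The snippet below is a minimal usage sketch rather than part of the original card; it assumes the `fugashi` and `ipadic` packages are installed to provide MeCab support, and it reuses the example sentence from the widget above.
```python
from transformers import pipeline

# Minimal sketch: assumes `fugashi` and `ipadic` are installed so the
# MeCab-based tokenizer can run; the prompt is the widget example above.
fill_mask = pipeline("fill-mask", model="izumi-lab/bert-small-japanese")
print(fill_mask("東京大学で[MASK]の研究をしています。"))
```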
## Training
The models are trained with the same configuration as BERT small in the [original ELECTRA paper](https://arxiv.org/abs/2003.10555); 128 tokens per instance, 128 instances per batch, and 1.45M training steps.
## Citation
**There will be another paper for this pretrained model. Be sure to check here again when you cite.**
```
@inproceedings{suzuki2021fin-bert-electra,
title={金融文書を用いた事前学習言語モデルの構築と検証},
% title={Construction and Validation of a Pre-Trained Language Model Using Financial Documents},
author={鈴木 雅弘 and 坂地 泰紀 and 平野 正徳 and 和泉 潔},
% author={Masahiro Suzuki and Hiroki Sakaji and Masanori Hirano and Kiyoshi Izumi},
booktitle={人工知能学会第27回金融情報学研究会(SIG-FIN)},
% booktitle={Proceedings of JSAI Special Interest Group on Financial Informatics (SIG-FIN) 27},
pages={5-10},
year={2021}
}
```
## Licenses
The pretrained models are distributed under the terms of the [Creative Commons Attribution-ShareAlike 4.0](https://creativecommons.org/licenses/by-sa/4.0/).
## Acknowledgments
This work was supported by JSPS KAKENHI Grant Number JP21K12010.
| [
-0.1085769459605217,
-0.035343848168849945,
0.03383440524339676,
0.056082747876644135,
0.029143471270799637,
0.09396633505821228,
-0.0002758160699158907,
0.054543301463127136,
0.02398882992565632,
0.020321283489465714,
0.06612618267536163,
0.0035854927264153957,
0.008817452006042004,
0.030745426192879677,
-0.004911382216960192,
0.016530504450201988,
0.030180873349308968,
-0.03941653296351433,
-0.06371744722127914,
-0.03208785504102707,
0.08858881890773773,
0.021668681874871254,
0.072073794901371,
-0.06887762993574142,
0.08770392835140228,
-0.01876799389719963,
-0.05242738872766495,
-0.02440926991403103,
0.06169063225388527,
0.023495988920331,
0.03378025069832802,
0.009171090088784695,
0.11221971362829208,
0.07334377616643906,
0.003619386348873377,
0.06655451655387878,
0.037911757826805115,
-0.014666205272078514,
0.021794365718960762,
0.05355124920606613,
-0.02508804388344288,
0.02945788763463497,
-0.0013493754668161273,
0.0017783398507162929,
0.11852440983057022,
-0.040066011250019073,
-0.059382613748311996,
-0.021029001101851463,
-0.06222716346383095,
-0.021443204954266548,
-0.07358033210039139,
0.011331469751894474,
0.04795633256435394,
0.027092084288597107,
-0.006307901814579964,
-0.018399007618427277,
0.004076228477060795,
-0.009856590069830418,
-0.0017533625941723585,
-0.06650114059448242,
-0.10179559141397476,
-0.06351635605096817,
-0.0168850589543581,
-0.014177502132952213,
-0.04029756784439087,
0.02371174655854702,
-0.05305953323841095,
0.030205734074115753,
0.034748490899801254,
-0.0627121552824974,
-0.021702619269490242,
0.05012573301792145,
0.022413285449147224,
0.03308772295713425,
0.0017713290872052312,
-0.083854079246521,
0.07355695962905884,
0.04617099091410637,
0.005671949125826359,
-0.10727076232433319,
0.00741449324414134,
-0.024888835847377777,
0.07718686014413834,
-0.01686500757932663,
0.05659952014684677,
0.04536745324730873,
0.03416287153959274,
-0.026553034782409668,
-0.013778556138277054,
0.03122442588210106,
-0.014803044497966766,
-0.08069044351577759,
0.05343002825975418,
0.016082847490906715,
-0.00662617851048708,
0.03475143015384674,
-0.005041604395955801,
-0.018804315477609634,
0.01594899222254753,
0.0860152617096901,
0.09836841374635696,
0.05714548006653786,
0.059021394699811935,
-0.0015612004790455103,
-0.01918688975274563,
0.013934419490396976,
0.015299731865525246,
0.06149669364094734,
0.0681120753288269,
-0.04605806618928909,
0.022649627178907394,
-0.0034058019518852234,
-0.025974469259381294,
-0.044374048709869385,
0.0168246291577816,
0.007534692995250225,
-0.027266453951597214,
-0.016478944569826126,
0.00834935437887907,
0.12415008991956711,
0.0027668213006109,
0.01316010020673275,
-0.011492609977722168,
-0.014400972984731197,
-0.056427501142024994,
-0.032127849757671356,
-0.048306308686733246,
3.606865927569983e-33,
0.04217002913355827,
0.028896717354655266,
0.0045201824977993965,
0.007000643759965897,
-0.06048235297203064,
-0.03807656094431877,
0.01645788364112377,
0.021906625479459763,
-0.0663515031337738,
-0.020629091188311577,
-0.09146223217248917,
0.0627857968211174,
-0.07140950113534927,
0.07856705784797668,
-0.0017988198669627309,
-0.017987878993153572,
-0.06669541448354721,
0.06461162120103836,
0.11681873351335526,
0.007951827719807625,
0.08431414514780045,
-0.006278385408222675,
0.04361669719219208,
-0.12212502956390381,
-0.016292419284582138,
0.04354621842503548,
0.089333675801754,
-0.08967722207307816,
-0.056769803166389465,
0.07009315490722656,
-0.12799771130084991,
0.003538192017003894,
-0.030224433168768883,
-0.0004615333746187389,
-0.03759448230266571,
-0.04048418998718262,
0.030832091346383095,
-0.05987013876438141,
0.00620113592594862,
-0.0702332928776741,
0.01545227225869894,
0.07827764004468918,
-0.003296139882877469,
-0.049875982105731964,
-0.03167673200368881,
0.0037067339289933443,
0.06830313801765442,
-0.031560033559799194,
0.07799945026636124,
0.04292551055550575,
-0.008875489234924316,
-0.011469409801065922,
-0.01834660954773426,
-0.005388019606471062,
0.04739031568169594,
0.014825256541371346,
0.05523483082652092,
0.015111963264644146,
0.03944143280386925,
0.058876559138298035,
0.010107859969139099,
-0.009391493164002895,
-0.020487377420067787,
0.04529574140906334,
0.03406517952680588,
0.013340701349079609,
-0.06127377599477768,
-0.06016860529780388,
0.06109870225191116,
0.018108904361724854,
-0.06327591836452484,
-0.031026851385831833,
0.026371266692876816,
0.005044842138886452,
0.041932474821805954,
-0.059582050889730453,
0.0047889887355268,
-0.03046882338821888,
-0.053693633526563644,
0.03299642726778984,
0.01999782584607601,
-0.03581491857767105,
-0.040916558355093,
-0.0633058026432991,
-0.03797024115920067,
-0.018077153712511063,
0.07254065573215485,
-0.06386083364486694,
0.019700152799487114,
0.011429781094193459,
0.05415528267621994,
-0.08592240512371063,
-0.03629324212670326,
-0.0015156606677919626,
-0.05323253944516182,
-2.4055999651318707e-33,
-0.0247552040964365,
0.07289104163646698,
-0.05971381813287735,
0.0297054685652256,
-0.08476132899522781,
-0.04692625626921654,
0.032255254685878754,
0.21062251925468445,
-0.04391973093152046,
-0.009231941774487495,
0.01148011814802885,
-0.10427074134349823,
0.05888597294688225,
-0.018640128895640373,
0.07625368982553482,
0.02542366459965706,
-0.0015703809913247824,
0.020989876240491867,
0.042111121118068695,
0.024201834574341774,
0.03099491447210312,
-0.036843881011009216,
-0.083168163895607,
0.06933704018592834,
-0.007960837334394455,
0.05252833664417267,
-0.03273960202932358,
0.04653293639421463,
-0.01804962009191513,
0.01992902345955372,
-0.08388987928628922,
-0.025640230625867844,
-0.05615514889359474,
0.05499494448304176,
-0.11061960458755493,
-0.02600633166730404,
0.005270373076200485,
-0.019463974982500076,
-0.015517179854214191,
-0.0009563338826410472,
0.10988382995128632,
0.017047425732016563,
-0.06456387788057327,
0.007826808840036392,
-0.043657537549734116,
-0.023349978029727936,
-0.15095558762550354,
0.049485184252262115,
0.03995983675122261,
-0.08157326281070709,
0.017772341147065163,
-0.011588436551392078,
-0.05696607381105423,
-0.07977503538131714,
-0.0893394723534584,
-0.10804121196269989,
0.03429030999541283,
-0.018125493079423904,
-0.03943387791514397,
-0.012273226864635944,
-0.027354920282959938,
-0.044310279190540314,
0.03662968426942825,
-0.007693388964980841,
0.01634141430258751,
-0.055367112159729004,
0.015960542485117912,
0.08983148634433746,
-0.019946886226534843,
-0.004169967025518417,
-0.0002889572351705283,
-0.011051913723349571,
0.0839337632060051,
-0.006192922592163086,
-0.006917436607182026,
0.01442347839474678,
0.0013846480287611485,
-0.06516426056623459,
0.0029472962487488985,
-0.06497517973184586,
-0.02834075503051281,
0.029255885630846024,
0.05221831053495407,
0.040193699300289154,
0.037008773535490036,
0.08681820333003998,
0.03351490572094917,
0.018255598843097687,
-0.00926200021058321,
0.04526319354772568,
-0.00830572284758091,
0.0923486053943634,
0.023315325379371643,
0.08471763134002686,
-0.029309779405593872,
-5.2622539215008146e-8,
-0.06178291514515877,
-0.02500140480697155,
-0.006406046915799379,
0.06430919468402863,
-0.058405205607414246,
-0.03354574739933014,
-0.04867998883128166,
0.013362674973905087,
-0.03901754692196846,
-0.039100755006074905,
0.10110516101121902,
0.10787243396043777,
-0.07159760594367981,
0.019435402005910873,
-0.04158551245927811,
0.03691708669066429,
-0.03210899978876114,
0.04940476641058922,
-0.004056520294398069,
-0.013132222928106785,
0.048808369785547256,
0.05474724993109703,
0.05847050994634628,
-0.010622368194162846,
0.03776125609874725,
-0.06643359363079071,
-0.028348084539175034,
0.1327243149280548,
-0.013736966997385025,
-0.08312336355447769,
-0.0747143104672432,
0.01742486283183098,
-0.09148746728897095,
-0.006731022614985704,
0.012354954145848751,
0.03223676234483719,
-0.027066072449088097,
-0.04423810914158821,
-0.05404375120997429,
0.02458908036351204,
0.08050817996263504,
-0.019426673650741577,
-0.06400075554847717,
-0.013480918481945992,
0.09570175409317017,
-0.01215567160397768,
-0.062035318464040756,
-0.09856358915567398,
0.044993363320827484,
-0.050077181309461594,
0.024000195786356926,
-0.028754187747836113,
-0.047377899289131165,
0.006395300850272179,
-0.039228588342666626,
0.02775411680340767,
-0.09695060551166534,
-0.02073756977915764,
-0.0006196177564561367,
0.0371677428483963,
-0.0008252166444435716,
0.04919677972793579,
0.03551127016544342,
0.04379658028483391
] |
castorini/afriberta_large | e74edb9488208f8a2aeb69be4c16d179ab385564 | 2022-06-10T12:05:16.000Z | [
"pytorch",
"tf",
"xlm-roberta",
"fill-mask",
"om",
"am",
"rw",
"rn",
"ha",
"ig",
"pcm",
"so",
"sw",
"ti",
"yo",
"multilingual",
"transformers",
"autotrain_compatible"
] | fill-mask | false | castorini | null | castorini/afriberta_large | 1,251 | 2 | transformers | ---
language:
- om
- am
- rw
- rn
- ha
- ig
- pcm
- so
- sw
- ti
- yo
- multilingual
---
# afriberta_large
## Model description
AfriBERTa large is a pretrained multilingual language model with around 126 million parameters.
The model has 10 layers, 6 attention heads, 768 hidden units, and a feed-forward size of 3072.
The model was pretrained on 11 African languages, namely Afaan Oromoo (also called Oromo), Amharic, Gahuza (a mixed language containing Kinyarwanda and Kirundi), Hausa, Igbo, Nigerian Pidgin, Somali, Swahili, Tigrinya and Yorùbá.
The model has been shown to obtain competitive downstream performance on text classification and named entity recognition on several African languages, including those it was not pretrained on.
## Intended uses & limitations
#### How to use
You can use this model with Transformers for any downstream task.
For example, assuming we want to fine-tune this model on a token classification task, we do the following:
```python
>>> from transformers import AutoTokenizer, AutoModelForTokenClassification
>>> model = AutoModelForTokenClassification.from_pretrained("castorini/afriberta_large")
>>> tokenizer = AutoTokenizer.from_pretrained("castorini/afriberta_large")
# We have to manually set the model max length because it is an imported SentencePiece model, which Hugging Face does not properly support right now
>>> tokenizer.model_max_length = 512
```
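Because the checkpoint is a masked language model, it can also be queried directly with the `fill-mask` pipeline. The snippet below is an illustrative sketch rather than part of the original card; the Swahili prompt is an assumption chosen only as an example.
```python
from transformers import AutoTokenizer, AutoModelForMaskedLM, pipeline

model = AutoModelForMaskedLM.from_pretrained("castorini/afriberta_large")
tokenizer = AutoTokenizer.from_pretrained("castorini/afriberta_large")
# Same manual fix as above for the imported SentencePiece tokenizer
tokenizer.model_max_length = 512

fill_mask = pipeline("fill-mask", model=model, tokenizer=tokenizer)
# Hypothetical Swahili prompt: "The capital city of Tanzania is <mask>."
print(fill_mask(f"Mji mkuu wa Tanzania ni {tokenizer.mask_token}."))
```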
#### Limitations and bias
- This model is possibly limited by its training dataset, which consists mainly of news articles from a specific span of time. Thus, it may not generalize well.
- This model is trained on very little data (less than 1 GB), so it may not have seen enough data to learn very complex linguistic relations.
## Training data
The model was trained on an aggregation of datasets from the BBC news website and Common Crawl.
## Training procedure
For information on training procedures, please refer to the AfriBERTa [paper]() or [repository](https://github.com/keleog/afriberta).
### BibTeX entry and citation info
```
@inproceedings{ogueji-etal-2021-small,
title = "Small Data? No Problem! Exploring the Viability of Pretrained Multilingual Language Models for Low-resourced Languages",
author = "Ogueji, Kelechi and
Zhu, Yuxin and
Lin, Jimmy",
booktitle = "Proceedings of the 1st Workshop on Multilingual Representation Learning",
month = nov,
year = "2021",
address = "Punta Cana, Dominican Republic",
publisher = "Association for Computational Linguistics",
url = "https://aclanthology.org/2021.mrl-1.11",
pages = "116--126",
}
```
| [
-0.08165895193815231,
-0.05106094852089882,
-0.02171929180622101,
0.004028848372399807,
-0.07591322809457779,
-0.004545403644442558,
-0.033020585775375366,
0.061460573226213455,
0.028677452355623245,
-0.0018121707253158092,
-0.0040605273097753525,
-0.0775199607014656,
0.01467098481953144,
0.03471678867936134,
0.0370098240673542,
0.0217119250446558,
0.08335965871810913,
0.04076460748910904,
-0.1282605230808258,
-0.09976114332675934,
0.089021235704422,
0.026894180104136467,
0.04461689665913582,
-0.01871618814766407,
0.011859286576509476,
-0.05150296166539192,
-0.045588791370391846,
-0.004621169529855251,
0.09100081771612167,
-0.06661245226860046,
0.017995020374655724,
0.07317950576543808,
0.008818566799163818,
0.1083306223154068,
-0.033917855471372604,
0.1231573075056076,
-0.10849512368440628,
-0.046523332595825195,
0.013478533364832401,
-0.03254636749625206,
0.0336977019906044,
-0.04349188134074211,
0.02003045566380024,
-0.02518199197947979,
0.09830829501152039,
-0.0012428053887560964,
-0.021442772820591927,
0.05156076326966286,
-0.03847034275531769,
-0.024729019030928612,
-0.06903750449419022,
-0.061802275478839874,
-0.030448028817772865,
0.11732237040996552,
-0.03011283650994301,
-0.11270678788423538,
-0.017682772129774094,
-0.06712324917316437,
0.018497556447982788,
0.0015330020105466247,
-0.11823970079421997,
-0.0370909757912159,
0.012943299487233162,
0.010135935619473457,
-0.07352671027183533,
-0.01962338015437126,
0.024669619277119637,
0.011728907003998756,
-0.009004686027765274,
0.027621913701295853,
0.012399938888847828,
-0.0034449410159140825,
0.03986016660928726,
0.03761408105492592,
0.03871171548962593,
-0.025606783106923103,
0.07261614501476288,
-0.006248041056096554,
0.03166513890028,
-0.07564075291156769,
0.03853447362780571,
0.013980815187096596,
0.06727524846792221,
-0.05081474035978317,
0.07128265500068665,
0.015391330234706402,
-0.015676315873861313,
-0.014351305551826954,
-0.009883400052785873,
-0.0004661595157813281,
-0.027773993089795113,
-0.005135697312653065,
0.1014418676495552,
-0.0315617099404335,
0.010654907673597336,
-0.013412438333034515,
0.002758446615189314,
0.004007252398878336,
-0.07380057871341705,
0.08326469361782074,
-0.008358944207429886,
-0.01025721337646246,
0.07563629746437073,
0.044598281383514404,
-0.09582552313804626,
-0.10636469721794128,
0.04334495961666107,
0.030899297446012497,
0.03915972262620926,
-0.018655948340892792,
-0.022507255896925926,
-0.0037869480438530445,
-0.07840705662965775,
0.006235211156308651,
0.02051578089594841,
-0.0008928858442232013,
-0.008351688273251057,
-0.055737998336553574,
-0.028204279020428658,
0.07590113580226898,
-0.08110208064317703,
-0.024301638826727867,
0.03770265355706215,
0.006574425846338272,
0.00032874694443307817,
0.015557684004306793,
-0.0476931631565094,
5.597377531591395e-33,
-0.0036903980653733015,
0.05076467990875244,
-0.016523942351341248,
0.012416087090969086,
-0.030915724113583565,
0.0041247084736824036,
0.01010913122445345,
0.047036007046699524,
-0.006078933831304312,
-0.00620204396545887,
-0.09748585522174835,
0.08924868702888489,
-0.04722589999437332,
0.12936434149742126,
-0.02208929881453514,
-0.03659455478191376,
0.020501907914876938,
0.03621165454387665,
-0.0058717564679682255,
0.0024421820417046547,
0.15360380709171295,
0.025120440870523453,
0.07134448736906052,
-0.024065230041742325,
-0.014619666151702404,
0.030792072415351868,
0.054446130990982056,
-0.08180677145719528,
-0.042289409786462784,
0.0489325113594532,
-0.0737706795334816,
-0.0545198917388916,
-0.03864378482103348,
-0.011516337282955647,
-0.007503548637032509,
-0.0634276270866394,
-0.03784479945898056,
-0.07409463077783585,
-0.01351950317621231,
-0.04828984662890434,
-0.04417213052511215,
0.016210924834012985,
-0.002644836902618408,
-0.0504131056368351,
-0.07917534559965134,
-0.019396472722291946,
0.01854032091796398,
-0.033500831574201584,
-0.00925657618790865,
0.03614882752299309,
0.05098585784435272,
0.016686270013451576,
-0.07631346583366394,
-0.012877535074949265,
0.03917449712753296,
0.011441334150731564,
0.000818284519482404,
0.04814646393060684,
0.07598503679037094,
0.02237468957901001,
-0.029085541144013405,
-0.049620792269706726,
0.03721174970269203,
0.01698644645512104,
0.1024358943104744,
0.0027642224449664354,
-0.04506099596619606,
0.06338030099868774,
0.08640703558921814,
-0.03594757989048958,
-0.004417554009705782,
-0.056239817291498184,
-0.03709867596626282,
0.05440699681639671,
0.03752092644572258,
0.005724972579628229,
0.05370721593499184,
-0.061434924602508545,
-0.011679082177579403,
0.0403655543923378,
0.004721242934465408,
0.042347636073827744,
0.015948306769132614,
-0.04029359295964241,
-0.009966040030121803,
0.01966249756515026,
0.037635210901498795,
-0.09219976514577866,
-0.020795507356524467,
-0.01087517011910677,
0.04053429141640663,
0.05953419581055641,
0.016321402043104172,
-0.008862556889653206,
-0.02680812031030655,
-5.522136709589153e-33,
0.033328521996736526,
0.006212491076439619,
-0.08368367701768875,
0.04691171646118164,
-0.0013520707143470645,
-0.0714503824710846,
0.07096674293279648,
0.12696026265621185,
-0.06871762126684189,
-0.02775135636329651,
0.07241328060626984,
-0.05727057531476021,
0.10904484242200851,
-0.019883591681718826,
0.057141706347465515,
-0.033866770565509796,
0.033275358378887177,
0.005090263672173023,
0.07661724090576172,
0.05842902511358261,
-0.04945060610771179,
0.025204842910170555,
-0.11067881435155869,
-0.043465983122587204,
-0.08084461838006973,
0.05595723167061806,
-0.11280963569879532,
-0.023448873311281204,
0.009881467558443546,
0.04614632576704025,
-0.06300181150436401,
0.03688469156622887,
-0.05709144100546837,
0.042239077389240265,
-0.07134490460157394,
-0.006678610108792782,
-0.016117583960294724,
0.018734993413090706,
-0.023850470781326294,
0.07591114193201065,
0.0672859400510788,
0.009266993962228298,
-0.091402068734169,
-0.009772341698408127,
-0.042495161294937134,
-0.004996855743229389,
-0.08334952592849731,
-0.041249435395002365,
0.010666259564459324,
-0.05087690055370331,
0.03492770344018936,
0.026004953309893608,
-0.03252873942255974,
-0.06398356705904007,
0.013587725348770618,
-0.037217073142528534,
0.05807846412062645,
-0.10878555476665497,
-0.03804926201701164,
-0.060484327375888824,
-0.008435361087322235,
-0.036529719829559326,
0.07741563767194748,
-0.09166761487722397,
0.015912417322397232,
0.03297271579504013,
0.008962157182395458,
-0.018045855686068535,
0.03889726102352142,
0.006983492523431778,
0.04572043567895889,
-0.00856628455221653,
-0.07300951331853867,
-0.029450150206685066,
-0.07454542815685272,
0.0027489750646054745,
-0.035974159836769104,
-0.03482717275619507,
0.01797504723072052,
-0.07516944408416748,
-0.0987868383526802,
0.02110280841588974,
-0.013720573857426643,
0.03570709750056267,
0.0006984364590607584,
0.04035757854580879,
0.06982022523880005,
0.04039488732814789,
0.06339211761951447,
0.03157524764537811,
-0.01570666767656803,
0.08932608366012573,
0.03134255111217499,
0.0758788213133812,
-0.008173132315278053,
-5.522392498846784e-8,
-0.07437975704669952,
0.01916051097214222,
0.024195341393351555,
0.04449501261115074,
0.024260055273771286,
-0.02558029815554619,
-0.07152114063501358,
0.028357699513435364,
0.000898292229976505,
0.03823602944612503,
0.027564825490117073,
0.0226649921387434,
-0.014349542558193207,
-0.006843963172286749,
0.035497844219207764,
0.0250936821103096,
0.03239288553595543,
0.05098055303096771,
-0.0013811030657961965,
-0.08832772821187973,
0.01044936291873455,
0.02524891309440136,
-0.013099545612931252,
-0.017693588510155678,
0.04326236620545387,
-0.0460093654692173,
-0.04137175902724266,
0.0758015513420105,
-0.025487232953310013,
-0.0357399508357048,
-0.03753401339054108,
0.06010347604751587,
-0.0672682374715805,
-0.04241526871919632,
0.0169305969029665,
0.09012109041213989,
0.014176386408507824,
-0.049695342779159546,
-0.021187709644436836,
0.006143176928162575,
0.11120065301656723,
0.06299643218517303,
-0.037599798291921616,
0.018099604174494743,
0.07834901660680771,
0.027169058099389076,
-0.0166303813457489,
-0.14680176973342896,
-0.03370825946331024,
-0.009492207318544388,
0.04883841425180435,
0.031179428100585938,
-0.047395259141922,
0.13006027042865753,
0.021774427965283394,
-0.02119048684835434,
-0.039147913455963135,
-0.08891415596008301,
0.01280650682747364,
0.050988323986530304,
0.04925403743982315,
-0.04364603012800217,
-0.01976795680820942,
-0.03413474187254906
] |
KETI-AIR/ke-t5-base-ko | fda98d3a8ddad618a447c2e3043cccca5878e986 | 2021-06-23T02:46:59.000Z | [
"pytorch",
"tf",
"jax",
"t5",
"text2text-generation",
"transformers",
"autotrain_compatible"
] | text2text-generation | false | KETI-AIR | null | KETI-AIR/ke-t5-base-ko | 1,241 | 1 | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
mdhugol/indonesia-bert-sentiment-classification | 80ccb4c2817cf976534ac491020a9572e5dae54f | 2021-09-14T08:24:28.000Z | [
"pytorch",
"bert",
"text-classification",
"transformers"
] | text-classification | false | mdhugol | null | mdhugol/indonesia-bert-sentiment-classification | 1,241 | 1 | transformers | Indonesian BERT Base Sentiment Classifier is a sentiment text-classification model. It was fine-tuned from the pre-trained [IndoBERT Base Model (phase1 - uncased)](https://huggingface.co/indobenchmark/indobert-base-p1) on the [Prosa sentiment dataset](https://github.com/indobenchmark/indonlu/tree/master/dataset/smsa_doc-sentiment-prosa).
## How to Use
### As Text Classifier
```python
from transformers import pipeline
from transformers import AutoTokenizer, AutoModelForSequenceClassification
pretrained = "mdhugol/indonesia-bert-sentiment-classification"
model = AutoModelForSequenceClassification.from_pretrained(pretrained)
tokenizer = AutoTokenizer.from_pretrained(pretrained)
sentiment_analysis = pipeline("sentiment-analysis", model=model, tokenizer=tokenizer)
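# The pipeline returns generic labels (LABEL_0/1/2); the mapping below converts them to sentiment names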
label_index = {'LABEL_0': 'positive', 'LABEL_1': 'neutral', 'LABEL_2': 'negative'}
pos_text = "Sangat bahagia hari ini"
neg_text = "Dasar anak sialan!! Kurang ajar!!"
result = sentiment_analysis(pos_text)
status = label_index[result[0]['label']]
score = result[0]['score']
print(f'Text: {pos_text} | Label : {status} ({score * 100:.3f}%)')
result = sentiment_analysis(neg_text)
status = label_index[result[0]['label']]
score = result[0]['score']
print(f'Text: {neg_text} | Label : {status} ({score * 100:.3f}%)')
``` | [
-0.11185584217309952,
-0.0272884089499712,
0.031555477529764175,
0.0035488998983055353,
-0.02992434613406658,
0.045981843024492264,
0.020103227347135544,
0.05837050825357437,
0.012540728785097599,
-0.019212936982512474,
0.029097342863678932,
-0.06990830600261688,
-0.025047527626156807,
0.026196571066975594,
0.06919180601835251,
0.05467287078499794,
0.01413293369114399,
-0.05356641113758087,
-0.08138174563646317,
-0.08396881073713303,
0.09552276879549026,
0.12733058631420135,
0.02913595549762249,
-0.0026424601674079895,
0.02408369816839695,
0.0004000202752649784,
0.006027499679476023,
0.028743674978613853,
0.05208545923233032,
0.04581678286194801,
0.007940290495753288,
0.02409433014690876,
-0.024317502975463867,
0.09734836965799332,
-0.02311304770410061,
0.05826510116457939,
-0.012507254257798195,
0.011059584096074104,
0.08144226670265198,
0.015485761687159538,
0.044262755662202835,
-0.0717606395483017,
-0.05796490237116814,
-0.08964172750711441,
0.08090786635875702,
-0.024074167013168335,
-0.05821993947029114,
-0.027451930567622185,
0.024986637756228447,
-0.07648755609989166,
-0.003716772887855768,
-0.03372359648346901,
0.014241304248571396,
0.030262833461165428,
-0.10258160531520844,
0.027215763926506042,
0.0659986287355423,
-0.00259304023347795,
0.02853993885219097,
-0.06342356652021408,
-0.10882433503866196,
-0.01848440244793892,
0.018389414995908737,
-0.02184240147471428,
-0.11267226189374924,
-0.073273204267025,
-0.012037833221256733,
0.053914960473775864,
0.021728284657001495,
0.05773298814892769,
-0.014710381627082825,
0.0005613134708255529,
0.053641729056835175,
0.04476797953248024,
0.004501573741436005,
-0.024791309610009193,
0.08834097534418106,
-0.0036365094128996134,
0.05826149135828018,
-0.061190225183963776,
-0.03137380629777908,
-0.03409798815846443,
0.11254935711622238,
0.08592545241117477,
0.026394665241241455,
-0.04993822053074837,
0.024941032752394676,
-0.025304971262812614,
-0.048493966460227966,
-0.004293638747185469,
0.009241866879165173,
-0.1174914613366127,
0.03500381112098694,
-0.007910117506980896,
-0.04513496160507202,
0.03340179845690727,
-0.10952749848365784,
0.007785014342516661,
0.008015105500817299,
0.07366125285625458,
-0.01696348749101162,
0.018759774044156075,
-0.022930249571800232,
-0.04457176476716995,
-0.03758099675178528,
0.005892226938158274,
-0.01738503761589527,
-0.02316325157880783,
0.07229027897119522,
-0.05262687802314758,
-0.05976341664791107,
-0.04064507037401199,
-0.034129951149225235,
-0.06235886365175247,
0.004690634552389383,
0.01610863395035267,
-0.06282994151115417,
0.030873915180563927,
-0.013743028976023197,
0.07685333490371704,
0.008176589384675026,
-0.0504460409283638,
-0.01849031262099743,
0.03413870185613632,
-0.018541954457759857,
0.012730401009321213,
-0.02779315784573555,
3.847027504966638e-33,
0.01131278183311224,
0.014919069595634937,
0.06580445170402527,
-0.03392532467842102,
-0.06643503904342651,
-0.019918367266654968,
-0.014205224812030792,
-0.03916824236512184,
-0.04841109737753868,
-0.039370499551296234,
-0.051379166543483734,
0.062369510531425476,
-0.09015850722789764,
0.018948933109641075,
-0.06828199326992035,
-0.024370115250349045,
-0.055373333394527435,
-0.007469764910638332,
0.050546400249004364,
-0.008718469180166721,
0.07908377796411514,
0.06476560980081558,
0.03850472718477249,
-0.1239967867732048,
-0.10626731067895889,
0.02524144947528839,
0.09301122277975082,
-0.061518896371126175,
-0.06650064140558243,
0.0511411614716053,
-0.09762393683195114,
0.04901302978396416,
0.015681937336921692,
0.03699253499507904,
-0.06737601011991501,
-0.03838742524385452,
0.03461697697639465,
-0.004240597598254681,
-0.05861535668373108,
-0.09252530336380005,
-0.008341616950929165,
0.09175654500722885,
0.001089810742996633,
-0.04691137745976448,
-0.005510545801371336,
0.006517836358398199,
-0.005008301697671413,
0.01597265899181366,
0.09108065813779831,
0.10191669315099716,
0.019427001476287842,
0.01246846467256546,
0.0025289272889494896,
0.05054300278425217,
0.02470935322344303,
0.004596631973981857,
0.053263384848833084,
0.03359084948897362,
0.08619816601276398,
-0.09072591364383698,
-0.004260016139596701,
0.003256638068705797,
0.041521623730659485,
-0.0621393546462059,
0.039111848920583725,
-0.018265802413225174,
0.02561284229159355,
0.011956187896430492,
-0.028903571888804436,
-0.013292972929775715,
-0.011858531273901463,
0.01822415180504322,
-0.033972691744565964,
0.040922775864601135,
0.02901962772011757,
-0.06141110509634018,
0.02057582698762417,
0.002537553198635578,
-0.07852659374475479,
0.019378507509827614,
-0.003799106227234006,
-0.04098194092512131,
0.025480374693870544,
-0.0776963084936142,
0.0061923302710056305,
-0.0113898403942585,
0.06229250878095627,
-0.05918416008353233,
-0.02013581059873104,
0.013278787955641747,
-0.01807371713221073,
-0.002423476194962859,
-0.034525636583566666,
0.08623258024454117,
-0.06374387443065643,
-6.194049280519173e-33,
0.012057280167937279,
0.04638092964887619,
-0.13698144257068634,
0.00833900272846222,
-0.03504594787955284,
-0.018444251269102097,
0.04566647857427597,
0.1485838145017624,
-0.005961894057691097,
0.041961222887039185,
0.027859538793563843,
-0.03641033172607422,
0.03766288235783577,
-0.022885944694280624,
0.03889879584312439,
0.007171143312007189,
-0.039900489151477814,
0.07258398085832596,
-0.005153288599103689,
0.02168373018503189,
-0.005819231737405062,
0.07774869352579117,
-0.14742983877658844,
0.09319788217544556,
-0.03891882300376892,
0.025129584595561028,
-0.03216550126671791,
0.09377825260162354,
0.022014709189534187,
-0.004426505416631699,
0.01780402660369873,
0.004937575198709965,
-0.05123318359255791,
0.09744395315647125,
-0.06621773540973663,
-0.06635022908449173,
-0.027171066030859947,
-0.08592888712882996,
-0.019824516028165817,
0.09892476350069046,
0.046093158423900604,
0.052972860634326935,
-0.08352164179086685,
0.032496143132448196,
-0.06848052144050598,
-0.010699900798499584,
0.026113858446478844,
-0.025455238297581673,
0.03690430521965027,
-0.1111823096871376,
0.007277397438883781,
-0.0003822417638730258,
-0.05877285823225975,
0.03051365353167057,
-0.01684926077723503,
-0.05872369557619095,
0.06291098147630692,
-0.057318855077028275,
-0.11259882152080536,
0.02955748699605465,
-0.04980621114373207,
-0.006793357897549868,
0.057423997670412064,
-0.05036827176809311,
-0.014064428396522999,
-0.03230646625161171,
0.03744268789887428,
0.04367775097489357,
0.01629357971251011,
-0.006173304282128811,
0.017879141494631767,
0.0741858258843422,
0.02133159711956978,
-0.024258611723780632,
0.01816675066947937,
-0.003254459472373128,
-0.03131904825568199,
0.004872872959822416,
-0.004385774955153465,
-0.05688488110899925,
-0.05743591487407684,
-0.03170747309923172,
-0.013140162453055382,
0.015732185915112495,
-0.004196195863187313,
0.003641890361905098,
0.05893190577626228,
0.095968097448349,
-0.006530932150781155,
-0.002934045623987913,
0.043034449219703674,
0.01568295992910862,
0.014043676666915417,
0.116978719830513,
0.05969635397195816,
-5.244472589538418e-8,
-0.037633124738931656,
-0.06924177706241608,
-0.014838777482509613,
0.07304831594228745,
-0.07566743344068527,
-0.03256696090102196,
0.019306592643260956,
0.04627974331378937,
-0.060663867741823196,
-0.03769596293568611,
-0.0006200713105499744,
0.05037427693605423,
-0.08026610314846039,
-0.004419277422130108,
-0.019923994317650795,
0.038704805076122284,
0.010988317430019379,
0.12918879091739655,
0.01720357872545719,
-0.04015650972723961,
0.016855433583259583,
-0.04203074052929878,
0.026499824598431587,
-0.08288673311471939,
0.010462607257068157,
-0.016270121559500694,
-0.001370697864331305,
0.053426992148160934,
-0.04485955089330673,
-0.017259810119867325,
-0.04996033385396004,
0.03480461612343788,
-0.11464536190032959,
-0.025610459968447685,
0.005065211560577154,
0.0928443968296051,
0.02585574798285961,
-0.046006008982658386,
0.01009675394743681,
0.04586951434612274,
0.04053274169564247,
0.01969011500477791,
-0.14273521304130554,
-0.01792338117957115,
0.05520660802721977,
0.04151436686515808,
0.022553984075784683,
-0.06423106789588928,
0.006988264620304108,
0.028051111847162247,
0.03129982203245163,
0.002414847956970334,
-0.050427425652742386,
-0.0034148283302783966,
-0.06409380584955215,
0.010025795549154282,
-0.09161295741796494,
-0.025510670617222786,
-0.008137042634189129,
0.009079862385988235,
0.03914550319314003,
-0.0011756374733522534,
0.043443720787763596,
-0.007807915098965168
] |
staka/fugumt-ja-en | 8cb8ff81a8625a626c6f0f19cc5082c6181f223a | 2022-05-29T08:28:51.000Z | [
"pytorch",
"marian",
"text2text-generation",
"en",
"ja",
"transformers",
"translation",
"license:cc-by-sa-4.0",
"autotrain_compatible"
] | translation | false | staka | null | staka/fugumt-ja-en | 1,239 | 2 | transformers | ---
license: cc-by-sa-4.0
language:
- en
- ja
tags:
- translation
widget:
- text: "猫はかわいいです。"
---
# FuguMT
This is a Japanese-to-English translation model built with Marian-NMT.
For more details, please see [my repository](https://github.com/s-taka/fugumt).
* source language: ja
* target language: en
### How to use
This model uses transformers and sentencepiece.
```python
!pip install transformers sentencepiece
```
You can use this model directly with a pipeline:
```python
from transformers import pipeline
fugu_translator = pipeline('translation', model='staka/fugumt-ja-en')
fugu_translator('猫はかわいいです。')
```
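For reference, a corpus-level BLEU score like the one reported under "Eval results" below could be computed with sacrebleu roughly as follows. This is a minimal sketch, not the exact evaluation script: it assumes you already have a list of Japanese `sources` with matching English `references`, and it reuses the `fugu_translator` pipeline defined above.
```python
import sacrebleu

# sources: list of Japanese sentences, references: matching English translations (assumed to exist)
hypotheses = [out["translation_text"] for out in fugu_translator(sources)]
bleu = sacrebleu.corpus_bleu(hypotheses, [references])
print(f"BLEU: {bleu.score:.1f}")
```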
### Eval results
The results of the evaluation using [tatoeba](https://tatoeba.org/ja) (500 randomly selected sentences) are as follows:
|source |target |BLEU(*1)|
|-------|-------|--------|
|ja |en |39.1 |
(*1) sacrebleu | [
-0.06110086292028427,
0.04749888926744461,
-0.030141832306981087,
0.024955326691269875,
0.004617446102201939,
-0.022587601095438004,
0.031613193452358246,
0.05452163144946098,
0.06961628049612045,
-0.06008737161755562,
0.011227213777601719,
-0.12071538716554642,
0.045672107487916946,
0.023460231721401215,
0.08312534540891647,
0.030861953273415565,
-0.006281261797994375,
0.08995702117681503,
-0.03847294673323631,
-0.13134609162807465,
0.05810873955488205,
0.09673626720905304,
0.06871137022972107,
-0.0014337452594190836,
0.012262889184057713,
-0.010409685783088207,
-0.04782181233167648,
-0.008254428394138813,
0.0017660915618762374,
-0.026252901181578636,
-0.051519930362701416,
0.024454675614833832,
-0.05671856179833412,
0.07034781575202942,
0.049891576170921326,
0.11316155642271042,
-0.09968218207359314,
-0.047421034425497055,
0.022121166810393333,
-0.018212679773569107,
-0.043131377547979355,
0.0275852233171463,
-0.008351855911314487,
-0.07268508523702621,
0.08235801756381989,
-0.07767217606306076,
-0.06094004958868027,
-0.006358013022691011,
-0.01009342074394226,
0.0042719123885035515,
-0.1391896903514862,
0.009005478583276272,
0.0007288749911822379,
0.07156137377023697,
0.01820739544928074,
0.02024003118276596,
0.03289732709527016,
-0.044493142515420914,
0.01356474682688713,
-0.09582044929265976,
-0.095059335231781,
0.06311880797147751,
-0.024225255474448204,
-0.0044064586982131,
-0.0344260074198246,
-0.03033798560500145,
-0.0029155805241316557,
0.05215980485081673,
-0.015388857573270798,
0.035919081419706345,
-0.04630708321928978,
-0.0003880165168084204,
0.02444116771221161,
0.001787649467587471,
-0.05431666970252991,
-0.028621310368180275,
0.0834285169839859,
0.023042846471071243,
0.03221042826771736,
0.031713373959064484,
-0.02653573639690876,
-0.06548061966896057,
0.10275979340076447,
-0.0010496467584744096,
0.0010080143110826612,
0.0075540486723184586,
-0.041206203401088715,
-0.002202961128205061,
0.07073410600423813,
0.0778188556432724,
-0.024338461458683014,
-0.08038967102766037,
0.03407358005642891,
0.03865336999297142,
-0.014879881404340267,
0.00011583501327550039,
-0.044508907943964005,
0.010888845659792423,
-0.003571518464013934,
0.1061297133564949,
0.0007344165351241827,
0.0206195916980505,
0.053978875279426575,
-0.06405377388000488,
-0.05963403359055519,
-0.043982043862342834,
0.03286173939704895,
-0.04013005644083023,
0.022224299609661102,
-0.07989975810050964,
-0.026493839919567108,
-0.03763487935066223,
0.023476196452975273,
-0.07723292708396912,
0.04379534348845482,
-0.018559200689196587,
0.032517608255147934,
-0.014193647541105747,
-0.03638423979282379,
0.04249957203865051,
0.022470835596323013,
0.07489269971847534,
-0.0059018866159021854,
0.01901441067457199,
0.0036829824093729258,
0.033014629036188126,
0.0291915126144886,
1.3792595925461115e-33,
0.047222115099430084,
0.012484918348491192,
0.0689825564622879,
0.03028106689453125,
-0.03331328183412552,
0.0005377664929255843,
0.016129130497574806,
-0.03915762901306152,
-0.11273428797721863,
-0.04334748536348343,
-0.08453238010406494,
0.009988175705075264,
-0.12027809023857117,
0.003065095515921712,
-0.046170491725206375,
-0.02726675570011139,
-0.013281070627272129,
-0.008665194734930992,
0.04205432906746864,
0.04582292214035988,
0.09000016003847122,
0.025284184142947197,
-0.0014381491346284747,
-0.07487823069095612,
-0.05897817760705948,
0.006889567710459232,
0.08028478920459747,
-0.05941063538193703,
-0.0791178047657013,
0.06007908284664154,
-0.10576485097408295,
-0.015663087368011475,
-0.004961584694683552,
-0.03815174475312233,
-0.019407421350479126,
-0.0693388283252716,
-0.06018807366490364,
-0.0075523811392486095,
-0.04053700715303421,
-0.05369926616549492,
-0.009551580995321274,
0.0029290709644556046,
-0.021274013444781303,
0.0108414301648736,
-0.010055744089186192,
0.048199187964200974,
0.00372801860794425,
0.013203177601099014,
0.1044066995382309,
0.007665493991225958,
-0.00021111003297846764,
-0.021996866911649704,
-0.02349986881017685,
0.020021067932248116,
0.0727287083864212,
0.02607899159193039,
0.06229952350258827,
0.043485477566719055,
0.060979850590229034,
0.0292891263961792,
-0.022806702181696892,
-0.03291168063879013,
0.039870575070381165,
0.059396207332611084,
0.14818204939365387,
0.019317789003252983,
-0.021134566515684128,
-0.008719039149582386,
0.059384509921073914,
-0.005736089777201414,
-0.08615487068891525,
-0.05393684282898903,
-0.02278805337846279,
0.03559771925210953,
0.03169560432434082,
-0.03965441510081291,
0.01649159938097,
-0.07901205122470856,
-0.014270143583416939,
0.01903126947581768,
-0.01917414180934429,
-0.05519585683941841,
0.03440072387456894,
-0.02152991108596325,
0.04390886798501015,
0.006237860769033432,
0.04296498745679855,
-0.05533568188548088,
0.008431049063801765,
-0.04054584354162216,
-0.024982629343867302,
0.034649599343538284,
-0.012098562903702259,
-0.03832351416349411,
0.04697336256504059,
-2.9338002701502037e-33,
0.01409817487001419,
0.06917125731706619,
-0.047362346202135086,
0.07525338232517242,
-0.059776585549116135,
-0.12002227455377579,
0.05335180461406708,
0.060323044657707214,
0.061208855360746384,
-0.014188897795975208,
-0.0934775322675705,
-0.0829169750213623,
0.07036154717206955,
-0.02201346680521965,
0.04763447493314743,
0.029817942529916763,
0.021668391302227974,
0.000006254169420571998,
0.03059489279985428,
0.07840120792388916,
-0.0785873681306839,
0.07483962923288345,
-0.10489857941865921,
0.02901749312877655,
-0.03663462772965431,
0.02230255678296089,
0.019629381597042084,
-0.009552313946187496,
0.01835949718952179,
-0.036698248237371445,
-0.030490340664982796,
0.01991823874413967,
-0.04175852984189987,
0.07813141494989395,
-0.10273823887109756,
-0.022608501836657524,
0.060833174735307693,
0.010313277132809162,
0.033827219158411026,
0.0723607987165451,
0.03770030662417412,
0.06408865004777908,
-0.017637168988585472,
-0.035170745104551315,
-0.04298785328865051,
0.037802863866090775,
-0.0709323137998581,
-0.009569769725203514,
-0.0006430265493690968,
-0.04393427446484566,
-0.01757984608411789,
0.002252391306683421,
-0.11835857480764389,
-0.00770970294252038,
-0.019870568066835403,
-0.11436938494443893,
-0.0031274768989533186,
-0.07932732254266739,
-0.062445275485515594,
-0.05105038359761238,
-0.05898534134030342,
-0.027398979291319847,
0.05743979290127754,
-0.11284751445055008,
0.03096209652721882,
-0.011813187971711159,
0.048829130828380585,
0.05514596402645111,
0.03774898126721382,
-0.038346435874700546,
0.05957391858100891,
-0.02117244526743889,
0.03200092911720276,
0.020339613780379295,
0.036516930907964706,
-0.0008730345871299505,
-0.05784088373184204,
-0.0021687017288058996,
0.012666182592511177,
-0.025108227506279945,
-0.06460800766944885,
-0.007651923689991236,
0.058276012539863586,
-0.0287117138504982,
0.008457093499600887,
-0.058299027383327484,
0.015652107074856758,
0.03370323404669762,
0.051666852086782455,
0.04998066648840904,
-0.0007254472002387047,
0.015092219226062298,
0.03724493831396103,
0.09088099747896194,
0.016001230105757713,
-5.384404389019437e-8,
-0.07186122983694077,
-0.03483274206519127,
-0.1026233583688736,
0.054029323160648346,
-0.05467231571674347,
-0.020216338336467743,
-0.04589183256030083,
-0.02558049187064171,
-0.003198755206540227,
-0.032671473920345306,
-0.024899590760469437,
0.08292996138334274,
-0.059586916118860245,
0.005605579353868961,
-0.06643978506326675,
0.11923415958881378,
-0.01591428741812706,
0.09707404673099518,
0.027537718415260315,
-0.01782415620982647,
0.054694175720214844,
0.03307491913437843,
0.02851615846157074,
-0.020172759890556335,
-0.029673919081687927,
0.035637203603982925,
-0.1276545375585556,
0.01634676568210125,
0.03361106291413307,
-0.0633881464600563,
0.018466835841536522,
-0.0004853412101510912,
-0.03206596523523331,
-0.029731441289186478,
-0.022728223353624344,
0.02762356773018837,
0.021531177684664726,
-0.07400908321142197,
0.04955195263028145,
0.11245592683553696,
0.1079951599240303,
0.022280342876911163,
-0.09429768472909927,
0.004369422793388367,
-0.021364726126194,
-0.0111430324614048,
-0.05555063858628273,
-0.05332564562559128,
0.04528172314167023,
0.005484714638441801,
0.08218623697757721,
-0.05715787783265114,
-0.03633064404129982,
0.018677668645977974,
0.04713713005185127,
0.050815336406230927,
0.04840134456753731,
-0.03136395663022995,
0.05603477358818054,
0.0002421877288725227,
0.013789652846753597,
0.03909691050648689,
0.05760807916522026,
-0.09376056492328644
] |
cmarkea/distilcamembert-base-qa | ea9c62f924a2464890c04979fa67ef28bb49d2ff | 2022-06-15T15:09:29.000Z | [
"pytorch",
"tf",
"camembert",
"question-answering",
"fr",
"dataset:fquad",
"dataset:piaf",
"transformers",
"license:cc-by-nc-sa-3.0",
"autotrain_compatible"
] | question-answering | false | cmarkea | null | cmarkea/distilcamembert-base-qa | 1,235 | 3 | transformers | ---
language: fr
license: cc-by-nc-sa-3.0
datasets:
- fquad
- piaf
widget:
- text: "Quand et où est sorti Toy Story ?"
context: "Pixar Animation Studios, ou simplement Pixar dans le langage courant, est une société américaine de production de films en images tridimensionnelles de synthèse. Elle a acquis sa notoriété grâce à Toy Story, premier long métrage de ce type, sorti aux États-Unis en 1995. À ce jour, le studio d'animation a remporté dix-neuf Oscars, quatre Golden Globes et trois Grammy Awards ainsi que de nombreuses autres récompenses. Le studio travaille avec PhotoRealistic RenderMan, sa propre version de l'interface de programmation de rendu RenderMan utilisée pour créer des images de haute qualité. Ses studios de production et son siège social se trouvent au Pixar Campus situé à Emeryville près de San Francisco en Californie."
- text: "Quel est le premier long métrage du studio ?"
context: "Pixar Animation Studios, ou simplement Pixar dans le langage courant, est une société américaine de production de films en images tridimensionnelles de synthèse. Elle a acquis sa notoriété grâce à Toy Story, premier long métrage de ce type, sorti aux États-Unis en 1995. À ce jour, le studio d'animation a remporté dix-neuf Oscars, quatre Golden Globes et trois Grammy Awards ainsi que de nombreuses autres récompenses. Le studio travaille avec PhotoRealistic RenderMan, sa propre version de l'interface de programmation de rendu RenderMan utilisée pour créer des images de haute qualité. Ses studios de production et son siège social se trouvent au Pixar Campus situé à Emeryville près de San Francisco en Californie."
---
DistilCamemBERT-QA
==================
We present DistilCamemBERT-QA, a [DistilCamemBERT](https://huggingface.co/cmarkea/distilcamembert-base) model fine-tuned for the question-answering task in French. It was built on two datasets, FQuAD v1.0 and Piaf, which are composed of contexts and questions whose answers lie inside the context.
This model is close to [etalab-ia/camembert-base-squadFR-fquad-piaf](https://huggingface.co/etalab-ia/camembert-base-squadFR-fquad-piaf), which is based on the [CamemBERT](https://huggingface.co/camembert-base) model. The drawback of CamemBERT-based models shows up at scale, for example in the production phase: inference cost can become a technological issue, especially in a cross-encoding setting such as this task. To counteract this effect, we propose this model, which halves the inference time with the same power consumption thanks to DistilCamemBERT.
Dataset
-------
The dataset is composed of FQuAD v1.0 and Piaf with 24'566 questions and answers for the training set and 3'188 for the evaluation set.
Evaluation results and benchmark
--------------------------------
We compare [DistilCamemBERT-QA](https://huggingface.co/cmarkea/distilcamembert-base-qa) to two other models working on the French language. The first one, [etalab-ia/camembert-base-squadFR-fquad-piaf](https://huggingface.co/etalab-ia/camembert-base-squadFR-fquad-piaf), is based on the well-known [CamemBERT](https://huggingface.co/camembert-base), the French RoBERTa model, and the second one, [fmikaelian/flaubert-base-uncased-squad](https://huggingface.co/fmikaelian/flaubert-base-uncased-squad), is based on [FlauBERT](https://huggingface.co/flaubert/flaubert_base_uncased), another French model, this time built on the BERT architecture. To compare the models, three scores are used: exact match, which compares the predicted answer and the ground truth character by character; the f1-score, which measures the quality of the word-level overlap between the predicted answer and the ground truth; and the inclusion score, which measures whether the ground-truth answer is included in the predicted answer. For the mean inference time measure, an **AMD Ryzen 5 4500U @ 2.3GHz with 6 cores** was used.
| **model** | **time (ms)** | **exact match (%)** | **f1-score (%)** | **inclusion-score (%)** |
| :--------------: | :-----------: | :--------------: | :------------: | :------------: |
| [cmarkea/distilcamembert-base-qa](https://huggingface.co/cmarkea/distilcamembert-base-qa) | **216.96** | 25.66 | 62.65 | 59.82 |
| [etalab-ia/camembert-base-squadFR-fquad-piaf](https://huggingface.co/etalab-ia/camembert-base-squadFR-fquad-piaf) | 432.17 | **59.76** | **79.57** | **69.23** |
| [fmikaelian/flaubert-base-uncased-squad](https://huggingface.co/fmikaelian/flaubert-base-uncased-squad) | 875.84 | 0.22 | 5.21 | 3.68 |
Do not take the results of the FlauBERT model into account: there seems to be a problem with that modelization, as its scores are very low.
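For illustration, the three scores described above can be computed along the following lines. This is a minimal sketch of the metric definitions, not the exact evaluation script used for this benchmark; in particular, the text normalization is an assumption.
```python
from collections import Counter
import re

def normalize(text):
    # lowercase and collapse whitespace (assumed normalization)
    return re.sub(r"\s+", " ", text.lower()).strip()

def exact_match(prediction, truth):
    # character-by-character comparison of the normalized strings
    return float(normalize(prediction) == normalize(truth))

def f1(prediction, truth):
    # word-level overlap between predicted answer and ground truth
    pred_tokens = normalize(prediction).split()
    truth_tokens = normalize(truth).split()
    common = Counter(pred_tokens) & Counter(truth_tokens)
    overlap = sum(common.values())
    if overlap == 0:
        return 0.0
    precision = overlap / len(pred_tokens)
    recall = overlap / len(truth_tokens)
    return 2 * precision * recall / (precision + recall)

def inclusion(prediction, truth):
    # whether the ground-truth answer appears inside the predicted answer
    return float(normalize(truth) in normalize(prediction))
```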
How to use DistilCamemBERT-QA
------------------------------
```python
from transformers import pipeline
qa_engine = pipeline(
"question-answering",
model="cmarkea/distilcamembert-base-qa",
tokenizer="cmarkea/distilcamembert-base-qa"
)
result = qa_engine(
context="David Fincher, né le 28 août 1962 à Denver (Colorado), "
"est un réalisateur et producteur américain. Il est principalement "
"connu pour avoir réalisé les films Seven, Fight Club, L'Étrange "
"Histoire de Benjamin Button, The Social Network et Gone Girl qui "
"lui ont valu diverses récompenses et nominations aux Oscars du "
"cinéma ou aux Golden Globes. Réputé pour son perfectionnisme, il "
"peut tourner un très grand nombre de prises de ses plans et "
"séquences afin d'obtenir le rendu visuel qu'il désire. Il a "
"également développé et produit les séries télévisées House of "
"Cards (pour laquelle il remporte l'Emmy Award de la meilleure "
"réalisation pour une série dramatique en 2013) et Mindhunter, "
"diffusées sur Netflix.",
question="Quel est le métier de David Fincher ?"
)
result
{'score': 0.7981914281845093,
'start': 61,
'end': 98,
'answer': ' réalisateur et producteur américain.'}
```
Citation
--------
```bibtex
@inproceedings{delestre:hal-03674695,
TITLE = {{DistilCamemBERT : une distillation du mod{\`e}le fran{\c c}ais CamemBERT}},
AUTHOR = {Delestre, Cyrile and Amar, Abibatou},
URL = {https://hal.archives-ouvertes.fr/hal-03674695},
BOOKTITLE = {{CAp (Conf{\'e}rence sur l'Apprentissage automatique)}},
ADDRESS = {Vannes, France},
YEAR = {2022},
MONTH = Jul,
KEYWORDS = {NLP ; Transformers ; CamemBERT ; Distillation},
PDF = {https://hal.archives-ouvertes.fr/hal-03674695/file/cap2022.pdf},
HAL_ID = {hal-03674695},
HAL_VERSION = {v1},
}
``` | [
-0.04686908796429634,
0.009212997741997242,
0.018989477306604385,
-0.04965265840291977,
-0.02896607108414173,
0.04306996241211891,
-0.001546937506645918,
-0.02002597786486149,
0.06685510277748108,
-0.00549405487254262,
-0.00007440404442604631,
-0.03243853151798248,
0.07240142673254013,
0.0251749400049448,
-0.025692032650113106,
-0.052414074540138245,
0.0010861315531656146,
-0.023334329947829247,
0.0662357434630394,
-0.01475260965526104,
0.08372044563293457,
-0.07393517345190048,
0.02767089381814003,
-0.019929872825741768,
-0.005826161243021488,
0.0032619843259453773,
-0.07989397644996643,
0.012561663053929806,
0.015068759210407734,
-0.04248833656311035,
-0.0038877567276358604,
0.12431247532367706,
0.05577990785241127,
-0.009586778469383717,
0.08277510851621628,
0.05150187760591507,
-0.005140482448041439,
-0.0877816379070282,
-0.04442327097058296,
0.051239628344774246,
-0.06050845980644226,
0.0707707554101944,
-0.06310844421386719,
-0.10093081742525101,
-0.005471676122397184,
-0.05184619873762131,
-0.052224062383174896,
-0.06433138251304626,
-0.09775592386722565,
0.014317076653242111,
-0.12394791841506958,
0.0309281125664711,
0.04346312955021858,
-0.05921882018446922,
-0.02426879294216633,
0.002107639331370592,
0.05579707399010658,
-0.04857315123081207,
0.06795363873243332,
-0.030058585107326508,
-0.073551245033741,
0.01393801812082529,
0.028284115716814995,
0.02578604593873024,
-0.00950145348906517,
-0.06407425552606583,
-0.012887153774499893,
-0.009371240623295307,
-0.07208698987960815,
-0.06496331840753555,
-0.010013637132942677,
-0.021470747888088226,
-0.03622511029243469,
-0.04294509068131447,
-0.04169200360774994,
0.014994656667113304,
-0.02525728940963745,
-0.0137100201100111,
-0.06319747865200043,
-0.16092556715011597,
0.06451018899679184,
-0.06707166880369186,
0.027451667934656143,
-0.03598707914352417,
0.01534870732575655,
0.06130044162273407,
0.04435727745294571,
-0.02166111022233963,
0.02067676931619644,
0.08207206428050995,
-0.10311584919691086,
0.009008863009512424,
-0.019954821094870567,
0.060799941420555115,
0.040504131466150284,
-0.07417066395282745,
-0.006084605120122433,
-0.050871141254901886,
0.018977593630552292,
0.009982281364500523,
0.020933417603373528,
-0.040740322321653366,
0.07230474799871445,
-0.03979858383536339,
0.019768185913562775,
-0.0800919383764267,
0.035374194383621216,
-0.030993981286883354,
-0.03650468587875366,
0.02418440207839012,
0.007158981170505285,
-0.005579172633588314,
-0.04950597882270813,
-0.05347747355699539,
0.012825734913349152,
0.03540000692009926,
-0.03126860782504082,
-0.06419230997562408,
0.04281141608953476,
-0.02973087690770626,
0.07958345860242844,
-0.011724166572093964,
-0.05725427344441414,
0.0006821572897024453,
0.00809694267809391,
-0.03584487363696098,
-0.004263296257704496,
1.328836413392704e-32,
0.05851317569613457,
0.08599814027547836,
0.008713685907423496,
0.0231236070394516,
0.03841462731361389,
-0.008184305392205715,
-0.046435192227363586,
0.044125452637672424,
-0.11402648687362671,
-0.04151324927806854,
-0.025305964052677155,
0.05359427258372307,
-0.10128182917833328,
0.05133970454335213,
0.08651567250490189,
-0.031025202944874763,
0.021784193813800812,
-0.07200663536787033,
0.022962162271142006,
-0.021137971431016922,
-0.04301237687468529,
0.0007886214880272746,
-0.0018157482845708728,
0.012973189353942871,
-0.043060753494501114,
0.09149535000324249,
-0.006539277266710997,
-0.03069751150906086,
-0.07547740638256073,
-0.001790269510820508,
-0.004916541278362274,
-0.0024089047219604254,
0.0845213308930397,
0.03172833099961281,
0.07534126192331314,
-0.10226447880268097,
-0.022910460829734802,
-0.040977105498313904,
-0.034425243735313416,
0.09774060547351837,
0.0049248854629695415,
-0.038124680519104004,
-0.05649334937334061,
-0.04439196363091469,
0.04039900377392769,
0.07253159582614899,
0.08770541101694107,
0.019505910575389862,
0.046623311936855316,
0.07647478580474854,
0.0033463763538748026,
0.056043319404125214,
-0.04907209426164627,
-0.03828342631459236,
0.012087982147932053,
-0.046358659863471985,
-0.03148648515343666,
-0.12793654203414917,
-0.000416306487750262,
-0.05383956432342529,
0.06297433376312256,
0.04877215996384621,
0.027444105595350266,
-0.018319206312298775,
-0.009110035374760628,
0.032354701310396194,
0.09282374382019043,
0.02965334989130497,
0.06153970956802368,
0.06788463145494461,
-0.08627096563577652,
-0.021724803373217583,
0.09554218500852585,
-0.0015386767918244004,
0.10450157523155212,
0.06128179281949997,
-0.078306183218956,
0.037537939846515656,
-0.0025909533724188805,
0.05342880263924599,
-0.06989900022745132,
-0.0016001204494386911,
-0.003494946053251624,
-0.03707943111658096,
-0.011421392671763897,
-0.02909063547849655,
0.05238164961338043,
0.09996946901082993,
0.030743349343538284,
-0.01633262448012829,
0.007592895999550819,
-0.054003145545721054,
-0.018835175782442093,
0.029174989089369774,
-0.04023735597729683,
-1.4968210213441385e-32,
0.006544229108840227,
0.005627993028610945,
-0.07032382488250732,
0.012750948779284954,
-0.012234637513756752,
-0.014870740473270416,
0.01187143661081791,
0.03308854624629021,
0.06439370661973953,
-0.0728360190987587,
-0.036551836878061295,
-0.055261239409446716,
0.05179517716169357,
-0.07410868257284164,
-0.008376521058380604,
0.04783700406551361,
0.012372765690088272,
-0.06685291230678558,
-0.09237006306648254,
0.01893283985555172,
0.04722844809293747,
-0.04979381337761879,
-0.015112304128706455,
0.007828308269381523,
-0.043849486857652664,
0.08374355733394623,
0.09481553733348846,
-0.04647098854184151,
-0.0661362037062645,
-0.007976598106324673,
-0.03622454032301903,
-0.08871811628341675,
-0.012885389849543571,
0.09077873826026917,
-0.055003806948661804,
-0.053265929222106934,
0.08511250466108322,
-0.026544954627752304,
-0.056364160031080246,
0.030491389334201813,
-0.02628401294350624,
0.008134869858622551,
-0.026238514110445976,
0.06908011436462402,
-0.01141855027526617,
-0.05800781399011612,
-0.08720225840806961,
-0.10383246839046478,
0.035266295075416565,
0.011070494540035725,
-0.050185076892375946,
0.06734846532344818,
-0.030207786709070206,
-0.06093611940741539,
0.013361586257815361,
-0.035329025238752365,
-0.020662328228354454,
0.0033851293846964836,
-0.036889322102069855,
0.06211097538471222,
0.025412319228053093,
0.046093977987766266,
-0.02726074494421482,
-0.008246656507253647,
-0.02667357213795185,
-0.01709447242319584,
-0.02424813248217106,
0.012440966442227364,
-0.013002167455852032,
-0.010100370272994041,
0.0984065979719162,
0.03754124045372009,
0.019260376691818237,
-0.049218837171792984,
-0.11534345149993896,
0.031182780861854553,
0.03045400232076645,
0.08368874341249466,
0.06306884437799454,
0.025355985388159752,
0.005409360863268375,
0.008961002342402935,
-0.017265820875763893,
0.043817467987537384,
-0.023104654625058174,
0.05525646358728409,
-0.06650234758853912,
-0.03199826180934906,
0.00849777739495039,
0.01041402481496334,
0.05458547919988632,
0.06620892882347107,
0.08774412423372269,
0.05289902165532112,
0.005219608545303345,
-7.182333661148732e-8,
-0.04508335143327713,
-0.014619525521993637,
-0.0741383507847786,
-0.060019057244062424,
0.04460734501481056,
-0.044296927750110626,
-0.014499361626803875,
-0.003241098951548338,
-0.01990041695535183,
-0.016109583899378777,
0.059960637241601944,
-0.018509164452552795,
-0.06266434490680695,
0.022365178912878036,
0.018761862069368362,
0.02065281569957733,
0.0015767194563522935,
0.02828800305724144,
0.0026458222419023514,
-0.031823527067899704,
-0.017642231658101082,
-0.01800280623137951,
-0.00004840490146307275,
-0.10379698872566223,
-0.1372024267911911,
-0.00990653969347477,
-0.06448640674352646,
-0.1109888106584549,
0.009314997121691704,
-0.012362557463347912,
-0.035759326070547104,
0.05827790126204491,
-0.032165102660655975,
-0.05589362978935242,
0.037081360816955566,
-0.08206859976053238,
-0.00459080096334219,
-0.049634527415037155,
-0.02520015463232994,
-0.0000317199774144683,
0.10592621564865112,
0.0276891328394413,
0.018593549728393555,
-0.007069677114486694,
0.12065322697162628,
0.03801582753658295,
0.04108670353889465,
-0.1324489712715149,
0.054008662700653076,
0.10507655888795853,
-0.04259113967418671,
-0.019937751814723015,
-0.0654035285115242,
0.034260135143995285,
0.05801784619688988,
-0.019236411899328232,
0.019669584929943085,
0.012225338257849216,
-0.00586728285998106,
0.03084995038807392,
0.02139977179467678,
-0.03144991397857666,
0.030408216640353203,
0.04728974029421806
] |
elgeish/wav2vec2-large-xlsr-53-arabic | b5e6df14064b879671fd242c0366cbe2a68effc9 | 2022-06-04T23:37:05.000Z | [
"pytorch",
"jax",
"wav2vec2",
"automatic-speech-recognition",
"ar",
"dataset:arabic_speech_corpus",
"dataset:mozilla-foundation/common_voice_6_1",
"transformers",
"audio",
"speech",
"xlsr-fine-tuning-week",
"hf-asr-leaderboard",
"license:apache-2.0",
"model-index"
] | automatic-speech-recognition | false | elgeish | null | elgeish/wav2vec2-large-xlsr-53-arabic | 1,230 | 6 | transformers | ---
language: ar
datasets:
- arabic_speech_corpus
- mozilla-foundation/common_voice_6_1
metrics:
- wer
tags:
- audio
- automatic-speech-recognition
- speech
- xlsr-fine-tuning-week
- hf-asr-leaderboard
license: apache-2.0
model-index:
- name: elgeish-wav2vec2-large-xlsr-53-arabic
results:
- task:
name: Automatic Speech Recognition
type: automatic-speech-recognition
dataset:
name: Common Voice 6.1 (Arabic)
type: mozilla-foundation/common_voice_6_1
config: ar
split: test
args:
language: ar
metrics:
- name: Test WER
type: wer
value: 26.55
- name: Validation WER
type: wer
value: 23.39
---
# Wav2Vec2-Large-XLSR-53-Arabic
Fine-tuned [facebook/wav2vec2-large-xlsr-53](https://huggingface.co/facebook/wav2vec2-large-xlsr-53)
on Arabic using the `train` splits of [Common Voice](https://huggingface.co/datasets/common_voice)
and [Arabic Speech Corpus](https://huggingface.co/datasets/arabic_speech_corpus).
When using this model, make sure that your speech input is sampled at 16kHz.
## Usage
The model can be used directly (without a language model) as follows:
```python
import torch
import torchaudio
from datasets import load_dataset
from lang_trans.arabic import buckwalter
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
dataset = load_dataset("common_voice", "ar", split="test[:10]")
resamplers = { # all three sampling rates exist in test split
48000: torchaudio.transforms.Resample(48000, 16000),
44100: torchaudio.transforms.Resample(44100, 16000),
32000: torchaudio.transforms.Resample(32000, 16000),
}
def prepare_example(example):
speech, sampling_rate = torchaudio.load(example["path"])
example["speech"] = resamplers[sampling_rate](speech).squeeze().numpy()
return example
dataset = dataset.map(prepare_example)
processor = Wav2Vec2Processor.from_pretrained("elgeish/wav2vec2-large-xlsr-53-arabic")
model = Wav2Vec2ForCTC.from_pretrained("elgeish/wav2vec2-large-xlsr-53-arabic").eval()
def predict(batch):
inputs = processor(batch["speech"], sampling_rate=16000, return_tensors="pt", padding=True)
with torch.no_grad():
predicted = torch.argmax(model(inputs.input_values).logits, dim=-1)
predicted[predicted == -100] = processor.tokenizer.pad_token_id # see fine-tuning script
batch["predicted"] = processor.tokenizer.batch_decode(predicted)
return batch
dataset = dataset.map(predict, batched=True, batch_size=1, remove_columns=["speech"])
for reference, predicted in zip(dataset["sentence"], dataset["predicted"]):
print("reference:", reference)
print("predicted:", buckwalter.untrans(predicted))
print("--")
```
Here's the output:
```
reference: ألديك قلم ؟
predicted: هلديك قالر
--
reference: ليست هناك مسافة على هذه الأرض أبعد من يوم أمس.
predicted: ليست نالك مسافة على هذه الأرض أبعد من يوم أمس
--
reference: إنك تكبر المشكلة.
predicted: إنك تكبر المشكلة
--
reference: يرغب أن يلتقي بك.
predicted: يرغب أن يلتقي بك
--
reference: إنهم لا يعرفون لماذا حتى.
predicted: إنهم لا يعرفون لماذا حتى
--
reference: سيسعدني مساعدتك أي وقت تحب.
predicted: سيسئدني مساعد سكرأي وقت تحب
--
reference: أَحَبُّ نظريّة علمية إليّ هي أن حلقات زحل مكونة بالكامل من الأمتعة المفقودة.
predicted: أحب ناضريةً علمية إلي هي أنحل قتزح المكونا بالكامل من الأمت عن المفقودة
--
reference: سأشتري له قلماً.
predicted: سأشتري له قلما
--
reference: أين المشكلة ؟
predicted: أين المشكل
--
reference: وَلِلَّهِ يَسْجُدُ مَا فِي السَّمَاوَاتِ وَمَا فِي الْأَرْضِ مِنْ دَابَّةٍ وَالْمَلَائِكَةُ وَهُمْ لَا يَسْتَكْبِرُونَ
predicted: ولله يسجد ما في السماوات وما في الأرض من دابة والملائكة وهم لا يستكبرون
--
```
## Evaluation
The model can be evaluated as follows on the Arabic test data of Common Voice:
```python
import jiwer
import torch
import torchaudio
from datasets import load_dataset
from lang_trans.arabic import buckwalter
from transformers import set_seed, Wav2Vec2ForCTC, Wav2Vec2Processor
set_seed(42)
test_split = load_dataset("common_voice", "ar", split="test")
resamplers = { # all three sampling rates exist in test split
48000: torchaudio.transforms.Resample(48000, 16000),
44100: torchaudio.transforms.Resample(44100, 16000),
32000: torchaudio.transforms.Resample(32000, 16000),
}
def prepare_example(example):
speech, sampling_rate = torchaudio.load(example["path"])
example["speech"] = resamplers[sampling_rate](speech).squeeze().numpy()
return example
test_split = test_split.map(prepare_example)
processor = Wav2Vec2Processor.from_pretrained("elgeish/wav2vec2-large-xlsr-53-arabic")
model = Wav2Vec2ForCTC.from_pretrained("elgeish/wav2vec2-large-xlsr-53-arabic").to("cuda").eval()
def predict(batch):
inputs = processor(batch["speech"], sampling_rate=16000, return_tensors="pt", padding=True)
with torch.no_grad():
predicted = torch.argmax(model(inputs.input_values.to("cuda")).logits, dim=-1)
predicted[predicted == -100] = processor.tokenizer.pad_token_id # see fine-tuning script
batch["predicted"] = processor.batch_decode(predicted)
return batch
test_split = test_split.map(predict, batched=True, batch_size=16, remove_columns=["speech"])
transformation = jiwer.Compose([
# normalize some diacritics, remove punctuation, and replace Persian letters with Arabic ones
jiwer.SubstituteRegexes({
r'[auiFNKo\~_،؟»\?;:\-,\.؛«!"]': "", "\u06D6": "",
r"[\|\{]": "A", "p": "h", "ک": "k", "ی": "y"}),
# default transformation below
jiwer.RemoveMultipleSpaces(),
jiwer.Strip(),
jiwer.SentencesToListOfWords(),
jiwer.RemoveEmptyStrings(),
])
metrics = jiwer.compute_measures(
truth=[buckwalter.trans(s) for s in test_split["sentence"]], # Buckwalter transliteration
hypothesis=test_split["predicted"],
truth_transform=transformation,
hypothesis_transform=transformation,
)
print(f"WER: {metrics['wer']:.2%}")
```
**Test Result**: 26.55%
## Training
For more details, see [Fine-Tuning with Arabic Speech Corpus](https://github.com/huggingface/transformers/tree/1c06240e1b3477728129bb58e7b6c7734bb5074e/examples/research_projects/wav2vec2#fine-tuning-with-arabic-speech-corpus).
This model represents Arabic in a format called [Buckwalter transliteration](https://en.wikipedia.org/wiki/Buckwalter_transliteration).
The Buckwalter format only includes ASCII characters, some of which are non-alpha (e.g., `">"` maps to `"أ"`).
The [lang-trans](https://github.com/kariminf/lang-trans) package is used to convert (transliterate) Arabic abjad.
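As a quick illustration, the helpers look roughly like this (a sketch only; the exact round-trip behaviour depends on diacritics):
```python
from lang_trans.arabic import buckwalter

# Arabic -> Buckwalter (ASCII) and back; for example ">" corresponds to "أ"
ascii_text = buckwalter.trans("أين المشكلة")
arabic_text = buckwalter.untrans(ascii_text)
```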
[This script](https://github.com/huggingface/transformers/blob/1c06240e1b3477728129bb58e7b6c7734bb5074e/examples/research_projects/wav2vec2/finetune_large_xlsr_53_arabic_speech_corpus.sh)
was used to first fine-tune [facebook/wav2vec2-large-xlsr-53](https://huggingface.co/facebook/wav2vec2-large-xlsr-53)
on the `train` split of the [Arabic Speech Corpus](https://huggingface.co/datasets/arabic_speech_corpus) dataset;
the `test` split was used for model selection; the resulting model at this point is saved as [elgeish/wav2vec2-large-xlsr-53-levantine-arabic](https://huggingface.co/elgeish/wav2vec2-large-xlsr-53-levantine-arabic).
Training was then resumed using the `train` split of the [Common Voice](https://huggingface.co/datasets/common_voice) dataset;
the `validation` split was used for model selection;
training was stopped to meet the deadline of [Fine-Tune-XLSR Week](https://github.com/huggingface/transformers/blob/700229f8a4003c4f71f29275e0874b5ba58cd39d/examples/research_projects/wav2vec2/FINE_TUNE_XLSR_WAV2VEC2.md):
this model is the checkpoint at 100k steps and a validation WER of **23.39%**.
<img src="https://huggingface.co/elgeish/wav2vec2-large-xlsr-53-arabic/raw/main/validation_wer.png" alt="Validation WER" width="100%" />
It's worth noting that validation WER is trending down, indicating the potential of further training (resuming the decaying learning rate at 7e-6).
## Future Work
One area to explore is using `attention_mask` in model input, which is recommended [here](https://huggingface.co/blog/fine-tune-xlsr-wav2vec2).
Another direction is to explore data augmentation using the datasets used to train the models listed [here](https://paperswithcode.com/sota/speech-recognition-on-common-voice-arabic).
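A sketch of the first idea, reusing `processor`, `model`, and `batch` from the evaluation snippet above (untested here; whether the processor returns an attention mask depends on its feature-extractor configuration):
```python
inputs = processor(batch["speech"], sampling_rate=16000, return_tensors="pt", padding=True)
with torch.no_grad():
    logits = model(
        inputs.input_values.to("cuda"),
        attention_mask=inputs.attention_mask.to("cuda"),  # hypothetical change: pass the mask explicitly
    ).logits
predicted = torch.argmax(logits, dim=-1)
```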
| [
-0.05971662327647209,
-0.04161885008215904,
-0.05014706403017044,
-0.051968272775411606,
-0.0197196863591671,
-0.00043111148988828063,
-0.010884481482207775,
-0.138751819729805,
-0.03605477511882782,
-0.06449650228023529,
-0.0008760940982028842,
-0.11000842601060867,
-0.02698618732392788,
-0.011255449615418911,
0.01303145196288824,
-0.026859596371650696,
-0.04534555971622467,
-0.08267491310834885,
-0.0536532886326313,
-0.06150263547897339,
0.027130702510476112,
0.1080700010061264,
0.07870865613222122,
0.003037308109924197,
0.04261649772524834,
0.01881742663681507,
-0.09721130132675171,
0.05059444159269333,
0.03285524994134903,
-0.0754716619849205,
0.12847883999347687,
0.06604849547147751,
0.07721340656280518,
0.007388608995825052,
0.04355882853269577,
0.0634254664182663,
0.01119737047702074,
-0.02441965602338314,
-0.015161757357418537,
-0.009194961749017239,
0.023828329518437386,
-0.023852964863181114,
0.03322639688849449,
-0.048057522624731064,
-0.022645967081189156,
-0.04001932218670845,
-0.07701350003480911,
0.009263270534574986,
0.028879595920443535,
0.05616041645407677,
-0.13990122079849243,
-0.01732209324836731,
0.02484786882996559,
0.022644802927970886,
-0.05263354629278183,
-0.016940483823418617,
-0.035254813730716705,
0.03305962681770325,
0.057167187333106995,
-0.015006793662905693,
-0.05266221612691879,
-0.002056218683719635,
-0.05164216458797455,
0.04784571751952171,
-0.08190692961215973,
-0.035601403564214706,
0.032496124505996704,
-0.05981786176562309,
-0.009535005316138268,
0.004760565236210823,
-0.09870566427707672,
0.060831788927316666,
-0.009528785943984985,
0.05047754943370819,
-0.008290087804198265,
0.01881156675517559,
0.0013029580004513264,
-0.10226887464523315,
0.04990193620324135,
-0.0585123673081398,
-0.03776951879262924,
-0.07942072302103043,
0.004451586399227381,
0.029924113303422928,
0.10331888496875763,
-0.020145196467638016,
-0.018289823085069656,
-0.04317351058125496,
-0.03619706258177757,
-0.03668169304728508,
-0.019482828676700592,
-0.034364622086286545,
-0.012086605653166771,
0.09127356857061386,
0.03313840925693512,
0.07051169872283936,
0.03235038369894028,
0.06545595079660416,
-0.037183549255132675,
0.12399931997060776,
-0.014183873310685158,
-0.08281662315130234,
0.057329628616571426,
0.0038985004648566246,
0.005301625933498144,
-0.05347486212849617,
0.0016408524243161082,
0.0817495584487915,
0.0331566222012043,
-0.04874102771282196,
-0.03959105163812637,
0.012709186412394047,
-0.008103564381599426,
-0.03657057136297226,
0.03849825635552406,
0.027824094519019127,
-0.0620419904589653,
-0.055593036115169525,
0.02276727370917797,
0.024210751056671143,
-0.04898623749613762,
-0.012831564992666245,
-0.013659344054758549,
-0.03619730472564697,
0.0383119210600853,
0.027484161779284477,
-0.0021242196671664715,
3.914534308824439e-33,
0.04749533534049988,
0.015548871830105782,
0.03051607310771942,
-0.011791679076850414,
-0.018279816955327988,
-0.054498810321092606,
-0.049055520445108414,
0.04468546062707901,
-0.04896334186196327,
-0.034862127155065536,
0.010219551622867584,
-0.006576310843229294,
-0.05176616460084915,
0.04367343708872795,
0.06297875940799713,
0.0024887635372579098,
0.005760176107287407,
0.007937025278806686,
-0.02636335976421833,
0.00009964401397155598,
0.10577572882175446,
0.024623315781354904,
0.011526986956596375,
-0.0259533878415823,
-0.0009618217591196299,
0.04115843400359154,
0.09366044402122498,
-0.04273711517453194,
-0.03454752266407013,
0.05018424242734909,
-0.005297414027154446,
-0.040334515273571014,
-0.003049285616725683,
-0.008252092637121677,
0.02143072336912155,
-0.016881022602319717,
-0.04852452874183655,
0.05227838084101677,
-0.09951340407133102,
-0.05697469413280487,
0.01824002154171467,
0.03873654082417488,
-0.02265177108347416,
-0.061511531472206116,
0.04387269914150238,
-0.06296757608652115,
-0.008717932738363743,
0.051794860512018204,
0.023478113114833832,
0.05786042660474777,
-0.056925080716609955,
0.04239974915981293,
-0.010748255997896194,
0.013425441458821297,
-0.03150485083460808,
-0.031243927776813507,
0.06140821799635887,
0.09029662609100342,
-0.002109045162796974,
-0.00548082310706377,
-0.028911087661981583,
-0.007077110465615988,
0.014037054032087326,
-0.007414419669657946,
0.042761433869600296,
-0.056609146296978,
0.0207422636449337,
-0.012311703525483608,
0.04185955226421356,
0.033613186329603195,
0.00449829688295722,
-0.0016875934088602662,
0.10477454215288162,
0.0938636064529419,
-0.03764239326119423,
-0.029031196609139442,
-0.0162733793258667,
-0.04954706132411957,
0.0017259296728298068,
-0.003811608999967575,
-0.012323674745857716,
0.060996413230895996,
0.04408331587910652,
-0.010918454267084599,
-0.05348813161253929,
-0.04613953456282616,
0.03909045085310936,
-0.057384852319955826,
-0.02937154471874237,
-0.009523396380245686,
-0.04037497192621231,
0.0999726876616478,
-0.08103068917989731,
-0.06086191162467003,
-0.02248530089855194,
-5.305371877877677e-33,
0.029397813603281975,
0.05802330747246742,
0.005973103456199169,
0.09512662142515182,
0.012856426648795605,
-0.012318811379373074,
0.19809046387672424,
0.09435174614191055,
0.027611151337623596,
-0.036704760044813156,
0.08396994322538376,
-0.06749680638313293,
0.08069628477096558,
-0.08577905595302582,
0.07083634287118912,
-0.03320576250553131,
-0.043670378625392914,
-0.024839896708726883,
0.09241326153278351,
0.060473404824733734,
-0.018649661913514137,
0.10951119661331177,
-0.04670814424753189,
0.08596952259540558,
-0.04858659580349922,
-0.03164136782288551,
-0.028372079133987427,
-0.006689907051622868,
-0.022604208439588547,
-0.008435038849711418,
-0.010330280289053917,
0.02357434295117855,
-0.15855441987514496,
0.03799368813633919,
-0.009614516980946064,
-0.022144759073853493,
-0.0065985447727143764,
0.02022978663444519,
-0.027633244171738625,
0.07858961820602417,
0.09575100243091583,
0.057318542152643204,
-0.072845458984375,
-0.08436418324708939,
0.04810711741447449,
-0.041285283863544464,
0.010060466825962067,
0.006821824703365564,
-0.04717804864048958,
-0.09022539108991623,
0.0321878120303154,
-0.026671571657061577,
0.016350341960787773,
0.01565919630229473,
0.02582348883152008,
-0.027985939756035805,
0.0139972735196352,
-0.04685723036527634,
-0.07080995291471481,
0.0048266504891216755,
-0.0232623852789402,
0.018360093235969543,
-0.06148424372076988,
-0.039435975253582,
0.06356976926326752,
0.024218648672103882,
-0.01701626181602478,
-0.015325763262808323,
0.04155821353197098,
-0.001258333446457982,
-0.0349101722240448,
-0.016629310324788094,
0.015088574960827827,
-0.015569127164781094,
0.0034447582438588142,
-0.005232783500105143,
-0.09492158144712448,
-0.025683902204036713,
-0.01283600740134716,
-0.031990498304367065,
-0.009306926280260086,
0.04530256241559982,
0.007677019108086824,
0.016057923436164856,
0.029433932155370712,
0.07651893049478531,
0.005594328045845032,
0.03996411710977554,
-0.02782900258898735,
0.04357856512069702,
-0.012797976844012737,
0.04391681030392647,
-0.03353537619113922,
0.06679614633321762,
-0.007860414683818817,
-5.189639651348443e-8,
-0.07984314858913422,
-0.010647064074873924,
-0.02899748459458351,
0.0020452134776860476,
0.004647707566618919,
-0.012755058705806732,
-0.005206703208386898,
-0.03332217410206795,
0.024767955765128136,
-0.024305695667862892,
0.07559479773044586,
-0.038212232291698456,
-0.04984518140554428,
0.08947303891181946,
-0.03449111431837082,
-0.09336622059345245,
-0.056197818368673325,
0.14344166219234467,
-0.04813145846128464,
-0.12108530849218369,
0.023733800277113914,
0.012505343183875084,
0.004777989350259304,
-0.01666308008134365,
0.044430240988731384,
-0.011910509318113327,
-0.01937035098671913,
0.10053376853466034,
0.020611846819519997,
0.03418852388858795,
-0.04259330406785011,
0.022389264777302742,
-0.02984691970050335,
-0.11613710969686508,
0.03819948807358742,
0.07629278302192688,
-0.08551152795553207,
-0.0024389096070080996,
0.003899852279573679,
0.03854616358876228,
0.04948078840970993,
0.08101546764373779,
-0.07613064348697662,
-0.02500603161752224,
0.0737617090344429,
-0.00992575567215681,
-0.0599844865500927,
-0.015037869103252888,
0.05611961707472801,
-0.022348983213305473,
0.06829548627138138,
0.00015431434439960867,
0.02274744212627411,
0.005740254186093807,
0.07377488911151886,
0.02171372063457966,
0.02865792065858841,
-0.07794301956892014,
0.0527016706764698,
0.0027491983491927385,
0.1277131736278534,
-0.07013122737407684,
-0.06290458887815475,
0.002221120987087488
] |
hetpandya/t5-small-tapaco | d9695bcb99a04766dbc41d636bf6b8646710b1e9 | 2021-06-30T06:36:41.000Z | [
"pytorch",
"t5",
"text2text-generation",
"en",
"dataset:tapaco",
"transformers",
"autotrain_compatible"
] | text2text-generation | false | hetpandya | null | hetpandya/t5-small-tapaco | 1,230 | null | transformers | ---
language: en
datasets:
- tapaco
---
# T5-small for paraphrase generation
Google's T5-small fine-tuned on the [TaPaCo](https://huggingface.co/datasets/tapaco) dataset for paraphrase generation.
## Model in Action 🚀
```python
from transformers import T5ForConditionalGeneration, T5Tokenizer
tokenizer = T5Tokenizer.from_pretrained("hetpandya/t5-small-tapaco")
model = T5ForConditionalGeneration.from_pretrained("hetpandya/t5-small-tapaco")
def get_paraphrases(sentence, prefix="paraphrase: ", n_predictions=5, top_k=120, max_length=256, device="cpu"):
text = prefix + sentence + " </s>"
encoding = tokenizer.encode_plus(
text, pad_to_max_length=True, return_tensors="pt"
)
input_ids, attention_masks = encoding["input_ids"].to(device), encoding[
"attention_mask"
].to(device)
model_output = model.generate(
input_ids=input_ids,
attention_mask=attention_masks,
do_sample=True,
max_length=max_length,
top_k=top_k,
top_p=0.98,
early_stopping=True,
num_return_sequences=n_predictions,
)
outputs = []
for output in model_output:
generated_sent = tokenizer.decode(
output, skip_special_tokens=True, clean_up_tokenization_spaces=True
)
if (
generated_sent.lower() != sentence.lower()
and generated_sent not in outputs
):
outputs.append(generated_sent)
return outputs
paraphrases = get_paraphrases("The house will be cleaned by me every Saturday.")
for sent in paraphrases:
print(sent)
```
## Output
```
The house is cleaned every Saturday by me.
The house will be cleaned on Saturday.
I will clean the house every Saturday.
I get the house cleaned every Saturday.
I will clean this house every Saturday.
```
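Note that because `do_sample=True`, the paraphrases above will vary from run to run; seeding the generators first makes them reproducible:
```python
from transformers import set_seed

set_seed(42)  # call before get_paraphrases() for reproducible sampling
```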
## Model fine-tuning
Please find my guide on fine-tuning the model here:
https://towardsdatascience.com/training-t5-for-paraphrase-generation-ab3b5be151a2
Created by [Het Pandya/@hetpandya](https://github.com/hetpandya) | [LinkedIn](https://www.linkedin.com/in/het-pandya)
Made with <span style="color: red;">♥</span> in India | [
-0.08427716046571732,
-0.03606363758444786,
0.04441409558057785,
0.001035271561704576,
-0.015796590596437454,
-0.012963401153683662,
0.01894739829003811,
0.040713146328926086,
-0.03209112584590912,
-0.04620911926031113,
0.04128267243504524,
-0.0714435949921608,
-0.008668242022395134,
-0.032587211579084396,
0.00021649444533977658,
0.020859969779849052,
0.009949538856744766,
-0.013440298847854137,
-0.13477766513824463,
-0.14849038422107697,
0.16039328277111053,
0.08674800395965576,
0.03995521739125252,
-0.00561183039098978,
0.062282174825668335,
0.04541908949613571,
-0.05739601328969002,
-0.0016323522431775928,
0.023351792246103287,
0.052669428288936615,
-0.04500642418861389,
0.0002471937332302332,
-0.06811372935771942,
0.0775798112154007,
0.026713427156209946,
0.06949185580015182,
-0.1192018985748291,
0.0243842676281929,
0.02834150940179825,
0.008194388821721077,
-0.011094776913523674,
-0.046648990362882614,
-0.07617556303739548,
0.028164781630039215,
0.08726366609334946,
-0.00027254357701167464,
-0.04815371334552765,
-0.013985115103423595,
-0.01651707850396633,
-0.02396281808614731,
-0.06830687075853348,
-0.03169691190123558,
0.008882163092494011,
0.05493887886404991,
-0.029010161757469177,
-0.029836438596248627,
-0.010529417544603348,
0.008387349545955658,
0.0762278288602829,
-0.07359470427036285,
-0.06892050057649612,
-0.06250625103712082,
0.009762099012732506,
-0.01680072210729122,
-0.03170732781291008,
-0.007247847970575094,
0.09158914536237717,
0.03790511190891266,
-0.05046628415584564,
0.06234705075621605,
-0.054518964141607285,
0.03710687533020973,
0.06558414548635483,
0.010780969634652138,
-0.0010801840107887983,
0.019488181918859482,
0.09211442619562149,
-0.022132959216833115,
0.02700302004814148,
-0.05184900015592575,
0.03110821172595024,
-0.047694478183984756,
0.06882309168577194,
0.06869913637638092,
0.008574635721743107,
-0.05312482267618179,
0.030810564756393433,
-0.0011578529374673963,
0.013983920216560364,
-0.015499012544751167,
0.0035341165494173765,
-0.08056389540433884,
0.03088965080678463,
-0.051097508519887924,
-0.03218451887369156,
0.04668872058391571,
-0.11827436834573746,
-0.09920020401477814,
-0.08174001425504684,
0.03151065856218338,
0.024884169921278954,
-0.01166240032762289,
0.03481846675276756,
-0.07261738181114197,
-0.02624644711613655,
0.004104141611605883,
0.07252894341945648,
0.0049730027094483376,
0.03349905461072922,
0.004327889531850815,
-0.021595250815153122,
0.05949348583817482,
-0.006831286940723658,
-0.03960996866226196,
0.04048510640859604,
0.027130845934152603,
-0.07776305824518204,
-0.03311938792467117,
0.035611592233181,
0.03500692918896675,
-0.007363654673099518,
0.06910456717014313,
-0.0751296654343605,
0.026589054614305496,
-0.04296746477484703,
-0.02225092425942421,
-0.03998111933469772,
7.114001445048265e-33,
0.04849528148770332,
0.06753050535917282,
0.04847852885723114,
-0.014080692082643509,
-0.09066657721996307,
-0.0003197124460712075,
-0.012837924994528294,
0.02876068837940693,
-0.04235583916306496,
0.02873416058719158,
-0.1033981591463089,
-0.03559605032205582,
-0.10092168301343918,
-0.0018393364734947681,
0.03308640047907829,
0.0065992362797260284,
-0.10646548867225647,
0.06401146948337555,
0.017752718180418015,
0.03605470433831215,
0.06558546423912048,
0.060896992683410645,
-0.031584903597831726,
-0.071589894592762,
-0.07486813515424728,
0.009003905579447746,
0.044853802770376205,
-0.12318708747625351,
-0.07328339666128159,
0.02252102643251419,
-0.1388726532459259,
-0.02371794916689396,
0.007492119446396828,
-0.026469988748431206,
0.030635248869657516,
-0.04541851580142975,
0.05324079841375351,
-0.045060042291879654,
0.00007304076279979199,
-0.053708989173173904,
-0.026563284918665886,
0.046077661216259,
0.04151187464594841,
-0.020933853462338448,
-0.015084550715982914,
-0.04064788669347763,
-0.006705085746943951,
-0.007998276501893997,
-0.00974859669804573,
0.06871667504310608,
-0.021299423649907112,
-0.010891088284552097,
0.009759027510881424,
-0.07301750034093857,
0.03771044313907623,
0.024066399782896042,
0.024540118873119354,
0.021771231666207314,
0.10941774398088455,
-0.031120076775550842,
-0.002668413333594799,
-0.04191289097070694,
0.02571486495435238,
0.022063923999667168,
0.07958322763442993,
0.04883203282952309,
-0.04202669858932495,
0.015738248825073242,
0.05342584475874901,
0.039742279797792435,
-0.068759486079216,
-0.048384908586740494,
-0.048261214047670364,
0.010186520405113697,
0.043993644416332245,
-0.04693770036101341,
0.013519087806344032,
-0.033471863716840744,
-0.09922627359628677,
0.020467326045036316,
0.020920565351843834,
-0.06280577927827835,
-0.02240554802119732,
-0.028022637590765953,
-0.012306423857808113,
0.011380464769899845,
0.08737322688102722,
-0.11326655745506287,
-0.009309515357017517,
-0.011163527145981789,
-0.03847572207450867,
0.01683974824845791,
-0.05641402304172516,
-0.04791506007313728,
-0.06308557093143463,
-6.985361955449997e-33,
0.05802518129348755,
0.02606239542365074,
-0.011234061792492867,
0.10924875736236572,
0.02895396575331688,
-0.018950602039694786,
0.019940510392189026,
0.049177419394254684,
-0.01048968080431223,
0.007276165299117565,
0.031686946749687195,
-0.023161429911851883,
0.061763375997543335,
-0.08298873156309128,
0.06153855845332146,
0.025830039754509926,
-0.042355023324489594,
0.056341592222452164,
0.021460270509123802,
0.053322404623031616,
-0.02363797277212143,
0.0038047845009714365,
-0.07194547355175018,
0.025168651714920998,
-0.038701239973306656,
0.042540352791547775,
0.028045300394296646,
0.06517508625984192,
0.009230129420757294,
-0.05201394855976105,
-0.06060343235731125,
0.023616129532456398,
-0.07888582348823547,
0.08153647929430008,
-0.05649463087320328,
0.03682751953601837,
0.07394509762525558,
-0.06633522361516953,
-0.028897244483232498,
0.05202392488718033,
0.08532603830099106,
0.027447473257780075,
-0.004354638978838921,
0.009652580134570599,
-0.10406283289194107,
-0.024108264595270157,
-0.12212524563074112,
-0.014555307105183601,
-0.004440691322088242,
-0.04236982762813568,
0.08301417529582977,
0.043118443340063095,
-0.1066184863448143,
0.016582148149609566,
0.0030670242849737406,
-0.10096000880002975,
0.06203378736972809,
-0.09099043160676956,
-0.06646188348531723,
-0.027442872524261475,
-0.0372525230050087,
0.0005492137279361486,
0.014674738049507141,
-0.07024148851633072,
0.04573924094438553,
-0.033042434602975845,
-0.007897375151515007,
0.0037573990412056446,
0.016169948503375053,
0.0037668151780962944,
-0.03174801170825958,
-0.031071007251739502,
0.06802915036678314,
-0.03138691559433937,
-0.015211937949061394,
0.02058199793100357,
0.017297005280852318,
-0.03826652467250824,
0.04333524405956268,
-0.0903787761926651,
0.04914617910981178,
0.0020134311635047197,
0.05924726277589798,
0.04473342001438141,
0.006407120730727911,
0.02064093016088009,
0.06360151618719101,
0.1597728133201599,
0.03194854408502579,
0.0857185497879982,
-0.033024709671735764,
0.006511609070003033,
0.025030730292201042,
0.14740101993083954,
0.0446668341755867,
-6.1361809855498e-8,
-0.011254503391683102,
0.09243402630090714,
-0.01774049736559391,
0.09210535138845444,
-0.05586402118206024,
-0.023354489356279373,
-0.025823114439845085,
0.0061300345696508884,
0.027198733761906624,
0.002000982640311122,
0.08976390957832336,
-0.01142124179750681,
-0.020126139745116234,
0.05649688467383385,
-0.015362875536084175,
0.06941823661327362,
-0.06094219908118248,
0.02340492233633995,
0.014129013754427433,
0.0014971739146858454,
-0.01726602576673031,
0.008971280418336391,
-0.05401374399662018,
-0.02665398269891739,
0.0645793229341507,
-0.0031559737399220467,
-0.02713717892765999,
0.062485985457897186,
-0.015182859264314175,
-0.01734154112637043,
-0.015464305877685547,
0.006626034155488014,
-0.05760286748409271,
-0.08167649805545807,
-0.008447020314633846,
0.10476922243833542,
0.00868932157754898,
-0.011189327575266361,
0.04235558584332466,
0.058948323130607605,
-0.028324957937002182,
0.034656625241041183,
-0.11710172891616821,
-0.014188362285494804,
0.020658109337091446,
-0.033318955451250076,
0.03755415976047516,
-0.06191994622349739,
0.022824399173259735,
0.05013996735215187,
0.018008878454566002,
0.0036811011377722025,
-0.013662627898156643,
0.021862538531422615,
0.027196476235985756,
0.014647133648395538,
0.01685497537255287,
-0.00362593331374228,
-0.01051601767539978,
0.00851482804864645,
0.05119222030043602,
0.06748811155557632,
-0.028654946014285088,
-0.008137204684317112
] |
monologg/koelectra-small-v2-discriminator | f2c615617707ae5e011a94c5506d0086301afe74 | 2020-12-26T16:23:57.000Z | [
"pytorch",
"electra",
"pretraining",
"transformers"
] | null | false | monologg | null | monologg/koelectra-small-v2-discriminator | 1,230 | null | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
salti/AraElectra-base-finetuned-ARCD | ba34c8067e38d6202812a3f880fd01f2cd20761e | 2021-01-29T20:39:31.000Z | [
"pytorch",
"electra",
"question-answering",
"ar",
"dataset:arcd",
"transformers",
"autotrain_compatible"
] | question-answering | false | salti | null | salti/AraElectra-base-finetuned-ARCD | 1,229 | 1 | transformers | ---
language:
- ar
datasets:
- arcd
widget:
- text: "أين يعيش محمد ؟"
context: "اسمي محمد وأنا أعيش في سوريا"
- text: "ما العدد الذري للهيدروجين ؟"
context: "الهيدروجين هو عنصر كيميائي عدده الذري 1 ، وهو غاز عديم الرائحة واللون وهو سريع الاشتعال"
- text: "ما خواص الهيدروجين ؟"
context: "الهيدروجين هو عنصر كيميائي عدده الذري 1 ، وهو غاز عديم الرائحة واللون وهو سريع الاشتعال"
---
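A minimal usage sketch with the standard `question-answering` pipeline, reusing the first widget example above:
```python
from transformers import pipeline

qa = pipeline("question-answering", model="salti/AraElectra-base-finetuned-ARCD")

# first widget example: "Where does Muhammad live?" / "My name is Muhammad and I live in Syria"
result = qa(question="أين يعيش محمد ؟", context="اسمي محمد وأنا أعيش في سوريا")
print(result)
```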
| [
-0.00665840832516551,
0.04693882539868355,
0.03515922650694847,
-0.05558440834283829,
0.005783548578619957,
-0.0058401767164468765,
0.08815372735261917,
-0.051095783710479736,
-0.008887331932783127,
-0.013463662937283516,
0.07419020682573318,
0.008938021026551723,
0.048264242708683014,
-0.00584753043949604,
0.01735915057361126,
-0.026588398963212967,
0.0162733756005764,
-0.026519112288951874,
-0.06903456896543503,
-0.010357994586229324,
0.039760783314704895,
0.01742038130760193,
0.038801755756139755,
-0.02187199331820011,
0.04364737123250961,
0.031957514584064484,
-0.046330589801073074,
-0.02495294064283371,
-0.0732719749212265,
0.04494868591427803,
-0.034211352467536926,
0.019763801246881485,
0.06894256919622421,
-0.029776617884635925,
-0.029405448585748672,
0.055865608155727386,
0.040077418088912964,
0.033769164234399796,
-0.048268068581819534,
0.013814586214721203,
-0.01802045851945877,
-0.08945343643426895,
0.01980091817677021,
-0.045527249574661255,
-0.0013442642521113157,
-0.06515023857355118,
-0.0530434176325798,
-0.051130395382642746,
-0.09190123528242111,
0.01731480471789837,
-0.04603849723935127,
-0.012687456794083118,
0.01637963391840458,
-0.028232822194695473,
-0.054398082196712494,
-0.05491260811686516,
-0.06908063590526581,
0.09173517674207687,
0.05697721987962723,
0.07211671769618988,
-0.06622341275215149,
0.05162104219198227,
-0.050018806010484695,
0.050623614341020584,
0.008231760933995247,
-0.023284418508410454,
0.040469877421855927,
-0.029142849147319794,
-0.01981944404542446,
0.012967173010110855,
-0.005835717543959618,
0.04522407799959183,
-0.007014680653810501,
0.03944896534085274,
-0.038869526237249374,
-0.02084299363195896,
0.03545885905623436,
-0.020359298214316368,
0.007474622689187527,
-0.15637873113155365,
0.06892857700586319,
-0.032677408307790756,
0.05547461286187172,
0.01982482336461544,
0.05419568344950676,
-0.02203291282057762,
-0.019497400149703026,
-0.025472398847341537,
-0.004529519472271204,
0.017385447397828102,
0.10289334505796432,
-0.018534066155552864,
0.16434922814369202,
0.03050408326089382,
0.09706098586320877,
0.04720333218574524,
-0.0355675183236599,
0.04749540239572525,
-0.1493556946516037,
0.042521096765995026,
-0.0025426188949495554,
-0.07395728677511215,
-0.007747895084321499,
0.019469808787107468,
-0.023637056350708008,
-0.07074937224388123,
-0.021957620978355408,
-0.07102201133966446,
-0.041114918887615204,
-0.054969124495983124,
0.005558935925364494,
-0.021414516493678093,
-0.05316808074712753,
-0.026112662628293037,
-0.02815827913582325,
0.02085774391889572,
-0.08187178522348404,
-0.007923698052763939,
-0.02081156149506569,
0.08420369774103165,
-0.012700233608484268,
0.00826792698353529,
0.03902164474129677,
0.008163860067725182,
-0.018810998648405075,
-0.09237053990364075,
-0.054182521998882294,
-3.1180050103698564e-33,
-0.0337383933365345,
0.00020138066611252725,
0.007014910690486431,
-0.03197557479143143,
-0.03168559819459915,
-0.00011430535960244015,
-0.12611356377601624,
0.008506137877702713,
0.07422981411218643,
-0.004763461649417877,
-0.016996562480926514,
-0.00834538135677576,
0.012825051322579384,
0.01526500005275011,
-0.014651401899755001,
0.037203215062618256,
-0.03296506032347679,
0.06931769847869873,
-0.07077226787805557,
0.05436526983976364,
0.03188776969909668,
0.0752587839961052,
0.03025399148464203,
-0.05651204288005829,
-0.05677029862999916,
0.008261489681899548,
0.06889430433511734,
0.03415043652057648,
0.02420887164771557,
0.028426650911569595,
-0.07033739238977432,
-0.025186464190483093,
0.08280368149280548,
-0.015539533458650112,
-0.0415266752243042,
-0.023102518171072006,
-0.014922444708645344,
-0.07074113190174103,
-0.005300935823470354,
-0.031147776171565056,
-0.03532912954688072,
0.05286622792482376,
0.03589152544736862,
0.013753043487668037,
0.013177970424294472,
-0.0033862553536891937,
-0.027008546516299248,
-0.0475793294608593,
-0.09765192866325378,
0.009292522445321083,
-0.08601266890764236,
0.0719815045595169,
0.0375809483230114,
-0.045598458498716354,
0.0367470383644104,
0.01574133336544037,
0.010616972111165524,
-0.0041790311224758625,
-0.007748771458864212,
0.015700187534093857,
0.030201125890016556,
0.010171663947403431,
0.017610670998692513,
0.011569250375032425,
-0.011537536978721619,
0.10634121298789978,
-0.030024956911802292,
-0.04148457199335098,
0.00996577087789774,
-0.02932700887322426,
-0.03710976988077164,
0.02310977876186371,
-0.01974417455494404,
0.08145696669816971,
-0.020757468417286873,
-0.054907090961933136,
0.02916494756937027,
-0.012396099045872688,
0.045298125594854355,
-0.016577476635575294,
-0.02077896147966385,
-0.04325786978006363,
0.04803716391324997,
0.022313088178634644,
-0.03248221054673195,
0.0881735309958458,
0.056066662073135376,
-0.09381275624036789,
-0.008046191185712814,
0.07119090855121613,
-0.05192553997039795,
-0.006390667986124754,
-0.06284669786691666,
-0.08395020663738251,
0.01202215626835823,
-2.4694651242250225e-33,
-0.039797864854335785,
0.10697579383850098,
-0.062305573374032974,
0.060928668826818466,
0.02749972604215145,
0.006851671263575554,
0.054599396884441376,
0.02769065834581852,
0.07351270318031311,
0.04319470003247261,
-0.006821364164352417,
-0.01996428146958351,
-0.03210856765508652,
-0.07533270120620728,
0.0319678857922554,
-0.027543921023607254,
0.042339857667684555,
-0.055151648819446564,
0.002436166862025857,
-0.05562974512577057,
0.0873965248465538,
0.028209341689944267,
-0.14389295876026154,
0.05521324276924133,
-0.04144219309091568,
-0.004262030590325594,
0.005093351937830448,
-0.10940999537706375,
-0.04581066220998764,
0.007216594181954861,
-0.02590290829539299,
-0.020308267325162888,
-0.055534038692712784,
0.023870373144745827,
-0.0693102553486824,
0.007518813479691744,
-0.03375677019357681,
0.025471262633800507,
-0.1170855388045311,
0.010704857297241688,
0.10216160118579865,
0.0008681795443408191,
-0.03478086739778519,
0.06074397265911102,
-0.026794875040650368,
-0.02850574627518654,
0.028240544721484184,
-0.027616389095783234,
-0.009530621580779552,
-0.11689566820859909,
-0.017680542543530464,
0.06521738320589066,
-0.04981724172830582,
-0.08576123416423798,
-0.015454580076038837,
-0.011721652001142502,
0.07387956231832504,
-0.07250706106424332,
-0.0764596238732338,
-0.09745313227176666,
-0.04621055722236633,
0.030380526557564735,
0.021603181958198547,
-0.05661027505993843,
-0.013950536958873272,
0.026359140872955322,
0.03652462735772133,
0.0028958276379853487,
0.021092766895890236,
-0.07723415642976761,
0.09078563749790192,
-0.1582808941602707,
-0.09320834279060364,
-0.05676006153225899,
0.023397302255034447,
0.05732240900397301,
-0.043379925191402435,
-0.012221853248775005,
-0.046759847551584244,
0.02004045620560646,
-0.07605186104774475,
-0.011241944506764412,
-0.018737932667136192,
0.09953995048999786,
-0.015521344728767872,
0.025720201432704926,
0.006892656907439232,
-0.027471790090203285,
0.02944515272974968,
0.07001988589763641,
-0.05305297300219536,
0.015335789881646633,
-0.03475524112582207,
0.04663120582699776,
0.01960340514779091,
-4.556057220383991e-8,
-0.03363073617219925,
0.038966353982686996,
-0.04152452200651169,
0.048463474959135056,
-0.06826723366975784,
0.013144580647349358,
-0.04248855635523796,
0.040958110243082047,
-0.07215573638677597,
0.002451704116538167,
0.07609386742115021,
-0.00017614217358641326,
-0.09469739347696304,
-0.019283097237348557,
-0.009523202665150166,
0.035682812333106995,
0.07100289314985275,
0.0012634489685297012,
-0.039036188274621964,
0.005817701108753681,
0.02968357689678669,
0.07105842977762222,
-0.02389666438102722,
-0.007068681996315718,
-0.10653812438249588,
0.10642623901367188,
-0.029161361977458,
0.04833893105387688,
-0.09128423035144806,
0.08159174770116806,
0.029225021600723267,
-0.02814125269651413,
-0.0161857008934021,
-0.021753527224063873,
0.018404217436909676,
-0.06895825266838074,
-0.05621247738599777,
0.0006811706116423011,
0.028045257553458214,
0.015368374064564705,
0.023843353614211082,
-0.052767351269721985,
0.04236219823360443,
0.17525655031204224,
0.11426065117120743,
-0.009978514164686203,
-0.016730353236198425,
-0.034367889165878296,
0.03526744991540909,
-0.0611983947455883,
0.045848291367292404,
-0.029778171330690384,
0.0030560079030692577,
-0.002091755159199238,
0.05053457245230675,
-0.04000440612435341,
0.036763109266757965,
-0.033741701394319534,
-0.02685626596212387,
0.06706796586513519,
0.10279393196105957,
-0.06138233467936516,
-0.02013896405696869,
0.009113834239542484
] |
pierreguillou/ner-bert-large-cased-pt-lenerbr | d081b0eb833d418c68e3327fc16e956d4738b164 | 2021-12-29T19:33:17.000Z | [
"pytorch",
"bert",
"token-classification",
"pt",
"dataset:lener_br",
"transformers",
"generated_from_trainer",
"model-index",
"autotrain_compatible"
] | token-classification | false | pierreguillou | null | pierreguillou/ner-bert-large-cased-pt-lenerbr | 1,227 | 2 | transformers | ---
language:
- pt
tags:
- generated_from_trainer
datasets:
- lener_br
metrics:
- precision
- recall
- f1
- accuracy
model-index:
- name: checkpoints
results:
- task:
name: Token Classification
type: token-classification
dataset:
name: lener_br
type: lener_br
metrics:
- name: F1
type: f1
value: 0.9082022949426265
- name: Precision
type: precision
value: 0.8975220495590088
- name: Recall
type: recall
value: 0.9191397849462366
- name: Accuracy
type: accuracy
value: 0.9808310603867311
- name: Loss
type: loss
value: 0.1228889599442482
widget:
- text: "Ao Instituto Médico Legal da jurisdição do acidente ou da residência cumpre fornecer, no prazo de 90 dias, laudo à vítima (art. 5, § 5, Lei n. 6.194/74 de 19 de dezembro de 1974), função técnica que pode ser suprida por prova pericial realizada por ordem do juízo da causa, ou por prova técnica realizada no âmbito administrativo que se mostre coerente com os demais elementos de prova constante dos autos."
- text: "Acrescento que não há de se falar em violação do artigo 114, § 3º, da Constituição Federal, posto que referido dispositivo revela-se impertinente, tratando da possibilidade de ajuizamento de dissídio coletivo pelo Ministério Público do Trabalho nos casos de greve em atividade essencial."
- text: "Todavia, entendo que extrair da aludida norma o sentido expresso na redação acima implica desconstruir o significado do texto constitucional, o que é absolutamente vedado ao intérprete. Nesse sentido, cito Dimitri Dimoulis: ‘(...) ao intérprete não é dado escolher significados que não estejam abarcados pela moldura da norma. Interpretar não pode significar violentar a norma.’ (Positivismo Jurídico. São Paulo: Método, 2006, p. 220).59. Dessa forma, deve-se tomar o sentido etimológico como limite da atividade interpretativa, a qual não pode superado, a ponto de destruir a própria norma a ser interpretada. Ou, como diz Konrad Hesse, ‘o texto da norma é o limite insuperável da atividade interpretativa.’ (Elementos de Direito Constitucional da República Federal da Alemanha, Porto Alegre: Sergio Antonio Fabris, 2003, p. 71)."
---
## (BERT large) NER model in the legal domain in Portuguese (LeNER-Br)
**ner-bert-large-portuguese-cased-lenerbr** is a NER model (token classification) in the legal domain in Portuguese that was finetuned on 20/12/2021 in Google Colab from the model [pierreguillou/bert-large-cased-pt-lenerbr](https://huggingface.co/pierreguillou/bert-large-cased-pt-lenerbr) on the dataset [LeNER_br](https://huggingface.co/datasets/lener_br) by using a NER objective.
Due to the small size of the finetuning dataset, the model overfitted before reaching the end of training. Here are the overall final metrics on the validation dataset (*note: see the paragraph "Validation metrics by Named Entity" for detailed metrics*):
- **f1**: 0.9082022949426265
- **precision**: 0.8975220495590088
- **recall**: 0.9191397849462366
- **accuracy**: 0.9808310603867311
- **loss**: 0.1228889599442482
Check as well the [base version of this model](https://huggingface.co/pierreguillou/ner-bert-base-cased-pt-lenerbr), which reaches an f1 of 0.893.
**Note**: the model [pierreguillou/bert-large-cased-pt-lenerbr](https://huggingface.co/pierreguillou/bert-large-cased-pt-lenerbr) is a language model that was created through the finetuning of the model [BERTimbau large](https://huggingface.co/neuralmind/bert-large-portuguese-cased) on the dataset [LeNER-Br language modeling](https://huggingface.co/datasets/pierreguillou/lener_br_finetuning_language_model) by using a MASK objective. Specializing the language model on the legal domain first, before finetuning on the NER task, yields a better NER model.
## Blog post
[NLP | Modelos e Web App para Reconhecimento de Entidade Nomeada (NER) no domínio jurídico brasileiro](https://medium.com/@pierre_guillou/nlp-modelos-e-web-app-para-reconhecimento-de-entidade-nomeada-ner-no-dom%C3%ADnio-jur%C3%ADdico-b658db55edfb) (29/12/2021)
## Widget & App
You can test this model in the widget of this page.
You can also use the [NER App](https://huggingface.co/spaces/pierreguillou/ner-bert-pt-lenerbr), which allows comparing the 2 BERT models (base and large) finetuned on the NER task with the legal LeNER-Br dataset.
## Using the model for inference in production
````
# install pytorch: check https://pytorch.org/
# !pip install transformers
from transformers import AutoModelForTokenClassification, AutoTokenizer
import torch
# parameters
model_name = "pierreguillou/ner-bert-large-cased-pt-lenerbr"
model = AutoModelForTokenClassification.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
input_text = "Acrescento que não há de se falar em violação do artigo 114, § 3º, da Constituição Federal, posto que referido dispositivo revela-se impertinente, tratando da possibilidade de ajuizamento de dissídio coletivo pelo Ministério Público do Trabalho nos casos de greve em atividade essencial."
# tokenization
inputs = tokenizer(input_text, max_length=512, truncation=True, return_tensors="pt")
tokens = inputs.tokens()
# get predictions
outputs = model(**inputs).logits
predictions = torch.argmax(outputs, dim=2)
# print predictions
for token, prediction in zip(tokens, predictions[0].numpy()):
print((token, model.config.id2label[prediction]))
````
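The snippet above prints one label per (sub)token. If you prefer entity spans, here is a minimal post-processing sketch (assuming the usual B-/I- IOB2 tags of LeNER-Br; it reuses `tokens`, `predictions`, `model` and `tokenizer` from the snippet above):
````
# minimal sketch: merge the (sub)token predictions above into (entity text, label) spans
# assumes IOB2 tags such as B-PESSOA / I-PESSOA, as used by LeNER-Br
def group_entities(tokens, labels):
    entities, current_tokens, current_label = [], [], None
    for token, label in zip(tokens, labels):
        if token in ("[CLS]", "[SEP]", "[PAD]"):
            continue
        if label.startswith("B-"):
            # a new entity starts: close any open one first
            if current_tokens:
                entities.append((tokenizer.convert_tokens_to_string(current_tokens), current_label))
            current_tokens, current_label = [token], label[2:]
        elif label.startswith("I-") and current_label == label[2:]:
            # continuation of the current entity
            current_tokens.append(token)
        else:
            # "O" or an inconsistent tag closes any open entity
            if current_tokens:
                entities.append((tokenizer.convert_tokens_to_string(current_tokens), current_label))
            current_tokens, current_label = [], None
    if current_tokens:
        entities.append((tokenizer.convert_tokens_to_string(current_tokens), current_label))
    return entities

labels = [model.config.id2label[p] for p in predictions[0].numpy()]
print(group_entities(tokens, labels))
````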
You can use a pipeline, too. However, it seems to have an issue regarding the max_length of the input sequence.
````
!pip install transformers
import transformers
from transformers import pipeline
model_name = "pierreguillou/ner-bert-large-cased-pt-lenerbr"
ner = pipeline(
"ner",
model=model_name
)
ner(input_text)
````
## Training procedure
### Notebook
The finetuning notebook ([HuggingFace_Notebook_token_classification_NER_LeNER_Br.ipynb](https://github.com/piegu/language-models/blob/master/HuggingFace_Notebook_token_classification_NER_LeNER_Br.ipynb)) is available on GitHub.
### Hyperparameters
**batch, learning rate...**
- per_device_batch_size = 2
- gradient_accumulation_steps = 2
- learning_rate = 2e-5
- num_train_epochs = 10
- weight_decay = 0.01
- optimizer = AdamW
- betas = (0.9,0.999)
- epsilon = 1e-08
- lr_scheduler_type = linear
- seed = 42
**save model & load best model**
- save_total_limit = 7
- logging_steps = 500
- eval_steps = logging_steps
- evaluation_strategy = 'steps'
- logging_strategy = 'steps'
- save_strategy = 'steps'
- save_steps = logging_steps
- load_best_model_at_end = True
- fp16 = True
**get best model through a metric**
- metric_for_best_model = 'eval_f1'
- greater_is_better = True
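As a sketch, the settings listed above roughly map onto the following `TrainingArguments` (this is a reconstruction, not the notebook code; `output_dir` is assumed — see the linked notebook for the exact version):
````
from transformers import TrainingArguments

# sketch only: reconstruction of the hyperparameters listed above
training_args = TrainingArguments(
    output_dir="checkpoints",          # assumed output directory
    per_device_train_batch_size=2,
    gradient_accumulation_steps=2,
    learning_rate=2e-5,
    num_train_epochs=10,
    weight_decay=0.01,
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    lr_scheduler_type="linear",
    seed=42,
    save_total_limit=7,
    logging_steps=500,
    evaluation_strategy="steps",
    eval_steps=500,
    logging_strategy="steps",
    save_strategy="steps",
    save_steps=500,
    load_best_model_at_end=True,
    metric_for_best_model="eval_f1",
    greater_is_better=True,
    fp16=True,
)
````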
### Training results
````
Num examples = 7828
Num Epochs = 20
Instantaneous batch size per device = 2
Total train batch size (w. parallel, distributed & accumulation) = 4
Gradient Accumulation steps = 2
Total optimization steps = 39140
Step Training Loss Validation Loss Precision Recall F1 Accuracy
500 0.250000 0.140582 0.760833 0.770323 0.765548 0.963125
1000 0.076200 0.117882 0.829082 0.817849 0.823428 0.966569
1500 0.082400 0.150047 0.679610 0.914624 0.779795 0.957213
2000 0.047500 0.133443 0.817678 0.857419 0.837077 0.969190
2500 0.034200 0.230139 0.895672 0.845591 0.869912 0.964070
3000 0.033800 0.108022 0.859225 0.887312 0.873043 0.973700
3500 0.030100 0.113467 0.855747 0.885376 0.870310 0.975879
4000 0.029900 0.118619 0.850207 0.884946 0.867229 0.974477
4500 0.022500 0.124327 0.841048 0.890968 0.865288 0.975041
5000 0.020200 0.129294 0.801538 0.918925 0.856227 0.968077
5500 0.019700 0.128344 0.814222 0.908602 0.858827 0.969250
6000 0.024600 0.182563 0.908087 0.866882 0.887006 0.968565
6500 0.012600 0.159217 0.829883 0.913763 0.869806 0.969357
7000 0.020600 0.183726 0.854557 0.893333 0.873515 0.966447
7500 0.014400 0.141395 0.777716 0.905161 0.836613 0.966828
8000 0.013400 0.139378 0.873042 0.899140 0.885899 0.975772
8500 0.014700 0.142521 0.864152 0.901505 0.882433 0.976366
9000 0.010900 0.122889 0.897522 0.919140 0.908202 0.980831
9500 0.013500 0.143407 0.816580 0.906667 0.859268 0.973395
10000 0.010400 0.144946 0.835608 0.908387 0.870479 0.974629
10500 0.007800 0.143086 0.847587 0.910108 0.877735 0.975985
11000 0.008200 0.156379 0.873778 0.884301 0.879008 0.976321
11500 0.008200 0.133356 0.901193 0.910108 0.905628 0.980328
12000 0.006900 0.133476 0.892202 0.920215 0.905992 0.980572
12500 0.006900 0.129991 0.890159 0.904516 0.897280 0.978683
````
### Validation metrics by Named Entity
````
{'JURISPRUDENCIA': {'f1': 0.8135593220338984,
'number': 657,
'precision': 0.865979381443299,
'recall': 0.7671232876712328},
'LEGISLACAO': {'f1': 0.8888888888888888,
'number': 571,
'precision': 0.8952042628774423,
'recall': 0.882661996497373},
'LOCAL': {'f1': 0.850467289719626,
'number': 194,
'precision': 0.7777777777777778,
'recall': 0.9381443298969072},
'ORGANIZACAO': {'f1': 0.8740635033892258,
'number': 1340,
'precision': 0.8373205741626795,
'recall': 0.914179104477612},
'PESSOA': {'f1': 0.9836677554829678,
'number': 1072,
'precision': 0.9841269841269841,
'recall': 0.9832089552238806},
'TEMPO': {'f1': 0.9669669669669669,
'number': 816,
'precision': 0.9481743227326266,
'recall': 0.9865196078431373},
'overall_accuracy': 0.9808310603867311,
'overall_f1': 0.9082022949426265,
'overall_precision': 0.8975220495590088,
'overall_recall': 0.9191397849462366}
```` | [
-0.030734021216630936,
0.013166505843400955,
-0.08788739144802094,
-0.06078614667057991,
-0.035642288625240326,
0.0015469151549041271,
0.008087637834250927,
0.09198180586099625,
-0.021672304719686508,
-0.06086566299200058,
0.09111503511667252,
-0.0834258496761322,
0.0076814210042357445,
-0.05582883208990097,
-0.0671519786119461,
-0.03625450283288956,
-0.01655570976436138,
0.027511054649949074,
-0.07282638549804688,
-0.018082676455378532,
0.041976749897003174,
0.057835668325424194,
0.06606996804475784,
0.07486902922391891,
-0.04921550676226616,
-0.0041409945115447044,
-0.09436442703008652,
0.07015076279640198,
-0.05198836699128151,
-0.05104943737387657,
-0.014786657877266407,
0.114204540848732,
-0.0014841841766610742,
0.042068302631378174,
0.03472154960036278,
0.034176409244537354,
-0.06822552531957626,
-0.018014583736658096,
0.027893152087926865,
-0.013920481316745281,
-0.04044632613658905,
-0.028680047020316124,
0.005154179409146309,
0.0279528871178627,
0.034888532012701035,
0.011875075288116932,
-0.062240757048130035,
-0.011475496925413609,
-0.012040331028401852,
0.017868688330054283,
-0.0971551164984703,
0.08721280097961426,
-0.0030919702257961035,
0.022232910618185997,
-0.028253043070435524,
-0.027326257899403572,
-0.03565046191215515,
-0.023959930986166,
-0.01624993607401848,
-0.007370928302407265,
0.027049057185649872,
-0.025735318660736084,
-0.07252822816371918,
-0.02798451855778694,
-0.07564572989940643,
-0.03705253452062607,
-0.006421517115086317,
-0.033267393708229065,
0.03823355212807655,
0.004023738205432892,
-0.0012983237393200397,
0.027062376961112022,
-0.05100739374756813,
0.05613556131720543,
-0.007367108948528767,
0.0317869707942009,
-0.02549903467297554,
0.04982003942131996,
0.03347191959619522,
-0.08614975959062576,
-0.02514643780887127,
-0.002785283140838146,
0.014684680849313736,
-0.0027394762728363276,
0.08099788427352905,
-0.03094421699643135,
0.02869356982409954,
0.0050108409486711025,
0.08925390988588333,
0.02989857643842697,
0.013597932644188404,
-0.011880860663950443,
0.015903888270258904,
-0.025195859372615814,
-0.021764211356639862,
0.0317763015627861,
0.05624663829803467,
0.04468739405274391,
-0.031764887273311615,
0.08704628795385361,
0.015932457521557808,
-0.018717603757977486,
-0.05042000487446785,
0.04203558713197708,
-0.020205294713377953,
0.014198296703398228,
0.015715645626187325,
0.04120667651295662,
0.060503724962472916,
-0.028164096176624298,
0.014342933893203735,
0.0021831863559782505,
-0.07090345770120621,
-0.019332552328705788,
0.010981280356645584,
0.09733929485082626,
-0.10622330009937286,
0.017344078049063683,
0.02460629492998123,
0.09656891971826553,
-0.017700910568237305,
-0.038428861647844315,
-0.03858312591910362,
-0.03222807124257088,
-0.03967669978737831,
-0.01462442334741354,
-0.010372171178460121,
1.303729911174254e-32,
-0.014893568120896816,
-0.005195279605686665,
0.044026900082826614,
-0.03765510395169258,
0.0037648307625204325,
-0.04078228026628494,
-0.1350591480731964,
-0.04033524915575981,
-0.059082888066768646,
-0.015000933781266212,
-0.05127542093396187,
-0.018998898565769196,
-0.0339762307703495,
0.024036848917603493,
0.03902062401175499,
-0.035972289741039276,
-0.04699154198169708,
0.02161356247961521,
-0.04843950644135475,
-0.009064898826181889,
0.10982052236795425,
-0.04949427396059036,
0.01552652008831501,
-0.07648477703332901,
-0.04235721752047539,
0.13009287416934967,
-0.0320209264755249,
0.0028992739971727133,
-0.0024979112204164267,
0.056904371827840805,
-0.05028320476412773,
-0.018024710938334465,
0.04481976479291916,
-0.07199890166521072,
0.0463956743478775,
-0.007412980310618877,
-0.0014850754523649812,
-0.03571499511599541,
-0.010712995193898678,
-0.019490107893943787,
-0.010124723426997662,
0.012904771603643894,
-0.02561143785715103,
-0.06311523914337158,
0.026404142379760742,
-0.08777299523353577,
0.022660991176962852,
0.012368772178888321,
0.001734502729959786,
0.04168738052248955,
-0.07418733835220337,
-0.050581593066453934,
-0.028415635228157043,
-0.054845646023750305,
-0.03328149765729904,
0.017512653023004532,
-0.0005878316005691886,
0.10071159899234772,
-0.012108340859413147,
0.03607287257909775,
-0.002667649183422327,
0.05558430403470993,
-0.008267791010439396,
0.031233353540301323,
-0.0030298365745693445,
-0.030774077400565147,
-0.059251997619867325,
0.010838868096470833,
0.07571233808994293,
0.034527767449617386,
-0.042031608521938324,
0.01390149723738432,
0.06454765796661377,
0.041754499077796936,
0.051667388528585434,
-0.006448694039136171,
0.053493332117795944,
-0.0619867742061615,
-0.06441869586706161,
-0.00204640650190413,
-0.06526903063058853,
-0.042452458292245865,
-0.08978115022182465,
-0.029754141345620155,
-0.047068431973457336,
-0.02706240862607956,
0.06390354782342911,
-0.0686638131737709,
-0.07044512778520584,
-0.011807459406554699,
-0.08477146178483963,
0.018346372991800308,
-0.023063819855451584,
-0.0132522564381361,
-0.09685084223747253,
-1.5485301362160478e-32,
-0.036040958017110825,
0.02947169542312622,
0.004335165023803711,
0.03438128903508186,
0.04219921678304672,
-0.015004012733697891,
0.027645563706755638,
0.04480581358075142,
0.010623309761285782,
-0.040980130434036255,
-0.011830918490886688,
-0.039662253111600876,
-0.049047015607357025,
-0.022225406020879745,
-0.0028071817941963673,
0.04436076059937477,
-0.07601303607225418,
-0.005144793074578047,
-0.08246586471796036,
0.02350674755871296,
0.01970997080206871,
0.04657047614455223,
-0.025974854826927185,
0.09649131447076797,
-0.04605571925640106,
0.023619038984179497,
0.02793027274310589,
0.0001566075225127861,
-0.0012008908670395613,
-0.056322529911994934,
0.002630550181493163,
-0.02541561797261238,
-0.05957953259348869,
0.07550673931837082,
-0.002621695399284363,
-0.08838573098182678,
0.11301130056381226,
-0.058687977492809296,
-0.04963260143995285,
0.06683561205863953,
0.09525593370199203,
0.0778917744755745,
-0.1074632927775383,
0.006730606779456139,
-0.025450678542256355,
-0.036482710391283035,
-0.00010539913637330756,
0.01429202500730753,
0.07692084461450577,
-0.06920597702264786,
0.10191331058740616,
0.008211638778448105,
-0.0772833526134491,
0.05108180269598961,
0.02847030572593212,
-0.03797867149114609,
0.01928086206316948,
-0.07860437035560608,
-0.12406051903963089,
0.024496668949723244,
-0.010358363389968872,
0.06019287183880806,
-0.07168292254209518,
0.040699295699596405,
0.1624576896429062,
-0.04111093655228615,
-0.04341299086809158,
0.06868826597929001,
-0.03685187175869942,
0.0426352322101593,
0.06401041150093079,
0.021707748994231224,
-0.021791988983750343,
-0.03186430782079697,
0.028429843485355377,
-0.014230754226446152,
-0.04551071301102638,
0.035620953887701035,
-0.01090647466480732,
0.029728638008236885,
-0.034600671380758286,
-0.09336137026548386,
0.04495961591601372,
0.1332278996706009,
-0.014240930788218975,
0.0249217189848423,
-0.005357008893042803,
0.09983786195516586,
-0.0021885312162339687,
0.02003392204642296,
-0.0010997412027791142,
0.015175950713455677,
0.024181969463825226,
0.1142006516456604,
-0.04083864390850067,
-7.20055055580815e-8,
-0.029207957908511162,
-0.04648806154727936,
-0.051005445420742035,
0.04133050888776779,
0.02269616723060608,
0.0019061811035498977,
-0.05787007883191109,
-0.02007993496954441,
-0.07564050704240799,
-0.04016081243753433,
0.07478971779346466,
-0.01588425412774086,
-0.14639896154403687,
-0.05204056575894356,
0.0651719868183136,
-0.02954494208097458,
0.007826406508684158,
0.17274901270866394,
-0.043862808495759964,
-0.035469602793455124,
0.02165684849023819,
-0.03696577623486519,
-0.012045789510011673,
-0.09454573690891266,
-0.03981352224946022,
-0.03609972074627876,
0.010037747211754322,
0.15913747251033783,
0.0017787781544029713,
0.028241371735930443,
0.02703726477921009,
-0.005303253419697285,
0.06715864688158035,
-0.07670532912015915,
-0.007879873737692833,
0.010900509543716908,
0.04490093141794205,
0.010377871803939342,
-0.005468859802931547,
0.03050713986158371,
-0.030035555362701416,
0.026863161474466324,
-0.018601546064019203,
0.0303523950278759,
0.038396093994379044,
0.00930845458060503,
0.0027671665884554386,
-0.01938645727932453,
0.06600171327590942,
-0.034105334430933,
-0.014776174910366535,
0.022891439497470856,
-0.032948751002550125,
0.07498831301927567,
-0.035404156893491745,
0.10483364015817642,
-0.02841450273990631,
-0.04139309376478195,
-0.062349747866392136,
-0.1012427881360054,
0.13713257014751434,
-0.03835732489824295,
-0.0101075554266572,
0.04540695622563362
] |
Helsinki-NLP/opus-mt-hy-en | c1f5af969aee273f845a84ad3f4b149ba5435303 | 2021-09-09T22:11:07.000Z | [
"pytorch",
"marian",
"text2text-generation",
"hy",
"en",
"transformers",
"translation",
"license:apache-2.0",
"autotrain_compatible"
] | translation | false | Helsinki-NLP | null | Helsinki-NLP/opus-mt-hy-en | 1,226 | null | transformers | ---
tags:
- translation
license: apache-2.0
---
### opus-mt-hy-en
* source languages: hy
* target languages: en
* OPUS readme: [hy-en](https://github.com/Helsinki-NLP/OPUS-MT-train/blob/master/models/hy-en/README.md)
* dataset: opus
* model: transformer-align
* pre-processing: normalization + SentencePiece
* download original weights: [opus-2019-12-18.zip](https://object.pouta.csc.fi/OPUS-MT-models/hy-en/opus-2019-12-18.zip)
* test set translations: [opus-2019-12-18.test.txt](https://object.pouta.csc.fi/OPUS-MT-models/hy-en/opus-2019-12-18.test.txt)
* test set scores: [opus-2019-12-18.eval.txt](https://object.pouta.csc.fi/OPUS-MT-models/hy-en/opus-2019-12-18.eval.txt)
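A minimal usage sketch (assuming the standard MarianMT classes in `transformers`; the Armenian example sentence is only illustrative):
```python
from transformers import MarianMTModel, MarianTokenizer

model_name = "Helsinki-NLP/opus-mt-hy-en"
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)

# illustrative Armenian input ("Hello world")
batch = tokenizer(["Բարեւ աշխարհ"], return_tensors="pt", padding=True)
generated = model.generate(**batch)
print(tokenizer.batch_decode(generated, skip_special_tokens=True))
```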
## Benchmarks
| testset | BLEU | chr-F |
|-----------------------|-------|-------|
| Tatoeba.hy.en | 29.5 | 0.466 |
| [
-0.05840497836470604,
-0.007921182550489902,
0.01768920011818409,
-0.015817273408174515,
0.00876666884869337,
0.09242000430822372,
-0.061145536601543427,
0.01800341159105301,
0.005555426701903343,
-0.015529943630099297,
0.012687375769019127,
-0.03752690553665161,
-0.07703845202922821,
-0.0434642992913723,
-0.029781829565763474,
0.004971795715391636,
-0.028756290674209595,
0.0790238082408905,
-0.07089976221323013,
-0.020210694521665573,
0.052485521882772446,
0.032945647835731506,
0.03218910098075867,
-0.0030725118704140186,
0.09943609684705734,
0.06633938103914261,
-0.09497417509555817,
0.008104557171463966,
0.09370939433574677,
-0.03562276437878609,
-0.004591061733663082,
0.009547406807541847,
0.06508393585681915,
0.07096429914236069,
0.06383971869945526,
0.07583783566951752,
-0.00245850533246994,
-0.07756365835666656,
-0.03060118854045868,
0.050150107592344284,
0.040382109582424164,
0.046072520315647125,
-0.05061532184481621,
0.0015716097550466657,
0.04059134051203728,
-0.0036742142401635647,
-0.06305252760648727,
0.019193660467863083,
0.006941200699657202,
-0.0005834954790771008,
-0.1280084252357483,
-0.009612448513507843,
0.015265067107975483,
0.06459681689739227,
-0.06985539197921753,
0.038184113800525665,
0.043455932289361954,
0.004110065288841724,
0.08212149143218994,
-0.03093707375228405,
-0.13999079167842865,
-0.02066984213888645,
-0.10145040601491928,
0.0034111517015844584,
-0.0030876249074935913,
-0.0099374670535326,
0.01344690565019846,
0.060249391943216324,
-0.05334128066897392,
0.039498720318078995,
-0.019721874967217445,
-0.018285708501935005,
0.0008411271846853197,
0.046864207834005356,
-0.013256818056106567,
0.030342942103743553,
0.0034934512805193663,
-0.06118384003639221,
0.0019449571846053004,
-0.08647968620061874,
0.0012047758791595697,
-0.06276382505893707,
0.07261224836111069,
0.00268632173538208,
0.07199765741825104,
0.01066561508923769,
0.011821217834949493,
0.00020675227278843522,
-0.021311843767762184,
0.031635135412216187,
-0.06949176639318466,
-0.05280749499797821,
0.010363718494772911,
0.01711711473762989,
0.0017083754064515233,
0.05997580662369728,
0.01777759939432144,
0.05136709287762642,
0.017359862104058266,
0.06612484902143478,
0.0250881165266037,
0.004296658560633659,
0.06807726621627808,
-0.034331876784563065,
-0.11510918289422989,
-0.029669681563973427,
0.07570692151784897,
0.051471173763275146,
0.01578984223306179,
-0.08213982731103897,
0.013920105062425137,
-0.017482347786426544,
-0.01946074701845646,
-0.09959658980369568,
0.030904894694685936,
-0.049606066197156906,
-0.003749353112652898,
-0.008527176454663277,
-0.00908226054161787,
0.029434824362397194,
-0.02938241697847843,
-0.011820212006568909,
-0.019447477534413338,
-0.007760849315673113,
-0.04240430146455765,
-0.07327635586261749,
0.022369563579559326,
1.924035010759512e-33,
0.06751830875873566,
-0.017735080793499947,
-0.01463793870061636,
0.001179514336399734,
-0.06956648081541061,
-0.023123569786548615,
-0.02607082761824131,
0.030400406569242477,
-0.11269828677177429,
0.007590651512145996,
-0.010177984833717346,
-0.014617071487009525,
-0.09776359051465988,
0.02240859530866146,
-0.034657929092645645,
0.012740441597998142,
0.06370306760072708,
0.023620935156941414,
0.039002347737550735,
0.05650511756539345,
0.06790444254875183,
0.050626762211322784,
-0.00383431906811893,
-0.033307697623968124,
-0.05102258548140526,
0.05748843401670456,
0.03120109625160694,
-0.11748422682285309,
-0.10582832247018814,
0.02536642551422119,
-0.10668321698904037,
0.022323288023471832,
-0.006855890154838562,
-0.004703461192548275,
-0.015168534591794014,
-0.022069180384278297,
-0.024901198223233223,
-0.013356545940041542,
-0.045101284980773926,
-0.09556813538074493,
0.007505880203098059,
0.01693718507885933,
-0.01932637020945549,
-0.0636587142944336,
0.046877700835466385,
0.023875288665294647,
0.013378378003835678,
0.011574230156838894,
0.10649624466896057,
0.01209526788443327,
-0.0012375577352941036,
0.05656145140528679,
-0.06529170274734497,
0.005791094619780779,
0.034228891134262085,
0.10545723140239716,
0.07506757974624634,
0.030272170901298523,
0.04796106368303299,
0.04501236230134964,
0.06254754960536957,
0.03300907462835312,
0.023967690765857697,
0.01797819510102272,
0.11392447352409363,
-0.01640838198363781,
-0.04965465888381004,
-0.07068973779678345,
0.07722871005535126,
0.040374163538217545,
-0.14552263915538788,
-0.04967783764004707,
0.06489752978086472,
0.0924208015203476,
0.06583938002586365,
-0.026837173849344254,
-0.02655230090022087,
-0.027339525520801544,
-0.00016566112753935158,
-0.03766825050115585,
-0.06300674378871918,
0.006090986542403698,
0.004741272889077663,
-0.01799040473997593,
-0.026892846450209618,
-0.0025233670603483915,
0.04570615291595459,
-0.06173991411924362,
-0.04268600791692734,
0.009450666606426239,
0.04290817677974701,
0.040072426199913025,
-0.09475318342447281,
-0.009820584207773209,
-0.003106393851339817,
-2.4395385074210256e-33,
0.09041508287191391,
0.008728680200874805,
-0.05592527240514755,
0.06398147344589233,
-0.02762039750814438,
-0.0620279498398304,
0.005735707934945822,
0.11627975106239319,
0.06231283023953438,
0.035848282277584076,
0.0824669823050499,
-0.14993640780448914,
0.027831271290779114,
-0.07948870956897736,
0.0640174150466919,
-0.036339711397886276,
-0.015918763354420662,
0.02660573646426201,
0.027305537834763527,
0.04323409125208855,
0.0021493572276085615,
0.07474029064178467,
-0.038148459047079086,
0.08078908920288086,
-0.008270607329905033,
-0.020037243142724037,
-0.021872378885746002,
0.07394850999116898,
0.009386714547872543,
0.011967598460614681,
0.0026847473345696926,
0.006458406336605549,
-0.12476538121700287,
-0.01473378948867321,
-0.0724664181470871,
0.04534861817955971,
0.03588319942355156,
0.04489409923553467,
0.038232166320085526,
0.057465530931949615,
0.06562091410160065,
0.04868873953819275,
-0.035322342067956924,
-0.03902621567249298,
0.018523262813687325,
-0.012293082661926746,
-0.01287336740642786,
0.01295281108468771,
-0.0016303351148962975,
-0.07323456555604935,
0.01612449251115322,
-0.0015614191070199013,
-0.07798709720373154,
-0.03364528715610504,
-0.009620721451938152,
-0.07729704678058624,
-0.0024247991386801004,
-0.13550733029842377,
-0.05072799697518349,
-0.02505345083773136,
-0.02313893660902977,
0.036426324397325516,
-0.045427534729242325,
-0.07162486016750336,
0.022654244676232338,
-0.010644269175827503,
0.03044816292822361,
0.0051924134604632854,
0.018954243510961533,
0.054244957864284515,
-0.023088641464710236,
-0.06657740473747253,
0.08280780911445618,
0.08586390316486359,
0.006672616582363844,
-0.04068911075592041,
-0.029963137581944466,
0.030634334310889244,
0.059031277894973755,
-0.07710961997509003,
-0.0235859677195549,
0.03580589219927788,
0.0018165356013923883,
0.043531060218811035,
0.10154237598180771,
0.10478456318378448,
0.02708570286631584,
-0.011943348683416843,
-0.007671918720006943,
0.06738749146461487,
0.024163318797945976,
0.0321674719452858,
0.015001025050878525,
0.11382055282592773,
-0.006493342574685812,
-5.012605086562871e-8,
-0.10210876166820526,
0.0017961934208869934,
-0.09132780879735947,
0.042935263365507126,
-0.04322066158056259,
-0.05954567342996597,
-0.06136693060398102,
-0.01644730754196644,
-0.035750631242990494,
-0.026793112978339195,
-0.0032716377172619104,
0.006656373385339975,
-0.07279517501592636,
-0.017393728718161583,
-0.056257035583257675,
0.022135145962238312,
-0.014879251830279827,
0.08159418404102325,
-0.030656956136226654,
-0.03931831195950508,
0.056556228548288345,
0.047447796911001205,
0.05482390150427818,
-0.06976769119501114,
-0.0032309708185493946,
-0.0013445854419842362,
-0.03753881901502609,
0.03362726792693138,
0.0030289182905107737,
0.0053899395279586315,
0.04925008863210678,
0.041176434606313705,
-0.020059699192643166,
-0.08656278252601624,
0.055261727422475815,
0.06501583009958267,
0.0010248991893604398,
-0.029066259041428566,
-0.0008301808848045766,
0.04745177552103996,
0.09240511804819107,
0.03694272041320801,
-0.12217250466346741,
0.00950700230896473,
0.0411713533103466,
-0.03420970216393471,
-0.06110074743628502,
-0.0302371047437191,
0.031572502106428146,
-0.06675484776496887,
0.06925217062234879,
-0.07058916240930557,
-0.05963710695505142,
0.017099054530262947,
0.03298110142350197,
0.025441644713282585,
0.06791000068187714,
-0.018789339810609818,
0.019099468365311623,
-0.021065909415483475,
0.04903605207800865,
-0.019902672618627548,
-0.017940497025847435,
-0.010153456591069698
] |
Salesforce/codegen-350M-multi | 2b61ebc2f74ace34d530e8ba9501198ee27ead82 | 2022-06-28T17:47:03.000Z | [
"pytorch",
"codegen",
"text-generation",
"arxiv:2203.13474",
"transformers",
"license:bsd-3-clause"
] | text-generation | false | Salesforce | null | Salesforce/codegen-350M-multi | 1,224 | 0 | transformers | ---
license: bsd-3-clause
---
# CodeGen (CodeGen-Multi 350M)
## Model description
CodeGen is a family of autoregressive language models for **program synthesis** from the paper: [A Conversational Paradigm for Program Synthesis](https://arxiv.org/abs/2203.13474) by Erik Nijkamp, Bo Pang, Hiroaki Hayashi, Lifu Tu, Huan Wang, Yingbo Zhou, Silvio Savarese, Caiming Xiong. The models were originally released in [this repository](https://github.com/salesforce/CodeGen), under 3 pre-training data variants (`NL`, `Multi`, `Mono`) and 4 model size variants (`350M`, `2B`, `6B`, `16B`).
The checkpoint included in this repository is denoted as **CodeGen-Multi 350M** in the paper, where "Multi" means the model is initialized with *CodeGen-NL 350M* and further pre-trained on a dataset of multiple programming languages, and "350M" refers to the number of trainable parameters.
## Training data
This checkpoint (CodeGen-Multi 350M) was first initialized with *CodeGen-NL 350M*, and then pre-trained on [BigQuery](https://console.cloud.google.com/marketplace/details/github/github-repos), a large-scale dataset of multiple programming languages from GitHub repositories. The data consists of 119.2B tokens and includes C, C++, Go, Java, JavaScript, and Python.
## Training procedure
CodeGen was trained using cross-entropy loss to maximize the likelihood of sequential inputs.
The family of models was trained using multiple TPU-v4-512 by Google, leveraging data and model parallelism.
See Section 2.3 of the [paper](https://arxiv.org/abs/2203.13474) for more details.
## Evaluation results
We evaluate our models on two code generation benchmarks: HumanEval and MTPB. Please refer to the [paper](https://arxiv.org/abs/2203.13474) for more details.
## Intended Use and Limitations
As an autoregressive language model, CodeGen is capable of extracting features from given natural language and programming language texts, and calculating their likelihood.
However, the model is intended for and best at **program synthesis**, that is, generating executable code given English prompts, where the prompts should be in the form of a comment string. The model can complete partially-generated code as well.
## How to use
This model can be easily loaded using the `AutoModelForCausalLM` functionality:
```python
from transformers import AutoTokenizer, AutoModelForCausalLM
tokenizer = AutoTokenizer.from_pretrained("Salesforce/codegen-350M-multi")
model = AutoModelForCausalLM.from_pretrained("Salesforce/codegen-350M-multi")
text = "def hello_world():"
input_ids = tokenizer(text, return_tensors="pt").input_ids
generated_ids = model.generate(input_ids, max_length=128)
print(tokenizer.decode(generated_ids[0], skip_special_tokens=True))
```
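Since the intended prompting style is a comment string (see "Intended Use and Limitations" above), the same snippet can also be driven by a comment-style prompt; the prompt text below is only an illustrative assumption and reuses the tokenizer and model loaded above:
```python
# illustrative comment-style prompt (not from the paper)
text = "# Python function that returns the n-th Fibonacci number\ndef fibonacci(n):"
input_ids = tokenizer(text, return_tensors="pt").input_ids
generated_ids = model.generate(input_ids, max_length=128)
print(tokenizer.decode(generated_ids[0], skip_special_tokens=True))
```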
## BibTeX entry and citation info
```bibtex
@article{Nijkamp2022ACP,
title={A Conversational Paradigm for Program Synthesis},
author={Nijkamp, Erik and Pang, Bo and Hayashi, Hiroaki and Tu, Lifu and Wang, Huan and Zhou, Yingbo and Savarese, Silvio and Xiong, Caiming},
journal={arXiv preprint},
year={2022}
}
```
| [
-0.10794465243816376,
-0.10251324623823166,
-0.028563963249325752,
0.016631783917546272,
-0.014589831233024597,
0.01519389171153307,
-0.052615195512771606,
0.026126481592655182,
-0.06801563501358032,
-0.0416460856795311,
-0.013040215708315372,
-0.05123358592391014,
-0.006010175682604313,
-0.09493175148963928,
-0.03805619105696678,
0.020117076113820076,
-0.02710251696407795,
-0.05010265111923218,
-0.01251243893057108,
-0.06287474185228348,
0.040397875010967255,
0.02019469626247883,
-0.011288907378911972,
0.04421771690249443,
0.07613810896873474,
-0.015146156772971153,
-0.03527749329805374,
0.03311778977513313,
0.114488884806633,
0.016087347641587257,
0.043382029980421066,
0.14001017808914185,
0.05465206503868103,
0.06888098269701004,
0.006446529179811478,
0.04352807626128197,
-0.04156307876110077,
-0.03426489979028702,
-0.04124827682971954,
0.043403588235378265,
-0.023563126102089882,
0.05026708170771599,
0.010869807563722134,
0.011215132661163807,
0.06181873008608818,
-0.03523355349898338,
-0.05931544676423073,
0.002183920005336404,
-0.07690528780221939,
-0.07939834892749786,
-0.06244967132806778,
-0.027291547507047653,
-0.030611146241426468,
0.04624776169657707,
0.028772994875907898,
-0.12640854716300964,
0.034330062568187714,
-0.02356378361582756,
-0.002909184666350484,
0.014766681008040905,
-0.02933765947818756,
-0.010605944320559502,
-0.07198210805654526,
-0.06394758820533752,
0.04420732706785202,
0.07863672822713852,
-0.012943679466843605,
-0.02299291267991066,
0.018387237563729286,
-0.013932598754763603,
-0.09166861325502396,
0.00047582737170159817,
-0.07099319994449615,
0.04099283367395401,
0.021724406629800797,
0.035013347864151,
0.11350855976343155,
0.017011376097798347,
0.012887165881693363,
-0.13961708545684814,
-0.02424655668437481,
0.004623703192919493,
0.005992686841636896,
-0.03790421411395073,
0.00018869068298954517,
0.019253088161349297,
-0.007881046272814274,
0.02212240919470787,
0.12360423803329468,
0.0568104162812233,
-0.03212965279817581,
0.005189026240259409,
-0.03028916008770466,
0.05540311709046364,
-0.05247144401073456,
0.06850835680961609,
0.05574347451329231,
0.006740008480846882,
0.1170329824090004,
0.0339292511343956,
0.040900830179452896,
0.043035320937633514,
0.09672722965478897,
0.012718242593109608,
-0.007832509465515614,
-0.04690760746598244,
0.06601317226886749,
0.07096942514181137,
0.015633869916200638,
-0.05648670345544815,
0.02744762971997261,
0.018118461593985558,
-0.003329290309920907,
-0.05179445073008537,
-0.02005201391875744,
0.04202726483345032,
-0.09174709022045135,
-0.06251104176044464,
0.04520978406071663,
0.07021583616733551,
-0.03706413507461548,
-0.011738236993551254,
0.020604852586984634,
-0.009472616948187351,
-0.05867389589548111,
-0.057738494127988815,
-0.07166752219200134,
1.599921463537052e-33,
0.05231171101331711,
0.02907092124223709,
0.02093002386391163,
0.01461102906614542,
0.024878591299057007,
-0.028281498700380325,
-0.011929763481020927,
-0.0018489290960133076,
-0.0909380316734314,
0.02695162408053875,
-0.030695214867591858,
-0.056160636246204376,
-0.0426395907998085,
0.1303739845752716,
0.00004346897912910208,
0.017893239855766296,
-0.0504346638917923,
0.009061344899237156,
-0.0063856118358671665,
0.016625910997390747,
0.10319256037473679,
0.04754641652107239,
-0.03749705106019974,
-0.028364762663841248,
0.043683696538209915,
0.09890101104974747,
0.06958380341529846,
-0.012790253385901451,
-0.023429932072758675,
0.04195915907621384,
-0.101503886282444,
0.018059762194752693,
-0.009887134656310081,
0.024482611566781998,
0.03584962710738182,
0.023265428841114044,
-0.04360384866595268,
-0.041547320783138275,
-0.05576137825846672,
-0.025870367884635925,
0.08281923085451126,
0.045095235109329224,
0.002768618753179908,
-0.04351123422384262,
0.05838576704263687,
-0.0923013761639595,
-0.016192002221941948,
0.008413360454142094,
-0.047941431403160095,
-0.02611919306218624,
0.018594147637486458,
0.08137085288763046,
-0.009008506312966347,
-0.0301428884267807,
-0.020590364933013916,
0.05558168888092041,
0.036498069763183594,
0.025692909955978394,
0.06592600792646408,
0.11395545303821564,
0.0025589875876903534,
0.015718620270490646,
-0.014855015091598034,
0.02608838863670826,
0.06859449297189713,
0.03748704865574837,
-0.017555996775627136,
-0.013775063678622246,
0.112853042781353,
0.0359993577003479,
-0.03730909526348114,
-0.07723411172628403,
-0.03678565472364426,
0.08516893535852432,
0.05563132464885712,
-0.07248751074075699,
0.055716373026371,
-0.04670985788106918,
-0.026883944869041443,
0.0007176813087426126,
-0.09627489745616913,
0.07430780678987503,
-0.018390614539384842,
-0.05295536667108536,
-0.06963084638118744,
-0.047876033931970596,
0.0239079799503088,
-0.029659345746040344,
-0.10245630890130997,
-0.0709138810634613,
0.023360278457403183,
-0.014224305748939514,
0.023598195984959602,
-0.009363139048218727,
-0.04761122539639473,
-4.315301107980051e-33,
0.028432393446564674,
0.007825218141078949,
-0.06819230318069458,
-0.003122476162388921,
-0.084911048412323,
-0.06789358705282211,
0.057958487421274185,
0.04531498998403549,
-0.041589438915252686,
-0.04296976700425148,
0.035189803689718246,
-0.09881734848022461,
0.09944286197423935,
0.010012095794081688,
0.07031659781932831,
-0.03914337232708931,
-0.0009522936306893826,
-0.06491870433092117,
0.0324738435447216,
0.13650737702846527,
0.06929849833250046,
0.045606572180986404,
-0.03925967216491699,
0.01584940403699875,
-0.019588565453886986,
-0.037793755531311035,
-0.019167492166161537,
0.057766955345869064,
-0.017928224056959152,
0.013281343504786491,
0.02868666499853134,
0.014455905184149742,
-0.04233582690358162,
0.010047716088593006,
-0.06937739998102188,
-0.03448382392525673,
0.036909300833940506,
-0.056690115481615067,
-0.002840189728885889,
0.07311730086803436,
0.033801764249801636,
0.014266847632825375,
-0.0581948459148407,
0.06871643662452698,
-0.026379691436886787,
0.10568264871835709,
-0.0936354398727417,
-0.00930153951048851,
0.03948621079325676,
-0.08333484083414078,
-0.005389908328652382,
-0.014713555574417114,
0.0065532587468624115,
0.011067722924053669,
0.023432178422808647,
-0.08096809685230255,
0.002350827446207404,
-0.03356979787349701,
0.03318655863404274,
-0.006015857215970755,
-0.0033195435535162687,
-0.04457584768533707,
0.08942604064941406,
-0.09249221533536911,
0.020344069227576256,
-0.01931215077638626,
-0.08996187895536423,
-0.038353465497493744,
0.004135695286095142,
-0.041164688766002655,
0.000127078325022012,
0.029992736876010895,
-0.0707770586013794,
0.006091139279305935,
-0.09993870556354523,
-0.04623277112841606,
-0.01654180698096752,
-0.001142696593888104,
-0.01265707891434431,
-0.03707072138786316,
-0.06441742926836014,
0.034627534449100494,
0.0480961836874485,
0.14979417622089386,
-0.05019015818834305,
-0.027391474694013596,
0.012179991230368614,
0.10847345739603043,
-0.002417968353256583,
0.017007356509566307,
-0.01189478486776352,
0.06405184417963028,
-0.029585449025034904,
0.06773427873849869,
-0.0421316996216774,
-5.998663965556261e-8,
0.0025339224375784397,
0.03558577969670296,
-0.024476729333400726,
0.032741986215114594,
0.03118455968797207,
-0.036640748381614685,
-0.07938919961452484,
0.029521187767386436,
0.023080945014953613,
0.010453330352902412,
0.025698740035295486,
0.046920448541641235,
-0.06433317810297012,
-0.00513865053653717,
-0.015583537518978119,
0.027139924466609955,
-0.06433112919330597,
-0.0038689777720719576,
-0.05580209195613861,
0.0016044661169871688,
0.019413964822888374,
0.03253130614757538,
0.012717579491436481,
-0.02145599201321602,
0.05744149163365364,
-0.06506385654211044,
0.04499112069606781,
0.08549798280000687,
0.04566306993365288,
-0.018061334267258644,
0.027476120740175247,
0.045034587383270264,
-0.02929787151515484,
-0.0617874339222908,
0.020207393914461136,
0.04337146878242493,
0.032116249203681946,
0.011128094978630543,
0.06402745097875595,
-0.05223890021443367,
0.06884297728538513,
-0.023690348491072655,
-0.09390928596258163,
-0.009575861506164074,
0.040121890604496,
0.003965949174016714,
-0.09785565733909607,
-0.07006492465734482,
-0.013149481266736984,
-0.002830481855198741,
0.0006648397538810968,
0.017609793692827225,
-0.03516422212123871,
0.0488412119448185,
0.0469636544585228,
0.06267665326595306,
-0.09947285056114197,
-0.05542813241481781,
0.04419771954417229,
-0.026203226298093796,
0.03671560063958168,
-0.00487243989482522,
0.013624241575598717,
-0.0019290302880108356
] |
aubmindlab/bert-base-arabertv01 | 59dc633c58a7a1e9b4c1e8d4f7be94cf9dc6a2e0 | 2021-05-19T11:50:51.000Z | [
"pytorch",
"tf",
"jax",
"bert",
"fill-mask",
"ar",
"dataset:wikipedia",
"dataset:OSIAN",
"dataset:1.5B Arabic Corpus",
"arxiv:2003.00104",
"transformers",
"autotrain_compatible"
] | fill-mask | false | aubmindlab | null | aubmindlab/bert-base-arabertv01 | 1,220 | null | transformers | ---
language: ar
datasets:
- wikipedia
- OSIAN
- 1.5B Arabic Corpus
widget:
- text: " عاصمة لبنان هي [MASK] ."
---
# !!! A newer version of this model is available !!! [AraBERTv02](https://huggingface.co/aubmindlab/bert-base-arabertv02)
# AraBERT v1 & v2 : Pre-training BERT for Arabic Language Understanding
<img src="https://raw.githubusercontent.com/aub-mind/arabert/master/arabert_logo.png" width="100" align="left"/>
**AraBERT** is an Arabic pretrained language model based on [Google's BERT architecture](https://github.com/google-research/bert). AraBERT uses the same BERT-Base config. More details are available in the [AraBERT Paper](https://arxiv.org/abs/2003.00104) and in the [AraBERT Meetup](https://github.com/WissamAntoun/pydata_khobar_meetup)
There are two versions of the model, AraBERTv0.1 and AraBERTv1, the difference being that AraBERTv1 uses pre-segmented text where prefixes and suffixes were split using the [Farasa Segmenter](http://alt.qcri.org/farasa/segmenter.html).
We evaluate AraBERT models on different downstream tasks and compare them to [mBERT](https://github.com/google-research/bert/blob/master/multilingual.md) and other state-of-the-art models (*to the extent of our knowledge*). The tasks were Sentiment Analysis on 6 different datasets ([HARD](https://github.com/elnagara/HARD-Arabic-Dataset), [ASTD-Balanced](https://www.aclweb.org/anthology/D15-1299), [ArsenTD-Lev](https://staff.aub.edu.lb/~we07/Publications/ArSentD-LEV_Sentiment_Corpus.pdf), [LABR](https://github.com/mohamedadaly/LABR)), Named Entity Recognition with the [ANERcorp](http://curtis.ml.cmu.edu/w/courses/index.php/ANERcorp), and Arabic Question Answering on [Arabic-SQuAD and ARCD](https://github.com/husseinmozannar/SOQAL)
# AraBERTv2
## What's New!
AraBERT now comes in 4 new variants to replace the old v1 versions:
More details are available in the AraBERT folder, in the [README](https://github.com/aub-mind/arabert/blob/master/AraBERT/README.md), and in the [AraBERT Paper](https://arxiv.org/abs/2003.00104v2)
Model | HuggingFace Model Name | Size (MB/Params)| Pre-Segmentation | DataSet (Sentences/Size/nWords) |
---|:---:|:---:|:---:|:---:
AraBERTv0.2-base | [bert-base-arabertv02](https://huggingface.co/aubmindlab/bert-base-arabertv02) | 543MB / 136M | No | 200M / 77GB / 8.6B |
AraBERTv0.2-large| [bert-large-arabertv02](https://huggingface.co/aubmindlab/bert-large-arabertv02) | 1.38G / 371M | No | 200M / 77GB / 8.6B |
AraBERTv2-base| [bert-base-arabertv2](https://huggingface.co/aubmindlab/bert-base-arabertv2) | 543MB / 136M | Yes | 200M / 77GB / 8.6B |
AraBERTv2-large| [bert-large-arabertv2](https://huggingface.co/aubmindlab/bert-large-arabertv2) | 1.38G / 371M | Yes | 200M / 77GB / 8.6B |
AraBERTv0.1-base| [bert-base-arabertv01](https://huggingface.co/aubmindlab/bert-base-arabertv01) | 543MB / 136M | No | 77M / 23GB / 2.7B |
AraBERTv1-base| [bert-base-arabert](https://huggingface.co/aubmindlab/bert-base-arabert) | 543MB / 136M | Yes | 77M / 23GB / 2.7B |
All models are available in the `HuggingFace` model page under the [aubmindlab](https://huggingface.co/aubmindlab/) name. Checkpoints are available in PyTorch, TF2 and TF1 formats.
## Better Pre-Processing and New Vocab
We identified an issue with AraBERTv1's wordpiece vocabulary. The issue came from punctuation marks and numbers that were still attached to words when the wordpiece vocabulary was learned. We now insert a space between numbers and characters and around punctuation characters.
The new vocabulary was learnt using the `BertWordpieceTokenizer` from the `tokenizers` library, and should now support the Fast tokenizer implementation from the `transformers` library.
**P.S.**: All the old BERT code should work with the new BERT; just change the model name and check the new preprocessing function
**Please read the section on how to use the [preprocessing function](#Preprocessing)**
## Bigger Dataset and More Compute
We used ~3.5 times more data, and trained for longer.
For Dataset Sources see the [Dataset Section](#Dataset)
Model | Hardware | num of examples with seq len (128 / 512) |128 (Batch Size/ Num of Steps) | 512 (Batch Size/ Num of Steps) | Total Steps | Total Time (in Days) |
---|:---:|:---:|:---:|:---:|:---:|:---:
AraBERTv0.2-base | TPUv3-8 | 420M / 207M |2560 / 1M | 384/ 2M | 3M | -
AraBERTv0.2-large | TPUv3-128 | 420M / 207M | 13440 / 250K | 2056 / 300K | 550K | -
AraBERTv2-base | TPUv3-8 | 520M / 245M |13440 / 250K | 2056 / 300K | 550K | -
AraBERTv2-large | TPUv3-128 | 520M / 245M | 13440 / 250K | 2056 / 300K | 550K | -
AraBERT-base (v1/v0.1) | TPUv2-8 | - |512 / 900K | 128 / 300K| 1.2M | 4 days
# Dataset
The pretraining data used for the new AraBERT model is also used for Arabic **GPT2 and ELECTRA**.
The dataset consists of 77GB or 200,095,961 lines or 8,655,948,860 words or 82,232,988,358 chars (before applying Farasa Segmentation)
For the new dataset we added the unshuffled OSCAR corpus, after thoroughly filtering it, to the dataset used in AraBERTv1, but without the websites that we previously crawled:
- OSCAR unshuffled and filtered.
- [Arabic Wikipedia dump](https://archive.org/details/arwiki-20190201) from 2020/09/01
- [The 1.5B words Arabic Corpus](https://www.semanticscholar.org/paper/1.5-billion-words-Arabic-Corpus-El-Khair/f3eeef4afb81223df96575adadf808fe7fe440b4)
- [The OSIAN Corpus](https://www.aclweb.org/anthology/W19-4619)
- Assafir news articles. Huge thank you to Assafir for providing the data
# Preprocessing
It is recommended to apply our preprocessing function before training/testing on any dataset.
**Install `farasapy` to segment text for AraBERT v1 & v2: `pip install farasapy`**
```python
from arabert.preprocess import ArabertPreprocessor
model_name="bert-base-arabertv01"
arabert_prep = ArabertPreprocessor(model_name=model_name)
text = "ولن نبالغ إذا قلنا إن هاتف أو كمبيوتر المكتب في زمننا هذا ضروري"
arabert_prep.preprocess(text)
```
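Once preprocessed, the text can be fed to the model as usual. Below is a minimal fill-mask sketch with the `transformers` library, using the widget sentence from this card as input (the predicted tokens will vary):
```python
from transformers import AutoTokenizer, AutoModelForMaskedLM, pipeline

model_name = "aubmindlab/bert-base-arabertv01"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForMaskedLM.from_pretrained(model_name)

# predict the masked token in the example sentence from the widget above
fill_mask = pipeline("fill-mask", model=model, tokenizer=tokenizer)
print(fill_mask("عاصمة لبنان هي [MASK] ."))
```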
## Accepted_models
```
bert-base-arabertv01
bert-base-arabert
bert-base-arabertv02
bert-base-arabertv2
bert-large-arabertv02
bert-large-arabertv2
araelectra-base
aragpt2-base
aragpt2-medium
aragpt2-large
aragpt2-mega
```
# TensorFlow 1.x models
The TF1.x models are available in the HuggingFace models repo.
You can download them as follows:
- via git-lfs: clone all the models in a repo
```bash
curl -s https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | sudo bash
sudo apt-get install git-lfs
git lfs install
git clone https://huggingface.co/aubmindlab/MODEL_NAME
tar -C ./MODEL_NAME -zxvf /content/MODEL_NAME/tf1_model.tar.gz
```
where `MODEL_NAME` is any model under the `aubmindlab` name
- via `wget`:
- Go to the tf1_model.tar.gz file on huggingface.co/models/aubmindlab/MODEL_NAME.
- copy the `oid sha256`
- then run `wget https://cdn-lfs.huggingface.co/aubmindlab/aragpt2-base/INSERT_THE_SHA_HERE` (ex: for `aragpt2-base`: `wget https://cdn-lfs.huggingface.co/aubmindlab/aragpt2-base/3766fc03d7c2593ff2fb991d275e96b81b0ecb2098b71ff315611d052ce65248`)
# If you used this model please cite us as:
Google Scholar has our Bibtex wrong (missing name), use this instead:
```
@inproceedings{antoun2020arabert,
title={AraBERT: Transformer-based Model for Arabic Language Understanding},
author={Antoun, Wissam and Baly, Fady and Hajj, Hazem},
booktitle={LREC 2020 Workshop Language Resources and Evaluation Conference 11--16 May 2020},
pages={9}
}
```
# Acknowledgments
Thanks to TensorFlow Research Cloud (TFRC) for the free access to Cloud TPUs; we couldn't have done it without this program. Thanks also to the [AUB MIND Lab](https://sites.aub.edu.lb/mindlab/) members for the continuous support, and to [Yakshof](https://www.yakshof.com/#/) and Assafir for data and storage access. Another thanks to [Habib Rahal](https://www.behance.net/rahalhabib) for putting a face to AraBERT.
# Contacts
**Wissam Antoun**: [Linkedin](https://www.linkedin.com/in/wissam-antoun-622142b4/) | [Twitter](https://twitter.com/wissam_antoun) | [Github](https://github.com/WissamAntoun) | <[email protected]> | <[email protected]>
**Fady Baly**: [Linkedin](https://www.linkedin.com/in/fadybaly/) | [Twitter](https://twitter.com/fadybaly) | [Github](https://github.com/fadybaly) | <[email protected]> | <[email protected]>
| [
-0.13282108306884766,
-0.02395222894847393,
0.031048879027366638,
-0.01292076613754034,
-0.05109843611717224,
0.05147882178425789,
0.027791883796453476,
-0.05389123037457466,
0.03935672715306282,
0.00007276632095454261,
0.04451025649905205,
-0.021591253578662872,
0.06644093245267868,
0.002638583304360509,
0.02220776118338108,
0.027722971513867378,
0.005758965387940407,
-0.022646861150860786,
-0.03006763570010662,
-0.05320606380701065,
0.052592694759368896,
0.06255032122135162,
0.08500789850950241,
-0.04794049263000488,
0.05918145179748535,
0.011455794796347618,
-0.04808484762907028,
-0.013413703069090843,
0.06726104021072388,
0.014376092702150345,
0.041532330214977264,
0.04088257998228073,
0.08993808180093765,
0.03579001873731613,
0.005632544867694378,
0.08547275513410568,
-0.018267054110765457,
0.02300732210278511,
0.029970072209835052,
0.09272635728120804,
0.020212752744555473,
-0.047508422285318375,
0.017932850867509842,
-0.018115278333425522,
0.03294578939676285,
0.0003219151112716645,
-0.01964983157813549,
0.05877960845828056,
-0.015893300995230675,
0.05600457638502121,
-0.07519090175628662,
-0.04142222926020622,
0.04875835031270981,
-0.03924502059817314,
-0.011407444253563881,
-0.028057297691702843,
-0.01667175441980362,
-0.0024468868505209684,
-0.004091330338269472,
-0.06796786189079285,
-0.04766898229718208,
0.0241396501660347,
-0.023232867941260338,
0.01939455233514309,
-0.09911030530929565,
0.01615697517991066,
0.03756306320428848,
-0.04213545098900795,
-0.001362527022138238,
-0.05223102495074272,
-0.04538745433092117,
0.04468704015016556,
0.0005759900668635964,
0.029794849455356598,
-0.006176377646625042,
-0.06310571730136871,
0.051399536430835724,
-0.0725896805524826,
-0.02142813615500927,
-0.05551709607243538,
0.023581573739647865,
0.026728300377726555,
0.08657675981521606,
0.004524962976574898,
0.06173902377486229,
-0.06483222544193268,
0.04000656306743622,
-0.02238200418651104,
-0.007597495801746845,
0.017804807052016258,
0.03400105983018875,
-0.07613909244537354,
0.08091804385185242,
-0.021581320092082024,
0.07807373255491257,
0.012575198896229267,
0.01913980394601822,
0.021938344463706017,
-0.07660557329654694,
0.05874430760741234,
0.027668463066220284,
-0.07128039747476578,
0.0494023896753788,
0.008100110106170177,
0.06767720729112625,
0.026256859302520752,
-0.0417499914765358,
-0.018242154270410538,
0.0405036099255085,
-0.10883554071187973,
-0.04148237779736519,
-0.05745280161499977,
-0.01691870577633381,
-0.0711866021156311,
-0.016613051295280457,
0.012009910307824612,
-0.03864151984453201,
-0.09535764157772064,
0.048350557684898376,
-0.020723816007375717,
-0.02738833613693714,
-0.0486496202647686,
0.06121721491217613,
0.01743704080581665,
-0.07236550748348236,
0.0017526957672089338,
-0.05769140645861626,
4.682931456069952e-33,
0.020039403811097145,
-0.02461293153464794,
0.007562530227005482,
-0.03137235715985298,
-0.05763871222734451,
-0.012281835079193115,
-0.04830015078186989,
0.01621427945792675,
-0.08883058279752731,
-0.06185143440961838,
-0.04190894216299057,
0.047810278832912445,
-0.03359168767929077,
0.0958443433046341,
-0.005674036685377359,
0.006695818621665239,
-0.010072613134980202,
0.032802119851112366,
0.07199862599372864,
-0.01020396314561367,
0.04731794074177742,
0.056160323321819305,
0.03364400193095207,
-0.06760186702013016,
-0.01999007537961006,
0.06701333075761795,
0.1282188445329666,
-0.08897700905799866,
-0.009284364059567451,
0.05583619698882103,
-0.10619780421257019,
0.012196714989840984,
-0.06401180475950241,
-0.032911207526922226,
-0.08118796348571777,
-0.04927147924900055,
-0.09535276889801025,
-0.05929228290915489,
-0.06162981316447258,
-0.010166998021304607,
-0.00011679933231789619,
0.031597185879945755,
0.03338106349110603,
-0.03620627149939537,
-0.006567148957401514,
0.037299301475286484,
0.03404441848397255,
0.014993946067988873,
0.05544781684875488,
0.03441108763217926,
0.02658073417842388,
0.059988781809806824,
-0.041747599840164185,
0.002648265566676855,
-0.0048272861167788506,
-0.034626733511686325,
0.06147739291191101,
0.041828397661447525,
-0.02458050660789013,
0.02212500013411045,
0.008261876180768013,
-0.014295266009867191,
0.05327308923006058,
0.05313977599143982,
-0.009434922598302364,
-0.022407829761505127,
-0.010486330837011337,
-0.017323004081845284,
0.03366648778319359,
0.030515899881720543,
0.006688577588647604,
0.005125503055751324,
0.07190317660570145,
0.11549266427755356,
-0.038122665137052536,
0.007797642610967159,
0.011018438264727592,
-0.06886188685894012,
-0.06852396577596664,
0.01743844337761402,
-0.026126336306333542,
0.05522836744785309,
0.000657128868624568,
-0.015471098013222218,
-0.16102492809295654,
0.011971058323979378,
0.12679418921470642,
-0.06869999319314957,
-0.0041191172786056995,
-0.023051366209983826,
0.03280317410826683,
-0.009633121080696583,
-0.02617875300347805,
0.030035579577088356,
-0.013052759692072868,
-4.648296984390912e-33,
0.0545208714902401,
0.026252705603837967,
-0.0881521999835968,
0.0011921529658138752,
-0.0731772854924202,
-0.06739528477191925,
0.18136411905288696,
0.19175106287002563,
0.017848826944828033,
-0.03135787323117256,
0.06478532403707504,
-0.03343014791607857,
-0.013091154396533966,
-0.09848616272211075,
0.09242799878120422,
-0.036109670996665955,
-0.014333625324070454,
-0.0013263943837955594,
-0.0116931376978755,
0.0036036036908626556,
0.02636805735528469,
-0.05145138129591942,
-0.043241504579782486,
0.016941484063863754,
-0.002177839633077383,
0.022187385708093643,
0.0077095432206988335,
0.03759171441197395,
-0.06986791640520096,
0.05071117728948593,
-0.028261719271540642,
0.013971931301057339,
-0.0746399536728859,
0.08493860810995102,
-0.07097363471984863,
0.031529732048511505,
-0.03374983370304108,
-0.03439103811979294,
-0.07405036687850952,
0.015070871450006962,
0.10995326191186905,
0.049136772751808167,
0.0052140578627586365,
0.007502412889152765,
0.007634109351783991,
0.06045873463153839,
-0.06350050866603851,
-0.014685578644275665,
0.004185838624835014,
-0.1775594800710678,
-0.011715891771018505,
0.007935752160847187,
-0.031213223934173584,
-0.029277991503477097,
-0.025573642924427986,
-0.030341772362589836,
-0.007260059472173452,
0.014836314134299755,
-0.03563299775123596,
-0.014920275658369064,
-0.0411265604197979,
-0.03331568092107773,
0.09328890591859818,
-0.04661460220813751,
-0.015546586364507675,
-0.028964122757315636,
-0.0022549086716026068,
0.006020870991051197,
-0.01103146467357874,
0.014318471774458885,
0.060827627778053284,
-0.02179652452468872,
0.0036889472976326942,
0.026250988245010376,
0.03025887906551361,
0.06998051702976227,
-0.014093685895204544,
-0.06562154740095139,
-0.06583192944526672,
-0.06906910985708237,
-0.012915698811411858,
-0.04375679790973663,
-0.029564909636974335,
0.04249059036374092,
0.02637369930744171,
0.055867999792099,
-0.0021782713010907173,
0.055733054876327515,
0.015867646783590317,
0.07368256151676178,
-0.04360212758183479,
0.006444484926760197,
-0.04440993443131447,
0.09096892178058624,
0.0021731508895754814,
-4.9415859848522814e-8,
-0.1046663299202919,
0.015416684560477734,
0.006152899470180273,
0.013479279354214668,
-0.06918567419052124,
0.019164221361279488,
-0.015138556249439716,
-0.0012886646436527371,
-0.028198925778269768,
-0.06040786951780319,
0.038211889564991,
0.0415358804166317,
-0.08348319679498672,
0.059371158480644226,
-0.07805322110652924,
0.05883230268955231,
-0.023283088579773903,
0.02340647019445896,
0.017821429297327995,
-0.03594760224223137,
-0.00878889486193657,
0.013139879330992699,
-0.005349740386009216,
-0.018406476825475693,
-0.06403911113739014,
0.0007903874502517283,
-0.059651441872119904,
0.09958306699991226,
0.01958102360367775,
-0.01635121926665306,
0.033938683569431305,
0.005757718812674284,
-0.04562130197882652,
-0.025336775928735733,
0.04901522397994995,
0.05723045393824577,
-0.09426800161600113,
-0.027187637984752655,
-0.02442626841366291,
0.03581948205828667,
0.09413489699363708,
-0.05086985602974892,
-0.05415448546409607,
-0.026275737211108208,
0.10605467110872269,
0.024090565741062164,
0.00486373296007514,
-0.07381293922662735,
-0.00871751643717289,
0.015781601890921593,
0.132720485329628,
-0.04175867140293121,
-0.008454759605228901,
0.094473697245121,
0.015379057265818119,
-0.07236722856760025,
-0.07199297845363617,
-0.05963722616434097,
0.08357754349708557,
0.08448775112628937,
0.02493034116923809,
0.025727268308401108,
0.036487117409706116,
0.02110074646770954
] |
facebook/wav2vec2-large-xlsr-53-german | 97e1c5b2b100529bbbd80d32c5b6862116beffab | 2021-07-06T02:46:28.000Z | [
"pytorch",
"jax",
"wav2vec2",
"automatic-speech-recognition",
"de",
"dataset:common_voice",
"transformers",
"speech",
"audio",
"license:apache-2.0"
] | automatic-speech-recognition | false | facebook | null | facebook/wav2vec2-large-xlsr-53-german | 1,220 | null | transformers | ---
language: de
datasets:
- common_voice
tags:
- speech
- audio
- automatic-speech-recognition
license: apache-2.0
---
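## Usage
A minimal transcription sketch that mirrors the evaluation script below; the audio path is a hypothetical placeholder and the clip is assumed to be a mono recording:
```python
import torch
import torchaudio
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor

model_name = "facebook/wav2vec2-large-xlsr-53-german"
processor = Wav2Vec2Processor.from_pretrained(model_name)
model = Wav2Vec2ForCTC.from_pretrained(model_name)

# load a (mono) recording and resample it to the 16 kHz rate the model expects
speech, sample_rate = torchaudio.load("example.wav")  # hypothetical file path
speech = torchaudio.transforms.Resample(orig_freq=sample_rate, new_freq=16_000)(speech.squeeze(0))

inputs = processor(speech.numpy(), sampling_rate=16_000, return_tensors="pt", padding=True)
with torch.no_grad():
    logits = model(inputs.input_values, attention_mask=inputs.attention_mask).logits
predicted_ids = torch.argmax(logits, dim=-1)
print(processor.batch_decode(predicted_ids)[0])
```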
## Evaluation on Common Voice DE Test
```python
import torchaudio
from datasets import load_dataset, load_metric
from transformers import (
Wav2Vec2ForCTC,
Wav2Vec2Processor,
)
import torch
import re
import sys
model_name = "facebook/wav2vec2-large-xlsr-53-german"
device = "cuda"
chars_to_ignore_regex = '[\,\?\.\!\-\;\:\"]' # noqa: W605
model = Wav2Vec2ForCTC.from_pretrained(model_name).to(device)
processor = Wav2Vec2Processor.from_pretrained(model_name)
ds = load_dataset("common_voice", "de", split="test", data_dir="./cv-corpus-6.1-2020-12-11")
resampler = torchaudio.transforms.Resample(orig_freq=48_000, new_freq=16_000)
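# load each clip, resample the 48 kHz Common Voice audio to 16 kHz, and normalize the reference transcript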
def map_to_array(batch):
speech, _ = torchaudio.load(batch["path"])
batch["speech"] = resampler.forward(speech.squeeze(0)).numpy()
batch["sampling_rate"] = resampler.new_freq
batch["sentence"] = re.sub(chars_to_ignore_regex, '', batch["sentence"]).lower().replace("’", "'")
return batch
ds = ds.map(map_to_array)
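# batched inference: greedy (argmax) decoding of the CTC logits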
def map_to_pred(batch):
features = processor(batch["speech"], sampling_rate=batch["sampling_rate"][0], padding=True, return_tensors="pt")
input_values = features.input_values.to(device)
attention_mask = features.attention_mask.to(device)
with torch.no_grad():
logits = model(input_values, attention_mask=attention_mask).logits
pred_ids = torch.argmax(logits, dim=-1)
batch["predicted"] = processor.batch_decode(pred_ids)
batch["target"] = batch["sentence"]
return batch
result = ds.map(map_to_pred, batched=True, batch_size=16, remove_columns=list(ds.features.keys()))
wer = load_metric("wer")
print(wer.compute(predictions=result["predicted"], references=result["target"]))
```
**Result**: 18.5 % | [
-0.0684971734881401,
-0.072535939514637,
-0.023812776431441307,
-0.05613519251346588,
-0.0008132628281600773,
0.0007074066670611501,
-0.03431041166186333,
-0.028219765052199364,
-0.020605534315109253,
-0.0714956521987915,
0.03803078085184097,
-0.11637496948242188,
-0.06191883236169815,
-0.005430998280644417,
-0.007410774473100901,
-0.010804388672113419,
-0.028811998665332794,
0.005250383168458939,
-0.053083665668964386,
-0.07034200429916382,
0.08175396919250488,
0.07014571130275726,
0.05067061632871628,
-0.03891166299581528,
0.100290946662426,
0.038228485733270645,
-0.038229331374168396,
0.054084111005067825,
0.037304364144802094,
0.0057767173275351524,
0.12473218142986298,
0.043334513902664185,
0.029523637145757675,
0.04649006575345993,
0.08675092458724976,
-0.02847278118133545,
-0.037834543734788895,
-0.0524926632642746,
-0.04727267846465111,
-0.03845340758562088,
0.026435090228915215,
-0.023747527971863747,
-0.026618532836437225,
-0.018309712409973145,
-0.07285935431718826,
-0.030609888955950737,
-0.05579749867320061,
0.025220753625035286,
-0.022040797397494316,
-0.039813268929719925,
-0.011760861612856388,
-0.0488535612821579,
-0.007916659116744995,
0.1206534281373024,
-0.06433609127998352,
-0.010754124261438847,
0.05427347123622894,
0.0016259511467069387,
0.06927917897701263,
-0.08944389224052429,
-0.03173258900642395,
-0.029693003743886948,
-0.008768369443714619,
-0.011935683898627758,
-0.09628098458051682,
-0.03522397577762604,
-0.07568256556987762,
-0.0074027120135724545,
-0.014744481071829796,
0.02874809131026268,
-0.1168425902724266,
0.053683359175920486,
0.0419900007545948,
0.10336337238550186,
0.04825233668088913,
-0.028766848146915436,
0.07070359587669373,
-0.008626500144600868,
0.04622297361493111,
-0.09671377390623093,
0.038474228233098984,
-0.06802167743444443,
0.0769275426864624,
0.0449017696082592,
0.09085369110107422,
-0.016198353841900826,
-0.003384744981303811,
-0.05169195309281349,
-0.03043198026716709,
-0.042983170598745346,
-0.13302750885486603,
-0.05533362179994583,
0.03557248413562775,
0.047476209700107574,
-0.024516187608242035,
0.049232322722673416,
0.007393962237983942,
0.11010026186704636,
-0.015287427231669426,
0.05700307339429855,
-0.009246828965842724,
-0.04756629467010498,
-0.0069446563720703125,
-0.00968245044350624,
-0.043095946311950684,
-0.05540075525641441,
-0.0362737700343132,
0.06967692077159882,
0.042809609323740005,
-0.05348525568842888,
0.020155197009444237,
0.01773574762046337,
-0.010978310368955135,
-0.02952028065919876,
0.09090916812419891,
-0.01963038183748722,
-0.019266851246356964,
-0.004318553954362869,
0.00004319786967243999,
0.03242458030581474,
-0.018320413306355476,
0.0011126683093607426,
-0.04439447820186615,
0.0014992895303294063,
-0.023827744647860527,
-0.0024288955610245466,
0.01985016278922558,
5.266995293401191e-33,
-0.017552748322486877,
0.007668746635317802,
-0.015312626026570797,
-0.03230835124850273,
0.0035182242281734943,
-0.035669222474098206,
-0.03369547426700592,
0.014899363741278648,
-0.03033069707453251,
0.012081034481525421,
-0.07082048058509827,
0.0008460546960122883,
-0.05528492107987404,
0.021002547815442085,
-0.06733591109514236,
0.06598630547523499,
0.04432598128914833,
-0.024153366684913635,
0.04581381753087044,
-0.0036251142155379057,
0.12791718542575836,
0.03885416314005852,
0.024631578475236893,
0.026043331250548363,
0.005201817490160465,
0.0085932407528162,
0.021955545991659164,
-0.05787000432610512,
0.06672286987304688,
0.022684719413518906,
-0.07798004150390625,
-0.04695834964513779,
0.06500964611768723,
-0.00836203247308731,
0.037538811564445496,
0.020397160202264786,
0.05898340791463852,
0.058567747473716736,
-0.03903533145785332,
-0.07498083263635635,
0.02782888524234295,
0.017976678907871246,
-0.037032097578048706,
-0.06697908043861389,
-0.018948251381516457,
-0.042950984090566635,
-0.04780532419681549,
0.06828546524047852,
0.037627458572387695,
0.043858956545591354,
-0.013592607341706753,
0.01858561672270298,
-0.011847465299069881,
0.1086101159453392,
0.0020653908140957355,
-0.07051167637109756,
0.06754589080810547,
0.016654152423143387,
0.05840442702174187,
-0.08294717222452164,
-0.0003154951555188745,
0.04335852712392807,
0.06212872266769409,
0.003788769943639636,
0.05036170035600662,
-0.06795499473810196,
0.025204945355653763,
0.04828624054789543,
-0.0423714742064476,
-0.0076857320964336395,
-0.08299853652715683,
-0.01977926306426525,
0.0222026314586401,
0.002407216699793935,
-0.011952784843742847,
-0.046468667685985565,
0.02246333658695221,
-0.06838028877973557,
-0.05970555916428566,
0.04216700792312622,
-0.03310881927609444,
0.04192696884274483,
-0.014659077860414982,
-0.0541510134935379,
-0.04389645904302597,
-0.018161261454224586,
0.019340934231877327,
-0.1377747803926468,
-0.053552620112895966,
-0.037382472306489944,
-0.08268453180789948,
0.03786337748169899,
-0.06123017519712448,
-0.018901249393820763,
-0.05562853813171387,
-6.397001318924518e-33,
0.033534832298755646,
0.14286354184150696,
0.009276513941586018,
0.042860377579927444,
0.0030599799938499928,
-0.015357164666056633,
0.10036636143922806,
0.05209800601005554,
0.006945512257516384,
-0.0631566196680069,
0.09297706186771393,
-0.12579263746738434,
0.03446849063038826,
-0.05991087481379509,
0.1092548742890358,
-0.0006248007412068546,
-0.046570103615522385,
0.060907453298568726,
0.04704234376549721,
0.059928882867097855,
-0.01764805242419243,
0.07569776475429535,
-0.0839892327785492,
0.055889714509248734,
-0.10407182574272156,
-0.0372207872569561,
0.0023834200110286474,
0.024728769436478615,
0.011560865677893162,
-0.016434025019407272,
-0.02700727805495262,
0.057642921805381775,
-0.11339425295591354,
0.06540285050868988,
-0.053113486617803574,
0.01561901904642582,
0.03671086207032204,
0.00657061580568552,
-0.004087725188583136,
0.03215976804494858,
0.0904386043548584,
0.07881166785955429,
-0.12654820084571838,
-0.019128188490867615,
-0.04127021133899689,
-0.0072488184086978436,
-0.027951525524258614,
0.010606463067233562,
-0.0437936969101429,
-0.04881482571363449,
0.10982940346002579,
-0.06332652270793915,
-0.015887467190623283,
0.06225035339593887,
-0.022564616054296494,
-0.011587663553655148,
0.07288031280040741,
-0.01654365099966526,
-0.0847596526145935,
-0.021751197054982185,
-0.0018610581755638123,
-0.04491477459669113,
-0.054705552756786346,
-0.08619042485952377,
-0.001420367043465376,
0.018018638715147972,
-0.03255626931786537,
0.07775573432445526,
0.05765751376748085,
-0.02883848361670971,
-0.02664172649383545,
0.056025415658950806,
-0.010587336495518684,
0.007513337768614292,
-0.046222809702157974,
0.02914155088365078,
-0.09119701385498047,
-0.03414429351687431,
0.004735309164971113,
0.02313278429210186,
-0.057012297213077545,
0.027265340089797974,
0.06617511808872223,
0.0046885316260159016,
0.03495071455836296,
0.053289808332920074,
-0.02153349481523037,
0.08688430488109589,
-0.03385205194354057,
0.035309214144945145,
-0.022184401750564575,
0.05524592101573944,
0.061041682958602905,
0.0916443020105362,
0.07601956278085709,
-4.7817863446653064e-8,
-0.07562320679426193,
0.016559142619371414,
-0.0007073613815009594,
-0.013071225956082344,
-0.06041417270898819,
-0.029093388468027115,
0.01980213262140751,
-0.0268087238073349,
0.03748290240764618,
-0.0004934066091664135,
0.018579287454485893,
-0.012824004516005516,
0.015178117901086807,
0.045294519513845444,
-0.0875372365117073,
0.08894743770360947,
-0.0018735290504992008,
0.11676463484764099,
0.009011445567011833,
-0.0768534317612648,
0.04066844284534454,
0.012798657640814781,
0.03723018616437912,
0.012151999399065971,
-0.00006008995114825666,
-0.04029080271720886,
-0.05734996125102043,
0.013417663052678108,
-0.0661683902144432,
0.02579984813928604,
-0.03898707032203674,
0.002255478408187628,
0.021246938034892082,
-0.08227300643920898,
0.02445594035089016,
0.061153046786785126,
-0.048174843192100525,
-0.031230002641677856,
-0.0062019918113946915,
0.045922037214040756,
0.052948370575904846,
0.08066179603338242,
-0.09700967371463776,
-0.0069061885587871075,
0.051306549459695816,
-0.04920729622244835,
-0.00011709304089890793,
-0.0066389418207108974,
0.031201303005218506,
0.05438520386815071,
0.053760599344968796,
0.04463744908571243,
-0.03606705367565155,
0.007251150906085968,
0.039058610796928406,
0.0037075469736009836,
-0.02514701895415783,
0.06309331953525543,
-0.024692388251423836,
0.0005932626663707197,
0.026157516986131668,
0.03527544438838959,
-0.00918694119900465,
-0.05545337125658989
] |
microsoft/cocolm-base | 2832a017dd206e3de5c043a005cb76c86b8ba83d | 2022-02-07T23:01:31.000Z | [
"pytorch",
"arxiv:2102.08473",
"transformers"
] | null | false | microsoft | null | microsoft/cocolm-base | 1,220 | 2 | transformers | # COCO-LM: Correcting and Contrasting Text Sequences for Language Model Pretraining
This model card is for the COCO-LM model (**base++** version) proposed in [this paper](https://arxiv.org/abs/2102.08473). The official GitHub repository can be found [here](https://github.com/microsoft/COCO-LM).
# Citation
If you find this model card useful for your research, please cite the following paper:
```
@inproceedings{meng2021coco,
title={{COCO-LM}: Correcting and contrasting text sequences for language model pretraining},
author={Meng, Yu and Xiong, Chenyan and Bajaj, Payal and Tiwary, Saurabh and Bennett, Paul and Han, Jiawei and Song, Xia},
booktitle={NeurIPS},
year={2021}
}
``` | [
-0.03510260954499245,
-0.046607572585344315,
0.08717425912618637,
0.05159371718764305,
-0.015589006245136261,
0.10567330569028854,
-0.05535425618290901,
0.0030147922225296497,
0.006126523483544588,
-0.04647398367524147,
0.054813168942928314,
-0.05046981945633888,
-0.02141791768372059,
-0.008059251122176647,
-0.03896127641201019,
0.012059761211276054,
0.002312258817255497,
0.060777198523283005,
0.05377887934446335,
-0.08977088332176208,
0.07714487612247467,
0.036642126739025116,
-0.03309641778469086,
0.002882459433749318,
-0.005815231241285801,
-0.035741981118917465,
-0.07897190749645233,
-0.03363364562392235,
0.06536827236413956,
0.003537556855008006,
-0.03763119876384735,
0.10792823880910873,
0.15017127990722656,
0.06637253612279892,
-0.028581758961081505,
0.062172383069992065,
-0.03927217051386833,
-0.0488726831972599,
0.04374174401164055,
0.008056623861193657,
0.02092478610575199,
-0.02991323359310627,
0.053343597799539566,
0.003642850322648883,
0.09963082522153854,
0.004360745195299387,
-0.032799798995256424,
-0.0007254345109686255,
-0.08368750661611557,
0.032668933272361755,
-0.09451509267091751,
0.003889187704771757,
0.014321374706923962,
0.04599575325846672,
-0.05097086355090141,
0.024021266028285027,
0.04103042930364609,
0.0012577406596392393,
0.04294688627123833,
-0.03373872488737106,
-0.04592214897274971,
-0.047398973256349564,
-0.08364634960889816,
0.00045112354564480484,
0.052038680762052536,
-0.010282996110618114,
-0.004821490496397018,
0.055376674979925156,
-0.027814503759145737,
0.0596327967941761,
-0.021301433444023132,
0.0248891431838274,
0.037149444222450256,
0.06470326334238052,
-0.02573913335800171,
0.08781316876411438,
0.0683300793170929,
0.04315417259931564,
0.04739660769701004,
-0.10466177016496658,
0.019484911113977432,
0.0211190078407526,
0.10917080193758011,
-0.057070162147283554,
0.000958584132604301,
0.007918059825897217,
-0.0144104128703475,
-0.013170174323022366,
0.013851679861545563,
-0.04695575684309006,
-0.020312475040555,
-0.06983491033315659,
0.05876529961824417,
0.06278643012046814,
-0.07072233408689499,
0.00016000216419342905,
0.005757059436291456,
0.004551928956061602,
-0.016575219109654427,
0.04204746335744858,
0.03987854719161987,
0.041940055787563324,
-0.023799538612365723,
-0.008935469202697277,
0.03195105120539665,
-0.10107333958148956,
0.05044397711753845,
-0.011078470386564732,
0.011201571673154831,
-0.01527455821633339,
0.03163091838359833,
0.010237307287752628,
-0.06479395180940628,
0.02606824040412903,
0.008421595208346844,
-0.051654208451509476,
0.014010760933160782,
-0.009064123034477234,
0.021201521158218384,
0.06792307645082474,
0.0005846183048561215,
-0.06329353153705597,
-0.0274723619222641,
-0.017385337501764297,
-0.11548589915037155,
-0.13026417791843414,
-0.02600752003490925,
3.367520216721195e-33,
0.07745711505413055,
-0.01831868477165699,
0.05310487374663353,
0.04542876034975052,
0.06758596003055573,
-0.038326047360897064,
-0.02498517371714115,
0.0029368670657277107,
-0.06024560704827309,
-0.00042186080827377737,
0.013899373821914196,
-0.027347568422555923,
-0.08199945092201233,
0.12789210677146912,
-0.04838850349187851,
-0.015264645218849182,
-0.09367147833108902,
-0.016413917765021324,
0.04502378776669502,
0.03756847605109215,
-0.0042910026386380196,
0.04967299848794937,
-0.014531617984175682,
-0.0975722223520279,
0.03214959427714348,
0.11029866337776184,
0.06347394734621048,
-0.10253359377384186,
-0.012326959520578384,
0.013606217689812183,
-0.021787257865071297,
-0.006134272553026676,
0.03468891605734825,
0.0031697596423327923,
-0.016246721148490906,
-0.0009798845276236534,
0.016354383900761604,
-0.05800178274512291,
-0.027375146746635437,
-0.0337175689637661,
-0.03851446136832237,
0.08618031442165375,
0.045798160135746,
-0.09690304845571518,
-0.046866729855537415,
0.013405236415565014,
0.03786122426390648,
-0.04269087687134743,
0.03768905624747276,
0.05168870836496353,
0.010242005810141563,
0.029225429520010948,
-0.10116109997034073,
-0.06887722760438919,
0.021704353392124176,
-0.04166083410382271,
-0.018193962052464485,
0.07018459588289261,
-0.018411293625831604,
0.030535971745848656,
0.04347516968846321,
-0.00012785440776497126,
0.027342267334461212,
0.025244906544685364,
0.008945673704147339,
0.07478167116641998,
-0.09118033945560455,
-0.0541917085647583,
0.051902662962675095,
-0.03194298967719078,
-0.13421058654785156,
-0.03341061249375343,
-0.041224997490644455,
0.030783729627728462,
-0.04781094938516617,
-0.06344424933195114,
0.02663487382233143,
-0.0637373998761177,
-0.05302686616778374,
0.014491083100438118,
-0.023816917091608047,
0.043875399976968765,
-0.015068979933857918,
-0.025192124769091606,
-0.08620865643024445,
-0.045517873018980026,
0.06147773936390877,
0.014197301119565964,
0.03945690765976906,
-0.01186677161604166,
0.06581632047891617,
0.024595243856310844,
0.012622897513210773,
0.06596946716308594,
0.00344349117949605,
-4.417502260260387e-33,
0.03061933070421219,
0.007255558390170336,
-0.05648240074515343,
0.024363882839679718,
-0.06128813698887825,
-0.06385596841573715,
0.08229310065507889,
0.09139784425497055,
0.0605706088244915,
-0.061764854937791824,
0.044233959168195724,
-0.001495684846304357,
0.04320084676146507,
0.036977145820856094,
0.01524821575731039,
-0.06040851026773453,
-0.01683039590716362,
-0.009815731085836887,
0.007600549608469009,
0.056739065796136856,
0.044271163642406464,
0.06497057527303696,
-0.14151647686958313,
-0.022252283990383148,
0.0023303977213799953,
0.061358317732810974,
-0.0138082355260849,
0.05608095973730087,
-0.04018168896436691,
-0.041174326092004776,
0.011956713162362576,
-0.0042596664279699326,
-0.05965681001543999,
0.030375057831406593,
-0.05539305880665779,
-0.02085275389254093,
-0.06458748877048492,
-0.004110280424356461,
0.014964332804083824,
0.06637758761644363,
0.057644039392471313,
-0.004188217222690582,
-0.07130447030067444,
-0.031470149755477905,
0.005053091328591108,
-0.012711258605122566,
-0.07155915349721909,
-0.020829366520047188,
0.020569492131471634,
-0.056104328483343124,
-0.054050035774707794,
-0.07785733789205551,
-0.15403583645820618,
-0.03362207114696503,
-0.05804809182882309,
-0.03411848470568657,
0.01514324638992548,
-0.042343851178884506,
0.007985202595591545,
-0.03125133737921715,
-0.06295675039291382,
0.04827296361327171,
0.006393715273588896,
-0.10482776910066605,
-0.025918621569871902,
-0.026167577132582664,
-0.020300202071666718,
-0.017922477796673775,
0.06789948046207428,
-0.024229830130934715,
0.0481986477971077,
0.05053203925490379,
-0.0586269237101078,
0.028971200808882713,
-0.04760710895061493,
-0.06065892428159714,
-0.02655472606420517,
-0.01626494526863098,
-0.0500095933675766,
-0.06888508051633835,
-0.07432417571544647,
0.0523199737071991,
0.016883108764886856,
0.17616881430149078,
0.037432488054037094,
0.1311550736427307,
0.020008103922009468,
0.059453658759593964,
-0.005911888554692268,
0.04354580491781235,
-0.01256929337978363,
0.008327929303050041,
0.006920613814145327,
0.08444799482822418,
-0.022531870752573013,
-5.3464795257696096e-8,
-0.078976571559906,
-0.02320072427392006,
-0.06081518158316612,
0.046531543135643005,
-0.04755616933107376,
-0.05363008379936218,
-0.01181767974048853,
-0.05162988230586052,
-0.00949760153889656,
0.001998570980504155,
0.030812690034508705,
0.08624943345785141,
-0.04507603868842125,
-0.0677693635225296,
-0.019899826496839523,
0.05830725282430649,
0.0065978821367025375,
0.06582999974489212,
-0.022433338686823845,
0.050681035965681076,
0.04951154440641403,
0.047390274703502655,
0.06567371636629105,
-0.012808934785425663,
-0.014472579583525658,
-0.008281784132122993,
-0.01570718362927437,
0.05853278934955597,
-0.042657751590013504,
-0.03424075245857239,
0.03494071587920189,
0.06754747778177261,
-0.000001961038151421235,
-0.009728499688208103,
0.06670184433460236,
0.07006657123565674,
0.012805771082639694,
0.004075118340551853,
0.067369244992733,
0.04151446372270584,
0.06792545318603516,
-0.010020647197961807,
-0.07250120490789413,
0.033997975289821625,
0.05863643065094948,
-0.032443612813949585,
-0.006576622370630503,
-0.12621505558490753,
0.029624979943037033,
-0.02805924229323864,
0.025419624522328377,
0.0007365173660218716,
-0.030411796644330025,
0.03881851211190224,
0.07415076345205307,
0.03715383633971214,
-0.042755600064992905,
-0.013057049363851547,
0.01623656414449215,
0.031068945303559303,
0.06101398542523384,
-0.023314237594604492,
0.007162587251514196,
-0.0317704901099205
] |
facebook/data2vec-vision-base | 72a7bdadab41d0e9a2c8d6887b9f8a50eebb8e0f | 2022-05-03T15:52:10.000Z | [
"pytorch",
"tf",
"data2vec-vision",
"feature-extraction",
"dataset:imagenet",
"dataset:imagenet-1k",
"arxiv:2202.03555",
"arxiv:2106.08254",
"transformers",
"image-classification",
"vision",
"license:apache-2.0"
] | feature-extraction | false | facebook | null | facebook/data2vec-vision-base | 1,220 | null | transformers | ---
license: apache-2.0
tags:
- image-classification
- vision
datasets:
- imagenet
- imagenet-1k
---
# Data2Vec-Vision (base-sized model, pre-trained only)
BEiT model pre-trained in a self-supervised fashion on ImageNet-1k (1.2 million images, 1000 classes) at resolution 224x224. It was introduced in the paper [data2vec: A General Framework for Self-supervised Learning in Speech, Vision and Language](https://arxiv.org/abs/2202.03555) by Alexei Baevski, Wei-Ning Hsu, Qiantong Xu, Arun Babu, Jiatao Gu, Michael Auli and first released in [this repository](https://github.com/facebookresearch/data2vec_vision/tree/main/beit).
Disclaimer: The Facebook team releasing this model did not write a model card for it, so this model card has been written by the Hugging Face team.
## Pre-Training method

For more information, please take a look at the [official paper](https://arxiv.org/abs/2202.03555).
## Abstract
*While the general idea of self-supervised learning is identical across modalities, the actual algorithms and objectives differ widely because they were developed with a single modality in mind. To get us closer to general self-supervised learning, we present data2vec, a framework that uses the same learning method for either speech, NLP or computer vision. The core idea is to predict latent representations of the full input data based on a masked view of the input in a self-distillation setup using a standard Transformer architecture. Instead of predicting modality-specific targets such as words, visual tokens or units of human speech which are local in nature, data2vec predicts contextualized latent representations that contain information from the entire input. Experiments on the major benchmarks of speech recognition, image classification, and natural language understanding demonstrate a new state of the art or competitive performance to predominant approaches.*
## Intended uses & limitations
You can use the raw model for image classification. See the [model hub](https://huggingface.co/models?other=data2vec-vision) to look for
fine-tuned versions on a task that interests you.
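A minimal feature-extraction sketch with the `transformers` library is shown below; the image URL is only an illustrative example, and it is assumed that the hosted checkpoint ships an image preprocessor configuration:
```python
from transformers import AutoFeatureExtractor, Data2VecVisionModel
from PIL import Image
import requests

# illustrative example image (any RGB image works)
url = "http://images.cocodataset.org/val2017/000000039769.jpg"
image = Image.open(requests.get(url, stream=True).raw)

feature_extractor = AutoFeatureExtractor.from_pretrained("facebook/data2vec-vision-base")
model = Data2VecVisionModel.from_pretrained("facebook/data2vec-vision-base")

inputs = feature_extractor(images=image, return_tensors="pt")
outputs = model(**inputs)
last_hidden_state = outputs.last_hidden_state  # shape: (batch_size, sequence_length, hidden_size)
```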
## Training data
The BEiT model was pretrained on [ImageNet-1k](http://www.image-net.org/), a dataset consisting of 1.2 million images and 1k classes.
## Training procedure
### Preprocessing
The exact details of preprocessing of images during training/validation can be found [here](https://github.com/microsoft/unilm/blob/master/beit/datasets.py).
Images are resized/rescaled to the same resolution (224x224) and normalized across the RGB channels with mean (0.5, 0.5, 0.5) and standard deviation (0.5, 0.5, 0.5).
### Pretraining
For all pre-training related hyperparameters, we refer to the [original paper](https://arxiv.org/abs/2106.08254) and the [original codebase](https://github.com/facebookresearch/data2vec_vision/tree/main/beit)
## Evaluation results
For evaluation results on several image classification benchmarks, we refer to Table 1 of the original paper. Note that for fine-tuning, the best results are obtained with a higher resolution. Of course, increasing the model size will result in better performance.
### BibTeX entry and citation info
```bibtex
@misc{https://doi.org/10.48550/arxiv.2202.03555,
doi = {10.48550/ARXIV.2202.03555},
url = {https://arxiv.org/abs/2202.03555},
author = {Baevski, Alexei and Hsu, Wei-Ning and Xu, Qiantong and Babu, Arun and Gu, Jiatao and Auli, Michael},
keywords = {Machine Learning (cs.LG), FOS: Computer and information sciences, FOS: Computer and information sciences},
title = {data2vec: A General Framework for Self-supervised Learning in Speech, Vision and Language},
publisher = {arXiv},
year = {2022},
copyright = {arXiv.org perpetual, non-exclusive license}
}
``` | [
-0.03884146735072136,
-0.09382094442844391,
0.03523491695523262,
-0.015261226333677769,
0.056105755269527435,
0.0027816586662083864,
-0.012442396022379398,
-0.02302704192698002,
-0.016945593059062958,
-0.024706045165657997,
0.05873629450798035,
-0.03389526158571243,
0.03187989443540573,
0.05016682296991348,
-0.0046907146461308,
-0.005634503439068794,
0.04662535712122917,
0.011715619824826717,
-0.043842609971761703,
-0.010508473962545395,
0.014970830641686916,
0.021369844675064087,
0.08111788332462311,
0.02160455659031868,
0.0055109127424657345,
-0.0074394172988832,
0.008301656693220139,
-0.020892975851893425,
0.05486402288079262,
-0.09802872687578201,
0.049365244805812836,
-0.003836552845314145,
0.07376036047935486,
0.10104569047689438,
-0.05164339765906334,
0.06433741748332977,
0.04161527752876282,
0.05369267985224724,
-0.06183786317706108,
-0.01804160326719284,
-0.027506224811077118,
0.0074454015120863914,
-0.01669929549098015,
-0.026031246408820152,
0.11600306630134583,
0.07257641851902008,
-0.032598014920949936,
-0.02586708404123783,
-0.015477280132472515,
-0.03538594767451286,
-0.10274630039930344,
-0.07557637989521027,
-0.03041774034500122,
0.027155838906764984,
-0.0004878757754340768,
0.02060893177986145,
-0.00006689746078336611,
-0.027890298515558243,
0.020204385742545128,
0.025095421820878983,
0.06565346568822861,
-0.04494839906692505,
-0.05779146030545235,
0.014233779162168503,
-0.04704965278506279,
-0.0017291688127443194,
0.006391693372279406,
-0.010036312974989414,
0.06192966178059578,
-0.06574736535549164,
0.019514329731464386,
0.04457762837409973,
0.026634182780981064,
0.0055249701254069805,
0.03499804437160492,
-0.0026433500461280346,
0.09620248526334763,
0.001683914102613926,
0.09647121280431747,
-0.08991985768079758,
-0.007255151867866516,
-0.013608635403215885,
0.11366812884807587,
-0.048767540603876114,
0.027781900018453598,
0.05297444388270378,
-0.060602542012929916,
0.04753902554512024,
-0.026719272136688232,
-0.029240384697914124,
-0.05027619004249573,
-0.03005184791982174,
-0.018819086253643036,
-0.006862212438136339,
0.014894723892211914,
-0.02893475443124771,
0.021784115582704544,
-0.05404933914542198,
-0.029069652780890465,
0.0792381539940834,
0.00612760242074728,
-0.04322609305381775,
-0.001527056097984314,
0.0008147800108417869,
0.09629888087511063,
-0.021690143272280693,
0.023050550371408463,
0.009244979359209538,
0.10264365375041962,
-0.04587918147444725,
0.031457751989364624,
-0.0034391290973871946,
-0.12621629238128662,
-0.07274588942527771,
0.012126271612942219,
0.016826700419187546,
-0.05508332699537277,
-0.025289546698331833,
0.07869335263967514,
-0.016487475484609604,
-0.016565853729844093,
-0.015421842224895954,
0.024446481838822365,
-0.07289651036262512,
-0.005336572881788015,
-0.08494337648153305,
-0.12664973735809326,
1.7015519500442622e-33,
0.05004280060529709,
0.048566851764917374,
0.0518307164311409,
-0.018112439662218094,
0.031127722933888435,
-0.05726275593042374,
-0.012843267992138863,
-0.057620663195848465,
-0.027340678498148918,
-0.03054850921034813,
-0.04992356151342392,
-0.03832019492983818,
-0.040188875049352646,
0.1167585626244545,
0.0164619293063879,
-0.04264773428440094,
-0.05576155334711075,
-0.0128775117918849,
0.042486801743507385,
0.03041207231581211,
0.06676341593265533,
0.031360771507024765,
0.049020297825336456,
-0.00804460234940052,
0.0452289804816246,
0.03401089832186699,
0.03529218211770058,
-0.07809591293334961,
0.08009608834981918,
0.060627635568380356,
-0.05273541435599327,
-0.0007440969347953796,
0.03687671944499016,
0.019790321588516235,
0.05021504685282707,
-0.023203173652291298,
-0.008180753327906132,
-0.012244680896401405,
0.012718659825623035,
-0.048148397356271744,
0.014726645313203335,
0.05473211407661438,
0.04774601757526398,
-0.08189965039491653,
-0.0914655551314354,
-0.05929940566420555,
-0.010502169840037823,
0.060813844203948975,
-0.023211568593978882,
-0.027906764298677444,
0.03560050204396248,
-0.009547128342092037,
-0.10042670369148254,
-0.057296935468912125,
-0.061503224074840546,
0.01604527421295643,
0.07668919861316681,
0.046529144048690796,
0.03181008994579315,
-0.01065607275813818,
0.015862716361880302,
-0.009413468651473522,
0.0035055256448686123,
0.052315983921289444,
0.026164069771766663,
-0.05289102718234062,
-0.016216041520237923,
-0.012363884598016739,
0.0075421142391860485,
-0.03607306629419327,
-0.03557981923222542,
0.015476420521736145,
-0.02571498416364193,
-0.10221922397613525,
0.043687690049409866,
-0.011433416977524757,
0.06331692636013031,
-0.09893269836902618,
-0.021350134164094925,
0.09880802035331726,
-0.05707082897424698,
-0.005425604525953531,
0.005631073843687773,
-0.09409891068935394,
-0.02346798963844776,
-0.03190796449780464,
0.06561940908432007,
-0.07929898053407669,
-0.009074428118765354,
-0.0035724909976124763,
0.02146165817975998,
0.06490647792816162,
0.028527162969112396,
0.02440689504146576,
-0.000849806412588805,
-2.856183480191362e-33,
0.02933008223772049,
0.1277369111776352,
-0.06056491285562515,
0.028688019141554832,
0.004469709470868111,
-0.048178788274526596,
0.10290643572807312,
0.14843763411045074,
-0.02595401182770729,
-0.05711013078689575,
0.056990914046764374,
-0.010838421061635017,
0.012351159006357193,
-0.037096235901117325,
0.009455633349716663,
-0.08548908680677414,
0.02381286397576332,
-0.04255358502268791,
0.006453074049204588,
0.028825141489505768,
0.031144453212618828,
0.07888796925544739,
-0.06218099221587181,
0.03381229192018509,
-0.09130891412496567,
0.033580318093299866,
-0.0036795164924114943,
0.041112154722213745,
-0.007156336214393377,
0.013980988413095474,
-0.055194269865751266,
-0.04192597419023514,
-0.02193986438214779,
-0.015622290782630444,
-0.05860968679189682,
0.048676908016204834,
-0.019592560827732086,
-0.07749726623296738,
-0.02053537219762802,
0.06385789066553116,
0.07572285085916519,
0.026905538514256477,
-0.10139662772417068,
-0.0034352540969848633,
-0.060903649777173996,
-0.09759411960840225,
-0.014054886065423489,
0.053678497672080994,
0.033098042011260986,
-0.0228599663823843,
-0.05790473893284798,
-0.0297863706946373,
-0.020043084397912025,
0.03910037875175476,
-0.03966442495584488,
-0.06485854089260101,
0.031952060759067535,
0.02528674155473709,
0.04452471062541008,
0.009313724935054779,
-0.038892894983291626,
-0.06573997437953949,
-0.06388048082590103,
0.005399646237492561,
-0.0012006505858153105,
-0.02441403642296791,
-0.10676682740449905,
0.048328351229429245,
0.002875113394111395,
0.03501822054386139,
-0.019294695928692818,
0.0481121763586998,
-0.02916199155151844,
0.04884394258260727,
-0.06875409185886383,
-0.07012303918600082,
-0.0622403621673584,
0.021523866802453995,
0.07089029997587204,
-0.0643756315112114,
-0.030198125168681145,
-0.02354491874575615,
0.01515231654047966,
0.09836435317993164,
0.16081848740577698,
0.053872812539339066,
0.05296702682971954,
0.03043416514992714,
0.010802976787090302,
-0.0023357500322163105,
-0.04520370811223984,
0.04925066977739334,
0.02172260731458664,
0.09416235983371735,
0.020824307575821877,
-5.509109257673117e-8,
-0.14936821162700653,
0.00570073164999485,
0.04672650620341301,
-0.0049567995592951775,
0.03304562345147133,
-0.08880884200334549,
0.007785777561366558,
0.05206575244665146,
0.015303746797144413,
0.02995196543633938,
-0.007069636136293411,
0.10531442612409592,
-0.06213130056858063,
-0.05349508300423622,
-0.05144929140806198,
0.03685171529650688,
0.04147076979279518,
0.07850439101457596,
-0.005163902882486582,
-0.07012464851140976,
0.012885713949799538,
-0.06584051996469498,
-0.01585989072918892,
-0.07173366099596024,
0.012689163908362389,
-0.059648703783750534,
-0.059616170823574066,
0.0365997776389122,
-0.04951542243361473,
-0.044433750212192535,
-0.03882155567407608,
0.04671370983123779,
0.010419621132314205,
-0.10253675282001495,
0.10671059787273407,
0.05472403019666672,
-0.05982585251331329,
-0.011749885976314545,
-0.028069226071238518,
-0.041979122906923294,
0.03666719049215317,
0.07743609696626663,
-0.00803933572024107,
-0.02292114496231079,
0.07032273709774017,
0.05456014722585678,
0.03200339153409004,
-0.1171317994594574,
0.0011340515920892358,
0.01019328273832798,
0.024497684091329575,
0.03141109272837639,
-0.04987679421901703,
0.079411081969738,
0.02398143894970417,
0.024173986166715622,
0.014416854828596115,
-0.0008735065348446369,
0.06111371889710426,
0.07536950707435608,
0.02891712635755539,
0.06775293499231339,
-0.022249417379498482,
-0.02402031235396862
] |
cyclone/simcse-chinese-roberta-wwm-ext | 871d7039a3fccd4869d545a25b63c545341ca7f4 | 2021-09-02T03:04:17.000Z | [
"pytorch",
"bert",
"feature-extraction",
"arxiv:2104.08821",
"transformers"
] | feature-extraction | false | cyclone | null | cyclone/simcse-chinese-roberta-wwm-ext | 1,219 | 6 | transformers | ## Cyclone SIMCSE RoBERTa WWM Ext Chinese
This model provides simplified Chinese sentence embeddings based on [Simple Contrastive Learning (SimCSE)](https://arxiv.org/abs/2104.08821).
The pretrained model (Chinese RoBERTa WWM Ext) is used for token encoding.
### Usage
Please use [SentenceTransformer](https://github.com/UKPLab/sentence-transformers) to load the model.
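Beyond constructing the encoder (the minimal loading snippet follows below), a hedged sketch of producing embeddings and scoring sentence similarity with them; the cosine-similarity step and the example sentences are illustrative choices, not something this card specifies:
```python
import numpy as np
from sentence_transformers import SentenceTransformer

encoder = SentenceTransformer('cyclone/simcse-chinese-roberta-wwm-ext')

# Encode two simplified-Chinese sentences into dense vectors
embeddings = encoder.encode(["今天天气很好", "今天的天气不错"])

# Cosine similarity between the two embeddings
score = np.dot(embeddings[0], embeddings[1]) / (
    np.linalg.norm(embeddings[0]) * np.linalg.norm(embeddings[1])
)
print(score)
```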
```python
from sentence_transformers import SentenceTransformer
encoder = SentenceTransformer('cyclone/simcse-chinese-roberta-wwm-ext')
``` | [
-0.04382358863949776,
-0.05263707786798477,
-0.02294524759054184,
0.0942118689417839,
-0.015183926559984684,
0.05790835991501808,
-0.014069867320358753,
0.05578943341970444,
0.07006406784057617,
-0.027162162587046623,
0.08211392909288406,
-0.033978383988142014,
0.035581085830926895,
0.031653959304094315,
0.010166263207793236,
0.08211496472358704,
-0.007073728367686272,
0.046564385294914246,
-0.06474819779396057,
-0.10852815955877304,
0.03769858554005623,
0.031271275132894516,
0.05517464503645897,
-0.005489320028573275,
0.054604146629571915,
0.016574060544371605,
-0.04644642770290375,
0.04798557609319687,
0.09390807151794434,
0.028022492304444313,
-0.011632595211267471,
0.008731185458600521,
0.02087705209851265,
0.09548910707235336,
0.05618472397327423,
0.053820449858903885,
-0.02337552234530449,
-0.12040630728006363,
-0.012645530514419079,
-0.01953861489892006,
0.017105404287576675,
0.04683427885174751,
0.009232327342033386,
-0.03323829174041748,
0.026417527347803116,
0.013098975643515587,
-0.0019060063641518354,
-0.006409374065697193,
-0.06799859553575516,
-0.04289223253726959,
-0.050704240798950195,
0.022441547363996506,
-0.0260927751660347,
0.0472298264503479,
-0.04614352062344551,
-0.026506615802645683,
0.017803270369768143,
0.04233791306614876,
0.01632828079164028,
-0.08864635974168777,
-0.11717946827411652,
0.04240918159484863,
-0.05171075463294983,
-0.013814786449074745,
-0.003234102390706539,
-0.01596027985215187,
-0.06100955232977867,
0.08711090683937073,
0.011869585141539574,
0.004162251483649015,
-0.046929970383644104,
-0.00875419843941927,
-0.05215967446565628,
0.02233992889523506,
-0.05808093398809433,
0.01333465427160263,
0.10684909671545029,
-0.027561798691749573,
0.05574891343712807,
-0.029512159526348114,
0.054181329905986786,
-0.04548005014657974,
0.11793313175439835,
0.01071992702782154,
0.08580873161554337,
-0.01935412548482418,
-0.06495774537324905,
0.05619090422987938,
0.007349906023591757,
0.027564100921154022,
-0.03830382600426674,
-0.06258183717727661,
0.05608426406979561,
0.061176370829343796,
-0.0479477196931839,
0.007400167640298605,
0.023214267566800117,
0.045070819556713104,
-0.05676160380244255,
0.03522033244371414,
0.01155480369925499,
0.03843648359179497,
0.054419342428445816,
-0.03563190996646881,
-0.047113969922065735,
-0.06757581979036331,
0.07220722734928131,
0.0018601499032229185,
0.06615649163722992,
-0.10318813472986221,
0.0053509874269366264,
0.010486604645848274,
-0.05469081178307533,
-0.035039979964494705,
0.026977738365530968,
-0.028931327164173126,
-0.022727709263563156,
-0.04087086021900177,
-0.05577527731657028,
0.043284375220537186,
0.003861404489725828,
-0.013015186414122581,
-0.07576495409011841,
0.03343832492828369,
0.009390351362526417,
-0.08208559453487396,
0.04407719895243645,
5.019530957350052e-33,
0.0154097406193614,
0.052762486040592194,
0.016978010535240173,
-0.007091302890330553,
0.03902318328619003,
0.013106264173984528,
0.04165630787611008,
-0.03518590331077576,
-0.0672285258769989,
0.022338585928082466,
-0.06822898983955383,
0.014586610719561577,
-0.06440933048725128,
0.09179118275642395,
-0.01790245622396469,
-0.012168366461992264,
-0.06963621079921722,
-0.06495139747858047,
0.03215307369828224,
0.03539451211690903,
0.09504922479391098,
0.08031857013702393,
-0.00757799344137311,
-0.13269028067588806,
-0.09418467432260513,
0.010871469974517822,
0.08523183315992355,
-0.07124079763889313,
0.02484079636633396,
0.02086053602397442,
-0.08120331168174744,
0.0228585135191679,
-0.04097844660282135,
0.014866400510072708,
-0.03187542408704758,
0.008523449301719666,
0.028622513636946678,
0.009490198455750942,
-0.0371149443089962,
-0.02253350429236889,
0.04797019436955452,
0.04390063136816025,
-0.020415866747498512,
0.025047166272997856,
-0.028742928057909012,
0.0041315797716379166,
0.013138427399098873,
-0.0299797635525465,
0.07059197872877121,
0.025127537548542023,
0.02920893393456936,
0.0209755040705204,
-0.03365197777748108,
0.01795521192252636,
0.06760650873184204,
0.0315634049475193,
0.1269492655992508,
-0.013129886239767075,
-0.02386641874909401,
-0.02443525567650795,
0.011149042285978794,
-0.08030304312705994,
0.07241678982973099,
0.048485495150089264,
0.06754583865404129,
-0.012565846554934978,
0.004150275141000748,
-0.06452023983001709,
-0.003977886401116848,
0.015429520048201084,
-0.07126793265342712,
-0.02452421560883522,
0.025794368237257004,
-0.00872023869305849,
-0.0182610172778368,
-0.027493931353092194,
-0.0032237840350717306,
-0.08270253241062164,
-0.07384185492992401,
0.0261529553681612,
0.0075129056349396706,
-0.033291660249233246,
0.022894473746418953,
-0.005589003209024668,
-0.04644785821437836,
-0.04037363454699516,
0.054036110639572144,
-0.054263919591903687,
0.017863931134343147,
-0.039694830775260925,
-0.01569836586713791,
-0.03539087995886803,
0.054280512034893036,
0.03717530518770218,
0.02667175978422165,
-5.352536017310529e-33,
-0.005673612933605909,
0.11536263674497604,
-0.056587640196084976,
-0.015375618822872639,
-0.0484868660569191,
-0.06860466301441193,
0.014453564770519733,
0.07839546352624893,
-0.0188294667750597,
-0.0018639033660292625,
0.03814883530139923,
-0.049973778426647186,
0.07011274993419647,
0.007878253236413002,
0.04149320721626282,
0.03323354572057724,
0.009836790151894093,
0.11037839949131012,
0.030227892100811005,
0.01832207664847374,
-0.042297713458538055,
0.034585922956466675,
-0.073453888297081,
0.030076364055275917,
-0.006782138720154762,
0.03521944582462311,
0.04038821905851364,
-0.017237244173884392,
0.005593339446932077,
-0.04527610167860985,
-0.0593210868537426,
0.007614353206008673,
-0.00012105997302569449,
0.019841568544507027,
-0.09618675708770752,
0.03245951980352402,
0.028081579133868217,
-0.05044097080826759,
-0.04395429790019989,
-0.02581000328063965,
0.09221643954515457,
0.024497268721461296,
-0.04078041389584541,
0.05522683262825012,
-0.0016946785617619753,
0.06322520226240158,
-0.0880851224064827,
-0.06387276202440262,
-0.000010530478903092444,
0.031126175075769424,
0.029850684106349945,
-0.03469041734933853,
-0.0860157459974289,
0.042761534452438354,
-0.02620105817914009,
-0.07787882536649704,
0.01771029829978943,
-0.10333086550235748,
-0.0700136348605156,
-0.07304495573043823,
-0.018979476764798164,
-0.00937254074960947,
-0.004833035171031952,
-0.1115598976612091,
0.04320147633552551,
-0.0789373368024826,
0.06586325168609619,
0.0429794006049633,
0.008812918327748775,
-0.08681070059537888,
0.06175840646028519,
0.0017464130651205778,
-0.0029445861000567675,
0.0665121003985405,
0.028800033032894135,
0.003618595190346241,
-0.09571562707424164,
-0.006338017992675304,
-0.04293449595570564,
-0.022780397906899452,
-0.023268956691026688,
-0.043311819434165955,
0.00983528420329094,
0.016724802553653717,
0.010045062750577927,
0.018386680632829666,
0.05135359615087509,
0.05345061793923378,
0.005837793927639723,
0.04287150129675865,
-0.007281668484210968,
0.032775312662124634,
0.07894667237997055,
0.08446473628282547,
-0.016657503321766853,
-4.4515889641161266e-8,
-0.10264381021261215,
-0.08765126764774323,
-0.10518703609704971,
-0.0176570825278759,
-0.13438037037849426,
-0.04355023801326752,
-0.07387691736221313,
0.0008072029449976981,
0.003123503178358078,
0.020448563620448112,
0.019625067710876465,
-0.03707617521286011,
-0.07624027878046036,
0.022173292934894562,
-0.0577341765165329,
0.09321320056915283,
-0.0008995083626359701,
0.05387407913804054,
0.04889640212059021,
0.022499404847621918,
-0.0012763127451762557,
0.06245602294802666,
0.04692469909787178,
-0.030220268294215202,
-0.06160210818052292,
0.03497432917356491,
-0.08945352584123611,
0.09097452461719513,
-0.031094783917069435,
-0.016963791102170944,
0.016538770869374275,
0.03704412654042244,
-0.006131269037723541,
-0.05208837613463402,
-0.13351041078567505,
0.07525686919689178,
0.08429796993732452,
-0.05212077870965004,
0.027145270258188248,
0.0646262839436531,
0.058368608355522156,
-0.04375753924250603,
-0.10968277603387833,
0.017263246700167656,
0.0731498971581459,
-0.026346411556005478,
0.006895240396261215,
-0.0851917415857315,
0.0767192617058754,
-0.006024728994816542,
0.08226723223924637,
-0.07678423076868057,
-0.016955390572547913,
-0.09718667715787888,
0.020166251808404922,
0.005204516928642988,
0.013317892327904701,
0.05625355243682861,
0.04031668230891228,
0.02271200716495514,
-0.022688882425427437,
0.07535252720117569,
-0.003641913877800107,
-0.029211949557065964
] |
allenai/macaw-3b | c4d1b101bcec5de649b927bb92c4e93c311c0be2 | 2021-09-21T15:59:14.000Z | [
"pytorch",
"t5",
"text2text-generation",
"en",
"transformers",
"license:apache-2.0",
"autotrain_compatible"
] | text2text-generation | false | allenai | null | allenai/macaw-3b | 1,216 | null | transformers | ---
language: en
widget:
- text: $answer$ ; $mcoptions$ ; $question$ = What is the color of a cloudy sky?
license: apache-2.0
---
# macaw-3b
## Model description
Macaw (<b>M</b>ulti-<b>a</b>ngle <b>c</b>(q)uestion <b>a</b>ns<b>w</b>ering) is a ready-to-use model capable of
general question answering,
showing robustness outside the domains it was trained on. It has been trained in "multi-angle" fashion,
which means it can handle a flexible set of input and output "slots"
(question, answer, multiple-choice options, context, and explanation).
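As a hedged sketch of what this looks like in practice (assuming the slot syntax shown in the widget example above and the standard `transformers` seq2seq API; the exact generated text is not guaranteed):
```python
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

tokenizer = AutoTokenizer.from_pretrained("allenai/macaw-3b")
model = AutoModelForSeq2SeqLM.from_pretrained("allenai/macaw-3b")

# Request the "answer" slot, supplying the "question" slot as input
# (slot syntax follows the widget example in this card's metadata)
input_string = "$answer$ ; $question$ = What is the color of a cloudy sky?"
input_ids = tokenizer(input_string, return_tensors="pt").input_ids
output_ids = model.generate(input_ids, max_length=200)

# The model answers in slot form, e.g. something along the lines of "$answer$ = gray"
print(tokenizer.batch_decode(output_ids, skip_special_tokens=True))
```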
Macaw was built on top of [T5](https://github.com/google-research/text-to-text-transfer-transformer) and comes in
three sizes: [macaw-11b](https://huggingface.co/allenai/macaw-11b), [macaw-3b](https://huggingface.co/allenai/macaw-3b),
and [macaw-large](https://huggingface.co/allenai/macaw-large), as well as an answer-focused version featured on
various leaderboards [macaw-answer-11b](https://huggingface.co/allenai/macaw-answer-11b).
See https://github.com/allenai/macaw for more details. | [
-0.08988972753286362,
0.011652089655399323,
0.00644812174141407,
0.03261668235063553,
0.01700751855969429,
-0.015355605632066727,
0.05713360384106636,
-0.016219738870859146,
0.03707906976342201,
-0.0378384031355381,
-0.011452141217887402,
-0.10224273055791855,
0.07536254823207855,
-0.015432097017765045,
0.07925084233283997,
0.07426925748586655,
0.10791076719760895,
-0.04443764686584473,
-0.08710269629955292,
-0.07441166788339615,
0.02734246663749218,
0.021889982745051384,
0.022578351199626923,
0.0027124325279146433,
-0.06387008726596832,
0.060424380004405975,
0.05741807445883751,
-0.0034715079236775637,
-0.03126620128750801,
-0.015252388082444668,
-0.00824547465890646,
0.04162716493010521,
-0.006758410017937422,
0.09427909553050995,
-0.03290826082229614,
0.06532398611307144,
-0.011715096421539783,
-0.006367850583046675,
-0.10769054293632507,
-0.052015818655490875,
-0.09916916489601135,
-0.007416579406708479,
0.006048486102372408,
-0.019489387050271034,
0.05396179482340813,
-0.13955733180046082,
-0.05425161495804787,
-0.027842527255415916,
-0.053398847579956055,
-0.0077001675963401794,
-0.0775662213563919,
-0.12767216563224792,
-0.007650987710803747,
0.04997199773788452,
0.04292198270559311,
0.05707873776555061,
-0.06940402090549469,
-0.014908663928508759,
-0.018385792151093483,
-0.006652020383626223,
-0.01620844006538391,
0.008517242968082428,
-0.03870192542672157,
0.05511074513196945,
0.010926906019449234,
0.0050095985643565655,
-0.04485049843788147,
0.013996627181768417,
-0.012472310103476048,
-0.02304481714963913,
-0.075679711997509,
-0.020401159301400185,
0.035828400403261185,
0.008380592800676823,
0.024406353011727333,
-0.011374279856681824,
0.04817068204283714,
-0.030899805948138237,
0.015230290591716766,
-0.016800198704004288,
0.013851935043931007,
-0.023706398904323578,
0.026034168899059296,
0.07676201313734055,
0.11071094870567322,
0.04991825670003891,
0.013100787065923214,
0.10367251932621002,
-0.04773341119289398,
0.046475060284137726,
-0.004762118682265282,
-0.08306332677602768,
0.03903982415795326,
0.009276858530938625,
0.009590544737875462,
0.01789090596139431,
0.05471454933285713,
-0.05081988498568535,
-0.08403226733207703,
0.08591534942388535,
0.05868879705667496,
0.02636220119893551,
0.07385953515768051,
-0.07968749105930328,
-0.005761120934039354,
-0.04713018983602524,
0.02234710194170475,
-0.004593999125063419,
0.039786700159311295,
-0.09741829335689545,
-0.03199370205402374,
-0.023385843262076378,
-0.051789406687021255,
-0.017510689795017242,
0.00026494800113141537,
-0.01605648547410965,
0.02827807515859604,
0.004352842457592487,
-0.009285365231335163,
-0.005784495268017054,
0.027270499616861343,
0.07567909359931946,
-0.030498893931508064,
0.05080823972821236,
0.09183431416749954,
-0.008472736924886703,
-0.015241357497870922,
2.716933317367e-33,
0.10258457809686661,
0.0039605265483260155,
0.061226122081279755,
0.05583764612674713,
0.06028097867965698,
-0.0042295316234230995,
-0.05020316690206528,
0.049881305545568466,
-0.01302673202008009,
0.02069183439016342,
0.031593725085258484,
0.05278982222080231,
-0.06719779223203659,
0.020682930946350098,
0.08564199507236481,
-0.037957813590765,
-0.12359579652547836,
0.03296361863613129,
0.005032145418226719,
0.006188725121319294,
-0.011596729047596455,
0.03130616247653961,
0.00533317681401968,
-0.024373453110456467,
0.010550719685852528,
0.041712284088134766,
0.08134973049163818,
-0.05010008439421654,
0.002074307296425104,
0.018287094309926033,
-0.09378720074892044,
-0.0682959258556366,
0.002905473345890641,
-0.04031643271446228,
-0.007361816707998514,
-0.0435120053589344,
-0.03087053820490837,
-0.10038469731807709,
-0.0024514736142009497,
-0.004568927455693483,
-0.008632650598883629,
-0.02268066070973873,
0.022173503413796425,
-0.01286440622061491,
-0.033564258366823196,
0.008758578449487686,
-0.0282208900898695,
-0.038313575088977814,
-0.08664444833993912,
-0.001091373385861516,
-0.014672736637294292,
0.005841935519129038,
0.015594452619552612,
-0.05194440856575966,
0.0363345667719841,
0.006126665510237217,
0.04619145020842552,
0.027199285104870796,
0.008038200438022614,
0.07302109897136688,
-0.011636346578598022,
-0.03384266048669815,
0.06325725466012955,
0.01067944336682558,
0.06452055275440216,
0.02501700446009636,
-0.02293931506574154,
-0.0028759525157511234,
0.04370912164449692,
0.025847364217042923,
-0.06313994526863098,
0.0315721295773983,
-0.020434627309441566,
-0.019719552248716354,
0.009963533841073513,
-0.04003465175628662,
-0.052595481276512146,
-0.08246949315071106,
-0.016116751357913017,
0.042275648564100266,
-0.0003502639592625201,
-0.0316852405667305,
0.014708520844578743,
-0.022540299221873283,
-0.04128788411617279,
0.05595453828573227,
0.09301783889532089,
-0.06207646057009697,
-0.02287844754755497,
-0.04601690545678139,
-0.03405483067035675,
0.04411623254418373,
-0.017667600885033607,
-0.11629770696163177,
0.04798080027103424,
-3.700509113634348e-33,
0.03840452432632446,
-0.05850248783826828,
-0.14022162556648254,
0.1048082709312439,
-0.03491111472249031,
-0.018215421587228775,
0.07049769163131714,
0.09053299576044083,
-0.012485913001000881,
-0.06858395040035248,
-0.049058642238378525,
0.062295328825712204,
0.019153425469994545,
-0.03888479992747307,
0.01809772290289402,
0.025298135355114937,
-0.08734095841646194,
-0.04105399176478386,
0.027064407244324684,
0.006015768740326166,
-0.023963864892721176,
0.019718732684850693,
-0.03627320006489754,
0.012530566193163395,
-0.043035127222537994,
0.05585334450006485,
0.017676251009106636,
0.03801918774843216,
0.020766466856002808,
0.027739277109503746,
-0.062257859855890274,
0.004685955122113228,
0.06456290930509567,
0.01147561427205801,
-0.07332701981067657,
0.07565131783485413,
0.028920384123921394,
-0.006734102964401245,
-0.03008388541638851,
0.14035874605178833,
0.035114385187625885,
-0.03818938136100769,
-0.05233510211110115,
0.01968459039926529,
-0.1050318107008934,
0.05955546349287033,
-0.09530074149370193,
0.020580653101205826,
0.008433055132627487,
-0.02160567045211792,
0.0005631858948618174,
-0.04297465458512306,
-0.07553232461214066,
-0.008792348206043243,
0.0001618738897377625,
0.005533199291676283,
-0.042189568281173706,
-0.043807920068502426,
0.015238642692565918,
0.017482783645391464,
-0.10208883136510849,
0.032728951424360275,
0.0791262835264206,
-0.02291620709002018,
0.08472580462694168,
-0.01316259615123272,
-0.017147529870271683,
-0.007981205359101295,
0.006884504575282335,
-0.08348928391933441,
0.09706006199121475,
-0.049775078892707825,
0.050268471240997314,
-0.003539970377460122,
0.1214698776602745,
-0.00027324556140229106,
0.06652749329805374,
-0.021680204197764397,
-0.042736489325761795,
-0.02243131399154663,
-0.03176703304052353,
0.018312102183699608,
0.07532023638486862,
0.1005374863743782,
0.0013292591320350766,
-0.038648821413517,
0.03257634490728378,
0.08629573881626129,
0.04581795632839203,
0.01753765158355236,
0.01624111831188202,
0.08500364422798157,
-0.023900914937257767,
0.08156808465719223,
-0.07358385622501373,
-6.133699770316525e-8,
-0.140091210603714,
-0.03222103416919708,
-0.054721228778362274,
0.0409616194665432,
-0.04831834137439728,
-0.001719777937978506,
-0.0017868392169475555,
-0.0025221118703484535,
0.010474932380020618,
0.00604121433570981,
-0.01625906676054001,
0.0005941983545199037,
-0.12222284078598022,
0.023250514641404152,
0.013115404173731804,
0.05759427323937416,
-0.0012235421454533935,
0.02804672159254551,
-0.014231345616281033,
-0.09673655778169632,
0.038242973387241364,
0.05092642083764076,
-0.04148673638701439,
0.06590907275676727,
0.0014193730894476175,
0.1047467291355133,
-0.14789502322673798,
0.09697040915489197,
0.03706122562289238,
0.014735928736627102,
0.016993068158626556,
-0.03844474256038666,
-0.04280887171626091,
0.038880281150341034,
0.015496074222028255,
0.029492637142539024,
-0.12681058049201965,
-0.007772043347358704,
0.03370785713195801,
0.006869788281619549,
-0.009079938754439354,
-0.010665958747267723,
-0.10719522088766098,
-0.012380936183035374,
0.03959118202328682,
0.04364630952477455,
-0.0013286973116919398,
-0.09750235825777054,
-0.04662039130926132,
-0.0353177934885025,
-0.03026042878627777,
-0.023424720391631126,
0.02131589874625206,
0.04800909757614136,
-0.0346565879881382,
0.03844917193055153,
0.07505179941654205,
-0.03670670464634895,
0.004708888940513134,
0.014566222205758095,
0.009093883447349072,
0.021605947986245155,
-0.010149900801479816,
0.06262300908565521
] |
sentence-transformers/bert-base-wikipedia-sections-mean-tokens | bfe50e68735b7f483150fd1548ddb77e04b43fa8 | 2022-06-15T22:24:35.000Z | [
"pytorch",
"tf",
"bert",
"feature-extraction",
"arxiv:1908.10084",
"sentence-transformers",
"sentence-similarity",
"transformers",
"license:apache-2.0"
] | sentence-similarity | false | sentence-transformers | null | sentence-transformers/bert-base-wikipedia-sections-mean-tokens | 1,216 | null | sentence-transformers | ---
pipeline_tag: sentence-similarity
tags:
- sentence-transformers
- feature-extraction
- sentence-similarity
- transformers
license: apache-2.0
---
**⚠️ This model is deprecated. Please don't use it as it produces sentence embeddings of low quality. You can find recommended sentence embedding models here: [SBERT.net - Pretrained Models](https://www.sbert.net/docs/pretrained_models.html)**
# sentence-transformers/bert-base-wikipedia-sections-mean-tokens
This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
## Usage (Sentence-Transformers)
Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed:
```
pip install -U sentence-transformers
```
Then you can use the model like this:
```python
from sentence_transformers import SentenceTransformer
sentences = ["This is an example sentence", "Each sentence is converted"]
model = SentenceTransformer('sentence-transformers/bert-base-wikipedia-sections-mean-tokens')
embeddings = model.encode(sentences)
print(embeddings)
```
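The embeddings returned by `encode` can then be compared directly; a hedged follow-on sketch, assuming the library's `util.pytorch_cos_sim` helper, for scoring how similar two sentences are:
```python
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer('sentence-transformers/bert-base-wikipedia-sections-mean-tokens')
embeddings = model.encode(
    ["This is an example sentence", "Each sentence is converted"],
    convert_to_tensor=True,
)

# Cosine similarity between the two sentence embeddings
print(util.pytorch_cos_sim(embeddings[0], embeddings[1]))
```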
## Usage (HuggingFace Transformers)
Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: first, you pass your input through the transformer model, then you apply the right pooling operation on top of the contextualized word embeddings.
```python
from transformers import AutoTokenizer, AutoModel
import torch
#Mean Pooling - Take attention mask into account for correct averaging
def mean_pooling(model_output, attention_mask):
token_embeddings = model_output[0] #First element of model_output contains all token embeddings
input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9)
# Sentences we want sentence embeddings for
sentences = ['This is an example sentence', 'Each sentence is converted']
# Load model from HuggingFace Hub
tokenizer = AutoTokenizer.from_pretrained('sentence-transformers/bert-base-wikipedia-sections-mean-tokens')
model = AutoModel.from_pretrained('sentence-transformers/bert-base-wikipedia-sections-mean-tokens')
# Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
# Compute token embeddings
with torch.no_grad():
model_output = model(**encoded_input)
# Perform pooling. In this case, mean pooling.
sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
print("Sentence embeddings:")
print(sentence_embeddings)
```
## Evaluation Results
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name=sentence-transformers/bert-base-wikipedia-sections-mean-tokens)
## Full Model Architecture
```
SentenceTransformer(
(0): Transformer({'max_seq_length': 128, 'do_lower_case': False}) with Transformer model: BertModel
(1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
)
```
## Citing & Authors
This model was trained by [sentence-transformers](https://www.sbert.net/).
If you find this model helpful, feel free to cite our publication [Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks](https://arxiv.org/abs/1908.10084):
```bibtex
@inproceedings{reimers-2019-sentence-bert,
title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
author = "Reimers, Nils and Gurevych, Iryna",
booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
month = "11",
year = "2019",
publisher = "Association for Computational Linguistics",
url = "http://arxiv.org/abs/1908.10084",
}
``` | [
-0.0742964819073677,
-0.06661854684352875,
0.034854643046855927,
0.031661972403526306,
0.019662924110889435,
0.08822061121463776,
-0.021996546536684036,
0.06580702215433121,
0.018528146669268608,
-0.07051090151071548,
0.032876890152692795,
0.020499205216765404,
0.059683430939912796,
0.08993538469076157,
0.050609882920980453,
0.07331549376249313,
0.05344204977154732,
0.0557364784181118,
-0.074119433760643,
-0.12039944529533386,
0.13689593970775604,
0.11610768735408783,
0.05649910122156143,
-0.0006562906783074141,
0.004337960388511419,
0.06422359496355057,
-0.040868841111660004,
-0.030387762933969498,
0.0388067401945591,
0.027854422107338905,
0.012561612762510777,
-0.018034987151622772,
-0.018659207969903946,
0.08456018567085266,
0.026038050651550293,
0.0702734887599945,
0.01129833608865738,
0.0031729210168123245,
0.0019891110714524984,
-0.049739252775907516,
0.01052846759557724,
-0.021562490612268448,
-0.04603676125407219,
-0.006129485554993153,
0.045887358486652374,
-0.04041481390595436,
-0.09911195933818817,
-0.057952214032411575,
-0.012526402249932289,
-0.022535361349582672,
-0.09802877902984619,
0.00988233182579279,
0.03173622488975525,
0.07311321049928665,
-0.014140794053673744,
0.05020321160554886,
0.0008059352985583246,
-0.036728277802467346,
0.02394811250269413,
-0.1478806585073471,
-0.08681953698396683,
-0.023308290168642998,
0.03540172427892685,
-0.00015675883332733065,
-0.07627979665994644,
0.009273507632315159,
-0.022050587460398674,
0.010951593518257141,
0.03489229455590248,
0.02185489796102047,
-0.06589677184820175,
0.05242856219410896,
-0.038134682923555374,
-0.020367855206131935,
-0.04249543324112892,
-0.02434416487812996,
0.1046745702624321,
-0.024654923006892204,
0.038090091198682785,
-0.004926756024360657,
-0.009971116669476032,
-0.06093261018395424,
0.05768886208534241,
0.05792274698615074,
0.04182809963822365,
-0.03722568601369858,
0.02201877534389496,
-0.051229868084192276,
-0.026618706062436104,
-0.007168365642428398,
-0.06844501942396164,
-0.1402500420808792,
0.06539111584424973,
-0.029149701818823814,
0.016947347670793533,
0.01367233507335186,
0.002866714959964156,
-0.05571778491139412,
0.030178120359778404,
0.05393673852086067,
0.0367872528731823,
0.054250672459602356,
0.06158309057354927,
-0.08680542558431625,
-0.014936000108718872,
0.020876403898000717,
-0.046971168369054794,
0.018779942765831947,
0.03811988607048988,
-0.10157721489667892,
0.005106003023684025,
-0.01974497176706791,
-0.02452433854341507,
-0.05775425583124161,
0.054250188171863556,
-0.046059273183345795,
0.028044726699590683,
-0.02216554805636406,
-0.017369341105222702,
0.1004653051495552,
0.013070537708699703,
0.062072448432445526,
-0.04948282614350319,
0.040128838270902634,
-0.01281729806214571,
-0.041030172258615494,
0.009965136647224426,
1.2028466349979836e-33,
-0.0019097555195912719,
-0.015349350869655609,
0.006525760982185602,
-0.005955690052360296,
0.01179497130215168,
0.015918724238872528,
0.022417940199375153,
0.06950604170560837,
-0.08937201648950577,
-0.024487778544425964,
-0.06108798086643219,
0.021946363151073456,
-0.06366938352584839,
0.07830522209405899,
-0.0038822719361633062,
-0.008430667221546173,
-0.03612801060080528,
-0.014322089962661266,
0.07997952401638031,
0.009931771084666252,
0.026538081467151642,
0.02690713480114937,
-0.0012808676110580564,
-0.0671696737408638,
-0.07305959612131119,
-0.03476362302899361,
0.0859978124499321,
-0.08938008546829224,
-0.04231765866279602,
0.011520717293024063,
-0.09529019892215729,
0.032339148223400116,
-0.025724545121192932,
-0.01108434796333313,
-0.018997320905327797,
0.004350069910287857,
0.013914382085204124,
-0.02923710085451603,
-0.018921609967947006,
-0.07729668915271759,
-0.037439536303281784,
0.022254880517721176,
-0.035547271370887756,
-0.08842756599187851,
0.007972045801579952,
-0.005983311682939529,
0.05503359064459801,
-0.012037884443998337,
0.10737255215644836,
0.025011761114001274,
0.07913943380117416,
0.007987407967448235,
-0.00594283128157258,
-0.004525233991444111,
0.040413569658994675,
0.0017052687471732497,
0.07052096724510193,
0.04438639059662819,
0.08104874938726425,
-0.02204630896449089,
0.037107858806848526,
-0.03324900195002556,
0.03268849849700928,
0.007188462186604738,
0.08600364625453949,
-0.04667296260595322,
0.04667123779654503,
0.06545885652303696,
0.011553512886166573,
0.0699586421251297,
-0.03138881176710129,
0.021059546619653702,
-0.055242959409952164,
0.022321661934256554,
0.012105787172913551,
-0.008508951403200626,
-0.013558329083025455,
-0.07668010145425797,
-0.04552086442708969,
0.0829876959323883,
-0.02151981182396412,
-0.08301939815282822,
0.03438262268900871,
-0.06567293405532837,
0.004614518955349922,
-0.0015804998110979795,
0.046844352036714554,
-0.08820081502199173,
0.04958859086036682,
-0.049017082899808884,
0.04729947820305824,
-0.010864160023629665,
-0.005872843321412802,
0.07662501186132431,
0.03038022667169571,
-2.318463691312284e-33,
0.006659324746578932,
0.03759137913584709,
-0.08822499215602875,
0.05415496230125427,
-0.02918253280222416,
-0.07951238006353378,
0.034128326922655106,
0.11643006652593613,
0.00085928023327142,
-0.025047048926353455,
-0.05902618169784546,
-0.045174747705459595,
0.030598875135183334,
-0.06861097365617752,
0.09601059556007385,
0.07684709131717682,
-0.0455843023955822,
0.038518279790878296,
0.03335447981953621,
0.0676531121134758,
-0.0068887039087712765,
0.044565342366695404,
-0.10570599883794785,
0.05260875076055527,
-0.03598194941878319,
0.03436001017689705,
-0.03248298913240433,
0.0038822086062282324,
-0.026897024363279343,
-0.02684784308075905,
-0.03061140701174736,
0.010584265924990177,
-0.02211851254105568,
-0.03463899344205856,
-0.12700560688972473,
0.0032699534203857183,
-0.01137769315391779,
-0.040509529411792755,
0.03118155710399151,
-0.013387170620262623,
0.04659920185804367,
0.05337926372885704,
-0.028058916330337524,
-0.00859540794044733,
-0.024061419069767,
-0.001916814362630248,
-0.07539936900138855,
-0.08910121768712997,
0.039909251034259796,
-0.004035404417663813,
-0.04212423413991928,
0.015842437744140625,
-0.1320420503616333,
0.008393533527851105,
-0.06735634803771973,
-0.07954349368810654,
-0.01730949431657791,
-0.03349435701966286,
-0.07957294583320618,
-0.06849293410778046,
-0.04797779396176338,
-0.008464443497359753,
0.02808033861219883,
-0.05880502611398697,
0.04586457833647728,
-0.06790176033973694,
-0.00749806547537446,
0.045687898993492126,
-0.034690119326114655,
-0.03903336822986603,
-0.017183706164360046,
-0.008001372218132019,
0.05553552508354187,
0.048499513417482376,
0.02095135860145092,
-0.0335230678319931,
0.03420473262667656,
0.002677972661331296,
-0.013722128234803677,
-0.048755425959825516,
0.02824927493929863,
-0.03495943918824196,
0.00860528089106083,
-0.01373341865837574,
0.014232054352760315,
0.022490832954645157,
0.06087559461593628,
0.08606608211994171,
-0.029996847733855247,
0.02838907763361931,
-0.018245846033096313,
-0.01391110010445118,
-0.014225826598703861,
0.08779101818799973,
0.01924940198659897,
-5.103735745137783e-8,
-0.08324190229177475,
-0.021033285185694695,
-0.09558924287557602,
0.06418685615062714,
-0.08986534923315048,
-0.049352314323186874,
0.052063290029764175,
0.07264797389507294,
-0.07166140526533127,
-0.033423371613025665,
0.005449003539979458,
0.036263562738895416,
-0.10497495532035828,
0.01281902939081192,
-0.02457248419523239,
0.10797228664159775,
-0.05082179605960846,
0.043805889785289764,
0.03621818870306015,
-0.04130861535668373,
0.014348948374390602,
0.013979421928524971,
0.002677981276065111,
0.03265446797013283,
0.020269013941287994,
0.04481353983283043,
-0.016527261584997177,
0.0643574520945549,
0.01582331955432892,
0.0187265295535326,
-0.01625647582113743,
0.03165339305996895,
-0.07324285805225372,
-0.04523859918117523,
0.047020073980093,
0.05597442761063576,
0.02511996030807495,
-0.0774296298623085,
-0.0007385063800029457,
0.05156553164124489,
0.08291040360927582,
0.0680861547589302,
-0.08814716339111328,
0.0020117834210395813,
0.1299247145652771,
0.03962911292910576,
-0.0022852355614304543,
-0.0471719428896904,
0.03510573133826256,
0.0036880525294691324,
0.09413987398147583,
-0.07134480029344559,
-0.03635039180517197,
-0.0022648589219897985,
0.00494352076202631,
0.037518057972192764,
-0.022280821576714516,
-0.014477618969976902,
0.06325790286064148,
-0.04887405410408974,
0.03457491844892502,
0.08563990145921707,
0.09571491181850433,
-0.031771980226039886
] |
hf-internal-testing/tiny-random-data2vec-seq-class | 4c59e8c7dc5db8886fca7c12e9b380daefaf4aba | 2022-03-03T12:26:02.000Z | [
"pytorch",
"data2vec-audio",
"audio-classification",
"transformers"
] | audio-classification | false | hf-internal-testing | null | hf-internal-testing/tiny-random-data2vec-seq-class | 1,216 | null | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
philschmid/distilbert-base-multilingual-cased-sentiment-2 | 83ff874f93aacbba79642abfe2a274a3c874232b | 2022-01-24T15:08:50.000Z | [
"pytorch",
"tensorboard",
"distilbert",
"text-classification",
"dataset:amazon_reviews_multi",
"transformers",
"generated_from_trainer",
"license:apache-2.0",
"model-index"
] | text-classification | false | philschmid | null | philschmid/distilbert-base-multilingual-cased-sentiment-2 | 1,211 | 1 | transformers | ---
license: apache-2.0
tags:
- generated_from_trainer
datasets:
- amazon_reviews_multi
metrics:
- accuracy
- f1
model-index:
- name: distilbert-base-multilingual-cased-sentiment-2
results:
- task:
name: Text Classification
type: text-classification
dataset:
name: amazon_reviews_multi
type: amazon_reviews_multi
args: all_languages
metrics:
- name: Accuracy
type: accuracy
value: 0.7475666666666667
- name: F1
type: f1
value: 0.7475666666666667
---
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# distilbert-base-multilingual-cased-sentiment-2
This model is a fine-tuned version of [distilbert-base-multilingual-cased](https://huggingface.co/distilbert-base-multilingual-cased) on the amazon_reviews_multi dataset.
It achieves the following results on the evaluation set:
- Loss: 0.6067
- Accuracy: 0.7476
- F1: 0.7476
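A hedged sketch of loading this checkpoint for inference with the standard `transformers` pipeline (the example sentence is illustrative, and the label mapping is not documented in this card):
```python
from transformers import pipeline

# Sentiment classification with the fine-tuned multilingual checkpoint
classifier = pipeline(
    "text-classification",
    model="philschmid/distilbert-base-multilingual-cased-sentiment-2",
)
print(classifier("Das Produkt ist großartig, ich bin sehr zufrieden!"))
```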
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 0.00024
- train_batch_size: 16
- eval_batch_size: 16
- seed: 33
- distributed_type: sagemaker_data_parallel
- num_devices: 8
- total_train_batch_size: 128
- total_eval_batch_size: 128
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 500
- num_epochs: 3
- mixed_precision_training: Native AMP
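Roughly, these settings map onto the following `TrainingArguments` (a sketch only; the SageMaker data-parallel setup across 8 devices is configured outside these arguments and is not reproduced here):
```python
from transformers import TrainingArguments

# Approximate reconstruction of the hyperparameters listed above
training_args = TrainingArguments(
    output_dir="distilbert-base-multilingual-cased-sentiment-2",
    learning_rate=0.00024,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=16,
    seed=33,
    warmup_steps=500,
    lr_scheduler_type="linear",
    num_train_epochs=3,
    fp16=True,  # Native AMP mixed precision
)
```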
### Training results
| Training Loss | Epoch | Step | Validation Loss | Accuracy | F1 |
|:-------------:|:-----:|:-----:|:---------------:|:--------:|:------:|
| 0.6885 | 0.53 | 5000 | 0.6532 | 0.7217 | 0.7217 |
| 0.6411 | 1.07 | 10000 | 0.6348 | 0.7319 | 0.7319 |
| 0.6057 | 1.6 | 15000 | 0.6186 | 0.7387 | 0.7387 |
| 0.5844 | 2.13 | 20000 | 0.6236 | 0.7449 | 0.7449 |
| 0.549 | 2.67 | 25000 | 0.6067 | 0.7476 | 0.7476 |
### Framework versions
- Transformers 4.12.3
- Pytorch 1.9.1
- Datasets 1.15.1
- Tokenizers 0.10.3
| [
-0.06412733346223831,
-0.02739645540714264,
-0.04421296715736389,
0.007585793733596802,
0.03915782645344734,
0.008896146900951862,
-0.031957000494003296,
-0.054850317537784576,
0.022156957536935806,
-0.08799108862876892,
0.03872722387313843,
-0.09625851362943649,
0.02003989741206169,
-0.025771763175725937,
-0.013257560320198536,
0.0457068607211113,
0.04337455332279205,
-0.07053712010383606,
-0.12303285300731659,
-0.037188705056905746,
0.013091166503727436,
0.06462115794420242,
0.010333115234971046,
0.0712738186120987,
-0.08794054388999939,
0.011890701949596405,
-0.06463482230901718,
0.07871593534946442,
0.015572668984532356,
-0.06765702366828918,
0.05781438201665878,
0.08360490947961807,
0.001780764083378017,
0.058898113667964935,
-0.038473572582006454,
0.06612260639667511,
-0.06733881682157516,
-0.07661261409521103,
0.015548188239336014,
0.055092450231313705,
0.008641727268695831,
0.02960858680307865,
0.00683803204447031,
0.01770913600921631,
0.030375292524695396,
0.04855616018176079,
-0.0640375092625618,
-0.032230112701654434,
0.039497338235378265,
-0.010240579023957253,
-0.10428791493177414,
-0.027145512402057648,
0.06912881135940552,
-0.0514032319188118,
-0.010656220838427544,
-0.029904015362262726,
-0.011258882470428944,
0.01760769635438919,
-0.0024290650617331266,
-0.05714397132396698,
0.060256749391555786,
-0.03270954266190529,
0.012530969455838203,
0.006724692415446043,
-0.07639968395233154,
-0.006281846668571234,
-0.049824438989162445,
-0.041873086243867874,
0.0313575454056263,
-0.019154110923409462,
0.03675338998436928,
0.006617276463657618,
0.007831062190234661,
0.03853358328342438,
0.008461637422442436,
0.05883687734603882,
0.024411316961050034,
-0.02013913169503212,
0.06581787765026093,
-0.07107424736022949,
-0.07713697105646133,
-0.04477271810173988,
-0.008967556990683079,
-0.03218594565987587,
0.07890279591083527,
-0.055659905076026917,
0.0419461615383625,
-0.021146371960639954,
0.0013224633876234293,
-0.0006024684407748282,
-0.005563552025705576,
-0.05469438061118126,
0.04011214151978493,
0.04539032280445099,
-0.08397611975669861,
0.041586000472307205,
-0.02069154754281044,
0.04927409067749977,
-0.06125303730368614,
0.12350548058748245,
0.0020410565193742514,
0.0407186783850193,
0.02977817691862583,
0.004644287750124931,
-0.03326023742556572,
-0.013598675839602947,
-0.02785380370914936,
0.10569050908088684,
0.03985699638724327,
-0.10471297055482864,
0.028588421642780304,
0.04374834895133972,
-0.05778583884239197,
0.026221750304102898,
-0.034424956887960434,
0.08433470875024796,
-0.02973373793065548,
-0.02242748998105526,
0.05173150449991226,
0.1266765296459198,
-0.007154225837439299,
-0.032947342842817307,
0.022505490109324455,
-0.03415028005838394,
-0.036017704755067825,
-0.0491948164999485,
-0.047317370772361755,
3.2915292823863346e-33,
0.01435544528067112,
0.024622736498713493,
0.010548869147896767,
-0.025354629382491112,
0.02667665109038353,
-0.044274840503931046,
-0.10019540786743164,
0.045699432492256165,
-0.0766296461224556,
-0.041481271386146545,
0.011817513033747673,
0.06682978570461273,
-0.04792550951242447,
0.06288385391235352,
-0.040981270372867584,
-0.03172072395682335,
-0.01360603142529726,
0.00677560456097126,
0.0823444277048111,
0.0356268472969532,
0.18238164484500885,
0.02362184226512909,
0.0161827951669693,
-0.043671879917383194,
-0.05752464011311531,
0.07613125443458557,
0.07454626262187958,
-0.02253151126205921,
0.03927517309784889,
0.05181083455681801,
-0.039205409586429596,
-0.032709844410419464,
0.023726684972643852,
0.02172035165131092,
0.02874327264726162,
-0.04338724911212921,
-0.03328036889433861,
0.036207012832164764,
-0.02928779646754265,
-0.014211178757250309,
0.03408936411142349,
0.0229587834328413,
-0.034478213638067245,
-0.058427002280950546,
-0.01767830178141594,
0.08858068287372589,
0.05682750791311264,
0.036913130432367325,
0.032205402851104736,
0.007024945691227913,
-0.06015729531645775,
-0.04720499739050865,
0.1102047711610794,
-0.041521426290273666,
-0.05277950316667557,
-0.051069654524326324,
0.003907961770892143,
0.05187704786658287,
0.03698159381747246,
-0.058755286037921906,
-0.04743260145187378,
0.07117898017168045,
0.0045542330481112,
-0.004634235519915819,
0.0016588118160143495,
0.013042135164141655,
0.02160453051328659,
0.022147435694932938,
0.06820470094680786,
-0.002315129851922393,
-0.06064988300204277,
0.043517641723155975,
0.0511905774474144,
0.04093392193317413,
-0.010884948074817657,
-0.07475914061069489,
0.0015587062807753682,
-0.08082825690507889,
-0.01662227138876915,
0.023854751139879227,
-0.07663129270076752,
0.03129753842949867,
0.01707216538488865,
-0.06195278838276863,
-0.05739360675215721,
-0.01201708521693945,
0.01558509562164545,
-0.013050117529928684,
0.01904754713177681,
0.05923610180616379,
-0.04954614117741585,
0.10173215717077255,
-0.037678249180316925,
-0.02424975112080574,
0.03613407164812088,
-3.6250761717741434e-33,
-0.07317279279232025,
-0.06420567631721497,
-0.05497691407799721,
0.07135926187038422,
-0.003734780475497246,
-0.011676370166242123,
-0.01304203737527132,
0.14522001147270203,
0.03843890503048897,
-0.0415179580450058,
0.08920001238584518,
-0.04230731353163719,
-0.017911778762936592,
-0.017999183386564255,
0.01740264520049095,
0.06475919485092163,
-0.05641838163137436,
0.007048744708299637,
-0.023635055869817734,
0.04992605745792389,
0.01452072337269783,
0.07605035603046417,
-0.11131314188241959,
0.024987027049064636,
0.014463499188423157,
0.060833241790533066,
0.025533277541399002,
0.0033654733560979366,
0.04034954681992531,
-0.0011341627687215805,
0.02817405015230179,
-0.025279175490140915,
-0.05102595314383507,
0.06943805515766144,
-0.10311828553676605,
-0.0077530816197395325,
0.006773397326469421,
-0.04424726217985153,
0.008019739761948586,
0.13374124467372894,
0.002741070231422782,
0.03966916725039482,
-0.06951577216386795,
0.0051226019859313965,
-0.01844482310116291,
0.0053086550906300545,
-0.005149288102984428,
-0.029567720368504524,
0.009433251805603504,
-0.04499993100762367,
-0.012194247916340828,
-0.055861931294202805,
-0.0555160790681839,
0.012692546471953392,
-0.01127906795591116,
-0.055750295519828796,
0.03115040995180607,
-0.012924733571708202,
-0.031438447535037994,
-0.004323114175349474,
-0.049254897981882095,
-0.002393267350271344,
-0.046628862619400024,
-0.0543401837348938,
0.035001739859580994,
-0.043082594871520996,
-0.0042323460802435875,
0.005260035861283541,
0.030260106548666954,
0.03300393745303154,
-0.01810523122549057,
0.08371207118034363,
-0.01581626757979393,
-0.02767869085073471,
0.0188753604888916,
0.06419381499290466,
0.012063801288604736,
0.008317090570926666,
-0.0018749198643490672,
-0.027051938697695732,
-0.04823499917984009,
-0.03576452285051346,
0.05201494321227074,
0.045214440673589706,
0.10054393112659454,
-0.02438167668879032,
0.020562997087836266,
0.12701569497585297,
-0.012076285667717457,
-0.015256155282258987,
0.0030303040985018015,
0.04016958922147751,
0.0029339699540287256,
0.09924037009477615,
-0.005415000952780247,
-5.613857823050239e-8,
-0.0265464149415493,
-0.09504766017198563,
-0.08702117204666138,
0.10576657950878143,
0.034915633499622345,
-0.025366226211190224,
0.0018432445358484983,
0.04195689409971237,
-0.027059657499194145,
0.005471826530992985,
0.029978111386299133,
0.041227634996175766,
-0.17199330031871796,
-0.027854209765791893,
-0.003464140696451068,
-0.03580097854137421,
-0.025025231763720512,
0.1586155742406845,
0.007081198040395975,
-0.02843678742647171,
0.0664098858833313,
-0.02846994437277317,
0.03779895603656769,
-0.09227927774190903,
0.035155296325683594,
-0.03373758867383003,
-0.026734348386526108,
0.006847014185041189,
-0.0714191198348999,
-0.008799434639513493,
0.006637046113610268,
-0.0045206621289253235,
-0.0700894296169281,
-0.07993738353252411,
0.013516748324036598,
0.06108507141470909,
-0.02406369335949421,
0.026708872988820076,
0.022334638983011246,
0.00884949043393135,
0.056654565036296844,
0.07084130495786667,
-0.15177364647388458,
-0.0032171562779694796,
0.09436272084712982,
-0.026845159009099007,
-0.03322950378060341,
-0.024586360901594162,
0.03169313445687294,
0.08071984350681305,
0.0132288858294487,
-0.05986462160944939,
-0.006103911437094212,
0.08082349598407745,
0.01714744046330452,
0.016044894233345985,
-0.03172920271754265,
-0.1067027598619461,
-0.024940097704529762,
0.0005187976639717817,
0.0779186487197876,
-0.06276636570692062,
0.008543743751943111,
0.01579544134438038
] |
speechbrain/spkrec-xvect-voxceleb | e2cc27f853f99bd5d539432f0cba3f124c059f71 | 2022-06-25T02:56:40.000Z | [
"en",
"dataset:voxceleb",
"arxiv:2106.04624",
"speechbrain",
"embeddings",
"Speaker",
"Verification",
"Identification",
"pytorch",
"xvectors",
"TDNN",
"audio-classification",
"license:apache-2.0"
] | audio-classification | false | speechbrain | null | speechbrain/spkrec-xvect-voxceleb | 1,207 | 4 | speechbrain | ---
language: "en"
thumbnail:
tags:
- embeddings
- Speaker
- Verification
- Identification
- pytorch
- xvectors
- TDNN
- speechbrain
- audio-classification
license: "apache-2.0"
datasets:
- voxceleb
metrics:
- EER
- min_dct
widget:
- example_title: VoxCeleb Speaker id10003
src: https://cdn-media.huggingface.co/speech_samples/VoxCeleb1_00003.wav
- example_title: VoxCeleb Speaker id10004
src: https://cdn-media.huggingface.co/speech_samples/VoxCeleb_00004.wav
---
<iframe src="https://ghbtns.com/github-btn.html?user=speechbrain&repo=speechbrain&type=star&count=true&size=large&v=2" frameborder="0" scrolling="0" width="170" height="30" title="GitHub"></iframe>
<br/><br/>
# Speaker Verification with xvector embeddings on Voxceleb
This repository provides all the necessary tools to extract speaker embeddings with a pretrained TDNN model using SpeechBrain.
The system is trained on the Voxceleb1 + Voxceleb2 training data.
For a better experience, we encourage you to learn more about
[SpeechBrain](https://speechbrain.github.io). The given model performance on Voxceleb1-test set (Cleaned) is:
| Release | EER(%) |
|:-------------:|:--------------:|
| 05-03-21 | 3.2 |
## Pipeline description
This system is composed of a TDNN model coupled with statistical pooling. The system is trained with Categorical Cross-Entropy Loss.
## Install SpeechBrain
First of all, please install SpeechBrain with the following command:
```
pip install speechbrain
```
Please notice that we encourage you to read our tutorials and learn more about
[SpeechBrain](https://speechbrain.github.io).
### Compute your speaker embeddings
```python
import torchaudio
from speechbrain.pretrained import EncoderClassifier
classifier = EncoderClassifier.from_hparams(source="speechbrain/spkrec-xvect-voxceleb", savedir="pretrained_models/spkrec-xvect-voxceleb")
signal, fs = torchaudio.load('tests/samples/ASR/spk1_snt1.wav')
embeddings = classifier.encode_batch(signal)
```
The system is trained with recordings sampled at 16kHz (single channel).
The code will automatically normalize your audio (i.e., resampling + mono channel selection) when calling *classify_file* if needed. Make sure your input tensor is compliant with the expected sampling rate if you use *encode_batch* and *classify_batch*.
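If your recording is not already 16 kHz mono, a hedged sketch of preparing it before calling `encode_batch` (the file path is a placeholder; torchaudio's standard `Resample` transform is assumed):
```python
import torchaudio
from speechbrain.pretrained import EncoderClassifier

classifier = EncoderClassifier.from_hparams(source="speechbrain/spkrec-xvect-voxceleb", savedir="pretrained_models/spkrec-xvect-voxceleb")

# Load an arbitrary recording (placeholder path), mix down to mono, resample to 16 kHz
signal, fs = torchaudio.load('your_audio.wav')
signal = signal.mean(dim=0, keepdim=True)
if fs != 16000:
    signal = torchaudio.transforms.Resample(orig_freq=fs, new_freq=16000)(signal)

embeddings = classifier.encode_batch(signal)
```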
### Inference on GPU
To perform inference on the GPU, add `run_opts={"device":"cuda"}` when calling the `from_hparams` method.
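For example (the same call as above, with the device option added):
```python
from speechbrain.pretrained import EncoderClassifier

classifier = EncoderClassifier.from_hparams(
    source="speechbrain/spkrec-xvect-voxceleb",
    savedir="pretrained_models/spkrec-xvect-voxceleb",
    run_opts={"device": "cuda"},
)
```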
### Training
The model was trained with SpeechBrain (aa018540).
To train it from scratch, follow these steps:
1. Clone SpeechBrain:
```bash
git clone https://github.com/speechbrain/speechbrain/
```
2. Install it:
```
cd speechbrain
pip install -r requirements.txt
pip install -e .
```
3. Run Training:
```
cd recipes/VoxCeleb/SpeakerRec/
python train_speaker_embeddings.py hparams/train_x_vectors.yaml --data_folder=your_data_folder
```
You can find our training results (models, logs, etc) [here](https://drive.google.com/drive/folders/1RtCBJ3O8iOCkFrJItCKT9oL-Q1MNCwMH?usp=sharing).
### Limitations
The SpeechBrain team does not provide any warranty on the performance achieved by this model when used on other datasets.
#### Referencing xvectors
```bibtex
@inproceedings{DBLP:conf/odyssey/SnyderGMSPK18,
author = {David Snyder and
Daniel Garcia{-}Romero and
Alan McCree and
Gregory Sell and
Daniel Povey and
Sanjeev Khudanpur},
title = {Spoken Language Recognition using X-vectors},
booktitle = {Odyssey 2018},
pages = {105--111},
year = {2018},
}
```
# **Citing SpeechBrain**
Please, cite SpeechBrain if you use it for your research or business.
```bibtex
@misc{speechbrain,
title={{SpeechBrain}: A General-Purpose Speech Toolkit},
author={Mirco Ravanelli and Titouan Parcollet and Peter Plantinga and Aku Rouhe and Samuele Cornell and Loren Lugosch and Cem Subakan and Nauman Dawalatabad and Abdelwahab Heba and Jianyuan Zhong and Ju-Chieh Chou and Sung-Lin Yeh and Szu-Wei Fu and Chien-Feng Liao and Elena Rastorgueva and François Grondin and William Aris and Hwidong Na and Yan Gao and Renato De Mori and Yoshua Bengio},
year={2021},
eprint={2106.04624},
archivePrefix={arXiv},
primaryClass={eess.AS},
note={arXiv:2106.04624}
}
```
| [
-0.09488529711961746,
-0.044945888221263885,
0.014257253147661686,
-0.08726903051137924,
0.057796455919742584,
0.016820179298520088,
-0.037697069346904755,
-0.09717942029237747,
0.01618233323097229,
-0.08458553999662399,
-0.001929133664816618,
-0.10699016600847244,
-0.05431429669260979,
-0.040719203650951385,
-0.0676521435379982,
-0.023074837401509285,
-0.003365436103194952,
0.01635824143886566,
-0.034658271819353104,
-0.0034264433197677135,
-0.005765631329268217,
0.09426253288984299,
0.06955762207508087,
-0.03316236287355423,
0.10639186203479767,
0.032886799424886703,
-0.0271158367395401,
0.04423290491104126,
0.003067872952669859,
-0.04538654536008835,
0.12523412704467773,
0.005959094036370516,
0.1049562469124794,
0.01934298872947693,
-0.022238196805119514,
0.007848531939089298,
-0.043387509882450104,
-0.04334903508424759,
-0.005132307298481464,
0.02468658611178398,
0.006428641267120838,
0.031025700271129608,
0.017000501975417137,
-0.043738361448049545,
-0.11449597030878067,
-0.012593556195497513,
-0.08189940452575684,
-0.05727490782737732,
-0.04305961728096008,
0.020793819800019264,
-0.04031550884246826,
-0.10795342922210693,
0.04486280679702759,
0.05375749245285988,
-0.04925771802663803,
0.010094939731061459,
0.028734436258673668,
0.08562664687633514,
0.1124819964170456,
0.02565518580377102,
0.026303913444280624,
-0.02402195893228054,
0.02089858241379261,
-0.01979043148458004,
-0.01965409144759178,
0.011378857307136059,
-0.06926009804010391,
0.008689574897289276,
-0.06390205025672913,
0.02707909420132637,
-0.01406068541109562,
0.048768673092126846,
-0.013357175514101982,
0.029973391443490982,
0.0824294164776802,
-0.01795870251953602,
0.03198603540658951,
-0.01107912790030241,
0.07104310393333435,
-0.05071323737502098,
0.004817464854568243,
-0.006597996223717928,
0.02877172827720642,
-0.02548687718808651,
0.08384523540735245,
-0.021866977214813232,
-0.010393848642706871,
0.008552992716431618,
-0.09688371419906616,
-0.05250433832406998,
-0.06583723425865173,
-0.005212131422013044,
-0.021864555776119232,
0.05719389021396637,
-0.010763406753540039,
0.033392515033483505,
0.011688998900353909,
0.02377314306795597,
-0.01852460391819477,
0.06557837128639221,
0.0007421778282150626,
-0.05283982679247856,
0.0170564204454422,
-0.01983523741364479,
-0.009191327728331089,
-0.12802454829216003,
-0.004819110035896301,
0.08006300032138824,
-0.00008222908218158409,
-0.02560485154390335,
0.02190384268760681,
-0.028656428679823875,
-0.04129234328866005,
-0.014631769619882107,
0.03864096850156784,
0.004676555749028921,
-0.03155887871980667,
0.0229006577283144,
0.06641140580177307,
-0.03832836076617241,
-0.032834362238645554,
0.005078254267573357,
-0.048977985978126526,
-0.015142159536480904,
-0.03697046637535095,
-0.04108826443552971,
-0.08130072802305222,
7.781357506000909e-33,
0.07583192735910416,
-0.02269837073981762,
-0.0773450955748558,
-0.002394625451415777,
0.05585293099284172,
-0.04345198720693588,
-0.10887841880321503,
0.018018342554569244,
-0.0033183402847498655,
-0.026640646159648895,
-0.010918313637375832,
0.004754467401653528,
-0.0511198490858078,
-0.003925382159650326,
-0.06704501062631607,
-0.019673191010951996,
-0.08517797291278839,
0.0290386900305748,
-0.07339978218078613,
-0.02371635101735592,
0.016157623380422592,
0.022615130990743637,
0.013962985016405582,
0.0833086147904396,
0.008758069016039371,
0.03393411636352539,
0.04921036958694458,
-0.09786432236433029,
0.021239247173070908,
0.04842723533511162,
-0.06846773624420166,
-0.041563261300325394,
0.05364345386624336,
-0.03793361410498619,
0.020486317574977875,
0.04522788152098656,
0.018751991912722588,
0.053097035735845566,
-0.0606088824570179,
-0.11543988436460495,
0.022171661257743835,
-0.025473270565271378,
-0.04085905849933624,
-0.006726771593093872,
-0.03561181202530861,
0.026627641171216965,
0.022771326825022697,
0.05803241953253746,
0.04731915146112442,
0.025233294814825058,
-0.02019953913986683,
0.020403265953063965,
-0.012742898426949978,
-0.0149483447894454,
-0.0248195119202137,
-0.0543326735496521,
0.03968316316604614,
0.10284632444381714,
0.015389085747301579,
-0.06641760468482971,
0.048094697296619415,
0.022658279165625572,
0.06743258982896805,
-0.11060189455747604,
0.004013839177787304,
0.006036937702447176,
-0.04861990734934807,
0.010991762392222881,
0.011285630986094475,
-0.03842722624540329,
-0.02406311221420765,
-0.0035145983565598726,
0.08134754002094269,
0.0641142874956131,
-0.07225505262613297,
-0.01978534646332264,
0.03306828811764717,
-0.028864704072475433,
-0.040974345058202744,
0.04639206454157829,
-0.03468673676252365,
0.01452257763594389,
-0.011473337188363075,
-0.06497013568878174,
-0.00957016833126545,
-0.07169535011053085,
0.04193708673119545,
-0.09208496659994125,
-0.05257204920053482,
0.05785040557384491,
-0.05584287643432617,
0.038410499691963196,
-0.08287161588668823,
-0.02396022528409958,
-0.06992440670728683,
-8.852817668039547e-33,
0.06385363638401031,
0.11134805530309677,
0.01503072865307331,
0.03282225504517555,
0.04727659001946449,
0.046923041343688965,
0.10035459697246552,
0.12856435775756836,
0.046533990651369095,
-0.0402209535241127,
0.10712776333093643,
-0.05605867877602577,
0.019950583577156067,
-0.0711161196231842,
0.04570595920085907,
0.009285337291657925,
-0.05693778395652771,
-0.005511729046702385,
0.0009952830150723457,
0.06221219152212143,
0.019333137199282646,
0.055105920881032944,
-0.03418252244591713,
0.11178276687860489,
-0.08033236116170883,
-0.005503163672983646,
-0.015600111335515976,
0.03797386214137077,
0.014813493005931377,
-0.023648951202630997,
-0.0004123204271309078,
0.026967091485857964,
-0.1498696208000183,
-0.0055410913191735744,
-0.003820115001872182,
0.02763577178120613,
0.03579431772232056,
-0.02453797124326229,
-0.007802858017385006,
-0.015254783444106579,
0.06828465312719345,
0.05205598846077919,
-0.06477295607328415,
-0.007069378159940243,
0.04465092346072197,
-0.03736985847353935,
0.04281029850244522,
0.04019961506128311,
-0.04225675389170647,
0.014836736023426056,
0.04022826626896858,
-0.036577485501766205,
0.047108616679906845,
0.008723629638552666,
-0.014926920644938946,
0.005510590970516205,
-0.0049307579174637794,
0.0322318859398365,
0.01828746870160103,
0.016817517578601837,
-0.03316516429185867,
-0.027372807264328003,
-0.07252247631549835,
-0.07173369824886322,
-0.010352483950555325,
0.0612829327583313,
-0.017466751858592033,
0.05736614391207695,
0.06452424079179764,
0.015971992164850235,
0.015596016310155392,
0.0025974467862397432,
0.022572055459022522,
-0.024146582931280136,
-0.02515592612326145,
0.01898331753909588,
-0.02439848892390728,
-0.033064380288124084,
-0.01585247926414013,
-0.10198666155338287,
-0.027021808549761772,
0.062422797083854675,
0.09744429588317871,
-0.004408076871186495,
0.11840730905532837,
0.10344146192073822,
0.017343347892165184,
0.04376290738582611,
-0.03490375727415085,
0.03716321662068367,
-0.0361543744802475,
0.04362422600388527,
0.04640357196331024,
0.07792292535305023,
0.04603111371397972,
-5.789900825448058e-8,
-0.04505593329668045,
0.056243523955345154,
-0.012415952049195766,
-0.08628403395414352,
-0.017677485942840576,
-0.09008848667144775,
0.0423712395131588,
-0.001987036783248186,
0.0031854596454650164,
-0.04293454438447952,
0.09047024697065353,
-0.026622971519827843,
-0.07152514159679413,
0.05423997342586517,
-0.03389216586947441,
-0.008381994441151619,
-0.147461399435997,
0.1411079317331314,
-0.025537781417369843,
-0.09196145832538605,
-0.016588816419243813,
0.011755850166082382,
0.09600049257278442,
0.03222271427512169,
0.01887321285903454,
0.0025313219521194696,
-0.020695488899946213,
0.028437281027436256,
-0.048459768295288086,
-0.06952391564846039,
-0.024431519210338593,
0.09695464372634888,
-0.01608348824083805,
-0.09407283365726471,
0.03920014575123787,
0.08623959869146347,
-0.12094365805387497,
-0.017913708463311195,
-0.03048686310648918,
0.04763294383883476,
0.01639738120138645,
-0.018144117668271065,
-0.07912079989910126,
0.001364399096928537,
0.02263762056827545,
0.011047079227864742,
0.07081077992916107,
0.012724733911454678,
0.016802657395601273,
0.0008802540833130479,
-0.01161870639771223,
0.006521156057715416,
-0.029166094958782196,
0.0033754499163478613,
0.030532440170645714,
0.004751683212816715,
-0.04806637763977051,
0.09974117577075958,
0.04596656188368797,
0.01815657503902912,
0.008536871522665024,
0.02762649580836296,
0.003443988272920251,
-0.014440351165831089
] |
clue/roberta_chinese_clue_tiny | e51239963f4ff728b1696180a9ae86ec1d3aeff4 | 2021-05-20T15:27:44.000Z | [
"pytorch",
"jax",
"roberta",
"transformers"
] | null | false | clue | null | clue/roberta_chinese_clue_tiny | 1,204 | 1 | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
xlm-mlm-100-1280 | dafb8ab3a39720dcdf0687658c7fbd27e45bc071 | 2022-07-22T08:09:19.000Z | [
"pytorch",
"tf",
"xlm",
"fill-mask",
"multilingual",
"en",
"es",
"fr",
"de",
"zh",
"ru",
"pt",
"it",
"ar",
"ja",
"id",
"tr",
"nl",
"pl",
"fa",
"vi",
"sv",
"ko",
"he",
"ro",
"no",
"hi",
"uk",
"cs",
"fi",
"hu",
"th",
"da",
"ca",
"el",
"bg",
"sr",
"ms",
"bn",
"hr",
"sl",
"az",
"sk",
"eo",
"ta",
"sh",
"lt",
"et",
"ml",
"la",
"bs",
"sq",
"arz",
"af",
"ka",
"mr",
"eu",
"tl",
"ang",
"gl",
"nn",
"ur",
"kk",
"be",
"hy",
"te",
"lv",
"mk",
"als",
"is",
"wuu",
"my",
"sco",
"mn",
"ceb",
"ast",
"cy",
"kn",
"br",
"an",
"gu",
"bar",
"uz",
"lb",
"ne",
"si",
"war",
"jv",
"ga",
"oc",
"ku",
"sw",
"nds",
"ckb",
"ia",
"yi",
"fy",
"scn",
"gan",
"tt",
"am",
"arxiv:1901.07291",
"arxiv:1911.02116",
"arxiv:1910.09700",
"transformers",
"license:cc-by-nc-4.0",
"autotrain_compatible"
] | fill-mask | false | null | null | xlm-mlm-100-1280 | 1,201 | null | transformers | ---
language:
- multilingual
- en
- es
- fr
- de
- zh
- ru
- pt
- it
- ar
- ja
- id
- tr
- nl
- pl
- fa
- vi
- sv
- ko
- he
- ro
- no
- hi
- uk
- cs
- fi
- hu
- th
- da
- ca
- el
- bg
- sr
- ms
- bn
- hr
- sl
- az
- sk
- eo
- ta
- sh
- lt
- et
- ml
- la
- bs
- sq
- arz
- af
- ka
- mr
- eu
- tl
- ang
- gl
- nn
- ur
- kk
- be
- hy
- te
- lv
- mk
- als
- is
- wuu
- my
- sco
- mn
- ceb
- ast
- cy
- kn
- br
- an
- gu
- bar
- uz
- lb
- ne
- si
- war
- jv
- ga
- oc
- ku
- sw
- nds
- ckb
- ia
- yi
- fy
- scn
- gan
- tt
- am
license: cc-by-nc-4.0
---
# xlm-mlm-100-1280
# Table of Contents
1. [Model Details](#model-details)
2. [Uses](#uses)
3. [Bias, Risks, and Limitations](#bias-risks-and-limitations)
4. [Training](#training)
5. [Evaluation](#evaluation)
6. [Environmental Impact](#environmental-impact)
7. [Technical Specifications](#technical-specifications)
8. [Citation](#citation)
9. [Model Card Authors](#model-card-authors)
10. [How To Get Started With the Model](#how-to-get-started-with-the-model)
# Model Details
xlm-mlm-100-1280 is the XLM model, which was proposed in [Cross-lingual Language Model Pretraining](https://arxiv.org/abs/1901.07291) by Guillaume Lample and Alexis Conneau, trained on Wikipedia text in 100 languages. The model is a transformer pretrained using a masked language modeling (MLM) objective.
## Model Description
- **Developed by:** See [associated paper](https://arxiv.org/abs/1901.07291) and [GitHub Repo](https://github.com/facebookresearch/XLM)
- **Model type:** Language model
- **Language(s) (NLP):** 100 languages, see [GitHub Repo](https://github.com/facebookresearch/XLM#the-17-and-100-languages) for full list.
- **License:** CC-BY-NC-4.0
- **Related Models:** [xlm-mlm-17-1280](https://huggingface.co/xlm-mlm-17-1280)
- **Resources for more information:**
- [Associated paper](https://arxiv.org/abs/1901.07291)
- [GitHub Repo](https://github.com/facebookresearch/XLM#the-17-and-100-languages)
- [Hugging Face Multilingual Models for Inference docs](https://huggingface.co/docs/transformers/v4.20.1/en/multilingual#xlm-with-language-embeddings)
# Uses
## Direct Use
The model is a language model that can be used for masked language modeling.
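As a rough illustration (not part of the original card), a fill-mask query could look like the sketch below; the sentence is arbitrary and language embeddings are omitted for brevity (see the multilingual inference docs linked above for how to pass them explicitly).
```python
# Sketch: masked language modeling with this checkpoint
import torch
from transformers import XLMTokenizer, XLMWithLMHeadModel

tokenizer = XLMTokenizer.from_pretrained("xlm-mlm-100-1280")
model = XLMWithLMHeadModel.from_pretrained("xlm-mlm-100-1280")

text = f"Paris is the {tokenizer.mask_token} of France."
inputs = tokenizer(text, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits

# Highest-scoring token for the masked position
mask_index = (inputs["input_ids"] == tokenizer.mask_token_id).nonzero(as_tuple=True)[1]
print(tokenizer.decode(logits[0, mask_index].argmax(dim=-1)))
```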
## Downstream Use
To learn more about this task and potential downstream uses, see the Hugging Face [fill mask docs](https://huggingface.co/tasks/fill-mask) and the [Hugging Face Multilingual Models for Inference](https://huggingface.co/docs/transformers/v4.20.1/en/multilingual#xlm-with-language-embeddings) docs. Also see the [associated paper](https://arxiv.org/abs/1901.07291).
## Out-of-Scope Use
The model should not be used to intentionally create hostile or alienating environments for people.
# Bias, Risks, and Limitations
Significant research has explored bias and fairness issues with language models (see, e.g., [Sheng et al. (2021)](https://aclanthology.org/2021.acl-long.330.pdf) and [Bender et al. (2021)](https://dl.acm.org/doi/pdf/10.1145/3442188.3445922)).
## Recommendations
Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model.
# Training
This model is the XLM model trained on Wikipedia text in 100 languages. The preprocessing included tokenization with byte-pair-encoding. See the [GitHub repo](https://github.com/facebookresearch/XLM#the-17-and-100-languages) and the [associated paper](https://arxiv.org/pdf/1911.02116.pdf) for further details on the training data and training procedure.
[Conneau et al. (2020)](https://arxiv.org/pdf/1911.02116.pdf) report that this model has 16 layers, 1280 hidden states, 16 attention heads, and the dimension of the feed-forward layer is 1520. The vocabulary size is 200k and the total number of parameters is 570M (see Table 7).
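A quick way to sanity-check these numbers against the released checkpoint is to inspect its configuration (a sketch; the attribute names follow the XLM config in Transformers):
```python
# Sketch: read the architecture hyperparameters from the Hub config
from transformers import AutoConfig

config = AutoConfig.from_pretrained("xlm-mlm-100-1280")
print(config.n_layers, config.emb_dim, config.n_heads, config.vocab_size)
```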
# Evaluation
## Testing Data, Factors & Metrics
The model developers evaluated the model on the XNLI cross-lingual classification task (see the [XNLI data card](https://huggingface.co/datasets/xnli) for more details on XNLI) using the metric of test accuracy. See the [associated paper](https://arxiv.org/pdf/1911.02116.pdf) for further details on the testing data, factors and metrics.
## Results
For xlm-mlm-100-1280, the test accuracy on the XNLI cross-lingual classification task in English (en), Spanish (es), German (de), Arabic (ar), Chinese (zh) and Urdu (ur) are:
|Language| en | es | de | ar | zh | ur |
|:------:|:--:|:---:|:--:|:--:|:--:|:--:|
|Accuracy|83.7|76.6|73.6|67.4|71.7|62.9|
See the [GitHub repo](https://github.com/facebookresearch/XLM#ii-cross-lingual-language-model-pretraining-xlm) for further details.
# Environmental Impact
Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
- **Hardware Type:** More information needed
- **Hours used:** More information needed
- **Cloud Provider:** More information needed
- **Compute Region:** More information needed
- **Carbon Emitted:** More information needed
# Technical Specifications
[Conneau et al. (2020)](https://arxiv.org/pdf/1911.02116.pdf) report that this model has 16 layers, 1280 hidden states, 16 attention heads, and the dimension of the feed-forward layer is 1520. The vocabulary size is 200k and the total number of parameters is 570M (see Table 7).
# Citation
**BibTeX:**
```bibtex
@article{lample2019cross,
title={Cross-lingual language model pretraining},
author={Lample, Guillaume and Conneau, Alexis},
journal={arXiv preprint arXiv:1901.07291},
year={2019}
}
```
**APA:**
- Lample, G., & Conneau, A. (2019). Cross-lingual language model pretraining. arXiv preprint arXiv:1901.07291.
# Model Card Authors
This model card was written by the team at Hugging Face.
# How to Get Started with the Model
More information needed. See the [ipython notebook](https://github.com/facebookresearch/XLM/blob/main/generate-embeddings.ipynb) in the associated [GitHub repo](https://github.com/facebookresearch/XLM#the-17-and-100-languages) for examples. | [
-0.0750420093536377,
0.05454452708363533,
-0.05692404508590698,
-0.09324335306882858,
0.04620163142681122,
0.05367930606007576,
0.05747755989432335,
-0.0067024873569607735,
0.04026108235120773,
0.047558873891830444,
0.06501448899507523,
-0.04690564051270485,
0.07669659703969955,
-0.0626855418086052,
-0.09417912364006042,
-0.02164645679295063,
-0.1233036071062088,
0.09319527447223663,
-0.0391332283616066,
-0.005572226829826832,
-0.010699308477342129,
-0.009082511998713017,
-0.0054659852758049965,
0.026183174923062325,
0.02518809400498867,
0.08413999527692795,
-0.05553726851940155,
0.09259431809186935,
-0.008154178969562054,
-0.14491279423236847,
0.006307149305939674,
0.11132683604955673,
0.11598086357116699,
0.020360402762889862,
0.08587025105953217,
0.002217015717178583,
-0.06209668889641762,
-0.04012957960367203,
0.041356414556503296,
0.021430538967251778,
0.04957368224859238,
-0.07292717695236206,
0.035019002854824066,
-0.019550342112779617,
0.08451388776302338,
-0.0013761306181550026,
-0.0718410462141037,
-0.003989167045801878,
0.05497518926858902,
0.0014217488933354616,
-0.11437398195266724,
0.057667434215545654,
-0.07307306677103043,
0.016177723184227943,
-0.04642042517662048,
-0.057769227772951126,
0.0053683072328567505,
0.01246071606874466,
0.05341145023703575,
-0.04026411846280098,
-0.04231753572821617,
0.011959259398281574,
-0.06696334481239319,
0.06448403000831604,
-0.008581424131989479,
-0.028369160369038582,
0.018161147832870483,
0.04202736169099808,
-0.08320166170597076,
0.08108322322368622,
0.014995520934462547,
-0.031138962134718895,
-0.0513252392411232,
0.12144310027360916,
-0.06265556067228317,
0.06165732815861702,
-0.012721666134893894,
-0.035405032336711884,
0.02076162025332451,
-0.059823982417583466,
-0.05893062800168991,
0.051965683698654175,
0.0734756663441658,
-0.0537378154695034,
-0.054309114813804626,
0.029616830870509148,
-0.023797372356057167,
0.02907317690551281,
0.05705355107784271,
-0.06478746980428696,
-0.003552482696250081,
0.009585239924490452,
0.03549788147211075,
0.04326090216636658,
-0.08576589822769165,
0.03802761808037758,
0.027945436537265778,
0.04586254805326462,
-0.010591728612780571,
0.10784520208835602,
0.06425154954195023,
0.014577754773199558,
0.00992968212813139,
-0.04911224916577339,
-0.1287938952445984,
-0.031670182943344116,
0.03523346409201622,
0.06912773847579956,
0.031971048563718796,
-0.029651284217834473,
-0.04715092107653618,
-0.004762898664921522,
0.0034040797036141157,
-0.135813906788826,
-0.021095024421811104,
0.010538420639932156,
-0.05835152789950371,
-0.03642024099826813,
0.06439001858234406,
0.018881814554333687,
-0.07878945022821426,
-0.0715775415301323,
-0.09580269455909729,
-0.013511139899492264,
-0.07783778011798859,
-0.038578517735004425,
0.04222115874290466,
-5.6630064832237575e-33,
-0.051170382648706436,
-0.022130215540528297,
-0.019719701260328293,
0.001893206499516964,
0.05921352282166481,
-0.039913177490234375,
0.001709049683995545,
-0.048770587891340256,
-0.11314957588911057,
0.012141993269324303,
-0.05310573801398277,
0.015556512400507927,
-0.059068452566862106,
0.007526880595833063,
0.003661336610093713,
-0.020472990348935127,
0.05212968960404396,
-0.0027800099924206734,
-0.07479126751422882,
0.0098330769687891,
0.03670976310968399,
0.022335423156619072,
0.06325455754995346,
0.0209805890917778,
-0.0413544736802578,
0.030521351844072342,
0.025117501616477966,
-0.07513579726219177,
0.056763578206300735,
0.04510336369276047,
0.02315266989171505,
-0.004512956365942955,
-0.06471367925405502,
-0.045223116874694824,
-0.041407953947782516,
-0.03360060602426529,
-0.025902090594172478,
0.015942659229040146,
-0.033103082329034805,
0.002414661692455411,
-0.04418451339006424,
0.010454199276864529,
-0.0245972853153944,
-0.009477350860834122,
0.026896705850958824,
0.05328221991658211,
-0.0026169712655246258,
-0.012758065015077591,
0.018180880695581436,
0.006871649529784918,
-0.04044811800122261,
-0.022244678810238838,
-0.057815518230199814,
0.04230376332998276,
0.01334802433848381,
0.020281564444303513,
-0.08959680795669556,
0.06478825956583023,
0.010872753337025642,
-0.00794424582272768,
0.025971153751015663,
0.03052888624370098,
0.0018946015043184161,
0.03714076802134514,
0.1013401448726654,
-0.05642174929380417,
0.02557516284286976,
-0.03645045682787895,
0.029289761558175087,
-0.05119667574763298,
-0.05287450924515724,
-0.03933553397655487,
0.1072535365819931,
0.04393818601965904,
0.01849050633609295,
0.01722346432507038,
-0.0343439094722271,
-0.05204388499259949,
0.011158451437950134,
-0.03491377830505371,
-0.09320194274187088,
0.019898394122719765,
-0.05176566541194916,
-0.08842048048973083,
-0.03140198439359665,
-0.01274882722645998,
0.00857999175786972,
-0.035391539335250854,
-0.05608430132269859,
-0.03443383798003197,
-0.0595150962471962,
0.03729645907878876,
-0.04033779352903366,
-0.08853769302368164,
-0.03412280231714249,
2.6494896963305558e-33,
0.005135361570864916,
0.018867602571845055,
0.009844006039202213,
0.034083347767591476,
-0.03296661749482155,
0.013693437911570072,
0.06410519778728485,
0.06174640357494354,
0.018681246787309647,
0.04877614974975586,
0.05750575661659241,
-0.05301905795931816,
0.04644473269581795,
-0.06834355741739273,
0.086494080722332,
-0.014522124081850052,
0.028627987951040268,
0.1396864503622055,
-0.028232645243406296,
0.1034121885895729,
-0.043743591755628586,
0.11433711647987366,
-0.10072226822376251,
0.10800597816705704,
0.01866612769663334,
0.03722459077835083,
0.024869967252016068,
0.050783921033144,
0.05238233134150505,
0.05835689976811409,
0.05515478923916817,
-0.026454046368598938,
-0.08767382055521011,
0.06605978310108185,
-0.04524756595492363,
-0.037608034908771515,
0.07302276045084,
0.09627655148506165,
-0.03317423537373543,
0.07104167342185974,
-0.014024650678038597,
0.05280384048819542,
0.00764219556003809,
-0.0845835730433464,
0.03705138340592384,
-0.029328592121601105,
0.023456288501620293,
-0.0480973981320858,
-0.03541366383433342,
-0.02444067969918251,
0.04958803951740265,
0.0017113033682107925,
-0.029211213812232018,
-0.036912743002176285,
0.04253143444657326,
0.005671896506100893,
-0.03896801173686981,
-0.02943400666117668,
-0.08187033236026764,
-0.02752768248319626,
-0.048945508897304535,
0.044475898146629333,
-0.03757636249065399,
-0.07410240918397903,
0.06333933770656586,
0.05918573588132858,
-0.013751332648098469,
0.024743791669607162,
0.03265279904007912,
-0.03382911533117294,
-0.006380048580467701,
-0.027877019718289375,
-0.07512806355953217,
-0.046949367970228195,
0.035096097737550735,
0.027047619223594666,
-0.05588390305638313,
0.022941797971725464,
-0.04542309045791626,
-0.06164006516337395,
0.011930925771594048,
-0.05049402266740799,
0.03405271843075752,
0.04049408435821533,
0.039593618363142014,
-0.01211064774543047,
-0.029061127454042435,
-0.04762270674109459,
0.05439123138785362,
0.04305131733417511,
0.001165547757409513,
-0.01747032441198826,
0.06479360163211823,
0.10865059494972229,
0.014613844454288483,
-5.1113545396219706e-8,
-0.05337194725871086,
-0.03911997377872467,
-0.037913307547569275,
0.0229208841919899,
0.05513612553477287,
0.03300853073596954,
-0.0594426654279232,
-0.09982411563396454,
0.02090434730052948,
-0.02213410474359989,
0.06559105217456818,
-0.0676892101764679,
-0.09934184700250626,
0.0017056678188964725,
-0.030567659065127373,
0.042997706681489944,
-0.01667178049683571,
0.1000463142991066,
-0.0261008832603693,
-0.02464136853814125,
0.04758142679929733,
0.01220690831542015,
0.07096752524375916,
-0.046918854117393494,
0.0100938118994236,
0.025114664807915688,
-0.015440928749740124,
0.021970847621560097,
0.03311949223279953,
-0.016816608607769012,
-0.015884079039096832,
-0.018320009112358093,
0.03218149393796921,
-0.02970857545733452,
0.026103533804416656,
-0.026792462915182114,
0.026065319776535034,
0.009237653575837612,
0.002263143192976713,
0.034002747386693954,
0.04494934529066086,
0.0763983428478241,
-0.011152932420372963,
0.03709302097558975,
0.0472857728600502,
-0.007875081151723862,
-0.017024530097842216,
-0.02249530702829361,
0.022531980648636818,
-0.09950463473796844,
0.003941833972930908,
-0.017004704102873802,
-0.006577972322702408,
-0.020007319748401642,
-0.020339226350188255,
0.02819417417049408,
0.0759182721376419,
0.08989474177360535,
0.023655354976654053,
0.004407626111060381,
0.054179947823286057,
0.0318852923810482,
0.056468356400728226,
-0.03601985052227974
] |
hf-internal-testing/tiny-detr-mobilenetsv3 | d22336251d71ba3637c29c23808b9dfeaa442eda | 2021-09-05T15:50:14.000Z | [
"pytorch",
"detr",
"object-detection",
"transformers"
] | object-detection | false | hf-internal-testing | null | hf-internal-testing/tiny-detr-mobilenetsv3 | 1,198 | null | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
activebus/BERT-XD_Review | 9dbc8322c9767ac81e75e62a5a5376d948c3536f | 2021-05-19T11:38:28.000Z | [
"pytorch",
"bert",
"transformers"
] | null | false | activebus | null | activebus/BERT-XD_Review | 1,197 | null | transformers | # ReviewBERT
BERT (post-)trained on a review corpus to understand sentiment, opinions, and various e-commerce aspects.
Please visit https://github.com/howardhsu/BERT-for-RRC-ABSA for details.
`BERT-XD_Review` is a cross-domain language model (beyond just `laptop` and `restaurant`), where each example comes from a single product or restaurant with the same rating. It is post-trained (fine-tuned) on a combination of 5-core Amazon reviews and all Yelp data, roughly 22 GB in total, starting from `bert-base-uncased` and trained for 4 epochs.
The preprocessing code is available [here](https://github.com/howardhsu/BERT-for-RRC-ABSA/transformers).
## Model Description
The base model is `bert-base-uncased`.
Models are post-trained on the [Amazon Dataset](http://jmcauley.ucsd.edu/data/amazon/) and the [Yelp Dataset](https://www.yelp.com/dataset/challenge/).
## Instructions
Loading the post-trained weights is as simple as:
```python
import torch
from transformers import AutoModel, AutoTokenizer
tokenizer = AutoTokenizer.from_pretrained("activebus/BERT-XD_Review")
model = AutoModel.from_pretrained("activebus/BERT-XD_Review")
```
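As a quick illustrative check (not from the original card), the loaded encoder can embed a review; the sample sentence is made up:
```python
# Sketch: embed a review with the post-trained encoder
review = "The battery life of this laptop is impressive."
inputs = tokenizer(review, return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)
cls_embedding = outputs.last_hidden_state[:, 0]  # (1, hidden_size) [CLS] representation
```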
## Evaluation Results
Check our [NAACL paper](https://www.aclweb.org/anthology/N19-1242.pdf)
`BERT_Review` is expected to have similar performance on domain-specific tasks (such as aspect extraction) as `BERT-DK`, but much better on general tasks such as aspect sentiment classification (different domains mostly share similar sentiment words).
## Citation
If you find this work useful, please cite it as follows.
```
@inproceedings{xu_bert2019,
title = "BERT Post-Training for Review Reading Comprehension and Aspect-based Sentiment Analysis",
author = "Xu, Hu and Liu, Bing and Shu, Lei and Yu, Philip S.",
booktitle = "Proceedings of the 2019 Conference of the North American Chapter of the Association for Computational Linguistics",
month = "jun",
year = "2019",
}
```
| [
-0.08562646061182022,
-0.07617181539535522,
-0.004869005177170038,
0.031172923743724823,
0.012952310964465141,
0.08844827860593796,
0.023221245035529137,
0.011689543724060059,
0.0013750529615208507,
-0.02278555929660797,
0.046799298375844955,
-0.02305448241531849,
0.008987975306808949,
-0.015760187059640884,
-0.013714051805436611,
0.07216567546129227,
0.138990119099617,
-0.12181078642606735,
-0.09508657455444336,
-0.06441061943769455,
0.08767782151699066,
0.07326790690422058,
0.05167737975716591,
-0.004364925902336836,
0.03698478266596794,
-0.0043630474247038364,
-0.06500664353370667,
-0.023763399571180344,
0.05678475275635719,
-0.015925994142889977,
0.05229095742106438,
0.016273660585284233,
0.06658783555030823,
0.09734605252742767,
-0.01644577831029892,
0.05583043769001961,
-0.01061201561242342,
-0.10164694488048553,
0.035371966660022736,
0.04396269842982292,
-0.02359853871166706,
-0.01577940583229065,
-0.04413188248872757,
-0.0049589271657168865,
0.1351417750120163,
-0.025767020881175995,
-0.0453508198261261,
-0.013444919139146805,
-0.021439429372549057,
-0.03081621415913105,
-0.027048835530877113,
-0.020280959084630013,
0.05702805891633034,
0.012982276268303394,
-0.025470014661550522,
0.025708407163619995,
-0.04352521523833275,
-0.012187930755317211,
0.02214755117893219,
-0.09523370116949081,
-0.031401779502630234,
-0.09396398812532425,
-0.01579154096543789,
-0.005524212494492531,
-0.05341259762644768,
-0.01442006230354309,
-0.11490384489297867,
-0.028117582201957703,
-0.03073214367032051,
-0.01156603917479515,
0.010099120438098907,
0.03063034825026989,
0.0522211492061615,
0.0336683914065361,
0.03987356275320053,
-0.005027701612561941,
0.0774654820561409,
-0.04548430070281029,
0.014587083831429482,
-0.08889178931713104,
0.000443727447418496,
-0.01691356673836708,
0.05043910816311836,
0.0013407696969807148,
0.07928396761417389,
-0.08088918030261993,
0.12099987268447876,
-0.01877986453473568,
-0.02731660008430481,
0.001030482118949294,
0.031085854396224022,
-0.07838284224271774,
0.03096858598291874,
-0.02657700516283512,
-0.023761175572872162,
0.028481217101216316,
-0.02861529402434826,
-0.04896872490644455,
0.0031470332760363817,
0.0886111930012703,
0.06158914789557457,
0.09321830421686172,
0.03459135442972183,
-0.035088568925857544,
-0.015688205137848854,
0.01905953511595726,
-0.01950138807296753,
0.0931808277964592,
0.07396755367517471,
-0.06321355700492859,
-0.03364473581314087,
0.05901647359132767,
-0.005147495772689581,
-0.016456613317131996,
-0.013499015010893345,
0.008679782971739769,
0.020358040928840637,
-0.017781492322683334,
0.060425348579883575,
0.06997144967317581,
-0.0024097447749227285,
0.004311577416956425,
0.031351249665021896,
-0.009632829576730728,
-0.06443493813276291,
0.02281307242810726,
-0.02523834817111492,
5.018288698593523e-34,
0.01636171154677868,
0.03263070806860924,
0.004441270604729652,
-0.10017094016075134,
-0.009097132831811905,
0.003982722759246826,
0.009181540459394455,
0.06031709909439087,
-0.01663569174706936,
-0.007669697981327772,
-0.087176613509655,
0.014033033512532711,
-0.038799408823251724,
0.046484608203172684,
-0.025004563853144646,
0.02207815833389759,
-0.05456740781664848,
0.0496840663254261,
0.062174297869205475,
-0.01861000619828701,
0.0974334329366684,
0.04917082190513611,
-0.002593560144305229,
-0.061727989464998245,
-0.05386687070131302,
-0.0108973179012537,
0.07866442948579788,
-0.05560782551765442,
-0.020025454461574554,
0.06685421615839005,
-0.1271606981754303,
0.08661001920700073,
-0.03093918412923813,
0.025045501068234444,
-0.012099983170628548,
0.0046857018023729324,
-0.002420374657958746,
-0.020319407805800438,
0.01569494977593422,
-0.07285130769014359,
-0.0058220974169671535,
0.0834881141781807,
-0.008868945762515068,
-0.03821166232228279,
-0.04789166525006294,
0.03795511648058891,
0.030421193689107895,
-0.020251786336302757,
0.025521190837025642,
0.06552547216415405,
0.0032257933635264635,
0.012916360050439835,
-0.03235674649477005,
0.025592582300305367,
-0.02129989303648472,
-0.04200632870197296,
0.07487418502569199,
0.012765618972480297,
0.08967167884111404,
0.024686846882104874,
0.052654169499874115,
0.04114633798599243,
0.012069384567439556,
-0.01788030005991459,
-0.035842616111040115,
0.0073257144540548325,
-0.03891269862651825,
0.022864041849970818,
0.011931436136364937,
0.015924425795674324,
-0.048998307436704636,
-0.010612226091325283,
0.03308101370930672,
-0.02990524284541607,
0.013585773296654224,
-0.11025980114936829,
-0.012977231293916702,
-0.05119423568248749,
-0.015227754600346088,
0.007095494773238897,
-0.0019043837673962116,
0.03168832138180733,
-0.038246434181928635,
-0.0860522985458374,
-0.09633225202560425,
-0.017696185037493706,
0.02957948110997677,
-0.07095929235219955,
-0.02550293132662773,
0.040063854306936264,
-0.0151584567502141,
-0.0067214397713541985,
0.019987313076853752,
-0.007161150686442852,
0.00941573828458786,
-1.0063284996179713e-33,
-0.06490024924278259,
0.0038523925468325615,
-0.10098380595445633,
0.11520491540431976,
-0.06183316186070442,
-0.0929829478263855,
-0.004405605606734753,
0.173076331615448,
0.05361619591712952,
-0.007244785316288471,
0.007842663675546646,
-0.0645957812666893,
-0.026676146313548088,
-0.03284426033496857,
0.04171283170580864,
0.01970607601106167,
-0.06896936148405075,
0.018524887040257454,
-0.01113853882998228,
0.02232293412089348,
0.053111135959625244,
0.03339861333370209,
-0.0799446851015091,
0.09564214944839478,
0.004980816971510649,
0.09489236027002335,
0.009476907551288605,
0.07039077579975128,
0.028804132714867592,
-0.016433879733085632,
-0.030483298003673553,
-0.024433376267552376,
-0.044957295060157776,
0.11202450841665268,
-0.1177910789847374,
0.010951494798064232,
-0.002969051944091916,
-0.029184306040406227,
0.0136632164940238,
0.057543981820344925,
0.08885926753282547,
-0.01607338711619377,
-0.054150212556123734,
0.05517679452896118,
-0.025336822494864464,
0.028378698974847794,
-0.08157029747962952,
-0.04575100913643837,
0.03279106691479683,
-0.05418939143419266,
-0.02213861420750618,
0.019332928583025932,
-0.07179045677185059,
0.03668107092380524,
-0.10184061527252197,
-0.08905068039894104,
0.011752215214073658,
-0.0503678098320961,
-0.023835450410842896,
0.015556186437606812,
-0.024201534688472748,
0.05395861715078354,
0.048367686569690704,
-0.01838560774922371,
0.026389919221401215,
-0.0981052815914154,
0.0721743255853653,
0.048568904399871826,
0.020955145359039307,
0.009234433993697166,
-0.03413647785782814,
0.03551160916686058,
0.06393247842788696,
-0.0011072942288592458,
-0.011411763727664948,
0.034660253673791885,
-0.0114515395835042,
-0.07448974251747131,
-0.020640535280108452,
-0.07406604290008545,
-0.0930890142917633,
-0.0382242351770401,
0.06111837923526764,
0.042454373091459274,
0.042832184582948685,
0.0621701180934906,
0.041339971125125885,
0.011199280619621277,
-0.03182646259665489,
0.04992542043328285,
-0.0009084840421564877,
0.002770272083580494,
0.028812136501073837,
0.025267215445637703,
-0.014077682048082352,
-5.206966591231321e-8,
-0.05666354298591614,
-0.06875552982091904,
-0.041137367486953735,
0.0965220183134079,
0.02040437050163746,
-0.0451187863945961,
-0.029966024681925774,
0.04882781207561493,
-0.001949229510501027,
-0.020524941384792328,
0.07132742553949356,
0.06050543114542961,
-0.14551976323127747,
-0.034287773072719574,
-0.02584320865571499,
-0.004663568921387196,
-0.02481597661972046,
0.08899158984422684,
-0.004877354949712753,
-0.019544359296560287,
0.009263855405151844,
0.0399724580347538,
0.04895809292793274,
-0.0659484714269638,
0.06732625514268875,
-0.040954459458589554,
-0.011024962179362774,
0.12843485176563263,
-0.015666404739022255,
-0.012496606446802616,
-0.019173678010702133,
0.006726528983563185,
-0.08586950600147247,
0.016604045405983925,
0.04025772958993912,
0.04292236268520355,
-0.002600139006972313,
-0.015766246244311333,
-0.052293263375759125,
0.004218340385705233,
0.030485009774565697,
-0.0248819962143898,
-0.12726689875125885,
-0.033275946974754333,
0.06709026545286179,
0.014930343255400658,
-0.01947633922100067,
-0.0591517835855484,
0.007919416762888432,
0.033796921372413635,
0.04975906014442444,
-0.03099830262362957,
-0.04256065562367439,
0.0737772062420845,
-0.015730515122413635,
-0.01100444421172142,
-0.06518140435218811,
-0.1164507269859314,
-0.0012554655550047755,
0.027412239462137222,
0.0015008535701781511,
-0.043248649686574936,
0.03057902306318283,
0.07072296738624573
] |
HooshvareLab/distilbert-fa-zwnj-base-ner | 36ccd9aa3dd64c3a83c76de0b8cc5b3f6fa3dc30 | 2021-03-21T14:32:29.000Z | [
"pytorch",
"tf",
"distilbert",
"token-classification",
"fa",
"transformers",
"autotrain_compatible"
] | token-classification | false | HooshvareLab | null | HooshvareLab/distilbert-fa-zwnj-base-ner | 1,194 | 1 | transformers | ---
language: fa
---
# DistilbertNER
This model was fine-tuned for the Named Entity Recognition (NER) task on a mixed NER dataset collected from [ARMAN](https://github.com/HaniehP/PersianNER), [PEYMA](http://nsurl.org/2019-2/tasks/task-7-named-entity-recognition-ner-for-farsi/), and [WikiANN](https://elisa-ie.github.io/wikiann/), covering ten types of entities:
- Date (DAT)
- Event (EVE)
- Facility (FAC)
- Location (LOC)
- Money (MON)
- Organization (ORG)
- Percent (PCT)
- Person (PER)
- Product (PRO)
- Time (TIM)
## Dataset Information
| | Records | B-DAT | B-EVE | B-FAC | B-LOC | B-MON | B-ORG | B-PCT | B-PER | B-PRO | B-TIM | I-DAT | I-EVE | I-FAC | I-LOC | I-MON | I-ORG | I-PCT | I-PER | I-PRO | I-TIM |
|:------|----------:|--------:|--------:|--------:|--------:|--------:|--------:|--------:|--------:|--------:|--------:|--------:|--------:|--------:|--------:|--------:|--------:|--------:|--------:|--------:|--------:|
| Train | 29133 | 1423 | 1487 | 1400 | 13919 | 417 | 15926 | 355 | 12347 | 1855 | 150 | 1947 | 5018 | 2421 | 4118 | 1059 | 19579 | 573 | 7699 | 1914 | 332 |
| Valid | 5142 | 267 | 253 | 250 | 2362 | 100 | 2651 | 64 | 2173 | 317 | 19 | 373 | 799 | 387 | 717 | 270 | 3260 | 101 | 1382 | 303 | 35 |
| Test | 6049 | 407 | 256 | 248 | 2886 | 98 | 3216 | 94 | 2646 | 318 | 43 | 568 | 888 | 408 | 858 | 263 | 3967 | 141 | 1707 | 296 | 78 |
## Evaluation
The following tables summarize the scores obtained by the model, both overall and per class.
**Overall**
| Model | accuracy | precision | recall | f1 |
|:----------:|:--------:|:---------:|:--------:|:--------:|
| Distilbert | 0.994534 | 0.946326 | 0.95504 | 0.950663 |
**Per entities**
| | number | precision | recall | f1 |
|:---: |:------: |:---------: |:--------: |:--------: |
| DAT | 407 | 0.812048 | 0.828010 | 0.819951 |
| EVE | 256 | 0.955056 | 0.996094 | 0.975143 |
| FAC | 248 | 0.972549 | 1.000000 | 0.986083 |
| LOC | 2884 | 0.968403 | 0.967060 | 0.967731 |
| MON | 98 | 0.925532 | 0.887755 | 0.906250 |
| ORG | 3216 | 0.932095 | 0.951803 | 0.941846 |
| PCT | 94 | 0.936842 | 0.946809 | 0.941799 |
| PER | 2645 | 0.959818 | 0.957278 | 0.958546 |
| PRO | 318 | 0.963526 | 0.996855 | 0.979907 |
| TIM | 43 | 0.760870 | 0.813953 | 0.786517 |
## How To Use
You can use this model with the Transformers pipeline for NER.
### Installing requirements
```bash
pip install transformers
```
### How to predict using pipeline
```python
from transformers import AutoTokenizer
from transformers import AutoModelForTokenClassification # for pytorch
from transformers import TFAutoModelForTokenClassification # for tensorflow
from transformers import pipeline
model_name_or_path = "HooshvareLab/distilbert-fa-zwnj-base-ner"
tokenizer = AutoTokenizer.from_pretrained(model_name_or_path)
model = AutoModelForTokenClassification.from_pretrained(model_name_or_path) # Pytorch
# model = TFAutoModelForTokenClassification.from_pretrained(model_name_or_path) # Tensorflow
nlp = pipeline("ner", model=model, tokenizer=tokenizer)
example = "در سال ۲۰۱۳ درگذشت و آندرتیکر و کین برای او مراسم یادبود گرفتند."
ner_results = nlp(example)
print(ner_results)
```
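If you prefer whole-entity spans instead of per-token tags, recent Transformers versions can aggregate sub-tokens for you (a sketch; on older versions the parameter is `grouped_entities=True`):
```python
# Sketch: group sub-token predictions into complete entity spans
nlp_grouped = pipeline("ner", model=model, tokenizer=tokenizer, aggregation_strategy="simple")
print(nlp_grouped(example))
```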
## Questions?
Post a Github issue on the [ParsNER Issues](https://github.com/hooshvare/parsner/issues) repo. | [
-0.04383894056081772,
-0.032571256160736084,
-0.062205493450164795,
-0.06179426237940788,
0.022152669727802277,
-0.03915128484368324,
0.037361614406108856,
-0.007038395386189222,
0.04886763170361519,
-0.02353312075138092,
0.04553021863102913,
-0.11598778516054153,
-0.02474392019212246,
0.04533214122056961,
-0.008992484770715237,
0.03842956945300102,
0.005608109757304192,
0.008313166908919811,
-0.07299836724996567,
-0.10075867921113968,
-0.02112823724746704,
-0.009014051407575607,
0.08911658823490143,
-0.06648300588130951,
-0.028765423223376274,
-0.03798535466194153,
-0.013273237273097038,
-0.01790396310389042,
0.06477133929729462,
-0.0466986745595932,
0.008172208443284035,
0.12577490508556366,
0.0650898888707161,
0.06018678471446037,
-0.003346785670146346,
0.08296225219964981,
-0.05500702187418938,
0.003413897007703781,
-0.031257741153240204,
0.005962233990430832,
0.038812365382909775,
-0.041673172265291214,
-0.03241434693336487,
-0.05019151791930199,
0.023760057985782623,
0.021447958424687386,
-0.043862514197826385,
0.055493954569101334,
-0.062382135540246964,
0.05682888999581337,
-0.06978988647460938,
-0.018552085384726524,
-0.007000837940722704,
0.09994453936815262,
-0.015422796830534935,
-0.054264143109321594,
-0.04996100813150406,
-0.06523256748914719,
-0.04578158259391785,
-0.02982325479388237,
-0.03705989569425583,
-0.016080524772405624,
-0.07451115548610687,
-0.005058420822024345,
-0.0216512493789196,
0.0494895838201046,
-0.048298098146915436,
0.042569492012262344,
0.009786012582480907,
-0.02393195405602455,
0.02113177254796028,
0.02464539371430874,
-0.08375251293182373,
-0.0043639675714075565,
-0.027967119589447975,
0.01295202225446701,
0.0357787050306797,
0.03602422773838043,
0.009727170690894127,
-0.12757521867752075,
-0.013253623619675636,
0.07045940309762955,
0.051648419350385666,
-0.04262605682015419,
0.09238769114017487,
0.013619913719594479,
0.005531824193894863,
0.07158881425857544,
-0.010234269313514233,
-0.0043103969655931,
-0.018699076026678085,
0.0041832211427390575,
0.11928970366716385,
-0.06258386373519897,
0.05923859775066376,
0.01356863509863615,
0.07727251201868057,
0.036452457308769226,
0.0759039968252182,
0.08945024758577347,
-0.08775167167186737,
0.039375271648168564,
0.00012840211275033653,
0.001462167245335877,
-0.10348237305879593,
-0.038279708474874496,
-0.051603686064481735,
0.01026857364922762,
-0.000901905819773674,
0.011750202625989914,
-0.0447872094810009,
0.042029403150081635,
-0.04662897810339928,
-0.045349638909101486,
-0.0059952703304588795,
-0.027446508407592773,
-0.06684970110654831,
-0.005628937389701605,
0.03260655328631401,
-0.013795975595712662,
-0.041219938546419144,
0.008878154680132866,
0.005719244014471769,
-0.02261342853307724,
0.05463237315416336,
0.08672408759593964,
-0.0430506132543087,
4.740839889375808e-34,
0.00034705179859884083,
0.045953866094350815,
-0.0020081778056919575,
-0.018526827916502953,
-0.047533560544252396,
0.004636820405721664,
-0.05535979941487312,
0.03251158818602562,
-0.00472263852134347,
0.023328473791480064,
-0.05959365889430046,
-0.007074206601828337,
-0.027388857677578926,
0.0034582267981022596,
0.07185979187488556,
-0.00398210808634758,
0.0001913103915285319,
0.0414569154381752,
-0.011871106922626495,
0.05694754794239998,
0.11155898869037628,
0.03442685306072235,
-0.04383412003517151,
0.02168232575058937,
0.053917836397886276,
0.005236572585999966,
0.03145577386021614,
-0.026432514190673828,
-0.02755681611597538,
-0.02304789051413536,
-0.011210210621356964,
-0.04869897663593292,
0.04792965576052666,
0.0352540984749794,
0.020825257524847984,
0.0056260316632688046,
0.005578533746302128,
-0.04543345421552658,
0.0062324777245521545,
-0.016376111656427383,
0.01532558724284172,
0.042958393692970276,
0.05859706923365593,
-0.042364977300167084,
-0.0305133406072855,
-0.04071574658155441,
-0.0023001241497695446,
-0.006389121524989605,
0.033543944358825684,
0.10451208055019379,
-0.014669963158667088,
0.004399359226226807,
-0.019800875335931778,
-0.03769915923476219,
-0.015558179467916489,
0.03355603665113449,
-0.01618698239326477,
0.008174590766429901,
0.022549664601683617,
0.08502203971147537,
-0.01522587426006794,
-0.0395001582801342,
0.028747636824846268,
0.02450280264019966,
0.014331793412566185,
0.044598884880542755,
-0.024931415915489197,
-0.05218534171581268,
0.1334211379289627,
0.04148456081748009,
0.02145572565495968,
0.0325176939368248,
0.02288697101175785,
0.06897903978824615,
0.08046960830688477,
0.025085119530558586,
0.04112371429800987,
-0.07530097663402557,
-0.0635959580540657,
0.055875618010759354,
-0.008807308040559292,
0.044430606067180634,
-0.0012665478279814124,
-0.011448059231042862,
-0.05480623245239258,
-0.012539656832814217,
-0.010552212595939636,
-0.06292594224214554,
-0.04658834636211395,
-0.036663465201854706,
0.04854290187358856,
0.05103345960378647,
-0.07114534825086594,
-0.011859679594635963,
-0.036412596702575684,
-1.6309023518369427e-33,
-0.010095576755702496,
-0.0013881907798349857,
0.028421880677342415,
0.006012477912008762,
0.05698970705270767,
-0.09330721944570541,
0.05885619297623634,
-0.007379978429526091,
0.09274286031723022,
0.05611603707075119,
0.017511144280433655,
-0.11040197312831879,
0.06446966528892517,
-0.07398298382759094,
0.0018394356593489647,
0.025425106287002563,
-0.024610187858343124,
0.032652903348207474,
0.06861021369695663,
0.11619295179843903,
-0.02031969465315342,
0.08212976902723312,
-0.08710634708404541,
0.0026165584567934275,
0.020701637491583824,
0.06236866116523743,
-0.013168763369321823,
-0.0017304295906797051,
0.008172689005732536,
-0.04356873407959938,
-0.0818774551153183,
-0.10343599319458008,
-0.0857248604297638,
-0.009013107046484947,
-0.043572742491960526,
-0.05701068043708801,
-0.008137946017086506,
0.01385484542697668,
-0.029054151847958565,
-0.010584606789052486,
0.045990239828825,
0.11387878656387329,
-0.1581842452287674,
-0.007776597980409861,
-0.05505923554301262,
-0.002445543184876442,
-0.09609894454479218,
0.006308442447334528,
0.03266626596450806,
-0.10717795789241791,
-0.010312039405107498,
0.048027049750089645,
-0.038758039474487305,
-0.022517025470733643,
0.022983234375715256,
0.007195645477622747,
0.12191672623157501,
-0.12103265523910522,
-0.09980376809835434,
-0.0004380334576126188,
-0.04731560871005058,
0.006117762997746468,
0.029295913875102997,
0.09910903126001358,
0.04917432367801666,
-0.015359985642135143,
0.007253861520439386,
-0.008785104379057884,
-0.032662998884916306,
-0.06410177797079086,
0.049958087503910065,
0.019005363807082176,
-0.030233770608901978,
-0.019525203853845596,
-0.030567290261387825,
-0.02375502698123455,
-0.05355749651789665,
-0.02193884365260601,
0.045469917356967926,
-0.01125262025743723,
-0.10028891265392303,
-0.061391204595565796,
0.058786790817976,
0.04749779775738716,
-0.029325898736715317,
0.09986326098442078,
0.05150569975376129,
0.00021333729091566056,
0.021985437721014023,
0.055377062410116196,
-0.03775504231452942,
0.0037835082039237022,
-0.0174954142421484,
0.11044738441705704,
-0.015929022803902626,
-6.072330904771661e-8,
0.0007863013888709247,
-0.05802693963050842,
-0.036332517862319946,
-0.016283303499221802,
-0.0011167911579832435,
-0.09730081260204315,
-0.0469403974711895,
0.019830957055091858,
-0.019080720841884613,
0.06711180508136749,
0.015679366886615753,
0.040358737111091614,
-0.14070285856723785,
-0.05252256989479065,
0.09272658824920654,
0.02845880761742592,
0.06147680804133415,
0.020646391436457634,
-0.058329854160547256,
-0.018982205539941788,
0.08713243156671524,
0.05257553234696388,
-0.032910604029893875,
-0.030358577147126198,
0.04247187823057175,
-0.011830948293209076,
-0.01965091936290264,
0.09842351078987122,
0.07006534934043884,
-0.03730613738298416,
-0.042000912129879,
-0.009019681252539158,
-0.012203522957861423,
-0.011318947188556194,
0.0594152994453907,
-0.019251413643360138,
0.01237221248447895,
0.027250776067376137,
-0.06199511140584946,
0.06591625511646271,
0.04024447128176689,
0.012022734619677067,
-0.11890977621078491,
0.03454464301466942,
0.13564208149909973,
0.0013102925149723887,
-0.11266250908374786,
-0.12279438227415085,
0.053718239068984985,
-0.018927335739135742,
0.04627624899148941,
-0.010304806753993034,
0.008548804558813572,
0.005597373005002737,
0.008541833609342575,
0.02446085587143898,
0.007610905449837446,
-0.06465674191713333,
0.07962384819984436,
-0.0404951237142086,
0.044020164757966995,
-0.021282274276018143,
0.02315218560397625,
-0.006246207281947136
] |
ml6team/mt5-small-german-finetune-mlsum | c466d1eeefc34cf39b4e8411410ef1ea3bade115 | 2021-01-28T13:15:00.000Z | [
"pytorch",
"tf",
"t5",
"text2text-generation",
"de",
"dataset:mlsum",
"transformers",
"summarization",
"autotrain_compatible"
] | summarization | false | ml6team | null | ml6team/mt5-small-german-finetune-mlsum | 1,193 | 9 | transformers | ---
language: de
tags:
- summarization
datasets:
- mlsum
---
# mT5-small fine-tuned on German MLSUM
This model was fine-tuned for 3 epochs with a maximum input length (`max_len`) of 768 tokens and a maximum target length (`target_max_len`) of 192 tokens.
It was fine-tuned on all German articles in the train split of the [MLSUM dataset](https://huggingface.co/datasets/mlsum) that contain fewer than 384 "words" after splitting on whitespace, which resulted in 80,249 articles.
The exact expression to filter the dataset was the following:
```python
dataset = dataset.filter(lambda e: len(e['text'].split()) < 384)
```
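For inference, the checkpoint can be loaded with the standard summarization pipeline. The following is a minimal sketch (not part of the original card); the article string and generation settings are placeholders:
```python
from transformers import pipeline

# Minimal usage sketch; the article below is a placeholder, not real evaluation data.
summarizer = pipeline("summarization", model="ml6team/mt5-small-german-finetune-mlsum")

article = "Hier steht der Volltext eines deutschen Nachrichtenartikels."  # placeholder text
print(summarizer(article, max_length=192, num_beams=4)[0]["summary_text"])
```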
## Evaluation results
The fine-tuned model was evaluated on 2,000 randomly sampled articles from the validation set.
Mean [F1 ROUGE scores](https://github.com/pltrdy/rouge) were calculated for both the fine-tuned model and the lead-3 baseline (which simply returns the leading three sentences of the document) and are presented in the table below.
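A minimal sketch of how such mean F1 ROUGE scores can be computed with the linked `rouge` package (the hypothesis and reference lists below are placeholders, not the actual evaluation data):
```python
from rouge import Rouge  # https://github.com/pltrdy/rouge

hyps = ["generated summary one", "generated summary two"]  # model outputs (placeholders)
refs = ["reference summary one", "reference summary two"]  # gold summaries (placeholders)

scores = Rouge().get_scores(hyps, refs, avg=True)
print(scores["rouge-1"]["f"], scores["rouge-2"]["f"], scores["rouge-l"]["f"])
```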
| Model | Rouge-1 | Rouge-2 | Rouge-L |
| ------------- |:-------:| --------:| -------:|
| mt5-small | 0.399 | 0.318 | 0.392 |
| lead-3 | 0.343 | 0.263 | 0.341 | | [
-0.021918660029768944,
-0.001484303968027234,
0.020744886249303818,
0.03631197288632393,
0.07454904913902283,
0.030321326106786728,
-0.04932461306452751,
0.046763207763433456,
-0.013640292920172215,
-0.057559993118047714,
0.01854819804430008,
-0.047129515558481216,
0.047718800604343414,
-0.03706839680671692,
-0.03586798161268234,
0.012392248958349228,
0.0027798826340585947,
-0.0032174591906368732,
-0.14863164722919464,
-0.03961428627371788,
0.10954682528972626,
0.07577131688594818,
0.05759764462709427,
-0.0023980350233614445,
0.056623268872499466,
0.04447769746184349,
-0.1133185476064682,
-0.023933229967951775,
0.028664181008934975,
0.009626565501093864,
0.038925301283597946,
0.044378358870744705,
0.09701379388570786,
0.05745997279882431,
0.04576479643583298,
-0.03578406199812889,
-0.03979494795203209,
-0.04648209363222122,
-0.00392501475289464,
0.05187065154314041,
0.00017796483007259667,
-0.0024513849057257175,
0.013571604155004025,
0.05054998770356178,
0.019240250810980797,
0.008687601424753666,
-0.06755863130092621,
-0.05546252056956291,
-0.017669623717665672,
0.023340066894888878,
-0.0960802435874939,
0.05671969801187515,
-0.04367326945066452,
0.12454530596733093,
-0.005454869940876961,
-0.05347643792629242,
-0.08495510369539261,
0.0005379713838919997,
-0.011731203645467758,
-0.09043698012828827,
-0.07862599939107895,
-0.08162964135408401,
-0.04103071242570877,
-0.0005607091006822884,
0.00990279484540224,
-0.013355190865695477,
-0.08813666552305222,
-0.006080486346036196,
-0.018742935732007027,
0.04936952516436577,
-0.009789506904780865,
0.07939016073942184,
0.0201591607183218,
0.08555128425359726,
0.03259719908237457,
0.05002659931778908,
0.04109755903482437,
-0.07137726247310638,
0.0625617727637291,
-0.06898956000804901,
0.0010768085485324264,
-0.04564997926354408,
0.03970416262745857,
-0.02971123531460762,
0.03101237118244171,
-0.05196446552872658,
0.048587244004011154,
-0.04233741760253906,
0.027871057391166687,
0.0000676088166073896,
0.015912022441625595,
-0.11993243545293808,
0.007374637760221958,
0.015597068704664707,
-0.04312548041343689,
0.08059579133987427,
0.032312992960214615,
0.04427475109696388,
0.03599781170487404,
0.06387671083211899,
0.04044173285365105,
0.05577415972948074,
0.0060373032465577126,
-0.037303533405065536,
-0.013014635071158409,
-0.07920394092798233,
0.09635826200246811,
0.06335132569074631,
-0.015174339525401592,
-0.06948614120483398,
0.03292553126811981,
0.047113221138715744,
-0.06505920737981796,
-0.027383018285036087,
0.040313996374607086,
0.03065522573888302,
0.0435623824596405,
0.014854294247925282,
0.013769898563623428,
0.03973137214779854,
-0.07971318066120148,
0.022277433425188065,
0.005507695022970438,
-0.027378978207707405,
-0.002917781239375472,
0.07465376704931259,
0.021328451111912727,
5.295584418039143e-33,
0.012222041375935078,
0.004589560441672802,
-0.045721154659986496,
-0.0313674733042717,
0.00498842541128397,
-0.02136976458132267,
-0.037064455449581146,
0.010856914333999157,
-0.028777174651622772,
0.04753897711634636,
-0.11363058537244797,
0.04668198898434639,
-0.0441289022564888,
-0.021382059901952744,
0.046722374856472015,
0.00226452574133873,
-0.03754332289099693,
0.002892761956900358,
-0.06570278853178024,
0.044762495905160904,
0.12704619765281677,
-0.012489487417042255,
0.06644880026578903,
-0.028644416481256485,
-0.06124988570809364,
0.01368360873311758,
-0.011543665081262589,
-0.05299533158540726,
-0.09812388569116592,
0.05463686212897301,
-0.1737721562385559,
0.05073212832212448,
-0.0006639061612077057,
0.013178384862840176,
0.04463968053460121,
0.03035402111709118,
-0.042643867433071136,
0.0006229936261661351,
0.015500434674322605,
-0.024000804871320724,
-0.037545908242464066,
0.05352636054158211,
0.04121295362710953,
-0.07008353620767593,
-0.026561321690678596,
0.013604083098471165,
-0.039352964609861374,
0.009544664062559605,
-0.005964270792901516,
0.04263695701956749,
0.015848958864808083,
-0.00900325458496809,
0.04940623790025711,
0.014686862006783485,
0.006314295344054699,
0.04434845224022865,
0.11773266643285751,
0.07767033576965332,
0.06356901675462723,
-0.000022156944396556355,
-0.01549510844051838,
0.044814333319664,
0.0898158997297287,
0.07567984610795975,
0.12222521007061005,
0.06300374865531921,
-0.022587522864341736,
0.06355339288711548,
0.02224067412316799,
0.0062627531588077545,
-0.019582798704504967,
-0.06032051146030426,
-0.06996504962444305,
0.020057901740074158,
0.05220678821206093,
-0.08106940984725952,
0.0664740577340126,
-0.05815848708152771,
-0.04506079852581024,
-0.011180518195033073,
0.052822813391685486,
0.02078896574676037,
0.008158212527632713,
-0.10147145390510559,
-0.06732427328824997,
-0.017497947439551353,
-0.008008843287825584,
-0.04241110756993294,
-0.036508433520793915,
-0.12449312955141068,
0.00580569077283144,
0.04978954792022705,
0.0050189560279250145,
-0.059611327946186066,
-0.045644406229257584,
-5.1949786319980635e-33,
-0.00238618697039783,
-0.017871471121907234,
0.013894804753363132,
0.04625521972775459,
-0.0030551147647202015,
-0.029005488380789757,
0.01793777011334896,
0.16782455146312714,
0.07153782248497009,
0.002706955885514617,
0.07999882847070694,
-0.11787889152765274,
-0.021648641675710678,
-0.06207188218832016,
0.006556241307407618,
-0.019119201228022575,
0.013886175118386745,
-0.00001669893208600115,
0.03132239356637001,
0.05807708576321602,
-0.017967959865927696,
0.015874147415161133,
-0.08699718862771988,
0.06322595477104187,
-0.03650073707103729,
0.022063441574573517,
0.011060202494263649,
-0.02973623014986515,
0.00025384334730915725,
-0.055166229605674744,
-0.04241177439689636,
-0.016454089432954788,
-0.01081559807062149,
0.04763812571763992,
-0.06140296161174774,
-0.03519430011510849,
0.052106428891420364,
0.007060667499899864,
0.0074194869957864285,
0.08557465672492981,
0.0538572333753109,
0.09114877134561539,
-0.06949024647474289,
0.05451066419482231,
-0.006277046632021666,
-0.014586822129786015,
-0.06263898313045502,
-0.03080032765865326,
-0.04234665632247925,
-0.018373724073171616,
0.029623055830597878,
-0.032032307237386703,
-0.052366212010383606,
0.08719635754823685,
-0.06694302707910538,
-0.06027987226843834,
0.0020432262681424618,
-0.047334469854831696,
-0.09113792330026627,
0.0023521778639405966,
-0.0741717517375946,
0.044441867619752884,
-0.0815906897187233,
-0.01271646749228239,
0.057213783264160156,
-0.008502940647304058,
-0.05558100342750549,
-0.04886764660477638,
-0.03494584932923317,
-0.009494028054177761,
0.014945534989237785,
-0.049303773790597916,
0.010100960731506348,
0.01039058342576027,
-0.043447718024253845,
0.03226970136165619,
-0.024781087413430214,
-0.005603194236755371,
0.0019648028537631035,
0.030451204627752304,
-0.048920564353466034,
-0.0007305753533728421,
0.06506030261516571,
0.040291618555784225,
0.0410580188035965,
0.03017991967499256,
-0.003185293171554804,
0.02736572176218033,
-0.036401983350515366,
-0.004811656195670366,
0.02289079688489437,
-0.03906552121043205,
0.01994200423359871,
0.10540454834699631,
-0.02570217102766037,
-6.093627291647863e-8,
-0.06882785260677338,
0.03697168827056885,
-0.10655029118061066,
0.10900449007749557,
-0.003546894760802388,
0.02825240232050419,
-0.041768722236156464,
0.0437299907207489,
-0.025239333510398865,
0.0033176199067384005,
0.05300309881567955,
0.06738633662462234,
-0.1630188226699829,
-0.0196806900203228,
-0.03657596558332443,
-0.024430207908153534,
-0.01432941947132349,
0.11939114332199097,
-0.01969817653298378,
0.04889708384871483,
0.08032979816198349,
-0.009394463151693344,
0.007974743843078613,
-0.037279024720191956,
0.03985835611820221,
0.018425511196255684,
-0.06954335421323776,
0.03601938858628273,
-0.003935173153877258,
-0.09801716357469559,
0.011121575720608234,
-0.014325041323900223,
-0.0580177865922451,
-0.024246204644441605,
-0.013996139168739319,
0.06935247778892517,
-0.012091677635908127,
-0.03447313234210014,
-0.026670029386878014,
0.08561008423566818,
0.045005008578300476,
0.008466859348118305,
-0.04569400101900101,
-0.011810376308858395,
0.030022455379366875,
-0.012683925218880177,
-0.0510968342423439,
-0.03026161715388298,
0.05882443115115166,
0.026147138327360153,
0.07103424519300461,
-0.05999279394745827,
-0.03805089741945267,
-0.0382101871073246,
0.010545441880822182,
0.007170048542320728,
0.0239325650036335,
-0.0005441135144792497,
-0.05649533495306969,
-0.07837620377540588,
0.03772949054837227,
-0.002099614357575774,
0.01761208474636078,
0.0027161508332937956
] |
davanstrien/deit_flyswot | 035587aa11a00f4590f87e748a359c32efe44a76 | 2022-04-03T17:45:11.000Z | [
"pytorch",
"vit",
"image-classification",
"dataset:image_folder",
"transformers",
"generated_from_trainer",
"model-index"
] | image-classification | false | davanstrien | null | davanstrien/deit_flyswot | 1,190 | null | transformers | ---
tags:
- generated_from_trainer
datasets:
- image_folder
metrics:
- f1
model-index:
- name: deit_flyswot
results:
- task:
name: Image Classification
type: image-classification
dataset:
name: image_folder
type: image_folder
args: default
metrics:
- name: F1
type: f1
value: 0.990761405263678
---
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# deit_flyswot
This model was trained from scratch on the image_folder dataset.
It achieves the following results on the evaluation set:
- Loss: 0.0755
- F1: 0.9908
## Model description
More information needed
## Intended uses & limitations
More information needed
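As a rough illustration (not part of the original card), the checkpoint can be tried with the standard image-classification pipeline; the image path below is a placeholder:
```python
from transformers import pipeline

# Illustrative sketch only; "manuscript_page.jpg" is a placeholder image path.
classifier = pipeline("image-classification", model="davanstrien/deit_flyswot")
print(classifier("manuscript_page.jpg"))
```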
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 2e-05
- train_batch_size: 32
- eval_batch_size: 32
- seed: 666
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 30
- mixed_precision_training: Native AMP
### Training results
| Training Loss | Epoch | Step | Validation Loss | F1 |
|:-------------:|:-----:|:----:|:---------------:|:------:|
| No log | 1.0 | 52 | 0.5710 | 0.8095 |
| No log | 2.0 | 104 | 0.2814 | 0.9380 |
| No log | 3.0 | 156 | 0.1719 | 0.9555 |
| No log | 4.0 | 208 | 0.1410 | 0.9692 |
| No log | 5.0 | 260 | 0.1457 | 0.9680 |
| No log | 6.0 | 312 | 0.1084 | 0.9747 |
| No log | 7.0 | 364 | 0.0892 | 0.9736 |
| No log | 8.0 | 416 | 0.0962 | 0.9831 |
| No log | 9.0 | 468 | 0.0819 | 0.9796 |
| 0.2034 | 10.0 | 520 | 0.0916 | 0.9778 |
| 0.2034 | 11.0 | 572 | 0.0793 | 0.9827 |
| 0.2034 | 12.0 | 624 | 0.0818 | 0.9894 |
| 0.2034 | 13.0 | 676 | 0.0852 | 0.9807 |
| 0.2034 | 14.0 | 728 | 0.0938 | 0.9778 |
| 0.2034 | 15.0 | 780 | 0.0814 | 0.9876 |
| 0.2034 | 16.0 | 832 | 0.0702 | 0.9892 |
| 0.2034 | 17.0 | 884 | 0.0801 | 0.9892 |
| 0.2034 | 18.0 | 936 | 0.0806 | 0.9892 |
| 0.2034 | 19.0 | 988 | 0.0769 | 0.9926 |
| 0.0115 | 20.0 | 1040 | 0.0800 | 0.9926 |
| 0.0115 | 21.0 | 1092 | 0.0794 | 0.9926 |
| 0.0115 | 22.0 | 1144 | 0.0762 | 0.9846 |
| 0.0115 | 23.0 | 1196 | 0.0789 | 0.9830 |
| 0.0115 | 24.0 | 1248 | 0.0794 | 0.9829 |
| 0.0115 | 25.0 | 1300 | 0.0770 | 0.9908 |
| 0.0115 | 26.0 | 1352 | 0.0791 | 0.9829 |
| 0.0115 | 27.0 | 1404 | 0.0813 | 0.9892 |
| 0.0115 | 28.0 | 1456 | 0.0816 | 0.9908 |
| 0.0058 | 29.0 | 1508 | 0.0774 | 0.9908 |
| 0.0058 | 30.0 | 1560 | 0.0755 | 0.9908 |
### Framework versions
- Transformers 4.17.0
- Pytorch 1.10.0+cu111
- Datasets 2.0.0
- Tokenizers 0.11.6
| [
-0.02259407378733158,
-0.01958511769771576,
-0.04176652058959007,
0.08217952400445938,
0.06258313357830048,
0.004637193400412798,
0.01160475891083479,
0.02469019964337349,
-0.08152352273464203,
-0.09196677803993225,
0.08534213900566101,
-0.07986665517091751,
0.005819398444145918,
0.0028184622060507536,
-0.10975459963083267,
0.035689495503902435,
0.02115524746477604,
-0.039802033454179764,
-0.11991043388843536,
0.013211236335337162,
0.0225629024207592,
0.014916060492396355,
0.03136591240763664,
0.0841929167509079,
-0.035764336585998535,
0.03207112103700638,
-0.04653966426849365,
0.031953755766153336,
-0.025169873610138893,
-0.07039384543895721,
0.03257965296506882,
0.03594423085451126,
-0.011266998015344143,
0.04103121906518936,
0.05076384171843529,
0.07168841361999512,
-0.015058577992022038,
-0.05661965161561966,
0.033489055931568146,
-0.0556640699505806,
0.009951485320925713,
-0.06213397905230522,
-0.021323472261428833,
0.03893416002392769,
-0.006302473600953817,
0.07558083534240723,
0.009001040831208229,
-0.04517447203397751,
0.011958327144384384,
-0.0004625788133125752,
-0.08474034816026688,
-0.019537176936864853,
0.028209730982780457,
-0.01894439198076725,
0.007627499755471945,
0.05440003424882889,
0.010014628060162067,
-0.04384993016719818,
-0.05561687424778938,
-0.06602047383785248,
-0.015922464430332184,
0.0004893654258921742,
-0.07454336434602737,
-0.01485274638980627,
-0.011090324260294437,
-0.05038498342037201,
-0.032118018716573715,
-0.051089782267808914,
0.10986586660146713,
-0.06362704187631607,
0.05408475175499916,
0.05050103738903999,
0.01894272491335869,
0.03389847278594971,
-0.006642750930041075,
0.04111779108643532,
0.05248365178704262,
0.0590401291847229,
0.0711810365319252,
-0.1469736248254776,
0.00840938463807106,
-0.0406133197247982,
0.07161634415388107,
-0.036226533353328705,
0.06739693135023117,
-0.003144708229228854,
0.03376046568155289,
-0.007228183560073376,
0.03572072461247444,
-0.009014024399220943,
-0.0012473624665290117,
-0.024041753262281418,
-0.07880222052335739,
0.013322126120328903,
0.013162613846361637,
0.038563113659620285,
-0.0035873244050890207,
0.0028317582327872515,
-0.04172748699784279,
0.08463471382856369,
-0.07631510496139526,
0.03223040699958801,
0.043797485530376434,
0.04236815124750137,
0.034383200109004974,
0.03820706903934479,
0.07288690656423569,
0.072016142308712,
0.06713464856147766,
-0.02284727245569229,
0.016708815470337868,
-0.006168257910758257,
-0.05194183811545372,
0.0017278568120673299,
-0.014104018919169903,
0.13413605093955994,
-0.07876972109079361,
0.04124736785888672,
-0.11711642891168594,
0.06923926621675491,
-0.04064930975437164,
-0.012798923999071121,
0.00040566734969615936,
-0.007542036939412355,
-0.04283865541219711,
-0.08903220295906067,
-0.15319287776947021,
4.293013000404491e-33,
0.029906069859862328,
-0.02487592212855816,
0.03108990006148815,
-0.04487127438187599,
0.01222070399671793,
-0.05330517515540123,
0.022829139605164528,
-0.006788900587707758,
-0.0006620570202358067,
0.0035328639205545187,
-0.10884851962327957,
-0.029897836968302727,
-0.02851802483201027,
0.07939957082271576,
0.035496119409799576,
-0.05617964267730713,
-0.03568847104907036,
-0.014801613986492157,
0.03557971119880676,
0.015508627519011497,
0.10303323715925217,
-0.009795162826776505,
-0.03676174581050873,
-0.07886914163827896,
-0.030725695192813873,
0.054008323699235916,
0.034340716898441315,
0.006537618115544319,
0.02583303488790989,
0.06905559450387955,
-0.0381450392305851,
-0.000559969455935061,
-0.0016550442669540644,
0.001171212992630899,
-0.0032039559446275234,
-0.0043180459178984165,
0.016036709770560265,
0.018439238891005516,
-0.02813883125782013,
0.024900157004594803,
0.03928106650710106,
0.01309699285775423,
0.01174349244683981,
-0.10106263309717178,
-0.06217112019658089,
-0.030822796747088432,
0.07087726891040802,
0.04302206262946129,
-0.008596331812441349,
0.07566475123167038,
-0.04783342033624649,
-0.06408204883337021,
0.03916036710143089,
-0.08869905769824982,
-0.042455386370420456,
0.0031162186060100794,
0.02963101491332054,
0.06303936243057251,
0.039244119077920914,
-0.053833115845918655,
0.04980200156569481,
0.04215565323829651,
-0.01899181306362152,
0.023576606065034866,
0.0003273097681812942,
-0.02963501214981079,
-0.019419275224208832,
-0.0355689562857151,
0.007237460929900408,
0.02720011956989765,
-0.09923022985458374,
0.027619557455182076,
-0.05594545975327492,
-0.03336912766098976,
0.08956405520439148,
-0.07711219042539597,
-0.012816110625863075,
-0.024755077436566353,
-0.0782940536737442,
0.022263195365667343,
0.0009569518733769655,
0.05143208056688309,
-0.032294370234012604,
-0.07805009931325912,
-0.03096993826329708,
-0.03963446617126465,
-0.010033062659204006,
-0.02051134966313839,
-0.03432633727788925,
-0.010699435137212276,
-0.019320404157042503,
0.005003300961107016,
-0.06396503746509552,
-0.03754941374063492,
-0.038593590259552,
-5.519970126563794e-33,
-0.007164048030972481,
0.07183441519737244,
-0.03701227903366089,
0.05097240209579468,
-0.019718093797564507,
0.05199927091598511,
0.04500037431716919,
0.10928520560264587,
-0.07821416854858398,
-0.015406054444611073,
0.049949049949645996,
0.024617930874228477,
-0.08480460196733475,
-0.07543900609016418,
-0.009780574589967728,
-0.008772813715040684,
-0.0697227343916893,
-0.0386076383292675,
0.016283702105283737,
0.00026140804402530193,
0.03848886117339134,
0.15911343693733215,
-0.031874921172857285,
0.026964541524648666,
-0.01929844170808792,
0.04928337037563324,
0.07970824092626572,
0.12247224897146225,
0.07501910626888275,
-0.012003188021481037,
0.020836129784584045,
0.004159255884587765,
0.006492996588349342,
0.05434974282979965,
0.0006680103251710534,
0.016217872500419617,
0.04636441171169281,
-0.04284590855240822,
-0.04653305932879448,
0.09487629681825638,
0.024808360263705254,
0.046037569642066956,
-0.07851502299308777,
0.06569631397724152,
0.007180326618254185,
-0.06119745969772339,
0.013067737221717834,
-0.059218257665634155,
0.07504331320524216,
-0.03304421901702881,
0.026079446077346802,
-0.10663212835788727,
-0.10231511294841766,
0.0027549113146960735,
0.011442760936915874,
-0.01919461600482464,
0.050453025847673416,
-0.027287136763334274,
-0.0008473447524011135,
-0.015321428887546062,
-0.0024380539543926716,
-0.004870597738772631,
-0.09687162935733795,
-0.054872289299964905,
-0.009116753935813904,
-0.04832565411925316,
-0.12135851383209229,
-0.0009802471613511443,
0.01524802390486002,
0.07007899135351181,
-0.03510774299502373,
0.03967351093888283,
0.0533871054649353,
-0.0015686878468841314,
-0.029581038281321526,
-0.012502766214311123,
-0.0031081070192158222,
0.05809493735432625,
0.07592282444238663,
-0.09083180874586105,
-0.022978631779551506,
-0.04610585793852806,
0.04545683413743973,
0.08210816234350204,
0.08297812938690186,
0.08534442633390427,
0.018254904076457024,
0.04104764014482498,
-0.03828123211860657,
-0.06538794934749603,
0.04754678159952164,
-0.02504388429224491,
0.0644395723938942,
0.17128397524356842,
0.0060256063006818295,
-5.392197621745254e-8,
-0.015445500612258911,
0.021574895828962326,
0.021634681150317192,
0.08642908185720444,
0.021885577589273453,
0.015503318049013615,
0.009975975379347801,
0.02314600721001625,
-0.03614942356944084,
-0.02249920554459095,
0.039522215723991394,
0.03152558207511902,
-0.08722717314958572,
0.020514870062470436,
0.014211758971214294,
-0.016061170026659966,
0.000483615294797346,
0.12647947669029236,
-0.023041393607854843,
-0.02854720875620842,
0.015317310579121113,
-0.09233611822128296,
-0.00633292505517602,
-0.020106779411435127,
0.03545228764414787,
-0.0705040842294693,
-0.027663934975862503,
0.01995643973350525,
-0.03290089964866638,
-0.061645880341529846,
-0.00943678803741932,
-0.0011401165975257754,
0.0531010627746582,
-0.01819092221558094,
-0.040543872863054276,
0.06770039349794388,
-0.06571339070796967,
-0.004623582120984793,
0.0332234688103199,
0.010091728530824184,
-0.05704944580793381,
0.04847489669919014,
-0.001965793315321207,
-0.03485559672117233,
0.06671241670846939,
-0.007422774564474821,
0.07262592017650604,
-0.05334102362394333,
-0.052734747529029846,
0.01960986666381359,
-0.009703192859888077,
-0.01924031414091587,
-0.03545349836349487,
0.09119643270969391,
0.02494012750685215,
0.009631481021642685,
-0.008752365596592426,
-0.09005606919527054,
-0.002019469393417239,
0.029151849448680878,
0.04230629280209541,
-0.04428693279623985,
-0.09889218211174011,
-0.006772632244974375
] |
RajSang/pegasus-sports-titles | 6bfbb3f6138b4b573ca80d4051b245868a1bf84e | 2022-05-09T09:26:14.000Z | [
"pytorch",
"tensorboard",
"pegasus",
"text2text-generation",
"en",
"transformers",
"generated_from_trainer",
"autotrain_compatible"
] | text2text-generation | false | RajSang | null | RajSang/pegasus-sports-titles | 1,185 | 1 | transformers | ---
tags:
- generated_from_trainer
widget:
- text: "Coutinho was just about to be introduced by Villa boss Gerrard midway through the second half when Bruno Fernandes slammed home
his second goal of the game off the underside of the bar. But the Brazilian proved the catalyst for a memorable response.
First he drove at the United defence, helping to create the space which Jacob Ramsey exploited to halve the deficit. Then Ramsey slid over an excellent
cross from the left which Raphael Varane was unable to intercept as he slid back, leaving Coutinho to finish into an empty net.
The goal brought celebrations at both ends of the pitch as Emiliano Martinez also went into the crowd in relief - it was the Argentine's horrible sixth-minute error that had gifted Fernandes the visitors' opener.
Given his background - with Liverpool, Barcelona and Bayern Munich - Coutinho is a bold loan signing by Villa, and underlines the pedigree of the man they appointed as manager in November.
Gerrard is not at Villa to learn how to avoid relegation.
His demands remain as high as they were as a player and Coutinho's arrival is an example of that.
Villa are a better team since Gerrard's arrival and, after a sluggish start against opponents they dominated but lost to in the FA Cup five days ago, they grew into the game.
The club's other newboy, Lucas Digne, was among those denied by United keeper David de Gea at the end of the first half - in unorthodox fashion, with his knees.
Ollie Watkins did not really test the Spain keeper when Villa broke after Edinson Cavani lost possession in his own half. However, Emi Buendia certainly did with a near-post header. Rooted to his line, De Gea's reactions were up to the job as he beat Buendia's effort away.
When De Gea produced more saves after half-time to deny Ramsey and Digne again, it appeared the image of the night for Villa would be midfielder Morgan Sanson kicking a drinks bottle in fury after his error in gifting Fred possession to set up Fernandes for the visitors' second had been followed immediately by his substitution.
However, as it was the prelude to Coutinho's arrival, it was the moment that changed the course of the game - and the acclaim for the Brazilian at the final whistle indicated Villa's fans are already firmly behind him."
language: en
---
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# pegasus-sports-titles
This model is a Pegasus model fine-tuned on **sports news articles scraped from the internet (for educational purposes only)**. The model can generate titles for sports articles. Try it out using the inference API.
## Model description
A Pegasus model tuned for generating scientific titles has been further fine-tuned to generate titles for sports articles. During training, articles on **Tennis, Football (Soccer), Cricket, Athletics and Rugby** were used to generate titles. I experimented with training the tokenizer from scratch, but it did not give good results compared to the pre-trained tokenizer.
## Usage
```python
from transformers import pipeline
# Feel free to play around with the generation parameters.
# Reduce the beam width for faster inference.
# Note that the maximum length for the generated titles is 64.
gen_kwargs = {"length_penalty": 0.6, "num_beams":4, "num_return_sequences": 4,"num_beam_groups":4,"diversity_penalty":2.0}
pipe = pipeline("summarization", model="RajSang/pegasus-sports-titles")
# Change the article according to your wish
article="""
Coutinho was just about to be introduced by Villa boss Gerrard midway through the second half when Bruno Fernandes slammed home
his second goal of the game off the underside of the bar. But the Brazilian proved the catalyst for a memorable response.
First he drove at the United defence, helping to create the space which Jacob Ramsey exploited to halve the deficit. Then Ramsey slid over an excellent
cross from the left which Raphael Varane was unable to intercept as he slid back, leaving Coutinho to finish into an empty net.
The goal brought celebrations at both ends of the pitch as Emiliano Martinez also went into the crowd in relief - it was the Argentine's horrible sixth-minute error that had gifted Fernandes the visitors' opener.
Given his background - with Liverpool, Barcelona and Bayern Munich - Coutinho is a bold loan signing by Villa, and underlines the pedigree of the man they appointed as manager in November.
Gerrard is not at Villa to learn how to avoid relegation.
His demands remain as high as they were as a player and Coutinho's arrival is an example of that.
Villa are a better team since Gerrard's arrival and, after a sluggish start against opponents they dominated but lost to in the FA Cup five days ago, they grew into the game.
The club's other newboy, Lucas Digne, was among those denied by United keeper David de Gea at the end of the first half - in unorthodox fashion, with his knees.
Ollie Watkins did not really test the Spain keeper when Villa broke after Edinson Cavani lost possession in his own half. However, Emi Buendia certainly did with a near-post header. Rooted to his line, De Gea's reactions were up to the job as he beat Buendia's effort away.
When De Gea produced more saves after half-time to deny Ramsey and Digne again, it appeared the image of the night for Villa would be midfielder Morgan Sanson kicking a drinks bottle in fury after his error in gifting Fred possession to set up Fernandes for the visitors' second had been followed immediately by his substitution.
However, as it was the prelude to Coutinho's arrival, it was the moment that changed the course of the game - and the acclaim for the Brazilian at the final whistle indicated Villa's fans are already firmly behind him.
"""
result=pipe(article, **gen_kwargs)[0]["summary_text"]
print(result)
''' Output
Title 1 :
Coutinho's arrival sparks Villa comeback
Title 2 :
Philippe Coutinho marked his debut for Aston Villa with a goal and an assist as Steven Gerrard's side came from two goals down to draw with Manchester United.
Title 3 :
Steven Gerrard's first game in charge of Aston Villa ended in a dramatic draw against Manchester United - but it was the arrival of Philippe Coutinho that marked the night.
Title 4 :
Liverpool loanee Philippe Coutinho marked his first appearance for Aston Villa with two goals as Steven Gerrard's side came from two goals down to draw 2-2.'''
```
## Training procedure
During training, **short titles were combined with the article subtitles to improve the quality of the generated titles, and the subtitles were removed from the main body of the articles.**
## Limitations
In rare cases, if the opening few lines of a passage or article are descriptive enough, the model often just copies these lines instead of drawing on information further down the article, which may not be desirable in some cases.
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 5e-05
- train_batch_size: 2
- eval_batch_size: 8
- seed: 42
- gradient_accumulation_steps: 8
- total_train_batch_size: 16
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 100
- num_epochs: 2
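As a rough reconstruction (not the authors' actual training script), the settings listed above map onto `Seq2SeqTrainingArguments` roughly as follows; the output directory is a placeholder:
```python
from transformers import Seq2SeqTrainingArguments

# Illustrative mapping of the hyperparameters listed above; not the original script.
training_args = Seq2SeqTrainingArguments(
    output_dir="pegasus-sports-titles",  # placeholder output directory
    learning_rate=5e-5,
    per_device_train_batch_size=2,
    per_device_eval_batch_size=8,
    gradient_accumulation_steps=8,       # effective train batch size of 16
    warmup_steps=100,
    lr_scheduler_type="linear",
    num_train_epochs=2,
    seed=42,
)
```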
### Training results
**Rouge1: 38.2315**
**Rouge2: 18.6598**
**RougeL: 31.7393**
**RougeLsum: 31.7086**
### Framework versions
- Transformers 4.15.0
- Pytorch 1.10.0+cu111
- Datasets 1.17.0
- Tokenizers 0.10.3
| [
0.018062802031636238,
0.02977808564901352,
-0.07513448596000671,
-0.005631963722407818,
0.08585456758737564,
0.04288526251912117,
0.07128555327653885,
0.07861965894699097,
0.07334683835506439,
0.010454297997057438,
0.050037775188684464,
-0.05692846328020096,
-0.03621998056769371,
0.07102767378091812,
0.009010237641632557,
-0.04364822059869766,
-0.047100573778152466,
-0.04913260415196419,
-0.019405284896492958,
0.03890084847807884,
0.03280889242887497,
-0.017774777486920357,
-0.043952226638793945,
0.023114213719964027,
-0.05792328715324402,
-0.03998691588640213,
0.09319785982370377,
-0.024326875805854797,
-0.03027164936065674,
-0.013658186420798302,
0.05033469572663307,
-0.023597726598381996,
0.06545327603816986,
-0.0012720898957923055,
-0.0641162171959877,
0.04167914018034935,
0.10205422341823578,
-0.048186250030994415,
0.02935854345560074,
0.07231506705284119,
-0.06768881529569626,
-0.03890137001872063,
-0.018494248390197754,
0.07458262890577316,
0.06100693717598915,
-0.015381395816802979,
0.10122618824243546,
-0.031903304159641266,
-0.026904910802841187,
-0.02989271469414234,
-0.04469119757413864,
0.06374763697385788,
-0.0239014383405447,
-0.03053423762321472,
-0.03543131425976753,
0.11661582440137863,
-0.00314854527823627,
-0.009729939512908459,
0.02277282066643238,
-0.009938064031302929,
0.03961064666509628,
-0.09153394401073456,
0.03341931104660034,
-0.02003634162247181,
-0.027608597651124,
-0.09151184558868408,
0.000010823829143191688,
-0.02634991891682148,
-0.023377766832709312,
0.07219168543815613,
0.0942358747124672,
-0.054681047797203064,
-0.023689163848757744,
-0.07955081015825272,
0.03191980719566345,
0.059981852769851685,
-0.04070238023996353,
0.05468418449163437,
-0.012867588549852371,
-0.0661788359284401,
-0.011236793361604214,
-0.01146507728844881,
-0.0173584446310997,
-0.04718329384922981,
-0.0027458551339805126,
0.045931000262498856,
-0.01963317207992077,
-0.023923855274915695,
0.048448994755744934,
0.014204079285264015,
0.09572383761405945,
0.03532903641462326,
-0.0201727282255888,
0.0457213893532753,
0.020374266430735588,
0.028459137305617332,
0.127736896276474,
-0.009214263409376144,
-0.015842383727431297,
0.05181233957409859,
-0.0192495658993721,
0.10583331435918808,
-0.08354439586400986,
0.032111383974552155,
0.03964614495635033,
-0.051975518465042114,
0.09356870502233505,
0.08250366151332855,
-0.021702999249100685,
0.027214614674448967,
-0.09601812064647675,
0.07943233102560043,
-0.12888950109481812,
0.040950946509838104,
-0.046985287219285965,
-0.02235153689980507,
0.021192915737628937,
0.03724076226353645,
0.02490033395588398,
-0.0022659965325146914,
0.05510324239730835,
0.03450186550617218,
-0.061504825949668884,
0.030362410470843315,
0.0030475398525595665,
0.060217272490262985,
-0.001067830715328455,
5.494898301428693e-33,
0.04028012603521347,
0.0680622085928917,
-0.030895253643393517,
0.023920293897390366,
-0.05693308636546135,
0.03225986286997795,
-0.030702494084835052,
0.023735707625746727,
-0.017148056998848915,
-0.017866749316453934,
-0.054677851498126984,
-0.03994269296526909,
-0.01922643929719925,
-0.01282233651727438,
0.015221856534481049,
0.06451921164989471,
-0.08187165856361389,
-0.012090252712368965,
-0.018207624554634094,
0.014941629022359848,
0.040613699704408646,
0.11398611962795258,
0.03044819086790085,
-0.028452811762690544,
0.0114430608227849,
0.015486149117350578,
-0.034276459366083145,
-0.061036497354507446,
-0.05536928027868271,
-0.025253308936953545,
-0.018356673419475555,
-0.012708235532045364,
0.006135433912277222,
0.030503664165735245,
-0.0647238940000534,
-0.016085341572761536,
-0.07855436950922012,
-0.06013153865933418,
-0.054048288613557816,
0.0075730811804533005,
-0.05977341905236244,
-0.009577914141118526,
-0.049435678869485855,
-0.03041650913655758,
0.01984528638422489,
-0.034421443939208984,
-0.040994793176651,
0.010061988607048988,
-0.1109500601887703,
-0.04172342270612717,
-0.04500194266438484,
0.0628194510936737,
0.06611987203359604,
0.040818218141794205,
-0.046135157346725464,
-0.01911393739283085,
-0.002308758907020092,
-0.011843445710837841,
0.03432510793209076,
-0.033440954983234406,
0.03891163319349289,
-0.023044630885124207,
0.00730129424482584,
0.059444598853588104,
-0.03576348349452019,
-0.03310000151395798,
0.035368651151657104,
0.029553577303886414,
0.058263178914785385,
-0.0359986387193203,
-0.07886409014463425,
-0.01651657558977604,
-0.020093543455004692,
0.08082818239927292,
-0.018513886258006096,
-0.07892335206270218,
-0.03473493456840515,
-0.0021807875018566847,
0.06544316560029984,
-0.016653569415211678,
0.013562148436903954,
-0.005399141926318407,
0.03363567218184471,
-0.013723126612603664,
0.04279319569468498,
0.09113192558288574,
-0.007046350743621588,
-0.009827305562794209,
-0.07278234511613846,
0.07058651745319366,
0.01856876350939274,
0.046094756573438644,
0.003991435281932354,
-0.039713047444820404,
0.047657549381256104,
-6.499818136371097e-33,
-0.016490498557686806,
-0.05862743780016899,
-0.017891261726617813,
-0.03260275349020958,
-0.049025703221559525,
0.04462146759033203,
-0.03352125734090805,
-0.046016670763492584,
-0.057825785130262375,
-0.06671582907438278,
0.013644855469465256,
-0.01807240955531597,
-0.01862114667892456,
-0.043309953063726425,
-0.1587865650653839,
-0.05020611360669136,
0.06551952660083771,
-0.02804616279900074,
-0.01705949753522873,
0.01607239805161953,
0.12649360299110413,
-0.0006623142398893833,
0.019915591925382614,
-0.09281899034976959,
-0.07298325002193451,
0.022333599627017975,
0.10044452548027039,
0.041129253804683685,
-0.1046360656619072,
0.018590571358799934,
-0.006945323199033737,
-0.005772710777819157,
0.049132347106933594,
-0.07433847337961197,
0.023923173546791077,
0.03503844141960144,
-0.1359153687953949,
0.04824492335319519,
0.01381593942642212,
0.11154154688119888,
0.03587634116411209,
-0.04222390428185463,
-0.005394339095801115,
-0.004497462417930365,
0.019144587218761444,
0.022296102717518806,
-0.028825124725699425,
-0.017065051943063736,
-0.016009964048862457,
0.11109574139118195,
0.03170391917228699,
0.006673772819340229,
-0.18268732726573944,
0.043345943093299866,
0.053616657853126526,
-0.06413700431585312,
-0.06445828080177307,
-0.06314888596534729,
0.04121515899896622,
-0.009799406863749027,
-0.01757875643670559,
0.08376496285200119,
-0.06185358390212059,
-0.014634605497121811,
0.04716121777892113,
0.03343350440263748,
-0.015664026141166687,
0.013815364800393581,
0.03685265779495239,
0.005076123867183924,
-0.02627504989504814,
-0.018853595480322838,
0.04664631187915802,
0.01553508173674345,
-0.03785186633467674,
0.0010324250906705856,
-0.07105734944343567,
-0.042322419583797455,
0.03426272049546242,
-0.04320863261818886,
-0.044270824640989304,
-0.08139026910066605,
0.032272789627313614,
0.06501083076000214,
0.005143019836395979,
-0.012591215781867504,
0.05358486250042915,
0.0036153937689960003,
-0.030352536588907242,
0.0075725773349404335,
0.044301386922597885,
-0.027910713106393814,
0.017072811722755432,
0.04556576535105705,
0.015764907002449036,
-6.49508393735232e-8,
-0.03779124841094017,
-0.02819961868226528,
0.003360005794093013,
-0.049321744590997696,
0.011722608469426632,
0.00806139875203371,
-0.09514451771974564,
-0.08545585721731186,
0.054413676261901855,
0.003690466983243823,
0.06093969941139221,
-0.006768146064132452,
-0.01670820824801922,
-0.03933592885732651,
-0.05375136807560921,
0.058825425803661346,
-0.07380238175392151,
0.027668852359056473,
-0.0033141709864139557,
0.02117529883980751,
-0.07640762627124786,
0.007710565812885761,
-0.09147657454013824,
0.02848328836262226,
0.1158505454659462,
-0.012818478047847748,
-0.09920377284288406,
0.103480763733387,
0.007034915965050459,
-0.09580884128808975,
0.06788423657417297,
-0.04750204458832741,
0.11698784679174423,
0.05925160273909569,
-0.0013466712553054094,
0.05673546716570854,
0.022241050377488136,
-0.05837578698992729,
0.012671472504734993,
-0.08124762773513794,
0.007765755522996187,
-0.04611764848232269,
-0.05638542026281357,
0.024217212572693825,
0.01294386014342308,
-0.021129976958036423,
-0.046180468052625656,
0.019290588796138763,
-0.0838061273097992,
-0.02620871551334858,
0.010762350633740425,
0.0006396102253347635,
0.08755268901586533,
0.0728725716471672,
0.0682642012834549,
0.0005156814004294574,
-0.04020865261554718,
0.0005707573727704585,
-0.045963454991579056,
0.07092065364122391,
-0.011019677855074406,
-0.03444987162947655,
-0.03148943558335304,
0.001871163141913712
] |
indobenchmark/indobart-v2 | 7192ee75ba70ca247c7abfb8e7268588145c0bde | 2022-06-21T17:52:37.000Z | [
"pytorch",
"mbart",
"text2text-generation",
"id",
"dataset:Indo4B+",
"arxiv:2104.08200",
"transformers",
"indogpt",
"indobenchmark",
"indonlg",
"license:mit",
"autotrain_compatible"
] | text2text-generation | false | indobenchmark | null | indobenchmark/indobart-v2 | 1,183 | 3 | transformers | ---
language: id
tags:
- indogpt
- indobenchmark
- indonlg
license: mit
inference: false
datasets:
- Indo4B+
---
# IndoBART-v2 Model
[IndoBART-v2](https://arxiv.org/abs/2104.08200) is a state-of-the-art language model for Indonesian based on the BART model. The model is pretrained using the BART training objective.
## All Pre-trained Models
| Model | #params | Training data |
|--------------------------------|--------------------------------|-----------------------------------|
| `indobenchmark/indobart-v2` | 132M | Indo4B-Plus (26 GB of text) |
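A minimal usage sketch (not from the original card). It assumes the checkpoint loads with the standard MBart classes and that `AutoTokenizer` resolves the tokenizer shipped with the repository; the IndoNLG authors may also provide a dedicated tokenizer in their toolkit, so adjust accordingly. The input sentence is a placeholder:
```python
from transformers import AutoTokenizer, MBartForConditionalGeneration

# Sketch only: assumes AutoTokenizer can load the tokenizer bundled with this repo.
tokenizer = AutoTokenizer.from_pretrained("indobenchmark/indobart-v2")
model = MBartForConditionalGeneration.from_pretrained("indobenchmark/indobart-v2")

inputs = tokenizer("aku suka membaca buku", return_tensors="pt")  # placeholder Indonesian text
outputs = model.generate(**inputs, max_length=40, num_beams=4)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```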
## Authors
<b>IndoBART</b> was trained and evaluated by Samuel Cahyawijaya*, Genta Indra Winata*, Bryan Wilie*, Karissa Vincentio*, Xiaohong Li*, Adhiguna Kuncoro*, Sebastian Ruder, Zhi Yuan Lim, Syafri Bahar, Masayu Leylia Khodra, Ayu Purwarianti, Pascale Fung
## Citation
If you use our work, please cite:
```bibtex
@article{cahyawijaya2021indonlg,
title={IndoNLG: Benchmark and Resources for Evaluating Indonesian Natural Language Generation},
author={Cahyawijaya, Samuel and Winata, Genta Indra and Wilie, Bryan and Vincentio, Karissa and Li, Xiaohong and Kuncoro, Adhiguna and Ruder, Sebastian and Lim, Zhi Yuan and Bahar, Syafri and Khodra, Masayu Leylia and others},
journal={arXiv preprint arXiv:2104.08200},
year={2021}
}
```
| [
0.007228884380310774,
-0.04523831605911255,
0.02544442191720009,
-0.043247271329164505,
-0.08791975677013397,
0.08130671083927155,
-0.04252803325653076,
0.03538115322589874,
0.06431493163108826,
0.024474043399095535,
0.11109151691198349,
-0.06788541376590729,
-0.02921927720308304,
-0.039448004215955734,
0.016123240813612938,
-0.014629442244768143,
0.062321461737155914,
-0.04555699601769447,
-0.02068549580872059,
-0.10294759273529053,
0.04472507908940315,
0.026373736560344696,
0.01866925321519375,
-0.07303312420845032,
0.03991584852337837,
0.025863684713840485,
0.005006191786378622,
-0.07932944595813751,
0.027487987652420998,
-0.03977109491825104,
0.010289715602993965,
0.07474636286497116,
0.02846948616206646,
0.03334609791636467,
-0.06417959928512573,
-0.005568970926105976,
-0.008613208308815956,
-0.03323275223374367,
0.08472783863544464,
0.03531969338655472,
-0.023112857714295387,
-0.01093858852982521,
-0.05666142702102661,
-0.016493916511535645,
0.12258787453174591,
-0.0017463760450482368,
-0.038412317633628845,
0.0056921932846307755,
-0.02884659171104431,
-0.06028773635625839,
-0.09867855161428452,
-0.025807149708271027,
-0.020754002034664154,
0.061158519238233566,
0.029464323073625565,
-0.1019899919629097,
-0.004430998116731644,
0.06250403821468353,
0.018303275108337402,
0.02425389736890793,
-0.07397318631410599,
-0.0035569199826568365,
-0.08261991292238235,
0.03618491441011429,
-0.032697681337594986,
0.014590269885957241,
-0.027186639606952667,
0.04776228591799736,
-0.0332280695438385,
0.04917512461543083,
-0.04243108257651329,
0.08391324430704117,
-0.005853379610925913,
0.060061633586883545,
-0.07467278838157654,
-0.03642997145652771,
0.08699508756399155,
0.006272804923355579,
0.0654386356472969,
-0.12007863819599152,
-0.005235414020717144,
0.03163152560591698,
0.0589493103325367,
-0.007061167154461145,
-0.00792438443750143,
0.0008567098993808031,
0.011507462710142136,
-0.00661622965708375,
-0.009481843560934067,
-0.023243991658091545,
0.042567960917949677,
-0.036472611129283905,
0.0005980771384201944,
0.005447051487863064,
-0.011635370552539825,
0.04971407726407051,
-0.02751776948571205,
0.04584168270230293,
-0.010621367022395134,
0.05343952775001526,
0.08904649317264557,
0.10403025150299072,
-0.013121044263243675,
-0.06053294613957405,
-0.05420646443963051,
-0.04333239048719406,
-0.01814005896449089,
-0.008964205160737038,
0.11712093651294708,
0.0023592780344188213,
-0.01970462128520012,
-0.017852481454610825,
-0.05633091926574707,
-0.007072103209793568,
-0.05859815329313278,
0.024318531155586243,
-0.023629790171980858,
-0.011013670824468136,
-0.006460657808929682,
0.009932130575180054,
-0.04940061643719673,
-0.024437198415398598,
0.034722547978162766,
-0.0007814804557710886,
-0.09845402836799622,
-0.06685426831245422,
0.0016491415444761515,
-3.128666009447845e-33,
0.021978633478283882,
-0.031615015119314194,
0.0030083723831921816,
-0.03310219943523407,
-0.007602353114634752,
-0.04446617141366005,
-0.028627410531044006,
-0.06410868465900421,
-0.036223407834768295,
-0.06976616382598877,
-0.04999435320496559,
-0.03227504342794418,
-0.10993936657905579,
0.04832177236676216,
0.048653244972229004,
0.044610682874917984,
-0.08020466566085815,
0.04329977557063103,
0.016652535647153854,
-0.0034148578997701406,
0.07886329293251038,
0.02115429751574993,
0.044588398188352585,
-0.07535682618618011,
0.01824105717241764,
0.023176051676273346,
0.09821977466344833,
-0.07399561256170273,
-0.0633576512336731,
0.041053805500268936,
-0.09262364357709885,
-0.0200865026563406,
-0.03742547705769539,
0.020783482119441032,
-0.09233038127422333,
-0.03106577694416046,
0.014621331356465816,
-0.08134282380342484,
-0.04643679037690163,
-0.07537391036748886,
0.01779279299080372,
0.042929209768772125,
0.08950760215520859,
-0.04847728833556175,
-0.01126168854534626,
-0.053270068019628525,
-0.005344220902770758,
-0.06834481656551361,
-0.05069568008184433,
0.1169629767537117,
-0.05939347296953201,
0.005027547478675842,
0.02437453344464302,
0.024434443563222885,
0.03875918686389923,
0.0332791693508625,
0.04160138964653015,
0.01563972234725952,
0.010914456099271774,
0.09664111584424973,
-0.00916161946952343,
-0.04104776680469513,
0.061423175036907196,
0.03030245192348957,
0.04275558516383171,
0.057264022529125214,
-0.03651652857661247,
-0.03969031572341919,
0.04074559733271599,
-0.019276626408100128,
-0.04918153956532478,
-0.0781152993440628,
0.07335121929645538,
0.033377163112163544,
-0.002701924880966544,
-0.03593171387910843,
0.03147239238023758,
-0.06372454762458801,
-0.07653222978115082,
0.030415434390306473,
-0.06432929635047913,
-0.06616199016571045,
0.009280114434659481,
-0.039897654205560684,
0.011058851145207882,
-0.011922894977033138,
0.046426866203546524,
-0.059167832136154175,
-0.03332686051726341,
0.0028915363363921642,
0.005593637935817242,
0.027734631672501564,
-0.06512287259101868,
0.022343376651406288,
0.028753813356161118,
-1.796006410881687e-33,
-0.029417339712381363,
0.07147152721881866,
-0.10898230224847794,
0.014323202893137932,
-0.07422913610935211,
-0.051725275814533234,
0.058187101036310196,
0.12183425575494766,
-0.060860149562358856,
-0.010520813055336475,
-0.01287245936691761,
-0.07262174040079117,
0.07070109993219376,
0.048366766422986984,
0.05017821863293648,
-0.0015720351366326213,
0.0217584278434515,
0.08045531809329987,
0.019190941005945206,
0.051036983728408813,
0.019969329237937927,
0.07094299048185349,
-0.0906677171587944,
0.09269580245018005,
0.05907168984413147,
0.02500283159315586,
-0.027973055839538574,
0.06039818748831749,
-0.08532106876373291,
0.009948676452040672,
0.018321922048926353,
-0.06472322344779968,
-0.073592409491539,
0.021463291719555855,
-0.09557932615280151,
-0.06531402468681335,
0.064393050968647,
-0.014873724430799484,
-0.043607499450445175,
0.10934878140687943,
0.07245304435491562,
0.07980773597955704,
-0.09457356482744217,
-0.027266159653663635,
-0.07196758687496185,
-0.014332395978271961,
-0.054757047444581985,
-0.037866972386837006,
0.06089239567518234,
-0.12990282475948334,
0.04902161657810211,
0.019381770864129066,
-0.0028423627372831106,
-0.01214667223393917,
-0.05422458425164223,
-0.06180134415626526,
0.00217178906314075,
-0.007851161994040012,
-0.02488681487739086,
-0.06008649989962578,
-0.07799610495567322,
-0.047852639108896255,
0.05311845242977142,
-0.01066532451659441,
0.047367557883262634,
-0.033171601593494415,
0.05096074566245079,
0.06979476660490036,
0.009973104111850262,
-0.11225832998752594,
0.013637148775160313,
0.0030933560337871313,
-0.003933566156774759,
0.06595964729785919,
0.012210307642817497,
0.012557792477309704,
-0.029744796454906464,
-0.03713473677635193,
0.022211596369743347,
-0.04835693910717964,
-0.004109796602278948,
-0.018276743590831757,
0.05746049806475639,
0.013788056559860706,
0.028169142082333565,
0.05767945200204849,
-0.0354989692568779,
0.028441371396183968,
-0.023262949660420418,
0.02737240679562092,
0.017817625775933266,
0.06649457663297653,
-0.0331440195441246,
0.09170963615179062,
-0.028476236388087273,
-4.8594600343676575e-8,
-0.025499247014522552,
-0.09005990624427795,
0.034820254892110825,
0.06488081812858582,
-0.018388355150818825,
-0.04955723136663437,
-0.02233153022825718,
0.038965724408626556,
-0.08963500708341599,
-0.05150600150227547,
-0.00658917473629117,
0.04169095307588577,
-0.10185767710208893,
0.037783194333314896,
0.00663274759426713,
0.020610271021723747,
0.018752600997686386,
0.05890114977955818,
-0.027977950870990753,
-0.04962126165628433,
0.06861238181591034,
0.006261192727833986,
0.053763434290885925,
0.004584335722029209,
0.03534252941608429,
-0.014053881168365479,
-0.013117571361362934,
0.08465835452079773,
0.03355637937784195,
-0.06728348881006241,
-0.0195052158087492,
0.03533357009291649,
-0.08101548999547958,
-0.02159368060529232,
0.01870041713118553,
0.0325729176402092,
-0.008018690161406994,
-0.014651154167950153,
-0.02671152353286743,
0.10352945327758789,
-0.0006284216069616377,
-0.0606943741440773,
-0.04989895224571228,
0.0066103870049119,
0.1052154079079628,
0.02990179881453514,
-0.005050175357609987,
-0.08080577105283737,
0.038376372307538986,
-0.04580919072031975,
-0.024379286915063858,
0.01717780902981758,
0.021020710468292236,
-0.06745119392871857,
-0.0225672647356987,
0.03469468280673027,
-0.08289656043052673,
0.04210997000336647,
0.017079928889870644,
0.00941448099911213,
0.11811979115009308,
0.0034170111175626516,
-0.009791006334125996,
0.04097539931535721
] |
textattack/xlnet-base-cased-SST-2 | 9ceeb077dcd5cf5ae790572b2bd6aec755a263be | 2020-06-09T16:56:53.000Z | [
"pytorch",
"xlnet",
"text-classification",
"transformers"
] | text-classification | false | textattack | null | textattack/xlnet-base-cased-SST-2 | 1,183 | 2 | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
facebook/mcontriever-msmarco | 9ff6abed2c2fdf32bbbd8b4e98fb10160e317375 | 2022-05-29T08:50:51.000Z | [
"pytorch",
"bert",
"transformers"
] | null | false | facebook | null | facebook/mcontriever-msmarco | 1,183 | null | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
IDEA-CCNL/Erlangshen-Ubert-330M-Chinese | 13a559f940c1dec0d06812a453c9c79c1ba3c523 | 2022-07-02T13:41:32.000Z | [
"pytorch",
"bert",
"fill-mask",
"zh",
"transformers",
"NLU",
"Sentiment",
"Chinese",
"license:apache-2.0",
"autotrain_compatible"
] | fill-mask | false | IDEA-CCNL | null | IDEA-CCNL/Erlangshen-Ubert-330M-Chinese | 1,180 | null | transformers | ---
language:
- zh
license: apache-2.0
tags:
- bert
- NLU
- Sentiment
- Chinese
inference: false
---
# Erlangshen-Ubert-330M-Chinese, one model of [Fengshenbang-LM](https://github.com/IDEA-CCNL/Fengshenbang-LM/tree/dev/yangping/fengshen/examples/ubert).
We collected 70+ datasets in the Chinese domain for fine-tuning, with a total of 1,065,069 samples. Our model is mainly based on [MacBERT](https://huggingface.co/hfl/chinese-macbert-base).
Ubert is the solution we proposed for the [2022 AIWIN Competition](http://ailab.aiwin.org.cn/competitions/68#results), where it achieved **<font color=#FF0000 >first place on both the A and B leaderboards</font>**, an improvement of 20 percentage points over the officially provided baseline. Ubert can not only handle common extraction tasks such as entity recognition and event extraction, but also classification tasks such as news classification and natural language inference.
**<font color=#FF0000 >More details are available in our [GitHub repository](https://github.com/IDEA-CCNL/Fengshenbang-LM/tree/dev/yangping/fengshen/examples/ubert)</font>**
## Usage
Install the `fengshen` package from source:
```bash
git clone https://github.com/IDEA-CCNL/Fengshenbang-LM.git
cd Fengshenbang-LM
pip install --editable ./
```
Then run the following code:
```python
import argparse
from fengshen import UbertPiplines
total_parser = argparse.ArgumentParser("TASK NAME")
total_parser = UbertPiplines.piplines_args(total_parser)
args = total_parser.parse_args()
args.pretrained_model_path = "IDEA-CCNL/Erlangshen-Ubert-330M-Chinese"
test_data=[
{
"task_type": "抽取任务",
"subtask_type": "实体识别",
"text": "这也让很多业主据此认为,雅清苑是政府公务员挤对了国家的经适房政策。",
"choices": [
{"entity_type": "小区名字"},
{"entity_type": "岗位职责"}
],
"id": 0}
]
model = UbertPiplines(args)
result = model.predict(test_data)
for line in result:
print(line)
```
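The same pipeline accepts several samples at once. The sketch below simply reuses the schema from the example above with additional inputs; the sentences and `entity_type` labels are made up for illustration and are not part of the original card.
```python
# Hypothetical batch reusing the extraction schema demonstrated above.
# The texts and entity_type labels are illustrative assumptions.
more_test_data = [
    {
        "task_type": "抽取任务",
        "subtask_type": "实体识别",
        "text": "小明毕业于北京大学,现在在杭州工作。",
        "choices": [
            {"entity_type": "人名"},
            {"entity_type": "学校名称"},
            {"entity_type": "城市"}
        ],
        "id": 1
    },
    {
        "task_type": "抽取任务",
        "subtask_type": "实体识别",
        "text": "华为公司于1987年在深圳成立。",
        "choices": [
            {"entity_type": "公司名称"},
            {"entity_type": "城市"}
        ],
        "id": 2
    }
]
# `model` is the UbertPiplines instance built in the snippet above.
for line in model.predict(more_test_data):
    print(line)
```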
If you find this resource useful, please cite the following repository in your paper.
```
@misc{Fengshenbang-LM,
title={Fengshenbang-LM},
author={IDEA-CCNL},
year={2021},
howpublished={\url{https://github.com/IDEA-CCNL/Fengshenbang-LM}},
}
```
| [
-0.09546367079019547,
-0.016946125775575638,
0.09274153411388397,
-0.0021732947789132595,
0.04695427417755127,
0.01606973260641098,
0.01561257615685463,
-0.00951252318918705,
0.01887461729347706,
-0.005719163455069065,
0.04846411943435669,
-0.07673538476228714,
0.03563614562153816,
-0.024675926193594933,
0.053875233978033066,
0.09751372784376144,
0.056343454867601395,
-0.04003152996301651,
-0.08128701895475388,
-0.06905689090490341,
0.0015891932416707277,
0.0087226377800107,
0.04485492780804634,
-0.02421785332262516,
0.015598773024976254,
-0.08842377364635468,
-0.05122092366218567,
0.05546092614531517,
0.14131894707679749,
-0.04531719535589218,
0.0229951199144125,
0.11569765955209732,
0.005365172866731882,
0.06968788057565689,
0.012447016313672066,
0.049030181020498276,
-0.015558063052594662,
-0.04499467462301254,
-0.013307694345712662,
0.014021272771060467,
0.06774827092885971,
0.05181868001818657,
0.0030558614525943995,
-0.06902205944061279,
0.08486442267894745,
0.010896611027419567,
-0.03914947807788849,
-0.05894208699464798,
-0.06274241209030151,
0.028541920706629753,
-0.07108655571937561,
0.0018004736630246043,
0.03453836962580681,
0.03949486091732979,
-0.013841076754033566,
-0.017064739018678665,
0.00592875387519598,
-0.054054144769907,
-0.01476122997701168,
-0.050707608461380005,
-0.06612192094326019,
0.0168126430362463,
-0.061743877828121185,
-0.0036994435358792543,
-0.014996839687228203,
0.01007018517702818,
-0.06145637854933739,
0.05693330988287926,
0.024990731850266457,
0.03318667784333229,
0.03940736502408981,
-0.018211813643574715,
-0.037488117814064026,
0.04670140519738197,
-0.010878370143473148,
0.02059534192085266,
0.030717696994543076,
-0.06757737696170807,
-0.04171472042798996,
-0.10603241622447968,
-0.04313909262418747,
-0.005933783482760191,
0.11982987821102142,
0.0003864048048853874,
0.06758223474025726,
-0.05800757557153702,
-0.013513213954865932,
0.03940383344888687,
-0.025730570778250694,
0.0440366193652153,
-0.010618875734508038,
-0.031081680208444595,
0.06430984288454056,
0.06558214873075485,
0.048152513802051544,
0.08988279849290848,
0.07919787615537643,
-0.041938114911317825,
-0.02012011967599392,
0.10882508754730225,
0.00840452965348959,
0.12819936871528625,
0.001416454673744738,
-0.1421201080083847,
-0.03591015189886093,
-0.032291099429130554,
0.020580686628818512,
0.008142226375639439,
0.04329817369580269,
-0.08153539896011353,
0.027423933148384094,
-0.019209569320082664,
0.016821125522255898,
-0.04897313565015793,
-0.015121391974389553,
-0.04666009545326233,
-0.009082391858100891,
-0.009855695068836212,
-0.026274170726537704,
0.003637598128989339,
-0.05457266420125961,
0.02955164574086666,
0.015673190355300903,
0.032597195357084274,
0.04351746290922165,
0.014699499122798443,
-0.056774672120809555,
2.6699067475073927e-33,
0.05372398719191551,
0.014319117181003094,
-0.02089363895356655,
-0.027133431285619736,
-0.0011869046138599515,
-0.034291595220565796,
-0.04583621025085449,
0.012208524160087109,
-0.08757837116718292,
-0.0204768106341362,
-0.03701373562216759,
0.017832359299063683,
-0.0908297449350357,
0.012332841753959656,
0.08464566618204117,
-0.016915693879127502,
-0.01821223460137844,
0.019960341975092888,
0.0029681348241865635,
0.017934570088982582,
0.05890956521034241,
-0.029639432206749916,
-0.030365338549017906,
-0.10413964092731476,
-0.040780793875455856,
0.014527186751365662,
0.10348597168922424,
-0.018765889108181,
-0.03653046861290932,
0.04550101235508919,
-0.11045613884925842,
0.05430404469370842,
-0.006961040198802948,
0.061311811208724976,
-0.05030551925301552,
0.004094588104635477,
-0.014775902032852173,
-0.041813336312770844,
-0.014675065875053406,
-0.0028312555514276028,
0.0010482996003702283,
0.0694407969713211,
0.022438066080212593,
-0.07151498645544052,
-0.040302176028490067,
0.023349424824118614,
-0.012980516068637371,
-0.04713040590286255,
0.03677060827612877,
0.06312566250562668,
-0.01617385819554329,
0.015128249302506447,
-0.015812138095498085,
0.038161709904670715,
-0.041756320744752884,
0.01214828621596098,
0.0011580850696191192,
-0.04022781923413277,
0.0478685162961483,
0.06748136878013611,
-0.0038717922288924456,
-0.04340736195445061,
0.014462942257523537,
0.031908515840768814,
0.029130345210433006,
0.043999433517456055,
-0.020129235461354256,
-0.0277200099080801,
-0.021825773641467094,
0.0016937657492235303,
-0.01317866425961256,
-0.04415455088019371,
0.029111061245203018,
-0.039816223084926605,
0.0612185075879097,
-0.05261099711060524,
-0.016586028039455414,
-0.11323029547929764,
0.012817331589758396,
0.014624054543673992,
-0.006360316649079323,
-0.014046508818864822,
-0.052018050104379654,
-0.029885759577155113,
-0.023399749770760536,
-0.042274799197912216,
0.05722025781869888,
-0.0288949366658926,
-0.004443024750798941,
-0.019004840403795242,
-0.007876282557845116,
-0.041888266801834106,
0.023106882348656654,
-0.01386767253279686,
-0.0981997698545456,
-2.9781584678334443e-33,
-0.026979763060808182,
0.028325650840997696,
-0.08986418694257736,
0.04767313227057457,
0.0038073216564953327,
-0.08087427914142609,
0.07440225780010223,
0.11115746945142746,
0.005693294107913971,
-0.03486743941903114,
0.04847503826022148,
0.003812092589214444,
0.014127054251730442,
-0.0022334831301122904,
0.00913150142878294,
-0.034436553716659546,
0.036901988089084625,
0.050016142427921295,
0.022678472101688385,
0.08457329124212265,
0.017120301723480225,
-0.0036568217910826206,
-0.13766738772392273,
0.04502486437559128,
0.026008078828454018,
0.12578724324703217,
-0.06533414870500565,
0.02511843852698803,
-0.019906695932149887,
0.03105086274445057,
-0.05361122637987137,
0.005941500887274742,
-0.07455413788557053,
-0.022377068176865578,
-0.03111017681658268,
0.032387614250183105,
0.00045850060996599495,
-0.020191796123981476,
-0.049636125564575195,
0.09803401678800583,
0.08363143354654312,
-0.023709792643785477,
-0.08389642834663391,
0.010457142256200314,
0.015730546787381172,
0.033001262694597244,
-0.09961652755737305,
-0.03167850151658058,
0.06387647986412048,
-0.04440978541970253,
0.040711045265197754,
0.04377992823719978,
0.004738728981465101,
0.06489302217960358,
-0.053463876247406006,
-0.030518902465701103,
0.03529368340969086,
-0.057610008865594864,
-0.08546581119298935,
-0.006116908974945545,
-0.09855971485376358,
0.073470838367939,
-0.004375559743493795,
-0.011256778612732887,
-0.000542669091373682,
-0.05735527724027634,
0.010328905656933784,
0.00486341817304492,
0.01770494133234024,
-0.06118188425898552,
0.037856634706258774,
0.038040891289711,
0.008350598625838757,
0.029449352994561195,
-0.0311378575861454,
0.030026113614439964,
0.03787098824977875,
-0.03266644477844238,
0.002546709030866623,
-0.07124368101358414,
-0.027417780831456184,
0.03847459331154823,
0.0670345202088356,
-0.0012749640736728907,
-0.013594472780823708,
0.0029530050233006477,
-0.03816748782992363,
0.008917475119233131,
0.031882207840681076,
0.004667121451348066,
0.023999299854040146,
0.0012723481049761176,
-0.00853884220123291,
0.11932964622974396,
-0.024862822145223618,
-6.07909811378704e-8,
-0.11958137154579163,
-0.048191703855991364,
-0.05078753083944321,
0.08809260278940201,
-0.07612784951925278,
-0.027609148994088173,
-0.030480144545435905,
0.04902777448296547,
-0.019478773698210716,
0.00011142378207296133,
0.10657604783773422,
0.04006093740463257,
-0.16199453175067902,
0.058270376175642014,
-0.020395303145051003,
-0.0032675343099981546,
0.011131620034575462,
0.06010163947939873,
-0.03383941575884819,
-0.061689410358667374,
-0.03229087218642235,
0.07828578352928162,
0.04173819720745087,
-0.0644330307841301,
-0.009901149198412895,
-0.008034288883209229,
-0.13082824647426605,
0.05288301035761833,
-0.028749525547027588,
-0.01639341562986374,
0.0027676718309521675,
0.012743866071105003,
-0.08666659891605377,
0.032603226602077484,
0.08887803554534912,
0.04928424209356308,
-0.10769863426685333,
-0.024450503289699554,
0.024691537022590637,
-0.003073203843086958,
0.054854489862918854,
0.04254980757832527,
-0.0784476175904274,
-0.0011007245630025864,
0.1374332457780838,
-0.0787852481007576,
-0.005589512176811695,
-0.10706056654453278,
0.10528627038002014,
0.02629850059747696,
0.05468444526195526,
-0.04547727853059769,
-0.010849036276340485,
0.022317254915833473,
0.030503546819090843,
0.05754224210977554,
-0.07429330796003342,
-0.044266194105148315,
0.010609745047986507,
0.015381850302219391,
0.08733260631561279,
-0.018417224287986755,
0.011343085207045078,
0.038356997072696686
] |
bakrianoo/sinai-voice-ar-stt | 2d226249edf809b01a0e11159d1201ae1704b63c | 2022-03-23T18:25:21.000Z | [
"pytorch",
"wav2vec2",
"automatic-speech-recognition",
"ar",
"dataset:mozilla-foundation/common_voice_8_0",
"transformers",
"hf-asr-leaderboard",
"robust-speech-event",
"license:apache-2.0",
"model-index"
] | automatic-speech-recognition | false | bakrianoo | null | bakrianoo/sinai-voice-ar-stt | 1,179 | 7 | transformers | ---
language:
- ar
license: apache-2.0
tags:
- automatic-speech-recognition
- hf-asr-leaderboard
- robust-speech-event
datasets:
- mozilla-foundation/common_voice_8_0
metrics:
- wer
- cer
model-index:
- name: Sinai Voice Arabic Speech Recognition Model
results:
- task:
type: automatic-speech-recognition
name: Speech Recognition
dataset:
type: mozilla-foundation/common_voice_8_0
name: Common Voice ar
args: ar
metrics:
- type: wer
value: 0.181
name: Test WER
- type: cer
value: 0.049
name: Test CER
- task:
name: Automatic Speech Recognition
type: automatic-speech-recognition
dataset:
name: Robust Speech Event - Dev Data
type: speech-recognition-community-v2/dev_data
args: ar
metrics:
- name: Test WER
type: wer
value: 93.03
- task:
name: Automatic Speech Recognition
type: automatic-speech-recognition
dataset:
name: Robust Speech Event - Test Data
type: speech-recognition-community-v2/eval_data
args: ar
metrics:
- name: Test WER
type: wer
value: 90.79
widget:
- example_title: Example 1
src: https://huggingface.co/bakrianoo/sinai-voice-ar-stt/raw/main/examples/common_voice_ar_19077324.mp3
- example_title: Example 2
src: https://huggingface.co/bakrianoo/sinai-voice-ar-stt/raw/main/examples/common_voice_ar_19205138.mp3
- example_title: Example 3
src: https://huggingface.co/bakrianoo/sinai-voice-ar-stt/raw/main/examples/common_voice_ar_19331711.mp3
---
# Sinai Voice Arabic Speech Recognition Model
# نموذج **صوت سيناء** للتعرف على الأصوات العربية الفصحى و تحويلها إلى نصوص
This model is a fine-tuned version of [facebook/wav2vec2-xls-r-300m](https://huggingface.co/facebook/wav2vec2-xls-r-300m) on the MOZILLA-FOUNDATION/COMMON_VOICE_8_0 - AR dataset.
It achieves the following results on the evaluation set (10,388 samples):
- Loss: 0.2141
- WER: 0.1808
- CER: 0.049
#### Evaluation Commands
1. To evaluate on `mozilla-foundation/common_voice_8_0` with split `test`
```bash
python eval.py --model_id bakrianoo/sinai-voice-ar-stt --dataset mozilla-foundation/common_voice_8_0 --config ar --split test
```
### Inference Without LM
```python
from transformers import (Wav2Vec2Processor, Wav2Vec2ForCTC)
import torchaudio
import torch
def speech_file_to_array_fn(voice_path, resampling_to=16000):
speech_array, sampling_rate = torchaudio.load(voice_path)
resampler = torchaudio.transforms.Resample(sampling_rate, resampling_to)
return resampler(speech_array)[0].numpy(), sampling_rate
# load the model
cp = "bakrianoo/sinai-voice-ar-stt"
processor = Wav2Vec2Processor.from_pretrained(cp)
model = Wav2Vec2ForCTC.from_pretrained(cp)
# recognize the text in a sample sound file
sound_path = './my_voice.mp3'
sample, sr = speech_file_to_array_fn(sound_path)
inputs = processor([sample], sampling_rate=16_000, return_tensors="pt", padding=True)
with torch.no_grad():
logits = model(inputs.input_values,).logits
predicted_ids = torch.argmax(logits, dim=-1)
print("Prediction:", processor.batch_decode(predicted_ids))
```
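As a lighter-weight alternative that is not part of the original card, the generic `transformers` ASR pipeline should also work with this checkpoint; the audio path below is a placeholder.
```python
from transformers import pipeline

# Minimal sketch: let the pipeline handle audio loading, resampling, and decoding.
# "./my_voice.mp3" is a placeholder path.
asr = pipeline("automatic-speech-recognition", model="bakrianoo/sinai-voice-ar-stt")
print(asr("./my_voice.mp3"))
```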
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 0.0002
- train_batch_size: 32
- eval_batch_size: 10
- seed: 42
- distributed_type: multi-GPU
- num_devices: 8
- total_train_batch_size: 256
- total_eval_batch_size: 80
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 1000
- num_epochs: 10
- mixed_precision_training: Native AMP
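For orientation only, the values above map roughly onto `transformers.TrainingArguments` as sketched below. This is a hedged reconstruction rather than the exact training script: the output directory is a placeholder, and the 8-device launch that yields the reported total batch sizes (256/80) is handled by the launcher rather than by these arguments.
```python
from transformers import TrainingArguments

# Hedged mapping of the hyperparameters listed above.
# Per-device train batch size 32 x 8 GPUs gives the reported total of 256.
training_args = TrainingArguments(
    output_dir="./sinai-voice-ar-stt",  # placeholder
    learning_rate=2e-4,
    per_device_train_batch_size=32,
    per_device_eval_batch_size=10,
    seed=42,
    warmup_steps=1000,
    num_train_epochs=10,
    lr_scheduler_type="linear",
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    fp16=True,  # Native AMP mixed precision
)
```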
### Training results
| Training Loss | Epoch | Step | Validation Loss | Wer |
|:-------------:|:-----:|:-----:|:---------------:|:------:|
| 1.354 | 0.64 | 1000 | 0.4109 | 0.4493 |
| 0.5886 | 1.28 | 2000 | 0.2798 | 0.3099 |
| 0.4977 | 1.92 | 3000 | 0.2387 | 0.2673 |
| 0.4253 | 2.56 | 4000 | 0.2266 | 0.2523 |
| 0.3942 | 3.2 | 5000 | 0.2171 | 0.2437 |
| 0.3619 | 3.84 | 6000 | 0.2076 | 0.2253 |
| 0.3245 | 4.48 | 7000 | 0.2088 | 0.2186 |
| 0.308 | 5.12 | 8000 | 0.2086 | 0.2206 |
| 0.2881 | 5.76 | 9000 | 0.2089 | 0.2105 |
| 0.2557 | 6.4 | 10000 | 0.2015 | 0.2004 |
| 0.248 | 7.04 | 11000 | 0.2044 | 0.1953 |
| 0.2251 | 7.68 | 12000 | 0.2058 | 0.1932 |
| 0.2052 | 8.32 | 13000 | 0.2117 | 0.1878 |
| 0.1976 | 8.96 | 14000 | 0.2104 | 0.1825 |
| 0.1845 | 9.6 | 15000 | 0.2156 | 0.1821 |
### Framework versions
- Transformers 4.16.2
- Pytorch 1.10.2+cu113
- Datasets 1.18.3
- Tokenizers 0.11.0 | [
-0.08994504064321518,
-0.07872242480516434,
-0.029282907024025917,
-0.07860617339611053,
-0.04658283293247223,
-0.002582227811217308,
0.03490915149450302,
-0.10949362814426422,
-0.028800761327147484,
-0.06391210854053497,
0.010049795731902122,
-0.1209440603852272,
-0.03101838193833828,
0.023915279656648636,
0.005685930140316486,
-0.08342976868152618,
-0.004216920584440231,
-0.06407208740711212,
-0.05008460208773613,
-0.0638125091791153,
0.026543039828538895,
0.14884786307811737,
0.04379548504948616,
0.02408616617321968,
0.0050903428345918655,
0.007995530031621456,
-0.0717921331524849,
0.024320831522345543,
0.00223234249278903,
-0.05787093564867973,
0.10845884680747986,
0.05129178613424301,
0.1526244729757309,
0.006689145229756832,
0.02672049030661583,
0.0455930121243,
0.024088870733976364,
-0.030348563566803932,
-0.03621142357587814,
-0.034239303320646286,
-0.012517185881733894,
-0.04953475296497345,
0.020704977214336395,
-0.0782250165939331,
-0.0034824260510504246,
-0.05257079005241394,
-0.0767725259065628,
-0.023916980251669884,
0.020965686067938805,
0.05881152302026749,
-0.10328835248947144,
0.005351425614207983,
0.05847219005227089,
-0.001599692041054368,
-0.07223042100667953,
0.007040119264274836,
0.002969350665807724,
0.036506760865449905,
0.05417243018746376,
0.022557901218533516,
-0.043841902166604996,
-0.020309051498770714,
-0.03485598415136337,
0.0423562191426754,
-0.07494283467531204,
-0.003825773950666189,
0.042530424892902374,
-0.08914104104042053,
0.011361432261765003,
-0.01762603409588337,
-0.1160259023308754,
0.08148500323295593,
-0.017140256240963936,
0.04286108538508415,
-0.03212510421872139,
-0.014561899937689304,
0.015851037576794624,
-0.07234474271535873,
0.07602694630622864,
-0.0640474483370781,
0.006267833057790995,
-0.07015129923820496,
-0.004339126404374838,
0.011167163960635662,
0.08497657626867294,
-0.012614159844815731,
-0.03779009357094765,
-0.017625465989112854,
-0.039615049958229065,
-0.002981102094054222,
-0.06366094201803207,
-0.047460831701755524,
0.00031735835364088416,
0.07819024473428726,
0.03459736332297325,
0.06282228976488113,
0.03437989950180054,
0.04575980082154274,
-0.028777068480849266,
0.08643946796655655,
-0.014525708742439747,
-0.06671280413866043,
-0.005158504471182823,
-0.017159895971417427,
-0.005701733287423849,
-0.013263324275612831,
-0.00368966325186193,
0.03454574570059776,
0.03894106671214104,
-0.019366273656487465,
-0.05236749351024628,
-0.0404634103178978,
-0.010094940662384033,
-0.08681225776672363,
0.004565782379359007,
0.03324369341135025,
-0.0802103653550148,
-0.034919556230306625,
0.03497067093849182,
0.03431958705186844,
-0.05215802788734436,
-0.01259241346269846,
0.003380815964192152,
-0.019955836236476898,
0.03850168362259865,
0.005172810982912779,
-0.011784118600189686,
7.339835950360304e-33,
0.035660270601511,
0.04433957114815712,
0.014833002351224422,
0.004315512254834175,
-0.006224877666682005,
-0.06717165559530258,
-0.06433846056461334,
0.0646626353263855,
-0.04508376121520996,
-0.04404063522815704,
0.03867264837026596,
-0.03702721372246742,
-0.07140452414751053,
0.030915820971131325,
0.050834525376558304,
0.042909763753414154,
0.020777100697159767,
-0.023465115576982498,
-0.04024795442819595,
-0.04580235853791237,
0.09845235198736191,
0.018334567546844482,
0.04307251423597336,
-0.002839859342202544,
0.07492075860500336,
0.055421970784664154,
0.05375279486179352,
-0.05273815244436264,
-0.025305090472102165,
0.039994969964027405,
0.0037609939463436604,
-0.03515113890171051,
-0.03423250466585159,
-0.0575285330414772,
0.0475386418402195,
-0.01042110938578844,
-0.001632942003197968,
0.0266413576900959,
-0.08700979501008987,
-0.09012273699045181,
0.0006059485604055226,
0.025759955868124962,
-0.03813542053103447,
-0.05652716010808945,
0.022885093465447426,
-0.05392700806260109,
-0.010766653344035149,
0.02437834069132805,
0.04010900482535362,
0.058284349739551544,
-0.06030483916401863,
0.028805581852793694,
-0.046874869614839554,
-0.019050253555178642,
-0.005252458155155182,
-0.014551298692822456,
0.029237983748316765,
0.06510820239782333,
-0.010754742659628391,
0.003029383486136794,
-0.0022354123648256063,
-0.0065637375228106976,
-0.005310457665473223,
-0.04539978876709938,
0.0723901242017746,
-0.06551161408424377,
0.009461984969675541,
0.0030849596951156855,
0.05580192804336548,
0.02170380763709545,
0.02836296707391739,
-0.021393397822976112,
0.08510395139455795,
0.09614939987659454,
-0.03143046423792839,
0.03157668560743332,
-0.01568039320409298,
-0.029115382581949234,
0.007344589103013277,
0.05351986736059189,
-0.05488517880439758,
0.04731746017932892,
0.04960816726088524,
-0.035294100642204285,
-0.016241809353232384,
-0.014027747325599194,
0.0793810784816742,
-0.0370183065533638,
-0.01892266236245632,
0.011254713870584965,
0.0064081717282533646,
0.09975392371416092,
-0.06810933351516724,
-0.005187101196497679,
-0.06686671823263168,
-1.0086270502513097e-32,
-0.03056163899600506,
0.0892915427684784,
-0.024380620568990707,
0.09261011332273483,
-0.008276470936834812,
-0.028663115575909615,
0.14190448820590973,
0.07732754945755005,
0.02799053117632866,
-0.02397395670413971,
0.02613881044089794,
-0.03584396839141846,
0.11376085132360458,
-0.09098094701766968,
0.10589499026536942,
0.0033241405617445707,
-0.04921889305114746,
-0.004756754729896784,
0.021370669826865196,
0.09498231112957001,
-0.004930995870381594,
0.09782187640666962,
-0.04668762534856796,
0.04355031996965408,
-0.06373312324285507,
-0.06828628480434418,
-0.05829767882823944,
0.01991100236773491,
0.01230180449783802,
-0.010444729588925838,
0.02265232428908348,
0.03806004673242569,
-0.17805755138397217,
0.056566476821899414,
-0.024054385721683502,
-0.0690753161907196,
0.008858636021614075,
-0.004775297828018665,
-0.020301643759012222,
0.023852841928601265,
0.11493242532014847,
0.06408898532390594,
-0.0522041991353035,
-0.10238176584243774,
0.06416074186563492,
-0.030311219394207,
0.020164450630545616,
0.03421952575445175,
-0.03157322481274605,
-0.09446927905082703,
0.04053269699215889,
-0.019253382459282875,
0.00038730050437152386,
0.0114772729575634,
0.012912368401885033,
-0.0056020044721663,
0.018317868933081627,
-0.039884231984615326,
-0.036180950701236725,
0.005620600655674934,
0.010374690406024456,
-0.0013319820864126086,
-0.05672591179609299,
-0.02231953851878643,
0.05769352987408638,
0.03256837651133537,
-0.023996863514184952,
0.005798612255603075,
0.025781432166695595,
-0.01976720057427883,
-0.010540759190917015,
-0.032373376190662384,
0.002451982581987977,
-0.01415853388607502,
0.0067520663142204285,
0.009322320111095905,
-0.12159772962331772,
-0.03446454182267189,
-0.03868198022246361,
-0.07012157142162323,
0.010819277726113796,
0.046151429414749146,
0.014084437862038612,
0.0062178452499210835,
0.04486214742064476,
0.0751916691660881,
0.00423339381814003,
0.010438572615385056,
0.011450597085058689,
0.021591853350400925,
-0.05650951340794563,
0.04358261078596115,
-0.03360520303249359,
0.041786327958106995,
-0.02478104829788208,
-5.6885653521021595e-8,
-0.07244891673326492,
0.01055893860757351,
-0.021090004593133926,
-0.05820119008421898,
0.021519634872674942,
-0.04888211563229561,
-0.00403689406812191,
-0.04605546221137047,
0.03899248316884041,
-0.031950779259204865,
0.05379871651530266,
-0.06103645637631416,
-0.04933691397309303,
0.0902329534292221,
-0.005134156905114651,
-0.05164198577404022,
-0.06133613735437393,
0.14303511381149292,
-0.021407172083854675,
-0.12822206318378448,
0.07124138623476028,
-0.0036011794582009315,
-0.00018984080816153437,
0.000512364786118269,
-0.002058803103864193,
0.0053195287473499775,
-0.022266218438744545,
0.08279236406087875,
-0.011575425043702126,
0.008048457093536854,
-0.03830243647098541,
0.05717098340392113,
-0.01311853900551796,
-0.08252134919166565,
0.0821729525923729,
0.04377447068691254,
-0.03516357019543648,
-0.023526517674326897,
-0.009156656451523304,
0.030114412307739258,
0.054634757339954376,
0.04736681282520294,
-0.07908175140619278,
0.004041085485368967,
0.06412111222743988,
0.019886357709765434,
-0.040535978972911835,
-0.07352548837661743,
0.025078777223825455,
-0.026123980060219765,
0.06311933696269989,
0.0016359660075977445,
0.026149403303861618,
0.010197962634265423,
0.0835212990641594,
0.04205259308218956,
0.0487479642033577,
-0.06672637164592743,
0.05051258206367493,
-0.002120777266100049,
0.15524409711360931,
-0.04018032178282738,
-0.0874728411436081,
-0.027954325079917908
] |
CAMeL-Lab/bert-base-arabic-camelbert-msa | 277069fd3645fedb22b746caf38d111aadee0241 | 2021-09-14T14:33:41.000Z | [
"pytorch",
"tf",
"jax",
"bert",
"fill-mask",
"ar",
"arxiv:2103.06678",
"transformers",
"license:apache-2.0",
"autotrain_compatible"
] | fill-mask | false | CAMeL-Lab | null | CAMeL-Lab/bert-base-arabic-camelbert-msa | 1,178 | 3 | transformers | ---
language:
- ar
license: apache-2.0
widget:
- text: "الهدف من الحياة هو [MASK] ."
---
# CAMeLBERT: A collection of pre-trained models for Arabic NLP tasks
## Model description
**CAMeLBERT** is a collection of BERT models pre-trained on Arabic texts with different sizes and variants.
We release pre-trained language models for Modern Standard Arabic (MSA), dialectal Arabic (DA), and classical Arabic (CA), in addition to a model pre-trained on a mix of the three.
We also provide additional models that are pre-trained on a scaled-down set of the MSA variant (half, quarter, eighth, and sixteenth).
The details are described in the paper *"[The Interplay of Variant, Size, and Task Type in Arabic Pre-trained Language Models](https://arxiv.org/abs/2103.06678)."*
This model card describes **CAMeLBERT-MSA** (`bert-base-arabic-camelbert-msa`), a model pre-trained on the entire MSA dataset.
||Model|Variant|Size|#Word|
|-|-|:-:|-:|-:|
||`bert-base-arabic-camelbert-mix`|CA,DA,MSA|167GB|17.3B|
||`bert-base-arabic-camelbert-ca`|CA|6GB|847M|
||`bert-base-arabic-camelbert-da`|DA|54GB|5.8B|
|✔|`bert-base-arabic-camelbert-msa`|MSA|107GB|12.6B|
||`bert-base-arabic-camelbert-msa-half`|MSA|53GB|6.3B|
||`bert-base-arabic-camelbert-msa-quarter`|MSA|27GB|3.1B|
||`bert-base-arabic-camelbert-msa-eighth`|MSA|14GB|1.6B|
||`bert-base-arabic-camelbert-msa-sixteenth`|MSA|6GB|746M|
## Intended uses
You can use the released model for either masked language modeling or next sentence prediction.
However, it is mostly intended to be fine-tuned on an NLP task, such as NER, POS tagging, sentiment analysis, dialect identification, and poetry classification.
We release our fine-tuning code [here](https://github.com/CAMeL-Lab/CAMeLBERT).
#### How to use
You can use this model directly with a pipeline for masked language modeling:
```python
>>> from transformers import pipeline
>>> unmasker = pipeline('fill-mask', model='CAMeL-Lab/bert-base-arabic-camelbert-msa')
>>> unmasker("الهدف من الحياة هو [MASK] .")
[{'sequence': '[CLS] الهدف من الحياة هو العمل. [SEP]',
'score': 0.08507660031318665,
'token': 2854,
'token_str': 'العمل'},
{'sequence': '[CLS] الهدف من الحياة هو الحياة. [SEP]',
'score': 0.058905381709337234,
'token': 3696, 'token_str': 'الحياة'},
{'sequence': '[CLS] الهدف من الحياة هو النجاح. [SEP]',
'score': 0.04660581797361374, 'token': 6232,
'token_str': 'النجاح'},
{'sequence': '[CLS] الهدف من الحياة هو الربح. [SEP]',
'score': 0.04156001657247543,
'token': 12413, 'token_str': 'الربح'},
{'sequence': '[CLS] الهدف من الحياة هو الحب. [SEP]',
'score': 0.03534102067351341,
'token': 3088,
'token_str': 'الحب'}]
```
*Note*: to download our models, you would need `transformers>=3.5.0`. Otherwise, you could download the models manually.
Here is how to use this model to get the features of a given text in PyTorch:
```python
from transformers import AutoTokenizer, AutoModel
tokenizer = AutoTokenizer.from_pretrained('CAMeL-Lab/bert-base-arabic-camelbert-msa')
model = AutoModel.from_pretrained('CAMeL-Lab/bert-base-arabic-camelbert-msa')
text = "مرحبا يا عالم."
encoded_input = tokenizer(text, return_tensors='pt')
output = model(**encoded_input)
```
and in TensorFlow:
```python
from transformers import AutoTokenizer, TFAutoModel
tokenizer = AutoTokenizer.from_pretrained('CAMeL-Lab/bert-base-arabic-camelbert-msa')
model = TFAutoModel.from_pretrained('CAMeL-Lab/bert-base-arabic-camelbert-msa')
text = "مرحبا يا عالم."
encoded_input = tokenizer(text, return_tensors='tf')
output = model(encoded_input)
```
## Training data
- MSA (Modern Standard Arabic)
- [The Arabic Gigaword Fifth Edition](https://catalog.ldc.upenn.edu/LDC2011T11)
- [Abu El-Khair Corpus](http://www.abuelkhair.net/index.php/en/arabic/abu-el-khair-corpus)
- [OSIAN corpus](https://vlo.clarin.eu/search;jsessionid=31066390B2C9E8C6304845BA79869AC1?1&q=osian)
- [Arabic Wikipedia](https://archive.org/details/arwiki-20190201)
- The unshuffled version of the Arabic [OSCAR corpus](https://oscar-corpus.com/)
## Training procedure
We use [the original implementation](https://github.com/google-research/bert) released by Google for pre-training.
We follow the original English BERT model's hyperparameters for pre-training, unless otherwise specified.
### Preprocessing
- After extracting the raw text from each corpus, we apply the following pre-processing.
- We first remove invalid characters and normalize white spaces using the utilities provided by [the original BERT implementation](https://github.com/google-research/bert/blob/eedf5716ce1268e56f0a50264a88cafad334ac61/tokenization.py#L286-L297).
- We also remove lines without any Arabic characters.
- We then remove diacritics and kashida using [CAMeL Tools](https://github.com/CAMeL-Lab/camel_tools).
- Finally, we split each line into sentences with a heuristics-based sentence segmenter.
- We train a WordPiece tokenizer on the entire dataset (167 GB text) with a vocabulary size of 30,000 using [HuggingFace's tokenizers](https://github.com/huggingface/tokenizers).
- We do not lowercase letters nor strip accents.
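A rough sketch of the character-level cleaning described above is given below. It relies on `camel_tools` for diacritic removal plus a simple regex and string replacement; it is an approximation for illustration, not the exact pre-training pipeline.
```python
import re
from camel_tools.utils.dediac import dediac_ar

ARABIC_LETTERS = re.compile(r"[\u0621-\u064A]")  # basic Arabic letter block
KASHIDA = "\u0640"  # tatweel / kashida

def clean_line(line: str):
    """Return a cleaned line, or None if it contains no Arabic characters."""
    if not ARABIC_LETTERS.search(line):
        return None
    line = dediac_ar(line)            # remove diacritics
    line = line.replace(KASHIDA, "")  # remove kashida
    return " ".join(line.split())     # normalize whitespace

print(clean_line("مَرْحَباً يا عـــالم."))

# Training the 30k WordPiece vocabulary on the cleaned corpus could then look like
# the following (the corpus file path is a placeholder):
# from tokenizers import BertWordPieceTokenizer
# tok = BertWordPieceTokenizer(lowercase=False, strip_accents=False)
# tok.train(files=["cleaned_corpus.txt"], vocab_size=30000)
```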
### Pre-training
- The model was trained on a single cloud TPU (`v3-8`) for one million steps in total.
- The first 90,000 steps were trained with a batch size of 1,024 and the rest was trained with a batch size of 256.
- The sequence length was limited to 128 tokens for 90% of the steps and 512 for the remaining 10%.
- We use whole word masking and a duplicate factor of 10.
- We set max predictions per sequence to 20 for the dataset with max sequence length of 128 tokens and 80 for the dataset with max sequence length of 512 tokens.
- We use a random seed of 12345, masked language model probability of 0.15, and short sequence probability of 0.1.
- The optimizer used is Adam with a learning rate of 1e-4, \\(\beta_{1} = 0.9\\) and \\(\beta_{2} = 0.999\\), a weight decay of 0.01, learning rate warmup for 10,000 steps and linear decay of the learning rate after.
## Evaluation results
- We evaluate our pre-trained language models on five NLP tasks: NER, POS tagging, sentiment analysis, dialect identification, and poetry classification.
- We fine-tune and evaluate the models using 12 datasets.
- We used Hugging Face's transformers to fine-tune our CAMeLBERT models.
- We used transformers `v3.1.0` along with PyTorch `v1.5.1`.
- The fine-tuning was done by adding a fully connected linear layer to the last hidden state.
- We use \\(F_{1}\\) score as a metric for all tasks.
- Code used for fine-tuning is available [here](https://github.com/CAMeL-Lab/CAMeLBERT).
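For illustration only (the released fine-tuning code linked above is the authoritative reference), a minimal sentiment-classification fine-tuning setup with this checkpoint might look like the sketch below. The tiny in-memory dataset, label count, and hyperparameters are assumptions made for the example.
```python
from datasets import Dataset
from transformers import (AutoTokenizer, AutoModelForSequenceClassification,
                          Trainer, TrainingArguments)

model_name = "CAMeL-Lab/bert-base-arabic-camelbert-msa"
tokenizer = AutoTokenizer.from_pretrained(model_name)
# Hypothetical 3-way sentiment head; a real run would use a dataset such as ASTD or ArSAS.
model = AutoModelForSequenceClassification.from_pretrained(model_name, num_labels=3)

# Tiny made-up dataset, purely to show the plumbing.
train_ds = Dataset.from_dict({
    "text": ["أحببت هذا الفيلم كثيرا", "الخدمة كانت سيئة للغاية"],
    "label": [2, 0],
}).map(lambda batch: tokenizer(batch["text"], truncation=True, max_length=128), batched=True)

trainer = Trainer(
    model=model,
    args=TrainingArguments(output_dir="camelbert-msa-sa",  # placeholder
                           num_train_epochs=3,
                           per_device_train_batch_size=8,
                           learning_rate=2e-5),
    train_dataset=train_ds,
    tokenizer=tokenizer,  # enables dynamic padding via the default data collator
)
trainer.train()
```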
### Results
| Task | Dataset | Variant | Mix | CA | DA | MSA | MSA-1/2 | MSA-1/4 | MSA-1/8 | MSA-1/16 |
| -------------------- | --------------- | ------- | ----- | ----- | ----- | ----- | ------- | ------- | ------- | -------- |
| NER | ANERcorp | MSA | 80.8% | 67.9% | 74.1% | 82.4% | 82.0% | 82.1% | 82.6% | 80.8% |
| POS | PATB (MSA) | MSA | 98.1% | 97.8% | 97.7% | 98.3% | 98.2% | 98.3% | 98.2% | 98.2% |
| | ARZTB (EGY) | DA | 93.6% | 92.3% | 92.7% | 93.6% | 93.6% | 93.7% | 93.6% | 93.6% |
| | Gumar (GLF) | DA | 97.3% | 97.7% | 97.9% | 97.9% | 97.9% | 97.9% | 97.9% | 97.9% |
| SA | ASTD | MSA | 76.3% | 69.4% | 74.6% | 76.9% | 76.0% | 76.8% | 76.7% | 75.3% |
| | ArSAS | MSA | 92.7% | 89.4% | 91.8% | 93.0% | 92.6% | 92.5% | 92.5% | 92.3% |
| | SemEval | MSA | 69.0% | 58.5% | 68.4% | 72.1% | 70.7% | 72.8% | 71.6% | 71.2% |
| DID | MADAR-26 | DA | 62.9% | 61.9% | 61.8% | 62.6% | 62.0% | 62.8% | 62.0% | 62.2% |
| | MADAR-6 | DA | 92.5% | 91.5% | 92.2% | 91.9% | 91.8% | 92.2% | 92.1% | 92.0% |
| | MADAR-Twitter-5 | MSA | 75.7% | 71.4% | 74.2% | 77.6% | 78.5% | 77.3% | 77.7% | 76.2% |
| | NADI | DA | 24.7% | 17.3% | 20.1% | 24.9% | 24.6% | 24.6% | 24.9% | 23.8% |
| Poetry | APCD | CA | 79.8% | 80.9% | 79.6% | 79.7% | 79.9% | 80.0% | 79.7% | 79.8% |
### Results (Average)
| | Variant | Mix | CA | DA | MSA | MSA-1/2 | MSA-1/4 | MSA-1/8 | MSA-1/16 |
| -------------------- | ------- | ----- | ----- | ----- | ----- | ------- | ------- | ------- | -------- |
| Variant-wise-average<sup>[[1]](#footnote-1)</sup> | MSA | 82.1% | 75.7% | 80.1% | 83.4% | 83.0% | 83.3% | 83.2% | 82.3% |
| | DA | 74.4% | 72.1% | 72.9% | 74.2% | 74.0% | 74.3% | 74.1% | 73.9% |
| | CA | 79.8% | 80.9% | 79.6% | 79.7% | 79.9% | 80.0% | 79.7% | 79.8% |
| Macro-Average | ALL | 78.7% | 74.7% | 77.1% | 79.2% | 79.0% | 79.2% | 79.1% | 78.6% |
<a name="footnote-1">[1]</a>: Variant-wise-average refers to average over a group of tasks in the same language variant.
## Acknowledgements
This research was supported with Cloud TPUs from Google’s TensorFlow Research Cloud (TFRC).
## Citation
```bibtex
@inproceedings{inoue-etal-2021-interplay,
title = "The Interplay of Variant, Size, and Task Type in {A}rabic Pre-trained Language Models",
author = "Inoue, Go and
Alhafni, Bashar and
Baimukan, Nurpeiis and
Bouamor, Houda and
Habash, Nizar",
booktitle = "Proceedings of the Sixth Arabic Natural Language Processing Workshop",
month = apr,
year = "2021",
address = "Kyiv, Ukraine (Online)",
publisher = "Association for Computational Linguistics",
abstract = "In this paper, we explore the effects of language variants, data sizes, and fine-tuning task types in Arabic pre-trained language models. To do so, we build three pre-trained language models across three variants of Arabic: Modern Standard Arabic (MSA), dialectal Arabic, and classical Arabic, in addition to a fourth language model which is pre-trained on a mix of the three. We also examine the importance of pre-training data size by building additional models that are pre-trained on a scaled-down set of the MSA variant. We compare our different models to each other, as well as to eight publicly available models by fine-tuning them on five NLP tasks spanning 12 datasets. Our results suggest that the variant proximity of pre-training data to fine-tuning data is more important than the pre-training data size. We exploit this insight in defining an optimized system selection model for the studied tasks.",
}
```
| [
-0.08480460941791534,
-0.06157763674855232,
0.06550054997205734,
-0.012643663212656975,
-0.09904792159795761,
0.07742918282747269,
-0.0024453045334666967,
-0.03506958857178688,
0.03235059604048729,
0.02080320380628109,
0.013343445956707,
-0.003017805051058531,
0.019713256508111954,
0.028160760179162025,
0.03732611611485481,
0.00844994280487299,
0.08597776293754578,
-0.02155161276459694,
-0.08321388810873032,
-0.03374813497066498,
0.06359359622001648,
0.05883948877453804,
0.040397725999355316,
0.007697176653891802,
0.030494187027215958,
-0.03391784429550171,
-0.07321465015411377,
-0.038829255849123,
0.0761883482336998,
0.026192834600806236,
0.025040851905941963,
0.04619516059756279,
0.08562887459993362,
0.060074999928474426,
-0.0019799070432782173,
0.0641406700015068,
-0.018091674894094467,
0.00860778708010912,
0.06597348302602768,
0.06002957001328468,
-0.022975478321313858,
-0.006382734049111605,
-0.01932179369032383,
-0.00040965116932056844,
0.04672148823738098,
-0.07998760789632797,
-0.035885199904441833,
0.08926980197429657,
-0.051438529044389725,
0.03968247026205063,
-0.08806047588586807,
-0.003563898615539074,
0.04492280259728432,
-0.0281376913189888,
-0.012941182591021061,
-0.07999669760465622,
0.006020999047905207,
0.0013191415928304195,
0.010744243860244751,
-0.08832784742116928,
-0.12672197818756104,
-0.022844797000288963,
-0.04361534118652344,
0.013383771292865276,
-0.08272115886211395,
0.0054739429615437984,
-0.0043545495718717575,
-0.035926513373851776,
-0.0029410722199827433,
-0.0288742296397686,
-0.07030569016933441,
0.053958773612976074,
0.033610835671424866,
0.0521734282374382,
-0.010435991920530796,
-0.07302463799715042,
0.07031920552253723,
-0.029490569606423378,
-0.014033918268978596,
-0.03471389785408974,
-0.00032869275310076773,
0.04309641569852829,
0.04088735207915306,
-0.01615208387374878,
0.08492179960012436,
-0.030068503692746162,
0.047538235783576965,
0.024662010371685028,
0.02479645609855652,
0.00020633378881029785,
-0.013238557614386082,
-0.08841560781002045,
0.05652114748954773,
0.020719515159726143,
0.03601864352822304,
-0.015492240898311138,
0.05674612149596214,
-0.03319716453552246,
-0.04547014832496643,
0.07124755531549454,
0.06735817342996597,
-0.04104994982481003,
0.05799546092748642,
0.0023748816456645727,
0.04734911769628525,
0.003423066809773445,
-0.017207082360982895,
0.01625855080783367,
0.05616123229265213,
-0.10996053367853165,
-0.002762731397524476,
-0.04725123196840286,
-0.03866707906126976,
-0.05068723112344742,
-0.0005056462832726538,
0.016218720003962517,
-0.03085058182477951,
-0.09482079744338989,
0.009192178025841713,
0.014635142870247364,
-0.030914727598428726,
-0.013169793412089348,
0.07924164831638336,
-0.024303140118718147,
-0.060787416994571686,
0.0019891054835170507,
-0.026982799172401428,
6.043397373504811e-34,
0.03120478242635727,
0.005563513841480017,
0.012322759255766869,
-0.0034628729335963726,
-0.02297709882259369,
-0.005411152262240648,
-0.0012224985985085368,
0.049175769090652466,
-0.04688340798020363,
-0.053686533123254776,
-0.016226425766944885,
0.04961206018924713,
-0.0782160609960556,
0.06862669438123703,
0.006811846978962421,
0.038166698068380356,
0.009601728990674019,
-0.012616257183253765,
0.05662117153406143,
-0.03735188767313957,
0.07727544009685516,
0.04738646000623703,
0.05809374526143074,
-0.04423246532678604,
-0.02049204334616661,
0.050955694168806076,
0.10470005124807358,
-0.10202735662460327,
-0.030509617179632187,
0.07850117981433868,
-0.13437312841415405,
-0.024156974628567696,
-0.05907132104039192,
-0.03579898923635483,
-0.0807579830288887,
-0.04885411635041237,
-0.09610293060541153,
-0.0658353865146637,
0.035219743847846985,
-0.0347169004380703,
-0.01639270782470703,
0.04408521205186844,
0.06571837514638901,
-0.05901367589831352,
-0.010813240893185139,
-0.04512035846710205,
0.030198711901903152,
0.02496572583913803,
-0.0016336862463504076,
0.04272513836622238,
0.0366174653172493,
0.03617347404360771,
-0.03792046383023262,
-0.008118117228150368,
-0.01931881532073021,
-0.005031566601246595,
0.07576972246170044,
0.051648110151290894,
0.016494572162628174,
0.037007320672273636,
0.020583515986800194,
-0.04446709156036377,
0.014877266250550747,
0.04906495288014412,
0.01491550076752901,
-0.04194378852844238,
-0.0015934830298647285,
0.0020099333487451077,
0.07819151878356934,
0.02082952670753002,
-0.029330797493457794,
-0.015174373053014278,
0.05830836296081543,
0.12419146299362183,
-0.009453725069761276,
0.011035987176001072,
0.04691457375884056,
-0.09724748879671097,
-0.0017493406776338816,
0.04749442636966705,
-0.029796529561281204,
0.06112149357795715,
-0.016234716400504112,
-0.05619525536894798,
-0.12661419808864594,
-0.045001763850450516,
0.11129015684127808,
-0.06844410300254822,
0.019677260890603065,
-0.03971664234995842,
0.01645335555076599,
0.017962060868740082,
0.001477701822295785,
0.026966102421283722,
0.027004607021808624,
-1.7526211184447442e-33,
0.01557000819593668,
0.016910120844841003,
-0.08073091506958008,
0.02809952385723591,
-0.07485371828079224,
-0.09769178181886673,
0.13737477362155914,
0.1802235096693039,
-0.030280645936727524,
-0.027455532923340797,
-0.006758957169950008,
-0.057888928800821304,
0.07302221655845642,
-0.056419678032398224,
0.10423236340284348,
-0.023467624559998512,
-0.02657667174935341,
-0.005303913727402687,
0.07426412403583527,
0.02605809085071087,
0.050779130309820175,
-0.04017067700624466,
-0.08222644031047821,
0.05174420401453972,
0.04450516030192375,
-0.011983640491962433,
-0.07121783494949341,
0.025144211947917938,
-0.059880323708057404,
0.049598827958106995,
-0.030280642211437225,
0.016377707943320274,
-0.054062873125076294,
0.054153118282556534,
-0.09133023023605347,
-0.005945886019617319,
-0.01776719093322754,
-0.0007098857313394547,
-0.028495920822024345,
0.0764191523194313,
0.10734079778194427,
-0.053145669400691986,
0.005867146886885166,
0.022267283871769905,
-0.01453974936157465,
0.027095427736639977,
-0.04956362396478653,
-0.011844832450151443,
0.022150106728076935,
-0.11704135686159134,
-0.02446170151233673,
0.02730722166597843,
-0.03479916229844093,
-0.041308511048555374,
-0.027257759124040604,
-0.07909396290779114,
0.013456271961331367,
-0.07417808473110199,
-0.03184866905212402,
-0.017735997214913368,
-0.03368121013045311,
-0.014379635453224182,
0.10335283726453781,
-0.06838402152061462,
-0.0206147450953722,
-0.04843469709157944,
-0.015232764184474945,
0.0259688813239336,
-0.0037731812335550785,
-0.006516622845083475,
0.053202565759420395,
-0.014551562257111073,
-0.03345814347267151,
-0.011912434361875057,
0.015315450727939606,
0.02829357236623764,
-0.046698812395334244,
-0.05472922325134277,
-0.02461901865899563,
-0.08665221184492111,
-0.02588285692036152,
0.00027050470816902816,
0.024481484666466713,
0.033413540571928024,
0.04468980431556702,
0.11041156947612762,
0.03951007500290871,
0.01009304728358984,
-0.03000999242067337,
0.07074154913425446,
-0.023031627759337425,
0.04115989804267883,
0.007922956719994545,
0.05973383039236069,
-0.03716735541820526,
-4.257627139736542e-8,
-0.11190196871757507,
0.029223475605249405,
0.004544130060821772,
0.03671504929661751,
-0.04652009159326553,
-0.0013063688529655337,
-0.06288907676935196,
0.024779438972473145,
-0.023969214409589767,
-0.033214908093214035,
0.054682716727256775,
0.030431611463427544,
-0.048570726066827774,
-0.0015040392754599452,
-0.036155082285404205,
0.055822692811489105,
-0.0005213108379393816,
0.0021865004673600197,
-0.0009123192867264152,
-0.054865602403879166,
0.08952967822551727,
0.04185304045677185,
0.04270623251795769,
-0.03591511398553848,
-0.04147649556398392,
-0.03592377156019211,
-0.07152140885591507,
0.10037568211555481,
-0.014657237567007542,
0.0032453371677547693,
0.009250893257558346,
0.02336951531469822,
-0.057620447129011154,
-0.033729489892721176,
0.03945336118340492,
0.044695932418107986,
-0.05775875970721245,
-0.00046197822666727006,
-0.02490251511335373,
0.02388509176671505,
0.11220203340053558,
-0.04035037383437157,
-0.08332207053899765,
-0.022666610777378082,
0.11407417804002762,
0.028989532962441444,
-0.041262686252593994,
-0.11271855235099792,
-0.005111042410135269,
-0.006646621040999889,
0.12109876424074173,
-0.03561452031135559,
0.019025161862373352,
0.10260213911533356,
0.04626688361167908,
0.0019261720590293407,
-0.06135372072458267,
-0.07914532721042633,
0.00645140465348959,
0.014652161858975887,
0.041672833263874054,
0.022520529106259346,
0.030514942482113838,
0.02733217552304268
] |
hf-internal-testing/tiny-detr-mobilenetsv3-panoptic | d7cb3c9eb87c7d7de00190ea97d48da1ba07206d | 2021-09-27T19:40:12.000Z | [
"pytorch",
"detr",
"image-segmentation",
"transformers"
] | image-segmentation | false | hf-internal-testing | null | hf-internal-testing/tiny-detr-mobilenetsv3-panoptic | 1,177 | 1 | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
junnyu/roformer_chinese_sim_char_ft_base | 38c5088bbdaeeecfef68696bd2c83b16baa0fb92 | 2022-04-15T03:52:49.000Z | [
"pytorch",
"roformer",
"text-generation",
"zh",
"transformers",
"tf2.0"
] | text-generation | false | junnyu | null | junnyu/roformer_chinese_sim_char_ft_base | 1,174 | 3 | transformers | ---
language: zh
tags:
- roformer
- pytorch
- tf2.0
inference: False
---
# Installation
- pip install roformer==0.4.3
# Usage
```python
import torch
import numpy as np
from roformer import RoFormerForCausalLM, RoFormerConfig
from transformers import BertTokenizer
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
pretrained_model = "junnyu/roformer_chinese_sim_char_base"
tokenizer = BertTokenizer.from_pretrained(pretrained_model)
config = RoFormerConfig.from_pretrained(pretrained_model)
config.is_decoder = True
config.eos_token_id = tokenizer.sep_token_id
config.pooler_activation = "linear"
model = RoFormerForCausalLM.from_pretrained(pretrained_model, config=config)
model.to(device)
model.eval()
def gen_synonyms(text, n=100, k=20):
    '''Generate n sentences similar to `text`, then return the k most similar ones.
    Approach: generate candidates with seq2seq, then score similarity with the encoder and sort.
    '''
    # collect candidate similar sentences
    r = []
    inputs1 = tokenizer(text, return_tensors="pt")
    for _ in range(n):
        inputs1.to(device)
        output = tokenizer.batch_decode(model.generate(**inputs1, top_p=0.95, do_sample=True, max_length=128), skip_special_tokens=True)[0].replace(" ", "").replace(text, "")  # strip spaces and drop the original text
        r.append(output)
    # rank the candidates with the encoder
    r = [i for i in set(r) if i != text and len(i) > 0]
    r = [text] + r
    inputs2 = tokenizer(r, padding=True, return_tensors="pt")
    with torch.no_grad():
        inputs2.to(device)
        outputs = model(**inputs2)
        Z = outputs.pooler_output.cpu().numpy()
    Z /= (Z**2).sum(axis=1, keepdims=True)**0.5
    argsort = np.dot(Z[1:], -Z[0]).argsort()
    return [r[i + 1] for i in argsort[:k]]
out = gen_synonyms("广州和深圳哪个好?")
print(out)
# ['深圳和广州哪个好?',
# '广州和深圳哪个好',
# '深圳和广州哪个好',
# '深圳和广州哪个比较好。',
# '深圳和广州哪个最好?',
# '深圳和广州哪个比较好',
# '广州和深圳那个比较好',
# '深圳和广州哪个更好?',
# '深圳与广州哪个好',
# '深圳和广州,哪个比较好',
# '广州与深圳比较哪个好',
# '深圳和广州哪里比较好',
# '深圳还是广州比较好?',
# '广州和深圳哪个地方好一些?',
# '广州好还是深圳好?',
# '广州好还是深圳好呢?',
# '广州与深圳哪个地方好点?',
# '深圳好还是广州好',
# '广州好还是深圳好',
# '广州和深圳哪个城市好?']
``` | [
-0.09350518882274628,
-0.10403788834810257,
-0.06122535467147827,
0.037267301231622696,
0.041284799575805664,
0.015013430267572403,
-0.04963044822216034,
0.09185737371444702,
-0.008376067504286766,
-0.062034837901592255,
0.03647521510720253,
-0.08799146115779877,
-0.0634993389248848,
0.016416871920228004,
0.018445173278450966,
0.03715648874640465,
-0.018184585496783257,
-0.04276493936777115,
-0.050034891813993454,
-0.07769212126731873,
0.10789132863283157,
-0.005772916600108147,
0.06818724423646927,
0.0012709859292954206,
0.0598103366792202,
-0.0709838941693306,
-0.04117128252983093,
0.015161330811679363,
0.056808821856975555,
0.015624837949872017,
0.036965783685445786,
0.0663677528500557,
-0.000580597494263202,
0.03135759383440018,
0.11322092264890671,
0.00015795667422935367,
-0.038276441395282745,
-0.0325104221701622,
-0.04376364126801491,
0.05259975418448448,
0.039966125041246414,
-0.06515918672084808,
-0.057481419295072556,
-0.05964517965912819,
0.007555924355983734,
0.0005864406703040004,
0.0890180766582489,
-0.024734977632761,
0.000024722388843656518,
-0.035129282623529434,
-0.03433360531926155,
0.007103967480361462,
0.013985502533614635,
0.026690883561968803,
-0.044275593012571335,
0.051445867866277695,
0.06288861483335495,
-0.07209630310535431,
0.022140227258205414,
-0.09646635502576828,
-0.08788206428289413,
0.007001935970038176,
0.000023017510102363303,
-0.07623215019702911,
-0.10556460916996002,
0.037216123193502426,
-0.05447590723633766,
-0.01672249287366867,
0.06379424035549164,
0.008478074334561825,
0.0033829479943960905,
0.010925584472715855,
-0.03241490200161934,
0.04617740958929062,
-0.03938506171107292,
-0.05748340114951134,
0.16598930954933167,
-0.02817111276090145,
0.046278517693281174,
-0.09345486760139465,
-0.023513508960604668,
-0.02780858613550663,
0.07704994827508926,
0.0037627581041306257,
0.10216501355171204,
-0.01045356597751379,
0.012327933683991432,
-0.004822846036404371,
0.03064602054655552,
0.00580306863412261,
-0.0459267757833004,
-0.05505041405558586,
-0.006650633178651333,
0.04729695990681648,
0.006109047681093216,
-0.0032869037240743637,
0.04952089861035347,
0.019375259056687355,
-0.05450285226106644,
0.06559725105762482,
-0.005882153287529945,
-0.020475395023822784,
0.03941812738776207,
0.01208176463842392,
-0.016285490244627,
0.050547368824481964,
0.02151079662144184,
0.03442051634192467,
-0.034357015043497086,
-0.013708439655601978,
0.06759323179721832,
-0.006041564978659153,
0.06956330686807632,
-0.052540600299835205,
0.06357776373624802,
0.0373167023062706,
-0.055069442838430405,
0.03014039620757103,
-0.07816340774297714,
0.0659080371260643,
0.024193065240979195,
0.030285507440567017,
-0.070772685110569,
0.0408916212618351,
-0.02526305802166462,
-0.021937871351838112,
0.025369711220264435,
8.154299986530573e-33,
-0.012048001401126385,
0.04410092905163765,
-0.00908744614571333,
0.016142791137099266,
-0.05802140012383461,
0.05644666403532028,
0.05898712947964668,
0.06839747726917267,
0.0010785588528960943,
0.01179376058280468,
-0.07421920448541641,
-0.010046647861599922,
-0.12475636601448059,
0.03302004188299179,
-0.10969151556491852,
-0.039723362773656845,
0.013890988193452358,
0.005868218373507261,
0.03071446157991886,
-0.010693480260670185,
0.10697769373655319,
0.060195550322532654,
-0.07094643265008926,
-0.07972993701696396,
-0.07728621363639832,
-0.002701156074181199,
0.010320277884602547,
-0.09176890552043915,
-0.037205178290605545,
0.013617531396448612,
-0.010246725752949715,
0.018843969330191612,
-0.012544837780296803,
-0.01175269391387701,
-0.04364308342337608,
-0.03215520828962326,
-0.03890667110681534,
-0.040734343230724335,
-0.0696757510304451,
-0.04763233661651611,
-0.020682144910097122,
0.0967269241809845,
-0.039680115878582,
-0.055244557559490204,
-0.0033538411371409893,
-0.032255399972200394,
-0.011037319898605347,
0.05741491913795471,
0.0722481906414032,
-0.0057955351658165455,
-0.018518630415201187,
0.015630191192030907,
-0.009539441205561161,
0.062021613121032715,
0.08695156127214432,
-0.06341283023357391,
0.03061535954475403,
0.029939323663711548,
0.09127452969551086,
-0.04791468381881714,
0.015446305274963379,
0.007685063406825066,
-0.033009231090545654,
0.034697551280260086,
-0.012481692247092724,
-0.012697303667664528,
-0.0011786142131313682,
-0.0590938925743103,
-0.006762541830539703,
-0.010751772671937943,
-0.09652875363826752,
0.013548823073506355,
-0.040500860661268234,
0.0012570073595270514,
0.038963012397289276,
-0.08859645575284958,
0.017422618344426155,
-0.06316117197275162,
-0.0883200392127037,
-0.02743801474571228,
-0.044013865292072296,
0.05044566094875336,
-0.041643645614385605,
-0.02073029614984989,
-0.046077582985162735,
-0.04259686544537544,
0.051331035792827606,
-0.017321018502116203,
-0.03003106638789177,
-0.03435925021767616,
-0.0035958795342594385,
-0.10620524734258652,
0.011064086109399796,
0.03961215168237686,
-0.056321125477552414,
-7.042958239904412e-33,
-0.04960392415523529,
0.050642095506191254,
-0.03886566311120987,
0.009523418731987476,
-0.05291159823536873,
-0.006258233916014433,
0.06795443594455719,
0.021833760663866997,
-0.0260987039655447,
-0.039096321910619736,
0.027452440932393074,
-0.06644093245267868,
-0.025233987718820572,
-0.048252031207084656,
0.13019539415836334,
-0.009542353451251984,
-0.05925080552697182,
0.08956905454397202,
0.08734734356403351,
0.06235823780298233,
-0.07201658934354782,
0.10196831822395325,
-0.07349971681833267,
0.013003267347812653,
-0.024227237328886986,
0.033266838639974594,
0.008794855326414108,
0.050001222640275955,
0.04932644963264465,
-0.008819195441901684,
0.019557597115635872,
0.03470788523554802,
-0.06749878823757172,
0.11832613497972488,
-0.08857646584510803,
0.014158067293465137,
0.045929111540317535,
-0.009988944977521896,
-0.03230375424027443,
0.02367101050913334,
0.13347022235393524,
0.02537699043750763,
-0.07882542908191681,
0.04580982029438019,
-0.08013001084327698,
0.019056057557463646,
-0.020336497575044632,
-0.06874751299619675,
0.05660497024655342,
-0.06382321566343307,
0.05391569063067436,
-0.06121932715177536,
-0.07497566938400269,
0.014247901737689972,
-0.003555147210136056,
-0.004700073506683111,
0.06028098613023758,
-0.05597102269530296,
-0.06357623636722565,
-0.044644005596637726,
0.0066750431433320045,
-0.041371557861566544,
0.047510191798210144,
-0.06125284731388092,
0.02991885505616665,
-0.0076773990876972675,
-0.02096329629421234,
0.1351660043001175,
0.0450383685529232,
-0.024994203820824623,
-0.01873067207634449,
0.08443083614110947,
0.1221359446644783,
0.016785655170679092,
0.02377614937722683,
0.05796521529555321,
-0.08059129118919373,
-0.0039010560140013695,
0.015894340351223946,
-0.009715517051517963,
-0.10427409410476685,
-0.07038698345422745,
0.07698285579681396,
0.029597077518701553,
-0.04250739514827728,
0.008327200077474117,
0.07018697261810303,
0.0742715373635292,
0.04699762538075447,
-0.00902195181697607,
0.002111718524247408,
0.0036972800735384226,
0.07693823426961899,
0.14226727187633514,
0.057327911257743835,
-5.2069985656544304e-8,
-0.005984920542687178,
0.011938540264964104,
0.011058374308049679,
0.01788034662604332,
-0.04939020425081253,
-0.03890210762619972,
0.04443158209323883,
-0.02018895372748375,
-0.055522371083498,
0.008820249699056149,
0.01979929581284523,
-0.008445569314062595,
0.03739986568689346,
-0.014777162112295628,
0.039385274052619934,
0.09721482545137405,
-0.022800764068961143,
0.08504357933998108,
0.02393832616508007,
-0.0488617941737175,
0.02352455072104931,
-0.0035641018766909838,
-0.007266666274517775,
-0.012781970202922821,
-0.033906470984220505,
-0.01294602919369936,
-0.036700598895549774,
0.10029470920562744,
-0.007148922886699438,
-0.002857241779565811,
0.017853450030088425,
-0.029586834833025932,
0.06680221110582352,
0.022404758259654045,
0.016334379091858864,
0.07382957637310028,
-0.029810449108481407,
-0.020303884521126747,
0.02475007437169552,
0.01322996150702238,
-0.0018795087235048413,
0.02850344218313694,
-0.09681323170661926,
-0.02417970821261406,
0.07637675106525421,
-0.0068741850554943085,
-0.015356396324932575,
-0.07031375169754028,
0.011306875385344028,
0.03197241947054863,
-0.01821654848754406,
0.010260423645377159,
-0.006420536432415247,
0.05674060061573982,
-0.018547948449850082,
0.004146291874349117,
-0.08137201517820358,
0.0019104453967884183,
-0.017092430964112282,
-0.029339734464883804,
0.0014462224207818508,
0.030042806640267372,
-0.00447236280888319,
-0.03167876601219177
] |
Helsinki-NLP/opus-mt-de-es | d6bff091731341b977e4ca7294d2c309a2ca11e4 | 2021-09-09T21:30:58.000Z | [
"pytorch",
"marian",
"text2text-generation",
"de",
"es",
"transformers",
"translation",
"license:apache-2.0",
"autotrain_compatible"
] | translation | false | Helsinki-NLP | null | Helsinki-NLP/opus-mt-de-es | 1,171 | null | transformers | ---
tags:
- translation
license: apache-2.0
---
### opus-mt-de-es
* source languages: de
* target languages: es
* OPUS readme: [de-es](https://github.com/Helsinki-NLP/OPUS-MT-train/blob/master/models/de-es/README.md)
* dataset: opus
* model: transformer-align
* pre-processing: normalization + SentencePiece
* download original weights: [opus-2020-01-15.zip](https://object.pouta.csc.fi/OPUS-MT-models/de-es/opus-2020-01-15.zip)
* test set translations: [opus-2020-01-15.test.txt](https://object.pouta.csc.fi/OPUS-MT-models/de-es/opus-2020-01-15.test.txt)
* test set scores: [opus-2020-01-15.eval.txt](https://object.pouta.csc.fi/OPUS-MT-models/de-es/opus-2020-01-15.eval.txt)
## Benchmarks
| testset | BLEU | chr-F |
|-----------------------|-------|-------|
| Tatoeba.de.es | 48.5 | 0.676 |
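
A minimal usage sketch (not part of the original card), assuming the standard `transformers` MarianMT API:

```python
from transformers import MarianMTModel, MarianTokenizer

model_name = "Helsinki-NLP/opus-mt-de-es"
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)

# Translate German input into Spanish
inputs = tokenizer(["Das ist ein Beispiel."], return_tensors="pt", padding=True)
generated = model.generate(**inputs)
print(tokenizer.batch_decode(generated, skip_special_tokens=True))
```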
| [
-0.06485388427972794,
-0.02563769929111004,
0.02846563048660755,
-0.016802627593278885,
0.006782801356166601,
0.08687944710254669,
-0.05895353481173515,
0.028369540348649025,
0.025729505345225334,
-0.005012847017496824,
0.010840103030204773,
-0.04566425830125809,
-0.07901959121227264,
-0.029529526829719543,
-0.027518844231963158,
0.0005732090212404728,
-0.03075854480266571,
0.08815331012010574,
-0.061449356377124786,
-0.019054677337408066,
0.050317514687776566,
0.029444552958011627,
0.018962625414133072,
-0.01332562044262886,
0.10789336264133453,
0.08456870913505554,
-0.09812264144420624,
-0.001447363174520433,
0.0831022784113884,
-0.05244472250342369,
-0.0057943398132920265,
-0.001599155948497355,
0.05178213119506836,
0.07913786917924881,
0.0422062911093235,
0.0681954175233841,
-0.012171527370810509,
-0.07081291824579239,
-0.029232123866677284,
0.04732717573642731,
0.03773676976561546,
0.0648777186870575,
-0.0403386689722538,
-0.006551847793161869,
0.04372907802462578,
-0.0044009448029100895,
-0.09089457988739014,
0.03197181224822998,
0.014874139800667763,
0.0042717852629721165,
-0.10943914204835892,
-0.015451065264642239,
0.017339179292321205,
0.07176613807678223,
-0.07479706406593323,
0.04067787900567055,
0.05420541763305664,
-0.004978861194103956,
0.07304786890745163,
-0.022259023040533066,
-0.11627992987632751,
-0.026594582945108414,
-0.09589970111846924,
0.002558983862400055,
-0.009100385941565037,
-0.01483637373894453,
0.022037925198674202,
0.057734426110982895,
-0.06235697865486145,
0.06047472357749939,
-0.01626148261129856,
-0.004533515311777592,
0.013749467208981514,
0.053837116807699203,
-0.006578643340617418,
0.04467545822262764,
-0.003911660052835941,
-0.06291072815656662,
-0.006003131158649921,
-0.07341576367616653,
0.0009637266048230231,
-0.05752621591091156,
0.06698404997587204,
-0.005388668272644281,
0.07639362663030624,
0.005433536134660244,
0.026036787778139114,
-0.00022800295846536756,
-0.01644252985715866,
0.040672264993190765,
-0.07368602603673935,
-0.04502170905470848,
-0.0032548322342336178,
0.023682352155447006,
0.006985027343034744,
0.06362562626600266,
0.012537661008536816,
0.05947177857160568,
0.017760643735527992,
0.06285437196493149,
0.018335707485675812,
0.021703992038965225,
0.06882648915052414,
-0.03710843250155449,
-0.11260299384593964,
-0.0274407509714365,
0.06921795010566711,
0.04347916692495346,
0.0013347077183425426,
-0.09060879796743393,
0.029073452576994896,
-0.01940423808991909,
-0.020552849397063255,
-0.08373438566923141,
0.029332751408219337,
-0.05200205743312836,
0.010346771217882633,
-0.021691838279366493,
-0.010095561854541302,
0.04015365615487099,
-0.02645881101489067,
-0.012541827745735645,
-0.03396543487906456,
-0.003923377953469753,
-0.04650300741195679,
-0.05457567051053047,
0.02937033586204052,
1.1372939986629287e-33,
0.06612604111433029,
-0.022423923015594482,
-0.013414072804152966,
-0.0018252338049933314,
-0.05632906034588814,
-0.016637250781059265,
-0.0324736014008522,
0.03676325082778931,
-0.11547411233186722,
-0.0038468751590698957,
-0.016434993594884872,
-0.01242669764906168,
-0.08503320813179016,
0.02111024782061577,
-0.025310520082712173,
0.011744039133191109,
0.07615850120782852,
0.011474930681288242,
0.0440400205552578,
0.0350559763610363,
0.07368525117635727,
0.0431232750415802,
-0.0074404701590538025,
-0.034136202186346054,
-0.05209178104996681,
0.0632091611623764,
0.017028098925948143,
-0.10969629138708115,
-0.11761029064655304,
0.02450239285826683,
-0.10565638542175293,
0.022553086280822754,
-0.014795320108532906,
0.0099733741953969,
-0.00851472094655037,
-0.02341744489967823,
-0.007881908677518368,
-0.006571110337972641,
-0.029639828950166702,
-0.08484229445457458,
0.005151683464646339,
0.007569543085992336,
-0.012064892798662186,
-0.053874485194683075,
0.02117394283413887,
0.0119373993948102,
0.006624458357691765,
0.002916189143434167,
0.10536418855190277,
0.012600140646100044,
0.006357751786708832,
0.050473444163799286,
-0.06566052883863449,
-0.00456739729270339,
0.03257620334625244,
0.1114838570356369,
0.06709972023963928,
0.017527420073747635,
0.02869238518178463,
0.040079135447740555,
0.07722971588373184,
0.03137841448187828,
0.018738828599452972,
0.022551260888576508,
0.10715052485466003,
-0.010278300382196903,
-0.04722702503204346,
-0.079277902841568,
0.08979782462120056,
0.03390537202358246,
-0.14734096825122833,
-0.04994947463274002,
0.0621362142264843,
0.07741343975067139,
0.059128113090991974,
-0.024376800283789635,
-0.016153652220964432,
-0.018162164837121964,
-0.015201456844806671,
-0.01888841949403286,
-0.06604424118995667,
0.01580786146223545,
-0.010433944873511791,
-0.021072693169116974,
-0.02930353581905365,
0.007739667315036058,
0.04670415818691254,
-0.061429060995578766,
-0.03330567851662636,
0.008885910734534264,
0.035411447286605835,
0.04831349477171898,
-0.10314170271158218,
-0.023128947243094444,
0.0024557695724070072,
-1.5429077834702633e-33,
0.09334513545036316,
0.01618453860282898,
-0.04482848942279816,
0.07496064901351929,
-0.032198380678892136,
-0.07236898690462112,
0.0001345468481304124,
0.11292330175638199,
0.06462856382131577,
0.04933016002178192,
0.0640622153878212,
-0.14130190014839172,
0.02766181342303753,
-0.0806029886007309,
0.06793184578418732,
-0.03686337172985077,
-0.012312687002122402,
0.03355909138917923,
0.03084406815469265,
0.027004778385162354,
0.01172933354973793,
0.09272606670856476,
-0.024347811937332153,
0.0845346674323082,
-0.0034000719897449017,
-0.024732541292905807,
-0.024205757305026054,
0.06832806766033173,
0.0026091099716722965,
-0.008497556671500206,
0.004834409803152084,
0.0008067374583333731,
-0.10313589125871658,
-0.017219383269548416,
-0.09023289382457733,
0.045521583408117294,
0.03001413866877556,
0.03893038257956505,
0.04331030696630478,
0.06429064273834229,
0.05836384743452072,
0.06848333030939102,
-0.03751396760344505,
-0.039452724158763885,
0.027151431888341904,
-0.02702799066901207,
0.012443800456821918,
0.004108678083866835,
0.002447091741487384,
-0.07420787215232849,
0.020204773172736168,
0.0014082987327128649,
-0.0887952670454979,
-0.026712609454989433,
-0.017907671630382538,
-0.0856708288192749,
-0.009889150969684124,
-0.1375655084848404,
-0.05806293711066246,
-0.01782190054655075,
-0.010377964936196804,
0.03510083630681038,
-0.043401896953582764,
-0.07829337567090988,
0.033133167773485184,
-0.0006274761399254203,
0.038873471319675446,
0.012150635942816734,
0.01748363859951496,
0.0570620559155941,
-0.023238996043801308,
-0.06466969102621078,
0.06405925005674362,
0.095756895840168,
0.0029622847214341164,
-0.05179872736334801,
-0.054501164704561234,
0.04234854504466057,
0.056173957884311676,
-0.07638651132583618,
-0.02045157179236412,
0.014868903905153275,
0.0069887093268334866,
0.025160126388072968,
0.11307840049266815,
0.11443629860877991,
0.02197394333779812,
0.0006017799023538828,
-0.009174053557217121,
0.07049599289894104,
0.01614275760948658,
0.015768837183713913,
0.018880067393183708,
0.11031778156757355,
-0.003649083198979497,
-4.9338346741478745e-8,
-0.10011136531829834,
0.013629459775984287,
-0.10252000391483307,
0.04527297243475914,
-0.04228712245821953,
-0.07139229029417038,
-0.05270639434456825,
-0.021097416058182716,
-0.029565799981355667,
-0.03391006216406822,
0.0005134809180162847,
0.017744706943631172,
-0.07867185771465302,
-0.0015149771934375167,
-0.05059043690562248,
0.027968307957053185,
-0.013523466885089874,
0.08687052130699158,
-0.028274215757846832,
-0.03659835457801819,
0.058893684297800064,
0.04907757043838501,
0.041262928396463394,
-0.07525958120822906,
0.0006021265871822834,
0.004406356252729893,
-0.03805999830365181,
0.027639079838991165,
0.013487192802131176,
0.0005090137710794806,
0.03482354059815407,
0.044857561588287354,
-0.017039550468325615,
-0.10153115540742874,
0.05083070695400238,
0.06167840585112572,
0.0018974156118929386,
-0.028979545459151268,
-0.012098494917154312,
0.06968516111373901,
0.09059187024831772,
0.041906435042619705,
-0.1170981153845787,
0.02089092507958412,
0.026346480473876,
-0.029477614909410477,
-0.04983847960829735,
-0.0289370846003294,
0.0341729111969471,
-0.06592880934476852,
0.0776539146900177,
-0.07537885010242462,
-0.06237109750509262,
0.023657573387026787,
0.024319341406226158,
0.012000921182334423,
0.061779916286468506,
-0.010978377424180508,
0.009213604032993317,
-0.02532244101166725,
0.04864831641316414,
-0.02662753500044346,
-0.019837727770209312,
-0.016634931787848473
] |
facebook/wmt21-dense-24-wide-x-en | b5e35923f54293f03bd6072b93585124475829e0 | 2022-05-26T22:27:50.000Z | [
"pytorch",
"m2m_100",
"text2text-generation",
"multilingual",
"ha",
"is",
"ja",
"cs",
"ru",
"zh",
"de",
"en",
"arxiv:2108.03265",
"transformers",
"translation",
"wmt21",
"license:mit",
"autotrain_compatible"
] | translation | false | facebook | null | facebook/wmt21-dense-24-wide-x-en | 1,166 | 6 | transformers | ---
language:
- multilingual
- ha
- is
- ja
- cs
- ru
- zh
- de
- en
license: mit
tags:
- translation
- wmt21
---
# WMT 21 X-En
WMT 21 X-En is a 4.7B multilingual encoder-decoder (seq-to-seq) model trained for many-to-one multilingual translation.
It was introduced in this [paper](https://arxiv.org/abs/2108.03265) and first released in [this](https://github.com/pytorch/fairseq/tree/main/examples/wmt21) repository.
The model can directly translate text from 7 languages: Hausa (ha), Icelandic (is), Japanese (ja), Czech (cs), Russian (ru), Chinese (zh), German (de) to English.
To translate into the target language, the target language id is forced as the first generated token; to do this, pass the `forced_bos_token_id` parameter to the `generate` method.
*Note: `M2M100Tokenizer` depends on `sentencepiece`, so make sure to install it before running the example.*
To install `sentencepiece` run `pip install sentencepiece`
Since the model was trained with domain tags, you should prepend them to the input as well.
* "wmtdata newsdomain": Use for sentences in the news domain
* "wmtdata otherdomain": Use for sentences in all other domain
```python
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer
model = AutoModelForSeq2SeqLM.from_pretrained("facebook/wmt21-dense-24-wide-x-en")
tokenizer = AutoTokenizer.from_pretrained("facebook/wmt21-dense-24-wide-x-en")
# translate German to English
tokenizer.src_lang = "de"
inputs = tokenizer("wmtdata newsdomain Ein Modell für viele Sprachen", return_tensors="pt")
generated_tokens = model.generate(**inputs)
tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)
# => "A model for many languages"
# translate Icelandic to English
tokenizer.src_lang = "is"
inputs = tokenizer("wmtdata newsdomain Ein fyrirmynd fyrir mörg tungumál", return_tensors="pt")
generated_tokens = model.generate(**inputs)
tokenizer.batch_decode(generated_tokens, skip_special_tokens=True)
# => "One model for many languages"
```
See the [model hub](https://huggingface.co/models?filter=wmt21) to look for more fine-tuned versions.
## Languages covered
English (en), Hausa (ha), Icelandic (is), Japanese (ja), Czech (cs), Russian (ru), Chinese (zh), German (de)
## BibTeX entry and citation info
```
@inproceedings{tran2021facebook,
title={Facebook AI’s WMT21 News Translation Task Submission},
author={Chau Tran and Shruti Bhosale and James Cross and Philipp Koehn and Sergey Edunov and Angela Fan},
booktitle={Proc. of WMT},
year={2021},
}
``` | [
-0.0749499723315239,
-0.017996912822127342,
-0.05289164185523987,
0.04416022077202797,
-0.006946846842765808,
0.0016231106128543615,
0.02580428123474121,
-0.01508022379130125,
0.0007713652448728681,
-0.03496319428086281,
0.02793782949447632,
-0.11903979629278183,
0.07667510956525803,
-0.028965922072529793,
0.04451853409409523,
0.0042710620909929276,
-0.04744679108262062,
0.027928441762924194,
-0.05256379768252373,
-0.1522359400987625,
0.08082424104213715,
0.046651121228933334,
0.05296145752072334,
0.018093066290020943,
0.05357234925031662,
-0.10712388902902603,
-0.007374085485935211,
0.03530736267566681,
0.08349748700857162,
-0.04390404745936394,
0.03588802367448807,
0.05947858467698097,
0.02604644186794758,
0.0825154185295105,
0.010937141254544258,
0.05829737335443497,
-0.06994730979204178,
-0.13197621703147888,
-0.017707915976643562,
-0.016183292493224144,
-0.007360951974987984,
0.024299759417772293,
-0.02534577064216137,
-0.004563711117953062,
0.06638135015964508,
0.01217389665544033,
0.014523348771035671,
0.027456054463982582,
-0.06325333565473557,
0.0604262612760067,
-0.04951176792383194,
-0.012310829944908619,
0.013896498829126358,
0.07491303980350494,
0.0149958161637187,
-0.06915731728076935,
-0.04503801465034485,
0.014155601151287556,
0.02289527840912342,
-0.029656337574124336,
-0.09034799039363861,
0.0014055370120331645,
0.024108344689011574,
-0.017339656129479408,
-0.026927892118692398,
-0.08097728341817856,
0.024194853380322456,
0.054196566343307495,
-0.028173154219985008,
-0.05496317148208618,
-0.014991129748523235,
-0.02636108174920082,
-0.08301610499620438,
0.058101553469896317,
-0.006957435514777899,
0.038439325988292694,
0.06097019836306572,
-0.017809823155403137,
0.02606804482638836,
-0.0446351058781147,
0.0602671355009079,
0.02099132537841797,
0.11147962510585785,
-0.00294718099758029,
0.03943442180752754,
0.012448365800082684,
-0.07572966068983078,
0.0519978292286396,
0.0686514601111412,
0.01578340120613575,
-0.002104140818119049,
-0.05631343647837639,
0.041062816977500916,
0.07002122700214386,
0.07429300993680954,
0.023062948137521744,
0.05690933018922806,
0.10492558032274246,
0.05450337007641792,
0.07973968982696533,
0.052102990448474884,
0.038731664419174194,
0.034762486815452576,
0.014500897377729416,
-0.02391093038022518,
-0.04461047798395157,
0.06068859621882439,
0.028905145823955536,
0.033964261412620544,
-0.10070466250181198,
-0.013470165431499481,
0.001186802051961422,
0.03996659442782402,
-0.0144944554194808,
-0.01939339190721512,
0.07435217499732971,
0.011797926388680935,
0.021661978214979172,
0.0322025790810585,
-0.059651512652635574,
-0.0807449221611023,
0.00027008139295503497,
0.030507924035191536,
-0.010582017712295055,
-0.009000401012599468,
-0.04024682193994522,
-0.05425306782126427,
-2.3110262559026847e-34,
0.01706697978079319,
0.05857293680310249,
-0.004577586892992258,
-0.01377124898135662,
-0.0496658980846405,
-0.02522786147892475,
0.03805812820792198,
0.019470006227493286,
-0.09595631808042526,
-0.0011473654303699732,
-0.0286728348582983,
-0.011470283381640911,
-0.040221258997917175,
0.031575094908475876,
-0.016696782782673836,
-0.018914632499217987,
-0.037465065717697144,
0.012092222459614277,
0.002114047994837165,
0.09861573576927185,
0.0639771893620491,
-0.0057985251769423485,
-0.06413701176643372,
-0.02438363991677761,
-0.053304098546504974,
0.07492679357528687,
0.04914400354027748,
-0.058263640850782394,
0.031211931258440018,
0.004621859639883041,
-0.06446661055088043,
-0.045485083013772964,
0.003937678411602974,
0.013653259724378586,
-0.037968359887599945,
-0.053014617413282394,
-0.059924717992544174,
-0.008730853907763958,
-0.06836088746786118,
-0.10555170476436615,
-0.00995172280818224,
0.040592748671770096,
-0.027652187272906303,
-0.00008507502207066864,
-0.017481965944170952,
-0.06185442581772804,
-0.023761577904224396,
-0.04168074205517769,
0.014203603379428387,
-0.006787247955799103,
-0.05570141226053238,
0.021943774074316025,
-0.017124755308032036,
-0.08503668755292892,
0.09363099932670593,
-0.009985760785639286,
0.08109253644943237,
0.03515198454260826,
0.028058921918272972,
0.09765352308750153,
-0.03829704970121384,
-0.06191333010792732,
0.02311551757156849,
0.08901260048151016,
0.14722654223442078,
0.04028548672795296,
-0.0237989891320467,
-0.032950956374406815,
0.04798972234129906,
0.03802214190363884,
-0.012089988216757774,
-0.04154535382986069,
-0.07289615273475647,
-0.031758543103933334,
-0.027528440579771996,
-0.059815652668476105,
-0.015480564907193184,
-0.07767150551080704,
-0.0006920975865796208,
-0.003597815055400133,
-0.02989093028008938,
0.03206804394721985,
-0.02689092420041561,
0.03567608818411827,
-0.05892321839928627,
0.006178176961839199,
0.018508559092879295,
-0.07309871166944504,
-0.005668117199093103,
-0.01275361143052578,
0.07974794507026672,
-0.03648929297924042,
0.009845573455095291,
-0.05385422706604004,
0.01826012134552002,
8.469834445372946e-34,
-0.03751270845532417,
0.06665927916765213,
-0.08246158808469772,
0.0449824295938015,
-0.08728273957967758,
-0.10202725231647491,
0.09741442650556564,
0.06563740223646164,
0.014745092950761318,
-0.03587191179394722,
0.050601325929164886,
-0.08813099563121796,
0.029872342944145203,
0.04270891100168228,
0.1174984872341156,
0.008724215440452099,
0.0487666130065918,
0.038317326456308365,
0.02844448946416378,
0.11104071885347366,
-0.032254721969366074,
0.05844060331583023,
-0.09444765746593475,
0.007707921322435141,
-0.022149300202727318,
0.042600929737091064,
0.03255609795451164,
0.005583326797932386,
-0.0006852081278339028,
0.04761736840009689,
-0.04816162958741188,
-0.08400179445743561,
-0.006320293061435223,
0.041490666568279266,
-0.06805378198623657,
-0.05899382010102272,
0.0763719230890274,
0.05672731623053551,
-0.058891307562589645,
0.027786146849393845,
0.007875367999076843,
0.009774637408554554,
-0.004172480199486017,
0.04203224554657936,
0.0037242788821458817,
0.1295795887708664,
-0.09004940837621689,
-0.020983245223760605,
-0.03086056560277939,
-0.052594032138586044,
0.06552248448133469,
0.0292936023324728,
-0.05701962858438492,
-0.01600656285881996,
-0.0068643162958323956,
-0.10528011620044708,
0.0024865709710866213,
-0.025898467749357224,
-0.03278182074427605,
-0.12404840439558029,
0.006508754100650549,
-0.030110683292150497,
0.033229533582925797,
0.02532762661576271,
-0.002025259891524911,
-0.0451037660241127,
-0.011845684610307217,
0.0053190565668046474,
-0.0019474815344437957,
-0.07451058179140091,
0.016152624040842056,
-0.013121037743985653,
0.026607753708958626,
0.03321770206093788,
0.024539224803447723,
-0.033943045884370804,
-0.04279060289263725,
0.014993637800216675,
0.005522202700376511,
-0.05288205295801163,
-0.06108978018164635,
-0.02951904572546482,
0.05161432921886444,
0.03357560560107231,
0.05048203095793724,
-0.0670037791132927,
0.02387760952115059,
0.089571513235569,
0.007192213088274002,
0.06583072245121002,
-0.003818545024842024,
0.037126325070858,
0.020698431879281998,
0.09291426837444305,
-0.050134241580963135,
-4.954553389779903e-8,
-0.1121099442243576,
-0.04385922849178314,
-0.12981486320495605,
-0.0012932162499055266,
-0.02966594696044922,
-0.00848891120404005,
-0.05339107662439346,
-0.025150682777166367,
0.012860625050961971,
0.04759145900607109,
-0.036802493035793304,
0.04504306986927986,
-0.038256458938121796,
-0.029813840985298157,
0.014824265614151955,
0.017147982493042946,
0.039097171276807785,
0.08854135870933533,
0.03037005104124546,
0.02475608140230179,
0.02141842059791088,
0.0737949013710022,
0.00811754073947668,
-0.005695746745914221,
0.023380331695079803,
0.009883212856948376,
-0.00846029445528984,
0.008449015207588673,
0.09404689818620682,
-0.15936827659606934,
-0.033659011125564575,
0.005789892747998238,
0.01503049861639738,
0.007444838993251324,
0.0034637756180018187,
0.045695796608924866,
-0.029673879966139793,
-0.008445297367870808,
0.04325418919324875,
0.07046689838171005,
0.07309149205684662,
0.025957556441426277,
-0.10676883906126022,
0.004055837634950876,
0.024829309433698654,
-0.01692289300262928,
-0.047531608492136,
-0.09517453610897064,
0.03704923391342163,
-0.00279922503978014,
0.037090785801410675,
-0.06896711885929108,
0.027221854776144028,
-0.019528308883309364,
0.046472128480672836,
0.0581333190202713,
0.010253840126097202,
-0.05334550887346268,
0.07948556542396545,
0.046163562685251236,
0.08153286576271057,
0.030020466074347496,
0.011853955686092377,
-0.02956182323396206
] |
textattack/roberta-base-ag-news | 80f0a42b53970634dc15f4b59342978410585b46 | 2021-05-20T22:15:20.000Z | [
"pytorch",
"jax",
"roberta",
"text-classification",
"transformers"
] | text-classification | false | textattack | null | textattack/roberta-base-ag-news | 1,166 | 1 | transformers | ## TextAttack Model Card

This `roberta-base` model was fine-tuned for sequence classification using TextAttack
and the ag_news dataset loaded using the `nlp` library. The model was fine-tuned
for 5 epochs with a batch size of 16, a learning
rate of 5e-05, and a maximum sequence length of 128.
Since this was a classification task, the model was trained with a cross-entropy loss function.
The best score the model achieved on this task was 0.9469736842105263, as measured by the
eval set accuracy, found after 4 epochs.
For more information, check out [TextAttack on Github](https://github.com/QData/TextAttack).
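
A minimal inference sketch (not part of the original card), assuming the standard `transformers` text-classification pipeline; the example sentence is illustrative only:

```python
from transformers import pipeline

# AG News covers four topics: World, Sports, Business and Sci/Tech.
classifier = pipeline("text-classification", model="textattack/roberta-base-ag-news")
print(classifier("The championship game went into overtime before the home side finally won."))
```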
| [
-0.07643035054206848,
-0.03920567408204079,
-0.04880964383482933,
0.013585930690169334,
-0.005392242223024368,
0.08895982801914215,
-0.04113169014453888,
0.037072159349918365,
0.04997522011399269,
-0.03783588856458664,
-0.03604700043797493,
0.046886250376701355,
-0.012139647267758846,
0.013740080408751965,
-0.06269499659538269,
0.0040888674557209015,
0.030251076444983482,
-0.04148251563310623,
-0.05920330435037613,
-0.03456221893429756,
0.02954917401075363,
0.1066751629114151,
0.11245973408222198,
-0.02013455145061016,
0.06804953515529633,
0.015995953232049942,
-0.08825861662626266,
0.038615163415670395,
0.03341813385486603,
0.00575430691242218,
0.010760357603430748,
-0.0017077605007216334,
0.13344483077526093,
0.04965886473655701,
-0.023195192217826843,
0.06644842773675919,
-0.04305477440357208,
-0.05057867243885994,
0.0024682360235601664,
0.02122604474425316,
0.047382403165102005,
-0.006311315111815929,
-0.00004263049777364358,
0.0637843906879425,
0.07142221927642822,
-0.059912484139204025,
-0.020913124084472656,
0.000707252649590373,
-0.037266772240400314,
0.005490331444889307,
-0.07008010149002075,
0.01747553050518036,
0.016325760632753372,
0.06993543356657028,
-0.09586858004331589,
-0.013503752648830414,
-0.060635410249233246,
-0.022118857130408287,
-0.04990845173597336,
-0.04272353649139404,
0.0017119324766099453,
-0.06802690029144287,
-0.08587951958179474,
-0.03476352617144585,
0.03108164668083191,
-0.04271766543388367,
-0.008636021986603737,
0.00006801081326557323,
0.01583508588373661,
-0.03453873097896576,
0.00535207474604249,
0.019168110564351082,
-0.007820285856723785,
0.09752354025840759,
0.034575119614601135,
0.07582119107246399,
0.07767388224601746,
0.0030745617114007473,
0.02890424244105816,
-0.06333141028881073,
-0.0074472203850746155,
-0.05376000329852104,
0.06951215118169785,
0.029785024002194405,
0.04382520914077759,
-0.031894925981760025,
0.0356694795191288,
0.061386838555336,
-0.04626878723502159,
-0.01257409993559122,
0.03441161662340164,
-0.08494313806295395,
0.09819085150957108,
-0.05226258561015129,
-0.05896034091711044,
0.07072731107473373,
0.01253463514149189,
-0.003952869214117527,
-0.048446591943502426,
0.046231213957071304,
0.01950451359152794,
0.05547444522380829,
-0.03166286647319794,
-0.03161908686161041,
0.01914106495678425,
-0.031552255153656006,
0.09305790811777115,
0.02503780461847782,
0.018312769010663033,
-0.09551011025905609,
0.09141917526721954,
0.008402164094150066,
-0.08138883113861084,
-0.02923724800348282,
0.05929753929376602,
0.000780724105425179,
-0.04631161317229271,
-0.002681280020624399,
0.0046080551110208035,
0.10985305160284042,
-0.03833877667784691,
-0.005499671213328838,
-0.026983527466654778,
0.02997109852731228,
-0.046816181391477585,
0.03536725789308548,
-0.0062535787001252174,
4.806506034384114e-33,
0.03322828188538551,
0.03830321505665779,
0.026544148102402687,
-0.02604372426867485,
-0.01057196594774723,
-0.013782927766442299,
-0.03406558558344841,
0.00985887460410595,
-0.03528284654021263,
0.02616308070719242,
-0.0347459502518177,
0.02406715601682663,
-0.0051415953785181046,
0.000254539045272395,
0.026132363826036453,
-0.03112858161330223,
-0.11366213113069534,
-0.004893921781331301,
0.0003405555908102542,
0.03761409595608711,
0.045025937259197235,
0.022081678733229637,
0.019465681165456772,
-0.0770493745803833,
0.017534002661705017,
0.09034460037946701,
0.04526981711387634,
-0.05530641973018646,
-0.041103485971689224,
0.03215614706277847,
-0.07299420237541199,
0.05498477444052696,
-0.004495921544730663,
0.018875079229474068,
0.01156842801719904,
-0.007290639914572239,
-0.06654807925224304,
-0.029264401644468307,
0.050208985805511475,
-0.018749769777059555,
-0.030542001128196716,
0.007804120425134897,
0.05581344664096832,
-0.053062573075294495,
-0.01287405751645565,
0.025260038673877716,
-0.005758162587881088,
0.03974828124046326,
0.014898994006216526,
0.01329792384058237,
0.06990857422351837,
-0.002887631068006158,
-0.00613265298306942,
0.06046142056584358,
0.0027973572723567486,
0.06075423210859299,
0.10849873721599579,
0.0625874325633049,
0.021203404292464256,
0.06789716333150864,
0.045853763818740845,
0.013711407780647278,
0.07377869635820389,
-0.024883970618247986,
-0.022860391065478325,
-0.027967913076281548,
-0.04012564569711685,
-0.018334437161684036,
0.0034035767894238234,
0.08059128373861313,
0.0154039291664958,
0.007148580625653267,
-0.07230301946401596,
0.019743427634239197,
0.025990068912506104,
-0.014179927296936512,
0.059941135346889496,
-0.07525408267974854,
-0.03293285146355629,
0.04729378595948219,
0.014095201157033443,
0.0030885834712535143,
0.0455440916121006,
-0.08072710037231445,
-0.07596968114376068,
-0.05173943564295769,
0.03568794205784798,
-0.09234264492988586,
-0.0798913910984993,
0.010263731703162193,
0.01373547688126564,
0.09625016152858734,
0.016780709847807884,
0.005251883529126644,
-0.027505656704306602,
-3.944189092559808e-33,
0.031227601692080498,
0.002156286733224988,
-0.051925916224718094,
0.09769706428050995,
-0.0152851277962327,
-0.07505109906196594,
-0.05229582265019417,
0.13262313604354858,
-0.03966608643531799,
0.004403050988912582,
0.09808694571256638,
0.023190254345536232,
0.018573598936200142,
-0.02443471923470497,
0.014915567822754383,
0.05591804161667824,
-0.009075152687728405,
-0.022304916754364967,
0.05875536799430847,
0.030669022351503372,
0.002206629840657115,
0.10023416578769684,
-0.05632596090435982,
0.057214196771383286,
0.012692994438111782,
-0.02549615316092968,
0.03816824406385422,
0.039761342108249664,
0.05000944063067436,
-0.09908900409936905,
-0.040190573781728745,
-0.026904145255684853,
0.01556660607457161,
0.019638363271951675,
-0.07488375902175903,
0.08265494555234909,
0.0754980593919754,
-0.05238112807273865,
-0.003233762923628092,
0.09135753661394119,
0.09148290008306503,
0.0655975267291069,
-0.09291757643222809,
0.032875243574380875,
-0.037390097975730896,
-0.04101889953017235,
-0.1497965008020401,
0.043257273733615875,
0.02626909129321575,
0.031155042350292206,
0.018177548423409462,
-0.028723642230033875,
-0.10091054439544678,
0.08907680958509445,
-0.044725410640239716,
-0.06997616589069366,
0.00796644389629364,
-0.018390590324997902,
-0.08446930348873138,
0.052119284868240356,
-0.04898032173514366,
0.06900972872972488,
0.009346461854875088,
-0.018073253333568573,
0.10317759215831757,
-0.052388180047273636,
0.01095479354262352,
-0.014955583959817886,
-0.06556318700313568,
0.029185937717556953,
0.07822270691394806,
-0.033205967396497726,
0.010124366730451584,
0.069294273853302,
-0.015120407566428185,
0.004425555933266878,
-0.0334920696914196,
0.004525734111666679,
-0.08901277184486389,
-0.07069680094718933,
-0.07374175637960434,
0.03134214133024216,
-0.013857780024409294,
0.08384769409894943,
0.03097081556916237,
0.1260438859462738,
0.08884608745574951,
0.03628690913319588,
-0.02134750410914421,
0.052197545766830444,
0.008986338973045349,
0.012743977829813957,
0.039519645273685455,
0.09895311295986176,
-0.040406424552202225,
-5.2443503761878674e-8,
-0.029924891889095306,
0.005857640411704779,
-0.08003973960876465,
0.011625983752310276,
-0.05254537984728813,
-0.014424266293644905,
-0.04002878814935684,
0.01304317731410265,
-0.017982540652155876,
-0.010500309988856316,
0.018413469195365906,
0.017343254759907722,
-0.09658314287662506,
0.000682126497849822,
-0.008222358301281929,
-0.022226078435778618,
0.0184626504778862,
0.006047130096703768,
-0.024474037811160088,
-0.03577098250389099,
0.08154286444187164,
0.04562658444046974,
-0.012479529716074467,
-0.029645467177033424,
-0.03401010110974312,
-0.0423566959798336,
-0.06641575694084167,
0.10485537350177765,
-0.0016102356603369117,
-0.09994122385978699,
0.026245079934597015,
0.0035464249085634947,
-0.06801578402519226,
-0.08478904515504837,
-0.0846337229013443,
0.0830283984541893,
-0.019515516236424446,
-0.09372793883085251,
0.01581598073244095,
0.09566135704517365,
0.04820898920297623,
0.008858962915837765,
-0.08381573855876923,
-0.05228276550769806,
-0.04457201808691025,
-0.046751637011766434,
-0.04253516346216202,
-0.09984245151281357,
0.07627013325691223,
-0.020026519894599915,
0.057394400238990784,
-0.03826289251446724,
-0.06955412030220032,
0.003955910447984934,
0.05639934912323952,
-0.05720474198460579,
-0.022459160536527634,
-0.01808338239789009,
0.04632261395454407,
0.005491972900927067,
-0.002387360669672489,
-0.06256870180368423,
-0.008420943282544613,
-0.0003805234737228602
] |
CAUKiel/JavaBERT | 5028efb75040cbd2fe33e10fe5f4c232b455cee8 | 2022-07-19T18:45:37.000Z | [
"pytorch",
"bert",
"fill-mask",
"code",
"transformers",
"license:apache-2.0",
"autotrain_compatible"
] | fill-mask | false | CAUKiel | null | CAUKiel/JavaBERT | 1,165 | 4 | transformers | ---
language:
- code
license: apache-2.0
widget:
- text: 'public [MASK] isOdd(Integer num) {if (num % 2 == 0) {return "even";} else {return "odd";}}'
---
## JavaBERT
A BERT-like model pretrained on Java software code.
### Training Data
The model was trained on 2,998,345 Java files retrieved from open source projects on GitHub. A ```bert-base-cased``` tokenizer is used by this model.
### Training Objective
An MLM (Masked Language Model) objective was used to train this model.
### Usage
```python
from transformers import pipeline

pipe = pipeline('fill-mask', model='CAUKiel/JavaBERT')
# Use '[MASK]' to mask the token/word to predict in your Java code, e.g.:
code = 'public [MASK] isOdd(Integer num) {if (num % 2 == 0) {return "even";} else {return "odd";}}'
output = pipe(code)
```
#### Related Model
A version of this model using an uncased tokenizer is available at [CAUKiel/JavaBERT-uncased](https://huggingface.co/CAUKiel/JavaBERT-uncased).
| [
-0.10622118413448334,
-0.05139143019914627,
0.09822454303503036,
-0.016405073925852776,
-0.024515850469470024,
0.04412556812167168,
-0.007597534917294979,
0.040869828313589096,
0.001397851388901472,
-0.04624391347169876,
0.03930819779634476,
-0.06046716868877411,
0.04226129874587059,
0.028677688911557198,
0.056666359305381775,
0.05846494063735008,
0.038744743913412094,
0.04665963724255562,
-0.10344762355089188,
-0.11576389521360397,
0.10263904929161072,
0.08750398457050323,
0.021397249773144722,
-0.06413080543279648,
0.006485688034445047,
0.005411582067608833,
-0.039383694529533386,
-0.07115352898836136,
0.1028272733092308,
0.06838284432888031,
0.02742423303425312,
-0.03786168992519379,
0.011804785579442978,
0.038353972136974335,
0.031729940325021744,
0.08136626332998276,
-0.02342911623418331,
-0.003366523189470172,
0.08032077550888062,
0.0241146981716156,
-0.04743603989481926,
-0.002401904668658972,
-0.06865699589252472,
-0.0637410506606102,
0.039670124650001526,
-0.057555705308914185,
-0.04632933810353279,
-0.0027417014352977276,
-0.09009630978107452,
-0.03764459863305092,
-0.07264779508113861,
-0.007547480054199696,
0.05421822518110275,
-0.0034033474512398243,
-0.0531139001250267,
-0.0024064406752586365,
0.03709762543439865,
-0.022219831123948097,
0.0011571915820240974,
-0.11562470346689224,
-0.1230243369936943,
-0.04060043394565582,
0.015117091126739979,
-0.0011458848603069782,
-0.10456951707601547,
-0.02052142471075058,
-0.0672418624162674,
0.047315724194049835,
0.0597887746989727,
0.05037553235888481,
-0.04123029485344887,
0.03759172186255455,
0.0091311139985919,
0.0036988859064877033,
0.0024101221933960915,
-0.037842001765966415,
0.0943956971168518,
0.001955818384885788,
0.043903570622205734,
-0.025444911792874336,
0.009704958647489548,
-0.05086911469697952,
0.07260002940893173,
0.05242623761296272,
0.06336957216262817,
0.015206526964902878,
0.030497150495648384,
-0.0038939930964261293,
-0.004764235578477383,
0.03237884119153023,
-0.041635528206825256,
-0.11947540938854218,
0.08136901259422302,
0.012404462322592735,
0.012412493117153645,
-0.012558476999402046,
0.04496027156710625,
0.043734364211559296,
-0.026239750906825066,
0.10257242619991302,
-0.023229161277413368,
0.02311820536851883,
0.01775163970887661,
-0.07939759641885757,
0.06637300550937653,
0.06231299042701721,
-0.05185062810778618,
-0.02671814151108265,
0.09832388162612915,
-0.0999988541007042,
-0.0031300347764045,
-0.04701261222362518,
-0.02092740684747696,
-0.06940269470214844,
-0.012262596748769283,
-0.011564305983483791,
0.027928810566663742,
-0.030554359778761864,
-0.018395621329545975,
0.10130985826253891,
0.03947603702545166,
-0.002449551597237587,
-0.00465521402657032,
0.06720233708620071,
-0.06718124449253082,
0.010617496445775032,
-0.004041336942464113,
4.4085038510048424e-33,
-0.01597336120903492,
-0.023463046178221703,
-0.01136693824082613,
-0.01788037270307541,
-0.005773602519184351,
-0.0047480762004852295,
0.010944130830466747,
0.03344400227069855,
-0.05570165067911148,
-0.040674496442079544,
0.016986358910799026,
0.0011427750578150153,
-0.11028368771076202,
0.044509436935186386,
-0.08201908320188522,
0.0058827828615903854,
-0.0027968573849648237,
0.0236998051404953,
0.06849251687526703,
-0.052350569516420364,
0.08409945666790009,
0.02628452144563198,
-0.02995743416249752,
-0.04296935349702835,
-0.067268967628479,
0.04814203456044197,
0.08585543185472488,
-0.09224173426628113,
-0.003968114499002695,
0.055615972727537155,
-0.0860828384757042,
0.026884829625487328,
-0.0330217070877552,
0.006977951619774103,
-0.017191806808114052,
-0.01831720396876335,
0.05732788145542145,
-0.0561019703745842,
-0.017986513674259186,
-0.05490916594862938,
-0.06218307837843895,
0.015179402194917202,
-0.008552889339625835,
-0.08792582154273987,
0.0002212872786913067,
-0.022783201187849045,
0.028562471270561218,
-0.014478775672614574,
0.018794583156704903,
0.023370813578367233,
0.03913457691669464,
0.04271472990512848,
-0.009444032795727253,
0.043254636228084564,
0.02455616183578968,
-0.06192304193973541,
0.04693305492401123,
0.04649970680475235,
0.059834182262420654,
-0.021196167916059494,
-0.02653021551668644,
0.007494989316910505,
0.05616780370473862,
0.02374003455042839,
0.029347117990255356,
-0.03238532692193985,
-0.02157238870859146,
0.005630658008158207,
0.020266730338335037,
0.035316091030836105,
-0.03958127275109291,
0.0026403628289699554,
-0.02032417617738247,
0.037355173379182816,
0.004189160652458668,
-0.08980350941419601,
0.026340456679463387,
-0.05722922459244728,
-0.06539418548345566,
-0.03514409065246582,
0.021105915307998657,
0.030452460050582886,
-0.052131280303001404,
-0.04989975318312645,
-0.05740095674991608,
-0.020977603271603584,
0.04440324380993843,
-0.04738074913620949,
0.0029716864228248596,
-0.016944851726293564,
0.0573652982711792,
-0.051549360156059265,
-0.039785679429769516,
0.04457373544573784,
-0.0501863993704319,
-5.7244076959549904e-33,
-0.03308456763625145,
0.07870063185691833,
-0.09453751891851425,
0.045785579830408096,
-0.06454283744096756,
-0.0952075645327568,
0.0777982771396637,
0.18913739919662476,
0.017477992922067642,
-0.01328734029084444,
-0.030104340985417366,
-0.042708516120910645,
-0.018730366602540016,
-0.01970534771680832,
0.05418621376156807,
0.03185882046818733,
-0.02317206934094429,
0.03956175595521927,
0.006295571569353342,
0.03720922768115997,
0.010602572001516819,
0.006834774743765593,
-0.09151104837656021,
0.0868319422006607,
-0.0192129947245121,
0.1191149353981018,
-0.08487097918987274,
0.06137217953801155,
0.052697524428367615,
0.021935639902949333,
-0.021924173459410667,
0.06156683713197708,
-0.05217580124735832,
0.03191656619310379,
-0.06895509362220764,
-0.0025540865026414394,
0.0052392021752893925,
0.0564035028219223,
-0.00934362318366766,
0.026516051962971687,
0.032648228108882904,
-0.041784077882766724,
-0.03232872113585472,
0.08606767654418945,
-0.01028906274586916,
-0.025635359808802605,
0.0008110456983558834,
-0.05778976157307625,
0.046050842851400375,
-0.08871675282716751,
0.03760307654738426,
-0.00046325413859449327,
-0.06668917089700699,
-0.013094635680317879,
-0.08859790116548538,
-0.03375948965549469,
0.025531983003020287,
-0.03901270404458046,
-0.09229671210050583,
0.00943802110850811,
-0.006826755125075579,
-0.028187232092022896,
0.05385347083210945,
-0.018145928159356117,
-0.003433608915656805,
-0.06096548214554787,
-0.019588181748986244,
0.08215754479169846,
-0.015269695781171322,
-0.05277283117175102,
0.03170689940452576,
0.028884470462799072,
0.04935408756136894,
0.02002212032675743,
0.07813920080661774,
0.040503934025764465,
-0.016968082636594772,
-0.11776653677225113,
-0.040143392980098724,
0.019190357998013496,
-0.06114588677883148,
-0.028848808258771896,
0.06876509636640549,
0.04616463929414749,
0.007304704748094082,
0.02346952259540558,
0.021829577162861824,
0.05226452276110649,
-0.04265512526035309,
0.022537434473633766,
-0.013655736111104488,
0.04784655198454857,
-0.019845526665449142,
0.09900444000959396,
0.05383394658565521,
-5.4424667439434415e-8,
-0.055830057710409164,
-0.05101217329502106,
-0.037146322429180145,
0.006961052305996418,
-0.06336480379104614,
-0.03324250876903534,
-0.06492634117603302,
-0.010884430259466171,
-0.04926327243447304,
-0.06212055683135986,
0.03142009302973747,
0.05633644014596939,
-0.12922611832618713,
0.03725072741508484,
-0.009868907742202282,
0.045898422598838806,
-0.05270257219672203,
0.04888875409960747,
-0.008743912912905216,
-0.029098624363541603,
-0.04070243984460831,
0.03550984710454941,
-0.004266996402293444,
-0.04766741394996643,
-0.015396768227219582,
-0.04764046147465706,
-0.007503908593207598,
0.10061030834913254,
-0.03478379175066948,
0.004110750742256641,
-0.03194498270750046,
0.011926930397748947,
-0.06534508615732193,
0.003780923318117857,
0.004192820750176907,
0.08175778388977051,
-0.036468904465436935,
-0.08719328790903091,
-0.04321800172328949,
0.04567887634038925,
0.09295379370450974,
0.02714601345360279,
-0.08772613853216171,
0.027050230652093887,
0.07855349779129028,
0.033825427293777466,
0.049340419471263885,
-0.12153273820877075,
0.002946292283013463,
0.02571641281247139,
0.015488720498979092,
-0.03775937855243683,
-0.0539664588868618,
0.027736537158489227,
-0.041353508830070496,
0.002366420580074191,
-0.08396586775779724,
-0.0015720139490440488,
-0.02262861281633377,
0.0006848435150459409,
-0.018096381798386574,
0.06125762686133385,
0.09693440049886703,
0.009809782728552818
] |
facebook/wav2vec2-large-100k-voxpopuli | ad2f1b5b6f2f0a78683b90e78ebc07af6022c6db | 2021-11-05T12:45:52.000Z | [
"pytorch",
"jax",
"wav2vec2",
"pretraining",
"multilingual",
"arxiv:2101.00390",
"transformers",
"audio",
"automatic-speech-recognition",
"voxpopuli",
"license:cc-by-nc-4.0"
] | automatic-speech-recognition | false | facebook | null | facebook/wav2vec2-large-100k-voxpopuli | 1,163 | 2 | transformers | ---
language: multilingual
tags:
- audio
- automatic-speech-recognition
- voxpopuli
license: cc-by-nc-4.0
---
# Wav2Vec2-Large-VoxPopuli
[Facebook's Wav2Vec2](https://ai.facebook.com/blog/wav2vec-20-learning-the-structure-of-speech-from-raw-audio/) large model pretrained on the 100k unlabeled subset of [VoxPopuli corpus](https://arxiv.org/abs/2101.00390).
**Note**: This model does not have a tokenizer as it was pretrained on audio alone. In order to use this model for **speech recognition**, a tokenizer should be created and the model should be fine-tuned on labeled text data. Check out [this blog](https://huggingface.co/blog/fine-tune-wav2vec2-english) for a more detailed explanation of how to fine-tune the model.
**Paper**: *[VoxPopuli: A Large-Scale Multilingual Speech Corpus for Representation
Learning, Semi-Supervised Learning and Interpretation](https://arxiv.org/abs/2101.00390)*
**Authors**: *Changhan Wang, Morgane Riviere, Ann Lee, Anne Wu, Chaitanya Talnikar, Daniel Haziza, Mary Williamson, Juan Pino, Emmanuel Dupoux* from *Facebook AI*
See the official [website](https://github.com/facebookresearch/voxpopuli/) for more information.
# Fine-Tuning
Please refer to [this blog](https://huggingface.co/blog/fine-tune-xlsr-wav2vec2) on how to fine-tune this model on a specific language. Note that you should replace `"facebook/wav2vec2-large-xlsr-53"` with this checkpoint for fine-tuning.
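As a rough illustration of that fine-tuning setup (not part of the original card), the sketch below loads this checkpoint for CTC training. The `vocab.json` file, special tokens, and keyword arguments are assumptions following the typical recipe from the linked fine-tuning blog posts, not values prescribed by this model.
```python
# Minimal sketch, assuming a CTC vocabulary ("vocab.json") built from your own labeled transcripts.
from transformers import (
    Wav2Vec2CTCTokenizer,
    Wav2Vec2FeatureExtractor,
    Wav2Vec2Processor,
    Wav2Vec2ForCTC,
)

tokenizer = Wav2Vec2CTCTokenizer(
    "vocab.json", unk_token="[UNK]", pad_token="[PAD]", word_delimiter_token="|"
)
feature_extractor = Wav2Vec2FeatureExtractor(
    feature_size=1, sampling_rate=16000, padding_value=0.0,
    do_normalize=True, return_attention_mask=True,
)
processor = Wav2Vec2Processor(feature_extractor=feature_extractor, tokenizer=tokenizer)

model = Wav2Vec2ForCTC.from_pretrained(
    "facebook/wav2vec2-large-100k-voxpopuli",
    ctc_loss_reduction="mean",
    pad_token_id=processor.tokenizer.pad_token_id,
    vocab_size=len(processor.tokenizer),
)
model.freeze_feature_extractor()  # common choice in the blog recipe: keep the convolutional feature encoder frozen
```
The model can then be trained on your labeled audio/text pairs with the usual `Trainer` workflow described in the blog.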
| [
-0.045501306653022766,
-0.09884454309940338,
-0.025190500542521477,
-0.038069408386945724,
0.01961768977344036,
0.042700931429862976,
-0.03367209807038307,
-0.041489727795124054,
-0.02840311825275421,
-0.06551806628704071,
-0.01709160767495632,
-0.0993644967675209,
-0.013015495613217354,
0.019341442734003067,
-0.010690335184335709,
-0.021328918635845184,
0.03672647103667259,
0.04699181020259857,
-0.05570144206285477,
-0.006293514743447304,
0.06853780150413513,
0.06240612640976906,
0.07604654878377914,
0.04005078971385956,
0.0365690253674984,
0.10016750544309616,
-0.0642726942896843,
-0.006646246183663607,
0.05237043648958206,
-0.04410305246710777,
0.06057535856962204,
0.01861063577234745,
0.09821426868438721,
0.04062614217400551,
0.006942481268197298,
-0.011639992706477642,
-0.04284112900495529,
-0.02590048499405384,
0.010590496473014355,
-0.04066936671733856,
-0.010885126888751984,
0.007934736087918282,
-0.028983989730477333,
-0.003437203820794821,
0.043122921139001846,
0.03862520307302475,
-0.03868087753653526,
0.008853163570165634,
-0.03970690444111824,
0.012672512792050838,
-0.08545885235071182,
-0.06611985713243484,
0.029312333092093468,
0.0748649314045906,
-0.10649525374174118,
-0.0031776491086930037,
0.013165270909667015,
0.017260730266571045,
0.0797773227095604,
-0.004670211113989353,
-0.04791567847132683,
-0.048610322177410126,
0.006707216612994671,
0.01252810936421156,
-0.026858748868107796,
0.02163543738424778,
-0.05329117551445961,
0.0015509749064221978,
0.013137072324752808,
0.0383380651473999,
0.022474447265267372,
0.068837009370327,
0.05922972783446312,
0.03866899386048317,
0.04082796350121498,
0.025781255215406418,
0.05283449962735176,
-0.006864731665700674,
0.06781753897666931,
-0.08443393558263779,
0.03130292147397995,
0.0021591961849480867,
0.06017598509788513,
-0.08451724797487259,
0.06062920019030571,
-0.02245304360985756,
0.018227102234959602,
-0.05271263048052788,
-0.032609160989522934,
-0.010487167164683342,
-0.03866172954440117,
-0.005267844069749117,
0.022492708638310432,
0.010803663171827793,
-0.06079934909939766,
0.024288782849907875,
0.007143830414861441,
0.03483010455965996,
0.02106119506061077,
0.055572446435689926,
0.022552335634827614,
-0.0389329269528389,
0.005866883788257837,
-0.03102535754442215,
-0.04278888180851936,
-0.10491573065519333,
0.061289846897125244,
0.05681125074625015,
0.07389391213655472,
-0.043648093938827515,
0.03155827894806862,
0.06705925613641739,
-0.06908359378576279,
-0.025862859562039375,
0.04735347256064415,
0.044569231569767,
-0.02281029149889946,
-0.07602088898420334,
0.061498962342739105,
0.07662218064069748,
-0.1590084284543991,
-0.06537698209285736,
-0.062482934445142746,
-0.036212582141160965,
-0.020713433623313904,
-0.04026314616203308,
-0.04304064065217972,
1.503091769426997e-33,
-0.02038702368736267,
0.04871896281838417,
-0.030733205378055573,
-0.0012812389759346843,
0.03119460865855217,
-0.057342395186424255,
-0.0360616073012352,
-0.025210339576005936,
-0.03534385561943054,
-0.019725224003195763,
-0.012877389788627625,
0.017974037677049637,
-0.06354612857103348,
0.11653034389019012,
-0.05740204080939293,
-0.05076780170202255,
-0.07193911075592041,
0.007347161415964365,
-0.022082556039094925,
0.004318453837186098,
0.09937018156051636,
0.016407055780291557,
0.05246375501155853,
0.04091586917638779,
0.058323074132204056,
0.0327872596681118,
0.044619448482990265,
-0.10925456881523132,
0.050582628697156906,
0.05980166420340538,
-0.08361009508371353,
-0.08148473501205444,
0.04634859785437584,
-0.002519550733268261,
0.011181408539414406,
0.042825739830732346,
-0.007038694806396961,
0.01954173669219017,
-0.06837684661149979,
-0.11699819564819336,
0.008269759826362133,
0.01898067258298397,
0.006706079002469778,
-0.09636125713586807,
-0.06560507416725159,
0.0034839720465242863,
-0.029760878533124924,
0.03901909291744232,
0.0009832370560616255,
-0.01609148271381855,
0.05189956724643707,
-0.009749293327331543,
-0.08107077330350876,
0.008345372043550014,
-0.008007703348994255,
-0.01609845645725727,
0.03905663639307022,
0.043409038335084915,
0.04332946240901947,
-0.022372808307409286,
0.02858823351562023,
0.010067809373140335,
0.08519497513771057,
-0.013131731189787388,
0.06355206668376923,
-0.0029687946662306786,
-0.0260208360850811,
0.017880136147141457,
0.06977969408035278,
-0.059402547776699066,
-0.02770760841667652,
-0.024588536471128464,
0.013229476287961006,
0.05468040332198143,
-0.019179200753569603,
0.015441520139575005,
0.04341467097401619,
-0.13214322924613953,
-0.015057885088026524,
0.08011238276958466,
-0.035643309354782104,
0.007685539312660694,
0.00837328564375639,
-0.06205989792943001,
-0.0031962553039193153,
-0.04805338382720947,
-0.008991790935397148,
-0.12600114941596985,
-0.014037678949534893,
0.026673467829823494,
-0.0013009639224037528,
0.051810264587402344,
-0.04660199582576752,
-0.01237276941537857,
-0.0691341832280159,
-3.5248560948169276e-33,
0.05565439164638519,
0.06100541725754738,
0.0011115020606666803,
0.047523487359285355,
-0.04784689098596573,
0.02387174963951111,
0.05210414528846741,
0.11428202688694,
0.02685980312526226,
-0.08566992729902267,
0.07826925069093704,
-0.06562574207782745,
0.07534224539995193,
0.034533340483903885,
0.03752090781927109,
-0.03207625076174736,
-0.0395241379737854,
0.05250638350844383,
0.10061933845281601,
0.1330246925354004,
-0.016795586794614792,
0.011313267983496189,
-0.12131448835134506,
0.1058821752667427,
-0.07204890996217728,
0.02624957449734211,
-0.08654946833848953,
0.05847717821598053,
0.0774855837225914,
-0.009295398369431496,
-0.06850001215934753,
0.01737132854759693,
-0.0413428395986557,
0.0041701896116137505,
-0.031092576682567596,
0.010955756530165672,
0.05251266062259674,
-0.034623149782419205,
0.0030116946436464787,
0.03642977774143219,
0.06566468626260757,
0.06243177503347397,
-0.1007910966873169,
-0.05924184247851372,
0.004107247106730938,
-0.06118756905198097,
-0.03879488259553909,
0.03189721331000328,
-0.06182972341775894,
0.007622989360243082,
0.04363138601183891,
-0.05127109959721565,
-0.005176976788789034,
0.014848963357508183,
-0.051054928451776505,
-0.04376845806837082,
-0.00781539548188448,
-0.07491837441921234,
-0.019203893840312958,
-0.018660051748156548,
-0.08965586870908737,
0.004637340549379587,
-0.029224148020148277,
-0.07708742469549179,
0.028541432693600655,
-0.0012653109151870012,
-0.021805886179208755,
-0.013093076646327972,
0.02661731094121933,
-0.0070076738484203815,
0.012682121247053146,
0.034242697060108185,
-0.05206279456615448,
0.04371520131826401,
-0.06595227122306824,
-0.017940586432814598,
-0.06289225071668625,
-0.04720676317811012,
-0.03226730599999428,
-0.10535915195941925,
-0.03530190512537956,
0.047810107469558716,
0.04648558795452118,
0.07392993569374084,
0.09442808479070663,
0.12110568583011627,
0.028680620715022087,
-0.01193501427769661,
-0.02684079296886921,
0.053307127207517624,
-0.0068525043316185474,
0.05670062452554703,
0.049077849835157394,
0.10972843319177628,
-0.022019537165760994,
-5.7748675175162134e-8,
-0.06131809204816818,
0.03687107563018799,
-0.0075254132971167564,
-0.018609123304486275,
-0.014303142204880714,
-0.07580254971981049,
0.004362329374998808,
0.010166425257921219,
-0.019764654338359833,
0.0008282223716378212,
0.012326897121965885,
-0.02736031450331211,
-0.06979703158140182,
-0.015464569441974163,
-0.03587988391518593,
0.05394957587122917,
0.00443634670227766,
0.11711142212152481,
-0.0032340495381504297,
-0.06021646037697792,
0.011583582498133183,
0.018260225653648376,
0.03967662900686264,
0.01768256537616253,
0.06504367291927338,
-0.013958861120045185,
0.01746390387415886,
0.05518382787704468,
-0.02572314813733101,
-0.08565086871385574,
-0.0411127507686615,
0.07115018367767334,
-0.0667375698685646,
-0.04782452806830406,
0.043828144669532776,
0.04991843178868294,
-0.038230281323194504,
-0.06412435322999954,
-0.040078699588775635,
0.04888597130775452,
0.07201677560806274,
0.10405229777097702,
-0.1151474267244339,
-0.05396566912531853,
0.06296606361865997,
0.0003661598020698875,
-0.04373818635940552,
-0.08770330995321274,
0.028831595554947853,
0.02151741273701191,
-0.0019242914859205484,
0.050594981759786606,
-0.062382668256759644,
0.03044195845723152,
0.03008182905614376,
0.06596142053604126,
-0.031548477709293365,
0.0359390527009964,
0.02755812183022499,
0.029202790930867195,
0.023214809596538544,
0.019942833110690117,
-0.013954805210232735,
-0.03644909709692001
] |
codeparrot/codeparrot-small | e7e4f5d39319551a760f07c0e1035e379617c721 | 2022-07-03T19:54:59.000Z | [
"pytorch",
"tensorboard",
"gpt2",
"text-generation",
"code",
"dataset:codeparrot/codeparrot-clean",
"dataset:openai_humaneval",
"transformers",
"generation",
"license:apache-2.0"
] | text-generation | false | codeparrot | null | codeparrot/codeparrot-small | 1,163 | 9 | transformers | ---
language:
- code
license: apache-2.0
tags:
- code
- gpt2
- generation
datasets:
- "codeparrot/codeparrot-clean"
- "openai_humaneval"
metrics:
- "evaluate-metric/code_eval"
---
# CodeParrot 🦜 (small)
CodeParrot 🦜 is a GPT-2 model (110M parameters) trained to generate Python code.
## Usage
You can load the CodeParrot model and tokenizer directly in `transformers`:
```Python
from transformers import AutoTokenizer, AutoModelWithLMHead

tokenizer = AutoTokenizer.from_pretrained("codeparrot/codeparrot-small")
model = AutoModelWithLMHead.from_pretrained("codeparrot/codeparrot-small")

inputs = tokenizer("def hello_world():", return_tensors="pt")
outputs = model(**inputs)  # forward pass over the prompt; returns logits, not generated code
```
or with a `pipeline`:
```Python
from transformers import pipeline
pipe = pipeline("text-generation", model="codeparrot/codeparrot-small")
outputs = pipe("def hello_world():")
```
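For longer completions, generation arguments can be passed straight through the pipeline call; the settings below are illustrative, not recommendations from the original card:
```Python
# Illustrative generation settings; tune max_new_tokens / temperature for your use case.
outputs = pipe("def fibonacci(n):", max_new_tokens=64, do_sample=True, temperature=0.2)
print(outputs[0]["generated_text"])
```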
## Training
The model was trained on the cleaned [CodeParrot 🦜 dataset](https://huggingface.co/datasets/codeparrot/codeparrot-clean) with the following settings:
|Config|Value|
|-------|-----|
|Batch size| 192 |
|Context size| 1024 |
|Training steps| 150'000|
|Gradient accumulation| 1|
|Gradient checkpointing| False|
|Learning rate| 5e-4 |
|Weight decay | 0.1 |
|Warmup steps| 2000 |
|Schedule| Cosine |
The training was executed on 16 x A100 (40GB) GPUs. This setting amounts to roughly 29 billion tokens.
## Performance
We evaluated the model on OpenAI's [HumanEval](https://huggingface.co/datasets/openai_humaneval) benchmark which consists of programming challenges:
| Metric | Value |
|-------|-----|
|pass@1 | 3.80% |
|pass@10 | 6.57% |
|pass@100 | 12.78% |
The [pass@k metric](https://huggingface.co/metrics/code_eval) gives the probability that at least one out of k generated programs passes the unit tests.
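As an illustration (not from the original card), pass@k can be computed with the `code_eval` metric from the `evaluate` library; the toy problem and candidate solutions below are made up:
```Python
# Toy example; the real evaluation runs the HumanEval problems instead.
import os
os.environ["HF_ALLOW_CODE_EVAL"] = "1"  # code_eval executes generated code, so it must be enabled explicitly

from evaluate import load

code_eval = load("code_eval")
test_cases = ["assert add(2, 3) == 5"]
candidates = [["def add(a, b):\n    return a + b", "def add(a, b):\n    return a - b"]]
pass_at_k, results = code_eval.compute(references=test_cases, predictions=candidates, k=[1, 2])
print(pass_at_k)  # e.g. {'pass@1': 0.5, 'pass@2': 1.0}
```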
## Resources
- Dataset: [full](https://huggingface.co/datasets/codeparrot/codeparrot-clean), [train](https://huggingface.co/datasets/codeparrot/codeparrot-clean-train), [valid](https://huggingface.co/datasets/codeparrot/codeparrot-clean-valid)
- Code: [repository](https://github.com/huggingface/transformers/tree/master/examples/research_projects/codeparrot)
- Spaces: [generation](), [highlighting]() | [
-0.08863992989063263,
-0.02731425315141678,
-0.043157871812582016,
0.042379919439554214,
0.003027164377272129,
-0.08353544026613235,
-0.045196425169706345,
0.08047743886709213,
-0.07153411954641342,
-0.07030888646841049,
0.008883134461939335,
-0.09741672873497009,
0.010903509333729744,
-0.040959835052490234,
0.02936861850321293,
0.0046297661028802395,
-0.0011731548001989722,
0.05705046281218529,
-0.11163145303726196,
-0.10406681150197983,
0.08368992060422897,
0.09666379541158676,
0.020876126363873482,
0.048107344657182693,
0.02432551048696041,
0.016451826319098473,
-0.00423828000202775,
0.007944630458950996,
0.040072001516819,
0.05236222594976425,
0.04205956682562828,
-0.019934162497520447,
-0.03719841688871384,
0.06192438304424286,
0.027659261599183083,
0.12427143007516861,
-0.04352174326777458,
-0.050388336181640625,
-0.02266618236899376,
-0.047634970396757126,
0.0614660419523716,
-0.03953845798969269,
-0.027928035706281662,
0.0031533041037619114,
0.020863810554146767,
-0.041871409863233566,
-0.018312761560082436,
-0.035646356642246246,
-0.04740443080663681,
-0.05184254050254822,
-0.04572966694831848,
0.008550411090254784,
-0.0005992620135657489,
-0.014401614665985107,
-0.014644860289990902,
-0.015991317108273506,
0.06689656525850296,
-0.07259373366832733,
0.04837552458047867,
-0.10656022280454636,
-0.07344159483909607,
-0.009419695474207401,
-0.039581943303346634,
-0.038587454706430435,
-0.08120957762002945,
-0.06419108808040619,
0.033088382333517075,
0.01051669754087925,
0.023257635533809662,
-0.012546868994832039,
-0.06076650694012642,
0.04687732458114624,
-0.08917903155088425,
0.03181812912225723,
0.026593143120408058,
0.037013206630945206,
0.03250005841255188,
0.02803082764148712,
0.022371366620063782,
-0.06102916598320007,
0.06586331129074097,
-0.053842511028051376,
0.055740900337696075,
0.07778479903936386,
-0.030691541731357574,
-0.043385084718465805,
0.040709029883146286,
0.11310437321662903,
0.08125067502260208,
0.07014094293117523,
-0.07007453590631485,
-0.024169977754354477,
0.02908879891037941,
-0.0016281417338177562,
-0.07775112986564636,
-0.02345777116715908,
-0.014348557218909264,
-0.039215896278619766,
-0.05542751029133797,
0.09472225606441498,
-0.06877834349870682,
-0.003948845434933901,
0.00570696871727705,
0.014386114664375782,
-0.04787435010075569,
-0.01370205171406269,
-0.035112157464027405,
0.07120268046855927,
0.025171028450131416,
-0.018563812598586082,
0.037591353058815,
-0.0026385278906673193,
0.006881894078105688,
-0.0423743911087513,
0.08488905429840088,
0.10504728555679321,
-0.012024661526083946,
0.0029847752302885056,
-0.006850609090179205,
0.12948910892009735,
0.02397000975906849,
-0.009978880174458027,
-0.044379159808158875,
0.017208965495228767,
0.018579404801130295,
0.025110749527812004,
-0.04810076206922531,
5.515301944623091e-33,
-0.0031610862351953983,
-0.02001948095858097,
0.055063504725694656,
0.026614224538207054,
-0.04871197044849396,
0.07854779064655304,
0.02416224218904972,
-0.029411636292934418,
0.039560649544000626,
-0.04155025631189346,
-0.022448625415563583,
0.01910814829170704,
-0.07680357992649078,
0.06435269862413406,
-0.06951902061700821,
-0.01593906432390213,
-0.04699349403381348,
-0.0010219089454039931,
0.034367527812719345,
0.011382141150534153,
0.05763675272464752,
0.02387426421046257,
-0.04680037125945091,
-0.00949046853929758,
-0.06861582398414612,
-0.020432692021131516,
0.020759670063853264,
-0.04019350931048393,
-0.05624738708138466,
0.058380477130413055,
-0.04951456934213638,
-0.03222782164812088,
0.0016036703018471599,
0.02182796038687229,
-0.029488613829016685,
-0.008567700162529945,
0.01883489266037941,
-0.04328101873397827,
-0.006664198357611895,
-0.0450582355260849,
-0.03497431427240372,
0.04002954810857773,
0.017804080620408058,
-0.0943748950958252,
0.003925688564777374,
-0.04851614311337471,
0.017700443044304848,
0.06691601872444153,
0.052235737442970276,
0.09593569487333298,
0.05556275695562363,
0.05574539303779602,
-0.0009984681382775307,
-0.014562217518687248,
-0.035105329006910324,
0.054490942507982254,
0.017275987192988396,
0.00005615350528387353,
0.0896168202161789,
-0.05477471277117729,
-0.0451616570353508,
0.02967812307178974,
0.002584569389000535,
-0.020768677815794945,
0.06771524250507355,
0.02356031723320484,
0.04621440917253494,
0.02166985161602497,
0.042669542133808136,
0.07795283198356628,
-0.058957941830158234,
-0.026269441470503807,
-0.07189925760030746,
0.01763947121798992,
0.021843213587999344,
-0.020615382120013237,
0.11911670863628387,
-0.04114313796162605,
-0.05472802370786667,
-0.0069586774334311485,
-0.03681757301092148,
0.10052527487277985,
0.0727253258228302,
-0.0669185146689415,
0.0007480733911506832,
-0.045926354825496674,
0.011537918820977211,
0.017759360373020172,
-0.07497705519199371,
-0.05748186260461807,
-0.08773255348205566,
-0.08000805974006653,
-0.035581618547439575,
-0.001264104968868196,
-0.03787706792354584,
-7.485885245978219e-33,
0.027729457244277,
0.04707496613264084,
-0.03551570698618889,
0.12457562983036041,
-0.0006319900276139379,
-0.08712111413478851,
0.04659711942076683,
0.07386356592178345,
0.012460766360163689,
0.02410055138170719,
-0.006377975456416607,
-0.045200660824775696,
0.02213895507156849,
0.003305885475128889,
0.1347653716802597,
-0.03462919220328331,
-0.08192895352840424,
-0.007102778647094965,
0.05203791335225105,
0.09351345896720886,
-0.04128411412239075,
0.08425653725862503,
-0.060174569487571716,
0.02868388406932354,
-0.032636646181344986,
-0.02609008178114891,
-0.06369910389184952,
0.05937916040420532,
0.07431083917617798,
-0.028095876798033714,
-0.001180361956357956,
0.07553687691688538,
-0.022277947515249252,
0.041931286454200745,
-0.07332570850849152,
-0.02918979525566101,
0.05722425505518913,
0.02362123131752014,
0.0027783014811575413,
0.12148932367563248,
0.02437925897538662,
0.005131430458277464,
-0.05133627727627754,
0.04690352454781532,
-0.08593346923589706,
0.05871503800153732,
0.05689099431037903,
-0.025582633912563324,
-0.010931748896837234,
-0.02349899336695671,
0.00967211090028286,
0.0576644241809845,
-0.07130013406276703,
0.06294698268175125,
-0.06478072702884674,
-0.0761910080909729,
0.10730772465467453,
-0.010276860557496548,
-0.09399273246526718,
-0.0027892470825463533,
-0.034868285059928894,
-0.04777101054787636,
0.07062117010354996,
-0.08339402079582214,
-0.0410466194152832,
-0.10175895690917969,
-0.039484135806560516,
-0.01532931998372078,
-0.011203180998563766,
0.012310636229813099,
-0.05369510129094124,
-0.04219961166381836,
0.03403080254793167,
-0.028093814849853516,
-0.030781539157032967,
0.02049134112894535,
-0.03996862843632698,
-0.02811332792043686,
0.05984397605061531,
-0.008464312180876732,
-0.04595077410340309,
0.0229348111897707,
0.06140624359250069,
0.013073280453681946,
0.011599494144320488,
-0.07319941371679306,
0.032511696219444275,
0.10575229674577713,
0.01920442283153534,
0.05222252383828163,
-0.08165252208709717,
0.02570953033864498,
0.03406447917222977,
0.021076321601867676,
0.0008028082083910704,
-5.569655670001339e-8,
-0.004078887403011322,
-0.002510813996195793,
-0.0169807281345129,
0.11516561359167099,
-0.08610507845878601,
-0.014966686256229877,
-0.03491741791367531,
0.08302173763513565,
-0.01976403035223484,
0.016974009573459625,
0.015875710174441338,
-0.02872909978032112,
-0.0928204134106636,
-0.020117048174142838,
-0.03120233304798603,
0.06329692155122757,
-0.012702448293566704,
0.07598745077848434,
-0.05945859104394913,
-0.03297470882534981,
-0.0674949437379837,
0.009213369339704514,
-0.023294722661376,
-0.07134020328521729,
-0.016148358583450317,
-0.026658235117793083,
-0.021976163610816002,
0.002975512994453311,
-0.024386778473854065,
-0.022595258429646492,
0.01911346986889839,
-0.0019566344562917948,
-0.029307890683412552,
-0.08196116983890533,
-0.05945363640785217,
0.03081347979605198,
-0.007581240497529507,
-0.04978593811392784,
0.10401758551597595,
0.03638099879026413,
0.035277899354696274,
0.07811784744262695,
-0.12579160928726196,
-0.0314396470785141,
0.03034575656056404,
0.021390466019511223,
-0.0006391158094629645,
-0.05475755035877228,
-0.03790619969367981,
0.031729359179735184,
0.012624116614460945,
-0.021621812134981155,
-0.06548454612493515,
-0.018698852509260178,
0.055705923587083817,
0.0360880047082901,
-0.04528307169675827,
-0.05223877355456352,
0.03332960605621338,
0.030628547072410583,
0.0036985769402235746,
0.04571966081857681,
0.04163636267185211,
-0.07876628637313843
] |
paulowoicho/t5-podcast-summarisation | 162966482402d91ce84facd36e835ad09f244a72 | 2020-11-11T10:15:57.000Z | [
"pytorch",
"t5",
"text2text-generation",
"[en]",
"dataset:Spotify Podcasts Dataset",
"arxiv:2004.04270",
"arxiv:1910.10683",
"transformers",
"summarisation",
"lm-head",
"autotrain_compatible"
] | text2text-generation | false | paulowoicho | null | paulowoicho/t5-podcast-summarisation | 1,161 | 2 | transformers | ---
language: "[en]"
datasets:
- Spotify Podcasts Dataset
tags:
- t5
- summarisation
- pytorch
- lm-head
metrics:
- ROUGE
pipeline:
- summarisation
---
# T5 for Automatic Podcast Summarisation
This model is the result of fine-tuning [t5-base](https://huggingface.co/t5-base) on the [Spotify Podcast Dataset](https://arxiv.org/abs/2004.04270).
It is based on [Google's T5](https://ai.googleblog.com/2020/02/exploring-transfer-learning-with-t5.html) which was pretrained on the [C4 dataset](https://huggingface.co/datasets/c4).
Paper: [Exploring the Limits of Transfer Learning with a Unified Text-to-Text Transformer](https://arxiv.org/pdf/1910.10683.pdf)
Authors: Colin Raffel, Noam Shazeer, Adam Roberts, Katherine Lee, Sharan Narang, Michael Matena, Yanqi Zhou, Wei Li, Peter J. Liu
## Intended uses & limitations
This model is intended to be used for automatic podcast summarisation. Because creator-provided episode descriptions were used as training targets, the model also learned to generate promotional material (links, hashtags, etc.) in its summaries, so some post-processing may be required on the model's outputs.
If run on Colab, the instance will crash if the number of tokens in the transcript exceeds 7000. I found that the model still generated reasonable summaries even when the podcast transcript was truncated to reduce the number of tokens.
#### How to use
The model can be used with the summarisation pipeline as follows:
```python
from transformers import pipeline
summarizer = pipeline("summarization", model="paulowoicho/t5-podcast-summarisation", tokenizer="paulowoicho/t5-podcast-summarisation")
summary = summarizer(podcast_transcript, min_length=5, max_length=20)
print(summary[0]['summary_text'])
```
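Since long transcripts can exhaust memory (see the Colab note above), one option is to truncate the input before summarisation. A minimal sketch, reusing `summarizer` and `podcast_transcript` from the snippet above, with the 7000-token budget as an assumption carried over from that note:
```python
# Truncation sketch; the token budget is illustrative.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("paulowoicho/t5-podcast-summarisation")
token_ids = tokenizer.encode(podcast_transcript, truncation=True, max_length=7000)
truncated_transcript = tokenizer.decode(token_ids, skip_special_tokens=True)
summary = summarizer(truncated_transcript, min_length=5, max_length=20)
print(summary[0]['summary_text'])
```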
## Training data
This model is the result of fine-tuning [t5-base](https://huggingface.co/t5-base) on the [Spotify Podcast Dataset](https://arxiv.org/abs/2004.04270).
[Pre-processing](https://github.com/paulowoicho/msc_project/blob/master/reformat.py) was done on the original data before fine-tuning.
## Training procedure
Training was largely based on [Fine-tune T5 for Summarization](https://github.com/abhimishra91/transformers-tutorials/blob/master/transformers_summarization_wandb.ipynb) by [Abhishek Kumar Mishra](https://github.com/abhimishra91)
| [
-0.06298195570707321,
-0.08455812931060791,
0.024836519733071327,
-0.02807418629527092,
0.08362159132957458,
0.07456979900598526,
0.006142895203083754,
-0.024513091892004013,
0.004746070597320795,
-0.07108055055141449,
-0.05640849843621254,
-0.029528876766562462,
0.004703207407146692,
0.009018724784255028,
-0.06985457986593246,
-0.008266953751444817,
0.09706766903400421,
0.031139729544520378,
-0.08289659023284912,
-0.07234825193881989,
-0.05285869911313057,
0.05820052698254585,
0.08068803697824478,
0.014544004574418068,
0.09705948829650879,
0.0021041680593043566,
-0.06924863159656525,
-0.04832944646477699,
0.03984241187572479,
-0.03616354987025261,
-0.024782948195934296,
0.046093154698610306,
0.034235257655382156,
0.03679100051522255,
-0.1023656353354454,
0.049043796956539154,
-0.025315269827842712,
0.02823157049715519,
-0.03851812705397606,
0.01660572923719883,
0.05548136681318283,
-0.012653771787881851,
-0.011935746297240257,
0.0046014804393053055,
0.049709245562553406,
-0.0032821642234921455,
-0.0467672273516655,
-0.05616680905222893,
-0.03173675760626793,
0.05678017809987068,
-0.0934939831495285,
-0.019172873347997665,
-0.014381630346179008,
0.10050954669713974,
-0.08977309614419937,
0.032952580600976944,
0.021442396566271782,
0.05236605554819107,
0.029948337003588676,
-0.027647823095321655,
-0.02707093581557274,
-0.14017148315906525,
-0.007121956441551447,
-0.011426020413637161,
0.03362570330500603,
-0.045363765209913254,
0.05427246168255806,
0.08941281586885452,
-0.010119006037712097,
0.019942285493016243,
-0.08300700038671494,
0.07668193429708481,
0.007969449274241924,
0.01554166991263628,
0.03489861637353897,
0.04860407114028931,
0.04034924879670143,
-0.02860174886882305,
0.028652634471654892,
-0.05850745365023613,
0.013136766850948334,
0.020778018981218338,
0.07105059921741486,
-0.027703993022441864,
0.022782402113080025,
-0.02052166685461998,
-0.0021936949342489243,
0.027293916791677475,
-0.06402572244405746,
-0.011310086585581303,
-0.05594371631741524,
0.0012296412605792284,
0.04365435987710953,
-0.05380816385149956,
-0.03331870958209038,
0.03288940712809563,
-0.05730671063065529,
0.010293098166584969,
0.05127233639359474,
0.05182938650250435,
0.034431181848049164,
0.0451810285449028,
0.006278687156736851,
-0.05964471772313118,
-0.019601652398705482,
-0.11318456381559372,
0.06213519722223282,
0.012338102795183659,
0.016705842688679695,
-0.09202145040035248,
0.09252186119556427,
0.0241350457072258,
-0.051254063844680786,
-0.06275169551372528,
0.06580371409654617,
-0.007932990789413452,
-0.02553647942841053,
-0.054233208298683167,
0.03547031804919243,
0.04856682941317558,
-0.03393178433179855,
-0.022242559120059013,
0.01900874264538288,
-0.0037157819606363773,
-0.036251407116651535,
-0.006720632314682007,
-0.03251592814922333,
2.2822222821214712e-33,
0.05283137410879135,
0.05624685436487198,
0.04797273129224777,
0.03798510506749153,
0.04508142173290253,
-0.04745148867368698,
-0.04674658551812172,
0.02966376394033432,
-0.05809180438518524,
0.0012891592923551798,
-0.02713710628449917,
0.06739428639411926,
-0.047517821192741394,
0.019860727712512016,
0.006242741364985704,
-0.04109624773263931,
-0.08337903022766113,
0.11218932271003723,
0.002704627113416791,
-0.018053244799375534,
0.08252119272947311,
0.03354674205183983,
0.026995031163096428,
-0.07213285565376282,
-0.003262334270402789,
0.04790074750781059,
0.03879312425851822,
-0.04881316050887108,
0.04448355361819267,
0.007421264424920082,
-0.190525621175766,
-0.013330169953405857,
0.04636130854487419,
-0.016407474875450134,
0.05953318625688553,
-0.02796339802443981,
-0.06017424166202545,
-0.052942123264074326,
0.05085066705942154,
-0.08468080312013626,
0.03405686095356941,
0.01961669884622097,
0.003987578209489584,
-0.1049196645617485,
-0.07819920778274536,
0.02440115623176098,
0.02670908533036709,
-0.00858168862760067,
0.0033254504669457674,
0.00020828410924877971,
0.02301340363919735,
-0.04405125230550766,
-0.0565967857837677,
-0.02854590117931366,
0.0594816654920578,
0.01577078551054001,
0.03368676081299782,
0.04710707440972328,
0.0782230794429779,
0.0693041980266571,
-0.001031029038131237,
0.08661163598299026,
0.06615938991308212,
0.03240905702114105,
0.042628660798072815,
0.07209154963493347,
0.005259496159851551,
0.004218013491481543,
0.07297992706298828,
-0.015108169987797737,
-0.03614171966910362,
0.041840940713882446,
-0.034504570066928864,
0.02518249675631523,
0.09902311861515045,
-0.02340315282344818,
0.009162383154034615,
-0.0741608664393425,
-0.05902736634016037,
0.056174371391534805,
-0.044271260499954224,
0.011816656216979027,
0.06491363793611526,
-0.05473443493247032,
-0.0810326412320137,
0.006598819978535175,
0.028582926839590073,
-0.06819028407335281,
-0.044676583260297775,
0.013371910899877548,
-0.003416106104850769,
0.06098121777176857,
-0.04755716398358345,
0.00448618596419692,
0.009489478543400764,
-1.7151840110939545e-33,
-0.005368308629840612,
0.05107622221112251,
-0.06392624974250793,
0.10495820641517639,
-0.02754897065460682,
-0.01480958517640829,
0.027342701330780983,
0.07647711038589478,
0.021658889949321747,
-0.024591853842139244,
0.017772478982806206,
-0.0747649222612381,
-0.010570569895207882,
-0.031550198793411255,
-0.04560617730021477,
-0.06112770363688469,
-0.02567007951438427,
-0.056861523538827896,
0.01628413423895836,
0.009421264752745628,
0.02084430865943432,
0.0012053071986883879,
-0.08874763548374176,
0.09844386577606201,
0.00895166490226984,
0.030367042869329453,
0.0015456302789971232,
0.05371683090925217,
0.016003644093871117,
-0.015864532440900803,
0.015131221152842045,
-0.0662621259689331,
-0.06497260183095932,
-0.01447907742112875,
-0.07394944131374359,
-0.00036489684134721756,
0.0784243792295456,
0.043743494898080826,
0.0010385303758084774,
0.10065105557441711,
0.05754518136382103,
-0.012468437664210796,
-0.03936005383729935,
-0.024813193827867508,
-0.028641022741794586,
-0.015311913564801216,
-0.11664202809333801,
0.0405849851667881,
0.018729539588093758,
-0.029595060274004936,
0.05186507850885391,
-0.03814329206943512,
-0.0767185389995575,
-0.05554929003119469,
-0.036674439907073975,
-0.042796410620212555,
0.0619874969124794,
-0.06141478940844536,
-0.04230109602212906,
-0.03320799022912979,
-0.08710043132305145,
-0.03959531709551811,
-0.020889023318886757,
-0.06811860203742981,
0.031090395525097847,
-0.027874812483787537,
0.049898575991392136,
-0.031302765011787415,
-0.05638011172413826,
0.059231095016002655,
0.06563682109117508,
-0.018544306978583336,
0.049521591514348984,
0.026461133733391762,
-0.005586898885667324,
0.029763877391815186,
0.06857854872941971,
-0.024721328169107437,
-0.004004298243671656,
-0.08488892018795013,
-0.060603007674217224,
-0.00410620728507638,
0.0905008539557457,
0.03443336859345436,
0.09209676086902618,
0.1435965746641159,
0.045661311596632004,
0.03895210847258568,
0.026871129870414734,
0.029089076444506645,
-0.04532096907496452,
0.018233930692076683,
-0.027048686519265175,
0.039624180644750595,
-0.036952439695596695,
-5.5358000849992095e-8,
-0.08716865628957748,
0.040206894278526306,
-0.08948838710784912,
0.03322558104991913,
-0.04837736859917641,
-0.017492642626166344,
-0.012893732637166977,
0.054866205900907516,
0.02768290601670742,
0.04078096151351929,
-0.017992647364735603,
-0.04040272906422615,
-0.048300787806510925,
0.025595229119062424,
-0.03869122266769409,
0.06601724028587341,
0.043302927166223526,
0.014529655687510967,
-0.032433077692985535,
-0.06478928029537201,
0.03794210031628609,
0.080534428358078,
0.013553768396377563,
-0.0758822113275528,
0.05137846618890762,
-0.0688764676451683,
-0.004030317068099976,
0.10290563106536865,
0.04231267794966698,
-0.11880849301815033,
-0.006606042850762606,
0.04127129539847374,
-0.07158748060464859,
-0.014532598666846752,
0.07399237155914307,
0.06070168688893318,
0.019070452079176903,
0.03000548854470253,
-0.044101495295763016,
0.0830925703048706,
0.009159000590443611,
0.07157547026872635,
-0.06850681453943253,
-0.01781429350376129,
-0.031016051769256592,
-0.02183596044778824,
-0.019694766029715538,
-0.05624677240848541,
0.018169455230236053,
0.006571607664227486,
0.08407977968454361,
-0.00878070667386055,
-0.017866136506199837,
0.04811222478747368,
0.12692561745643616,
0.014756251126527786,
0.028970520943403244,
-0.046310584992170334,
0.04206182435154915,
0.06953753530979156,
-0.00883524864912033,
0.013546740636229515,
-0.02377493493258953,
0.04224161431193352
] |
MaRiOrOsSi/t5-base-finetuned-question-answering | 2c815b9dd13188d751e372a0d8cc9f3892087c9a | 2022-04-08T18:00:14.000Z | [
"pytorch",
"tf",
"t5",
"text2text-generation",
"en",
"dataset:duorc",
"transformers",
"Generative Question Answering",
"autotrain_compatible"
] | text2text-generation | false | MaRiOrOsSi | null | MaRiOrOsSi/t5-base-finetuned-question-answering | 1,161 | null | transformers | ---
language: en
datasets:
- duorc
widget:
- text: "question: Is Giacomo Italian? context: Giacomo is 25 years old and he was born in Tuscany"
- text: "question: Where does Christian come from? context: Christian is a student of UNISI but he come from Caserta"
- text: "question: Is the dog coat grey? context: You have a beautiful dog with a brown coat"
tags:
- Generative Question Answering
---
# T5 for Generative Question Answering
This model is the result produced by Christian Di Maio and Giacomo Nunziati for the Language Processing Technologies exam.
It is [Google's T5](https://ai.googleblog.com/2020/02/exploring-transfer-learning-with-t5.html) fine-tuned on [DuoRC](https://huggingface.co/datasets/duorc) for **Generative Question Answering**, obtained by simply prepending the *question* to the *context*.
## Code
The code used for T5 training is available at this [repository](https://github.com/nunziati/bert-vs-t5-for-question-answering/blob/main/train_t5_selfrc.py).
## Results
The results are evaluated on:
- DuoRC/SelfRC -> Test Subset
- DuoRC/ParaphraseRC -> Test Subset
- SQUADv1 -> Validation Subset
All tokens not corresponding to dictionary words were removed before computing the evaluation metrics.
The model used as reference is BERT fine-tuned on SQuAD v1.
| Model | SelfRC | ParaphraseRC | SQUAD |
|--|--|--|--|
| T5-BASE-FINETUNED | **F1**: 49.00 **EM**: 31.38 | **F1**: 28.75 **EM**: 15.18 | **F1**: 63.28 **EM**: 37.24 |
| BERT-BASE-FINETUNED | **F1**: 47.18 **EM**: 30.76 | **F1**: 21.20 **EM**: 12.62 | **F1**: 77.19 **EM**: 57.81 |
## How to use it 🚀
```python
from transformers import AutoTokenizer, AutoModelWithLMHead, pipeline
model_name = "MaRiOrOsSi/t5-base-finetuned-question-answering"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelWithLMHead.from_pretrained(model_name)
question = "What is 42?"
context = "42 is the answer to life, the universe and everything"
input = f"question: {question} context: {context}"
encoded_input = tokenizer([input],
return_tensors='pt',
max_length=512,
truncation=True)
output = model.generate(input_ids = encoded_input.input_ids,
attention_mask = encoded_input.attention_mask)
output = tokenizer.decode(output[0], skip_special_tokens=True)
print(output)
```
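The `pipeline` import in the snippet above can also be used directly, which is equivalent to the manual `generate` call; a minimal sketch:
```python
# Same question/context, routed through the text2text-generation pipeline.
qa = pipeline("text2text-generation", model=model_name, tokenizer=model_name)
print(qa(f"question: {question} context: {context}")[0]["generated_text"])
```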
## Citation
Created by [Christian Di Maio](https://it.linkedin.com/in/christiandimaio) and [Giacomo Nunziati](https://it.linkedin.com/in/giacomo-nunziati-b19572185)
> Made with <span style="color: #e25555;">♥</span> in Italy
| [
-0.08546653389930725,
0.03762934356927872,
0.07879457622766495,
0.031506285071372986,
-0.008278383873403072,
-0.009596738964319229,
0.044283427298069,
0.006447951775044203,
-0.0386684387922287,
-0.05491425096988678,
-0.005268258508294821,
-0.12843161821365356,
0.0054443152621388435,
-0.020449593663215637,
-0.006308078300207853,
0.01262473315000534,
0.058769937604665756,
-0.0021294662728905678,
-0.02880677580833435,
-0.04343276843428612,
-0.009175027720630169,
0.014386395923793316,
0.07511265575885773,
0.008711240254342556,
0.07492586970329285,
-0.03994444012641907,
0.0004277099505998194,
-0.03885676711797714,
0.016642294824123383,
0.06361617892980576,
-0.02247077040374279,
0.11433151364326477,
-0.03626561164855957,
0.08569636195898056,
-0.06360926479101181,
0.08077019453048706,
-0.06205398216843605,
0.06727743148803711,
-0.014599891379475594,
0.0048188879154622555,
-0.06262344121932983,
-0.05222174525260925,
-0.0313052199780941,
0.00440607126802206,
0.11235300451517105,
-0.06595740467309952,
-0.07495096325874329,
0.010603734292089939,
-0.04679403454065323,
-0.0367187075316906,
-0.17194317281246185,
-0.02553989738225937,
-0.027121558785438538,
0.0246625654399395,
-0.04626661539077759,
0.02989216521382332,
0.02296818047761917,
0.029048781841993332,
-0.020733898505568504,
0.012098036706447601,
-0.12492518126964569,
-0.053805842995643616,
-0.03338717669248581,
0.07483519613742828,
0.001699603395536542,
-0.040956441313028336,
-0.026940498501062393,
0.05252779647707939,
-0.07000109553337097,
-0.03496096283197403,
-0.02524389885365963,
0.032079558819532394,
0.005767575930804014,
0.0016988394781947136,
0.026165571063756943,
0.019982486963272095,
-0.03105541504919529,
-0.04452566057443619,
0.039649851620197296,
-0.08718410134315491,
0.06529540568590164,
-0.004447328392416239,
0.03277451917529106,
0.047472577542066574,
0.12671096622943878,
0.02646614983677864,
0.02481240965425968,
0.0752510130405426,
-0.04595958814024925,
0.0295072253793478,
-0.027994489297270775,
-0.03710174933075905,
0.0007235630182549357,
0.029945008456707,
0.046746958047151566,
0.052324455231428146,
0.007000717334449291,
0.008278798311948776,
-0.0377969816327095,
0.0437619574368,
0.05431962013244629,
0.022595256567001343,
0.031839046627283096,
-0.020036356523633003,
-0.04907597601413727,
-0.00209706905297935,
0.023238057270646095,
-0.01991565339267254,
0.050945427268743515,
-0.13096384704113007,
0.04164748266339302,
0.021314918994903564,
-0.04621214419603348,
-0.10753951966762543,
-0.022023869678378105,
-0.009637360461056232,
0.050810556858778,
0.040396664291620255,
0.03439100459218025,
0.018524253740906715,
-0.03507831692695618,
0.030997812747955322,
-0.06234702840447426,
0.0073328809812664986,
-0.005681160371750593,
-0.12846699357032776,
0.018918151035904884,
4.7964327824815364e-33,
0.07084158062934875,
0.006702177692204714,
0.03613056614995003,
0.10193140804767609,
-0.011447315104305744,
0.03827503323554993,
-0.0516347773373127,
0.03410018980503082,
-0.062185369431972504,
-0.03294952213764191,
0.010969774797558784,
-0.028797507286071777,
-0.09599869698286057,
0.04453086853027344,
-0.0018998057348653674,
-0.018323425203561783,
-0.06728799641132355,
0.0171001348644495,
0.0451972633600235,
-0.00292111630551517,
0.09879636019468307,
0.03878720849752426,
0.013649236410856247,
-0.01143802423030138,
0.03530587628483772,
0.03262386471033096,
0.04762484133243561,
-0.11825751513242722,
0.041526954621076584,
0.023778943344950676,
-0.1704854667186737,
-0.06771241128444672,
-0.033251840621232986,
0.012295982800424099,
-0.0048151761293411255,
0.007190308999270201,
0.0001608090242370963,
-0.06728139519691467,
-0.030234161764383316,
-0.011349194683134556,
0.034568991512060165,
0.029150856658816338,
0.1096339151263237,
0.03356611728668213,
-0.10803414136171341,
-0.0633942261338234,
0.014585093595087528,
-0.026926135644316673,
0.0391000397503376,
0.03054591454565525,
-0.038993995636701584,
0.01679588481783867,
-0.03582203388214111,
-0.11550476402044296,
0.04314260557293892,
0.04421575739979744,
0.03959910199046135,
0.056422311812639236,
0.038566652685403824,
0.07167045027017593,
0.01660134643316269,
0.05110877379775047,
0.03493788093328476,
0.029285036027431488,
0.042394787073135376,
0.018650487065315247,
-0.06248675659298897,
-0.00790838710963726,
0.09356745332479477,
0.019811056554317474,
-0.04033021628856659,
-0.027665164321660995,
0.012139320373535156,
0.003799519734457135,
0.05894257873296738,
0.038061972707509995,
-0.05309581384062767,
-0.06429123133420944,
0.03344184160232544,
-0.03830970078706741,
-0.013907503336668015,
-0.008789940737187862,
-0.03190852701663971,
-0.00946719665080309,
-0.030557511374354362,
0.006368795409798622,
0.09938837587833405,
-0.05290688946843147,
-0.021361306309700012,
-0.0035915146581828594,
0.05707805976271629,
-0.03223169967532158,
-0.022623131051659584,
-0.06860332190990448,
0.02794918231666088,
-5.120994486925247e-33,
0.030614584684371948,
-0.06602999567985535,
-0.06710436195135117,
0.09614463150501251,
-0.03519317880272865,
-0.09543387591838837,
-0.007514882832765579,
0.05947580933570862,
0.04312553629279137,
-0.000739058421459049,
0.02514718659222126,
0.007900862023234367,
0.008079810068011284,
0.0011091988999396563,
-0.039023227989673615,
0.01174851506948471,
-0.04099244624376297,
0.02608272060751915,
-0.026810958981513977,
0.03384796902537346,
0.003175107529386878,
0.05444813147187233,
-0.0866064801812172,
0.014788771979510784,
-0.051947806030511856,
0.06945843994617462,
-0.021333739161491394,
0.054516419768333435,
-0.007986754179000854,
0.0015048551140353084,
-0.0571925975382328,
-0.05039219930768013,
-0.024295974522829056,
-0.04331628233194351,
-0.058099549263715744,
0.12799188494682312,
0.06647682189941406,
-0.03654510900378227,
-0.06392055004835129,
0.1029059886932373,
0.040347713977098465,
-0.01920328103005886,
-0.06863954663276672,
0.02059178613126278,
-0.048371121287345886,
0.014831271022558212,
-0.06564643979072571,
-0.017247555777430534,
0.07016322016716003,
0.04026736319065094,
0.04829248785972595,
-0.018007783219218254,
-0.10803946852684021,
-0.0569937527179718,
-0.02967671863734722,
-0.06291769444942474,
0.015023406594991684,
-0.04244610294699669,
-0.02162305638194084,
-0.05074214190244675,
0.003630831139162183,
0.01654278300702572,
0.060478173196315765,
-0.07315371930599213,
0.05179554596543312,
-0.041181497275829315,
-0.036181651055812836,
0.04406163468956947,
0.03795390948653221,
-0.06275371462106705,
-0.03897281736135483,
-0.0030250363051891327,
0.07133325934410095,
-0.07503128051757812,
0.06571327149868011,
0.04405771195888519,
0.0683232918381691,
-0.03203285112977028,
0.03702067583799362,
-0.07517019659280777,
-0.010894768871366978,
-0.08415975421667099,
0.02697896584868431,
0.15874886512756348,
0.013165690004825592,
-0.0455920547246933,
0.03387533500790596,
0.10281609743833542,
0.05222121253609657,
0.026418346911668777,
0.015448988415300846,
0.08921051770448685,
0.009217125363647938,
0.021304750815033913,
-0.03699599951505661,
-6.327583434995177e-8,
-0.06444764137268066,
0.00041085935663431883,
-0.07396332919597626,
0.07932455092668533,
0.02618376351892948,
-0.016198834404349327,
-0.06677175313234329,
-0.024064945057034492,
-0.026766516268253326,
-0.0045137726701796055,
-0.024053024128079414,
0.04515976086258888,
-0.031850192695856094,
-0.07469145953655243,
0.030913734808564186,
0.05486619099974632,
0.03532148525118828,
0.0013000782346352935,
-0.028545254841446877,
-0.06231655925512314,
0.08777589350938797,
0.050176624208688736,
-0.04618034511804581,
0.04242601990699768,
-0.03536957502365112,
0.013579940423369408,
-0.0678957998752594,
0.051609694957733154,
-0.020134715363383293,
-0.08641721308231354,
0.015608185902237892,
-0.029670830816030502,
-0.10246913135051727,
0.005007551517337561,
0.07840641587972641,
0.04180442914366722,
-0.052520502358675,
-0.04666353389620781,
0.03570782393217087,
-0.023471208289265633,
0.029179120436310768,
-0.008595051243901253,
-0.07202347368001938,
0.02801259234547615,
0.036676038056612015,
-0.026764456182718277,
-0.035579606890678406,
-0.05894897133111954,
-0.004587787669152021,
0.04128396138548851,
-0.043302688747644424,
-0.023963360115885735,
-0.004089233931154013,
0.015053273178637028,
0.05799940600991249,
-0.0390608049929142,
0.03358260169625282,
0.03366920351982117,
0.00827827025204897,
0.006737259216606617,
0.02858619950711727,
0.07534080743789673,
0.0019188394071534276,
0.014563139528036118
] |
PlanTL-GOB-ES/RoBERTalex | bedf21ecb3a6beec20f1e68d88b7dbb041991dfb | 2021-11-09T09:30:02.000Z | [
"pytorch",
"roberta",
"fill-mask",
"es",
"dataset:legal_ES",
"dataset:temu_legal",
"arxiv:2110.12201",
"transformers",
"legal",
"spanish",
"license:apache-2.0",
"autotrain_compatible"
] | fill-mask | false | PlanTL-GOB-ES | null | PlanTL-GOB-ES/RoBERTalex | 1,160 | 4 | transformers | ---
language:
- es
license: apache-2.0
tags:
- legal
- spanish
datasets:
- legal_ES
- temu_legal
metrics:
- ppl
widget:
- text: "La ley fue <mask> finalmente."
- text: "El Tribunal <mask> desestimó el recurso de amparo."
- text: "Hay base legal dentro del marco <mask> actual."
---
# Spanish Legal-domain RoBERTa
There are few models trained for the Spanish language. Some of them have been trained on low-resource, unclean corpora. The ones derived from the Spanish National Plan for Language Technologies are proficient at solving several tasks and have been trained on large-scale clean corpora. However, the Spanish legal domain language can be thought of as an independent language in its own right. We therefore created a Spanish legal model from scratch, trained exclusively on legal corpora.
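As a usage illustration (not part of the original card), the model can be queried through the standard fill-mask pipeline; the example sentence is one of the widget texts above:
```python
# Fill-mask sketch using one of the widget sentences.
from transformers import pipeline

unmasker = pipeline("fill-mask", model="PlanTL-GOB-ES/RoBERTalex")
print(unmasker("La ley fue <mask> finalmente."))
```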
## Citing
```
@misc{gutierrezfandino2021legal,
title={Spanish Legalese Language Model and Corpora},
author={Asier Gutiérrez-Fandiño and Jordi Armengol-Estapé and Aitor Gonzalez-Agirre and Marta Villegas},
year={2021},
eprint={2110.12201},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
```
For more information visit our [GitHub repository](https://github.com/PlanTL-GOB-ES/lm-legal-es)
## Funding
This work was funded by the Spanish State Secretariat for Digitalization and Artificial Intelligence (SEDIA) within the framework of the Plan-TL.
## Disclaimer
The models published in this repository are intended for a generalist purpose and are available to third parties. These models may have bias and/or any other undesirable distortions.
When third parties, deploy or provide systems and/or services to other parties using any of these models (or using systems based on these models) or become users of the models, they should note that it is their responsibility to mitigate the risks arising from their use and, in any event, to comply with applicable regulations, including regulations regarding the use of artificial intelligence.
In no event shall the owner of the models (SEDIA – State Secretariat for digitalization and artificial intelligence) nor the creator (BSC – Barcelona Supercomputing Center) be liable for any results arising from the use made by third parties of these models.
Los modelos publicados en este repositorio tienen una finalidad generalista y están a disposición de terceros. Estos modelos pueden tener sesgos y/u otro tipo de distorsiones indeseables.
Cuando terceros desplieguen o proporcionen sistemas y/o servicios a otras partes usando alguno de estos modelos (o utilizando sistemas basados en estos modelos) o se conviertan en usuarios de los modelos, deben tener en cuenta que es su responsabilidad mitigar los riesgos derivados de su uso y, en todo caso, cumplir con la normativa aplicable, incluyendo la normativa en materia de uso de inteligencia artificial.
En ningún caso el propietario de los modelos (SEDIA – Secretaría de Estado de Digitalización e Inteligencia Artificial) ni el creador (BSC – Barcelona Supercomputing Center) serán responsables de los resultados derivados del uso que hagan terceros de estos modelos. | [
-0.028048541396856308,
-0.058068424463272095,
-0.02330443449318409,
-0.0850299522280693,
-0.009402839466929436,
0.007432810962200165,
-0.03135554492473602,
0.014045401476323605,
0.0791095718741417,
0.04570208117365837,
0.05148732289671898,
-0.021156975999474525,
0.012485828250646591,
0.053116582334041595,
0.06004474312067032,
0.020490087568759918,
-0.0074503435753285885,
0.03663960099220276,
-0.037904106080532074,
-0.023713141679763794,
0.06622253358364105,
0.04816398024559021,
-0.03182893618941307,
0.04109059274196625,
-0.00015225945389829576,
-0.06660202890634537,
-0.034127961844205856,
-0.02287808619439602,
0.01139059104025364,
-0.0724136158823967,
0.01380823366343975,
0.04046962037682533,
0.037333909422159195,
0.05703281983733177,
-0.0058285449631512165,
-0.003513106843456626,
0.001446860725991428,
-0.031507592648267746,
-0.030293554067611694,
0.017586031928658485,
-0.10398752242326736,
0.024689335376024246,
-0.043229639530181885,
-0.01166439987719059,
0.03510729968547821,
-0.05808166787028313,
-0.010387290269136429,
0.06344243884086609,
-0.05059591308236122,
-0.007706421427428722,
-0.11803918331861496,
-0.06402741372585297,
0.06514742225408554,
0.04949118569493294,
-0.05217264965176582,
-0.07504934072494507,
0.025896374136209488,
0.029975902289152145,
-0.014874077402055264,
0.010597964748740196,
0.0018714485922828317,
0.01928369328379631,
-0.07064035534858704,
0.006559307221323252,
-0.04861094057559967,
-0.008578860200941563,
0.030326463282108307,
0.0526505783200264,
-0.10095124691724777,
-0.006763607729226351,
-0.034800976514816284,
0.026067478582262993,
0.038312554359436035,
0.08293481171131134,
-0.008618975058197975,
0.035767097026109695,
0.010539418086409569,
0.008973896503448486,
0.040041178464889526,
-0.12909376621246338,
0.00499006500467658,
0.06308107078075409,
-0.025963693857192993,
-0.03431639447808266,
0.005142932292073965,
0.011746945790946484,
-0.00031885565840639174,
0.07484347373247147,
0.09420708566904068,
0.014685733243823051,
0.040580373257398605,
0.008420044556260109,
0.11105765402317047,
-0.0012242664815858006,
0.02608516626060009,
0.046954575926065445,
0.08009936660528183,
0.009419901296496391,
0.018540076911449432,
0.06241133064031601,
0.019246533513069153,
0.06262565404176712,
0.026098493486642838,
-0.08826087415218353,
-0.03022424876689911,
0.020038696005940437,
0.03791498765349388,
0.03784112632274628,
0.044701457023620605,
-0.07638576626777649,
0.05902871489524841,
0.06120312213897705,
-0.07279756665229797,
-0.028148872777819633,
-0.017778262495994568,
0.03633927181363106,
0.02967211790382862,
-0.015317740850150585,
0.03334644436836243,
0.006310033146291971,
-0.06895984709262848,
0.04082836955785751,
-0.06640005111694336,
-0.05646860599517822,
0.03875814005732536,
-0.02305157668888569,
-0.09056831151247025,
5.330981491678279e-33,
0.04339559003710747,
0.06981806457042694,
-0.0803370550274849,
0.04792732372879982,
0.034396082162857056,
-0.04296666011214256,
-0.018135488033294678,
0.026759693399071693,
-0.07658742368221283,
-0.021479904651641846,
-0.01659308560192585,
0.023699844256043434,
-0.03868875652551651,
0.05588134005665779,
0.021434620022773743,
0.03952265903353691,
-0.034866973757743835,
-0.05022623762488365,
0.0073334709741175175,
0.02717653661966324,
0.11806418001651764,
0.006896874867379665,
0.00970375631004572,
0.020124493166804314,
-0.05794742330908775,
0.083488330245018,
-0.012521568685770035,
-0.09865505993366241,
-0.028982898220419884,
0.06249024346470833,
-0.050004176795482635,
-0.010258710011839867,
0.06505026668310165,
0.051003940403461456,
0.10689004510641098,
-0.0021967918146401644,
0.02392582781612873,
-0.03368556872010231,
0.04787634313106537,
0.017075128853321075,
-0.02341858111321926,
0.021460073068737984,
0.05449248105287552,
0.015640247613191605,
-0.042443107813596725,
-0.07807035744190216,
-0.010993021540343761,
-0.05949253588914871,
0.03893584385514259,
0.03273414820432663,
0.04453376680612564,
-0.009760711342096329,
0.001007268438115716,
-0.07414372265338898,
-0.028515435755252838,
0.1164390817284584,
-0.10195771604776382,
0.052969712764024734,
0.005924970842897892,
0.0204864963889122,
-0.00376999843865633,
0.08378354460000992,
0.03852503374218941,
0.09231207519769669,
-0.00836701225489378,
-0.00626946147531271,
-0.05471853166818619,
-0.00394152756780386,
0.18718476593494415,
-0.058279480785131454,
-0.06012339144945145,
0.026077628135681152,
-0.053953833878040314,
0.03938598558306694,
-0.0280007254332304,
-0.05432852357625961,
0.04503941535949707,
-0.07642864435911179,
0.016782376915216446,
0.024472882971167564,
-0.05042437091469765,
0.02301471307873726,
-0.013886542990803719,
0.009234546683728695,
0.004325758665800095,
0.029570752754807472,
0.03208819031715393,
-0.0003357059904374182,
0.010283362120389938,
0.04442994296550751,
0.02334674447774887,
-0.01483145635575056,
-0.05504484102129936,
-0.05068432539701462,
0.05215342715382576,
-6.432349903421682e-33,
-0.01804390735924244,
-0.05120212957262993,
-0.008047433570027351,
0.009463485330343246,
-0.05171799287199974,
-0.04604974761605263,
-0.0016053699655458331,
0.03838370367884636,
-0.03729888051748276,
-0.08697482198476791,
-0.03457435965538025,
-0.117278553545475,
0.031024368479847908,
-0.008162038400769234,
0.020049529150128365,
-0.02559737116098404,
-0.04613591730594635,
-0.05059399455785751,
-0.003066925797611475,
0.08336855471134186,
0.004514305852353573,
0.051850493997335434,
-0.06699255853891373,
0.06557566672563553,
0.015918392688035965,
-0.040316686034202576,
-0.0633765310049057,
0.01668994314968586,
0.01822037249803543,
0.0151041429489851,
-0.010714000090956688,
-0.04049218073487282,
-0.05842265114188194,
-0.0550655722618103,
-0.11638902127742767,
-0.06918635964393616,
0.022659065201878548,
-0.03466913476586342,
0.04888817295432091,
0.0911702960729599,
0.03259845823049545,
0.07314487546682358,
-0.0634382963180542,
-0.025265641510486603,
-0.07847569137811661,
0.005153740290552378,
-0.06742540746927261,
-0.0773809552192688,
0.0633477196097374,
-0.09870808571577072,
0.061767928302288055,
-0.05436364561319351,
-0.06522911787033081,
-0.0651908814907074,
0.020021095871925354,
-0.11558976769447327,
0.026062393561005592,
-0.06770846992731094,
-0.11014851182699203,
0.024505196139216423,
-0.027981974184513092,
0.06070234254002571,
-0.038997750729322433,
0.018786689266562462,
0.05667933449149132,
-0.024247540161013603,
-0.07908128201961517,
0.016857264563441277,
-0.014724609442055225,
-0.08547481894493103,
0.08692266047000885,
-0.0710061565041542,
-0.10492385923862457,
-0.03124835714697838,
-0.027779102325439453,
-0.03129473328590393,
-0.03883093595504761,
-0.018569687381386757,
-0.014102650806307793,
-0.007718444336205721,
0.05039362236857414,
-0.06179628521203995,
0.020891383290290833,
0.010267777368426323,
0.12761375308036804,
0.051160309463739395,
-0.08789031952619553,
0.02018648199737072,
-0.02370637282729149,
0.0480344332754612,
-0.005471506156027317,
0.02120940200984478,
-0.018300501629710197,
0.12033714354038239,
0.017242735251784325,
-5.0882018598485956e-8,
-0.07742000371217728,
0.036602940410375595,
-0.014720133505761623,
0.04557521268725395,
-0.08076675236225128,
-0.0072271511889994144,
-0.001359051326289773,
-0.017796974629163742,
-0.03711894154548645,
-0.0011279912432655692,
0.0026713756378740072,
0.0028472761623561382,
-0.07108423113822937,
-0.0024993603583425283,
-0.036566540598869324,
0.07417172938585281,
0.05725230276584625,
0.12802369892597198,
-0.011637834832072258,
0.08146654814481735,
0.014501910656690598,
0.023662865161895752,
-0.008076377213001251,
-0.033185798674821854,
0.03333194926381111,
-0.02214174158871174,
-0.08019661158323288,
0.03123646043241024,
0.035018447786569595,
0.008179732598364353,
-0.016376029700040817,
0.041123513132333755,
-0.07042723894119263,
-0.09334436058998108,
0.03414442017674446,
0.010992627590894699,
-0.059376347810029984,
-0.030758874490857124,
-0.020846927538514137,
0.04839986190199852,
0.10908650606870651,
0.05742048844695091,
-0.07794581353664398,
-0.015884244814515114,
0.06635022908449173,
-0.05158244073390961,
-0.05460921302437782,
-0.01908957026898861,
0.0600188672542572,
-0.01342681236565113,
0.0369756817817688,
-0.059611234813928604,
-0.03924904018640518,
0.027376536279916763,
0.02453291043639183,
0.023073723539710045,
0.05618925392627716,
0.028024518862366676,
0.0034849257208406925,
0.005705500952899456,
0.07929171621799469,
0.030849119648337364,
0.09166499227285385,
-0.04532284289598465
] |
peterhsu/marian-finetuned-kde4-en-to-zh_TW-accelerate | 57bd8aa1bbf04ec9234d74caabdd329a9927c942 | 2022-02-28T09:36:28.000Z | [
"pytorch",
"marian",
"text2text-generation",
"dataset:kde4",
"transformers",
"translation",
"license:apache-2.0",
"model-index",
"autotrain_compatible"
] | translation | false | peterhsu | null | peterhsu/marian-finetuned-kde4-en-to-zh_TW-accelerate | 1,159 | null | transformers | ---
license: apache-2.0
tags:
- translation
datasets:
- kde4
metrics:
- bleu
model-index:
- name: marian-finetuned-kde4-en-to-zh_TW-accelerate
results:
- task:
name: Sequence-to-sequence Language Modeling
type: text2text-generation
dataset:
name: kde4
type: kde4
args: en-zh_TW
metrics:
- name: Bleu
type: bleu
value: 40.07
---
# marian-finetuned-kde4-en-to-zh_TW-accelerate
This model is a fine-tuned version of [Helsinki-NLP/opus-mt-en-zh](https://huggingface.co/Helsinki-NLP/opus-mt-en-zh) on the kde4 dataset.
It achieves the following results on the evaluation set:
- Bleu: 40.70
## Model description
More information needed
## Intended uses & limitations
More information needed
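As an illustration only (not part of the original card), the fine-tuned checkpoint can be loaded with the standard `transformers` translation pipeline; the example sentence is our own:
```python
from transformers import pipeline

# Load the fine-tuned en -> zh_TW checkpoint as a translation pipeline.
translator = pipeline(
    "translation",
    model="peterhsu/marian-finetuned-kde4-en-to-zh_TW-accelerate",
)
print(translator("Default to expanded threads"))
```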
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 2e-05
- train_batch_size: 8
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 3
- mixed_precision_training: Native AMP
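The hyperparameters above map roughly onto the following optimizer/scheduler setup. This is an illustrative sketch only: the original training script is not included in the card, and the use of `AdamW`, the base-model name, and the step-count placeholder below are assumptions.
```python
import torch
from transformers import AutoModelForSeq2SeqLM, get_scheduler

# Assumed starting checkpoint for the fine-tuning run.
model = AutoModelForSeq2SeqLM.from_pretrained("Helsinki-NLP/opus-mt-en-zh")

# "Adam with betas=(0.9,0.999) and epsilon=1e-08" at learning rate 2e-05.
optimizer = torch.optim.AdamW(model.parameters(), lr=2e-5, betas=(0.9, 0.999), eps=1e-8)

num_epochs = 3
steps_per_epoch = 1000  # placeholder: depends on the kde4 split and batch size 8
lr_scheduler = get_scheduler(
    "linear",  # lr_scheduler_type: linear
    optimizer=optimizer,
    num_warmup_steps=0,
    num_training_steps=num_epochs * steps_per_epoch,
)
```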
### Training results
### Framework versions
- Transformers 4.16.2
- Pytorch 1.10.0+cu111
- Datasets 1.18.3
- Tokenizers 0.11.0 | [
-0.06135788559913635,
-0.025608066469430923,
0.006149237975478172,
-0.045391373336315155,
-0.016789495944976807,
0.042010486125946045,
-0.034844815731048584,
-0.05861144885420799,
0.0023725249338895082,
-0.07243742793798447,
0.010307179763913155,
-0.11860726028680801,
-0.014316574670374393,
-0.05478377267718315,
-0.05655917525291443,
0.03118789754807949,
0.0685357004404068,
-0.034199368208646774,
-0.07327218353748322,
-0.03462955728173256,
0.06583960354328156,
0.07734251767396927,
0.09004045277833939,
0.011305241845548153,
0.05215198174118996,
0.052162639796733856,
-0.05562612786889076,
0.04958903044462204,
0.094355009496212,
0.004405585117638111,
0.024316051974892616,
0.0643143281340599,
0.0007632054039277136,
0.02192923054099083,
0.037358954548835754,
0.056713350117206573,
-0.028363732621073723,
-0.06125592812895775,
-0.03854411095380783,
0.0301397442817688,
0.01451025903224945,
0.012342724949121475,
-0.02237783931195736,
0.040514614433050156,
0.06157049909234047,
0.008467255160212517,
-0.033130813390016556,
-0.060518939048051834,
-0.07741324603557587,
0.0011016925564035773,
-0.11075180023908615,
-0.022604385390877724,
0.004352732095867395,
0.08236587047576904,
-0.009931868873536587,
0.046529751271009445,
-0.10071887075901031,
0.04249170422554016,
0.0696244165301323,
-0.004350272938609123,
-0.006826177705079317,
-0.04542979970574379,
-0.07266106456518173,
-0.0033559936564415693,
-0.008294295519590378,
-0.03205518424510956,
0.03307400643825531,
-0.005628776736557484,
-0.04135677590966225,
0.01800553873181343,
-0.020666345953941345,
0.01058450248092413,
0.0009979110909625888,
0.05543576553463936,
0.009111280553042889,
0.03354733809828758,
0.019197652116417885,
0.018060166388750076,
0.02815984934568405,
-0.15064680576324463,
0.05805649235844612,
-0.026538502424955368,
0.04670101776719093,
-0.04488223046064377,
0.016027426347136497,
-0.021547764539718628,
0.055898699909448624,
-0.022431621327996254,
-0.011849815025925636,
0.003536865347996354,
0.018290750682353973,
-0.05439036339521408,
0.013788911513984203,
-0.017897112295031548,
-0.011753257364034653,
0.0460449643433094,
0.034175023436546326,
0.07650860399007797,
0.02258116938173771,
0.10020963102579117,
-0.0032860387582331896,
0.073081836104393,
0.019411811605095863,
0.0051363189704716206,
-0.09899039566516876,
-0.05765070766210556,
0.08061344921588898,
0.08946029841899872,
-0.0011849607108160853,
-0.050130441784858704,
0.06530638039112091,
0.021158698946237564,
-0.0242769792675972,
-0.025680935010313988,
0.02851472795009613,
0.00416173879057169,
-0.0025408759247511625,
-0.05659870430827141,
0.011885828338563442,
0.07363397628068924,
-0.07109209150075912,
-0.06307639181613922,
-0.018034573644399643,
0.03129568696022034,
-0.045780859887599945,
-0.010169945657253265,
-0.029660310596227646,
6.087744183586528e-33,
0.0942939966917038,
0.03910132125020027,
0.056092023849487305,
-0.05286568030714989,
-0.04323430731892586,
-0.0668598935008049,
-0.08242245763540268,
0.0067122154869139194,
-0.08469533920288086,
-0.045371752232313156,
-0.09101959317922592,
-0.03898058831691742,
-0.04160603508353233,
0.022605642676353455,
0.015337626449763775,
-0.03490598499774933,
-0.008179008029401302,
0.06637797504663467,
0.02938312664628029,
0.06985516846179962,
0.14485912024974823,
0.013112829066812992,
-0.02349092811346054,
-0.016029197722673416,
-0.01897803321480751,
0.0762033686041832,
0.07773086428642273,
-0.07804988324642181,
-0.07124121487140656,
0.039030928164720535,
-0.08909035474061966,
-0.006661985069513321,
-0.02511073835194111,
0.0013208369491621852,
-0.03751901537179947,
-0.01973923295736313,
-0.041985638439655304,
-0.034187037497758865,
-0.031790949404239655,
-0.075918048620224,
-0.028568733483552933,
0.03956534340977669,
-0.018577616661787033,
-0.05245000496506691,
-0.044205039739608765,
-0.019182896241545677,
0.029194079339504242,
-0.03647913038730621,
0.03777119517326355,
0.0028159620705991983,
-0.0147748664021492,
0.0380355566740036,
-0.02623857744038105,
0.03247397020459175,
-0.002248572651296854,
0.08044623583555222,
0.11347568780183792,
0.04960949346423149,
0.023921554908156395,
0.07925132662057877,
0.006439376622438431,
0.00413557281717658,
0.010994313284754753,
0.028580021113157272,
0.08062022179365158,
-0.014990847557783127,
-0.03301975131034851,
-0.001181625877507031,
0.02576872706413269,
-0.01906917430460453,
-0.08255016058683395,
-0.04949534684419632,
0.08485869318246841,
0.04597251117229462,
0.10951884835958481,
-0.042504504323005676,
-0.006233823485672474,
-0.12316005676984787,
-0.05073077231645584,
0.01806783117353916,
-0.06588739901781082,
0.007258690427988768,
0.05506151542067528,
-0.0693877562880516,
-0.02624479867517948,
-0.07671955972909927,
0.026033446192741394,
-0.059796981513500214,
-0.00550040602684021,
-0.014873859472572803,
-0.007666722405701876,
0.026786351576447487,
-0.07919729501008987,
-0.05279140919446945,
-0.06439951062202454,
-6.031369678572999e-33,
0.05748807266354561,
0.04216625168919563,
-0.0540856309235096,
0.09190903604030609,
0.010380541905760765,
0.015704374760389328,
0.045537613332271576,
0.1115492731332779,
0.0012661982327699661,
-0.047537241131067276,
0.12172434478998184,
-0.07632032036781311,
0.04183516278862953,
-0.028104795143008232,
0.07310598343610764,
0.011210097000002861,
-0.04775606840848923,
-0.01956687681376934,
0.05215584859251976,
0.07418828457593918,
0.032719358801841736,
0.06078363582491875,
-0.07475719600915909,
0.03831370919942856,
0.03460638225078583,
-0.0005614677793346345,
0.041061919182538986,
0.10224956274032593,
0.008230109699070454,
-0.00485770870000124,
-0.0003228232671972364,
-0.04218810051679611,
-0.09326860308647156,
0.010652551427483559,
-0.042986683547496796,
0.022389933466911316,
0.043068040162324905,
-0.00248351925984025,
-0.052648358047008514,
0.1235155463218689,
0.08671393990516663,
0.09858083724975586,
-0.06915035843849182,
0.021728655323386192,
-0.015263150446116924,
-0.04059354588389397,
-0.05993488430976868,
0.01850901171565056,
0.03936013951897621,
-0.08691780269145966,
0.05776509642601013,
0.026473380625247955,
-0.07414278388023376,
-0.011895084753632545,
-0.00981841515749693,
-0.06534986943006516,
-0.03612121194601059,
-0.05739782750606537,
-0.09599950164556503,
-0.012861097231507301,
-0.07057727128267288,
-0.036939412355422974,
0.03113081119954586,
-0.016650380566716194,
0.06867274641990662,
-0.05396658182144165,
-0.031575124710798264,
0.006012082565575838,
0.01806415244936943,
-0.020201891660690308,
0.025893056765198708,
-0.07295514643192291,
0.0217723585665226,
0.06828608363866806,
-0.01902247592806816,
-0.07697033137083054,
0.016590941697359085,
-0.004304155707359314,
-0.00932358205318451,
-0.0688152015209198,
-0.0370657779276371,
0.07749943435192108,
0.051730021834373474,
0.035697393119335175,
0.0512869618833065,
0.07712239772081375,
0.039359692484140396,
0.043211307376623154,
-0.02127177082002163,
-0.024417396634817123,
0.05198609456419945,
0.03503907099366188,
-0.0407676063477993,
0.14510363340377808,
-0.018306607380509377,
-5.654959167600282e-8,
-0.03433722257614136,
-0.05520031228661537,
-0.11353535950183868,
0.07363606244325638,
0.00834471732378006,
-0.05975877121090889,
-0.055482298135757446,
-0.0049017625860869884,
-0.008898125030100346,
-0.09328291565179825,
0.05370963737368584,
0.01769307628273964,
-0.08801788091659546,
0.013765014708042145,
-0.006280332803726196,
0.051996856927871704,
-0.045009493827819824,
0.0417020283639431,
-0.033112458884716034,
0.010783388279378414,
0.014300934039056301,
0.03187176585197449,
0.006543231196701527,
-0.039047110825777054,
0.017380695790052414,
0.023330822587013245,
-0.05741063877940178,
0.06507726013660431,
0.05269245058298111,
-0.05109446123242378,
0.02490076795220375,
0.007998800836503506,
-0.04105290770530701,
-0.033989157527685165,
0.034292902797460556,
0.04133973643183708,
-0.07907851785421371,
0.020090416073799133,
0.008861389011144638,
0.08488402515649796,
0.0617537647485733,
0.05262409523129463,
-0.08593366295099258,
0.012475105933845043,
0.03431635722517967,
0.0009677957859821618,
-0.0818469375371933,
-0.04058501869440079,
0.041467536240816116,
-0.012605252675712109,
-0.0011283265193924308,
-0.03141035884618759,
-0.05431041494011879,
0.009066717699170113,
0.05634820833802223,
0.038728807121515274,
-0.02411116100847721,
-0.03049747459590435,
0.04582039266824722,
-0.013413718901574612,
0.04942949488759041,
-0.04405353590846062,
-0.011198338121175766,
-0.011187650263309479
] |
remotejob/tweetsGPT2fi_v0 | 34abb218bb8e6f61bec9a47c0db81e776229f1a6 | 2022-05-27T22:22:53.000Z | [
"pytorch",
"rust",
"gpt2",
"text-generation",
"transformers"
] | text-generation | false | remotejob | null | remotejob/tweetsGPT2fi_v0 | 1,157 | null | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
setu4993/smaller-LaBSE | abd4e324cf0850b32f1dbf4b08fad6022ab47c0b | 2021-12-05T06:13:27.000Z | [
"pytorch",
"tf",
"jax",
"bert",
"feature-extraction",
"ar",
"de",
"en",
"es",
"fr",
"it",
"ja",
"ko",
"nl",
"pl",
"pt",
"ru",
"th",
"tr",
"zh",
"dataset:CommonCrawl",
"dataset:Wikipedia",
"arxiv:2010.05609",
"arxiv:2007.01852",
"transformers",
"sentence_embedding",
"multilingual",
"google",
"sentence-similarity",
"labse",
"license:apache-2.0"
] | feature-extraction | false | setu4993 | null | setu4993/smaller-LaBSE | 1,156 | 4 | transformers | ---
language:
- ar
- de
- en
- es
- fr
- it
- ja
- ko
- nl
- pl
- pt
- ru
- th
- tr
- zh
tags:
- bert
- sentence_embedding
- multilingual
- google
- sentence-similarity
- labse
license: apache-2.0
datasets:
- CommonCrawl
- Wikipedia
---
# LaBSE
## Model description
Smaller Language-agnostic BERT Sentence Encoder (LaBSE) is a BERT-based model distilled from the [original LaBSE model](https://huggingface.co/setu4993/LaBSE) to 15 languages (from the original 109 languages) using the techniques described in the paper ['Load What You Need: Smaller Versions of Multilingual BERT'](https://arxiv.org/abs/2010.05609) by [Ukjae Jeong](https://github.com/jeongukjae/).
- Model: [HuggingFace's model hub](https://huggingface.co/setu4993/smaller-LaBSE).
- Original model: [TensorFlow Hub](https://tfhub.dev/jeongukjae/smaller_LaBSE_15lang/1).
- Distillation source: [GitHub](https://github.com/jeongukjae/smaller-labse).
- Conversion from TensorFlow to PyTorch: [GitHub](https://github.com/setu4993/convert-labse-tf-pt).
## Usage
Using the model:
```python
import torch
from transformers import BertModel, BertTokenizerFast
tokenizer = BertTokenizerFast.from_pretrained("setu4993/smaller-LaBSE")
model = BertModel.from_pretrained("setu4993/smaller-LaBSE")
model = model.eval()
english_sentences = [
"dog",
"Puppies are nice.",
"I enjoy taking long walks along the beach with my dog.",
]
english_inputs = tokenizer(english_sentences, return_tensors="pt", padding=True)
with torch.no_grad():
english_outputs = model(**english_inputs)
```
To get the sentence embeddings, use the pooler output:
```python
english_embeddings = english_outputs.pooler_output
```
Output for other languages:
```python
italian_sentences = [
"cane",
"I cuccioli sono carini.",
"Mi piace fare lunghe passeggiate lungo la spiaggia con il mio cane.",
]
japanese_sentences = ["犬", "子犬はいいです", "私は犬と一緒にビーチを散歩するのが好きです"]
italian_inputs = tokenizer(italian_sentences, return_tensors="pt", padding=True)
japanese_inputs = tokenizer(japanese_sentences, return_tensors="pt", padding=True)
with torch.no_grad():
italian_outputs = model(**italian_inputs)
japanese_outputs = model(**japanese_inputs)
italian_embeddings = italian_outputs.pooler_output
japanese_embeddings = japanese_outputs.pooler_output
```
For similarity between sentences, an L2-norm is recommended before calculating the similarity:
```python
import torch.nn.functional as F
def similarity(embeddings_1, embeddings_2):
    # L2-normalize each embedding so the matrix product below is cosine similarity.
    normalized_embeddings_1 = F.normalize(embeddings_1, p=2)
    normalized_embeddings_2 = F.normalize(embeddings_2, p=2)
    return torch.matmul(
        normalized_embeddings_1, normalized_embeddings_2.transpose(0, 1)
    )
print(similarity(english_embeddings, italian_embeddings))
print(similarity(english_embeddings, japanese_embeddings))
print(similarity(italian_embeddings, japanese_embeddings))
```
## Details
Details about data, training, evaluation and performance metrics are available in the [original paper](https://arxiv.org/abs/2007.01852).
### BibTeX entry and citation info
```bibtex
@misc{feng2020languageagnostic,
title={Language-agnostic BERT Sentence Embedding},
author={Fangxiaoyu Feng and Yinfei Yang and Daniel Cer and Naveen Arivazhagan and Wei Wang},
year={2020},
eprint={2007.01852},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
```
| [
-0.11334580928087234,
-0.06219619885087013,
0.0709598958492279,
-0.029147394001483917,
0.009488803334534168,
0.05025215446949005,
-0.0809202641248703,
0.048790719360113144,
0.002362303202971816,
-0.030198290944099426,
0.029412632808089256,
-0.07006020843982697,
0.026838915422558784,
0.05007068067789078,
0.021276414394378662,
0.03608810529112816,
0.0253469105809927,
0.01621842384338379,
-0.09596064686775208,
-0.07023776322603226,
0.0446881502866745,
0.028362369164824486,
0.038579557090997696,
-0.03713742643594742,
0.039370305836200714,
-0.033606626093387604,
-0.026886438950896263,
-0.01057951059192419,
0.06952456384897232,
0.01575450226664543,
0.020547984167933464,
0.009494626894593239,
-0.0275481678545475,
0.0849776491522789,
0.03547205403447151,
0.05469406023621559,
-0.04561777412891388,
-0.054508328437805176,
0.03505401685833931,
0.049085792154073715,
-0.014444908127188683,
0.03410401940345764,
-0.06932080537080765,
-0.07293327152729034,
0.06068839505314827,
-0.024846848100423813,
-0.08593923598527908,
-0.007069997023791075,
-0.037598125636577606,
-0.021008972078561783,
-0.05887985974550247,
-0.005915880668908358,
0.034903377294540405,
0.032031893730163574,
-0.0048015909269452095,
-0.06501497328281403,
-0.000005949337264610222,
-0.013488905504345894,
0.006023845169693232,
-0.057687677443027496,
-0.07057302445173264,
-0.03338850289583206,
-0.027083048596978188,
0.04054064303636551,
-0.08028095215559006,
0.021093720570206642,
-0.01936069130897522,
0.009691537357866764,
-0.04468998685479164,
0.024938933551311493,
-0.08377014845609665,
0.002429049462080002,
-0.027709610760211945,
0.02985680289566517,
-0.00741172069683671,
-0.02928832918405533,
0.06803235411643982,
-0.0028167760465294123,
0.014905703254044056,
-0.10080081224441528,
-0.006625188514590263,
0.005351618863642216,
0.04074469953775406,
-0.013005095534026623,
0.05205744504928589,
-0.01063339039683342,
0.04385293275117874,
-0.00904173869639635,
0.031207727268338203,
0.033150263130664825,
-0.06335758417844772,
-0.09378387033939362,
0.04889027774333954,
0.01982230879366398,
0.029616639018058777,
0.005539830774068832,
0.06524203717708588,
0.024983642622828484,
-0.00752092944458127,
0.06265318393707275,
0.03610282018780708,
0.05012550577521324,
0.13200771808624268,
-0.07477612793445587,
-0.022571317851543427,
0.012478972785174847,
-0.011541569605469704,
0.021294305101037025,
0.03504905849695206,
-0.12864358723163605,
0.0032594960648566484,
0.016962867230176926,
0.006304743234068155,
-0.07262174040079117,
0.028937874361872673,
-0.027816832065582275,
0.008436284959316254,
-0.05024468153715134,
0.06683077663183212,
0.04545677453279495,
-0.00229632924310863,
-0.01249318104237318,
0.047937411814928055,
-0.0014846116537228227,
-0.06269927322864532,
0.005474434699863195,
0.009663363918662071,
8.990064164008734e-34,
0.04709040746092796,
0.04077677056193352,
-0.008621866814792156,
-0.012998619116842747,
0.029132284224033356,
0.00011305679072393104,
-0.007147765252739191,
0.06919829547405243,
-0.1041712760925293,
-0.040145184844732285,
-0.013042186386883259,
0.060371771454811096,
-0.07347080111503601,
0.037593428045511246,
-0.03776362165808678,
0.07835653424263,
-0.01647805981338024,
0.04813516139984131,
0.055251412093639374,
0.04034772515296936,
0.06792061030864716,
0.030034910887479782,
-0.011248149909079075,
-0.05107591301202774,
-0.07452470809221268,
0.04124027490615845,
0.07313137501478195,
-0.08595539629459381,
-0.06643539667129517,
0.025657013058662415,
-0.06943537294864655,
0.008743997663259506,
-0.016348810866475105,
0.036510903388261795,
-0.017884476110339165,
-0.03597056865692139,
0.02290690876543522,
-0.04998592287302017,
0.015207798220217228,
-0.040104132145643234,
0.050056878477334976,
0.051436897367239,
0.017863348126411438,
-0.053727902472019196,
0.026501039043068886,
-0.0074916076846420765,
0.031352996826171875,
-0.03394971042871475,
0.045187875628471375,
0.0014076920924708247,
0.047348763793706894,
0.002510015619918704,
-0.06707122921943665,
0.02464519627392292,
0.01917048543691635,
-0.02863786555826664,
0.05143203213810921,
-0.015093611553311348,
0.08429762721061707,
0.0643344521522522,
0.021769525483250618,
-0.014194674789905548,
0.043069157749414444,
0.05431366339325905,
0.12248141318559647,
-0.028880517929792404,
-0.04322797432541847,
-0.011529590003192425,
-0.00421138945966959,
0.054256439208984375,
-0.05049887299537659,
0.004651609808206558,
0.021304307505488396,
0.023837875574827194,
0.03412114828824997,
-0.05277758836746216,
-0.00964941456913948,
-0.11031720787286758,
-0.030383482575416565,
0.053683482110500336,
-0.049877554178237915,
0.025034330785274506,
-0.04270161688327789,
-0.04285306856036186,
-0.0989866703748703,
-0.004534972365945578,
0.036665089428424835,
-0.05802788957953453,
0.030890056863427162,
0.015284980647265911,
0.06371951103210449,
-0.06808363646268845,
-0.01674065738916397,
-0.048728059977293015,
0.02314058504998684,
-1.937601868606932e-33,
-0.0031269825994968414,
0.06622371822595596,
-0.048086587339639664,
0.0329461544752121,
-0.05844554677605629,
-0.07695938646793365,
0.0594998337328434,
0.17644518613815308,
-0.001046610763296485,
-0.04604220390319824,
0.014430899173021317,
-0.0807017832994461,
0.06681933999061584,
-0.020027847960591316,
0.0964386835694313,
0.0569445975124836,
-0.01733994111418724,
0.029070673510432243,
0.05161640793085098,
0.0677284523844719,
0.0834684818983078,
-0.04684451222419739,
-0.07504761219024658,
0.1180698350071907,
0.026253800839185715,
0.05534804239869118,
-0.05200060084462166,
0.00007751878001727164,
-0.02861996926367283,
-0.006436611991375685,
-0.02091541886329651,
-0.01776875928044319,
-0.04106296971440315,
0.018687112256884575,
-0.09919098019599915,
0.02884015254676342,
0.03706073760986328,
0.005559941753745079,
-0.004036860074847937,
-0.004025638103485107,
0.06964324414730072,
-0.005955955479294062,
-0.03972897306084633,
0.01312233880162239,
-0.01220871601253748,
0.024351032450795174,
-0.15153175592422485,
-0.055060114711523056,
0.03481043130159378,
-0.07472250610589981,
0.004202498123049736,
0.00008149779750965536,
-0.10673756152391434,
-0.02817770466208458,
-0.06478888541460037,
-0.09744465351104736,
0.022581294178962708,
-0.05680786073207855,
-0.0956273004412651,
-0.027476811781525612,
-0.028651971369981766,
-0.04217783734202385,
0.05310888588428497,
-0.008340888656675816,
0.004427551757544279,
-0.0160160381346941,
0.004324090667068958,
0.05769481509923935,
-0.0302035603672266,
-0.05619364231824875,
-0.026260221377015114,
-0.07410691678524017,
0.04346165806055069,
0.02496791072189808,
0.04048110544681549,
-0.022564351558685303,
0.01456852350383997,
-0.06164824217557907,
0.0032150743063539267,
-0.046524778008461,
-0.01260063424706459,
-0.055541783571243286,
0.04922337457537651,
-0.008055295795202255,
-0.00005253633207757957,
0.04531858116388321,
0.016360139474272728,
0.049265116453170776,
-0.0022629296872764826,
0.03900057077407837,
0.00116910960059613,
0.0590490996837616,
0.015115304850041866,
0.09909474104642868,
0.0465884692966938,
-5.288584148388509e-8,
-0.06457410752773285,
-0.028433416038751602,
-0.07431859523057938,
0.022342462092638016,
-0.10071838647127151,
-0.048982713371515274,
-0.035476379096508026,
0.03551657125353813,
-0.03041483461856842,
0.010996305383741856,
0.035867609083652496,
0.09584735333919525,
-0.06952110677957535,
0.09429080784320831,
-0.0007976462366059422,
0.062355637550354004,
-0.039364416152238846,
0.04523080214858055,
0.03136475756764412,
0.010798807255923748,
-0.005337458103895187,
0.05245344340801239,
0.04304586350917816,
-0.053502149879932404,
0.029205773025751114,
-0.03652336075901985,
-0.017771335318684578,
0.07450586557388306,
0.009030434302985668,
-0.040533874183893204,
-0.05349788814783096,
0.026052700355648994,
-0.08728575706481934,
-0.020641623064875603,
0.031455934047698975,
0.05094751715660095,
-0.08756696432828903,
-0.03649509698152542,
-0.007425080053508282,
0.08483316004276276,
0.1406209021806717,
0.04521210119128227,
-0.14888900518417358,
-0.009540170431137085,
0.15397492051124573,
0.01922123320400715,
-0.022757234051823616,
-0.0806669294834137,
0.06830154359340668,
-0.01362130418419838,
0.023612122982740402,
-0.048210326582193375,
0.003281737444922328,
0.019804855808615685,
-0.024805305525660515,
0.014178665354847908,
-0.11168661713600159,
-0.043638572096824646,
0.10274652391672134,
-0.03349456936120987,
0.02214614301919937,
0.05191391706466675,
0.07870902121067047,
0.03758013993501663
] |
Helsinki-NLP/opus-mt-az-en | d5618bb9172d2400a504d8b95baf144517ac6b48 | 2021-01-18T07:48:32.000Z | [
"pytorch",
"marian",
"text2text-generation",
"az",
"en",
"transformers",
"translation",
"license:apache-2.0",
"autotrain_compatible"
] | translation | false | Helsinki-NLP | null | Helsinki-NLP/opus-mt-az-en | 1,155 | null | transformers | ---
language:
- az
- en
tags:
- translation
license: apache-2.0
---
### aze-eng
* source group: Azerbaijani
* target group: English
* OPUS readme: [aze-eng](https://github.com/Helsinki-NLP/Tatoeba-Challenge/tree/master/models/aze-eng/README.md)
* model: transformer-align
* source language(s): aze_Latn
* target language(s): eng
* pre-processing: normalization + SentencePiece (spm12k,spm12k)
* download original weights: [opus-2020-06-16.zip](https://object.pouta.csc.fi/Tatoeba-MT-models/aze-eng/opus-2020-06-16.zip)
* test set translations: [opus-2020-06-16.test.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/aze-eng/opus-2020-06-16.test.txt)
* test set scores: [opus-2020-06-16.eval.txt](https://object.pouta.csc.fi/Tatoeba-MT-models/aze-eng/opus-2020-06-16.eval.txt)
## Benchmarks
| testset | BLEU | chr-F |
|-----------------------|-------|-------|
| Tatoeba-test.aze.eng | 31.9 | 0.490 |
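As an illustration only (not part of the original OPUS-MT card), the checkpoint can be loaded with the MarianMT classes in `transformers`; the Azerbaijani example sentence below is our own:
```python
from transformers import MarianMTModel, MarianTokenizer

model_name = "Helsinki-NLP/opus-mt-az-en"
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)

# Translate a batch of Azerbaijani (Latin script) sentences into English.
src_texts = ["Mən kitab oxuyuram."]
batch = tokenizer(src_texts, return_tensors="pt", padding=True)
generated = model.generate(**batch)
print(tokenizer.batch_decode(generated, skip_special_tokens=True))
```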
### System Info:
- hf_name: aze-eng
- source_languages: aze
- target_languages: eng
- opus_readme_url: https://github.com/Helsinki-NLP/Tatoeba-Challenge/tree/master/models/aze-eng/README.md
- original_repo: Tatoeba-Challenge
- tags: ['translation']
- languages: ['az', 'en']
- src_constituents: {'aze_Latn'}
- tgt_constituents: {'eng'}
- src_multilingual: False
- tgt_multilingual: False
- prepro: normalization + SentencePiece (spm12k,spm12k)
- url_model: https://object.pouta.csc.fi/Tatoeba-MT-models/aze-eng/opus-2020-06-16.zip
- url_test_set: https://object.pouta.csc.fi/Tatoeba-MT-models/aze-eng/opus-2020-06-16.test.txt
- src_alpha3: aze
- tgt_alpha3: eng
- short_pair: az-en
- chrF2_score: 0.49
- bleu: 31.9
- brevity_penalty: 0.997
- ref_len: 16165.0
- src_name: Azerbaijani
- tgt_name: English
- train_date: 2020-06-16
- src_alpha2: az
- tgt_alpha2: en
- prefer_old: False
- long_pair: aze-eng
- helsinki_git_sha: 480fcbe0ee1bf4774bcbe6226ad9f58e63f6c535
- transformers_git_sha: 2207e5d8cb224e954a7cba69fa4ac2309e9ff30b
- port_machine: brutasse
- port_time: 2020-08-21-14:41 | [
-0.07888252288103104,
-0.0012796936789527535,
-0.015278263948857784,
-0.021771250292658806,
-0.04786687716841698,
0.03154049441218376,
-0.014094512909650803,
-0.011604641564190388,
0.05565973371267319,
0.01616682857275009,
0.00326856249012053,
-0.0387648306787014,
0.025233397260308266,
0.005290742963552475,
-0.02955900877714157,
-0.005757679231464863,
-0.0856499895453453,
0.038122743368148804,
-0.10570362955331802,
-0.06883297115564346,
0.01372687891125679,
0.06861405819654465,
0.033794403076171875,
0.03393357992172241,
0.09596645087003708,
0.05928942188620567,
-0.04906955361366272,
0.0036881386768072844,
0.04875914752483368,
-0.03596682474017143,
0.019647235050797462,
0.03883710503578186,
0.04057367891073227,
0.03518329933285713,
0.00591856986284256,
0.061513882130384445,
-0.05542769283056259,
-0.12416897714138031,
0.0164346881210804,
0.025407178327441216,
0.02270515076816082,
0.038034021854400635,
-0.027658050879836082,
-0.032597288489341736,
0.02102815732359886,
0.05564294382929802,
-0.017707180231809616,
0.03472663462162018,
-0.013760109432041645,
0.014264550060033798,
-0.1362740397453308,
-0.020376434549689293,
-0.03227268531918526,
0.06100241467356682,
-0.055566076189279556,
0.047613948583602905,
0.026374517008662224,
-0.00032253784593194723,
0.008368807844817638,
-0.061312198638916016,
-0.0716060921549797,
0.012866339646279812,
-0.07217372208833694,
0.02391964942216873,
-0.03936970606446266,
-0.0009494891855865717,
0.03530540689826012,
0.03493727743625641,
-0.07173934578895569,
0.04873065650463104,
-0.0694449320435524,
0.011631105095148087,
-0.012280832976102829,
0.05333004519343376,
-0.020645298063755035,
-0.011157927103340626,
0.07451498508453369,
-0.036694154143333435,
0.021738629788160324,
-0.08723723143339157,
0.03417670726776123,
-0.037879690527915955,
0.06250661611557007,
-0.013808534480631351,
0.04588785022497177,
0.013767589814960957,
0.01847279816865921,
0.0030124573968350887,
0.04555458575487137,
0.033323246985673904,
-0.002196977846324444,
-0.028864234685897827,
0.050963740795850754,
0.028074214234948158,
0.017900317907333374,
-0.0006792402709834278,
0.01966577023267746,
0.038885630667209625,
-0.033453889191150665,
0.1099005714058876,
0.06052018702030182,
-0.015786373987793922,
0.0652632936835289,
-0.024648308753967285,
-0.11606888473033905,
-0.04675285890698433,
0.03230660408735275,
0.030859429389238358,
-0.05235697701573372,
-0.06381972134113312,
0.03049341030418873,
-0.05041341856122017,
0.00458149891346693,
-0.07827349752187729,
0.021150697022676468,
-0.03225072845816612,
0.025368403643369675,
-0.02290222980082035,
-0.0241678599268198,
0.027958326041698456,
-0.049011848866939545,
-0.01197636965662241,
-0.048590488731861115,
0.03602750971913338,
-0.029909491539001465,
-0.00962253101170063,
-0.042252376675605774,
2.9831675431358858e-33,
0.04107202962040901,
0.05047253146767616,
-0.019617335870862007,
0.016811810433864594,
-0.058603908866643906,
0.03084477223455906,
-0.04075933247804642,
0.004228380974382162,
-0.05366571247577667,
-0.027052782475948334,
-0.01482745073735714,
-0.02499707229435444,
-0.08056838065385818,
0.009431922808289528,
-0.03950563445687294,
-0.0048989346250891685,
0.02609500288963318,
0.03912791237235069,
-0.03687762841582298,
0.06654860079288483,
0.04889009892940521,
0.02564333751797676,
-0.034077394753694534,
-0.046139538288116455,
-0.07091272622346878,
0.09987182170152664,
0.05692289024591446,
-0.14922131597995758,
-0.1091705709695816,
-0.0028432179242372513,
-0.07195413112640381,
-0.009062420576810837,
-0.05796964839100838,
-0.04150616377592087,
-0.03558019921183586,
-0.06034839153289795,
-0.004382214043289423,
-0.02772732637822628,
-0.07499899715185165,
-0.04925999790430069,
0.009802999906241894,
0.020601317286491394,
0.035055600106716156,
-0.018889380618929863,
0.04938437417149544,
0.03793027997016907,
0.012831764295697212,
0.04674539342522621,
0.12675493955612183,
-0.020174551755189896,
-0.033826958388090134,
0.04131237417459488,
-0.06499606370925903,
0.02589389495551586,
0.028306759893894196,
0.08110854774713516,
0.06857912242412567,
0.042465876787900925,
0.009898969903588295,
0.03914555907249451,
0.05325593054294586,
-0.03496519476175308,
0.04792473092675209,
0.011080874130129814,
0.10108696669340134,
-0.006910675670951605,
-0.06530551612377167,
-0.04510505497455597,
0.08593733608722687,
0.04634982347488403,
-0.08564670383930206,
-0.04874894395470619,
0.08045891672372818,
0.11472033709287643,
0.03277673199772835,
-0.03494918718934059,
0.03220682591199875,
-0.08055640012025833,
0.003123402828350663,
0.011281667277216911,
-0.12534618377685547,
0.045833930373191833,
0.0404539480805397,
-0.06504873186349869,
-0.06093735620379448,
-0.02904845029115677,
0.07231750339269638,
-0.05755457282066345,
-0.024205278605222702,
0.004592114128172398,
0.009966227225959301,
0.04159602150321007,
-0.05115809291601181,
-0.0043548643589019775,
0.0013522360241040587,
-3.8659404743873996e-33,
0.03801150992512703,
0.008582518436014652,
-0.07211156189441681,
0.06338855624198914,
-0.024981597438454628,
-0.08667904138565063,
0.028241034597158432,
0.1231253519654274,
0.03624456003308296,
0.02468332275748253,
0.06448011100292206,
-0.08514036238193512,
0.025011545047163963,
-0.054459117352962494,
0.07211162894964218,
-0.03383567929267883,
0.10881076008081436,
-0.009089769795536995,
0.08015778660774231,
0.0867653489112854,
-0.004103934392333031,
0.08691483736038208,
-0.08659711480140686,
0.07606185972690582,
0.002001864602789283,
-0.024640299379825592,
-0.005536333657801151,
0.04182811826467514,
0.00733061833307147,
0.00382500933483243,
-0.0033721723593771458,
-0.09007564932107925,
-0.08812303841114044,
0.01942920871078968,
-0.114272840321064,
0.036786999553442,
0.014814645983278751,
0.027730843052268028,
-0.03552062809467316,
0.04254814609885216,
0.05450541153550148,
0.039220958948135376,
-0.021149734035134315,
0.018542099744081497,
-0.012618482112884521,
-0.020785296335816383,
-0.05848166346549988,
-0.009703799150884151,
-0.019815394654870033,
-0.055003803223371506,
0.013454335741698742,
-0.004104893654584885,
-0.04874219372868538,
-0.02754184976220131,
-0.006102407351136208,
-0.04634593427181244,
-0.01066146045923233,
-0.11149224638938904,
-0.05743234232068062,
-0.02080429159104824,
-0.022068003192543983,
0.037171777337789536,
0.0001729097421048209,
-0.12216414511203766,
0.029366502538323402,
-0.022474834695458412,
0.06585606932640076,
0.015210091136395931,
0.029571030288934708,
0.02564927376806736,
0.00929323025047779,
-0.06481517851352692,
0.010671744123101234,
0.030471419915556908,
0.003579639596864581,
-0.02854280173778534,
-0.03775135800242424,
0.04128077253699303,
0.052737265825271606,
-0.0874381735920906,
-0.054875582456588745,
0.004276643041521311,
-0.003774452954530716,
0.05953165143728256,
0.06077062711119652,
0.1072988212108612,
0.030624322593212128,
0.00010086634574690834,
0.03694090619683266,
0.055282145738601685,
0.013173972256481647,
0.027181707322597504,
0.045379847288131714,
0.08665740489959717,
-0.04556090012192726,
-5.316957896184249e-8,
-0.07406366616487503,
-0.07073071599006653,
-0.11541750282049179,
0.031116995960474014,
-0.05386863648891449,
-0.05645236000418663,
-0.0337698794901371,
-0.04346013814210892,
-0.06848010420799255,
-0.058412253856658936,
-0.02098729833960533,
-0.046860575675964355,
-0.07510945945978165,
0.03862224519252777,
-0.014464713633060455,
0.04980635643005371,
-0.009515685960650444,
0.10809193551540375,
-0.04776465892791748,
-0.06828926503658295,
0.022226767614483833,
0.06257850676774979,
0.01371992751955986,
-0.06970572471618652,
-0.020529408007860184,
0.06103267893195152,
-0.06156172603368759,
0.04314402863383293,
0.03786003217101097,
-0.007097016554325819,
0.026485729962587357,
0.024687284603714943,
0.006841948721557856,
-0.06299767643213272,
0.013741927221417427,
0.05623329430818558,
0.029034404084086418,
-0.010910642333328724,
-0.001634648535400629,
0.060567691922187805,
0.11271100491285324,
0.0486607626080513,
-0.058205924928188324,
0.018209857866168022,
0.02741345204412937,
-0.03926286846399307,
-0.05632886290550232,
-0.11193571239709854,
0.06141486391425133,
-0.051498740911483765,
0.11399495601654053,
-0.01566002145409584,
-0.034510836005210876,
0.048777781426906586,
0.05653402954339981,
0.02397581934928894,
0.06561760604381561,
-0.0371420793235302,
0.0698540210723877,
0.05344826728105545,
0.05605900660157204,
0.00151661632116884,
-0.04572317749261856,
-0.03202806040644646
] |
facebook/nllb-200-distilled-1.3B | b14baa07325b1cea23404c4d374d7eb469b1973d | 2022-07-19T15:45:28.000Z | [
"pytorch",
"m2m_100",
"text2text-generation",
"ace",
"acm",
"acq",
"aeb",
"af",
"ajp",
"ak",
"als",
"am",
"apc",
"ar",
"ars",
"ary",
"arz",
"as",
"ast",
"awa",
"ayr",
"azb",
"azj",
"ba",
"bm",
"ban",
"be",
"bem",
"bn",
"bho",
"bjn",
"bo",
"bs",
"bug",
"bg",
"ca",
"ceb",
"cs",
"cjk",
"ckb",
"crh",
"cy",
"da",
"de",
"dik",
"dyu",
"dz",
"el",
"en",
"eo",
"et",
"eu",
"ee",
"fo",
"fj",
"fi",
"fon",
"fr",
"fur",
"fuv",
"gaz",
"gd",
"ga",
"gl",
"gn",
"gu",
"ht",
"ha",
"he",
"hi",
"hne",
"hr",
"hu",
"hy",
"ig",
"ilo",
"id",
"is",
"it",
"jv",
"ja",
"kab",
"kac",
"kam",
"kn",
"ks",
"ka",
"kk",
"kbp",
"kea",
"khk",
"km",
"ki",
"rw",
"ky",
"kmb",
"kmr",
"knc",
"kg",
"ko",
"lo",
"lij",
"li",
"ln",
"lt",
"lmo",
"ltg",
"lb",
"lua",
"lg",
"luo",
"lus",
"lvs",
"mag",
"mai",
"ml",
"mar",
"min",
"mk",
"mt",
"mni",
"mos",
"mi",
"my",
"nl",
"nn",
"nb",
"npi",
"nso",
"nus",
"ny",
"oc",
"ory",
"pag",
"pa",
"pap",
"pbt",
"pes",
"plt",
"pl",
"pt",
"prs",
"quy",
"ro",
"rn",
"ru",
"sg",
"sa",
"sat",
"scn",
"shn",
"si",
"sk",
"sl",
"sm",
"sn",
"sd",
"so",
"st",
"es",
"sc",
"sr",
"ss",
"su",
"sv",
"swh",
"szl",
"ta",
"taq",
"tt",
"te",
"tg",
"tl",
"th",
"ti",
"tpi",
"tn",
"ts",
"tk",
"tum",
"tr",
"tw",
"tzm",
"ug",
"uk",
"umb",
"ur",
"uzn",
"vec",
"vi",
"war",
"wo",
"xh",
"ydd",
"yo",
"yue",
"zh",
"zsm",
"zu",
"dataset:flores-200",
"transformers",
"nllb",
"license:cc-by-nc-4.0",
"autotrain_compatible"
] | text2text-generation | false | facebook | null | facebook/nllb-200-distilled-1.3B | 1,155 | 2 | transformers | ---
language:
- ace
- acm
- acq
- aeb
- af
- ajp
- ak
- als
- am
- apc
- ar
- ars
- ary
- arz
- as
- ast
- awa
- ayr
- azb
- azj
- ba
- bm
- ban
- be
- bem
- bn
- bho
- bjn
- bo
- bs
- bug
- bg
- ca
- ceb
- cs
- cjk
- ckb
- crh
- cy
- da
- de
- dik
- dyu
- dz
- el
- en
- eo
- et
- eu
- ee
- fo
- fj
- fi
- fon
- fr
- fur
- fuv
- gaz
- gd
- ga
- gl
- gn
- gu
- ht
- ha
- he
- hi
- hne
- hr
- hu
- hy
- ig
- ilo
- id
- is
- it
- jv
- ja
- kab
- kac
- kam
- kn
- ks
- ka
- kk
- kbp
- kea
- khk
- km
- ki
- rw
- ky
- kmb
- kmr
- knc
- kg
- ko
- lo
- lij
- li
- ln
- lt
- lmo
- ltg
- lb
- lua
- lg
- luo
- lus
- lvs
- mag
- mai
- ml
- mar
- min
- mk
- mt
- mni
- mos
- mi
- my
- nl
- nn
- nb
- npi
- nso
- nus
- ny
- oc
- ory
- pag
- pa
- pap
- pbt
- pes
- plt
- pl
- pt
- prs
- quy
- ro
- rn
- ru
- sg
- sa
- sat
- scn
- shn
- si
- sk
- sl
- sm
- sn
- sd
- so
- st
- es
- sc
- sr
- ss
- su
- sv
- swh
- szl
- ta
- taq
- tt
- te
- tg
- tl
- th
- ti
- tpi
- tn
- ts
- tk
- tum
- tr
- tw
- tzm
- ug
- uk
- umb
- ur
- uzn
- vec
- vi
- war
- wo
- xh
- ydd
- yo
- yue
- zh
- zsm
- zu
language_details: "ace_Arab, ace_Latn, acm_Arab, acq_Arab, aeb_Arab, afr_Latn, ajp_Arab, aka_Latn, amh_Ethi, apc_Arab, arb_Arab, ars_Arab, ary_Arab, arz_Arab, asm_Beng, ast_Latn, awa_Deva, ayr_Latn, azb_Arab, azj_Latn, bak_Cyrl, bam_Latn, ban_Latn,bel_Cyrl, bem_Latn, ben_Beng, bho_Deva, bjn_Arab, bjn_Latn, bod_Tibt, bos_Latn, bug_Latn, bul_Cyrl, cat_Latn, ceb_Latn, ces_Latn, cjk_Latn, ckb_Arab, crh_Latn, cym_Latn, dan_Latn, deu_Latn, dik_Latn, dyu_Latn, dzo_Tibt, ell_Grek, eng_Latn, epo_Latn, est_Latn, eus_Latn, ewe_Latn, fao_Latn, pes_Arab, fij_Latn, fin_Latn, fon_Latn, fra_Latn, fur_Latn, fuv_Latn, gla_Latn, gle_Latn, glg_Latn, grn_Latn, guj_Gujr, hat_Latn, hau_Latn, heb_Hebr, hin_Deva, hne_Deva, hrv_Latn, hun_Latn, hye_Armn, ibo_Latn, ilo_Latn, ind_Latn, isl_Latn, ita_Latn, jav_Latn, jpn_Jpan, kab_Latn, kac_Latn, kam_Latn, kan_Knda, kas_Arab, kas_Deva, kat_Geor, knc_Arab, knc_Latn, kaz_Cyrl, kbp_Latn, kea_Latn, khm_Khmr, kik_Latn, kin_Latn, kir_Cyrl, kmb_Latn, kon_Latn, kor_Hang, kmr_Latn, lao_Laoo, lvs_Latn, lij_Latn, lim_Latn, lin_Latn, lit_Latn, lmo_Latn, ltg_Latn, ltz_Latn, lua_Latn, lug_Latn, luo_Latn, lus_Latn, mag_Deva, mai_Deva, mal_Mlym, mar_Deva, min_Latn, mkd_Cyrl, plt_Latn, mlt_Latn, mni_Beng, khk_Cyrl, mos_Latn, mri_Latn, zsm_Latn, mya_Mymr, nld_Latn, nno_Latn, nob_Latn, npi_Deva, nso_Latn, nus_Latn, nya_Latn, oci_Latn, gaz_Latn, ory_Orya, pag_Latn, pan_Guru, pap_Latn, pol_Latn, por_Latn, prs_Arab, pbt_Arab, quy_Latn, ron_Latn, run_Latn, rus_Cyrl, sag_Latn, san_Deva, sat_Beng, scn_Latn, shn_Mymr, sin_Sinh, slk_Latn, slv_Latn, smo_Latn, sna_Latn, snd_Arab, som_Latn, sot_Latn, spa_Latn, als_Latn, srd_Latn, srp_Cyrl, ssw_Latn, sun_Latn, swe_Latn, swh_Latn, szl_Latn, tam_Taml, tat_Cyrl, tel_Telu, tgk_Cyrl, tgl_Latn, tha_Thai, tir_Ethi, taq_Latn, taq_Tfng, tpi_Latn, tsn_Latn, tso_Latn, tuk_Latn, tum_Latn, tur_Latn, twi_Latn, tzm_Tfng, uig_Arab, ukr_Cyrl, umb_Latn, urd_Arab, uzn_Latn, vec_Latn, vie_Latn, war_Latn, wol_Latn, xho_Latn, ydd_Hebr, yor_Latn, yue_Hant, zho_Hans, zho_Hant, zul_Latn"
tags:
- nllb
license: "cc-by-nc-4.0"
datasets:
- flores-200
metrics:
- bleu
- spbleu
- chrf++
---
# NLLB-200
This is the model card of NLLB-200's distilled 1.3B variant.
Here are the [metrics](https://tinyurl.com/nllb200densedst1bmetrics) for that particular checkpoint.
- Information about training algorithms, parameters, fairness constraints or other applied approaches, and features: the exact training algorithm, data, and the strategies used to handle data imbalances for high- and low-resource languages when training NLLB-200 are described in the paper.
- Paper or other resource for more information: NLLB Team et al., No Language Left Behind: Scaling Human-Centered Machine Translation, arXiv, 2022
- License: CC-BY-NC
- Where to send questions or comments about the model: https://github.com/facebookresearch/fairseq/issues
## Intended Use
- Primary intended uses: NLLB-200 is a machine translation model primarily intended for research in machine translation, especially for low-resource languages. It allows for single-sentence translation among 200 languages. Information on how to use the model can be found in the Fairseq code repository along with the training code and references to evaluation and training data. (An illustrative loading sketch with `transformers` follows this list.)
- Primary intended users: Primary users are researchers and the machine translation research community.
- Out-of-scope use cases: NLLB-200 is a research model and is not released for production deployment. NLLB-200 is trained on general-domain text data and is not intended to be used with domain-specific texts, such as medical or legal documents. The model is not intended to be used for document translation. The model was trained with input lengths not exceeding 512 tokens, therefore translating longer sequences might result in quality degradation. NLLB-200 translations cannot be used as certified translations.
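As an illustration only (not part of the original model card), the checkpoint can be loaded through the `transformers` seq2seq classes; the FLORES-200 language codes listed above select the source and target languages:
```python
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

name = "facebook/nllb-200-distilled-1.3B"
# src_lang sets the source-language tag the tokenizer prepends to the input.
tokenizer = AutoTokenizer.from_pretrained(name, src_lang="eng_Latn")
model = AutoModelForSeq2SeqLM.from_pretrained(name)

inputs = tokenizer(
    "NLLB-200 translates single sentences between 200 languages.",
    return_tensors="pt",
)
# Force the decoder to start with the target-language tag (here: French).
out = model.generate(
    **inputs,
    forced_bos_token_id=tokenizer.convert_tokens_to_ids("fra_Latn"),
    max_length=64,
)
print(tokenizer.batch_decode(out, skip_special_tokens=True))
```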
## Metrics
• Model performance measures: The NLLB-200 model was evaluated using the BLEU, spBLEU, and chrF++ metrics, which are widely adopted by the machine translation community. Additionally, we performed human evaluation with the XSTS protocol and measured the toxicity of the generated translations.
## Evaluation Data
- Datasets: The Flores-200 dataset is described in Section 4 of the paper
- Motivation: We used Flores-200 as it provides full evaluation coverage of the languages in NLLB-200
- Preprocessing: Sentence-split raw text data was preprocessed using SentencePiece. The SentencePiece model is released along with NLLB-200.
## Training Data
• We used parallel multilingual data from a variety of sources to train the model. We provide a detailed report on the data selection and construction process in Section 5 of the paper. We also used monolingual data constructed from Common Crawl. We provide more details in Section 5.2.
## Ethical Considerations
• In this work, we took a reflexive approach in technological development to ensure that we prioritize human users and minimize risks that could be transferred to them. While we reflect on our ethical considerations throughout the article, here are some additional points to highlight. For one, many languages chosen for this study are low-resource languages, with a heavy emphasis on African languages. While quality translation could improve education and information access in many of these communities, such access could also make groups with lower levels of digital literacy more vulnerable to misinformation or online scams. The latter scenarios could arise if bad actors misappropriate our work for nefarious activities, which we conceive of as an example of unintended use. Regarding data acquisition, the training data used for model development were mined from various publicly available sources on the web. Although we invested heavily in data cleaning, personally identifiable information may not be entirely eliminated. Finally, although we did our best to optimize for translation quality, mistranslations produced by the model could remain. Although the odds are low, this could have an adverse impact on those who rely on these translations to make important decisions (particularly when related to health and safety).
## Caveats and Recommendations
• Our model has been tested on the Wikimedia domain with limited investigation on other domains supported in NLLB-MD. In addition, the supported languages may have variations that our model is not capturing. Users should make appropriate assessments.
## Carbon Footprint Details
• The carbon dioxide (CO2e) estimate is reported in Section 8.8. | [
-0.0747252032160759,
0.0533888153731823,
-0.033430878072977066,
-0.055815599858760834,
-0.0011690609389916062,
0.004526800476014614,
0.10390043258666992,
-0.03787222504615784,
0.013742909766733646,
0.030754439532756805,
0.10300511121749878,
-0.045150306075811386,
0.08489503711462021,
-0.035055290907621384,
-0.09264793992042542,
-0.0014136917889118195,
-0.0922417864203453,
0.03928272798657417,
-0.0608409158885479,
-0.05286221206188202,
-0.04463565722107887,
0.01086440309882164,
-0.004518928937613964,
-0.009781417436897755,
0.02244248427450657,
0.09576168656349182,
-0.0547272264957428,
0.12192397564649582,
0.0024990118108689785,
-0.11582695692777634,
-0.01380478497594595,
0.09412999451160431,
0.10207995027303696,
-0.017858760431408882,
0.05694899335503578,
0.027563435956835747,
-0.09911371022462845,
-0.03516742214560509,
0.015162240713834763,
0.02148371748626232,
-0.00025669706519693136,
-0.031614065170288086,
-0.019114287570118904,
-0.005737617611885071,
0.06014912575483322,
0.01297408901154995,
-0.08530933409929276,
-0.008118776604533195,
0.024732954800128937,
-0.038934677839279175,
-0.03682970255613327,
0.05297454819083214,
-0.06966105103492737,
0.08317205309867859,
-0.04740709438920021,
-0.03595395386219025,
-0.03438596799969673,
0.021656081080436707,
0.058554355055093765,
0.019937213510274887,
-0.004868014249950647,
0.0521281436085701,
-0.08870099484920502,
0.04248534515500069,
-0.04010321944952011,
-0.02528727799654007,
0.008491205982863903,
-0.05365270748734474,
-0.05601850897073746,
0.10446098446846008,
0.008206430822610855,
-0.0473964661359787,
-0.022165419533848763,
0.11626270413398743,
-0.041045039892196655,
0.06658847630023956,
-0.024518325924873352,
-0.021187862381339073,
0.010407154448330402,
-0.054642725735902786,
-0.03714491054415703,
-0.021402765065431595,
0.0605124905705452,
-0.04610284045338631,
0.01928560994565487,
0.06525023281574249,
-0.023691244423389435,
0.016337629407644272,
0.01567930355668068,
-0.03806399554014206,
0.01240178756415844,
-0.0509042888879776,
0.018527258187532425,
0.004486917518079281,
-0.08555052429437637,
0.015021135099232197,
0.009210276417434216,
0.0689755454659462,
-0.06509660929441452,
0.11784003674983978,
0.003299734788015485,
0.005230587441474199,
0.013479895889759064,
-0.031401582062244415,
-0.061082158237695694,
-0.024330077692866325,
-0.0015407747123390436,
0.04185982793569565,
0.044749412685632706,
-0.05410408228635788,
-0.06095381826162338,
-0.020480183884501457,
0.013712683692574501,
-0.10409771651029587,
-0.007085882592946291,
0.03240162879228592,
-0.06004360318183899,
-0.042156681418418884,
0.09655249118804932,
0.05917566642165184,
0.017437350004911423,
-0.05882265791296959,
-0.1029951274394989,
0.02617381140589714,
-0.015833601355552673,
0.0010830037062987685,
0.005645715165883303,
-1.1058173823547772e-33,
0.06850104033946991,
0.042876385152339935,
0.0017903721891343594,
-0.06076929345726967,
0.029601112008094788,
-0.0813407152891159,
-0.05832494795322418,
-0.02422313019633293,
-0.07637937366962433,
0.040425658226013184,
-0.0019085679668933153,
-0.02581690065562725,
-0.004699903540313244,
-0.025583377107977867,
0.07224128395318985,
-0.004244441632181406,
0.01370013877749443,
-0.04917437955737114,
-0.0878528356552124,
0.0461607426404953,
0.04295177012681961,
0.0653655081987381,
0.04569398984313011,
-0.021031944081187248,
-0.014243283309042454,
0.034731484949588776,
0.00378496665507555,
-0.032661858946084976,
0.03682389110326767,
0.044476378709077835,
-0.01328261848539114,
-0.013971459120512009,
-0.07571369409561157,
-0.020063431933522224,
-0.06255511939525604,
-0.05421620234847069,
-0.025155896320939064,
-0.005586553830653429,
-0.05408412218093872,
-0.07956851273775101,
-0.00701222475618124,
-0.013466859236359596,
-0.07703928649425507,
0.025509903207421303,
0.06669013947248459,
0.05357345566153526,
-0.02804437093436718,
-0.03469168767333031,
-0.02413083054125309,
0.005351295694708824,
0.009285572916269302,
0.015497725456953049,
0.02610524371266365,
0.05678465962409973,
0.011566849425435066,
0.03554214909672737,
0.011160521768033504,
0.031731169670820236,
0.04317567124962807,
0.05461505800485611,
0.058563489466905594,
0.026142166927456856,
0.00860652606934309,
0.04334830120205879,
0.05961824208498001,
-0.0724332332611084,
-0.02220204472541809,
-0.006241998169571161,
0.033225372433662415,
-0.019441088661551476,
-0.03818508982658386,
-0.05824660137295723,
0.052147187292575836,
0.07595910131931305,
-0.03064858727157116,
-0.0008250984246842563,
0.0223345085978508,
-0.03642826899886131,
0.007726007606834173,
-0.007584333419799805,
-0.06740368902683258,
-0.047316256910562515,
0.008028355427086353,
-0.05236590653657913,
-0.016910914331674576,
0.0029080857057124376,
0.006178725510835648,
-0.064290851354599,
-0.04419022053480148,
-0.00014166030450724065,
-0.06691545993089676,
0.1045355573296547,
0.025705767795443535,
-0.07532435655593872,
-0.11069506406784058,
7.734078755868852e-34,
-0.009794851765036583,
0.030681457370519638,
-0.026973601430654526,
0.061067111790180206,
-0.060538459569215775,
-0.0030981849413365126,
0.12022794038057327,
0.10600463300943375,
0.06934072077274323,
0.032899122685194016,
0.06938397139310837,
0.009631721302866936,
0.034622639417648315,
-0.07750081270933151,
0.13259710371494293,
-0.013048657216131687,
-0.011533175595104694,
0.12690305709838867,
0.043120238929986954,
0.11696480214595795,
-0.05616455152630806,
0.046475011855363846,
-0.04478283226490021,
0.0782645046710968,
-0.027459386736154556,
0.025200551375746727,
0.05700811743736267,
0.01289233099669218,
0.047900911420583725,
0.025048350915312767,
0.045266520231962204,
-0.019431097432971,
-0.05985627323389053,
0.049271658062934875,
-0.0600777268409729,
-0.06585124135017395,
-0.06262805312871933,
0.05424260348081589,
-0.05327678099274635,
0.07501939684152603,
-0.04028010740876198,
0.052709076553583145,
0.007071062456816435,
-0.018166977912187576,
0.02693055011332035,
-0.01135310623794794,
0.01108525786548853,
-0.003444351488724351,
-0.06698621064424515,
-0.07893185317516327,
0.08462634682655334,
0.03584963083267212,
-0.046718522906303406,
0.003363260067999363,
0.034398507326841354,
0.003327806480228901,
-0.003492838703095913,
-0.08155471086502075,
-0.08072390407323837,
0.03352627530694008,
-0.03726745769381523,
-0.025286586955189705,
0.010010000318288803,
-0.07425525039434433,
0.09560316801071167,
0.03820839524269104,
-0.003638407913967967,
0.07000242918729782,
0.04281983897089958,
-0.04172484204173088,
0.041670773178339005,
-0.01985280029475689,
-0.12577761709690094,
0.014859534800052643,
0.06123047322034836,
-0.004287872929126024,
-0.06203824654221535,
0.026683088392019272,
-0.03944623842835426,
-0.04426730051636696,
-0.016890157014131546,
-0.051392875611782074,
-0.04312274605035782,
0.05167824774980545,
0.013001223094761372,
-0.022208085283637047,
-0.08963770419359207,
-0.016560813412070274,
0.04738824442028999,
0.024578172713518143,
0.025586042553186417,
0.036777596920728683,
0.03662465140223503,
0.09006272256374359,
0.01027766615152359,
-4.487257498908548e-8,
-0.037165142595767975,
-0.04196903482079506,
0.0007214924553409219,
-0.009062995202839375,
0.13319416344165802,
-0.07683858275413513,
-0.015329715795814991,
-0.0847618505358696,
0.01600472442805767,
-0.08772759139537811,
0.06886982172727585,
0.014681259170174599,
-0.08432541787624359,
0.0062378025613725185,
-0.02878260426223278,
0.060520417988300323,
-0.04105524718761444,
0.1545703113079071,
-0.015637468546628952,
-0.02425430901348591,
0.021870775148272514,
0.051479268819093704,
0.052970245480537415,
-0.029721610248088837,
-0.04140932857990265,
0.039853040128946304,
0.005390312988311052,
0.04428427666425705,
0.03264230117201805,
-0.025158826261758804,
-0.032649751752614975,
-0.007534271106123924,
0.03921835124492645,
-0.05250535160303116,
-0.03149467334151268,
0.04207955673336983,
-0.01592702977359295,
-0.03651579096913338,
-0.019968241453170776,
0.02365696243941784,
0.03879981487989426,
0.06924402713775635,
0.028654389083385468,
0.04351348057389259,
-0.053307052701711655,
-0.03301019221544266,
0.01764470525085926,
-0.03041774034500122,
0.02439715713262558,
-0.12102571129798889,
0.011686045676469803,
0.04438808932900429,
-0.02619810216128826,
-0.038857750594615936,
0.008582721464335918,
0.015414940193295479,
0.053964000195264816,
0.0064878216944634914,
0.005187722854316235,
0.008031453005969524,
0.09333532303571701,
0.026289889588952065,
0.033794183284044266,
0.020168675109744072
] |
cambridgeltl/magic_mscoco | e0cfb935df539629d5abb2ecdc925aef3ecf35fa | 2022-04-08T14:39:58.000Z | [
"pytorch",
"gpt2",
"text-generation",
"transformers"
] | text-generation | false | cambridgeltl | null | cambridgeltl/magic_mscoco | 1,154 | null | transformers | Entry not found | [
0.0461147278547287,
-0.038838207721710205,
-0.01049656979739666,
-0.03682169318199158,
0.011261860840022564,
0.013094935566186905,
0.0019101888174191117,
-0.013979103416204453,
0.027092741802334785,
-0.015212527476251125,
0.017284274101257324,
-0.08189476281404495,
0.03817418962717056,
-0.04920130595564842,
0.021389011293649673,
-0.015245908871293068,
-0.03203780576586723,
-0.1245758980512619,
0.03150877356529236,
0.032381657510995865,
-0.060957908630371094,
0.05409295856952667,
-0.025087490677833557,
0.01568586938083172,
0.028129950165748596,
-0.04710396006703377,
-0.018688226118683815,
0.013785239309072495,
-0.04001208767294884,
0.01173911802470684,
-0.04317743331193924,
0.05500618368387222,
0.004543041344732046,
0.02973111905157566,
0.14852192997932434,
0.02658126689493656,
0.02907961793243885,
-0.05169107764959335,
0.05803573504090309,
-0.07732241600751877,
-0.017637968063354492,
-0.04219653457403183,
0.041807834059000015,
0.023620979860424995,
0.021563321352005005,
0.016478516161441803,
-0.0021814992651343346,
-0.06400240957736969,
0.06393089145421982,
0.019599027931690216,
-0.08565037697553635,
0.00934905931353569,
-0.008718925528228283,
-0.028583496809005737,
-0.07310017943382263,
0.09416428208351135,
0.001759322709403932,
0.06184990331530571,
0.011840506456792355,
-0.035997264087200165,
0.08358278125524521,
-0.02619801089167595,
0.03736566752195358,
-0.028206506744027138,
-0.07454850524663925,
-0.08883563429117203,
-0.06279942393302917,
-0.008695344440639019,
0.014119276776909828,
-0.0825355276465416,
0.0649217739701271,
-0.00223911227658391,
-0.14716917276382446,
0.07743025571107864,
-0.03548373281955719,
-0.055201586335897446,
0.006981803569942713,
-0.012166670523583889,
0.055111464112997055,
-0.007116836030036211,
-0.023175746202468872,
-0.005835152696818113,
-0.09185640513896942,
0.055196937173604965,
0.034148022532463074,
0.03835180774331093,
0.038685429841279984,
-0.025987252593040466,
0.017804903909564018,
0.022428328171372414,
0.025005368515849113,
-0.10761535167694092,
-0.048001550137996674,
-0.04343584179878235,
0.012374646961688995,
-0.019502125680446625,
0.029218152165412903,
0.0842173621058464,
-0.011719699949026108,
0.09283553808927536,
-0.007015465293079615,
-0.03543110564351082,
-0.06936459988355637,
0.09425332397222519,
-0.010958523489534855,
-0.00805904995650053,
0.004974212497472763,
-0.0031528924591839314,
0.06105927750468254,
-0.03964288905262947,
-0.03619541600346565,
-0.019901901483535767,
0.07134733349084854,
0.039514873176813126,
-0.012729483656585217,
-0.006646515801548958,
-0.04746140539646149,
-0.014432490803301334,
-0.05157482624053955,
0.09506245702505112,
-0.049747664481401443,
-0.04591796174645424,
-0.008965466171503067,
-0.0325421579182148,
-0.08626784384250641,
-0.06624380499124527,
0.02538885548710823,
-4.303924894057984e-33,
0.01133066974580288,
0.0033434738870710135,
-0.002155609894543886,
0.04871906340122223,
-0.023564351722598076,
-0.07933273911476135,
0.0600903145968914,
0.02335330657660961,
-0.03844716399908066,
-0.020433755591511726,
-0.06952055543661118,
-0.03235611692070961,
0.0062485747039318085,
0.064804308116436,
-0.03201229125261307,
0.061689723283052444,
0.0417000837624073,
-0.00761845987290144,
0.03340127319097519,
-0.047770582139492035,
0.00887306872755289,
-0.04066338762640953,
-0.010506896302103996,
0.0106519665569067,
0.021333497017621994,
0.12854498624801636,
-0.009705503471195698,
0.010055632330477238,
-0.017507633194327354,
0.006515394430607557,
0.06334009766578674,
-0.057817306369543076,
0.013668818399310112,
-0.020286159589886665,
0.05430467426776886,
-0.023184705525636673,
0.0828516036272049,
0.0005449643940664828,
-0.10372652113437653,
-0.07634282112121582,
-0.005381610710173845,
-0.039263784885406494,
0.0006114727002568543,
-0.013281986117362976,
0.07119110971689224,
0.043696220964193344,
0.03168422728776932,
0.04338686540722847,
0.05728672817349434,
0.0832006186246872,
-0.07961414009332657,
0.015234283171594143,
0.017002005130052567,
0.047004107385873795,
-0.09794387966394424,
0.004990279674530029,
-0.07062993198633194,
-0.028000490739941597,
-0.04018733277916908,
-0.0702052190899849,
0.011351344175636768,
0.06020182743668556,
-0.03297270089387894,
0.09396500885486603,
0.03417910635471344,
-0.019825750961899757,
-0.034690454602241516,
-0.013036907650530338,
0.05896938592195511,
-0.012359356507658958,
-0.017275206744670868,
-0.07982361316680908,
0.02059139870107174,
0.06737419217824936,
0.04176458343863487,
-0.04978838190436363,
-0.05877475067973137,
-0.06289287656545639,
-0.03354167565703392,
-0.03871942684054375,
0.009898529388010502,
-0.05514208599925041,
-0.11629002541303635,
-0.011855563148856163,
0.10663620382547379,
0.037354156374931335,
-0.0065480442717671394,
-0.051189567893743515,
0.06663123518228531,
0.01874656230211258,
0.032841797918081284,
0.041593004018068314,
-0.06879369914531708,
0.04216769337654114,
-0.01628219522535801,
5.4139394340936695e-34,
0.05697013810276985,
-0.006972255185246468,
0.015711724758148193,
-0.17956365644931793,
0.02320219948887825,
0.007923615165054798,
-0.008062449283897877,
0.0074974060989916325,
0.07391711324453354,
0.0309313777834177,
0.060510627925395966,
0.058605875819921494,
0.09515274316072464,
-0.002282935893163085,
0.001603541080839932,
0.07024981826543808,
0.012629246339201927,
0.07425693422555923,
-0.038426291197538376,
0.01861148327589035,
0.030608950182795525,
-0.02449394389986992,
0.021528491750359535,
-0.003039651783183217,
-0.03676343336701393,
0.03130284696817398,
0.07998586446046829,
0.010451192036271095,
-0.07930229604244232,
-0.013543923385441303,
0.018781835213303566,
0.05168003588914871,
-0.07191970944404602,
0.15783067047595978,
0.026191607117652893,
0.01262354850769043,
0.08218053728342056,
-0.029807550832629204,
-0.07528624683618546,
-0.04250097647309303,
0.017244765534996986,
0.04411793500185013,
0.03708017244935036,
0.009233047254383564,
-0.040271829813718796,
0.022496428340673447,
0.02495843544602394,
0.07633638381958008,
0.005147108342498541,
0.013892097398638725,
0.05610476806759834,
-0.06684739887714386,
0.05862557515501976,
-0.020688841119408607,
0.05377643182873726,
0.06718500703573227,
0.005329249892383814,
-0.01388032827526331,
0.029931528493762016,
0.009508464485406876,
-0.045173756778240204,
0.11534366756677628,
-0.06510116159915924,
0.05117698386311531,
-0.0026125339791178703,
-0.08554837852716446,
-0.03784770518541336,
0.0804959163069725,
0.011298024095594883,
-0.07695550471544266,
-0.04868878796696663,
0.02515520341694355,
0.06252261996269226,
-0.04509226232767105,
-0.01246943511068821,
0.028559505939483643,
-0.030573077499866486,
0.05066261067986488,
-0.08187384903430939,
0.04469604790210724,
0.0034051244147121906,
0.04145054519176483,
-0.021858664229512215,
-0.06112268194556236,
-0.00908052921295166,
-0.05903250351548195,
0.0259539932012558,
0.059690944850444794,
-0.07613514363765717,
-0.03720718249678612,
-0.036316655576229095,
0.07058046013116837,
-0.008224100805819035,
0.041961874812841415,
-0.0285952128469944,
-1.496900736697171e-8,
-0.0014124972512945533,
0.03401879221200943,
-0.040338415652513504,
0.04116074740886688,
0.0935964286327362,
-0.05115952715277672,
0.0008746005478315055,
-0.03389839455485344,
-0.00567849725484848,
-0.010686947964131832,
-0.04789939522743225,
-0.04820054769515991,
-0.02011880651116371,
-0.03209094703197479,
-0.04211259260773659,
-0.10229527950286865,
-0.07819421589374542,
-0.031228765845298767,
-0.02154778689146042,
-0.04960230365395546,
0.08087796717882156,
-0.07801242172718048,
0.06919731199741364,
-0.04999840259552002,
0.03687043860554695,
0.03889009356498718,
-0.049989692866802216,
-0.04254625365138054,
-0.04606937617063522,
0.08682432025671005,
-0.031148413196206093,
0.11826753616333008,
0.034102488309144974,
-0.0208592489361763,
-0.0205202866345644,
0.027134142816066742,
0.09741277992725372,
0.051608603447675705,
0.013477512635290623,
-0.13649295270442963,
-0.022304272279143333,
0.02385953813791275,
0.038732077926397324,
-0.09249968826770782,
-0.04549082741141319,
0.054220106452703476,
0.01160438358783722,
0.051190607249736786,
0.07713303714990616,
-0.022097084671258926,
-0.06127818301320076,
-0.01857956498861313,
0.006740490905940533,
-0.00496308971196413,
0.024095389991998672,
0.0736224576830864,
-0.003481915919110179,
-0.0699305310845375,
-0.006629763171076775,
-0.0598808117210865,
0.05297163128852844,
-0.02902800403535366,
-0.027858933433890343,
-0.01287526823580265
] |
NlpHUST/vibert4news-base-cased | d0926f978504f72d29bea14d7315b9e3ef09f292 | 2021-08-10T03:13:56.000Z | [
"pytorch",
"fill-mask",
"vn",
"transformers",
"autotrain_compatible"
] | fill-mask | false | NlpHUST | null | NlpHUST/vibert4news-base-cased | 1,149 | 1 | transformers | ---
language: vn
---
# BERT for Vietnamese, trained on more than 20 GB of news data
Applied to sentiment analysis using [AIViVN's comments dataset](https://www.aivivn.com/contests/6).
The model achieved 0.90268 on the public leaderboard (the winner's score is 0.90087). A minimal fine-tuning sketch for this kind of setup follows below.
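As a rough illustration of that sentiment-analysis setup (this sketch is not part of the original training code), the snippet below fine-tunes vibert4news as a binary classifier with `BertForSequenceClassification`; the toy comments, the label convention and the hyperparameters are assumptions for illustration.
```python
# Minimal fine-tuning sketch (assumed setup): toy comments, label convention
# (1 = positive, 0 = negative) and hyperparameters are illustrative only.
import torch
from torch.utils.data import DataLoader, TensorDataset
from transformers import BertTokenizer, BertForSequenceClassification
tokenizer = BertTokenizer.from_pretrained("NlpHUST/vibert4news-base-cased")
model = BertForSequenceClassification.from_pretrained("NlpHUST/vibert4news-base-cased", num_labels=2)
texts = ["Sản phẩm rất tốt .", "Giao hàng quá chậm ."]  # toy comments (assumed)
labels = torch.tensor([1, 0])
enc = tokenizer(texts, padding=True, truncation=True, max_length=256, return_tensors="pt")
dataset = TensorDataset(enc["input_ids"], enc["attention_mask"], labels)
loader = DataLoader(dataset, batch_size=2, shuffle=True)
optimizer = torch.optim.AdamW(model.parameters(), lr=2e-5)
model.train()
for input_ids, attention_mask, batch_labels in loader:
    outputs = model(input_ids=input_ids, attention_mask=attention_mask, labels=batch_labels)
    outputs.loss.backward()
    optimizer.step()
    optimizer.zero_grad()
```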
Bert4news is also used in the ViNLP toolkit for Vietnamese word segmentation and Named Entity Recognition (https://github.com/bino282/ViNLP).
We use word-level SentencePiece, basic BERT tokenization, and the same configuration as BERT base, with `lowercase = False`.
You can download the trained model:
- [tensorflow](https://drive.google.com/file/d/1X-sRDYf7moS_h61J3L79NkMVGHP-P-k5/view?usp=sharing).
- [pytorch](https://drive.google.com/file/d/11aFSTpYIurn-oI2XpAmcCTccB_AonMOu/view?usp=sharing).
Use with huggingface/transformers
```python
import torch
from transformers import BertTokenizer, BertModel
tokenizer = BertTokenizer.from_pretrained("NlpHUST/vibert4news-base-cased")
bert_model = BertModel.from_pretrained("NlpHUST/vibert4news-base-cased")
line = "Tôi là sinh viên trường Bách Khoa Hà Nội ."
input_id = tokenizer.encode(line, add_special_tokens=True)
att_mask = [int(token_id > 0) for token_id in input_id]  # attention mask over real tokens
input_ids = torch.tensor([input_id])
att_masks = torch.tensor([att_mask])
# extract contextual features for the sentence
with torch.no_grad():
    features = bert_model(input_ids, att_masks)
print(features)
```
# Vietnamese toolkit with BERT
ViNLP is an annotation system for Vietnamese. It fine-tunes the pretrained [Bert4news](https://github.com/bino282/bert4news/) model on Vietnamese NLP tasks such as word segmentation and Named Entity Recognition (NER), and achieves high accuracy.
### Installation
```bash
git clone https://github.com/bino282/ViNLP.git
cd ViNLP
python setup.py develop build
```
### Test Segmentation
The model achieved an F1 score of 0.984 on the VLSP 2013 dataset:
|Model | F1 |
|--------|-----------|
| **BertVnTokenizer** | 98.40 |
| **DongDu** | 96.90 |
| **JvnSegmenter-Maxent** | 97.00 |
| **JvnSegmenter-CRFs** | 97.06 |
| **VnTokenizer** | 97.33 |
| **UETSegmenter** | 97.87 |
| **VnCoreNLP (i.e. RDRsegmenter)** | 97.90 |
```python
from ViNLP import BertVnTokenizer
tokenizer = BertVnTokenizer()
sentences = tokenizer.split(["Tổng thống Donald Trump ký sắc lệnh cấm mọi giao dịch của Mỹ với ByteDance và Tecent - chủ sở hữu của 2 ứng dụng phổ biến TikTok và WeChat sau 45 ngày nữa."])
print(sentences[0])
```
``` bash
Tổng_thống Donald_Trump ký sắc_lệnh cấm mọi giao_dịch của Mỹ với ByteDance và Tecent - chủ_sở_hữu của 2 ứng_dụng phổ_biến TikTok và WeChat sau 45 ngày nữa .
```
### Test Named Entity Recognition
The model achieved an F1 score of 0.786 on VLSP 2018 for all named entities, including nested entities:
|Model | F1 |
|--------|-----------|
| **BertVnNer** | 78.60 |
| **VNER Attentive Neural Network** | 77.52 |
| **vietner CRF (ngrams + word shapes + cluster + w2v)** | 76.63 |
| **ZA-NER BiLSTM** | 74.70 |
```python
from ViNLP import BertVnNer
bert_ner_model = BertVnNer()
sentence = "Theo SCMP, báo cáo của CSIS với tên gọi Định hình Tương lai Chính sách của Mỹ với Trung Quốc cũng cho thấy sự ủng hộ tương đối rộng rãi của các chuyên gia về việc cấm Huawei, tập đoàn viễn thông khổng lồ của Trung Quốc"
entities = bert_ner_model.annotate([sentence])
print(entities)
```
``` bash
[{'ORGANIZATION': ['SCMP', 'CSIS', 'Huawei'], 'LOCATION': ['Mỹ', 'Trung Quốc']}]
```
Run training with the base config:
``` bash
python train_pytorch.py \
  --model_path=bert4news.pytorch \
  --max_len=200 \
  --batch_size=16 \
  --epochs=6 \
  --lr=2e-5
```
### Contact information
For personal communication related to this project, please contact Nha Nguyen Van ([email protected]).
| [
-0.052404940128326416,
-0.04317978397011757,
0.009323809295892715,
-0.007804179564118385,
0.02861930802464485,
0.0895891934633255,
-0.023563839495182037,
0.0536334328353405,
0.026925677433609962,
-0.0012797705130651593,
-0.014446975663304329,
-0.0487176887691021,
-0.004958587232977152,
0.08405309170484543,
0.016101688146591187,
0.05718265846371651,
0.050699371844530106,
0.008408763445913792,
-0.04968338459730148,
-0.07679497450590134,
0.019800551235675812,
0.05377877503633499,
0.00767596997320652,
-0.034630004316568375,
0.009097523055970669,
-0.02301333285868168,
-0.06785669177770615,
-0.0305471308529377,
0.045236214995384216,
0.03581587225198746,
0.08148036152124405,
0.002586433431133628,
0.03845628723502159,
0.12519042193889618,
0.02593565545976162,
0.019861184060573578,
-0.030375661328434944,
-0.06794083118438721,
0.03251874819397926,
0.022240372374653816,
-0.01972310245037079,
-0.05426594242453575,
-0.07744982838630676,
-0.019817516207695007,
0.12233619391918182,
-0.004008671268820763,
-0.014590879902243614,
0.014069483615458012,
0.004307644907385111,
-0.00479210214689374,
-0.05670297518372536,
-0.013935315422713757,
0.053584024310112,
0.06481501460075378,
-0.07241323590278625,
0.002899880288168788,
0.010075998492538929,
0.06347727030515671,
-0.01524292677640915,
-0.0924677699804306,
-0.0439370758831501,
-0.049492739140987396,
-0.006779784336686134,
-0.05055361986160278,
-0.10088218003511429,
0.039904557168483734,
-0.04275400936603546,
0.04565715044736862,
-0.00957159698009491,
0.01140428613871336,
0.029588593170046806,
0.10341981053352356,
0.03468279540538788,
0.010795320384204388,
-0.05179502069950104,
-0.053419217467308044,
0.1054222360253334,
-0.00571111124008894,
0.13564664125442505,
-0.07896670699119568,
-0.023356394842267036,
0.0011609727516770363,
0.06574515998363495,
-0.011427044868469238,
0.06902457028627396,
-0.00030721130315214396,
0.03557135537266731,
-0.011250444687902927,
-0.01604277268052101,
0.01503732893615961,
-0.03499680012464523,
-0.1222870722413063,
0.09481555968523026,
-0.034578628838062286,
0.05717671662569046,
0.02457713708281517,
-0.03161351755261421,
-0.03776497021317482,
-0.009049816057085991,
0.08054031431674957,
0.0075501385144889355,
0.01827559806406498,
-0.018850939348340034,
-0.0987958163022995,
0.022319534793496132,
0.0915740504860878,
0.0410456582903862,
0.004629652947187424,
0.017796359956264496,
-0.09172355383634567,
-0.030329115688800812,
0.06307598948478699,
0.013792253099381924,
-0.07131221145391464,
0.019690554589033127,
-0.0323946513235569,
-0.03570394217967987,
-0.0019238314125686884,
0.0034189384896308184,
0.0619751438498497,
-0.045904792845249176,
-0.007438015658408403,
-0.042863424867391586,
0.05734860152006149,
-0.03769482299685478,
0.001883281278423965,
0.045504771173000336,
6.883182843688739e-33,
0.03411410376429558,
0.056286487728357315,
0.020555371418595314,
-0.0809929221868515,
-0.011815052479505539,
0.005107249598950148,
0.01485571451485157,
0.01429683156311512,
-0.11455506831407547,
-0.040428526699543,
-0.04541745036840439,
-0.03407399356365204,
-0.060816116631031036,
0.06192389130592346,
-0.014295750297605991,
-0.032507579773664474,
-0.0016668187454342842,
0.0006515996647067368,
0.015762895345687866,
0.0015732682077214122,
0.06794559210538864,
0.015064869076013565,
-0.018080580979585648,
-0.03970341756939888,
0.01568172313272953,
-0.03339923545718193,
0.09818355739116669,
-0.1112399473786354,
-0.028782814741134644,
0.03568803519010544,
-0.14632664620876312,
-0.016495583578944206,
0.0038873287849128246,
0.03442934900522232,
0.00044049505959264934,
-0.03065783903002739,
-0.061310283839702606,
0.00768079562112689,
0.03425957262516022,
-0.0495092011988163,
0.045334622263908386,
0.0654815286397934,
0.003637055866420269,
-0.009242353029549122,
-0.055894769728183746,
0.03964059054851532,
0.01532469131052494,
-0.01014325674623251,
0.06057482585310936,
0.02775082364678383,
0.027599887922406197,
0.009821661747992039,
-0.06266602128744125,
0.02269773744046688,
0.02982437238097191,
-0.06036991626024246,
0.07920677959918976,
0.01873089000582695,
0.04709325730800629,
-0.0576653778553009,
0.005012376233935356,
-0.0468430370092392,
0.03625481575727463,
0.018384620547294617,
0.011166407726705074,
-0.02155499905347824,
-0.07401289790868759,
0.04780479148030281,
0.004607759416103363,
-0.029138587415218353,
-0.020926302298903465,
-0.012529074214398861,
0.06128554046154022,
0.012019069865345955,
0.016121581196784973,
-0.02966449223458767,
0.013130689039826393,
-0.10051547735929489,
-0.05605171248316765,
-0.0013918245676904917,
-0.01731248013675213,
-0.05181228369474411,
0.015144077129662037,
-0.11410892009735107,
-0.031511642038822174,
0.014977149665355682,
0.04337213188409805,
-0.11641426384449005,
0.04991336911916733,
0.0375555120408535,
-0.03641772270202637,
-0.03579019382596016,
0.0007861761841922998,
0.039687253534793854,
-0.05681310594081879,
-5.455409772764726e-33,
-0.024979280307888985,
0.05730203911662102,
-0.06948324292898178,
0.03134671226143837,
-0.06671103835105896,
-0.022609325125813484,
0.024240469560027122,
0.12206238508224487,
-0.03475995734333992,
-0.000023861353838583454,
-0.008398272097110748,
-0.05464630946516991,
-0.01610078662633896,
-0.03058597445487976,
0.04511585831642151,
0.007770267315208912,
0.017180118709802628,
0.05507907643914223,
0.05434543266892433,
0.08197636157274246,
-0.033434443175792694,
0.009162264876067638,
-0.10470718890428543,
0.06182057037949562,
0.03140659257769585,
0.027201935648918152,
-0.02539440430700779,
0.04300175979733467,
0.023923605680465698,
-0.015102851204574108,
-0.022281678393483162,
0.006538509391248226,
-0.06204802915453911,
0.03215546905994415,
-0.015309861861169338,
0.020424257963895798,
0.05532197281718254,
-0.06792587786912918,
-0.00647853733971715,
0.03729943931102753,
0.10338606685400009,
-0.02496696263551712,
-0.06719133257865906,
0.029450632631778717,
-0.09197738766670227,
0.022924931719899178,
-0.1075657308101654,
-0.008763021789491177,
-0.025814112275838852,
-0.04355558380484581,
-0.024574099108576775,
0.013100085780024529,
-0.09217271953821182,
0.06295738369226456,
-0.049383118748664856,
-0.06139104813337326,
0.055052295327186584,
-0.08709560334682465,
-0.06528323888778687,
0.004104590974748135,
-0.043826356530189514,
0.002549861092120409,
0.03146612271666527,
-0.024044658988714218,
0.04652541130781174,
-0.08051972091197968,
0.016612891107797623,
0.11082231998443604,
0.015098730102181435,
-0.02695886418223381,
-0.017183758318424225,
0.017591647803783417,
0.07090294361114502,
0.052726421505212784,
-0.03015352226793766,
0.024320336058735847,
-0.010805293917655945,
-0.028002245351672173,
0.011235686019062996,
-0.027363255620002747,
-0.02846822515130043,
-0.045883841812610626,
0.04535899683833122,
-0.022894427180290222,
0.012517712078988552,
0.11991061270236969,
0.031204570084810257,
0.06222333014011383,
-0.03377830982208252,
0.07167375087738037,
0.02711704932153225,
-0.05982860177755356,
0.0642990991473198,
0.08087561279535294,
0.05466233938932419,
-6.080193060142847e-8,
-0.048575758934020996,
-0.04357828199863434,
-0.02520059421658516,
0.06994052231311798,
-0.11741293221712112,
-0.07167834043502808,
-0.016661781817674637,
0.02349269576370716,
-0.0189207773655653,
-0.011654352769255638,
-0.0060865627601742744,
0.04394570738077164,
-0.1421174705028534,
-0.017849182710051537,
-0.017360493540763855,
0.06673727184534073,
0.007102688308805227,
0.05892660841345787,
0.05822291225194931,
-0.011621925979852676,
0.06662546843290329,
0.0506870336830616,
-0.0026217426639050245,
-0.04186977446079254,
0.043067704886198044,
-0.015807539224624634,
-0.05735646188259125,
0.10811840742826462,
-0.04681224748492241,
-0.021816270425915718,
0.027681386098265648,
0.040631216019392014,
-0.10130742937326431,
-0.023899951949715614,
0.11535979062318802,
0.09950026869773865,
-0.0323125422000885,
-0.014645341783761978,
-0.016338108107447624,
0.03986592963337898,
0.12160283327102661,
0.07163581252098083,
-0.0807301327586174,
-0.03674149885773659,
0.06875293701887131,
0.004302941728383303,
0.016976529732346535,
-0.09159424155950546,
0.09060923755168915,
-0.020800797268748283,
0.06004125997424126,
-0.002411960158497095,
-0.06267739832401276,
0.071626678109169,
-0.04791679233312607,
0.06562545895576477,
-0.04953838884830475,
-0.01267396192997694,
0.02239644154906273,
-0.0019035502336919308,
0.012694045901298523,
0.05648035183548927,
-0.00851132906973362,
0.023120149970054626
] |
monologg/koelectra-base-v2-discriminator | b87e70eb7b3ea33b24fc2e7a85b2cc8321b9dd28 | 2021-10-20T16:54:30.000Z | [
"pytorch",
"electra",
"pretraining",
"ko",
"transformers",
"korean",
"license:apache-2.0"
] | null | false | monologg | null | monologg/koelectra-base-v2-discriminator | 1,149 | 1 | transformers | ---
language: ko
license: apache-2.0
tags:
- korean
---
# KoELECTRA v2 (Base Discriminator)
Pretrained ELECTRA Language Model for Korean (`koelectra-base-v2-discriminator`)
For more detail, please see [original repository](https://github.com/monologg/KoELECTRA/blob/master/README_EN.md).
## Usage
### Load model and tokenizer
```python
>>> from transformers import ElectraModel, ElectraTokenizer
>>> model = ElectraModel.from_pretrained("monologg/koelectra-base-v2-discriminator")
>>> tokenizer = ElectraTokenizer.from_pretrained("monologg/koelectra-base-v2-discriminator")
```
### Tokenizer example
```python
>>> from transformers import ElectraTokenizer
>>> tokenizer = ElectraTokenizer.from_pretrained("monologg/koelectra-base-v2-discriminator")
>>> tokenizer.tokenize("[CLS] 한국어 ELECTRA를 공유합니다. [SEP]")
['[CLS]', '한국어', 'EL', '##EC', '##TRA', '##를', '공유', '##합니다', '.', '[SEP]']
>>> tokenizer.convert_tokens_to_ids(['[CLS]', '한국어', 'EL', '##EC', '##TRA', '##를', '공유', '##합니다', '.', '[SEP]'])
[2, 5084, 16248, 3770, 19059, 29965, 2259, 10431, 5, 3]
```
## Example using ElectraForPreTraining
```python
import torch
from transformers import ElectraForPreTraining, ElectraTokenizer
discriminator = ElectraForPreTraining.from_pretrained("monologg/koelectra-base-v2-discriminator")
tokenizer = ElectraTokenizer.from_pretrained("monologg/koelectra-base-v2-discriminator")
sentence = "나는 방금 밥을 먹었다."
fake_sentence = "나는 내일 밥을 먹었다."
fake_tokens = tokenizer.tokenize(fake_sentence)
fake_inputs = tokenizer.encode(fake_sentence, return_tensors="pt")
discriminator_outputs = discriminator(fake_inputs)
predictions = torch.round((torch.sign(discriminator_outputs[0]) + 1) / 2)
print(list(zip(fake_tokens, predictions.tolist()[0][1:-1])))
```
| [
-0.08737126737833023,
0.020561257377266884,
-0.014742044731974602,
-0.02869381196796894,
0.00298730144277215,
0.027003493160009384,
0.02983446978032589,
0.014906560070812702,
-0.02572145126760006,
-0.043666280806064606,
0.05375613272190094,
-0.11485300213098526,
0.029444396495819092,
-0.01080904621630907,
0.05112506449222565,
0.01746406778693199,
-0.023592885583639145,
0.06813682615756989,
-0.06364688277244568,
-0.04574689641594887,
0.17094281315803528,
-0.028034795075654984,
0.021000506356358528,
-0.04652197286486626,
0.06231442838907242,
0.002575266407802701,
0.02431381307542324,
0.04048426076769829,
0.05635073408484459,
-0.04849287122488022,
0.06427380442619324,
0.0707293450832367,
0.04187433049082756,
-0.008332875557243824,
-0.007453996688127518,
0.05723325535655022,
-0.052248600870370865,
-0.06158103048801422,
-0.017583733424544334,
-0.01626347191631794,
0.04165133461356163,
-0.0037340200506150723,
-0.007848191075026989,
-0.05673126503825188,
0.024930134415626526,
-0.05754593014717102,
-0.025478141382336617,
-0.05000828206539154,
-0.054845768958330154,
-0.06656985729932785,
0.04005397856235504,
-0.060625363141298294,
0.10939224809408188,
0.0409187488257885,
-0.08010856062173843,
-0.008725328370928764,
-0.03565184772014618,
0.043774355202913284,
0.08221780508756638,
-0.032006025314331055,
-0.07556425034999847,
0.013248239643871784,
-0.007212955970317125,
0.030386602506041527,
-0.09999309480190277,
-0.04425114020705223,
0.09470032900571823,
-0.03029966540634632,
0.018017204478383064,
-0.017545893788337708,
-0.03771570697426796,
-0.028672752901911736,
0.08554091304540634,
0.046636730432510376,
0.019984472543001175,
-0.007733623962849379,
0.13753609359264374,
0.037596531212329865,
0.025120675563812256,
-0.10705143213272095,
0.016637180000543594,
-0.054733868688344955,
-0.06776386499404907,
0.02390633337199688,
0.07671848684549332,
0.015076722949743271,
-0.05475392937660217,
-0.05027090013027191,
0.015224762260913849,
0.0761965736746788,
-0.01796923577785492,
-0.004364373162388802,
0.06649627536535263,
-0.022986479103565216,
-0.061374686658382416,
0.03508906438946724,
-0.003378848545253277,
0.08632892370223999,
0.00601575430482626,
0.10844031721353531,
-0.014722377993166447,
0.029516102746129036,
-0.013245557434856892,
-0.01318375300616026,
-0.10377709567546844,
-0.1375325322151184,
-0.012655643746256828,
0.04225768521428108,
-0.025462377816438675,
-0.026631733402609825,
0.034689970314502716,
-0.027495114132761955,
0.015171682462096214,
0.0017343844519928098,
0.04908658191561699,
0.020782433450222015,
0.0542566142976284,
-0.023129522800445557,
0.0472068153321743,
0.0331714041531086,
0.016209444031119347,
-0.09677492827177048,
-0.05086725950241089,
-0.005342868156731129,
-0.03144565597176552,
0.018568197265267372,
-0.032219626009464264,
6.191343439460374e-33,
0.07132510095834732,
0.029457541182637215,
0.03270892798900604,
-0.028802989050745964,
-0.054509781301021576,
-0.0021344819106161594,
-0.0014706550864502788,
0.027551259845495224,
-0.09214742481708527,
-0.0456007681787014,
-0.07118542492389679,
0.0909893587231636,
-0.04403923079371452,
0.026553450152277946,
-0.03855115547776222,
-0.02380519174039364,
-0.07145541906356812,
0.00444432720541954,
0.02339809387922287,
-0.009378996677696705,
0.09018838405609131,
0.015122322365641594,
-0.004819316323846579,
0.049920763820409775,
-0.028076887130737305,
-0.006270738318562508,
0.02038498781621456,
-0.11914879828691483,
-0.034092266112565994,
0.058584097772836685,
0.006883944850414991,
-0.019635841250419617,
0.02141031064093113,
0.07509776949882507,
-0.07503579556941986,
0.0029929308220744133,
-0.025215977802872658,
0.012780046090483665,
-0.05254526808857918,
-0.10541585087776184,
0.023772718384861946,
0.01906460151076317,
-0.03641422092914581,
0.0300056803971529,
-0.0005779994535259902,
-0.009888722561299801,
-0.020020121708512306,
0.020976407453417778,
0.12169589847326279,
0.010630430653691292,
0.0005032415501773357,
-0.00685516931116581,
0.02872159518301487,
0.042233821004629135,
0.03318623825907707,
0.14249294996261597,
0.025275638327002525,
0.007279898971319199,
0.04764263331890106,
-0.07025022059679031,
-0.09835021197795868,
0.11864100396633148,
0.02651344984769821,
-0.019677957519888878,
0.06319702416658401,
-0.004479875788092613,
-0.00040181534131988883,
-0.09967523068189621,
-0.03130662813782692,
-0.060386449098587036,
-0.04646040499210358,
-0.04136665537953377,
-0.017068682238459587,
0.05277806520462036,
0.002093798480927944,
-0.05095100402832031,
0.008520218543708324,
-0.045169245451688766,
-0.06971430778503418,
-0.007640903349965811,
-0.028109660372138023,
-0.033680159598588943,
0.014001553878188133,
-0.03234235569834709,
0.07158392667770386,
-0.0672970563173294,
0.0027083205059170723,
-0.03169680014252663,
-0.019205396994948387,
-0.01565375365316868,
0.02471180073916912,
-0.018106289207935333,
-0.04774077609181404,
-0.03502178192138672,
-0.021851956844329834,
-6.452521386481793e-33,
0.019688472151756287,
0.057957153767347336,
-0.018940629437565804,
0.05515134707093239,
-0.040447868406772614,
-0.0215391144156456,
0.05152503401041031,
0.08823537081480026,
-0.03675473853945732,
-0.03328676149249077,
0.05623701587319374,
-0.04402249678969383,
0.056713130325078964,
-0.024894213303923607,
0.10102692246437073,
-0.012372823432087898,
-0.03386150300502777,
0.07827982306480408,
0.08193698525428772,
0.06939948350191116,
-0.011115112341940403,
0.07610032707452774,
-0.11999621242284775,
0.03353948891162872,
-0.029956817626953125,
0.000416166934883222,
-0.03701845929026604,
0.08644051104784012,
0.05191362649202347,
0.0048789395950734615,
-0.0311199352145195,
0.019625455141067505,
-0.0669114962220192,
0.04151788726449013,
-0.03527163714170456,
-0.09147419780492783,
-0.020229455083608627,
0.004741148091852665,
-0.020377637818455696,
0.057434745132923126,
-0.04177897050976753,
0.016055839136242867,
-0.03740199655294418,
0.030295368283987045,
0.03126391023397446,
-0.03246275335550308,
-0.001984425587579608,
-0.04204500466585159,
0.021184753626585007,
-0.07790601998567581,
0.03309888392686844,
0.0013046489330008626,
-0.08752584457397461,
0.04016323760151863,
-0.01832684688270092,
-0.04772819206118584,
0.05068640410900116,
-0.022100405767560005,
-0.029876306653022766,
-0.041532404720783234,
-0.02463456429541111,
-0.10243260115385056,
0.09110238403081894,
-0.03624570369720459,
-0.004881713539361954,
-0.07509174197912216,
0.08801110088825226,
0.02319323644042015,
0.039343398064374924,
-0.03619629517197609,
-0.009802211076021194,
-0.03348036855459213,
0.028351180255413055,
-0.025189165025949478,
0.006160906050354242,
-0.027491381391882896,
-0.13484618067741394,
0.0498834066092968,
0.042964812368154526,
-0.06330569088459015,
-0.049060966819524765,
0.05209650099277496,
0.06303156167268753,
0.0010490956483408809,
0.026309430599212646,
0.007311797700822353,
-0.00811944529414177,
0.12044116109609604,
0.08298847824335098,
-0.024129416793584824,
-0.04488345608115196,
0.05374990031123161,
0.019846118986606598,
0.08894173055887222,
0.01090533472597599,
-4.878545212250174e-8,
0.004816270433366299,
-0.04352057725191116,
-0.010947853326797485,
0.017355898395180702,
0.01350297499448061,
-0.006254691630601883,
-0.042617082595825195,
-0.07708857953548431,
-0.02591842971742153,
-0.1250973641872406,
0.017171237617731094,
0.025943079963326454,
-0.02232501655817032,
-0.007618858944624662,
0.010904315859079361,
0.01943611353635788,
0.02430533803999424,
0.16602753102779388,
-0.041404880583286285,
0.034380681812763214,
-0.005012968089431524,
-0.01193434838205576,
0.006306921131908894,
-0.022942252457141876,
0.015024185180664062,
0.04457835853099823,
-0.05603611841797829,
0.03352639824151993,
0.01657101698219776,
-0.046964529901742935,
-0.0628354474902153,
0.024936538189649582,
0.017490075901150703,
-0.018326254561543465,
-0.037443432956933975,
0.08423073589801788,
-0.024609755724668503,
-0.084023118019104,
-0.01773396134376526,
0.03753352537751198,
0.05547517165541649,
-0.047830164432525635,
-0.08984427154064178,
0.017223702743649483,
0.053283702582120895,
0.016150234267115593,
0.047501206398010254,
-0.020241165533661842,
0.014494006521999836,
0.03095063380897045,
-0.017904911190271378,
-0.05126645043492317,
-0.14339329302310944,
-0.012247662991285324,
-0.04445694014430046,
-0.020059997215867043,
-0.05099567398428917,
-0.0343172550201416,
0.03184102103114128,
0.0035856550093740225,
0.02629711665213108,
0.04451654106378555,
0.023343222215771675,
0.01823258586227894
] |
Hate-speech-CNERG/dehatebert-mono-english | 25d0e4d9122d2a5c283e07405a325e3dfd4a73b3 | 2021-09-25T13:55:16.000Z | [
"pytorch",
"jax",
"bert",
"text-classification",
"en",
"arxiv:2004.06465",
"transformers",
"license:apache-2.0"
] | text-classification | false | Hate-speech-CNERG | null | Hate-speech-CNERG/dehatebert-mono-english | 1,146 | 2 | transformers | ---
language: en
license: apache-2.0
---
This model is used for detecting **hate speech** in the **English language**. The "mono" in the name refers to the monolingual setting, where the model is trained using only English-language data. It is fine-tuned from the multilingual BERT model.
The model was trained with different learning rates; the best validation score achieved is 0.726030, for a learning rate of 2e-5. Training code can be found at https://github.com/punyajoy/DE-LIMIT. A minimal usage sketch is shown below.
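As a hedged usage sketch (it is not from the original model card), the model can be loaded through the standard `transformers` sequence-classification API; the mapping of class indices to "normal" / "hate speech" below is an assumption and should be checked against the model's config.
```python
# Minimal usage sketch, assuming the standard transformers text-classification API.
# The class-index order (normal vs. hate speech) is an assumption: verify it
# against the model config before relying on it.
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification
tokenizer = AutoTokenizer.from_pretrained("Hate-speech-CNERG/dehatebert-mono-english")
model = AutoModelForSequenceClassification.from_pretrained("Hate-speech-CNERG/dehatebert-mono-english")
model.eval()
text = "I really enjoyed the concert last night."
inputs = tokenizer(text, return_tensors="pt", truncation=True)
with torch.no_grad():
    logits = model(**inputs).logits
probs = torch.softmax(logits, dim=-1)[0]
print(probs)  # assumed order: [normal, hate speech]
```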
### For more details about our paper
Sai Saketh Aluru, Binny Mathew, Punyajoy Saha and Animesh Mukherjee. "[Deep Learning Models for Multilingual Hate Speech Detection](https://arxiv.org/abs/2004.06465)". Accepted at ECML-PKDD 2020.
***Please cite our paper in any published work that uses any of these resources.***
~~~
@article{aluru2020deep,
title={Deep Learning Models for Multilingual Hate Speech Detection},
author={Aluru, Sai Saket and Mathew, Binny and Saha, Punyajoy and Mukherjee, Animesh},
journal={arXiv preprint arXiv:2004.06465},
year={2020}
}
~~~
| [
-0.08892443031072617,
-0.045097608119249344,
0.07170797884464264,
-0.0344398207962513,
0.0202645193785429,
0.02441413886845112,
-0.012024263851344585,
-0.02009456418454647,
0.05952276289463043,
-0.05635795369744301,
0.004640298895537853,
-0.11439390480518341,
0.0284669678658247,
0.02292834408581257,
-0.024373004212975502,
0.045295681804418564,
0.027399370446801186,
0.03998761624097824,
-0.06135139986872673,
-0.0392303429543972,
0.09007685631513596,
0.10572406649589539,
0.0339185930788517,
-0.02754403091967106,
-0.027288872748613358,
-0.027025416493415833,
-0.07049296051263809,
-0.015157876536250114,
0.028981054201722145,
0.03305567428469658,
0.0819309651851654,
-0.012986858375370502,
0.018729425966739655,
0.062202226370573044,
-0.01681314967572689,
-0.03205275908112526,
0.023137632757425308,
-0.04187280312180519,
0.05028368532657623,
0.04927428811788559,
-0.016766084358096123,
-0.010927247814834118,
-0.005556286312639713,
-0.09479375928640366,
0.05125842243432999,
-0.024973435327410698,
-0.04806208610534668,
0.012210273183882236,
-0.029542453587055206,
-0.058894120156764984,
-0.0026953518390655518,
-0.029736094176769257,
0.0955951064825058,
0.021223029121756554,
0.0055472636595368385,
-0.07612314075231552,
-0.00017809846031013876,
0.055785056203603745,
0.010009540244936943,
0.009144766256213188,
-0.02978270873427391,
-0.08017273992300034,
-0.01957719400525093,
0.005624709650874138,
-0.03914378210902214,
-0.01132861990481615,
-0.06314008682966232,
0.0016707085305824876,
-0.03338276594877243,
0.025898220017552376,
-0.03388698026537895,
0.036795709282159805,
0.061900436878204346,
0.03755204379558563,
0.014832165092229843,
-0.023204609751701355,
0.08212471753358841,
-0.04770544916391373,
0.109003946185112,
-0.10332658886909485,
-0.008987393230199814,
0.004338573664426804,
0.08176719397306442,
-0.03847907483577728,
0.07404808700084686,
-0.019776009023189545,
0.02171695977449417,
0.02973405458033085,
-0.04718542471528053,
-0.015838759019970894,
-0.047360070049762726,
-0.04918995499610901,
0.056577954441308975,
0.025707006454467773,
0.0662526860833168,
0.029025554656982422,
-0.011296888813376427,
0.009226425550878048,
-0.05135373771190643,
0.06229458376765251,
0.01706390082836151,
-0.0036914264783263206,
-0.0017000263324007392,
-0.06111173331737518,
0.042618848383426666,
-0.034526411443948746,
-0.02358018234372139,
0.041536636650562286,
0.03191320598125458,
-0.10664952546358109,
-0.018116554245352745,
-0.040781017392873764,
0.03126846253871918,
-0.07105173915624619,
0.013016323558986187,
0.02839728258550167,
0.04519738629460335,
-0.005991076584905386,
0.03370148316025734,
0.09088224917650223,
-0.06950683146715164,
0.014236142858862877,
0.0015334028284996748,
0.05326541140675545,
0.01888974942266941,
0.0026830146089196205,
-0.08064520359039307,
6.634675991085214e-33,
0.0061269234865903854,
0.05829766392707825,
-0.030161486938595772,
-0.06436152756214142,
0.006740248762071133,
-0.030133185908198357,
-0.0214590635150671,
0.0760933980345726,
-0.03536215052008629,
-0.03541465848684311,
0.0204986110329628,
-0.008599963039159775,
-0.04083573818206787,
0.11150606721639633,
0.019145313650369644,
0.01248887088149786,
0.041205909103155136,
-0.009775349870324135,
0.04496017098426819,
-0.04655689373612404,
0.1119072362780571,
-0.009736428037285805,
0.04719331115484238,
-0.04197341948747635,
-0.07162746042013168,
-0.011244210414588451,
0.052736807614564896,
-0.04015370085835457,
0.03548211604356766,
0.05491076409816742,
-0.15711940824985504,
0.004507088102400303,
-0.052886780351400375,
0.0020488379523158073,
0.09563720226287842,
-0.07782215625047684,
-0.018201258033514023,
0.04153017699718475,
-0.05171510577201843,
-0.08843120187520981,
-0.019992949441075325,
0.033851031213998795,
-0.037890348583459854,
-0.06288457661867142,
-0.013534272089600563,
0.010028377175331116,
0.018176712095737457,
-0.019009537994861603,
0.04303367808461189,
0.02477988414466381,
0.041645657271146774,
-0.0045112441293895245,
-0.0613471157848835,
0.07794807106256485,
0.030100304633378983,
0.0630149394273758,
0.05946852266788483,
0.04173679277300835,
0.07999328523874283,
-0.002419149735942483,
-0.01979217864573002,
-0.017375826835632324,
-0.0013896989403292537,
0.009666751138865948,
0.05997650697827339,
-0.008916588500142097,
-0.06390327960252762,
0.005440727341920137,
0.0037246611900627613,
-0.05499575659632683,
0.022626006975769997,
-0.005021384917199612,
0.02353786490857601,
0.007233625277876854,
-0.04871056228876114,
-0.023264681920409203,
0.1089194118976593,
-0.004187557380646467,
0.04193040356040001,
0.047250475734472275,
-0.0013416837900876999,
0.011588115245103836,
0.018881717696785927,
-0.12320882081985474,
-0.09147705882787704,
-0.004114061594009399,
0.0530163049697876,
-0.08795520663261414,
0.040337830781936646,
0.09310907870531082,
0.04096751660108566,
-0.06535106897354126,
-0.06318657100200653,
0.020091691985726357,
-0.061973415315151215,
-6.654408133131705e-33,
-0.05800985172390938,
0.05532394349575043,
-0.08919016271829605,
0.01616247184574604,
-0.05910831317305565,
-0.05759276822209358,
0.03795727714896202,
0.11941484361886978,
0.015681179240345955,
-0.03246593102812767,
0.09018011391162872,
-0.04893334582448006,
0.08087658137083054,
0.023017697036266327,
0.07544302940368652,
-0.005360346287488937,
0.022590752691030502,
0.047960538417100906,
-0.015747899189591408,
0.0465385802090168,
0.05831248685717583,
0.05290476977825165,
-0.10798396915197372,
0.04881278797984123,
-0.031940605491399765,
0.004020669963210821,
-0.05454646423459053,
0.04177774861454964,
-0.04590752348303795,
-0.004005074501037598,
-0.01161989662796259,
0.0937335342168808,
-0.10179295390844345,
0.05103517323732376,
-0.035649508237838745,
-0.03639208525419235,
-0.013820317573845387,
-0.005623322911560535,
0.027545500546693802,
0.02053225040435791,
0.07582267373800278,
0.0023638647980988026,
-0.04816495627164841,
-0.021972939372062683,
0.0016462383791804314,
0.0075551546178758144,
-0.06519010663032532,
-0.005221489351242781,
0.00905285682529211,
-0.10528375208377838,
-0.027324670925736427,
-0.052410777658224106,
-0.019903119653463364,
0.05409187078475952,
-0.06952551007270813,
-0.08589282631874084,
0.05122238025069237,
-0.04207742214202881,
-0.01700790785253048,
-0.0017096234951168299,
-0.06441202014684677,
-0.036676075309515,
-0.0707729309797287,
0.0147640286013484,
0.032530833035707474,
-0.019631661474704742,
-0.058447856456041336,
0.03225160762667656,
0.10216060280799866,
0.010214455425739288,
0.018916819244623184,
-0.010756706818938255,
-0.0219124648720026,
-0.010565711185336113,
-0.06582542508840561,
-0.0210917629301548,
-0.03458644449710846,
-0.08263922482728958,
-0.03953738510608673,
-0.017005303874611855,
-0.0074031418189406395,
-0.01220824383199215,
-0.010031584650278091,
0.020796112716197968,
0.046237409114837646,
-0.009450514800846577,
-0.014298655092716217,
0.04085731506347656,
-0.05926180258393288,
0.010960284620523453,
-0.010841907002031803,
0.050380997359752655,
-0.03029681369662285,
0.059910111129283905,
0.06697633862495422,
-5.770322175635556e-8,
-0.07962175458669662,
-0.041437551379203796,
-0.02327640727162361,
0.011512917466461658,
-0.003089649835601449,
-0.060063064098358154,
-0.045171916484832764,
-0.007307430263608694,
-0.013816338032484055,
0.04449030011892319,
0.02870556153357029,
0.04496971517801285,
-0.059712618589401245,
-0.04105491191148758,
-0.08257775008678436,
0.04055127501487732,
0.06417585909366608,
0.039597492665052414,
0.04566432908177376,
-0.05540570616722107,
0.054039083421230316,
0.019530022516846657,
0.08661191910505295,
0.007545494474470615,
-0.050492431968450546,
-0.07865996658802032,
0.016182996332645416,
0.036685630679130554,
-0.011814421974122524,
-0.03666369616985321,
-0.033275581896305084,
0.06256045401096344,
-0.10142375528812408,
-0.019736576825380325,
0.054068755358457565,
0.1767619103193283,
-0.0829523578286171,
0.008310763165354729,
-0.026199519634246826,
0.08380620181560516,
0.0918329730629921,
0.03839704021811485,
-0.07497455924749374,
-0.04247835651040077,
0.06532999873161316,
0.020705023780465126,
-0.03692805767059326,
-0.10558678209781647,
0.024017905816435814,
-0.025140682235360146,
0.10301906615495682,
0.046711236238479614,
-0.027853844687342644,
0.09583965688943863,
-0.021947558969259262,
-0.004449930973351002,
-0.09188961982727051,
-0.03625103458762169,
-0.04261795058846474,
0.11759001761674881,
0.0857158750295639,
0.009730475023388863,
0.04913140460848808,
-0.0031847760546952486
] |
TencentGameMate/chinese-wav2vec2-base | 3991242c806928916fff4a8c0e4f76acf661b743 | 2022-06-24T01:53:18.000Z | [
"pytorch",
"wav2vec2",
"pretraining",
"transformers",
"license:mit"
] | null | false | TencentGameMate | null | TencentGameMate/chinese-wav2vec2-base | 1,145 | 3 | transformers | ---
license: mit
---
Pretrained on the 10k-hour L subset of WenetSpeech. More details in [TencentGameMate/chinese_speech_pretrain](https://github.com/TencentGameMate/chinese_speech_pretrain)
This model does not have a tokenizer as it was pretrained on audio alone.
In order to use this model for speech recognition, a tokenizer should be created and the model should be fine-tuned on labeled text data. A rough sketch of that fine-tuning setup follows below.
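As a hedged sketch of what that fine-tuning setup could look like (it is not part of this card), the snippet below wires the checkpoint into `Wav2Vec2ForCTC`; the toy character vocabulary, the dummy audio and the single training step are assumptions for illustration only.
```python
# Minimal CTC fine-tuning sketch (assumed setup): the toy vocabulary, dummy
# audio and single training step are illustrative, not part of this checkpoint.
import json
import torch
from transformers import Wav2Vec2CTCTokenizer, Wav2Vec2FeatureExtractor, Wav2Vec2ForCTC, Wav2Vec2Processor
# Toy character-level vocabulary; in practice, build this from your own transcripts.
vocab = {"<pad>": 0, "<unk>": 1, "|": 2, "你": 3, "好": 4}
with open("vocab.json", "w", encoding="utf-8") as f:
    json.dump(vocab, f, ensure_ascii=False)
tokenizer = Wav2Vec2CTCTokenizer("vocab.json", unk_token="<unk>", pad_token="<pad>", word_delimiter_token="|")
feature_extractor = Wav2Vec2FeatureExtractor.from_pretrained("TencentGameMate/chinese-wav2vec2-base")
processor = Wav2Vec2Processor(feature_extractor=feature_extractor, tokenizer=tokenizer)
model = Wav2Vec2ForCTC.from_pretrained(
    "TencentGameMate/chinese-wav2vec2-base",
    vocab_size=len(tokenizer),
    pad_token_id=tokenizer.pad_token_id,
)
# One illustrative training step on dummy data (replace with real labeled audio).
wav = torch.randn(16000).numpy()  # 1 second of fake 16 kHz audio
transcript = "你好"
inputs = processor(wav, sampling_rate=16000, return_tensors="pt")
labels = tokenizer(transcript, return_tensors="pt").input_ids
loss = model(inputs.input_values, labels=labels).loss
loss.backward()
```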
python package:
transformers==4.16.2
```python
import torch
import torch.nn.functional as F
import soundfile as sf
from transformers import (
Wav2Vec2FeatureExtractor,
Wav2Vec2ForPreTraining,
Wav2Vec2Model,
)
from transformers.models.wav2vec2.modeling_wav2vec2 import _compute_mask_indices
model_path=""
wav_path=""
mask_prob=0.0
mask_length=10
feature_extractor = Wav2Vec2FeatureExtractor.from_pretrained(model_path)
model = Wav2Vec2Model.from_pretrained(model_path)
# for pretrain: Wav2Vec2ForPreTraining
# model = Wav2Vec2ForPreTraining.from_pretrained(model_path)
device = "cuda" if torch.cuda.is_available() else "cpu"  # device was undefined in the original snippet
model = model.to(device)
if device == "cuda":
    model = model.half()  # half precision is intended for GPU inference
model.eval()
wav, sr = sf.read(wav_path)
input_values = feature_extractor(wav, return_tensors="pt").input_values
if device == "cuda":
    input_values = input_values.half()
input_values = input_values.to(device)
# for Wav2Vec2ForPreTraining
# batch_size, raw_sequence_length = input_values.shape
# sequence_length = model._get_feat_extract_output_lengths(raw_sequence_length)
# mask_time_indices = _compute_mask_indices((batch_size, sequence_length), mask_prob=0.0, mask_length=2)
# mask_time_indices = torch.tensor(mask_time_indices, device=input_values.device, dtype=torch.long)
with torch.no_grad():
outputs = model(input_values)
last_hidden_state = outputs.last_hidden_state
# for Wav2Vec2ForPreTraining
# outputs = model(input_values, mask_time_indices=mask_time_indices, output_hidden_states=True)
# last_hidden_state = outputs.hidden_states[-1]
``` | [
-0.05739091709256172,
-0.0660773441195488,
0.00022669298050459474,
-0.03908038139343262,
-0.010170196183025837,
0.062121812254190445,
-0.001793952425941825,
-0.01739371195435524,
-0.04345238581299782,
-0.09392821043729782,
0.02261200360953808,
-0.07954955846071243,
-0.0441720150411129,
0.01786474511027336,
0.030796421691775322,
0.016337335109710693,
0.0450555682182312,
0.017205407842993736,
-0.07224612683057785,
-0.17844907939434052,
0.11893889307975769,
0.003589225234463811,
0.09665412455797195,
0.006032940931618214,
0.028867976740002632,
-0.005122784990817308,
-0.036722537130117416,
0.044097818434238434,
0.0368330143392086,
0.0013700942508876324,
-0.008213995024561882,
0.036704860627651215,
-0.00672296853736043,
0.06822138279676437,
-0.007272844668477774,
-0.012364096008241177,
-0.029029035940766335,
-0.049270786345005035,
0.004284031223505735,
-0.028931118547916412,
0.029782885685563087,
-0.0026211205404251814,
-0.019635582342743874,
-0.04898889362812042,
0.014926760457456112,
0.005600698292255402,
0.02964550070464611,
-0.025110406801104546,
-0.026913020759820938,
-0.027363386005163193,
-0.019374066963791847,
-0.012028577737510204,
0.08157288283109665,
0.08820067346096039,
-0.052092745900154114,
0.01987343654036522,
0.056892283260822296,
-0.03466528281569481,
0.06547554582357407,
-0.09801114350557327,
-0.0754895880818367,
-0.013894785195589066,
0.024752695113420486,
-0.055293016135692596,
-0.0782238021492958,
-0.0238645002245903,
-0.04617093876004219,
0.019887007772922516,
0.06385333836078644,
-0.008222251199185848,
-0.041057564318180084,
0.05308575555682182,
0.03573649749159813,
0.06099119409918785,
0.009904890321195126,
0.016391312703490257,
0.14399126172065735,
0.01003713347017765,
0.04641282930970192,
-0.105502650141716,
0.04714939743280411,
-0.07135836780071259,
0.10858544707298279,
-0.013062378391623497,
0.05325578898191452,
0.004410379100590944,
-0.010892728343605995,
-0.06820180267095566,
0.016253255307674408,
-0.0309266597032547,
-0.09112299978733063,
-0.05787723511457443,
0.004715037997812033,
0.04375888779759407,
-0.02512904815375805,
0.018332822248339653,
0.00895394291728735,
0.07422938197851181,
-0.02461918443441391,
0.06983233243227005,
0.008023718371987343,
-0.06011787801980972,
0.01107261423021555,
0.008818675763905048,
-0.010240206494927406,
-0.02895343117415905,
-0.00949001219123602,
0.09558706730604172,
0.029779501259326935,
-0.03355243802070618,
0.07455028593540192,
-0.00837381836026907,
0.01362771075218916,
-0.019796153530478477,
0.051294710487127304,
0.03272401914000511,
-0.0785897970199585,
-0.02908269688487053,
-0.07108458131551743,
0.09287294745445251,
-0.01656372658908367,
-0.007409748621284962,
-0.049864694476127625,
0.037757597863674164,
-0.006271375343203545,
-0.05410824716091156,
-0.006673414260149002,
6.279129357579782e-33,
-0.025543607771396637,
0.07562923431396484,
-0.015724213793873787,
-0.04479557275772095,
-0.020832985639572144,
-0.0827331393957138,
0.018062254413962364,
-0.014490602537989616,
-0.06907769292593002,
-0.005170179530978203,
-0.037386003881692886,
0.007917770184576511,
-0.10361248254776001,
0.019342921674251556,
-0.0807943195104599,
-0.054125625640153885,
-0.024535775184631348,
0.02149130217730999,
0.021831775084137917,
-0.04698216915130615,
0.10264340788125992,
0.02595212124288082,
-0.00615127244964242,
-0.0701453909277916,
-0.012142177671194077,
0.03557150438427925,
0.05135582759976387,
-0.08540364354848862,
0.05188337713479996,
0.0456249825656414,
-0.04926445707678795,
-0.04662099480628967,
0.11257398128509521,
-0.02014540508389473,
-0.00876157358288765,
0.020394861698150635,
0.032991088926792145,
0.04489299654960632,
-0.035630080848932266,
-0.11148317903280258,
0.0299637820571661,
0.040758684277534485,
-0.03663092106580734,
-0.08794473111629486,
-0.05107545852661133,
-0.05956750735640526,
-0.07253292948007584,
0.05490461364388466,
0.05555177479982376,
-0.00012107891961932182,
0.011553016491234303,
0.007328308653086424,
-0.02549506351351738,
0.07260333001613617,
0.04355553910136223,
-0.07130934298038483,
0.10971378535032272,
0.017014559358358383,
0.050367604941129684,
-0.04784243553876877,
0.021823272109031677,
0.004207783844321966,
0.012406058609485626,
0.028693221509456635,
0.06360553205013275,
-0.032752420753240585,
-0.018976567313075066,
-0.0005019889213144779,
0.035039354115724564,
-0.022304320707917213,
-0.06096524000167847,
-0.019615015015006065,
0.01324413064867258,
0.06522670388221741,
0.006979623809456825,
-0.04246462136507034,
0.01295370701700449,
-0.07080353796482086,
-0.08420068770647049,
0.033984966576099396,
-0.04588576406240463,
0.004597506485879421,
-0.029155246913433075,
-0.012101885862648487,
-0.01828083023428917,
-0.045001741498708725,
0.03719174116849899,
-0.09146483987569809,
-0.0013827180955559015,
-0.08228979259729385,
-0.062144700437784195,
0.009714663028717041,
0.03623730316758156,
0.012717919424176216,
-0.045491598546504974,
-6.626331450562315e-33,
-0.017071187496185303,
0.12596356868743896,
-0.0066747041419148445,
0.08409247547388077,
-0.015266512520611286,
0.01956445537507534,
0.12065022438764572,
0.06483715772628784,
-0.05262255668640137,
-0.0329882837831974,
0.04605064168572426,
-0.07584542781114578,
0.012133680284023285,
-0.014575407840311527,
0.03815045952796936,
-0.035424619913101196,
0.002254555933177471,
0.06671415269374847,
0.10744266957044601,
0.1028314009308815,
0.02959807589650154,
0.05636240914463997,
-0.1413683295249939,
0.03670031949877739,
-0.1331031620502472,
0.0235952939838171,
-0.01086095254868269,
0.0612495131790638,
0.03603053092956543,
0.01223989948630333,
-0.03954606503248215,
0.055455755442380905,
-0.04664433375000954,
0.008851148188114166,
-0.06045253574848175,
-0.04100484773516655,
0.04205708205699921,
-0.022292276844382286,
-0.01962163671851158,
-0.0032410172279924154,
0.045265231281518936,
0.03427248075604439,
-0.09369838237762451,
-0.0225040465593338,
-0.04291229695081711,
0.01755926012992859,
-0.004375867545604706,
0.002278231317177415,
0.002825317671522498,
0.00020391067664604634,
0.08804619312286377,
-0.04891344532370567,
-0.024336665868759155,
0.006142774131149054,
-0.06277164071798325,
0.0011023456463590264,
0.061323147267103195,
-0.08847550302743912,
-0.014581744559109211,
-0.03780263662338257,
-0.04989498108625412,
-0.025881273671984673,
0.04514981433749199,
-0.12300334870815277,
0.013296704739332199,
0.00906339567154646,
0.011859177611768246,
0.08273762464523315,
0.0785769671201706,
-0.046148329973220825,
-0.042782701551914215,
0.11116378009319305,
0.03328889608383179,
0.015712810680270195,
-0.043839406222105026,
0.04037337377667427,
-0.10010885447263718,
-0.016467435285449028,
0.0001861088239820674,
-0.007308272644877434,
-0.1034298688173294,
-0.00621215533465147,
0.0419645830988884,
0.041446760296821594,
0.09108060598373413,
0.0771021768450737,
0.022938264533877373,
0.0695674866437912,
-0.015548712573945522,
0.008031333796679974,
0.001449004514142871,
0.03272341191768646,
0.08742348104715347,
0.053933631628751755,
-0.015042596496641636,
-5.5522029640542314e-8,
-0.05054859817028046,
0.02586507797241211,
-0.04414529725909233,
-0.0032629589550197124,
-0.08991917967796326,
-0.050116751343011856,
0.019522788003087044,
-0.012231899425387383,
-0.000039451868360629305,
-0.0319712869822979,
0.019450688734650612,
-0.006300428882241249,
-0.017413737252354622,
0.04712289571762085,
-0.019757293164730072,
0.02877684496343136,
-0.006566741969436407,
0.1215757429599762,
-0.018662385642528534,
-0.12924711406230927,
-0.004940930288285017,
0.04413428530097008,
0.017959536984562874,
-0.019700808450579643,
0.00280829519033432,
-0.0019436458824202418,
-0.04816360026597977,
0.0346221923828125,
-0.05249443277716637,
-0.010116981342434883,
-0.049809422343969345,
0.04167695343494415,
0.012137598358094692,
-0.007616092916578054,
0.012463055551052094,
0.05724295601248741,
-0.01315620169043541,
-0.028643248602747917,
-0.010662512853741646,
0.05182855576276779,
0.04860012233257294,
0.053651101887226105,
-0.08868744224309921,
-0.014659295789897442,
0.07122732698917389,
-0.02850976400077343,
0.01628556102514267,
-0.09649595618247986,
0.013495998457074165,
0.05799708142876625,
0.019522039219737053,
0.04459463059902191,
-0.028158672153949738,
-0.040634166449308395,
0.07127153128385544,
0.07402624189853668,
-0.02027156390249729,
0.021633487194776535,
-0.03462035208940506,
0.0210455022752285,
0.05518486723303795,
0.015232991427183151,
-0.029445800930261612,
-0.052780620753765106
] |
flair/ner-dutch-large | 44c285912a9d6eec4d0858580f3cb13b7b8c9959 | 2021-05-08T15:36:03.000Z | [
"pytorch",
"nl",
"dataset:conll2003",
"arxiv:2011.06993",
"flair",
"token-classification",
"sequence-tagger-model"
] | token-classification | false | flair | null | flair/ner-dutch-large | 1,144 | 3 | flair | ---
tags:
- flair
- token-classification
- sequence-tagger-model
language: nl
datasets:
- conll2003
widget:
- text: "George Washington ging naar Washington"
---
## Dutch NER in Flair (large model)
This is the large 4-class NER model for Dutch that ships with [Flair](https://github.com/flairNLP/flair/).
F1-Score: **95,25** (CoNLL-03 Dutch)
Predicts 4 tags:
| **tag** | **meaning** |
|---------------------------------|-----------|
| PER | person name |
| LOC | location name |
| ORG | organization name |
| MISC | other name |
Based on document-level XLM-R embeddings and [FLERT](https://arxiv.org/pdf/2011.06993v1.pdf/).
---
### Demo: How to use in Flair
Requires: **[Flair](https://github.com/flairNLP/flair/)** (`pip install flair`)
```python
from flair.data import Sentence
from flair.models import SequenceTagger
# load tagger
tagger = SequenceTagger.load("flair/ner-dutch-large")
# make example sentence
sentence = Sentence("George Washington ging naar Washington")
# predict NER tags
tagger.predict(sentence)
# print sentence
print(sentence)
# print predicted NER spans
print('The following NER tags are found:')
# iterate over entities and print
for entity in sentence.get_spans('ner'):
print(entity)
```
This yields the following output:
```
Span [1,2]: "George Washington" [− Labels: PER (1.0)]
Span [5]: "Washington" [− Labels: LOC (1.0)]
```
So, the entities "*George Washington*" (labeled as a **person**) and "*Washington*" (labeled as a **location**) are found in the sentence "*George Washington ging naar Washington*".
---
### Training: Script to train this model
The following Flair script was used to train this model:
```python
import torch
# 1. get the corpus
from flair.datasets import CONLL_03_DUTCH
corpus = CONLL_03_DUTCH()
# 2. what tag do we want to predict?
tag_type = 'ner'
# 3. make the tag dictionary from the corpus
tag_dictionary = corpus.make_tag_dictionary(tag_type=tag_type)
# 4. initialize fine-tuneable transformer embeddings WITH document context
from flair.embeddings import TransformerWordEmbeddings
embeddings = TransformerWordEmbeddings(
    model='xlm-roberta-large',
    layers="-1",
    subtoken_pooling="first",
    fine_tune=True,
    use_context=True,
)
# 5. initialize bare-bones sequence tagger (no CRF, no RNN, no reprojection)
from flair.models import SequenceTagger
tagger = SequenceTagger(
    hidden_size=256,
    embeddings=embeddings,
    tag_dictionary=tag_dictionary,
    tag_type='ner',
    use_crf=False,
    use_rnn=False,
    reproject_embeddings=False,
)
# 6. initialize trainer with AdamW optimizer
from flair.trainers import ModelTrainer
trainer = ModelTrainer(tagger, corpus, optimizer=torch.optim.AdamW)
# 7. run training with XLM parameters (20 epochs, small LR)
from torch.optim.lr_scheduler import OneCycleLR
trainer.train('resources/taggers/ner-dutch-large',
              learning_rate=5.0e-6,
              mini_batch_size=4,
              mini_batch_chunk_size=1,
              max_epochs=20,
              scheduler=OneCycleLR,
              embeddings_storage_mode='none',
              weight_decay=0.,
              )
```
---
### Cite
Please cite the following paper when using this model.
```
@misc{schweter2020flert,
title={FLERT: Document-Level Features for Named Entity Recognition},
author={Stefan Schweter and Alan Akbik},
year={2020},
eprint={2011.06993},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
```
---
### Issues?
The Flair issue tracker is available [here](https://github.com/flairNLP/flair/issues/).
| [
-0.03944353759288788,
-0.01400077622383833,
0.006958059035241604,
-0.008062342181801796,
0.08098382502794266,
0.022124001756310463,
-0.09306979924440384,
0.008092742413282394,
0.013129357248544693,
-0.0295549388974905,
0.016546078026294708,
-0.11874425411224365,
-0.02871812880039215,
0.038878634572029114,
-0.07144061475992203,
0.06156758964061737,
-0.01596796326339245,
-0.006453929003328085,
-0.09000252187252045,
-0.06944844871759415,
0.026562897488474846,
0.033651068806648254,
0.008763366378843784,
-0.01822098344564438,
0.03445952758193016,
-0.04338736832141876,
-0.03370886296033859,
0.027366288006305695,
-0.0025174859911203384,
-0.013118497096002102,
0.05063074454665184,
0.06854592263698578,
-0.022436728700995445,
0.0889580026268959,
-0.02754136361181736,
-0.022992951795458794,
-0.05037975311279297,
0.0527854822576046,
0.052352581173181534,
0.08640725910663605,
0.016895338892936707,
-0.042052511125802994,
-0.010802515782415867,
-0.004661244340240955,
0.046610575169324875,
0.03230128809809685,
-0.04059404879808426,
0.0006405049352906644,
-0.03152753785252571,
0.059026576578617096,
-0.09888775646686554,
-0.005629545543342829,
0.02122502587735653,
0.0351356640458107,
0.021597646176815033,
-0.014987558126449585,
-0.06807487457990646,
-0.08651476353406906,
-0.027570292353630066,
0.02137787826359272,
0.02241884171962738,
-0.01411829050630331,
-0.044264763593673706,
0.01805630885064602,
-0.06214563548564911,
0.01357804425060749,
-0.05147968977689743,
0.1178150624036789,
-0.05382552742958069,
0.026972463354468346,
0.05981462076306343,
-0.03305145353078842,
-0.004074619617313147,
0.01675846427679062,
0.011265870183706284,
0.02236391417682171,
-0.030874086543917656,
0.04750599339604378,
0.06479585915803909,
-0.0560484379529953,
-0.011531304568052292,
0.07463030517101288,
0.0606524795293808,
-0.10086991637945175,
0.03867277875542641,
-0.038989197462797165,
0.04007910564541817,
-0.01177107822149992,
-0.026374349370598793,
0.02327859029173851,
-0.028135478496551514,
-0.06472575664520264,
0.07023944705724716,
0.002276841551065445,
-0.06849571317434311,
0.059816423803567886,
0.07075249403715134,
0.023344939574599266,
-0.00284467451274395,
0.11187143623828888,
-0.04223955422639847,
0.01889892667531967,
-0.025353748351335526,
-0.032359808683395386,
-0.053206734359264374,
-0.0196275245398283,
0.016009224578738213,
0.025600440800189972,
0.012822415679693222,
-0.06981497257947922,
-0.020869705826044083,
0.019736051559448242,
-0.09219970554113388,
-0.0861416831612587,
0.011747729033231735,
-0.07381133735179901,
0.04892662540078163,
-0.06049000471830368,
0.0447857491672039,
-0.00243981066159904,
-0.011496201157569885,
0.00941456202417612,
-0.0238574780523777,
-0.07170280814170837,
0.00592335220426321,
0.02845091186463833,
-0.06847888231277466,
2.5655489304352824e-33,
0.03564689680933952,
0.06605999171733856,
-0.003569787135347724,
0.09490477293729782,
-0.04606594890356064,
-0.009585965424776077,
-0.06938594579696655,
-0.01536406110972166,
-0.10783081501722336,
-0.009180211462080479,
0.013545298017561436,
0.0329977348446846,
-0.04237327724695206,
0.026513012126088142,
0.0023819475900381804,
0.03123343735933304,
-0.015603634528815746,
-0.04151257872581482,
-0.10115857422351837,
0.022084278985857964,
0.07292763143777847,
0.020574532449245453,
-0.013268973678350449,
-0.02566825971007347,
-0.005668026395142078,
0.046457771211862564,
0.01101785060018301,
-0.10656194388866425,
-0.041993528604507446,
0.01058399211615324,
-0.039469052106142044,
-0.004512721672654152,
0.0689220130443573,
0.001991228898987174,
-0.0059066759422421455,
-0.023710034787654877,
-0.04534979537129402,
-0.1149853989481926,
-0.017080286517739296,
-0.09374896436929703,
-0.008826559409499168,
0.007984125055372715,
0.02194833569228649,
-0.021839339286088943,
-0.06286554038524628,
0.03381817415356636,
0.028787605464458466,
-0.04268884286284447,
0.0409373976290226,
0.0993923768401146,
0.035729631781578064,
-0.08664371073246002,
-0.013048388995230198,
-0.021792840212583542,
-0.0004346577334217727,
-0.011128273792564869,
-0.016841895878314972,
0.050571244210004807,
0.05316338688135147,
-0.03648548200726509,
-0.09117937833070755,
-0.08794542402029037,
-0.014212832786142826,
-0.01793479546904564,
0.048929549753665924,
-0.00911832507699728,
0.0010435791919007897,
-0.009540033526718616,
0.030530015006661415,
-0.03997928276658058,
0.04714595898985863,
0.03742978721857071,
0.002173587679862976,
0.006454466376453638,
0.05177522450685501,
0.037306930869817734,
0.009858694858849049,
-0.05097139999270439,
-0.06777581572532654,
0.04498052969574928,
-0.07687928527593613,
-0.04755253344774246,
-0.05751549452543259,
-0.001970759592950344,
-0.033254820853471756,
-0.061967138200998306,
0.09065336734056473,
-0.06507948040962219,
0.036972273141145706,
0.03111557476222515,
0.033406585454940796,
0.00314628635533154,
-0.053091056644916534,
-0.06330635398626328,
-0.024301081895828247,
-3.516510084926089e-33,
0.045602936297655106,
-0.005279093515127897,
-0.04617561027407646,
0.0061385901644825935,
0.04457348957657814,
-0.03489430248737335,
0.08543170988559723,
0.14512908458709717,
-0.036986224353313446,
-0.06317529082298279,
0.06606868654489517,
-0.07997587323188782,
0.05842277407646179,
0.0415269136428833,
0.06738739460706711,
0.025680389255285263,
-0.0024829169269651175,
-0.009683933109045029,
0.02157471887767315,
0.0725954994559288,
0.05690842866897583,
0.029201136901974678,
-0.12961943447589874,
0.1084604561328888,
0.017411701381206512,
0.029301637783646584,
-0.01898660883307457,
-0.017648965120315552,
-0.06635848432779312,
-0.032060932368040085,
-0.03636614605784416,
0.0138102937489748,
-0.05465959757566452,
0.03562458977103233,
-0.06835925579071045,
-0.009673694148659706,
0.008555198088288307,
0.030586708337068558,
-0.03539172187447548,
0.07320947200059891,
0.015271618962287903,
0.07062766700983047,
-0.0824694037437439,
0.058657072484493256,
-0.045097578316926956,
-0.035485316067934036,
-0.04257799685001373,
0.004090461414307356,
0.04037735238671303,
-0.04013471677899361,
-0.005188752431422472,
0.015357338823378086,
-0.08696968853473663,
0.04409101977944374,
0.007346226368099451,
-0.04893103614449501,
0.04700315743684769,
-0.14048674702644348,
-0.06090641766786575,
0.0038875630125403404,
-0.06821027398109436,
0.03338763862848282,
0.047151777893304825,
0.06415125727653503,
0.037326615303754807,
-0.08263691514730453,
-0.08157132565975189,
0.06445472687482834,
-0.06330469250679016,
-0.005044972989708185,
0.09940878301858902,
0.02798326127231121,
-0.016260143369436264,
0.009623398073017597,
-0.0036443171557039022,
-0.06394487619400024,
0.035751745104789734,
0.02858678065240383,
-0.010488064028322697,
-0.03473861515522003,
-0.03470483049750328,
-0.026890747249126434,
0.06327470391988754,
0.03517064452171326,
0.025891806930303574,
0.050917044281959534,
0.07547315955162048,
0.05365739017724991,
0.020601121708750725,
-0.015486008487641811,
0.008763936348259449,
0.04455118253827095,
0.01088728103786707,
0.08009228110313416,
0.02378578670322895,
-5.53122490032365e-8,
-0.08853146433830261,
0.00890200212597847,
-0.030433572828769684,
0.0031456798315048218,
-0.05292802304029465,
0.019063841551542282,
0.015706855803728104,
0.017131950706243515,
-0.01511883083730936,
0.09922764450311661,
0.11309297382831573,
0.05474663898348808,
-0.1006304919719696,
-0.0851123183965683,
0.008661272004246712,
0.027749203145503998,
-0.004760906100273132,
0.10018279403448105,
-0.008014308288693428,
0.037378888577222824,
0.038700081408023834,
0.0582333505153656,
-0.0017021760577335954,
0.015449422411620617,
0.1000213772058487,
-0.032312337309122086,
-0.03812330961227417,
0.046681616455316544,
0.08379269391298294,
-0.03721173107624054,
0.0075594354420900345,
0.05620187148451805,
-0.006673057563602924,
-0.0063216001726686954,
0.03296308219432831,
0.03843926265835762,
-0.03876807913184166,
-0.0021615936420857906,
-0.02533741667866707,
0.018878581002354622,
0.04206154868006706,
-0.008383904583752155,
-0.1431787759065628,
-0.0231014471501112,
0.09340083599090576,
-0.022339949384331703,
-0.07560502737760544,
-0.13321758806705475,
0.11492202430963516,
-0.03150356933474541,
-0.023547783493995667,
-0.025669658556580544,
-0.05379829183220863,
0.0697614774107933,
0.007165024988353252,
-0.038601312786340714,
-0.031767792999744415,
0.08242756873369217,
0.056115295737981796,
-0.07605449110269547,
0.08795388787984848,
-0.005647074896842241,
-0.017153911292552948,
0.07305824011564255
] |
Averium/DialoGPT-medium-TailsBot1.1 | 462a773376d390ff76c8e078388a2afde248b9de | 2022-06-17T00:29:05.000Z | [
"pytorch",
"gpt2",
"text-generation",
"transformers",
"conversational"
] | conversational | false | Averium | null | Averium/DialoGPT-medium-TailsBot1.1 | 1,136 | null | transformers | ---
tags:
- conversational
---
# Miles Prower DialoGPT Model | [
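Below is a minimal chat-loop sketch; it is not part of the original card and simply follows the standard DialoGPT usage pattern from the `transformers` documentation. The prompts and the number of turns are arbitrary.
```python
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

tokenizer = AutoTokenizer.from_pretrained("Averium/DialoGPT-medium-TailsBot1.1")
model = AutoModelForCausalLM.from_pretrained("Averium/DialoGPT-medium-TailsBot1.1")

# chat for a few turns, appending each exchange to the running history
chat_history_ids = None
for step in range(3):
    user_input = input(">> User: ")
    new_input_ids = tokenizer.encode(user_input + tokenizer.eos_token, return_tensors="pt")
    bot_input_ids = (
        torch.cat([chat_history_ids, new_input_ids], dim=-1)
        if chat_history_ids is not None
        else new_input_ids
    )
    chat_history_ids = model.generate(
        bot_input_ids,
        max_length=1000,
        pad_token_id=tokenizer.eos_token_id,
    )
    reply = tokenizer.decode(
        chat_history_ids[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True
    )
    print(f"Bot: {reply}")
```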
-0.009194874204695225,
-0.06014004722237587,
-0.0014087907038629055,
-0.02506985329091549,
0.021449608728289604,
0.012041029520332813,
0.1032983586192131,
0.05375879630446434,
0.06340494006872177,
-0.04309028387069702,
0.01988186687231064,
-0.007458957377821207,
0.016072364524006844,
0.006502524949610233,
0.007523313630372286,
0.04136980324983597,
-0.004825921263545752,
-0.07989992201328278,
-0.11567440629005432,
0.029612651094794273,
-0.005527115892618895,
0.09135060012340546,
0.056523311883211136,
0.0474315881729126,
-0.058092739433050156,
0.010709033347666264,
-0.03571869432926178,
0.019524965435266495,
0.024053888395428658,
0.012276916764676571,
0.023273762315511703,
0.046759217977523804,
0.09054192900657654,
0.04930047318339348,
-0.04430544003844261,
0.03195890039205551,
0.01675555296242237,
0.022060705348849297,
-0.051790181547403336,
-0.013457768596708775,
-0.060404032468795776,
-0.04892764613032341,
-0.09215877950191498,
-0.0009478094289079309,
0.025216830894351006,
-0.07116343826055527,
-0.0888446643948555,
-0.009531842544674873,
-0.03081180900335312,
0.06854593008756638,
-0.08600316196680069,
-0.03367479145526886,
0.0032947687432169914,
0.06602048873901367,
-0.03770674392580986,
0.0862036943435669,
-0.03734052926301956,
-0.01104723010212183,
0.0065208603627979755,
0.006372122559696436,
-0.04215046390891075,
-0.07245749235153198,
-0.13351507484912872,
0.008749817498028278,
-0.03488966077566147,
0.03944374620914459,
-0.10832931846380234,
0.020690882578492165,
-0.01868283376097679,
0.07741814851760864,
-0.026839889585971832,
0.06851521879434586,
-0.021675895899534225,
0.026478236541152,
0.01924237236380577,
0.03835661709308624,
-0.014132251031696796,
-0.07388656586408615,
0.03382521867752075,
-0.030692705884575844,
-0.014544142410159111,
-0.10569526255130768,
0.003773626871407032,
-0.025983603671193123,
0.06001043692231178,
-0.06288029998540878,
0.02426331676542759,
-0.007598136551678181,
-0.023413848131895065,
-0.02963513880968094,
-0.07806715369224548,
-0.08904174715280533,
0.07257995754480362,
-0.02169920690357685,
-0.059177808463573456,
0.07721468061208725,
-0.05777432397007942,
-0.1145528107881546,
-0.09470003843307495,
0.0915919840335846,
-0.008048624731600285,
0.03215864673256874,
0.006092048715800047,
-0.10233741253614426,
-0.009467202238738537,
0.05246387794613838,
-0.016974322497844696,
0.06301217526197433,
0.03658061474561691,
-0.004845981020480394,
0.014887021854519844,
-0.029848933219909668,
0.034252725541591644,
-0.016168847680091858,
0.06625166535377502,
-0.08713465929031372,
0.06791799515485764,
-0.008662973530590534,
0.0714549571275711,
-0.00039942440344020724,
-0.05194214731454849,
0.015679100528359413,
-0.06718742102384567,
-0.03170030564069748,
0.003294057445600629,
-0.03687215596437454,
-0.01769387535750866,
-1.28255865180167e-33,
0.09882711619138718,
0.031496305018663406,
0.06592467427253723,
0.10265303403139114,
0.028512293472886086,
0.06572932749986649,
-0.09574659913778305,
0.011306201107800007,
0.031654562801122665,
-0.042983438819646835,
0.0062636034563183784,
-0.07229186594486237,
0.0010512382723391056,
0.006029731128364801,
0.04613382741808891,
-0.012248251587152481,
-0.09438136965036392,
0.028893930837512016,
-0.04919515922665596,
-0.059262096881866455,
0.031851064413785934,
0.04167725518345833,
0.01801237091422081,
0.06230780482292175,
0.11498837918043137,
0.014676320366561413,
0.10321303457021713,
-0.06284497678279877,
-0.014438706450164318,
0.04109693318605423,
-0.07993654161691666,
0.03235476464033127,
-0.04879578575491905,
0.03846951201558113,
-0.008188380859792233,
0.05468423292040825,
-0.00011640830780379474,
-0.04570542275905609,
0.01325968373566866,
-0.05008167400956154,
-0.06038268655538559,
-0.05048193037509918,
0.016855232417583466,
-0.030309196561574936,
-0.03228000923991203,
0.04192054271697998,
0.015047194436192513,
0.08804001659154892,
0.032697755843400955,
0.006548718549311161,
-0.0129404840990901,
0.016635842621326447,
0.003139674197882414,
-0.057982683181762695,
-0.036134857684373856,
-0.07963608205318451,
-0.03256489709019661,
0.014521893113851547,
0.022117964923381805,
0.004908863455057144,
0.050612129271030426,
0.03046829253435135,
0.08972707390785217,
-0.024858487769961357,
0.06561646610498428,
0.010965117253363132,
-0.08262591809034348,
-0.0016928967088460922,
0.026144491508603096,
0.014488115906715393,
-0.033970847725868225,
0.016097085550427437,
-0.0028063689824193716,
0.03674476593732834,
-0.030739925801753998,
0.058635175228118896,
0.0007103094249032438,
-0.09243886172771454,
0.060412321239709854,
0.08427747339010239,
-0.05983410403132439,
-0.078403040766716,
-0.047376424074172974,
-0.013495182618498802,
-0.02010282129049301,
-0.07243778556585312,
0.041351329535245895,
-0.08506695926189423,
0.03169827535748482,
0.015994934365153313,
0.04519636183977127,
0.05358613282442093,
-0.09503021091222763,
0.014767191372811794,
-0.061888448894023895,
-5.6714796852767746e-34,
-0.021224431693553925,
0.012997436337172985,
-0.06642311066389084,
0.08377799391746521,
0.06984899193048477,
-0.02002422884106636,
0.02074245922267437,
0.12713538110256195,
-0.006248153746128082,
-0.022776104509830475,
-0.043390221893787384,
0.017918050289154053,
0.009501725435256958,
-0.013991555199027061,
0.10860419273376465,
0.013946257531642914,
0.04630668833851814,
-0.060454368591308594,
-0.004754974506795406,
0.018513759598135948,
0.05928230658173561,
0.03126157447695732,
-0.17203237116336823,
0.039754971861839294,
-0.005853703711181879,
-0.03603823482990265,
0.006963400635868311,
0.06276591867208481,
0.07026300579309464,
-0.04589183256030083,
-0.028592616319656372,
0.02261805348098278,
-0.02002885565161705,
-0.020430536940693855,
-0.0020984169095754623,
0.09202147275209427,
0.02454490214586258,
-0.028799917548894882,
0.033849433064460754,
0.04707340896129608,
0.024588845670223236,
-0.06951435655355453,
0.024159112945199013,
0.0170889925211668,
-0.04198622331023216,
-0.07660583406686783,
-0.027792245149612427,
-0.07825925201177597,
-0.09851869940757751,
-0.010799252428114414,
0.011354255490005016,
-0.013181074522435665,
-0.07738608866930008,
-0.007869207300245762,
-0.05239589512348175,
-0.02412739023566246,
-0.03094353713095188,
-0.046730343252420425,
0.02423219382762909,
0.031838007271289825,
-0.05965365841984749,
-0.03235212713479996,
0.08389304578304291,
0.0017379461787641048,
0.027004355564713478,
-0.12317728251218796,
-0.012133008800446987,
0.013812030665576458,
-0.02067134529352188,
-0.03797880560159683,
0.07384379953145981,
0.031116334721446037,
0.026913223788142204,
0.01602022536098957,
0.034363895654678345,
0.0015021087601780891,
0.03643917292356491,
-0.02053282968699932,
0.007834420539438725,
-0.08495594561100006,
0.021166596561670303,
-0.03319655731320381,
0.06674892455339432,
0.0580953024327755,
0.022549772635102272,
0.05347394570708275,
0.007183999754488468,
0.08818893134593964,
0.010379529558122158,
0.05130667984485626,
0.01613570749759674,
0.021335693076252937,
-0.0016096032923087478,
0.08444426208734512,
-0.07002002745866776,
-2.6005301023701577e-8,
-0.05411352589726448,
0.027101688086986542,
0.017403243109583855,
0.0716107115149498,
0.010616053827106953,
-0.0015329252928495407,
0.08436890691518784,
0.032538771629333496,
-0.06924866884946823,
0.02190091274678707,
0.023789992555975914,
0.07931745052337646,
-0.015084393322467804,
0.0322086326777935,
-0.029422780498862267,
0.040514931082725525,
-0.0617208257317543,
0.06223607808351517,
-0.020459285005927086,
0.015815285965800285,
0.08190684020519257,
0.0027033428195863962,
-0.07834362238645554,
0.11735846847295761,
0.04268955439329147,
-0.013370758853852749,
-0.07980860024690628,
0.08371716737747192,
-0.01402314379811287,
0.023831775411963463,
0.023929046466946602,
0.04489794746041298,
-0.10182139277458191,
-0.0284534003585577,
-0.020605498924851418,
0.03707059472799301,
-0.02278093434870243,
0.007391370367258787,
-0.023494016379117966,
-0.01746957190334797,
0.019353296607732773,
0.014771034009754658,
-0.10620903223752975,
0.02144274115562439,
0.08043365925550461,
-0.010898786596953869,
-0.0289690550416708,
-0.10643883049488068,
-0.04116936773061752,
-0.00869776215404272,
-0.062167033553123474,
-0.028344467282295227,
0.05055800825357437,
0.021265339106321335,
0.02567044273018837,
-0.009754770435392857,
-0.01576678641140461,
0.006849208846688271,
0.05321710929274559,
0.012603840790688992,
0.05863475427031517,
0.06780228763818741,
-0.0022427409421652555,
-0.03560328483581543
] |
sentence-transformers/sentence-t5-xl | e0976ba9afd18be963c22c680367a3928c44fd22 | 2022-02-09T14:02:31.000Z | [
"pytorch",
"t5",
"en",
"arxiv:2108.08877",
"sentence-transformers",
"feature-extraction",
"sentence-similarity",
"transformers",
"license:apache-2.0"
] | sentence-similarity | false | sentence-transformers | null | sentence-transformers/sentence-t5-xl | 1,135 | 1 | sentence-transformers | ---
pipeline_tag: sentence-similarity
language: en
license: apache-2.0
tags:
- sentence-transformers
- feature-extraction
- sentence-similarity
- transformers
---
# sentence-transformers/sentence-t5-xl
This is a [sentence-transformers](https://www.SBERT.net) model: it maps sentences & paragraphs to a 768-dimensional dense vector space. The model works well for sentence similarity tasks, but doesn't perform as strongly on semantic search tasks.
This model was converted from the TensorFlow model [st5-3b-1](https://tfhub.dev/google/sentence-t5/st5-3b/1) to PyTorch. When using this model, have a look at the publication: [Sentence-T5: Scalable sentence encoders from pre-trained text-to-text models](https://arxiv.org/abs/2108.08877). The TF Hub model and this PyTorch model can produce slightly different embeddings; however, they produce identical results when run on the same benchmarks.
The model uses only the encoder from a T5-3B model. The weights are stored in FP16.
## Usage (Sentence-Transformers)
Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed:
```
pip install -U sentence-transformers
```
Then you can use the model like this:
```python
from sentence_transformers import SentenceTransformer
sentences = ["This is an example sentence", "Each sentence is converted"]
model = SentenceTransformer('sentence-transformers/sentence-t5-xl')
embeddings = model.encode(sentences)
print(embeddings)
```
The model requires sentence-transformers version 2.2.0 or newer.
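As a quick illustration of the sentence-similarity use case, the sketch below compares two arbitrary example sentences with cosine similarity via `sentence_transformers.util`; it is not part of the original card.
```python
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer('sentence-transformers/sentence-t5-xl')
sentences = ["A man is eating food.", "A man is eating a piece of bread."]
embeddings = model.encode(sentences, convert_to_tensor=True)

# cosine similarity between the two sentence embeddings
score = util.cos_sim(embeddings[0], embeddings[1])
print(score)
```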
## Evaluation Results
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name=sentence-transformers/sentence-t5-xl)
## Citing & Authors
If you find this model helpful, please cite the respective publication:
[Sentence-T5: Scalable sentence encoders from pre-trained text-to-text models](https://arxiv.org/abs/2108.08877)
| [
-0.03533744066953659,
-0.10067413002252579,
0.006124476436525583,
0.010074365884065628,
0.038293857127428055,
0.022046267986297607,
-0.10002962499856949,
-0.015228284522891045,
0.032407451421022415,
-0.08806859701871872,
-0.00878337025642395,
-0.008468633517622948,
-0.0119567159563303,
0.060890235006809235,
0.012510891072452068,
0.009628208354115486,
0.10200076550245285,
0.01266881451010704,
-0.10533720254898071,
-0.08306507021188736,
0.04330085963010788,
0.11148466169834137,
0.04871223494410515,
0.003475118661299348,
0.014120668172836304,
0.026364123448729515,
-0.03513678163290024,
-0.07860098779201508,
-0.003107925644144416,
0.0058587826788425446,
0.061511535197496414,
0.036752570420503616,
-0.07640868425369263,
0.06761857867240906,
-0.009844968095421791,
0.03297705948352814,
-0.06870817393064499,
-0.020825134590268135,
-0.006695969495922327,
-0.07079780846834183,
0.030886709690093994,
-0.032407745718955994,
-0.07046564668416977,
0.05374816432595253,
0.03998596593737602,
-0.016734356060624123,
-0.03734029456973076,
-0.0430968701839447,
0.03048505075275898,
-0.03047569841146469,
-0.11601916700601578,
0.024899950250983238,
-0.01555725559592247,
0.039709024131298065,
-0.053337953984737396,
0.02702704258263111,
0.02054591290652752,
0.026117412373423576,
0.007958140224218369,
-0.08661772310733795,
-0.05545853450894356,
-0.06293866038322449,
0.021155180409550667,
0.007261686027050018,
-0.014554417692124844,
-0.014624988660216331,
0.03970818594098091,
-0.06986193358898163,
0.00763635104522109,
-0.022318148985505104,
0.02148767001926899,
0.061496756970882416,
-0.0652742013335228,
0.032949790358543396,
-0.04761431738734245,
0.04593733698129654,
0.09636753052473068,
0.021837230771780014,
0.09424099326133728,
-0.019290484488010406,
-0.002948017790913582,
-0.08007479459047318,
0.05221741646528244,
0.024040477350354195,
0.025052350014448166,
-0.040769949555397034,
-0.004653157200664282,
-0.02813822217285633,
0.02636583335697651,
-0.02813180536031723,
0.04133051633834839,
-0.09664665907621384,
0.008764807134866714,
-0.006786845158785582,
0.020102927461266518,
-0.020665349438786507,
-0.036018285900354385,
0.0029104365967214108,
-0.006866608280688524,
0.013889719732105732,
-0.000935413409024477,
0.04263079911470413,
0.05329318717122078,
-0.03669479861855507,
-0.008474173955619335,
0.0362730398774147,
0.007200603373348713,
0.030408896505832672,
0.01810602657496929,
-0.1051890105009079,
0.0229866411536932,
0.03829985484480858,
0.022948283702135086,
-0.0004979431396350265,
0.06737219542264938,
-0.012979920022189617,
-0.023966163396835327,
-0.022237777709960938,
0.05320160835981369,
0.08397578448057175,
-0.08304516971111298,
0.077054962515831,
-0.04245101287961006,
-0.0017553019570186734,
-0.06421801447868347,
0.002754678949713707,
-0.04742638021707535,
1.243397975858987e-33,
-0.011060222052037716,
0.019941380247473717,
0.04455900192260742,
-0.015032914467155933,
0.029284195974469185,
-0.0005859141820110381,
0.03544625639915466,
0.08586452156305313,
-0.05955810472369194,
0.005392308346927166,
-0.11462295800447464,
-0.01519445888698101,
-0.03202233463525772,
0.0547177754342556,
0.0020140937995165586,
-0.04623757675290108,
-0.08879842609167099,
-0.0342315137386322,
-0.02409852296113968,
0.06732666492462158,
0.08303474634885788,
0.04195120558142662,
-0.004678639583289623,
-0.0014808929990977049,
-0.06536916643381119,
0.0038452723529189825,
0.03629879653453827,
-0.05672716721892357,
-0.02544247731566429,
0.02043186128139496,
-0.15089936554431915,
0.022688003256917,
0.03137139603495598,
-0.0025212338659912348,
0.0069700805470347404,
-0.023410087451338768,
0.039802875369787216,
-0.008110868744552135,
-0.05566978454589844,
-0.13852457702159882,
-0.016651352867484093,
0.0365896038711071,
-0.0035221856087446213,
-0.09649140387773514,
-0.044087618589401245,
-0.010009009391069412,
-0.016205113381147385,
-0.012536273337900639,
0.04747027903795242,
-0.012698663398623466,
0.10118982195854187,
0.045008063316345215,
-0.025765808299183846,
0.015382645651698112,
0.04970287159085274,
-0.03847530484199524,
0.09891654551029205,
0.04439159110188484,
0.07463718205690384,
0.0785718709230423,
0.03963642939925194,
0.017521148547530174,
0.06940929591655731,
0.026991015300154686,
0.0885225310921669,
0.01621955633163452,
0.017377888783812523,
0.06870846450328827,
0.02362740784883499,
0.04825228080153465,
0.013586047105491161,
0.05758002772927284,
-0.015475299209356308,
-0.03645901754498482,
0.07807070761919022,
-0.06833802163600922,
0.012593340128660202,
-0.09409269690513611,
-0.016951384022831917,
0.03588489070534706,
-0.09384369105100632,
-0.038619183003902435,
0.08288154006004333,
-0.05369899421930313,
-0.08890458941459656,
-0.03411643207073212,
-0.006292406003922224,
-0.042769405990839005,
0.06497050076723099,
0.008531796745955944,
0.0906129777431488,
0.047814078629016876,
0.03567765653133392,
0.041724346578121185,
0.0486472025513649,
-7.423950744520797e-34,
-0.0317121185362339,
-0.03767988830804825,
-0.07131367921829224,
0.08014779537916183,
-0.007691340520977974,
-0.03242049366235733,
0.014852591790258884,
0.03258642554283142,
-0.009225822985172272,
-0.021176498383283615,
0.0011523799039423466,
-0.037662822753190994,
0.01040161494165659,
-0.04200093820691109,
0.10712605714797974,
0.040072109550237656,
0.0266816858202219,
0.032015714794397354,
0.03756356239318848,
0.07561256736516953,
0.032104313373565674,
0.09901627898216248,
-0.17292578518390656,
0.0726444274187088,
-0.008264296688139439,
0.03525843471288681,
-0.015750089660286903,
0.0035748209338635206,
-0.002530203666538,
-0.04688303545117378,
-0.010740802623331547,
-0.023990947753190994,
-0.045966990292072296,
-0.021360961720347404,
-0.07258276641368866,
0.037077054381370544,
-0.014640293084084988,
-0.000038482834497699514,
0.023456595838069916,
0.04902234673500061,
-0.002135489135980606,
0.06221349909901619,
-0.04422496631741524,
0.01804661750793457,
-0.06131859868764877,
0.019345371052622795,
-0.10062577575445175,
-0.06035890057682991,
0.09415575861930847,
0.009886923246085644,
0.02160460129380226,
0.04546437785029411,
-0.08378880470991135,
0.014520080760121346,
-0.06105531007051468,
-0.08501894772052765,
-0.03143127262592316,
-0.024981319904327393,
-0.0759669691324234,
-0.010166708379983902,
-0.03362516686320305,
0.018251705914735794,
0.019457394257187843,
-0.05176100134849548,
0.05179103836417198,
-0.03555704653263092,
-0.007536022923886776,
0.08433595299720764,
-0.0029901238158345222,
-0.01191423088312149,
-0.04025261476635933,
-0.03824016451835632,
0.0621100552380085,
0.09770044684410095,
0.03027983196079731,
0.006062221713364124,
0.054262351244688034,
0.021539393812417984,
-0.001275339862331748,
-0.07177850604057312,
0.03702735900878906,
0.0005153847159817815,
-0.0006753262714482844,
0.043049801141023636,
0.04899102449417114,
0.05721547082066536,
0.032346487045288086,
0.004940219689160585,
-0.009587917476892471,
0.08162208646535873,
0.0182962529361248,
-0.04717905819416046,
-0.05704541504383087,
0.09148689359426498,
0.07718047499656677,
-4.899902350530283e-8,
-0.0768904760479927,
0.012735314667224884,
-0.1454065591096878,
0.03953668475151062,
-0.07823483645915985,
-0.09386595338582993,
0.05929629132151604,
0.11163575947284698,
-0.028787216171622276,
-0.013160054571926594,
-0.026322398334741592,
-0.009811167605221272,
-0.07219921052455902,
0.02005988173186779,
0.007202674634754658,
0.036246899515390396,
-0.047598425298929214,
-0.03351244702935219,
0.04976941645145416,
-0.02314191684126854,
0.0020526605658233166,
0.020688248798251152,
-0.01974800042808056,
-0.026665272191166878,
0.0049489825032651424,
0.00219164602458477,
-0.054518721997737885,
0.07149531692266464,
0.03932022303342819,
0.010915379039943218,
-0.022317031398415565,
0.019758321344852448,
-0.08539371937513351,
-0.05812869966030121,
0.06558656692504883,
0.0871843695640564,
0.04499220848083496,
-0.1049184799194336,
-0.0370531789958477,
0.09546003490686417,
0.034919820725917816,
0.03114013932645321,
-0.037071309983730316,
-0.0407465361058712,
0.0861414298415184,
0.005632900167256594,
-0.0010135191259905696,
-0.05731375887989998,
0.05748242512345314,
0.04180985316634178,
0.04968946799635887,
0.005949168000370264,
-0.07517752796411514,
0.039126474410295486,
0.07021407783031464,
-0.0007449255208484828,
-0.031158749014139175,
-0.03188306838274002,
0.058532942086458206,
-0.032453011721372604,
0.07249140739440918,
0.013704453594982624,
0.03064090944826603,
-0.04591057822108269
] |
DeepPavlov/bert-base-bg-cs-pl-ru-cased | 0ab00895c22312978e0a8abd16bbec3fbf7f2bc8 | 2021-11-08T12:58:09.000Z | [
"pytorch",
"jax",
"bert",
"feature-extraction",
"bg",
"cs",
"pl",
"ru",
"transformers"
] | feature-extraction | false | DeepPavlov | null | DeepPavlov/bert-base-bg-cs-pl-ru-cased | 1,131 | null | transformers | ---
language:
- bg
- cs
- pl
- ru
---
# bert-base-bg-cs-pl-ru-cased
SlavicBERT\[1\] \(Slavic \(bg, cs, pl, ru\), cased, 12‑layer, 768‑hidden, 12‑heads, 180M parameters\) was trained on Russian News and four Wikipedias: Bulgarian, Czech, Polish, and Russian. Subtoken vocabulary was built using this data. Multilingual BERT was used as an initialization for SlavicBERT.
08.11.2021: uploaded the model with MLM and NSP heads
\[1\]: Arkhipov M., Trofimova M., Kuratov Y., Sorokin A. \(2019\). [Tuning Multilingual Transformers for Language-Specific Named Entity Recognition](https://www.aclweb.org/anthology/W19-3712/). ACL anthology W19-3712.
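A minimal feature-extraction sketch, not part of the original card (the Bulgarian example sentence is arbitrary):
```python
from transformers import AutoTokenizer, AutoModel

tokenizer = AutoTokenizer.from_pretrained("DeepPavlov/bert-base-bg-cs-pl-ru-cased")
model = AutoModel.from_pretrained("DeepPavlov/bert-base-bg-cs-pl-ru-cased")

inputs = tokenizer("Това е примерно изречение.", return_tensors="pt")
outputs = model(**inputs)
print(outputs.last_hidden_state.shape)  # (1, sequence_length, 768)
```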
| [
-0.11369168013334274,
-0.08308153599500656,
-0.009941669180989265,
-0.021518930792808533,
-0.05169115215539932,
0.07147455960512161,
0.016056600958108902,
0.06768076866865158,
0.027116650715470314,
-0.032381847500801086,
-0.022166093811392784,
0.00503045879304409,
0.05182109773159027,
0.054360341280698776,
-0.012309717945754528,
0.06196288391947746,
0.016206884756684303,
0.09088680893182755,
-0.12235618382692337,
-0.08598006516695023,
0.04911745339632034,
0.023407362401485443,
0.09745361655950546,
-0.05835796892642975,
0.07969580590724945,
-0.05225008726119995,
-0.051748596131801605,
-0.0572509691119194,
0.0994122326374054,
0.034485042095184326,
0.0692410096526146,
0.031679484993219376,
0.03662922978401184,
0.07781008630990982,
0.036161892116069794,
0.03989611566066742,
-0.07083369791507721,
-0.05511474609375,
0.01818278431892395,
0.10152092576026917,
-0.0020496470388025045,
-0.03226219490170479,
-0.10449539870023727,
0.05049047991633415,
0.08668707311153412,
0.06244661286473274,
-0.07189848273992538,
0.04683065414428711,
-0.060212668031454086,
0.02436187118291855,
-0.0745377242565155,
-0.009881793521344662,
0.027225544676184654,
0.1071079671382904,
-0.019430045038461685,
-0.06119217723608017,
-0.032650694251060486,
0.0255288016051054,
-0.010832054540514946,
-0.06833431869745255,
-0.10025066137313843,
-0.09001990407705307,
-0.0002831706660799682,
0.0019023303175345063,
-0.07818098366260529,
0.02141408436000347,
-0.03958567604422569,
0.044743895530700684,
0.008497877046465874,
0.020286282524466515,
0.023303940892219543,
0.08073510974645615,
-0.06223100423812866,
0.05463313311338425,
-0.021995574235916138,
-0.03610387071967125,
0.05599558725953102,
-0.01825990155339241,
-0.004003222566097975,
-0.14583911001682281,
0.07338538020849228,
0.041553348302841187,
0.03848383203148842,
-0.030151743441820145,
0.05840468406677246,
-0.039058834314346313,
0.05734439566731453,
-0.032861679792404175,
-0.028462519869208336,
0.032607320696115494,
-0.03653329983353615,
-0.0890815258026123,
0.14776846766471863,
-0.047718606889247894,
-0.0032899524085223675,
-0.016737112775444984,
0.0584503710269928,
0.014125673100352287,
0.031415846198797226,
0.033029764890670776,
-0.04782577604055405,
0.02679363638162613,
0.04553862661123276,
-0.0034345826134085655,
-0.08974973857402802,
-0.04958634078502655,
-0.040001604706048965,
-0.009971799328923225,
0.06421376764774323,
-0.09678735584020615,
-0.010566755197942257,
-0.010057828389108181,
-0.016538521274924278,
-0.06616219878196716,
0.02357252687215805,
-0.04372495040297508,
0.013227539137005806,
-0.008552366867661476,
0.030642854049801826,
0.02523832768201828,
-0.05873648449778557,
0.009218976832926273,
-0.012752754613757133,
0.019552960991859436,
-0.030253097414970398,
0.03239612653851509,
-0.033498916774988174,
5.828234612023811e-34,
0.04213334992527962,
0.04372590780258179,
-0.0340644009411335,
0.021624069660902023,
-0.10438622534275055,
-0.01752038672566414,
-0.031807392835617065,
-0.013797004707157612,
-0.08381320536136627,
-0.004192744381725788,
-0.05771784111857414,
0.0697568729519844,
-0.07375278323888779,
0.022541813552379608,
-0.01715550757944584,
0.02774004638195038,
0.029329130426049232,
0.06558466702699661,
0.002322376938536763,
0.031110092997550964,
0.12088752537965775,
0.09275031834840775,
0.0017838061321526766,
-0.03472186252474785,
-0.030700117349624634,
0.0319073311984539,
0.1060231402516365,
-0.1276654154062271,
0.021336430683732033,
0.012609440833330154,
-0.07877132296562195,
-0.002274637809023261,
-0.06115421652793884,
0.09041744470596313,
0.004570612218230963,
-0.03688906505703926,
-0.04869142547249794,
-0.057585302740335464,
0.00521899713203311,
-0.014156566932797432,
0.021427568048238754,
-0.015671033412218094,
-0.004393726587295532,
-0.01592947542667389,
0.006695744581520557,
-0.025755902752280235,
-0.0151068065315485,
-0.0377618707716465,
0.05152321606874466,
-0.029576217755675316,
0.009388345293700695,
0.05867868661880493,
-0.01683100499212742,
0.07611788064241409,
0.04982742667198181,
0.049016114324331284,
0.013361777178943157,
0.01615017093718052,
0.04648854210972786,
-0.019886678084731102,
0.0036625703796744347,
-0.011298008263111115,
0.017896205186843872,
0.059426985681056976,
0.09659070521593094,
-0.02824166975915432,
-0.043836116790771484,
0.03530801832675934,
0.0197348203510046,
0.04227687418460846,
0.02515321411192417,
0.009409140795469284,
-0.00004482145232032053,
0.06376168876886368,
-0.016645319759845734,
0.0058824713341891766,
0.006332538090646267,
-0.07779069989919662,
-0.046871770173311234,
0.0482330396771431,
-0.04499489441514015,
0.01584767922759056,
0.0012716136407107115,
-0.05004401504993439,
-0.06960327178239822,
0.001836015610024333,
0.020239371806383133,
-0.1007847860455513,
-0.003198625287041068,
0.002079208381474018,
0.015525280497968197,
-0.015052152797579765,
-0.04713990166783333,
0.024879544973373413,
-0.0715237557888031,
-9.790779691720183e-34,
-0.003431451739743352,
-0.024764791131019592,
-0.09164586663246155,
0.0704071894288063,
-0.09209243208169937,
-0.051975954324007034,
0.03705360367894173,
0.1146472841501236,
-0.027539120987057686,
0.04333619773387909,
0.05353875830769539,
-0.11571450531482697,
-0.01690828800201416,
0.0068570771254599094,
0.049627840518951416,
0.007807237096130848,
0.028950946405529976,
0.04559789225459099,
-0.038364309817552567,
0.08205832540988922,
-0.04628939554095268,
0.009528052061796188,
-0.0991661474108696,
0.08911872655153275,
0.01652826927602291,
0.0724809542298317,
-0.0111923823133111,
0.0032210415229201317,
-0.014940828084945679,
0.0576845183968544,
-0.019770538434386253,
0.004474001470953226,
-0.0361718088388443,
0.03196113929152489,
-0.015071569010615349,
0.004906740505248308,
0.026909438893198967,
-0.022389056161046028,
-0.0322408452630043,
-0.003216130193322897,
0.00544018717482686,
0.03798547387123108,
-0.045269399881362915,
0.018283426761627197,
0.02220422402024269,
-0.04479415714740753,
-0.08861479163169861,
0.03475251421332359,
0.03873767331242561,
-0.08620212227106094,
0.01992979645729065,
0.0031273234635591507,
-0.07195113599300385,
-0.00762734841555357,
0.020245850086212158,
-0.05976375564932823,
-0.0020120476838201284,
-0.0987529307603836,
-0.02588443085551262,
-0.023825237527489662,
0.0051823509857058525,
-0.05728258565068245,
0.03545321524143219,
-0.025997409597039223,
-0.025193778797984123,
-0.006367697846144438,
0.0021998551674187183,
0.08803445845842361,
0.0037740019615739584,
-0.06762142479419708,
0.05051184818148613,
-0.07110808789730072,
0.057619694620370865,
0.014409768395125866,
-0.002391369082033634,
0.027494676411151886,
0.0003343412827234715,
-0.02133486419916153,
0.010175713337957859,
-0.06269819289445877,
-0.05234689638018608,
-0.051769983023405075,
0.029727788642048836,
0.06711942702531815,
0.03269518166780472,
0.0358874574303627,
0.023550106212496758,
0.02252999320626259,
0.004807564429938793,
-0.024786250665783882,
0.016228461638092995,
0.031020475551486015,
0.02236490324139595,
0.17270545661449432,
-0.007920349948108196,
-4.973769662797167e-8,
0.005413751117885113,
0.020313555374741554,
-0.006550593767315149,
0.03431490436196327,
0.040310442447662354,
-0.09968051314353943,
-0.0815766453742981,
-0.0048584905453026295,
-0.06317301839590073,
-0.018690625205636024,
-0.03967348858714104,
0.004537791479378939,
-0.10192161053419113,
-0.0038925923872739077,
0.05782529339194298,
0.05315837264060974,
0.0047988248988986015,
0.0704750344157219,
0.02174576371908188,
-0.006242009345442057,
0.006761362310498953,
0.05050529167056084,
-0.005349176935851574,
-0.04291486367583275,
-0.017093870788812637,
-0.027610983699560165,
-0.0098820049315691,
0.06479203701019287,
0.06727907806634903,
-0.07906889170408249,
-0.005250385031104088,
0.04774487391114235,
-0.11293661594390869,
0.0025499474722892046,
0.06846503168344498,
0.11308377236127853,
0.027399219572544098,
0.014025984331965446,
-0.035894762724637985,
0.042012766003608704,
0.07807137817144394,
0.021266482770442963,
-0.023513471707701683,
0.047078199684619904,
0.05350248143076897,
0.007134385872632265,
-0.05384645611047745,
-0.11716849356889725,
-0.00045566854532808065,
-0.04366237670183182,
0.0243337731808424,
0.01577274315059185,
0.02085450105369091,
0.09379152208566666,
-0.04093821346759796,
0.03909327834844589,
-0.03417087718844414,
-0.04370676353573799,
0.008668872527778149,
0.005840696394443512,
-0.05684517323970795,
-0.007961357943713665,
-0.0003133204882033169,
0.04532356187701225
] |
voidful/dpr-ctx_encoder-bert-base-multilingual | c7a3dc617754e93efe785aa88dc1f52b4f7cb688 | 2021-02-21T09:00:44.000Z | [
"pytorch",
"dpr",
"multilingual",
"dataset:NQ",
"dataset:Trivia",
"dataset:SQuAD",
"dataset:MLQA",
"dataset:DRCD",
"arxiv:2004.04906",
"transformers"
] | null | false | voidful | null | voidful/dpr-ctx_encoder-bert-base-multilingual | 1,130 | 4 | transformers | ---
language: multilingual
datasets:
- NQ
- Trivia
- SQuAD
- MLQA
- DRCD
---
# dpr-ctx_encoder-bert-base-multilingual
## Description
Multilingual DPR model based on bert-base-multilingual-cased.
[DPR model](https://arxiv.org/abs/2004.04906)
[DPR repo](https://github.com/facebookresearch/DPR)
## Data
1. [NQ](https://github.com/facebookresearch/DPR/blob/master/data/download_data.py)
2. [Trivia](https://github.com/facebookresearch/DPR/blob/master/data/download_data.py)
3. [SQuAD](https://github.com/facebookresearch/DPR/blob/master/data/download_data.py)
4. [DRCD*](https://github.com/DRCKnowledgeTeam/DRCD)
5. [MLQA*](https://github.com/facebookresearch/MLQA)
`question pairs for train`: 644,217
`question pairs for dev`: 73,710
*DRCD and MLQA were converted using the haystack script [squad_to_dpr.py](https://github.com/deepset-ai/haystack/blob/master/haystack/retriever/squad_to_dpr.py)
## Training Script
I used the training script from [haystack](https://colab.research.google.com/github/deepset-ai/haystack/blob/master/tutorials/Tutorial9_DPR_training.ipynb)
## Usage
```python
from transformers import DPRContextEncoder, DPRContextEncoderTokenizer
tokenizer = DPRContextEncoderTokenizer.from_pretrained('voidful/dpr-ctx_encoder-bert-base-multilingual')
model = DPRContextEncoder.from_pretrained('voidful/dpr-ctx_encoder-bert-base-multilingual')
input_ids = tokenizer("Hello, is my dog cute ?", return_tensors='pt')["input_ids"]
embeddings = model(input_ids).pooler_output
```
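For a rough end-to-end illustration, the sketch below pairs this context encoder with the companion question encoder referenced later in this card and ranks two arbitrary passages by dot product, the similarity DPR is trained with. It is only a sketch and not part of the original card.
```python
import torch
from transformers import (
    DPRContextEncoder, DPRContextEncoderTokenizer,
    DPRQuestionEncoder, DPRQuestionEncoderTokenizer,
)

q_tokenizer = DPRQuestionEncoderTokenizer.from_pretrained("voidful/dpr-question_encoder-bert-base-multilingual")
q_encoder = DPRQuestionEncoder.from_pretrained("voidful/dpr-question_encoder-bert-base-multilingual")
ctx_tokenizer = DPRContextEncoderTokenizer.from_pretrained("voidful/dpr-ctx_encoder-bert-base-multilingual")
ctx_encoder = DPRContextEncoder.from_pretrained("voidful/dpr-ctx_encoder-bert-base-multilingual")

question = "Where is the Eiffel Tower?"
passages = ["The Eiffel Tower is in Paris.", "Dogs are cute."]

q_emb = q_encoder(**q_tokenizer(question, return_tensors="pt")).pooler_output
ctx_emb = ctx_encoder(**ctx_tokenizer(passages, return_tensors="pt", padding=True)).pooler_output

# DPR ranks passages by dot-product similarity with the question embedding
scores = torch.matmul(q_emb, ctx_emb.T)
print(scores)
```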
Follow the tutorial from `haystack`:
[Better Retrievers via "Dense Passage Retrieval"](https://colab.research.google.com/github/deepset-ai/haystack/blob/master/tutorials/Tutorial6_Better_Retrieval_via_DPR.ipynb)
```python
from haystack.retriever.dense import DensePassageRetriever
retriever = DensePassageRetriever(
    document_store=document_store,
    query_embedding_model="voidful/dpr-question_encoder-bert-base-multilingual",
    passage_embedding_model="voidful/dpr-ctx_encoder-bert-base-multilingual",
    max_seq_len_query=64,
    max_seq_len_passage=256,
    batch_size=16,
    use_gpu=True,
    embed_title=True,
    use_fast_tokenizers=True,
)
```
| [
-0.1499413400888443,
-0.07662321627140045,
-0.014785001054406166,
-0.04227795451879501,
0.0042687575332820415,
0.00805031880736351,
-0.019492048770189285,
0.018540769815444946,
-0.001619726768694818,
-0.020081492140889168,
0.07434666156768799,
-0.0458984337747097,
-0.029853004962205887,
0.02253829315304756,
0.059878379106521606,
0.0777590200304985,
-0.03415437415242195,
0.05084682255983353,
-0.0381777323782444,
-0.03918294236063957,
0.07628405839204788,
0.03272409737110138,
0.06793121248483658,
-0.022982606664299965,
0.054800037294626236,
-0.029288599267601967,
0.022163065150380135,
0.0020397151820361614,
0.04612404480576515,
-0.03006901778280735,
0.048352066427469254,
0.08961302042007446,
0.04153246432542801,
0.06580843776464462,
0.11337720602750778,
0.028798235580325127,
0.012356320396065712,
-0.0518016442656517,
-0.016032002866268158,
0.062485165894031525,
0.010967850685119629,
0.030803216621279716,
0.030833864584565163,
-0.038410816341638565,
0.044028013944625854,
0.02517758123576641,
-0.09551602602005005,
0.06488588452339172,
-0.030485810711979866,
-0.055467162281274796,
-0.03866912052035332,
-0.026763999834656715,
-0.012456811033189297,
0.05715824291110039,
0.04439328610897064,
-0.0371876135468483,
-0.006309684831649065,
-0.008361926302313805,
-0.009050263091921806,
-0.03691304102540016,
-0.05973975732922554,
-0.003872930072247982,
-0.008660217747092247,
0.008672469295561314,
-0.056649766862392426,
0.06682423502206802,
-0.046461790800094604,
0.013926422223448753,
0.017574237659573555,
-0.05941906198859215,
-0.07627533376216888,
-0.011323746293783188,
-0.047087837010622025,
0.036261241883039474,
0.026650598272681236,
-0.04885256290435791,
0.054607488214969635,
-0.05983699858188629,
-0.009844646789133549,
-0.16980747878551483,
-0.013928244821727276,
0.011483334936201572,
0.08361954987049103,
0.029488245025277138,
0.07589543610811234,
0.0033153071999549866,
0.015043799765408039,
-0.022669879719614983,
0.011508038267493248,
-0.024248166009783745,
-0.0423915795981884,
-0.08847296237945557,
0.04705570638179779,
0.05390896275639534,
-0.04487818107008934,
0.041140366345644,
0.08475511521100998,
0.015563718974590302,
0.01217387430369854,
0.02202877774834633,
0.03149621561169624,
-0.0070914835669100285,
0.05258900299668312,
-0.040887877345085144,
-0.04818426072597504,
-0.012368883937597275,
0.07620475441217422,
0.02117723785340786,
0.005666239187121391,
-0.10037492215633392,
-0.059499647468328476,
0.030591072514653206,
-0.011675418354570866,
-0.08537621051073074,
-0.03698695823550224,
-0.041840437799692154,
-0.044090814888477325,
-0.02744748257100582,
0.07862265408039093,
-0.020671408623456955,
0.009104620665311813,
-0.03366607427597046,
0.009248764254152775,
0.0043172757141292095,
-0.028911881148815155,
-0.014554889872670174,
-0.006815397180616856,
5.277445805521986e-33,
0.08787555247545242,
0.0222043227404356,
-0.014311128295958042,
0.002130824374035001,
0.02885088138282299,
-0.051924098283052444,
-0.0042152670212090015,
-0.010184281505644321,
-0.12446314841508865,
-0.010442312806844711,
-0.09905499964952469,
0.033311665058135986,
-0.12512287497520447,
0.028926607221364975,
-0.039479516446590424,
-0.03048357553780079,
0.021983157843351364,
0.09200835973024368,
0.09758105129003525,
0.08714921772480011,
0.11290710419416428,
0.09921525418758392,
-0.028872830793261528,
-0.05926612764596939,
0.0018424278823658824,
0.07403858751058578,
0.04086042195558548,
-0.04235320910811424,
0.04085303843021393,
0.022755736485123634,
-0.10593543201684952,
-0.010537594556808472,
-0.010203204117715359,
0.06819891184568405,
0.02846579998731613,
-0.024482762441039085,
-0.030401337891817093,
-0.04757126793265343,
-0.029673390090465546,
-0.01450545247644186,
0.0017747113015502691,
0.041846863925457,
-0.05594585835933685,
-0.0856710895895958,
-0.010174530558288097,
-0.009975367225706577,
-0.024729890748858452,
-0.03590085357427597,
0.06277235597372055,
-0.003558993572369218,
-0.011343060061335564,
0.008949599228799343,
-0.023355472832918167,
-0.022624311968684196,
0.004426587838679552,
-0.020352177321910858,
0.012744365260004997,
-0.004520757589489222,
0.10277768969535828,
0.051613613963127136,
-0.03992132842540741,
0.028533464297652245,
-0.0032466489356011152,
-0.04310676455497742,
0.08232820779085159,
-0.04635288193821907,
-0.012732723727822304,
-0.03430677205324173,
0.0861005187034607,
0.055834151804447174,
-0.044875770807266235,
0.0011331012938171625,
0.08388166129589081,
-0.010527212172746658,
0.061298809945583344,
-0.034220773726701736,
-0.028816992416977882,
-0.0380435585975647,
-0.04896508902311325,
-0.0012438928242772818,
-0.03998517617583275,
0.011378776282072067,
-0.09579955041408539,
0.011834310367703438,
-0.03410295397043228,
0.06362400203943253,
0.048347584903240204,
-0.12294954061508179,
-0.05597972869873047,
0.003515767166391015,
0.023563159629702568,
-0.052970536053180695,
-0.0645698681473732,
-0.0534004345536232,
0.028642643243074417,
-5.356326619249946e-33,
0.005545048508793116,
0.010290256701409817,
-0.01698235236108303,
0.028400922194123268,
-0.009357939474284649,
-0.09031561762094498,
0.015517788007855415,
0.08883609622716904,
0.0675632506608963,
-0.03151450678706169,
0.023378387093544006,
-0.06733128428459167,
-0.007490315940231085,
-0.04192584007978439,
0.07462995499372482,
0.04886600375175476,
-0.020851435139775276,
-0.008211097680032253,
-0.01542545948177576,
0.06717168539762497,
-0.011055889539420605,
0.028398945927619934,
-0.12888294458389282,
0.022926906123757362,
-0.01969645917415619,
0.06869439780712128,
-0.03215433657169342,
0.07748101651668549,
0.023556433618068695,
-0.01739632524549961,
0.0014014819171279669,
0.008367108181118965,
-0.10479100793600082,
0.001993645215407014,
-0.08704095333814621,
0.04060467705130577,
0.030149109661579132,
0.04458736628293991,
-0.06843997538089752,
0.02671537734568119,
0.08152864873409271,
0.004685806110501289,
-0.07019756734371185,
0.030780402943491936,
0.05312710627913475,
0.006449284963309765,
-0.09596240520477295,
0.05189216881990433,
0.005968277342617512,
-0.06607753038406372,
0.05293680354952812,
-0.029249342158436775,
-0.02744998224079609,
-0.024273334071040154,
-0.01942635141313076,
-0.04362209141254425,
0.08316505700349808,
0.002100166631862521,
-0.04991454631090164,
-0.0071970028802752495,
-0.06388342380523682,
-0.0014763696817681193,
-0.015488858334720135,
0.0339103601872921,
-0.028008976951241493,
-0.06593000888824463,
-0.06774977594614029,
0.019000748172402382,
-0.01989021524786949,
-0.04153618961572647,
-0.0021113897673785686,
-0.03103409893810749,
0.06431136280298233,
-0.010977237485349178,
0.006201185751706362,
-0.003815537551417947,
-0.07845351845026016,
0.00573061965405941,
0.05165807902812958,
0.057429078966379166,
-0.09516962617635727,
-0.007110779173672199,
0.07222858816385269,
0.12294431775808334,
-0.04057667776942253,
0.04613608866930008,
0.026659749448299408,
0.05333098769187927,
0.0032827488612383604,
-0.018598174676299095,
-0.000101253273896873,
0.011568947695195675,
0.0032268620561808348,
0.1679290235042572,
0.021220719441771507,
-5.082624454644247e-8,
-0.06481602787971497,
0.003405884839594364,
-0.06911721080541611,
-0.039801839739084244,
-0.017742391675710678,
-0.031858835369348526,
-0.06939776986837387,
0.04492843896150589,
0.02373129315674305,
0.03830064460635185,
0.03313513472676277,
0.06259818375110626,
-0.10046520829200745,
-0.005321978125721216,
-0.030024375766515732,
0.054174959659576416,
0.045646898448467255,
0.05063622444868088,
-0.006174586247652769,
0.004691233392804861,
0.04486340284347534,
0.0508979894220829,
0.0012658573687076569,
-0.07926683872938156,
-0.0030165293719619513,
-0.0005228071240708232,
-0.002336858306080103,
0.03967710956931114,
0.02346775308251381,
-0.039730004966259,
0.024100305512547493,
0.03798370435833931,
-0.07225032895803452,
-0.07423411309719086,
0.10456135869026184,
0.02260645478963852,
-0.037676677107810974,
-0.05199812725186348,
0.01979064755141735,
0.019016871228814125,
0.1257047802209854,
-0.006395285949110985,
-0.09536024928092957,
0.0538877509534359,
0.07556892186403275,
0.00436530401930213,
-0.04053514078259468,
-0.07885941118001938,
0.02008523792028427,
-0.018896808847784996,
-0.029044747352600098,
-0.06988067924976349,
-0.062131233513355255,
0.053286660462617874,
-0.030600817874073982,
0.05168638005852699,
-0.08330413699150085,
-0.013396957889199257,
0.055513203144073486,
-0.013309906236827374,
0.023551061749458313,
0.076687291264534,
0.039007849991321564,
0.0894918441772461
] |
Muennighoff/SGPT-1.3B-weightedmean-msmarco-specb-bitfit | 7853d0d3eef3dd556b99ae342e7461c61d8faed5 | 2022-06-18T20:51:30.000Z | [
"pytorch",
"gpt_neo",
"feature-extraction",
"arxiv:2202.08904",
"sentence-transformers",
"sentence-similarity"
] | sentence-similarity | false | Muennighoff | null | Muennighoff/SGPT-1.3B-weightedmean-msmarco-specb-bitfit | 1,128 | null | sentence-transformers | ---
pipeline_tag: sentence-similarity
tags:
- sentence-transformers
- feature-extraction
- sentence-similarity
---
# SGPT-1.3B-weightedmean-msmarco-specb-bitfit
## Usage
For usage instructions, refer to our codebase: https://github.com/Muennighoff/sgpt
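As a bare-bones sketch (not from the original card): the checkpoint loads as a regular Sentence Transformer, but note that the `specb` variants expect special query/document bracket tokens that the linked codebase adds around the input text; the snippet below omits that step and only shows loading, encoding, and cosine scoring with arbitrary example sentences.
```python
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer("Muennighoff/SGPT-1.3B-weightedmean-msmarco-specb-bitfit")
embeddings = model.encode(
    ["How do I bake bread?", "A simple recipe for sourdough bread."],
    convert_to_tensor=True,
)
print(util.cos_sim(embeddings[0], embeddings[1]))
```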
## Evaluation Results
For eval results, refer to the eval folder or our paper: https://arxiv.org/abs/2202.08904
## Training
The model was trained with the parameters:
**DataLoader**:
`torch.utils.data.dataloader.DataLoader` of length 62398 with parameters:
```
{'batch_size': 8, 'sampler': 'torch.utils.data.sampler.RandomSampler', 'batch_sampler': 'torch.utils.data.sampler.BatchSampler'}
```
**Loss**:
`sentence_transformers.losses.MultipleNegativesRankingLoss.MultipleNegativesRankingLoss` with parameters:
```
{'scale': 20.0, 'similarity_fct': 'cos_sim'}
```
Parameters of the fit()-Method:
```
{
"epochs": 10,
"evaluation_steps": 0,
"evaluator": "NoneType",
"max_grad_norm": 1,
"optimizer_class": "<class 'transformers.optimization.AdamW'>",
"optimizer_params": {
"lr": 0.0002
},
"scheduler": "WarmupLinear",
"steps_per_epoch": null,
"warmup_steps": 1000,
"weight_decay": 0.01
}
```
## Full Model Architecture
```
SentenceTransformer(
(0): Transformer({'max_seq_length': 300, 'do_lower_case': False}) with Transformer model: GPTNeoModel
(1): Pooling({'word_embedding_dimension': 2048, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': True, 'pooling_mode_lasttoken': False})
)
```
## Citing & Authors
```bibtex
@article{muennighoff2022sgpt,
title={SGPT: GPT Sentence Embeddings for Semantic Search},
author={Muennighoff, Niklas},
journal={arXiv preprint arXiv:2202.08904},
year={2022}
}
```
| [
-0.08528872579336166,
-0.04275880381464958,
-0.05167440325021744,
0.06958850473165512,
0.036145444959402084,
-0.030276209115982056,
0.00422003073617816,
0.013585587963461876,
-0.029767362400889397,
-0.08659802377223969,
0.0425693653523922,
-0.06590081751346588,
0.018423058092594147,
-0.032759975641965866,
-0.03436097130179405,
0.018894007429480553,
-0.007522013504058123,
-0.040113385766744614,
-0.08453477174043655,
-0.09070154279470444,
0.10841751843690872,
0.05892890691757202,
0.04349488019943237,
-0.01185589749366045,
0.04213067889213562,
0.01678556390106678,
-0.0494401790201664,
0.026830026879906654,
0.07561655342578888,
-0.040927402675151825,
0.09386507421731949,
0.07104605436325073,
-0.025304989889264107,
0.04567193612456322,
0.04452196881175041,
0.0520363487303257,
-0.05656753107905388,
0.006805625278502703,
-0.01923292502760887,
-0.026209833100438118,
0.027167655527591705,
0.0006902322638779879,
-0.034928157925605774,
-0.00932780746370554,
0.03840262442827225,
-0.06433361023664474,
-0.07102299481630325,
-0.07147739827632904,
-0.03440389782190323,
0.05192818492650986,
-0.06675117462873459,
-0.017905782908201218,
-0.0009347635786980391,
0.07744051516056061,
-0.008915615268051624,
0.0367814339697361,
0.01194483321160078,
-0.03955087065696716,
0.00044626533053815365,
-0.057191114872694016,
-0.04170159250497818,
-0.04111779108643532,
-0.09285222738981247,
-0.046235959976911545,
-0.05529939755797386,
-0.007696294691413641,
0.03497942537069321,
-0.009205467998981476,
0.029190048575401306,
-0.0021292511373758316,
-0.03969985246658325,
0.04979846626520157,
-0.07799205183982849,
0.06639212369918823,
-0.034856051206588745,
0.07025967538356781,
0.08555883914232254,
-0.045521967113018036,
0.053628209978342056,
-0.1033673882484436,
-0.003813772229477763,
-0.0660485252737999,
0.0753796175122261,
0.010128356516361237,
0.06911999732255936,
-0.029568759724497795,
0.023737778887152672,
-0.042803362011909485,
0.03885069116950035,
-0.004918850027024746,
-0.046166419982910156,
-0.08616800606250763,
-0.01655566692352295,
0.005748724564909935,
0.03380720689892769,
0.04118543863296509,
-0.06219152733683586,
0.015288745984435081,
-0.01564563438296318,
0.0846540629863739,
0.01973346620798111,
0.0038426723331212997,
0.034563735127449036,
0.007288488559424877,
-0.01810452528297901,
0.0003302573168184608,
0.08056388050317764,
0.03415052592754364,
0.041422441601753235,
-0.07665856182575226,
0.027425408363342285,
0.10337820649147034,
0.0047513325698673725,
-0.06823574006557465,
0.04236280545592308,
0.010391189716756344,
-0.013530176132917404,
-0.0009862178703770041,
-0.07316670566797256,
0.1051144152879715,
-0.07387364655733109,
0.04649093747138977,
-0.09511900693178177,
-0.013898571021854877,
0.03069096989929676,
-0.054903194308280945,
0.019134851172566414,
6.613304769103296e-33,
0.00038371371920220554,
-0.016790105029940605,
-0.03576447442173958,
-0.06372544914484024,
0.021452732384204865,
0.03646545484662056,
-0.008388602174818516,
-0.018364930525422096,
-0.07032138109207153,
-0.007080726325511932,
-0.05623963475227356,
-0.038691598922014236,
-0.025071293115615845,
0.05046088248491287,
-0.0017322264611721039,
-0.03892521187663078,
-0.023719049990177155,
0.04816458746790886,
-0.006223788484930992,
-0.015584730543196201,
0.08627773821353912,
0.007765256334096193,
-0.010456262156367302,
-0.07603765279054642,
-0.039649032056331635,
0.018612051382660866,
0.08157846331596375,
-0.009035316295921803,
-0.04199355095624924,
0.02980714477598667,
-0.0550384446978569,
0.02184930443763733,
0.05073406174778938,
0.002292915713042021,
-0.021813560277223587,
0.00600329739972949,
0.044763918966054916,
-0.019381536170840263,
0.018330644816160202,
-0.07879812270402908,
-0.024912916123867035,
0.07428919523954391,
0.011599426157772541,
-0.08849134296178818,
-0.0775812566280365,
-0.019802911207079887,
0.005256164353340864,
0.011791514232754707,
0.08599110692739487,
0.03685029223561287,
0.014851748012006283,
-0.006522693205624819,
0.020388228818774223,
0.010142236016690731,
0.011145710945129395,
0.03912618011236191,
0.050472840666770935,
0.05031489580869675,
0.14176484942436218,
0.06896737217903137,
0.007203896064311266,
0.050249531865119934,
0.0009228443377651274,
0.01575331576168537,
0.08046845346689224,
0.02030869573354721,
0.003928616177290678,
-0.00882874894887209,
0.018061170354485512,
0.018163898959755898,
-0.06798777729272842,
0.0009679775102995336,
-0.03678583726286888,
-0.027212010696530342,
0.0844685286283493,
-0.09614235907793045,
0.011071956716477871,
-0.09046774357557297,
-0.030554013326764107,
0.04327188804745674,
-0.08895620703697205,
0.02163631096482277,
0.03906157985329628,
-0.07707025110721588,
-0.09209282696247101,
-0.05121708661317825,
0.01534189935773611,
-0.09675275534391403,
-0.000850723881740123,
-0.08825957030057907,
0.005972668528556824,
0.01425622683018446,
-0.04364761337637901,
-0.005781749729067087,
-0.015414511784911156,
-5.3040556915467406e-33,
0.01046257745474577,
0.0671406239271164,
-0.04034881293773651,
0.11537420749664307,
-0.03270513564348221,
0.007177949417382479,
0.05788785219192505,
0.08993469178676605,
-0.05024196207523346,
-0.03058403544127941,
-0.022611986845731735,
0.0021136098075658083,
-0.06209380179643631,
-0.08869071304798126,
0.08458556234836578,
0.03339831531047821,
-0.007861647754907608,
0.035819344222545624,
0.07954124361276627,
0.04442385211586952,
0.02600933611392975,
0.12950730323791504,
-0.11825446784496307,
0.06390462070703506,
-0.0587969645857811,
-0.0013893127907067537,
0.033469751477241516,
0.05162365734577179,
-0.0021380458492785692,
-0.08316117525100708,
0.008078455924987793,
-0.0036138256546109915,
-0.1035972610116005,
-0.00917944498360157,
-0.0660775825381279,
-0.027455050498247147,
0.09137415140867233,
0.023403681814670563,
-0.0019407770596444607,
0.06782656908035278,
0.043269362300634384,
0.09260660409927368,
-0.075045645236969,
-0.0025636705104261637,
-0.024850616231560707,
0.02419409528374672,
-0.019098810851573944,
-0.08512654155492783,
0.05712716281414032,
-0.09669128060340881,
0.035809628665447235,
-0.03899490460753441,
-0.07864546030759811,
0.051700226962566376,
-0.026007285341620445,
-0.11051107943058014,
-0.010155652649700642,
-0.06047850474715233,
-0.06864359229803085,
-0.048794277012348175,
-0.029406080022454262,
0.0523725263774395,
0.05455480515956879,
-0.06632325798273087,
0.0990557074546814,
0.0098493080586195,
-0.0007140540983527899,
0.02778378129005432,
-0.01394256018102169,
-0.001232170150615275,
-0.06271478533744812,
0.05688314139842987,
0.007926861755549908,
0.05364460498094559,
0.005919249728322029,
0.02002670057117939,
-0.04145517572760582,
-0.0132414186373353,
-0.00536771584302187,
-0.04821431264281273,
-0.0005102920695208013,
-0.014722655527293682,
0.005533293820917606,
0.05339175835251808,
0.003509084926918149,
0.09706719219684601,
0.0072799138724803925,
0.10529668629169464,
0.024841688573360443,
0.052386388182640076,
-0.04096969962120056,
-0.0034902559127658606,
0.042010292410850525,
0.055964626371860504,
0.02052043192088604,
-6.193528889752997e-8,
-0.07925065606832504,
0.026658151298761368,
-0.07281110435724258,
0.08226912468671799,
-0.08243848383426666,
-0.02143898420035839,
0.01048360113054514,
0.07392827421426773,
0.009428690187633038,
-0.012142966501414776,
0.07126609236001968,
0.006034613121300936,
-0.0629732757806778,
0.024158190935850143,
-0.0023760495241731405,
0.024056505411863327,
0.012478142976760864,
0.0555403046309948,
-0.013885566033422947,
-0.035528507083654404,
0.027225181460380554,
0.06942947208881378,
-0.0034938338212668896,
-0.02145017869770527,
0.05784088745713234,
-0.005967928096652031,
-0.017652712762355804,
0.08894369751214981,
-0.024136506021022797,
-0.024196958169341087,
0.026340162381529808,
0.012570378370583057,
-0.04379788041114807,
-0.03055722638964653,
0.020279642194509506,
0.08749523758888245,
0.0353359654545784,
-0.028320927172899246,
0.076895572245121,
0.09675263613462448,
-0.008823370561003685,
0.04469422623515129,
-0.0427570678293705,
-0.0015103779733181,
0.04669390618801117,
0.016737306490540504,
-0.04259917512536049,
-0.08745741844177246,
0.03663969412446022,
-0.04351429641246796,
0.05407971143722534,
-0.06360770761966705,
-0.06642709672451019,
0.04173421859741211,
0.0707060918211937,
-0.06191196292638779,
-0.022577404975891113,
-0.04847162216901779,
0.07976479083299637,
-0.01195349171757698,
0.11961132287979126,
-0.03041635826230049,
-0.061142340302467346,
-0.018615473061800003
] |
allenai/t5-small-squad2-question-generation | 7e7d6d8a68f96223a5cdaaf063e55293d52f1aef | 2021-06-23T11:56:56.000Z | [
"pytorch",
"jax",
"t5",
"text2text-generation",
"transformers",
"autotrain_compatible"
] | text2text-generation | false | allenai | null | allenai/t5-small-squad2-question-generation | 1,128 | 12 | transformers | A simple question-generation model trained on the SQuAD 2.0 dataset.
Example use:
```python
from transformers import T5Config, T5ForConditionalGeneration, T5Tokenizer
model_name = "allenai/t5-small-squad2-question-generation"
tokenizer = T5Tokenizer.from_pretrained(model_name)
model = T5ForConditionalGeneration.from_pretrained(model_name)
def run_model(input_string, **generator_args):
    input_ids = tokenizer.encode(input_string, return_tensors="pt")
    res = model.generate(input_ids, **generator_args)
    output = tokenizer.batch_decode(res, skip_special_tokens=True)
    print(output)
    return output
run_model("shrouds herself in white and walks penitentially disguised as brotherly love through factories and parliaments; offers help, but desires power;")
run_model("He thanked all fellow bloggers and organizations that showed support.")
run_model("Races are held between April and December at the Veliefendi Hippodrome near Bakerky, 15 km (9 miles) west of Istanbul.")
```
which should result in the following:
```
['What is the name of the man who is a brotherly love?']
['What did He thank all fellow bloggers and organizations that showed support?']
['Where is the Veliefendi Hippodrome located?']
```
| [
-0.07733354717493057,
0.01608331874012947,
-0.01522858627140522,
0.05639607459306717,
-0.001199019025079906,
0.018490860238671303,
-0.028590111061930656,
0.04686910659074783,
-0.02564777247607708,
-0.01753864251077175,
0.06832805275917053,
-0.14676423370838165,
0.056437909603118896,
-0.06466356664896011,
0.0346103310585022,
0.026978235691785812,
-0.00520790321752429,
-0.04382563754916191,
-0.1332503706216812,
-0.08501936495304108,
0.023846248164772987,
0.053687553852796555,
0.02696768194437027,
0.02856888808310032,
-0.01735479012131691,
-0.07096488028764725,
0.03403254598379135,
0.046916618943214417,
0.024715572595596313,
0.03262684866786003,
0.01718766801059246,
0.040939122438430786,
-0.04869193211197853,
0.06528696417808533,
0.09793721139431,
0.04380027949810028,
0.029321160167455673,
0.008217601105570793,
0.014768633991479874,
-0.03145933896303177,
0.03277374058961868,
-0.10754097253084183,
0.005066046491265297,
-0.030652688816189766,
0.05604218319058418,
-0.0361609049141407,
-0.007880567573010921,
0.02907189540565014,
-0.0036528652999550104,
-0.06461554765701294,
-0.08684618026018143,
-0.05916139483451843,
0.0367337130010128,
-0.015258537605404854,
-0.024518821388483047,
-0.011002715677022934,
0.03530661016702652,
0.0008830966544337571,
-0.012903985567390919,
-0.06273822486400604,
-0.09806223958730698,
-0.0765550285577774,
-0.031702328473329544,
-0.04001826420426369,
-0.04133337736129761,
-0.048172034323215485,
0.003873569192364812,
0.003790154354646802,
0.041113175451755524,
-0.030969571322202682,
-0.05986654385924339,
0.002277378225699067,
-0.020143326371908188,
0.03813501447439194,
-0.03300032392144203,
0.07862019538879395,
0.086696557700634,
0.004805422388017178,
0.09153014421463013,
-0.09667608141899109,
-0.006247989367693663,
-0.08021221309900284,
0.08773157000541687,
0.036698199808597565,
0.005785103887319565,
-0.01658354327082634,
0.05976725369691849,
0.04350748658180237,
0.06379850953817368,
0.05613623932003975,
-0.09777536988258362,
0.004040513187646866,
0.06440909206867218,
0.02127632312476635,
0.003470770549029112,
0.08378277719020844,
0.03825471177697182,
0.02460353635251522,
-0.031409088522195816,
0.08116811513900757,
0.02713622711598873,
0.010579310357570648,
0.06970395892858505,
0.019135938957333565,
-0.034672897309064865,
-0.028340661898255348,
0.0027977924328297377,
0.008943391963839531,
0.03848238289356232,
-0.10446339845657349,
0.02223675698041916,
0.043231263756752014,
0.024257905781269073,
0.005999785382300615,
0.07379850745201111,
-0.006278511136770248,
0.004489517770707607,
0.054865870624780655,
-0.0038317302241921425,
0.06589503586292267,
0.06135837361216545,
0.003999397624284029,
-0.033019427210092545,
0.058784954249858856,
-0.004186155274510384,
-0.046214934438467026,
-0.029646769165992737,
7.831856008628265e-33,
0.02549949288368225,
-0.005033161956816912,
0.09531375020742416,
0.08328188955783844,
-0.006455796770751476,
0.05598035827279091,
-0.03294794261455536,
0.04988141357898712,
-0.01362599153071642,
0.014751747250556946,
-0.09380662441253662,
-0.03164982795715332,
-0.029137203469872475,
0.04201142489910126,
-0.01614413410425186,
-0.046084847301244736,
-0.08327944576740265,
0.009056361392140388,
0.03241956979036331,
0.05824907869100571,
0.06781243532896042,
0.09262359142303467,
-0.019344650208950043,
-0.06566459685564041,
0.038724739104509354,
0.04040219262242317,
-0.0007793483673594892,
-0.08512506633996964,
-0.02229589782655239,
0.04423768073320389,
-0.1263953447341919,
-0.041499942541122437,
-0.07017974555492401,
-0.005479767452925444,
-0.05565204843878746,
0.010369437746703625,
0.04387250170111656,
-0.04575764760375023,
-0.03331154212355614,
-0.05710258707404137,
-0.011256820522248745,
0.033880021423101425,
0.00809199083596468,
0.004539710935205221,
-0.08959171175956726,
-0.060271840542554855,
0.03899150714278221,
-0.04335949942469597,
0.022810980677604675,
0.0316002257168293,
0.003719157073646784,
-0.03608877211809158,
-0.017277680337429047,
-0.07819178700447083,
-0.008925192058086395,
-0.007765694987028837,
-0.009881190024316311,
0.01705971360206604,
0.06881672143936157,
-0.0028042313642799854,
0.021511277183890343,
0.030353669077157974,
-0.0052195340394973755,
0.06789622455835342,
0.06949138641357422,
-0.014924096874892712,
-0.003778244601562619,
-0.04781999811530113,
0.0666089877486229,
-0.0012159571051597595,
0.012468229979276657,
-0.037200093269348145,
-0.03066931664943695,
-0.06166354566812515,
0.0181629229336977,
-0.056875500828027725,
0.046526286751031876,
-0.005526786670088768,
-0.08522380143404007,
-0.033028360456228256,
-0.019717160612344742,
0.02164904959499836,
-0.08910419046878815,
-0.07508950680494308,
-0.007325547747313976,
0.013832837343215942,
0.0617063082754612,
-0.03414543718099594,
-0.021479694172739983,
-0.1162334457039833,
-0.00800940953195095,
-0.07197659462690353,
-0.01078862976282835,
0.004050379619002342,
-0.02781711518764496,
-9.130753021715907e-33,
0.02313489466905594,
0.016505058854818344,
-0.043577890843153,
0.00935712642967701,
0.04569071903824806,
-0.08268041163682938,
0.03600773587822914,
0.03573885187506676,
-0.011398372240364552,
0.001964855706319213,
0.024340618401765823,
-0.04895386844873428,
0.04323863610625267,
-0.013025470077991486,
0.06820561736822128,
-0.08040113002061844,
-0.02347092144191265,
-0.04391438141465187,
0.02726891078054905,
0.06820978224277496,
-0.022727476432919502,
0.0762949138879776,
-0.11234861612319946,
0.03838734328746796,
-0.05080481991171837,
0.046140123158693314,
-0.03332145884633064,
0.050936389714479446,
0.037930119782686234,
0.0164139736443758,
-0.04874291270971298,
-0.002863190369680524,
0.0010834034765139222,
0.04195229709148407,
-0.06678152084350586,
-0.002162847900763154,
0.008419184945523739,
-0.02661985345184803,
-0.001858896459452808,
0.09971784055233002,
0.005261612590402365,
0.04625234380364418,
-0.09942717105150223,
0.08302713930606842,
-0.06614293903112411,
0.008741680532693863,
0.010008875280618668,
-0.03493747487664223,
-0.00042730491259135306,
-0.042391035705804825,
0.0411837212741375,
0.014112725853919983,
-0.09640423953533173,
0.008907564915716648,
-0.1011168509721756,
-0.08902275562286377,
0.012834683060646057,
-0.006389263551682234,
-0.06891719251871109,
0.023947063833475113,
-0.0378665030002594,
0.008746212348341942,
0.040096770972013474,
-0.09539730846881866,
0.034013859927654266,
-0.007212769705802202,
-0.08906324952840805,
0.054965049028396606,
0.029356947168707848,
0.012643548659980297,
-0.034213513135910034,
-0.025051183998584747,
0.04744230955839157,
0.01860128901898861,
0.03205883875489235,
0.002597818151116371,
-0.09585697203874588,
-0.04988386109471321,
0.07212839275598526,
-0.007722522132098675,
-0.09608642011880875,
-0.014437287114560604,
0.07563189417123795,
0.10558970272541046,
-0.04891328513622284,
-0.04802573844790459,
0.09833420813083649,
0.1897026151418686,
0.012661140412092209,
0.005210872273892164,
0.05963478982448578,
-0.029052942991256714,
0.050954338163137436,
0.1845608651638031,
-0.014264305122196674,
-6.590308743170681e-8,
0.0026556046213954687,
0.022717127576470375,
0.016549861058592796,
0.10438746958971024,
-0.01567421481013298,
-0.006751351524144411,
-0.07114717364311218,
-0.043179478496313095,
0.06595364958047867,
0.02201920561492443,
-0.05527159199118614,
-0.012324533425271511,
-0.016563398763537407,
-0.02038341760635376,
-0.018231866881251335,
0.06346697360277176,
-0.030847156420350075,
0.011883622966706753,
-0.02457493357360363,
-0.08689454942941666,
-0.021165505051612854,
-0.037308525294065475,
-0.08801287412643433,
0.019046278670430183,
0.03366120532155037,
0.033753763884305954,
-0.08978975564241409,
0.0017861133674159646,
-0.06789343804121017,
-0.004686214495450258,
-0.057580724358558655,
-0.007661234121769667,
-0.04987488314509392,
0.007954725995659828,
0.016921985894441605,
0.0902637466788292,
-0.04041784629225731,
-0.07083270698785782,
0.02355315536260605,
-0.003651239210739732,
-0.011609861627221107,
0.016857564449310303,
-0.08529451489448547,
-0.021427832543849945,
0.04248208552598953,
-0.014240602031350136,
0.01866971328854561,
-0.10618236660957336,
0.01128073688596487,
-0.024041682481765747,
-0.05025695264339447,
0.002456983085721731,
-0.040683988481760025,
0.05748891830444336,
-0.00504700094461441,
0.010998491197824478,
-0.05303151160478592,
0.04780677333474159,
0.005159567575901747,
0.028650294989347458,
0.05237531289458275,
0.053238142281770706,
-0.005193601828068495,
-0.0715685784816742
] |
diptanu/fBERT | 7bd599f887e294a43afb6b4c3f611d66af2f94ae | 2021-09-01T19:57:23.000Z | [
"pytorch",
"bert",
"fill-mask",
"transformers",
"autotrain_compatible"
] | fill-mask | false | diptanu | null | diptanu/fBERT | 1,128 | 3 | transformers | fBERT: A Neural Transformer for Identifying Offensive Content [Accepted at EMNLP 2021]
Authors: Diptanu Sarkar, Marcos Zampieri, Tharindu Ranasinghe and Alexander Ororbia
About:
Transformer-based models such as BERT, ELMo, and XLM-R have achieved state-of-the-art performance across various NLP tasks, including the identification of offensive language and hate speech, an important problem in social media. Previous studies have shown that domain-specific fine-tuning or retraining of models before tackling downstream tasks can lead to excellent results in multiple domains. Fine-tuning or retraining complex models to identify offensive language has not been substantially explored before; we address this gap by proposing fBERT, a bert-base-uncased model retrained on over 1.4 million offensive instances from the SOLID dataset. The shifted fBERT model better captures domain-specific offensive language and social media features. fBERT achieves better results than BERT and HateBERT on both the OffensEval and HatEval tasks and on the HS & O dataset.
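The card above does not include usage code. The snippet below is a minimal sketch, not from the authors, of loading the checkpoint for masked-token prediction with the Hugging Face fill-mask pipeline; the input sentence is an illustrative assumption.
```python
# Minimal sketch (assumption: the checkpoint loads as a standard BERT masked-LM).
from transformers import pipeline

fill_mask = pipeline("fill-mask", model="diptanu/fBERT")

# The sentence is an illustrative example; [MASK] is BERT's mask token.
for prediction in fill_mask("Social media platforms should [MASK] offensive posts."):
    print(prediction["token_str"], round(prediction["score"], 3))
```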
| [
-0.06355717033147812,
-0.05831332877278328,
0.011338331736624241,
-0.016309507191181183,
0.07308157533407211,
0.04933035373687744,
0.0006425594328902662,
-0.00929225143045187,
0.052959516644477844,
-0.061709001660346985,
-0.05646331608295441,
-0.05074775964021683,
0.07170772552490234,
0.013134760782122612,
0.014443433843553066,
0.045493658632040024,
0.05128021165728569,
0.08181097358465195,
-0.05402492359280586,
-0.024472443386912346,
0.007184458430856466,
0.11444167792797089,
0.0632859542965889,
-0.04299177974462509,
-0.0372745543718338,
-0.015204371884465218,
-0.027684956789016724,
-0.018569262698292732,
0.0025051424745470285,
-0.018075445666909218,
0.060039449483156204,
-0.04050825536251068,
-0.005138783250004053,
0.09010776877403259,
-0.09322061389684677,
0.019055670127272606,
-0.0347047820687294,
-0.007216142024844885,
0.07691702991724014,
-0.01073149312287569,
-0.013849292881786823,
-0.061644136905670166,
-0.01793811097741127,
-0.015093473717570305,
0.05654405429959297,
-0.008582745678722858,
-0.005553191993385553,
0.02124980092048645,
-0.010463605634868145,
0.011707521975040436,
-0.05478116124868393,
0.019736887887120247,
0.05766548588871956,
0.15026989579200745,
-0.06136104092001915,
0.0051073008216917515,
0.03896873816847801,
0.03736080229282379,
0.01621055044233799,
0.009617026895284653,
-0.08681114763021469,
-0.1059291884303093,
0.03070168010890484,
-0.019812660291790962,
-0.0299485232681036,
-0.019222073256969452,
-0.0754682868719101,
0.04538293555378914,
-0.054345883429050446,
-0.017682544887065887,
0.02893444336950779,
0.059243280440568924,
0.019234787672758102,
0.034625403583049774,
0.007626289967447519,
0.016149306669831276,
0.08175143599510193,
-0.0191479604691267,
0.09134095162153244,
-0.0806751698255539,
0.010195638053119183,
0.0314587764441967,
0.10809116065502167,
0.0027167215012013912,
0.06514686346054077,
0.019456416368484497,
-0.006185924168676138,
0.0014690262032672763,
-0.07315463572740555,
0.10617738962173462,
-0.019217148423194885,
-0.011653824709355831,
0.1568417102098465,
-0.06616199761629105,
0.05275910720229149,
-0.015224108472466469,
-0.011642605066299438,
-0.09916472434997559,
-0.061633046716451645,
0.0980876162648201,
-0.0665612742304802,
0.03480092063546181,
0.041003160178661346,
-0.10103652626276016,
0.005785853136330843,
-0.06629388779401779,
0.001196335768327117,
0.018286556005477905,
-0.06970443576574326,
-0.08272271603345871,
-0.0034998690243810415,
0.022482872009277344,
-0.0020959931425750256,
-0.11156043410301208,
0.10861777514219284,
-0.061576612293720245,
0.03139549493789673,
-0.02710801549255848,
0.04875602200627327,
0.056083422154188156,
-0.02205692045390606,
0.010824997909367085,
0.0038974087219685316,
0.10893278568983078,
0.009864864870905876,
0.01167397852987051,
-0.059394583106040955,
4.575443150604808e-33,
-0.012516669929027557,
0.07041941583156586,
-0.016766443848609924,
-0.01335726585239172,
-0.006104942876845598,
-0.026260240003466606,
-0.040341783314943314,
0.036797985434532166,
0.008973279967904091,
-0.00873484555631876,
-0.015809396281838417,
0.037855114787817,
-0.02365071326494217,
0.050605420023202896,
0.033306725323200226,
-0.00953479390591383,
-0.02957894280552864,
0.014295999892055988,
0.021558787673711777,
-0.007437880616635084,
0.06805308163166046,
0.03240824118256569,
0.09976775199174881,
-0.013112709857523441,
-0.06880887597799301,
-0.007707925979048014,
0.0756673812866211,
-0.04256755858659744,
-0.06751711666584015,
0.028354506939649582,
-0.12267599999904633,
0.03942280635237694,
-0.03168090432882309,
0.022763829678297043,
0.11408635973930359,
-0.07885714620351791,
0.0007605528808198869,
0.0060793510638177395,
-0.023765750229358673,
-0.009282722137868404,
-0.007650577928870916,
0.057695504277944565,
-0.01986074075102806,
0.006150148343294859,
-0.004949782509356737,
0.027126451954245567,
0.03455114737153053,
-0.06935832649469376,
0.0551033616065979,
0.0028137322515249252,
0.14180339872837067,
0.028932785615324974,
-0.04979708790779114,
0.04698654264211655,
-0.01723455637693405,
0.015281680971384048,
-0.03158272057771683,
0.005591114982962608,
0.08159257471561432,
0.004186355508863926,
0.021826516836881638,
0.019069885835051537,
0.006284734699875116,
-0.01308105606585741,
0.022618327289819717,
-0.032409731298685074,
-0.053081583231687546,
0.06156817451119423,
-0.02267983742058277,
-0.09630030393600464,
0.0844765454530716,
0.019542329013347626,
0.02041439339518547,
0.003680658061057329,
-0.02902870625257492,
0.002370531437918544,
0.031162867322564125,
-0.06971247494220734,
0.04294803366065025,
-0.006789779290556908,
0.03389636427164078,
-0.07120586186647415,
-0.011282075196504593,
-0.06336706131696701,
-0.093268021941185,
-0.05516652762889862,
0.07168890535831451,
-0.0338280089199543,
0.04151933640241623,
0.043451130390167236,
0.02036622352898121,
-0.009023427031934261,
-0.026346659287810326,
0.05960430949926376,
-0.06671245396137238,
-2.9213650092864424e-33,
-0.0687788799405098,
-0.0027324536349624395,
-0.12635767459869385,
0.039342135190963745,
-0.0425785668194294,
-0.04612475633621216,
0.05122394859790802,
0.0964793935418129,
0.021699344739317894,
0.032438796013593674,
0.07910268753767014,
-0.11097310483455658,
-0.005860054399818182,
-0.056775644421577454,
0.09045273065567017,
-0.014003907330334187,
0.006828690879046917,
0.052205510437488556,
-0.04516531154513359,
0.041888583451509476,
0.012767992913722992,
0.02103896625339985,
-0.09252940118312836,
0.058797720819711685,
-0.028875315561890602,
0.0600510835647583,
-0.026700695976614952,
0.05223697051405907,
-0.007360471878200769,
0.0354451984167099,
-0.025470487773418427,
0.08070102334022522,
-0.008667421527206898,
0.004074221011251211,
-0.07386545091867447,
-0.02177940122783184,
-0.0058716232888400555,
-0.015477224253118038,
0.0392434298992157,
0.011546310037374496,
0.09778344631195068,
0.02730061113834381,
-0.11519859731197357,
0.06338993459939957,
-0.024094874039292336,
-0.0235507320612669,
-0.12103954702615738,
-0.0057836174964904785,
0.03427862375974655,
-0.026751980185508728,
-0.03270776569843292,
-0.01329324021935463,
-0.06559590995311737,
-0.04478653520345688,
-0.04262707754969597,
-0.11439540982246399,
0.04277115687727928,
-0.062128230929374695,
-0.017595157027244568,
0.05516211688518524,
-0.10458780080080032,
-0.010190658271312714,
-0.009577426128089428,
-0.028463970869779587,
0.05340973287820816,
-0.06251944601535797,
0.0030962619930505753,
0.011147557757794857,
0.02708340995013714,
0.037158042192459106,
0.054950252175331116,
-0.011370740830898285,
0.021875135600566864,
-0.05647459998726845,
-0.04392670467495918,
-0.03266036882996559,
0.013224300928413868,
-0.05315428227186203,
-0.03386581316590309,
-0.015548612922430038,
-0.004528908524662256,
-0.0394793264567852,
0.004069035407155752,
0.00903464388102293,
0.009364650584757328,
0.026955241337418556,
0.01410459354519844,
0.020860115066170692,
0.0190130528062582,
0.013098306953907013,
0.021750913932919502,
0.01738668978214264,
-0.015493938699364662,
0.03784233331680298,
0.040198441594839096,
-4.224936489549691e-8,
-0.08451753854751587,
-0.04902130737900734,
-0.02229408174753189,
0.09982214123010635,
0.033444445580244064,
-0.012122716754674911,
-0.05163450911641121,
0.017309455201029778,
-0.014652351848781109,
-0.047244809567928314,
0.009039188735187054,
0.0800713449716568,
0.004436451476067305,
-0.06963998824357986,
0.007047372870147228,
0.006332409102469683,
0.039552852511405945,
0.003411342389881611,
0.028542200103402138,
-0.044849179685115814,
0.008619251661002636,
0.022797228768467903,
0.018673907965421677,
-0.04693318530917168,
0.043707121163606644,
-0.07390877604484558,
-0.08447441458702087,
-0.02966451644897461,
0.009086423553526402,
0.05265314504504204,
0.007888666354119778,
0.034166086465120316,
-0.08414269983768463,
-0.003287359606474638,
0.08639530092477798,
0.12126056849956512,
-0.04983258992433548,
-0.06413963437080383,
-0.018200477585196495,
0.043605249375104904,
0.03401976451277733,
0.07515677064657211,
-0.029769469052553177,
-0.053109511733055115,
-0.010895411483943462,
0.034256890416145325,
-0.04586459696292877,
-0.13828812539577484,
0.007700434420257807,
-0.03734351322054863,
0.07263273745775223,
0.0339692085981369,
-0.03737708181142807,
0.12422093749046326,
0.01810632087290287,
-0.001408336334861815,
-0.02296009287238121,
-0.006420440971851349,
-0.027016233652830124,
0.07958116382360458,
0.051921382546424866,
0.00037631523446179926,
0.01676350086927414,
-0.02203439176082611
] |
sentence-transformers/distilroberta-base-msmarco-v2 | f273032139d26a1e54280e0b7d2f4a2193de4feb | 2022-06-15T21:50:52.000Z | [
"pytorch",
"tf",
"roberta",
"feature-extraction",
"arxiv:1908.10084",
"sentence-transformers",
"sentence-similarity",
"transformers",
"license:apache-2.0"
] | sentence-similarity | false | sentence-transformers | null | sentence-transformers/distilroberta-base-msmarco-v2 | 1,128 | null | sentence-transformers | ---
pipeline_tag: sentence-similarity
license: apache-2.0
tags:
- sentence-transformers
- feature-extraction
- sentence-similarity
- transformers
---
**⚠️ This model is deprecated. Please don't use it as it produces sentence embeddings of low quality. You can find recommended sentence embedding models here: [SBERT.net - Pretrained Models](https://www.sbert.net/docs/pretrained_models.html)**
# sentence-transformers/distilroberta-base-msmarco-v2
This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
## Usage (Sentence-Transformers)
Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed:
```
pip install -U sentence-transformers
```
Then you can use the model like this:
```python
from sentence_transformers import SentenceTransformer
sentences = ["This is an example sentence", "Each sentence is converted"]
model = SentenceTransformer('sentence-transformers/distilroberta-base-msmarco-v2')
embeddings = model.encode(sentences)
print(embeddings)
```
## Usage (HuggingFace Transformers)
Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling operation on top of the contextualized word embeddings.
```python
from transformers import AutoTokenizer, AutoModel
import torch
#Mean Pooling - Take attention mask into account for correct averaging
def mean_pooling(model_output, attention_mask):
    token_embeddings = model_output[0]  # First element of model_output contains all token embeddings
    input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
    return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9)
# Sentences we want sentence embeddings for
sentences = ['This is an example sentence', 'Each sentence is converted']
# Load model from HuggingFace Hub
tokenizer = AutoTokenizer.from_pretrained('sentence-transformers/distilroberta-base-msmarco-v2')
model = AutoModel.from_pretrained('sentence-transformers/distilroberta-base-msmarco-v2')
# Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
# Compute token embeddings
with torch.no_grad():
    model_output = model(**encoded_input)
# Perform pooling. In this case, mean pooling.
sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
print("Sentence embeddings:")
print(sentence_embeddings)
```
## Evaluation Results
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name=sentence-transformers/distilroberta-base-msmarco-v2)
## Full Model Architecture
```
SentenceTransformer(
(0): Transformer({'max_seq_length': 350, 'do_lower_case': False}) with Transformer model: RobertaModel
(1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
)
```
## Citing & Authors
This model was trained by [sentence-transformers](https://www.sbert.net/).
If you find this model helpful, feel free to cite our publication [Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks](https://arxiv.org/abs/1908.10084):
```bibtex
@inproceedings{reimers-2019-sentence-bert,
title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
author = "Reimers, Nils and Gurevych, Iryna",
booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
month = "11",
year = "2019",
publisher = "Association for Computational Linguistics",
url = "http://arxiv.org/abs/1908.10084",
}
``` | [
-0.04918654263019562,
-0.08095026016235352,
0.006300399545580149,
0.04701220989227295,
0.028915150091052055,
0.06441868841648102,
-0.029924441128969193,
0.05285554751753807,
0.011619863100349903,
-0.09100352227687836,
0.041200511157512665,
0.018216008320450783,
0.0461004413664341,
0.0703430101275444,
0.05333586409687996,
0.07216659188270569,
0.044428564608097076,
0.06667472422122955,
-0.06980659067630768,
-0.11837087571620941,
0.1196494847536087,
0.1099993959069252,
0.021253857761621475,
0.02510436251759529,
-0.006845267955213785,
0.0906846672296524,
-0.031167268753051758,
0.013429258950054646,
0.010007956996560097,
-0.009229307062923908,
0.034758035093545914,
-0.01676304265856743,
-0.014598866924643517,
0.08266443759202957,
0.018997695297002792,
0.07213164865970612,
0.0068403673358261585,
0.012132522650063038,
-0.021878570318222046,
-0.07277123630046844,
0.0034441628959029913,
-0.029419340193271637,
-0.025499580428004265,
0.01555202342569828,
0.04820249602198601,
-0.07025855779647827,
-0.13110417127609253,
-0.029565393924713135,
-0.012051116675138474,
-0.020628241822123528,
-0.12670907378196716,
-0.00848051905632019,
0.03739258646965027,
0.09206340461969376,
-0.004470627289265394,
0.046288684010505676,
0.010040872730314732,
-0.03230725973844528,
0.01812840811908245,
-0.12873928248882294,
-0.05701069533824921,
-0.010572624392807484,
-0.011803797446191311,
-0.007852422073483467,
-0.06137562170624733,
-0.004220129456371069,
0.032920293509960175,
0.006973301526159048,
0.04143607243895531,
0.021733978763222694,
-0.07114095240831375,
0.056971266865730286,
-0.052662938833236694,
-0.02821330726146698,
-0.059038642793893814,
0.015940958634018898,
0.09285816550254822,
-0.006688306108117104,
0.06028435379266739,
0.0046303668059408665,
-0.011760931462049484,
-0.07334966957569122,
0.03640236333012581,
0.05530345067381859,
0.01853136532008648,
-0.043800920248031616,
-0.0029497453942894936,
-0.05046350136399269,
-0.011025721207261086,
-0.010678751394152641,
-0.059117935597896576,
-0.11215811222791672,
0.0316845141351223,
-0.03667677938938141,
0.0027864754665642977,
0.03363146260380745,
-0.037529461085796356,
-0.031197646632790565,
0.01634073629975319,
0.06859052181243896,
0.020920405164361,
0.03841491416096687,
0.03645188361406326,
-0.0703534409403801,
-0.04261557012796402,
0.012783840298652649,
-0.03389844298362732,
0.0037945257499814034,
0.0387115553021431,
-0.1059340387582779,
0.03156753256917,
-0.021311607211828232,
-0.03465846925973892,
-0.03162742033600807,
0.057368334382772446,
-0.028703399002552032,
0.03481890261173248,
-0.023186057806015015,
-0.018507031723856926,
0.08374925702810287,
-0.0053419433534145355,
0.06880579888820648,
-0.05269976705312729,
0.001466538175009191,
-0.002579917898401618,
-0.06532841920852661,
-0.005334948189556599,
1.4110975815316447e-33,
0.011454642750322819,
-0.00983960647135973,
0.006490782834589481,
0.012669786810874939,
0.029742984101176262,
0.0028647975996136665,
0.02074180357158184,
0.07562311738729477,
-0.11493463069200516,
-0.022005829960107803,
-0.04976833984255791,
0.012199535965919495,
-0.05874350666999817,
0.0737592801451683,
0.02252979204058647,
-0.029805298894643784,
-0.026342757046222687,
-0.039383698254823685,
0.07100833207368851,
0.01611250452697277,
0.024044040590524673,
0.030076373368501663,
0.008226349018514156,
-0.052843399345874786,
-0.07632067799568176,
-0.02062470279633999,
0.07175589352846146,
-0.07490256428718567,
-0.04029548540711403,
0.031489044427871704,
-0.08557616919279099,
0.021560732275247574,
-0.004992587026208639,
-0.0056642889976501465,
-0.008062288165092468,
-0.019239261746406555,
0.006024525035172701,
-0.031148597598075867,
-0.02493537776172161,
-0.07947024703025818,
-0.02875816822052002,
0.0002248055679956451,
-0.012924621812999249,
-0.08978298306465149,
-0.010394846089184284,
-0.012227490544319153,
0.04713895544409752,
-0.005163567140698433,
0.10143934190273285,
0.032277438789606094,
0.08327357470989227,
0.01426277682185173,
-0.0025503868237137794,
-0.0325765423476696,
0.011270598508417606,
0.021018654108047485,
0.05087045580148697,
0.05744514986872673,
0.1063578873872757,
-0.013998132199048996,
0.03831247240304947,
-0.002918276935815811,
0.04148878529667854,
0.015367012470960617,
0.09918425977230072,
-0.03410211578011513,
0.06695733964443207,
0.05163455754518509,
0.025323854759335518,
0.06785132735967636,
-0.03163180500268936,
0.00591678312048316,
-0.061310186982154846,
0.029014714062213898,
0.025732912123203278,
-0.011787124909460545,
0.012123155407607555,
-0.06462785601615906,
-0.022286295890808105,
0.08610798418521881,
-0.04393111169338226,
-0.07466302067041397,
0.06945651769638062,
-0.059165727347135544,
-0.003589729545637965,
-0.01796424575150013,
0.05026010796427727,
-0.07347439229488373,
0.05557164549827576,
-0.05329527705907822,
0.02518993616104126,
0.030156973749399185,
0.007673176936805248,
0.059476736932992935,
0.07565430551767349,
-3.0972740654671514e-33,
0.041766706854104996,
0.026076259091496468,
-0.08445193618535995,
0.06874699890613556,
-0.023989668115973473,
-0.05738784000277519,
0.030778024345636368,
0.07988560199737549,
0.0030352245084941387,
-0.03274669870734215,
-0.04518575593829155,
-0.020856712013483047,
0.05219253525137901,
-0.08632887899875641,
0.10251551866531372,
0.0719045028090477,
-0.036041997373104095,
0.014734677039086819,
0.023741628974676132,
0.06620968133211136,
-0.009084238670766354,
0.07833120226860046,
-0.10079479962587357,
0.050235651433467865,
-0.04163265600800514,
-0.013206718489527702,
-0.01465469878166914,
-0.008202338591217995,
-0.032993514090776443,
-0.03595169261097908,
-0.02533620409667492,
0.023432094603776932,
-0.014776557683944702,
-0.05645741894841194,
-0.13760140538215637,
0.010151413269340992,
-0.01867477223277092,
-0.05559073016047478,
0.05581987649202347,
0.035970501601696014,
0.026928553357720375,
0.08277618139982224,
-0.049603722989559174,
-0.017351645976305008,
-0.014500174671411514,
-0.01346793957054615,
-0.07015354186296463,
-0.09501458704471588,
0.05543331429362297,
0.009079537354409695,
-0.041699063032865524,
0.03694465383887291,
-0.125106081366539,
0.01840520277619362,
-0.04296937584877014,
-0.08849158883094788,
-0.037209950387477875,
-0.039856940507888794,
-0.0859912857413292,
-0.06635770946741104,
-0.07603748142719269,
-0.012203586287796497,
0.01634616032242775,
-0.09134797006845474,
0.060231734067201614,
-0.06269730627536774,
-0.016227273270487785,
0.028728369623422623,
-0.03504868969321251,
-0.0524028018116951,
-0.007230898831039667,
-0.011926333419978619,
0.024330519139766693,
0.06277026236057281,
-0.00009441353904549032,
-0.05299780145287514,
0.0017388283740729094,
0.03907572478055954,
-0.0179089717566967,
-0.024645203724503517,
0.03288015350699425,
-0.0223134346306324,
-0.011334960348904133,
-0.015117437578737736,
0.03847882151603699,
-0.016333898529410362,
0.053161539137363434,
0.08107269555330276,
-0.0187680721282959,
0.03166205435991287,
-0.024646950885653496,
-0.01878749020397663,
-0.017908846959471703,
0.06939173489809036,
0.03082061931490898,
-5.183473916758885e-8,
-0.08601000159978867,
-0.024940023198723793,
-0.0850813090801239,
0.0702684223651886,
-0.08461520820856094,
-0.06042860820889473,
0.07480280101299286,
0.08139659464359283,
-0.07562890648841858,
-0.016350766643881798,
0.0013701849384233356,
0.026352394372224808,
-0.09983882308006287,
0.00890132412314415,
-0.0400487557053566,
0.12117253243923187,
-0.030666664242744446,
0.03822612389922142,
0.024095045402646065,
-0.032435525208711624,
0.04554669186472893,
-0.0014768954133614898,
-0.0009835445089265704,
0.02860569767653942,
0.004697541706264019,
0.03201417624950409,
-0.0329742394387722,
0.028397871181368828,
-0.000024332126486115158,
0.039282459765672684,
0.010226809419691563,
0.026757756248116493,
-0.05661061778664589,
-0.06007305532693863,
0.013589860871434212,
0.0587245337665081,
0.05583953857421875,
-0.07565073668956757,
0.008641693741083145,
0.06113683059811592,
0.07184073328971863,
0.05706579610705376,
-0.10607018321752548,
-0.002491386840119958,
0.10644224286079407,
0.05032254755496979,
-0.018164191395044327,
-0.04980071261525154,
0.02180590108036995,
0.004973473958671093,
0.09214374423027039,
-0.05368524417281151,
-0.03350812569260597,
-0.008861838839948177,
0.040694788098335266,
0.053760867565870285,
0.024658797308802605,
-0.027820130810141563,
0.057426176965236664,
-0.05703491345047951,
0.07053021341562271,
0.08696702867746353,
0.0995693951845169,
-0.07347127050161362
] |
akdeniz27/roberta-base-cuad | 94a24c27b5d8bf9c2fa89cf80729814cfb002e7b | 2021-11-14T08:42:48.000Z | [
"pytorch",
"roberta",
"question-answering",
"en",
"dataset:cuad",
"transformers",
"autotrain_compatible"
] | question-answering | false | akdeniz27 | null | akdeniz27/roberta-base-cuad | 1,124 | null | transformers | ---
language: en
datasets:
- cuad
---
# RoBERTa Base Model fine-tuned with CUAD dataset
This model is a fine-tuned version of RoBERTa Base,
trained on the CUAD dataset: https://huggingface.co/datasets/cuad
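A minimal usage sketch follows; it is not part of the original card, and the question and contract clause are illustrative assumptions.
```python
# Minimal sketch: extractive question answering with the fine-tuned checkpoint.
from transformers import pipeline

qa = pipeline("question-answering", model="akdeniz27/roberta-base-cuad")

# Question and context are illustrative assumptions, not CUAD examples.
result = qa(
    question="Which state's law governs this agreement?",
    context=(
        "This Agreement shall be governed by and construed in accordance "
        "with the laws of the State of New York."
    ),
)
print(result["answer"], result["score"])
```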
Link for model checkpoint: https://github.com/TheAtticusProject/cuad
For examples of using the model with CUAD, see https://github.com/marshmellow77/cuad-demo
and https://huggingface.co/spaces/akdeniz27/contract-understanding-atticus-dataset-demo | [
-0.08384528756141663,
-0.06591169536113739,
-0.04940364509820938,
-0.08467657119035721,
-0.042881760746240616,
0.03330354019999504,
-0.033334411680698395,
-0.00931447371840477,
-0.04368089884519577,
0.02411743253469467,
-0.008591366931796074,
-0.05817456543445587,
0.03636644035577774,
-0.0872935801744461,
-0.05053766071796417,
-0.023670507594943047,
0.026553280651569366,
0.02425473742187023,
-0.021710077300667763,
-0.012743289582431316,
-0.0631227195262909,
0.04149462282657623,
0.013319175690412521,
-0.009955587796866894,
0.0376301072537899,
0.009745356626808643,
0.024628307670354843,
0.012725315056741238,
0.04353794828057289,
-0.06903494894504547,
0.03852678835391998,
0.06287489086389542,
0.0005281533231027424,
0.07009433954954147,
0.02518642693758011,
0.09666167944669724,
-0.06500403583049774,
-0.03980197757482529,
-0.10329177975654602,
-0.0584208220243454,
0.06091003492474556,
0.04792308062314987,
0.03572214022278786,
0.025239767506718636,
0.008063065819442272,
-0.10343453288078308,
0.020410964265465736,
0.03612884506583214,
-0.01955292746424675,
-0.039344120770692825,
-0.03173546865582466,
-0.07749950140714645,
-0.0024976623244583607,
0.0726952776312828,
0.01577976532280445,
0.0057700322940945625,
-0.017532194033265114,
-0.0667513906955719,
-0.05219456925988197,
-0.051013149321079254,
0.009371738880872726,
-0.058646656572818756,
-0.12162257730960846,
-0.03555403649806976,
0.048237308859825134,
0.012120780535042286,
-0.03339274972677231,
0.05059400573372841,
0.036072373390197754,
-0.022864459082484245,
-0.007025918923318386,
0.026457838714122772,
-0.024396106600761414,
0.012816631235182285,
0.0040333326905965805,
0.0937371477484703,
0.07219615578651428,
0.04526900127530098,
0.10230150073766708,
-0.13260072469711304,
-0.042452868074178696,
0.01510260533541441,
0.026905393227934837,
-0.021408116444945335,
0.03033638559281826,
-0.017716603353619576,
0.020333893597126007,
0.058713965117931366,
0.06485015153884888,
-0.04287446290254593,
0.09145786613225937,
0.07346590608358383,
0.026244282722473145,
-0.03355744481086731,
-0.10592812299728394,
0.029825272038578987,
0.040537625551223755,
0.02250613272190094,
-0.03472067415714264,
0.0451405793428421,
-0.04706764593720436,
0.1100059375166893,
0.02033822052180767,
0.034927356988191605,
-0.06933033466339111,
-0.041448384523391724,
0.08017827570438385,
0.05784662440419197,
-0.0942949503660202,
-0.019233331084251404,
0.00979501660913229,
0.025209886953234673,
-0.042025573551654816,
0.022892124950885773,
0.08473720401525497,
-0.015690747648477554,
-0.023059749975800514,
-0.08187732100486755,
-0.009529059752821922,
-0.01781228370964527,
-0.02796030603349209,
-0.023492686450481415,
-0.001647092984057963,
0.010834976099431515,
-0.057961463928222656,
0.04963523894548416,
-0.011317707598209381,
2.4809238064226397e-33,
0.06977783143520355,
0.04508177191019058,
0.015150805935263634,
-0.004871923476457596,
0.08427763730287552,
0.012084639631211758,
-0.08991872519254684,
-0.021836938336491585,
-0.044908586889505386,
0.04144494608044624,
-0.09550277888774872,
0.015849824994802475,
-0.08368877321481705,
0.026923097670078278,
0.0005136628751643002,
-0.008424099534749985,
-0.053040869534015656,
0.03118935413658619,
0.003464156063273549,
0.03889729082584381,
0.08021432906389236,
0.09705165773630142,
-0.05069832503795624,
-0.0276156198233366,
0.03136907145380974,
0.05340314656496048,
0.047695234417915344,
-0.0003561960475053638,
-0.04885553941130638,
0.037059277296066284,
-0.025442076846957207,
0.0754496231675148,
-0.017614981159567833,
-0.011939182877540588,
0.017030883580446243,
-0.002309926087036729,
-0.058038320392370224,
0.0002458490489516407,
-0.015132466331124306,
-0.03940477967262268,
0.08152295649051666,
-0.0038709871005266905,
0.013994856737554073,
-0.007690198719501495,
-0.04244386404752731,
-0.05837637186050415,
0.04984591156244278,
0.007677460554987192,
0.07328794151544571,
-0.059993576258420944,
0.053565192967653275,
0.05664496496319771,
-0.022202011197805405,
-0.025775646790862083,
-0.02363477647304535,
0.023365136235952377,
0.1027134582400322,
0.02308966964483261,
0.03277721256017685,
0.07331428676843643,
0.04192470386624336,
-0.02971837669610977,
0.019455349072813988,
0.017519330605864525,
0.04556965082883835,
0.005475748796015978,
-0.10085013508796692,
-0.05522865802049637,
0.07591349631547928,
0.07607590407133102,
-0.08201709389686584,
0.003272582544013858,
0.02322976663708687,
0.06587237864732742,
0.009213507175445557,
-0.03329794108867645,
0.013749876990914345,
-0.05178750306367874,
-0.05931564047932625,
-0.009810551069676876,
-0.036201171576976776,
0.02894660457968712,
-0.045554712414741516,
-0.007495350204408169,
-0.02818841114640236,
-0.10955896973609924,
0.029910871759057045,
-0.04380819573998451,
-0.06989395618438721,
-0.04015535116195679,
0.004092326387763023,
0.09594418853521347,
0.015287552028894424,
-0.02075120620429516,
-0.033939823508262634,
-2.7423083833104144e-33,
0.019425740465521812,
-0.02158776856958866,
-0.014818734489381313,
0.022101687267422676,
0.002331912750378251,
0.010434890165925026,
0.005256227217614651,
0.07640990614891052,
0.006972408853471279,
-0.02563009038567543,
0.05020459368824959,
-0.05670002102851868,
0.04516269639134407,
-0.05498882755637169,
0.0851830542087555,
-0.05088965967297554,
0.015568614937365055,
-0.10567638278007507,
-0.008379794657230377,
0.017473895102739334,
-0.048790138214826584,
0.029212404042482376,
-0.03547787293791771,
0.010020510293543339,
-0.01940091699361801,
-0.04662986472249031,
-0.0001949940633494407,
0.09183607995510101,
-0.0053237732499837875,
-0.006678813137114048,
0.03854222223162651,
-0.07703330367803574,
-0.014965273439884186,
0.04128459095954895,
-0.1114364042878151,
0.02992618829011917,
0.09679512679576874,
-0.016253802925348282,
-0.013649960979819298,
0.0063644456677138805,
0.12672585248947144,
0.050340648740530014,
-0.10344066470861435,
0.011279616504907608,
-0.006831985432654619,
-0.019694963470101357,
-0.09547577053308487,
0.037467509508132935,
-0.1199834793806076,
-0.028933074325323105,
0.0675070658326149,
-0.003748699091374874,
0.006277069449424744,
0.03625401481986046,
0.04661017656326294,
-0.02783060632646084,
0.06060820072889328,
-0.08264407515525818,
0.058840230107307434,
0.04504086822271347,
-0.04380567744374275,
-0.03921940550208092,
-0.05027434229850769,
-0.026146158576011658,
0.07129393517971039,
-0.09387730062007904,
-0.04987837001681328,
0.02008812688291073,
-0.036517977714538574,
0.0520261749625206,
0.01863682083785534,
-0.03411201760172844,
0.002696048468351364,
0.0763414055109024,
0.027989160269498825,
-0.032229308038949966,
-0.046336252242326736,
-0.051597852259874344,
0.030561408028006554,
-0.06197700276970863,
-0.0924760177731514,
-0.00020836619660258293,
0.07335430383682251,
0.07960937172174454,
0.061107926070690155,
0.0878775492310524,
0.05251308158040047,
0.04589878395199776,
0.05077788233757019,
0.04372532293200493,
-0.048260126262903214,
-0.06854131072759628,
-0.04167579859495163,
0.10155504196882248,
-0.017795726656913757,
-4.310184564815245e-8,
-0.05534471943974495,
-0.001210327260196209,
-0.02334015816450119,
0.011743049137294292,
-0.02226565033197403,
-0.0347774438560009,
0.007055256981402636,
0.0031952972058206797,
0.009049912914633751,
0.0588848777115345,
0.012356653809547424,
0.01330766174942255,
-0.037451840937137604,
-0.0000265433409367688,
-0.03233446553349495,
0.11447261273860931,
0.05981171503663063,
0.04155965894460678,
-0.061070941388607025,
0.0299867931753397,
-0.0055525884963572025,
0.11612778902053833,
-0.011007203720510006,
-0.011579927988350391,
0.030174531042575836,
0.014503276906907558,
-0.05767931416630745,
0.11904361099004745,
0.001084742834791541,
-0.030124222859740257,
-0.004614745732396841,
-0.0170893557369709,
0.11113206297159195,
-0.05321941897273064,
-0.052345797419548035,
0.11494588106870651,
0.06959745287895203,
-0.03370455652475357,
0.02401624247431755,
0.01090063713490963,
0.05555284023284912,
0.0153999924659729,
-0.10299921780824661,
-0.04303405433893204,
0.01808011531829834,
0.010336114093661308,
0.008439085446298122,
-0.028572896495461464,
0.01013551652431488,
0.10652242600917816,
-0.014719848521053791,
0.021447427570819855,
-0.03886902704834938,
-0.01317508053034544,
-0.0479600690305233,
-0.049620117992162704,
-0.08798123896121979,
-0.037238363176584244,
0.08421498537063599,
-0.008571244776248932,
0.062124863266944885,
-0.03989238291978836,
-0.02113664522767067,
-0.00613383250311017
] |
tdopierre/ProtAugment-ParaphraseGenerator | d389c0e6ca11d0add1eaaecf6d8848fa76e6ab46 | 2021-07-07T14:15:07.000Z | [
"pytorch",
"bart",
"text2text-generation",
"en",
"dataset:Quora",
"dataset:MSR",
"dataset:Google-PAWS",
"arxiv:2105.12995",
"transformers",
"Paraphase Generation",
"Data Augmentation",
"autotrain_compatible"
] | text2text-generation | false | tdopierre | null | tdopierre/ProtAugment-ParaphraseGenerator | 1,123 | 4 | transformers | ---
language: "en"
tags:
- Paraphase Generation
- Data Augmentation
datasets:
- Quora
- MSR
- Google-PAWS
---
[](https://arxiv.org/abs/2105.12995)
This model is used to generate paraphrases. It has been trained on a mix of 3 different paraphrase detection datasets: MSR, Quora, Google-PAWS.
We use this model in our ACL'21 Paper ["PROTAUGMENT: Unsupervised diverse short-texts paraphrasing for intent detection meta-learning"](https://arxiv.org/abs/2105.12995)
Jointly used with generation constraints, this model can generate diverse paraphrases. We use those paraphrases as a data augmentation technique to further boost a classification model's generalization capability. Feel free to play with the [code](https://github.com/tdopierre/ProtAugment)!
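As a concrete illustration, the sketch below generates a few paraphrase candidates with plain transformers. It is not taken from the ProtAugment repository, and the input sentence and generation settings are illustrative assumptions.
```python
# Minimal sketch: beam search over the BART-based paraphrase generator.
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

model_name = "tdopierre/ProtAugment-ParaphraseGenerator"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)

# Input sentence and generation settings are illustrative assumptions.
inputs = tokenizer("How do I reset my password?", return_tensors="pt")
output_ids = model.generate(
    **inputs,
    num_beams=5,
    num_return_sequences=3,  # a few diverse candidates
    max_length=64,
)
print(tokenizer.batch_decode(output_ids, skip_special_tokens=True))
```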
If you use this model, please consider citing our paper.
```
@article{Dopierre2021ProtAugmentUD,
title={ProtAugment: Unsupervised diverse short-texts paraphrasing for intent detection meta-learning},
author={Thomas Dopierre and C. Gravier and Wilfried Logerais},
journal={ArXiv},
year={2021},
volume={abs/2105.12995}
}
```
| [
-0.12025392800569534,
-0.05582575500011444,
0.02411247231066227,
0.010839407332241535,
0.0824587270617485,
0.029253678396344185,
0.048134803771972656,
-0.05095698684453964,
-0.021874675527215004,
-0.05244489014148712,
0.01009508315473795,
-0.056889913976192474,
0.04691489785909653,
0.02377413585782051,
0.00891297310590744,
0.02333366498351097,
-0.0032248920761048794,
0.023678215220570564,
-0.1545780748128891,
-0.08939097076654434,
0.12489496916532516,
0.05988185107707977,
0.033059779554605484,
-0.003179222345352173,
-0.02202565222978592,
-0.00026578581309877336,
-0.03247453644871712,
-0.005454812198877335,
0.06017559766769409,
0.0011458330554887652,
0.06661611050367355,
0.043703608214855194,
-0.0513114295899868,
0.07981490343809128,
0.046200837939977646,
0.07531571388244629,
-0.017663445323705673,
0.08203105628490448,
0.04744962230324745,
0.01473350077867508,
-0.003663986688479781,
-0.03688473626971245,
0.0010748951463028789,
0.05835011973977089,
0.08361072093248367,
0.0079048415645957,
-0.0802229642868042,
-0.02448567934334278,
-0.03252606838941574,
0.03856876865029335,
-0.07934899628162384,
0.01886082999408245,
-0.018668677657842636,
0.07139521837234497,
-0.06551630795001984,
-0.03748844191431999,
-0.049225687980651855,
0.0003026144695468247,
-0.00691416347399354,
-0.04231010749936104,
-0.08616355061531067,
-0.12756136059761047,
-0.05425618216395378,
0.008590372279286385,
-0.07901744544506073,
-0.02974076196551323,
0.02700585126876831,
0.0019417079165577888,
-0.04216063767671585,
0.06232283636927605,
-0.03767824545502663,
0.03650536388158798,
-0.010950366966426373,
0.05302784964442253,
0.005351273808628321,
0.0769270807504654,
0.052349790930747986,
-0.0347483828663826,
-0.014048239216208458,
-0.12091919779777527,
0.032309986650943756,
-0.005330029875040054,
0.05617218464612961,
0.04167313501238823,
0.062485579401254654,
-0.037492647767066956,
0.0009897074196487665,
-0.04758964851498604,
-0.009575104340910912,
0.0923832431435585,
-0.005286008585244417,
-0.11216916888952255,
0.0712728202342987,
-0.02507570944726467,
-0.013729211874306202,
0.06845705211162567,
-0.03586426004767418,
-0.1404862403869629,
-0.03946409374475479,
0.05152472108602524,
0.0063087246380746365,
0.05447739362716675,
0.012303628958761692,
-0.1179436445236206,
0.0144770797342062,
-0.013682271353900433,
0.038826003670692444,
0.044619664549827576,
0.02295895852148533,
-0.027455803006887436,
0.010110262781381607,
0.08431771397590637,
-0.0007093327003531158,
-0.09714113175868988,
-0.020662600174546242,
0.06638053804636002,
-0.04696463793516159,
-0.051029425114393234,
0.09418244659900665,
-0.004834311082959175,
-0.0341729111969471,
0.03793790936470032,
-0.04000549018383026,
0.06527432799339294,
0.035903532058000565,
-0.1055358499288559,
-0.10015899688005447,
3.0491577756101335e-33,
0.060265760868787766,
0.08975572139024734,
0.009412198327481747,
0.02307974360883236,
-0.04943447187542915,
-0.005820721387863159,
0.0008364951936528087,
0.021830841898918152,
-0.022136684507131577,
-0.010505658574402332,
-0.014531700871884823,
0.01918404921889305,
-0.061112403869628906,
0.02753400430083275,
0.07279397547245026,
0.011229985393583775,
-0.11996419727802277,
0.07958675175905228,
-0.05433044582605362,
0.019975833594799042,
0.03540265932679176,
-0.009992585517466068,
0.020331289619207382,
-0.04428927227854729,
-0.018046785145998,
0.007786868140101433,
0.049867816269397736,
-0.07491020113229752,
-0.018480701372027397,
0.03626563400030136,
-0.03473635017871857,
0.00546380877494812,
0.016662249341607094,
0.05911674350500107,
0.03320856764912605,
-0.012663248926401138,
-0.0037587371189147234,
-0.05515705421566963,
0.0044347564689815044,
-0.009097844362258911,
-0.060120295733213425,
0.023566581308841705,
0.10698471963405609,
0.011440044268965721,
-0.023591291159391403,
-0.10449289530515671,
-0.049519117921590805,
0.011786960996687412,
0.0017270203679800034,
0.010009516030550003,
0.07156959921121597,
0.022684048861265182,
0.02191764861345291,
-0.08559098094701767,
0.03772179037332535,
0.009632082656025887,
-0.0393151231110096,
0.03354953974485397,
0.04362168908119202,
-0.004464481025934219,
0.0441625639796257,
-0.014003467746078968,
0.02179819531738758,
0.061072640120983124,
0.037347208708524704,
-0.0006605911767110229,
-0.028952384367585182,
0.05453718453645706,
0.03236071765422821,
0.04412328079342842,
-0.0007220733095891774,
0.015985438600182533,
-0.0631282702088356,
-0.027023695409297943,
0.06009301543235779,
-0.0011745109222829342,
0.027774961665272713,
-0.034010209143161774,
-0.01575705036520958,
-0.0007909233681857586,
-0.0074739279225468636,
-0.059576451778411865,
0.031110132113099098,
-0.014362232759594917,
-0.08161991834640503,
-0.028605874627828598,
0.064607635140419,
-0.08557378500699997,
-0.005332689266651869,
-0.019426677376031876,
0.0015652836300432682,
0.03473568335175514,
-0.07624395936727524,
-0.004057866055518389,
-0.006785874255001545,
-2.5966752861610873e-33,
0.027883345261216164,
0.01269103866070509,
-0.019265184178948402,
0.047443974763154984,
-0.026960669085383415,
-0.027501868084073067,
0.0036311959847807884,
0.04071119800209999,
-0.011520832777023315,
-0.05571823567152023,
-0.04863631725311279,
0.054638173431158066,
0.027590055018663406,
-0.060440950095653534,
-0.012871296145021915,
-0.014661574736237526,
0.0038507655262947083,
0.05570872128009796,
0.036699723452329636,
0.10127799212932587,
-0.023923110216856003,
0.08022894710302353,
-0.04362183064222336,
0.047578439116477966,
0.01947897858917713,
0.03497437387704849,
-0.0234959926456213,
0.09262272715568542,
0.03255722299218178,
0.007558377459645271,
-0.0143802585080266,
0.05889512598514557,
-0.08582651615142822,
-0.03714834898710251,
-0.097105972468853,
0.03531336411833763,
0.07077101618051529,
-0.030769605189561844,
-0.017870241776108742,
0.07543781399726868,
0.06584061682224274,
0.010624096728861332,
-0.03615681082010269,
0.00389090902172029,
-0.039831388741731644,
-0.021721895784139633,
-0.05937029421329498,
0.003863672725856304,
0.04110351577401161,
-0.0350647047162056,
0.008736714720726013,
-0.0037487326189875603,
-0.08718638122081757,
0.005541488062590361,
-0.004460382740944624,
-0.11509577929973602,
-0.03194389492273331,
-0.08475536853075027,
-0.044357217848300934,
-0.07046904414892197,
-0.025986183434724808,
0.056147366762161255,
0.020825723186135292,
0.01988140493631363,
0.07765587419271469,
-0.08192884176969528,
-0.036066047847270966,
-0.027521686628460884,
-0.06581754982471466,
0.006705393083393574,
0.051891833543777466,
-0.030693192034959793,
-0.03247032314538956,
-0.008611421100795269,
0.021708400920033455,
-0.04132896661758423,
0.10469204187393188,
-0.04553573951125145,
-0.08091728389263153,
-0.09389899671077728,
0.04124775901436806,
-0.04341569170355797,
0.023124296218156815,
0.08547289669513702,
-0.020153310149908066,
0.09499920904636383,
0.00015005419845692813,
0.13653406500816345,
-0.003799982136115432,
0.06540840864181519,
-0.0563686229288578,
0.02711334638297558,
0.008278707042336464,
0.12571625411510468,
-0.012626235373318195,
-5.065539099291527e-8,
-0.03500080481171608,
0.1051684319972992,
-0.10298382490873337,
0.051962386816740036,
-0.017989838495850563,
-0.025841442868113518,
-0.06053842976689339,
0.01793133094906807,
-0.017728585749864578,
-0.06568261981010437,
0.027406906709074974,
0.04929954931139946,
-0.0115823894739151,
0.02556842565536499,
-0.012012558057904243,
0.047797054052352905,
-0.02738177590072155,
0.015182886272668839,
0.018843740224838257,
0.04309985041618347,
0.016131043434143066,
0.027233537286520004,
-0.039959073066711426,
0.0021711778827011585,
0.07181256264448166,
0.007867475040256977,
-0.02574535645544529,
0.017986545339226723,
0.02127085253596306,
0.004793490283191204,
-0.0003177302423864603,
-0.006212403066456318,
-0.05306953936815262,
-0.03896034136414528,
0.0637076199054718,
0.11761987954378128,
-0.030906494706869125,
-0.05736666917800903,
0.016929320991039276,
0.006039074622094631,
0.05427638813853264,
0.05046198517084122,
-0.07637812942266464,
0.016703113913536072,
0.013875811360776424,
-0.06343153864145279,
0.054807644337415695,
-0.09418956935405731,
0.039076417684555054,
-0.0027254470624029636,
0.029460150748491287,
-0.08735726773738861,
0.0180105771869421,
-0.03219737485051155,
0.036268237978219986,
0.024093760177493095,
0.012629731558263302,
-0.03293803706765175,
0.08407678455114365,
0.0399373322725296,
0.10179232060909271,
0.028601042926311493,
-0.06965334713459015,
0.041182052344083786
] |
valhalla/distilt5-qa-qg-hl-12-6 | f865250f90ada38bcb43602dd5254e4c166e6b8e | 2021-09-23T16:42:44.000Z | [
"pytorch",
"t5",
"text2text-generation",
"dataset:squad",
"transformers",
"question-generation",
"distilt5",
"distilt5-qg",
"license:mit",
"autotrain_compatible"
] | text2text-generation | false | valhalla | null | valhalla/distilt5-qa-qg-hl-12-6 | 1,119 | null | transformers | ---
datasets:
- squad
tags:
- question-generation
- distilt5
- distilt5-qg
widget:
- text: 'generate question: <hl> 42 <hl> is the answer to life, the universe and everything.
</s>'
- text: 'question: What is 42 context: 42 is the answer to life, the universe and
everything. </s>'
license: mit
---
## DistilT5 for question-generation
This is a distilled version of the [t5-base-qa-qg-hl](https://huggingface.co/valhalla/t5-base-qa-qg-hl) model, trained for question answering and answer-aware question generation tasks.
The model is distilled using the **No Teacher Distillation** method proposed by Huggingface, [here](https://github.com/huggingface/transformers/tree/master/examples/seq2seq#distilbart).
We simply copy alternating layers from `t5-base-qa-qg-hl` and fine-tune further on the same data. The following table lists the distilled models and their metrics.
| Name | BLEU-4 | METEOR | ROUGE-L | QA-EM | QA-F1 |
|---------------------------------------------------------------------------------|---------|---------|---------|--------|--------|
| [distilt5-qg-hl-6-4](https://huggingface.co/valhalla/distilt5-qg-hl-6-4) | 18.4141 | 24.8417 | 40.3435 | - | - |
| [distilt5-qa-qg-hl-6-4](https://huggingface.co/valhalla/distilt5-qa-qg-hl-6-4) | 18.6493 | 24.9685 | 40.5605 | 76.13 | 84.659 |
| [distilt5-qg-hl-12-6](https://huggingface.co/valhalla/distilt5-qg-hl-12-6) | 20.5275 | 26.5010 | 43.2676 | - | - |
| [distilt5-qa-qg-hl-12-6](https://huggingface.co/valhalla/distilt5-qa-qg-hl-12-6)| 20.6109 | 26.4533 | 43.0895 | 81.61 | 89.831 |
You can play with the model using the inference API. Here's how you can use it:
For QG
`generate question: <hl> 42 <hl> is the answer to life, the universe and everything.`
For QA
`question: What is 42 context: 42 is the answer to life, the universe and everything.`
For more details, see [this](https://github.com/patil-suraj/question_generation) repo.
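If you prefer not to use the repo's pipelines, the checkpoint can also be run directly with `transformers` as a standard T5 sequence-to-sequence model. The snippet below is a minimal sketch under that assumption; the prompt formats come from this card, while the decoding settings are illustrative.
```python3
from transformers import AutoTokenizer, T5ForConditionalGeneration

model_name = "valhalla/distilt5-qa-qg-hl-12-6"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = T5ForConditionalGeneration.from_pretrained(model_name)

# Answer-aware question generation: highlight the answer with <hl> tokens.
qg_input = "generate question: <hl> 42 <hl> is the answer to life, the universe and everything."
input_ids = tokenizer(qg_input, return_tensors="pt").input_ids
output_ids = model.generate(input_ids, max_length=32, num_beams=4)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))

# Question answering: prefix with "question: ... context: ...".
qa_input = "question: What is 42 context: 42 is the answer to life, the universe and everything."
input_ids = tokenizer(qa_input, return_tensors="pt").input_ids
output_ids = model.generate(input_ids, max_length=32)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```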
### Model in action 🚀
You'll need to clone the [repo](https://github.com/patil-suraj/question_generation).
[](https://colab.research.google.com/github/patil-suraj/question_generation/blob/master/question_generation.ipynb)
```python3
from pipelines import pipeline
nlp = pipeline("multitask-qa-qg", model="valhalla/distilt5-qa-qg-hl-12-6")
# to generate questions simply pass the text
nlp("42 is the answer to life, the universe and everything.")
=> [{'answer': '42', 'question': 'What is the answer to life, the universe and everything?'}]
# for qa pass a dict with "question" and "context"
nlp({
"question": "What is 42 ?",
"context": "42 is the answer to life, the universe and everything."
})
=> 'the answer to life, the universe and everything'
``` | [
-0.09939464181661606,
-0.00011183145397808403,
0.055383436381816864,
0.02249300852417946,
-0.0114706726744771,
-0.011137647554278374,
0.009664932265877724,
-0.01784418150782585,
-0.0016285604797303677,
-0.02439757250249386,
0.03284919634461403,
-0.10080055892467499,
0.02819550782442093,
-0.01967894285917282,
-0.020520979538559914,
0.08624435216188431,
0.016038358211517334,
-0.024925420060753822,
-0.14485765993595123,
-0.05172817036509514,
0.029051583260297775,
0.03169920668005943,
0.02691822312772274,
0.03110964223742485,
0.02444642037153244,
0.02227015234529972,
0.011667396873235703,
0.08147349953651428,
0.09880021959543228,
-0.060726940631866455,
0.04443246126174927,
0.06690245866775513,
-0.05944720655679703,
0.04931258037686348,
-0.02081097662448883,
0.11480855196714401,
-0.037361811846494675,
-0.01180450152605772,
-0.02820800617337227,
0.005592848174273968,
0.0033329930156469345,
-0.03114773891866207,
-0.05987617373466492,
0.06562590599060059,
0.10126202553510666,
-0.06106985732913017,
-0.09018977731466293,
-0.018375691026449203,
0.0015893251402303576,
-0.027922004461288452,
-0.08656448870897293,
-0.05267445370554924,
-0.01324821263551712,
0.03281388059258461,
0.0008099258411675692,
0.038236912339925766,
0.02086026780307293,
0.0055521405301988125,
-0.04262525215744972,
-0.0289952103048563,
-0.050772521644830704,
-0.02263735607266426,
-0.06316639482975006,
0.011480097658932209,
-0.0016666313167661428,
-0.014252660796046257,
-0.011290771886706352,
0.07344535738229752,
0.011485820636153221,
-0.028469428420066833,
-0.10920099914073944,
0.0012699299259111285,
0.05026598647236824,
0.037255849689245224,
0.06126251816749573,
0.027041299268603325,
-0.0028815476689487696,
-0.030421648174524307,
0.04762352257966995,
-0.048226043581962585,
0.024558762088418007,
-0.05978338420391083,
0.0495435930788517,
0.0316399410367012,
0.06803134828805923,
0.014550563879311085,
-0.027617890387773514,
0.04412667453289032,
0.014492387883365154,
-0.002063852734863758,
-0.05992213636636734,
0.006795477122068405,
0.0025585349649190903,
-0.0042154863476753235,
0.005487582180649042,
0.0917036384344101,
-0.005881742108613253,
-0.007935791276395321,
-0.07318933308124542,
0.09555398672819138,
0.026152919977903366,
0.029021350666880608,
0.006247173063457012,
-0.010582935065031052,
-0.031252797693014145,
-0.0365033857524395,
0.025129983201622963,
0.03512118384242058,
0.008840696886181831,
-0.14400596916675568,
-0.023990007117390633,
0.013319962657988071,
-0.009326220490038395,
-0.010518400929868221,
0.010055136866867542,
-0.038190387189388275,
0.033225446939468384,
0.04909525439143181,
-0.010513384826481342,
-0.016895735636353493,
0.0302050169557333,
0.01280654314905405,
-0.015554053708910942,
-0.006905007641762495,
-0.022189930081367493,
-0.05562305077910423,
-0.06538703292608261,
4.3317117438014994e-33,
0.14035813510417938,
0.06021276116371155,
0.05838168412446976,
0.1273251622915268,
0.0021238531917333603,
0.026448048651218414,
-0.013692311011254787,
0.044099245220422745,
-0.05745546519756317,
0.06548923999071121,
-0.004705592058598995,
-0.0178483035415411,
-0.08372814953327179,
-0.034566156566143036,
0.05888841301202774,
-0.06886772066354752,
-0.13793808221817017,
0.016361277550458908,
-0.03755458444356918,
0.03821690380573273,
0.033497486263513565,
0.03886129707098007,
-0.03525089472532272,
-0.0558064840734005,
0.017979096621274948,
0.07974456250667572,
0.07286734879016876,
-0.033126674592494965,
-0.0043333047069609165,
0.022837772965431213,
-0.10735458135604858,
-0.03326364606618881,
-0.04887957125902176,
-0.012528610415756702,
-0.025413241237401962,
-0.0034013139083981514,
-0.011394917964935303,
-0.01594793237745762,
-0.034772928804159164,
-0.06832684576511383,
0.03497621789574623,
0.027842940762639046,
0.1076105609536171,
-0.01075172983109951,
-0.05208691582083702,
0.04816731438040733,
0.05657585710287094,
-0.01637580804526806,
0.019176656380295753,
-0.00808507576584816,
-0.0030800998210906982,
-0.019025159999728203,
-0.044535811990499496,
-0.12892509996891022,
-0.02118055894970894,
-0.0037427570205181837,
0.027885837480425835,
0.07143828272819519,
0.037766918540000916,
0.08971129357814789,
0.012774527072906494,
0.035966657102108,
-0.003725231857970357,
0.07372180372476578,
0.09267739951610565,
0.03318926319479942,
-0.09307374060153961,
-0.04591750726103783,
0.11868775635957718,
0.04179732874035835,
-0.033952198922634125,
0.0061541153118014336,
-0.0314670130610466,
0.002241545356810093,
0.05529141426086426,
-0.08431018888950348,
-0.007572584319859743,
-0.04333622753620148,
-0.01935666799545288,
-0.018671466037631035,
0.027836505323648453,
-0.01865820586681366,
-0.058173056691884995,
-0.06398951262235641,
-0.018551558256149292,
-0.054899223148822784,
0.06443651765584946,
-0.03562132641673088,
0.020433614030480385,
-0.055116623640060425,
0.016493475064635277,
-0.04734986647963524,
0.05414683371782303,
-0.0845600962638855,
-0.020205993205308914,
-3.462639750221796e-33,
0.045488569885492325,
-0.038049690425395966,
-0.10096234083175659,
0.10896465927362442,
0.07265188544988632,
-0.04075473174452782,
0.0008619111613370478,
0.0853271409869194,
0.03287294879555702,
0.006312064826488495,
0.06666018068790436,
0.03670068830251694,
-0.040923334658145905,
-0.009577566757798195,
0.008058805018663406,
0.022838052362203598,
-0.03958701714873314,
-0.059472717344760895,
-0.02647658996284008,
0.0323447547852993,
-0.03880591690540314,
0.08665471524000168,
-0.0876799151301384,
0.0430099219083786,
0.009952663443982601,
0.05441656708717346,
0.023024803027510643,
0.07336913049221039,
0.07130638509988785,
0.03327496349811554,
-0.0452306903898716,
-0.06934549659490585,
0.02418973110616207,
-0.008516102097928524,
-0.12886686623096466,
0.06041233241558075,
0.015881022438406944,
-0.014864690601825714,
-0.05439264327287674,
0.05106036737561226,
-0.0009449746576137841,
-0.018880542367696762,
-0.07243059575557709,
0.04025202617049217,
-0.07337293028831482,
0.05722283199429512,
-0.08650185167789459,
-0.04470368102192879,
-0.007900238037109375,
0.06022920086979866,
0.08279579132795334,
-0.025595808401703835,
-0.10763666033744812,
-0.03156834840774536,
-0.05865910276770592,
-0.003091613994911313,
-0.004693571012467146,
-0.0026701840106397867,
-0.02889353409409523,
0.028134455904364586,
-0.004454826936125755,
0.02199912630021572,
0.01587141491472721,
-0.04695011302828789,
0.04526348412036896,
-0.02303929254412651,
-0.04117561876773834,
0.09930114448070526,
-0.0375901497900486,
-0.0543680414557457,
-0.034104570746421814,
0.012517926283180714,
0.10185709595680237,
-0.051726531237363815,
0.08215928822755814,
-0.025122029706835747,
-0.008572051301598549,
-0.009538072161376476,
0.05542512238025665,
-0.0632636547088623,
-0.08126836270093918,
-0.05552191287279129,
0.019220473244786263,
0.1356343924999237,
0.020353224128484726,
-0.04780034348368645,
0.04960760101675987,
0.1258564293384552,
0.019749082624912262,
0.03649216517806053,
0.034739360213279724,
0.014660882763564587,
0.028194822371006012,
0.13697612285614014,
0.011032180860638618,
-5.583597584291056e-8,
-0.045536842197179794,
0.009544559754431248,
-0.051840804517269135,
0.023386206477880478,
-0.005185223184525967,
-0.02243294194340706,
0.03137283772230148,
0.03433866426348686,
-0.0014316460583359003,
0.00318949855864048,
0.0021463471930474043,
0.05202841758728027,
-0.05606740340590477,
-0.006835850887000561,
0.018626034259796143,
0.08145801723003387,
-0.029457997530698776,
0.07986894994974136,
-0.03504132479429245,
-0.07854536920785904,
0.07759087532758713,
0.0010583358816802502,
-0.018022198230028152,
0.02736038714647293,
0.05685875192284584,
-0.025539590045809746,
-0.08095112442970276,
0.07776778936386108,
0.06442820280790329,
0.0020100707188248634,
0.03564416989684105,
-0.04504769667983055,
-0.07032544165849686,
-0.004480385221540928,
-0.010455083101987839,
0.060710448771715164,
-0.09999599307775497,
-0.05734454467892647,
0.013324324041604996,
0.07885371893644333,
-0.014929233118891716,
-0.030146310105919838,
-0.0879000648856163,
0.013447903096675873,
0.00038299086736515164,
-0.013818223960697651,
-0.05075884610414505,
-0.09015852212905884,
0.042242612689733505,
0.014102151617407799,
-0.03262238949537277,
-0.026466775685548782,
-0.025155765935778618,
-0.013734996318817139,
0.017906388267874718,
0.017104314640164375,
0.04755020514130592,
0.001357253990136087,
0.010493765585124493,
-0.041031114757061005,
0.05761504918336868,
0.015014712698757648,
0.044864460825920105,
0.010182805359363556
] |
huggingface/CodeBERTa-language-id | 386451c69a3cb8722b742e66987d888db858b33c | 2022-06-27T15:49:04.000Z | [
"pytorch",
"tf",
"jax",
"roberta",
"text-classification",
"code",
"dataset:code_search_net",
"arxiv:1909.09436",
"transformers"
] | text-classification | false | huggingface | null | huggingface/CodeBERTa-language-id | 1,118 | 12 | transformers | ---
language: code
thumbnail: https://cdn-media.huggingface.co/CodeBERTa/CodeBERTa.png
datasets:
- code_search_net
---
# CodeBERTa-language-id: The World’s fanciest programming language identification algo 🤯
To demonstrate the usefulness of our CodeBERTa pretrained model on downstream tasks beyond language modeling, we fine-tune the [`CodeBERTa-small-v1`](https://huggingface.co/huggingface/CodeBERTa-small-v1) checkpoint on the task of classifying a sample of code into the programming language it's written in (*programming language identification*).
We add a sequence classification head on top of the model.
On the evaluation dataset, we attain an eval accuracy and F1 > 0.999, which is not surprising given that the task of language identification is relatively easy (see below for an intuition of why).
## Quick start: using the raw model
```python
from transformers import RobertaForSequenceClassification, RobertaTokenizer

CODEBERTA_LANGUAGE_ID = "huggingface/CodeBERTa-language-id"
tokenizer = RobertaTokenizer.from_pretrained(CODEBERTA_LANGUAGE_ID)
model = RobertaForSequenceClassification.from_pretrained(CODEBERTA_LANGUAGE_ID)

# CODE_TO_IDENTIFY is a string containing the source code to classify.
input_ids = tokenizer.encode(CODE_TO_IDENTIFY, return_tensors="pt")
logits = model(input_ids)[0]
language_idx = logits.argmax()  # index for the resulting label
```
## Quick start: using Pipelines 💪
```python
from transformers import TextClassificationPipeline
pipeline = TextClassificationPipeline(
model=RobertaForSequenceClassification.from_pretrained(CODEBERTA_LANGUAGE_ID),
tokenizer=RobertaTokenizer.from_pretrained(CODEBERTA_LANGUAGE_ID)
)
pipeline(CODE_TO_IDENTIFY)
```
Let's start with something very easy:
```python
pipeline("""
def f(x):
return x**2
""")
# [{'label': 'python', 'score': 0.9999965}]
```
Now let's probe shorter code samples:
```python
pipeline("const foo = 'bar'")
# [{'label': 'javascript', 'score': 0.9977546}]
```
What if I remove the `const` token from the assignment?
```python
pipeline("foo = 'bar'")
# [{'label': 'javascript', 'score': 0.7176245}]
```
For some reason, this is still statistically detected as JS code, even though it's also valid Python code. However, if we slightly tweak it:
```python
pipeline("foo = u'bar'")
# [{'label': 'python', 'score': 0.7638422}]
```
This is now detected as Python (Notice the `u` string modifier).
Okay, enough with the JS and Python domination already! Let's try fancier languages:
```python
pipeline("echo $FOO")
# [{'label': 'php', 'score': 0.9995257}]
```
(Yes, I used the word "fancy" to describe PHP 😅)
```python
pipeline("outcome := rand.Intn(6) + 1")
# [{'label': 'go', 'score': 0.9936151}]
```
Why is the problem of language identification so easy (with the correct toolkit)? Because code's syntax is rigid, and simple tokens such as `:=` (the assignment operator in Go) are perfect predictors of the underlying language:
```python
pipeline(":=")
# [{'label': 'go', 'score': 0.9998052}]
```
By the way, because we trained our own custom tokenizer on the [CodeSearchNet](https://github.blog/2019-09-26-introducing-the-codesearchnet-challenge/) dataset, and it handles streams of bytes in a very generic way, syntactic constructs such as `:=` are represented by a single token:
```python
self.tokenizer.encode(" :=", add_special_tokens=False)
# [521]
```
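For reference, the same check can be reproduced with the published tokenizer through `transformers`. This is only a sketch of that check; the exact token id you get back may differ from the `[521]` shown above.
```python
from transformers import AutoTokenizer

# Load the tokenizer shipped with the pretrained CodeBERTa checkpoint.
tokenizer = AutoTokenizer.from_pretrained("huggingface/CodeBERTa-small-v1")
print(tokenizer.encode(" :=", add_special_tokens=False))
```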
<br>
## Fine-tuning code
<details>
```python
import gzip
import json
import logging
import os
from pathlib import Path
from typing import Dict, List, Tuple
import numpy as np
import torch
from sklearn.metrics import f1_score
from tokenizers.implementations.byte_level_bpe import ByteLevelBPETokenizer
from tokenizers.processors import BertProcessing
from torch.nn.utils.rnn import pad_sequence
from torch.utils.data import DataLoader, Dataset
from torch.utils.tensorboard.writer import SummaryWriter
from tqdm import tqdm, trange
from transformers import RobertaForSequenceClassification
from transformers.data.metrics import acc_and_f1, simple_accuracy
logging.basicConfig(level=logging.INFO)
CODEBERTA_PRETRAINED = "huggingface/CodeBERTa-small-v1"
LANGUAGES = [
"go",
"java",
"javascript",
"php",
"python",
"ruby",
]
FILES_PER_LANGUAGE = 1
EVALUATE = True
# Set up tokenizer
tokenizer = ByteLevelBPETokenizer("./pretrained/vocab.json", "./pretrained/merges.txt",)
tokenizer._tokenizer.post_processor = BertProcessing(
("</s>", tokenizer.token_to_id("</s>")), ("<s>", tokenizer.token_to_id("<s>")),
)
tokenizer.enable_truncation(max_length=512)
# Set up Tensorboard
tb_writer = SummaryWriter()
class CodeSearchNetDataset(Dataset):
examples: List[Tuple[List[int], int]]
def __init__(self, split: str = "train"):
"""
train | valid | test
"""
self.examples = []
src_files = []
for language in LANGUAGES:
src_files += list(
Path("../CodeSearchNet/resources/data/").glob(f"{language}/final/jsonl/{split}/*.jsonl.gz")
)[:FILES_PER_LANGUAGE]
for src_file in src_files:
label = src_file.parents[3].name
label_idx = LANGUAGES.index(label)
print("🔥", src_file, label)
lines = []
fh = gzip.open(src_file, mode="rt", encoding="utf-8")
for line in fh:
o = json.loads(line)
lines.append(o["code"])
examples = [(x.ids, label_idx) for x in tokenizer.encode_batch(lines)]
self.examples += examples
print("🔥🔥")
def __len__(self):
return len(self.examples)
def __getitem__(self, i):
# We’ll pad at the batch level.
return self.examples[i]
model = RobertaForSequenceClassification.from_pretrained(CODEBERTA_PRETRAINED, num_labels=len(LANGUAGES))
train_dataset = CodeSearchNetDataset(split="train")
eval_dataset = CodeSearchNetDataset(split="test")
def collate(examples):
input_ids = pad_sequence([torch.tensor(x[0]) for x in examples], batch_first=True, padding_value=1)
labels = torch.tensor([x[1] for x in examples])
# ^^ unnecessary .unsqueeze(-1)
return input_ids, labels
train_dataloader = DataLoader(train_dataset, batch_size=256, shuffle=True, collate_fn=collate)
batch = next(iter(train_dataloader))
model.to("cuda")
model.train()
for param in model.roberta.parameters():
param.requires_grad = False
## ^^ Only train final layer.
print(f"num params:", model.num_parameters())
print(f"num trainable params:", model.num_parameters(only_trainable=True))
def evaluate():
eval_loss = 0.0
nb_eval_steps = 0
preds = np.empty((0), dtype=np.int64)
out_label_ids = np.empty((0), dtype=np.int64)
model.eval()
eval_dataloader = DataLoader(eval_dataset, batch_size=512, collate_fn=collate)
for step, (input_ids, labels) in enumerate(tqdm(eval_dataloader, desc="Eval")):
with torch.no_grad():
outputs = model(input_ids=input_ids.to("cuda"), labels=labels.to("cuda"))
loss = outputs[0]
logits = outputs[1]
eval_loss += loss.mean().item()
nb_eval_steps += 1
preds = np.append(preds, logits.argmax(dim=1).detach().cpu().numpy(), axis=0)
out_label_ids = np.append(out_label_ids, labels.detach().cpu().numpy(), axis=0)
eval_loss = eval_loss / nb_eval_steps
acc = simple_accuracy(preds, out_label_ids)
f1 = f1_score(y_true=out_label_ids, y_pred=preds, average="macro")
print("=== Eval: loss ===", eval_loss)
print("=== Eval: acc. ===", acc)
print("=== Eval: f1 ===", f1)
# print(acc_and_f1(preds, out_label_ids))
tb_writer.add_scalars("eval", {"loss": eval_loss, "acc": acc, "f1": f1}, global_step)
### Training loop
global_step = 0
train_iterator = trange(0, 4, desc="Epoch")
optimizer = torch.optim.AdamW(model.parameters())
for _ in train_iterator:
epoch_iterator = tqdm(train_dataloader, desc="Iteration")
for step, (input_ids, labels) in enumerate(epoch_iterator):
optimizer.zero_grad()
outputs = model(input_ids=input_ids.to("cuda"), labels=labels.to("cuda"))
loss = outputs[0]
loss.backward()
tb_writer.add_scalar("training_loss", loss.item(), global_step)
optimizer.step()
global_step += 1
if EVALUATE and global_step % 50 == 0:
evaluate()
model.train()
evaluate()
os.makedirs("./models/CodeBERT-language-id", exist_ok=True)
model.save_pretrained("./models/CodeBERT-language-id")
```
</details>
<br>
## CodeSearchNet citation
<details>
```bibtex
@article{husain_codesearchnet_2019,
title = {{CodeSearchNet} {Challenge}: {Evaluating} the {State} of {Semantic} {Code} {Search}},
shorttitle = {{CodeSearchNet} {Challenge}},
url = {http://arxiv.org/abs/1909.09436},
urldate = {2020-03-12},
journal = {arXiv:1909.09436 [cs, stat]},
author = {Husain, Hamel and Wu, Ho-Hsiang and Gazit, Tiferet and Allamanis, Miltiadis and Brockschmidt, Marc},
month = sep,
year = {2019},
note = {arXiv: 1909.09436},
}
```
</details>
| [
-0.1296144276857376,
-0.0559852197766304,
-0.034488972276449203,
0.016569163650274277,
0.064992755651474,
0.017927618697285652,
0.040093451738357544,
-0.007542737759649754,
-0.00856051966547966,
-0.07817612588405609,
-0.02311982773244381,
-0.048346906900405884,
0.01149273756891489,
-0.03461917117238045,
-0.053744543343782425,
-0.026607491075992584,
0.05311966314911842,
0.027611495926976204,
-0.08280652016401291,
-0.06862564384937286,
0.02838369831442833,
0.03389764949679375,
0.030551966279745102,
0.010882505215704441,
0.049562059342861176,
-0.010159878060221672,
0.010671955533325672,
0.03990036994218826,
0.08093038201332092,
-0.017186198383569717,
0.040791235864162445,
0.11009647697210312,
0.01173242088407278,
0.05724399909377098,
0.019527031108736992,
0.12322087585926056,
-0.06811226159334183,
-0.0492916963994503,
0.005025194957852364,
0.013069899752736092,
-0.06242483854293823,
0.03056207485496998,
0.01009348314255476,
-0.01360632386058569,
0.03285255655646324,
-0.007856853306293488,
-0.01677500270307064,
0.0011836150661110878,
-0.04288367182016373,
-0.08623886853456497,
-0.08924204856157303,
0.057241231203079224,
0.024155626073479652,
-0.01723897084593773,
-0.051946286112070084,
-0.058702997863292694,
0.056467220187187195,
-0.02455599047243595,
-0.004840772598981857,
0.022004829719662666,
-0.04889879375696182,
-0.05195038765668869,
0.013459189794957638,
-0.004195643123239279,
-0.022684406489133835,
-0.026809439063072205,
-0.006713542155921459,
-0.022623032331466675,
0.0011076349765062332,
-0.02862662635743618,
0.0024273127783089876,
0.044805385172367096,
-0.045951396226882935,
0.09627200663089752,
0.033060282468795776,
-0.03411382436752319,
0.06480181962251663,
0.06492287665605545,
0.02438093163073063,
-0.12947715818881989,
0.0017495692009106278,
-0.02646113559603691,
0.020566478371620178,
0.04959356412291527,
0.06913891434669495,
0.03431188315153122,
0.018858296796679497,
0.038389015942811966,
0.058469388633966446,
0.08489038050174713,
-0.019428199157118797,
-0.05250212177634239,
0.07975656539201736,
-0.014346521347761154,
0.09347686171531677,
0.03223012387752533,
0.06040386110544205,
-0.023762738332152367,
-0.039025191217660904,
0.09309999644756317,
-0.046397823840379715,
0.023610340431332588,
0.046328574419021606,
-0.028625929728150368,
-0.04010823369026184,
-0.002613056218251586,
0.07761605829000473,
0.03599962964653969,
0.060622669756412506,
-0.08187802881002426,
0.022323505952954292,
0.015631040558218956,
-0.0310919638723135,
-0.0009033469250425696,
0.038274288177490234,
-0.006931854411959648,
0.0219546090811491,
-0.005895631387829781,
0.035484250634908676,
0.08152130991220474,
-0.017895976081490517,
0.014089604839682579,
0.04090225324034691,
-0.012144805863499641,
0.017565980553627014,
-0.0014825534308329225,
-0.06699219346046448,
1.794274209753155e-33,
0.06294520944356918,
0.018161790445446968,
0.0181652270257473,
-0.04537942260503769,
-0.010067927651107311,
-0.07334888726472855,
0.02977391704916954,
-0.020777937024831772,
-0.0578104592859745,
0.023577546700835228,
-0.03421436622738838,
0.06309779733419418,
-0.06835294514894485,
0.05694126710295677,
-0.04474429413676262,
0.07209564745426178,
-0.048651013523340225,
0.02798214554786682,
-0.03811022639274597,
0.05739390477538109,
0.09128023684024811,
-0.03145143762230873,
0.06441580504179001,
-0.06769593060016632,
-0.03661509230732918,
0.02884470671415329,
0.04691559448838234,
-0.07372642308473587,
-0.03869776055216789,
0.04350820183753967,
-0.15190941095352173,
-0.044734396040439606,
0.07202727347612381,
0.0015793552156537771,
0.033545512706041336,
-0.01784278079867363,
0.04112885519862175,
-0.039866965264081955,
0.02742362581193447,
0.014532642439007759,
0.0359807088971138,
0.059544723480939865,
0.02552991360425949,
-0.013845609501004219,
-0.028771279379725456,
-0.030797839164733887,
-0.024773381650447845,
0.010690243914723396,
-0.0004873683210462332,
0.032865431159734726,
0.06933426856994629,
0.017413780093193054,
-0.0031702856067568064,
-0.010320517234504223,
-0.07352934032678604,
0.018707092851400375,
0.0390305295586586,
0.05414478853344917,
0.05504158139228821,
0.02740650065243244,
0.012704510241746902,
0.02881431393325329,
0.016274385154247284,
0.021472103893756866,
0.06842957437038422,
0.016376055777072906,
-0.048858195543289185,
0.053093183785676956,
0.10200575739145279,
-0.0418182797729969,
-0.0670914500951767,
-0.06442784518003464,
-0.017107132822275162,
0.05126063525676727,
0.09888139367103577,
-0.03126148134469986,
0.05079236254096031,
-0.13417473435401917,
0.00029877552879042923,
0.0027469582855701447,
0.002268214011564851,
0.007379661779850721,
0.039655230939388275,
-0.043110404163599014,
-0.10267529636621475,
-0.029785286635160446,
0.04081691429018974,
-0.09045128524303436,
-0.06119653955101967,
-0.026374435052275658,
-0.01963801495730877,
-0.05053054541349411,
0.020843541249632835,
-0.016144845634698868,
0.0064743896946311,
-4.236540047739081e-33,
0.011193204671144485,
-0.005796873942017555,
-0.001483578933402896,
0.02167256735265255,
-0.070830337703228,
-0.0697956457734108,
0.0842331051826477,
0.029286203905940056,
-0.06539828330278397,
-0.0007126448908820748,
0.03578972816467285,
-0.09675940126180649,
0.006360407918691635,
0.006684050429612398,
0.09652525931596756,
0.008087173104286194,
0.001964108319953084,
0.044543735682964325,
0.09853906184434891,
0.07784505933523178,
-0.0009117273730225861,
0.09155333042144775,
-0.10254799574613571,
0.0037375325337052345,
-0.09175227582454681,
0.006360508967190981,
-0.039450205862522125,
0.04744740575551987,
0.015003510750830173,
-0.04572277516126633,
-0.010207204148173332,
0.04384125769138336,
-0.06452261656522751,
0.007854475639760494,
-0.06886234879493713,
0.031213736161589622,
0.00909444596618414,
-0.03270544111728668,
0.0057250699028372765,
0.1400173008441925,
0.06421197950839996,
0.004737908486276865,
-0.11323490738868713,
0.03291314095258713,
-0.020486637949943542,
-0.0648675262928009,
-0.09343388676643372,
0.0020529432222247124,
-0.03179004415869713,
-0.009473293088376522,
0.025249594822525978,
0.06932856142520905,
-0.043028801679611206,
-0.0011038618395105004,
-0.009578060358762741,
-0.07634017616510391,
0.04168684780597687,
-0.03177250176668167,
-0.015540105290710926,
0.04963567107915878,
-0.05372018739581108,
-0.0033672265708446503,
0.02001105062663555,
0.00548544991761446,
0.028289511799812317,
-0.048911403864622116,
-0.03933952748775482,
0.0009764166316017509,
0.004817379172891378,
-0.11304543912410736,
0.08955176919698715,
-0.04526020586490631,
-0.018066275864839554,
0.06320030987262726,
-0.04166891425848007,
-0.0015207186806946993,
-0.03015861101448536,
-0.04317787289619446,
-0.01310240849852562,
-0.0530935563147068,
-0.08893082290887833,
-0.01648428663611412,
0.012961204163730145,
-0.0027887311298400164,
0.01461878977715969,
0.07237903773784637,
0.06204969435930252,
0.07048685848712921,
-0.0412687323987484,
0.006442587822675705,
-0.004123612307012081,
-0.0035300832241773605,
-0.00011751328565878794,
0.024925952777266502,
-0.022700201719999313,
-5.366381472526882e-8,
-0.08148173987865448,
0.0002794454921968281,
-0.022848529741168022,
0.019563402980566025,
-0.01666388474404812,
-0.03036508709192276,
-0.09478283673524857,
0.0510491207242012,
-0.02381402999162674,
-0.08716642111539841,
0.03893618658185005,
0.06520334631204605,
-0.07433278113603592,
0.002023011213168502,
-0.02222735993564129,
0.1097830981016159,
-0.02644192986190319,
0.051040906459093094,
-0.03497739136219025,
0.03323538601398468,
0.04342759773135185,
0.03512768819928169,
-0.01651715859770775,
-0.08897055685520172,
-0.0464235320687294,
-0.09455348551273346,
-0.05599678307771683,
0.1340837925672531,
0.011976019479334354,
-0.031811490654945374,
-0.006898139137774706,
0.0398174412548542,
-0.05249149352312088,
-0.05341658368706703,
0.021991951391100883,
0.07425981760025024,
-0.026509853079915047,
-0.07750388979911804,
-0.020444557070732117,
0.01817586086690426,
0.06645558029413223,
0.03710022196173668,
-0.0651954784989357,
-0.07247279584407806,
0.0430862121284008,
-0.03072764351963997,
-0.05911280959844589,
-0.1094777062535286,
0.02002185396850109,
-0.023089492693543434,
0.02867795340716839,
-0.023702966049313545,
-0.10766763240098953,
0.06382095813751221,
0.07223425805568695,
-0.012837507762014866,
-0.12985219061374664,
-0.03169778734445572,
0.020915132015943527,
0.08994553983211517,
-0.04652831703424454,
0.06370504200458527,
0.030061814934015274,
-0.022853009402751923
] |
voidful/bart-distractor-generation-both | 33dac39b96071b8fb44fe0bab40b89c2057aae27 | 2021-04-04T16:20:20.000Z | [
"pytorch",
"bart",
"text2text-generation",
"en",
"dataset:race",
"transformers",
"distractor",
"generation",
"seq2seq",
"autotrain_compatible"
] | text2text-generation | false | voidful | null | voidful/bart-distractor-generation-both | 1,117 | null | transformers | ---
language: en
tags:
- bart
- distractor
- generation
- seq2seq
datasets:
- race
metrics:
- bleu
- rouge
pipeline_tag: text2text-generation
widget:
- text: "When you ' re having a holiday , one of the main questions to ask is which hotel or apartment to choose . However , when it comes to France , you have another special choice : treehouses . In France , treehouses are offered to travelers as a new choice in many places . The price may be a little higher , but you do have a chance to _ your childhood memories . Alain Laurens , one of France ' s top treehouse designers , said , ' Most of the people might have the experience of building a den when they were young . And they like that feeling of freedom when they are children . ' Its fairy - tale style gives travelers a special feeling . It seems as if they are living as a forest king and enjoying the fresh air in the morning . Another kind of treehouse is the ' star cube ' . It gives travelers the chance of looking at the stars shining in the sky when they are going to sleep . Each ' star cube ' not only offers all the comfortable things that a hotel provides for travelers , but also gives them a chance to look for stars by using a telescope . The glass roof allows you to look at the stars from your bed . </s> The passage mainly tells us </s> treehouses in france."
---
# bart-distractor-generation-both
## Model description
This model is a sequence-to-sequence distractor generator which takes an answer, question and context as an input, and generates a distractor as an output. It is based on a pretrained `bart-base` model.
This model was trained with Parallel MLM & Answer Negative Regularization, as described in the [paper](https://www.aclweb.org/anthology/2020.findings-emnlp.393/).
For details, please see https://github.com/voidful/BDG.
## Intended uses & limitations
The model is trained to generate examination-style multiple-choice distractors. It performs best with full-sentence answers.
#### How to use
The model takes the concatenated context, question and answer as an input sequence, and will generate a full distractor sentence as an output sequence. The max sequence length is 1024 tokens. Inputs should be organised into the following format:
```
context </s> question </s> answer
```
The input sequence can then be encoded and passed as the `input_ids` argument in the model's `generate()` method.
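As a minimal sketch, assuming the standard `transformers` BART seq2seq API (the example text and decoding settings below are illustrative and not taken from the original repo):
```python
from transformers import AutoTokenizer, BartForConditionalGeneration

model_name = "voidful/bart-distractor-generation-both"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = BartForConditionalGeneration.from_pretrained(model_name)

context = "42 is the answer to life, the universe and everything."
question = "What is the answer to life, the universe and everything?"
answer = "42"

# Concatenate context, question and answer with </s> separators, as described above.
text = f"{context} </s> {question} </s> {answer}"
input_ids = tokenizer(text, return_tensors="pt", truncation=True, max_length=1024).input_ids

output_ids = model.generate(input_ids, max_length=64, num_beams=5, early_stopping=True)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```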
For details, please see https://github.com/voidful/BDG.
#### Limitations and bias
The model is limited to generating distractors in the same style as those found in [RACE](https://www.aclweb.org/anthology/D17-1082/). The generated distractors can potentially be leading or reflect biases that are present in the context. If the context is too short or completely absent, or if the context, question and answer do not match, the generated distractor is likely to be incoherent. | [
0.0849575325846672,
0.021134143695235252,
0.056632302701473236,
0.11352335661649704,
0.030987534672021866,
-0.02140902727842331,
0.04052186384797096,
-0.05516694113612175,
0.09369223564863205,
0.0068085407838225365,
-0.01892196387052536,
-0.030523983761668205,
0.028811732307076454,
-0.021420976147055626,
0.06827639043331146,
-0.02475070022046566,
0.023254523053765297,
-0.01841072551906109,
0.035951707512140274,
0.05747723579406738,
0.007971875369548798,
-0.08295859396457672,
-0.008543791249394417,
0.06393062323331833,
0.05083981156349182,
-0.04637480899691582,
-0.06922608613967896,
0.024451466277241707,
-0.011158746667206287,
-0.062140971422195435,
0.021364480257034302,
0.09690886735916138,
-0.04005655646324158,
0.03966771811246872,
-0.03340981528162956,
0.02433614805340767,
0.03942565992474556,
-0.08664238452911377,
-0.0035773571580648422,
0.04755005985498428,
0.017497973516583443,
0.010130571201443672,
0.007296019699424505,
-0.033881571143865585,
-0.038797128945589066,
0.03349301591515541,
-0.07917528599500656,
-0.021529285237193108,
-0.04018735513091087,
-0.020992448553442955,
-0.03866346925497055,
0.009970600716769695,
-0.051302846521139145,
-0.03299783170223236,
0.0234671700745821,
0.0898025780916214,
0.0052563343197107315,
-0.054933007806539536,
0.09610331803560257,
0.0164891816675663,
0.06374730169773102,
-0.01311748381704092,
0.005679525434970856,
0.04097903519868851,
0.02551322430372238,
-0.04997025802731514,
-0.11966446042060852,
0.0324559360742569,
-0.08378393948078156,
-0.03119332157075405,
-0.10586164891719818,
-0.0077673872001469135,
-0.03668021038174629,
-0.011378688737750053,
0.04890219122171402,
-0.03619122877717018,
0.03591093793511391,
-0.039362501353025436,
0.018065860494971275,
-0.029515188187360764,
-0.02401304617524147,
0.014227847568690777,
-0.0854388028383255,
0.03533923998475075,
-0.060518693178892136,
-0.021870270371437073,
0.05391046777367592,
0.06142110377550125,
-0.06276131421327591,
-0.013560133054852486,
-0.0393514484167099,
-0.014795003458857536,
-0.053598664700984955,
0.0321345292031765,
-0.03209562599658966,
0.036119893193244934,
-0.06398693472146988,
-0.029394667595624924,
0.007596008479595184,
0.010875851847231388,
0.02062712237238884,
0.02343928813934326,
0.12308302521705627,
0.005188309587538242,
-0.10242527723312378,
-0.06558948755264282,
-0.05470268428325653,
-0.013387288898229599,
0.011112668551504612,
-0.02538655512034893,
-0.06553970277309418,
-0.034811679273843765,
-0.01697790063917637,
0.0035300920717418194,
-0.07435356825590134,
0.032665301114320755,
0.03034251555800438,
-0.08625662326812744,
0.08396682143211365,
0.03137893229722977,
0.047375258058309555,
0.009412666782736778,
0.029818592593073845,
-0.037490811198949814,
-0.02565675973892212,
-0.038590606302022934,
0.006956797558814287,
1.6868299164495449e-34,
0.007781920023262501,
0.07687917351722717,
0.03600134700536728,
0.05126410722732544,
0.13175514340400696,
0.010455391369760036,
-0.11684602499008179,
0.07565397769212723,
-0.02381378971040249,
0.03714920952916145,
-0.027678323909640312,
-0.015077712014317513,
-0.03593346104025841,
0.03537937253713608,
0.10656322538852692,
0.0186204444617033,
-0.06735514104366302,
-0.020639706403017044,
-0.05568346008658409,
0.0064262403175234795,
-0.0034254114143550396,
0.059434499591588974,
0.017189528793096542,
0.09038399904966354,
0.05111841484904289,
-0.05323415249586105,
0.06839818507432938,
-0.011468349024653435,
-0.04063991457223892,
-0.0018467727350071073,
0.00823552068322897,
-0.01613636314868927,
-0.0017932828050106764,
-0.025777488946914673,
-0.023428015410900116,
0.02660149149596691,
-0.01579873077571392,
-0.04609706997871399,
-0.06849409639835358,
-0.06777128577232361,
-0.04765688255429268,
-0.0021622495260089636,
0.014265059493482113,
0.07998531311750412,
0.0073406631126999855,
0.09483388811349869,
0.07269957661628723,
-0.02416163682937622,
-0.05261588841676712,
0.03562876954674721,
-0.02547468990087509,
0.022638024762272835,
-0.057748693972826004,
-0.015117811039090157,
-0.020724983885884285,
0.0221632719039917,
0.0246830303221941,
0.013672406785190105,
-0.0004376877041067928,
-0.048631977289915085,
0.050972964614629745,
-0.07235819101333618,
-0.0008563973824493587,
-0.014283214695751667,
-0.01560215838253498,
0.08667673915624619,
0.07164216786623001,
0.046385981142520905,
0.04337188974022865,
-0.07991751283407211,
-0.03754299879074097,
0.03645595535635948,
0.04010834917426109,
-0.002773023210465908,
-0.005851332098245621,
0.07009003311395645,
-0.01029147021472454,
-0.0005267836968414485,
0.03353117033839226,
-0.027357501909136772,
-0.044552478939294815,
0.009418796747922897,
0.02825600653886795,
0.00017897982615977526,
-0.006869472563266754,
-0.11031381785869598,
0.0630652904510498,
0.014052340760827065,
0.008444409817457199,
0.018853507936000824,
-0.01629732735455036,
-0.051829103380441666,
0.06808052211999893,
-0.042968831956386566,
-0.05062732845544815,
-3.0378171938605755e-33,
-0.00040338243707083166,
-0.09144843369722366,
-0.014917902648448944,
0.0024339265655726194,
0.1065678745508194,
-0.0019389266381040215,
-0.06947194039821625,
-0.1170952245593071,
0.06412771344184875,
-0.043546997010707855,
-0.07465564459562302,
0.028126375749707222,
0.08572077006101608,
-0.027552347630262375,
0.06690505892038345,
-0.022994736209511757,
0.021578390151262283,
0.0006996025331318378,
-0.09309390932321548,
0.058976270258426666,
0.04853450879454613,
0.14110855758190155,
-0.11108854413032532,
0.028703657910227776,
-0.06409291177988052,
0.04292495548725128,
-0.05090292543172836,
-0.032798100262880325,
-0.06022125110030174,
-0.003019926371052861,
-0.06044775992631912,
-0.06334616988897324,
-0.016756972298026085,
0.005702512804418802,
0.01375698484480381,
0.021905237808823586,
-0.041730109602212906,
-0.02618252858519554,
-0.061494819819927216,
0.14096896350383759,
-0.03208833560347557,
-0.03914039582014084,
-0.008712183684110641,
0.012407840229570866,
0.05629987269639969,
0.004522368777543306,
-0.056972455233335495,
-0.08387061953544617,
-0.016560902819037437,
0.02718706801533699,
0.04242139309644699,
0.07506502419710159,
-0.08168291300535202,
-0.009480679407715797,
-0.05335251986980438,
0.0011910060420632362,
-0.023821581155061722,
0.018139369785785675,
0.03252081200480461,
0.054988794028759,
-0.0121756037697196,
0.05890935659408569,
0.0004296467232052237,
0.02231449820101261,
-0.07698448747396469,
-0.1018846184015274,
-0.10262329876422882,
-0.056689705699682236,
-0.019301405176520348,
-0.0235676821321249,
0.04387956112623215,
-0.01708780787885189,
-0.07694613188505173,
0.05980612710118294,
-0.05584132298827171,
0.01864457316696644,
0.020831972360610962,
-0.007975091226398945,
-0.033565208315849304,
-0.0012439857237040997,
-0.08070187270641327,
0.012538764625787735,
0.03382953628897667,
0.05275001376867294,
0.04335544630885124,
-0.06363736093044281,
-0.04504312574863434,
0.022193370386958122,
0.0059867617674171925,
-0.04393729194998741,
-0.025213390588760376,
0.06734747439622879,
-0.06189853698015213,
0.03385953977704048,
0.019164057448506355,
-6.317623046925291e-8,
-0.027157476171851158,
0.04231474548578262,
-0.01976855657994747,
0.004137668292969465,
-0.026432998478412628,
-0.14989586174488068,
0.0784364864230156,
0.09327048808336258,
-0.04667678847908974,
0.015346625819802284,
-0.014049224555492401,
0.04642394930124283,
-0.02452629618346691,
-0.004288170021027327,
0.04353141039609909,
0.03986426815390587,
-0.0039404914714396,
-0.0015222214860841632,
-0.0056950971484184265,
0.12084081768989563,
0.04662848636507988,
0.014758611097931862,
0.007773480378091335,
-0.0012096645077690482,
-0.060556452721357346,
-0.03885037079453468,
-0.033709652721881866,
0.01948375068604946,
0.02019069530069828,
0.02681661769747734,
0.08672476559877396,
0.06845532357692719,
-0.009761127643287182,
-0.028474651277065277,
-0.029525432735681534,
0.07223045825958252,
0.005222567357122898,
-0.004192698281258345,
0.03356417268514633,
0.05020293593406677,
0.07463058084249496,
-0.14932052791118622,
-0.04341316595673561,
0.00007551013550255448,
-0.05497768893837929,
0.04433725029230118,
-0.02323836460709572,
-0.021703757345676422,
0.028072457760572433,
0.07821094244718552,
-0.07331689447164536,
0.01811748556792736,
-0.10140201449394226,
-0.025459865108132362,
0.02314276061952114,
-0.025281265377998352,
-0.04768097400665283,
-0.010638945735991001,
0.11333359777927399,
-0.031827207654714584,
0.12386582791805267,
-0.025453215464949608,
-0.021543797105550766,
0.04440806806087494
] |
snunlp/KR-SBERT-V40K-klueNLI-augSTS | f06554f8087e15a6ffc279ef812ba8fed57e81d5 | 2022-05-03T03:53:28.000Z | [
"pytorch",
"bert",
"feature-extraction",
"ko",
"sentence-transformers",
"sentence-similarity",
"transformers"
] | sentence-similarity | false | snunlp | null | snunlp/KR-SBERT-V40K-klueNLI-augSTS | 1,116 | 2 | sentence-transformers | ---
pipeline_tag: sentence-similarity
tags:
- sentence-transformers
- feature-extraction
- sentence-similarity
- transformers
language:
- ko
---
# snunlp/KR-SBERT-V40K-klueNLI-augSTS
This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
<!--- Describe your model here -->
## Usage (Sentence-Transformers)
Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed:
```
pip install -U sentence-transformers
```
Then you can use the model like this:
```python
from sentence_transformers import SentenceTransformer
sentences = ["This is an example sentence", "Each sentence is converted"]
model = SentenceTransformer('snunlp/KR-SBERT-V40K-klueNLI-augSTS')
embeddings = model.encode(sentences)
print(embeddings)
```
## Usage (HuggingFace Transformers)
Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: first, you pass your input through the transformer model, then you apply the right pooling operation on top of the contextualized word embeddings.
```python
from transformers import AutoTokenizer, AutoModel
import torch
#Mean Pooling - Take attention mask into account for correct averaging
def mean_pooling(model_output, attention_mask):
token_embeddings = model_output[0] #First element of model_output contains all token embeddings
input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9)
# Sentences we want sentence embeddings for
sentences = ['This is an example sentence', 'Each sentence is converted']
# Load model from HuggingFace Hub
tokenizer = AutoTokenizer.from_pretrained('snunlp/KR-SBERT-V40K-klueNLI-augSTS')
model = AutoModel.from_pretrained('snunlp/KR-SBERT-V40K-klueNLI-augSTS')
# Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
# Compute token embeddings
with torch.no_grad():
model_output = model(**encoded_input)
# Perform pooling. In this case, mean pooling.
sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
print("Sentence embeddings:")
print(sentence_embeddings)
```
## Evaluation Results
<!--- Describe how your model was evaluated -->
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name=snunlp/KR-SBERT-V40K-klueNLI-augSTS)
## Full Model Architecture
```
SentenceTransformer(
(0): Transformer({'max_seq_length': 128, 'do_lower_case': False}) with Transformer model: BertModel
(1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
)
```
## Application for document classification
Tutorial in Google Colab: https://colab.research.google.com/drive/1S6WSjOx9h6Wh_rX1Z2UXwx9i_uHLlOiM
|Model|Accuracy|
|-|-|
|KR-SBERT-Medium-NLI-STS|0.8400|
|KR-SBERT-V40K-NLI-STS|0.8400|
|KR-SBERT-V40K-NLI-augSTS|0.8511|
|KR-SBERT-V40K-klueNLI-augSTS|**0.8628**|
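As a minimal sketch of this setup, the sentence embeddings can be fed to any downstream classifier. The snippet below assumes a scikit-learn logistic-regression head and uses placeholder Korean texts rather than the tutorial's actual dataset.
```python
from sentence_transformers import SentenceTransformer
from sklearn.linear_model import LogisticRegression

model = SentenceTransformer('snunlp/KR-SBERT-V40K-klueNLI-augSTS')

# Placeholder labeled documents; replace with your own corpus.
train_texts = ["배송이 정말 빨라요", "품질이 너무 실망스럽습니다"]
train_labels = [1, 0]  # e.g. 1 = positive, 0 = negative

clf = LogisticRegression(max_iter=1000)
clf.fit(model.encode(train_texts), train_labels)

print(clf.predict(model.encode(["배송은 빠른데 포장이 아쉬워요"])))
```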
## Citation
```bibtex
@misc{kr-sbert,
author = {Park, Suzi and Hyopil Shin},
title = {KR-SBERT: A Pre-trained Korean-specific Sentence-BERT model},
year = {2021},
publisher = {GitHub},
journal = {GitHub repository},
howpublished = {\url{https://github.com/snunlp/KR-SBERT}}
}
``` | [
-0.052920833230018616,
-0.034483253955841064,
-0.020926548168063164,
0.037464939057826996,
0.018694384023547173,
0.06946393847465515,
-0.05396059527993202,
0.005022427067160606,
0.012270268984138966,
-0.07674575597047806,
0.04214707016944885,
-0.026909802109003067,
0.04525855556130409,
0.04137447103857994,
0.04112929105758667,
0.020012957975268364,
0.01895716041326523,
0.10027867555618286,
-0.0709054172039032,
-0.1275835633277893,
0.11116913706064224,
0.10527563095092773,
0.007252360228449106,
0.02540004998445511,
0.002880177227780223,
0.10528936982154846,
-0.015430163592100143,
-0.02319822460412979,
0.0034685309510678053,
-0.016218381002545357,
0.03332468867301941,
0.0005927150486968458,
-0.03946477919816971,
0.08037365227937698,
0.08150245994329453,
0.06798981875181198,
-0.04561870917677879,
0.0360979363322258,
-0.020935848355293274,
-0.06503108888864517,
0.0006669713766314089,
-0.0573190338909626,
-0.042320769280195236,
-0.022408341988921165,
0.09523449838161469,
-0.052880167961120605,
-0.14712406694889069,
-0.03289974480867386,
-0.01896413043141365,
-0.029876714572310448,
-0.09073228389024734,
0.02464170567691326,
0.008106130175292492,
0.09349943697452545,
0.00674094632267952,
0.048599254339933395,
0.04180052876472473,
-0.015948858112096786,
0.03906802088022232,
-0.14693862199783325,
-0.04958753287792206,
-0.027726219967007637,
-0.00028096052119508386,
-0.0005351442378014326,
-0.041455529630184174,
-0.024337302893400192,
0.04897743836045265,
0.006779810413718224,
0.024298716336488724,
0.006227497477084398,
-0.07850980758666992,
0.06470891833305359,
-0.04410414770245552,
-0.00047590528265573084,
-0.0576581172645092,
-0.02793600596487522,
0.07571539282798767,
-0.006507712882012129,
0.0620364248752594,
0.03452348709106445,
-0.007259154226630926,
-0.07701333612203598,
0.021754367277026176,
0.09861510992050171,
0.024650616571307182,
-0.06555912643671036,
-0.024528570473194122,
-0.048971857875585556,
0.002498992718756199,
-0.0029807023238390684,
-0.06606701016426086,
-0.12437961250543594,
0.024394407868385315,
-0.07125677913427353,
0.04000377655029297,
0.041600923985242844,
-0.0551278218626976,
-0.01721927709877491,
0.04639682173728943,
0.04308229312300682,
0.024864789098501205,
0.03955051675438881,
0.047717005014419556,
-0.1173451766371727,
-0.04212173447012901,
0.03688334301114082,
-0.023690322414040565,
-0.022477468475699425,
0.05048977956175804,
-0.09507618844509125,
-0.0032001391518861055,
0.00013746132026426494,
-0.0084451325237751,
-0.05019797384738922,
0.10656002908945084,
-0.04571213573217392,
0.03524946793913841,
-0.028986601158976555,
0.0016292511718347669,
0.10633312165737152,
-0.02534668520092964,
0.04762034863233566,
-0.037464968860149384,
0.039953626692295074,
-0.014990429393947124,
-0.018309378996491432,
0.020845947787165642,
1.134784226388427e-33,
-0.016393231227993965,
-0.002508307807147503,
0.005418152082711458,
-0.02020484022796154,
0.02420979179441929,
0.0032818480394780636,
0.029477497562766075,
0.05540388077497482,
-0.10109103471040726,
-0.03973127529025078,
-0.10622692108154297,
0.03336698189377785,
-0.0392233245074749,
0.05500749126076698,
0.010729254223406315,
0.008252926170825958,
-0.04021367430686951,
-0.023604635149240494,
0.0419645830988884,
0.02650037407875061,
0.06566879153251648,
0.021609928458929062,
0.0072467029094696045,
-0.012762009166181087,
-0.08203418552875519,
-0.04290657117962837,
0.05668852850794792,
-0.08301705867052078,
-0.0778530165553093,
-0.007106426637619734,
-0.07307340949773788,
0.02429627999663353,
0.005499451886862516,
0.03574142977595329,
-0.02558421529829502,
-0.010495041497051716,
0.02601580135524273,
-0.02857694961130619,
-0.014985641464591026,
-0.07552722841501236,
-0.038493260741233826,
0.021873004734516144,
-0.03923001140356064,
-0.05981656163930893,
-0.0002492065541446209,
0.04523938521742821,
0.010824098251760006,
-0.009503532201051712,
0.10480791330337524,
0.010679617524147034,
0.10558740049600601,
-0.01473291777074337,
-0.03594237193465233,
-0.013003945350646973,
0.013510208576917648,
0.03539228066802025,
0.06939517706632614,
0.0339445099234581,
0.10900174826383591,
-0.011647743172943592,
0.03135022148489952,
-0.0072721331380307674,
0.054070454090833664,
0.03282932937145233,
0.10141890496015549,
-0.027942515909671783,
0.06330607086420059,
0.03064657188951969,
-0.01011468656361103,
0.0280730202794075,
-0.051967937499284744,
0.01925431191921234,
-0.04943345487117767,
0.0064775459468364716,
0.05458445847034454,
-0.05803968757390976,
-0.042342301458120346,
-0.0829506441950798,
-0.04225439950823784,
0.07562508434057236,
-0.016501570120453835,
-0.05153628811240196,
0.06304799020290375,
-0.05612502247095108,
0.0027608098462224007,
-0.0611967034637928,
0.009892357513308525,
-0.05520863085985184,
0.06953056156635284,
-0.05615457892417908,
0.02016616240143776,
-0.02040059119462967,
0.006358694285154343,
0.03679932653903961,
0.05808684602379799,
-2.684650752744566e-33,
0.038520701229572296,
0.03731219470500946,
-0.056948598474264145,
0.051393162459135056,
-0.027029119431972504,
-0.04548110067844391,
0.016969677060842514,
0.0699927806854248,
0.017104359343647957,
-0.01271750871092081,
-0.045281827449798584,
-0.027292314916849136,
0.11904160678386688,
-0.0742010548710823,
0.08063993602991104,
0.05704290047287941,
-0.006180344149470329,
0.08888188004493713,
0.0278483759611845,
0.07268469780683517,
-0.0059646316803991795,
0.05618644878268242,
-0.12173327803611755,
0.05559256300330162,
-0.0284715685993433,
-0.004595078527927399,
-0.007922437973320484,
-0.0017711458494886756,
-0.01722671464085579,
-0.02717290259897709,
-0.0072286599315702915,
-0.017559396103024483,
-0.06104423478245735,
-0.00839882530272007,
-0.12689031660556793,
0.01877249963581562,
-0.01599629782140255,
-0.011986360885202885,
0.012285923585295677,
0.011595713905990124,
0.027816861867904663,
0.10526317358016968,
-0.0530257523059845,
0.0013676848029717803,
-0.02410620078444481,
-0.006900240201503038,
-0.07025529444217682,
-0.04183880612254143,
0.03058372251689434,
-0.06393157690763474,
-0.00950567051768303,
0.036898914724588394,
-0.12283551692962646,
0.04388510435819626,
-0.06099529564380646,
-0.038499318063259125,
-0.0381486639380455,
-0.05452499911189079,
-0.08287424594163895,
-0.06522328406572342,
-0.07071270793676376,
-0.05090807378292084,
0.053289636969566345,
-0.05472062900662422,
0.09383165091276169,
-0.03454333916306496,
-0.005091674160212278,
0.025126660242676735,
-0.04175788536667824,
-0.05867776647210121,
-0.010365798138082027,
-0.03107300028204918,
0.03694848716259003,
0.07978604733943939,
0.04247095808386803,
-0.043850671499967575,
-0.018924832344055176,
0.008814135566353798,
-0.020714394748210907,
-0.06359532475471497,
0.010900633409619331,
-0.028280183672904968,
0.009029101580381393,
-0.0004868495452683419,
0.005275432486087084,
-0.027503063902258873,
0.03864563629031181,
0.07915682345628738,
0.011074520647525787,
0.012910787016153336,
-0.022992093116044998,
-0.0039061210118234158,
-0.018401360139250755,
0.0844140276312828,
0.04527495801448822,
-4.979829526519097e-8,
-0.07549677044153214,
-0.06430312991142273,
-0.07984378188848495,
0.055718302726745605,
-0.06132754310965538,
-0.038734860718250275,
0.05240320414304733,
0.06597495079040527,
-0.037340085953474045,
-0.05203879997134209,
0.009600000455975533,
0.03378881886601448,
-0.08566737920045853,
0.009434736333787441,
0.002556057181209326,
0.09476859122514725,
0.005187234375625849,
0.07282944023609161,
0.04667431861162186,
0.013088836334645748,
0.04907333850860596,
-0.0028547849506139755,
-0.005042515695095062,
0.03671184554696083,
0.01871274784207344,
0.03188468515872955,
-0.039969246834516525,
0.058938901871442795,
0.0295306034386158,
-0.012089613825082779,
0.010236539877951145,
0.007968852296471596,
-0.0390714593231678,
-0.012174848467111588,
0.025610053911805153,
0.05872360244393349,
0.05324632674455643,
-0.04822679981589317,
0.04062265530228615,
0.06585600227117538,
0.02874566614627838,
0.045803673565387726,
-0.12720677256584167,
0.0017101542325690389,
0.08415950834751129,
0.022676462307572365,
0.00445153983309865,
-0.0727793425321579,
0.04682912304997444,
0.014426138252019882,
0.051941655576229095,
-0.06517982482910156,
-0.05193311721086502,
-0.03195144981145859,
0.004374390468001366,
0.024381844326853752,
0.018324781209230423,
-0.017990874126553535,
0.06774535775184631,
-0.07181024551391602,
0.04319332540035248,
0.08922067284584045,
0.0831887423992157,
-0.044569287449121475
] |
facebook/xglm-1.7B | a1060a08b652653f6c0dece48f53bb785538e4d6 | 2022-02-15T01:29:52.000Z | [
"pytorch",
"xglm",
"text-generation",
"arxiv:2112.10668",
"transformers",
"license:mit"
] | text-generation | false | facebook | null | facebook/xglm-1.7B | 1,112 | null | transformers | ---
license: mit
thumbnail: https://huggingface.co/front/thumbnails/facebook.png
inference: false
---
# XGLM-1.7B
XGLM-1.7B is a multilingual autoregressive language model (with 1.7 billion parameters) trained on a balanced corpus of a diverse set of languages totaling 500 billion sub-tokens. It was introduced in the paper [Few-shot Learning with Multilingual Language Models](https://arxiv.org/abs/2112.10668) by Xi Victoria Lin\*, Todor Mihaylov, Mikel Artetxe, Tianlu Wang, Shuohui Chen, Daniel Simig, Myle Ott, Naman Goyal, Shruti Bhosale, Jingfei Du, Ramakanth Pasunuru, Sam Shleifer, Punit Singh Koura, Vishrav Chaudhary, Brian O'Horo, Jeff Wang, Luke Zettlemoyer, Zornitsa Kozareva, Mona Diab, Veselin Stoyanov, Xian Li\* (\*Equal Contribution). The original implementation was released in [this repository](https://github.com/pytorch/fairseq/tree/main/examples/xglm).
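As a minimal sketch, the checkpoint can be used for plain text generation through the standard causal-LM API in `transformers`; the prompt and decoding settings below are illustrative assumptions (see the COPA example further down for scoring-based usage).
```python
from transformers import XGLMTokenizer, XGLMForCausalLM

tokenizer = XGLMTokenizer.from_pretrained("facebook/xglm-1.7B")
model = XGLMForCausalLM.from_pretrained("facebook/xglm-1.7B")

# Multilingual prompt; greedy decoding keeps the sketch deterministic.
inputs = tokenizer("Le meilleur moment de la journée est", return_tensors="pt")
output_ids = model.generate(**inputs, max_new_tokens=20, do_sample=False)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```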
## Training Data Statistics
The training data statistics of XGLM-1.7B are shown in the table below.
| ISO-639-1| family | name | # tokens | ratio | ratio w/ lowRes upsampling |
|:--------|:-----------------|:------------------------|-------------:|------------:|-------------:|
| en | Indo-European | English | 803526736124 | 0.489906 | 0.3259 |
| ru | Indo-European | Russian | 147791898098 | 0.0901079 | 0.0602 |
| zh | Sino-Tibetan | Chinese | 132770494630 | 0.0809494 | 0.0483 |
| de | Indo-European | German | 89223707856 | 0.0543992 | 0.0363 |
| es | Indo-European | Spanish | 87303083105 | 0.0532282 | 0.0353 |
| fr | Indo-European | French | 77419639775 | 0.0472023 | 0.0313 |
| ja | Japonic | Japanese | 66054364513 | 0.040273 | 0.0269 |
| it | Indo-European | Italian | 41930465338 | 0.0255648 | 0.0171 |
| pt | Indo-European | Portuguese | 36586032444 | 0.0223063 | 0.0297 |
| el | Indo-European | Greek (modern) | 28762166159 | 0.0175361 | 0.0233 |
| ko | Koreanic | Korean | 20002244535 | 0.0121953 | 0.0811 |
| fi | Uralic | Finnish | 16804309722 | 0.0102455 | 0.0681 |
| id | Austronesian | Indonesian | 15423541953 | 0.00940365 | 0.0125 |
| tr | Turkic | Turkish | 12413166065 | 0.00756824 | 0.0101 |
| ar | Afro-Asiatic | Arabic | 12248607345 | 0.00746791 | 0.0099 |
| vi | Austroasiatic | Vietnamese | 11199121869 | 0.00682804 | 0.0091 |
| th | Tai–Kadai | Thai | 10842172807 | 0.00661041 | 0.044 |
| bg | Indo-European | Bulgarian | 9703797869 | 0.00591635 | 0.0393 |
| ca | Indo-European | Catalan | 7075834775 | 0.0043141 | 0.0287 |
| hi | Indo-European | Hindi | 3448390110 | 0.00210246 | 0.014 |
| et | Uralic | Estonian | 3286873851 | 0.00200399 | 0.0133 |
| bn | Indo-European | Bengali, Bangla | 1627447450 | 0.000992245 | 0.0066 |
| ta | Dravidian | Tamil | 1476973397 | 0.000900502 | 0.006 |
| ur | Indo-European | Urdu | 1351891969 | 0.000824241 | 0.0055 |
| sw | Niger–Congo | Swahili | 907516139 | 0.000553307 | 0.0037 |
| te | Dravidian | Telugu | 689316485 | 0.000420272 | 0.0028 |
| eu | Language isolate | Basque | 105304423 | 6.42035e-05 | 0.0043 |
| my | Sino-Tibetan | Burmese | 101358331 | 6.17976e-05 | 0.003 |
| ht | Creole | Haitian, Haitian Creole | 86584697 | 5.27902e-05 | 0.0035 |
| qu | Quechuan | Quechua | 3236108 | 1.97304e-06 | 0.0001 |
## Model card
For intended usage of the model, please refer to the [model card](https://github.com/pytorch/fairseq/blob/main/examples/xglm/model_card.md) released by the XGLM-1.7B development team.
## Example (COPA)
The following snippet shows how to evaluate our models (GPT-3 style, zero-shot) on the Choice of Plausible Alternatives (COPA) task, using examples in English, Chinese and Hindi.
```python
import torch
import torch.nn.functional as F
from transformers import XGLMTokenizer, XGLMForCausalLM
tokenizer = XGLMTokenizer.from_pretrained("facebook/xglm-1.7B")
model = XGLMForCausalLM.from_pretrained("facebook/xglm-1.7B")
data_samples = {
'en': [
{
"premise": "I wanted to conserve energy.",
"choice1": "I swept the floor in the unoccupied room.",
"choice2": "I shut off the light in the unoccupied room.",
"question": "effect",
"label": "1"
},
{
"premise": "The flame on the candle went out.",
"choice1": "I blew on the wick.",
"choice2": "I put a match to the wick.",
"question": "cause",
"label": "0"
}
],
'zh': [
{
"premise": "我想节约能源。",
"choice1": "我在空着的房间里扫了地板。",
"choice2": "我把空房间里的灯关了。",
"question": "effect",
"label": "1"
},
{
"premise": "蜡烛上的火焰熄灭了。",
"choice1": "我吹灭了灯芯。",
"choice2": "我把一根火柴放在灯芯上。",
"question": "cause",
"label": "0"
}
],
'hi': [
{
"premise": "M te vle konsève enèji.",
"choice1": "Mwen te fin baleye chanm lib la.",
"choice2": "Mwen te femen limyè nan chanm lib la.",
"question": "effect",
"label": "1"
},
{
"premise": "Flam bouji a te etenn.",
"choice1": "Mwen te soufle bouji a.",
"choice2": "Mwen te limen mèch bouji a.",
"question": "cause",
"label": "0"
}
]
}
def get_logprobs(prompt):
inputs = tokenizer(prompt, return_tensors="pt")
input_ids, output_ids = inputs["input_ids"], inputs["input_ids"][:, 1:]
outputs = model(**inputs, labels=input_ids)
logits = outputs.logits
logprobs = torch.gather(F.log_softmax(logits, dim=2), 2, output_ids.unsqueeze(2))
return logprobs
# Zero-shot evaluation for the Choice of Plausible Alternatives (COPA) task.
# A return value of 0 indicates that the first alternative is more plausible,
# while 1 indicates that the second alternative is more plausible.
def COPA_eval(prompt, alternative1, alternative2):
lprob1 = get_logprobs(prompt + "\n" + alternative1).sum()
lprob2 = get_logprobs(prompt + "\n" + alternative2).sum()
return 0 if lprob1 > lprob2 else 1
for lang in data_samples:
    for idx, example in enumerate(data_samples[lang]):
        predict = COPA_eval(example["premise"], example["choice1"], example["choice2"])
        print(f'{lang}-{idx}', predict, example['label'])
# en-0 1 1
# en-1 0 0
# zh-0 1 1
# zh-1 0 0
# ht-0 1 1
# ht-1 0 0
``` | [
-0.029091421514749527,
-0.08594769239425659,
0.06796472519636154,
0.01862875185906887,
0.03356607258319855,
0.0924237072467804,
0.04181424528360367,
0.020541928708553314,
0.06085725128650665,
0.018148398026823997,
0.011457864195108414,
-0.09621525555849075,
0.03689919412136078,
0.010389620438218117,
0.022507423534989357,
-0.003232761984691024,
-0.01947128213942051,
0.0017577764810994267,
-0.11146510392427444,
-0.10775207728147507,
0.009084529243409634,
-0.013244147412478924,
0.05654462054371834,
-0.010063967667520046,
0.08685854077339172,
-0.04639344662427902,
-0.04956543818116188,
-0.07131041586399078,
0.08542737364768982,
-0.05375413969159126,
0.023782877251505852,
0.09154198318719864,
-0.020549511536955833,
0.050465214997529984,
-0.004191093146800995,
0.0510840080678463,
-0.07967167347669601,
-0.018806299194693565,
0.068440280854702,
0.007314207032322884,
-0.02877410128712654,
0.02433164417743683,
0.00034129462437704206,
-0.05013757571578026,
0.11005575954914093,
0.0491984486579895,
-0.041431598365306854,
0.008988097310066223,
-0.07712119817733765,
0.009877601638436317,
-0.15507330000400543,
-0.026969272643327713,
0.004346695262938738,
0.06079363450407982,
-0.04219380021095276,
-0.13908343017101288,
-0.0157561544328928,
-0.0164737980812788,
0.03349106013774872,
-0.038277242332696915,
-0.09355002641677856,
-0.10476364195346832,
-0.05631210282444954,
0.025384126231074333,
0.028390031307935715,
-0.022911367937922478,
-0.010643776506185532,
0.018752021715044975,
-0.05645034834742546,
0.06028008460998535,
0.007596156559884548,
0.07392796128988266,
-0.05644442141056061,
0.07808659225702286,
0.034148748964071274,
0.01985078491270542,
0.03701213002204895,
-0.07098178565502167,
0.07371897995471954,
-0.07299739122390747,
0.021594293415546417,
0.032247092574834824,
0.0745730996131897,
-0.038592275232076645,
0.06079297140240669,
-0.051900964230298996,
0.0011825073743239045,
-0.008183454163372517,
0.049628689885139465,
0.0187695175409317,
0.010761010460555553,
-0.021280445158481598,
0.055823035538196564,
0.03921172022819519,
0.031745538115501404,
0.006414402276277542,
0.03597065806388855,
0.02099101059138775,
-0.016771916300058365,
0.06845423579216003,
0.03602148965001106,
0.05889376252889633,
0.07245802134275436,
-0.029611580073833466,
0.004435328301042318,
-0.06862945109605789,
-0.001608008868061006,
-0.02523919753730297,
0.020373346284031868,
-0.1279844045639038,
-0.04157121479511261,
0.04680437967181206,
-0.046869199723005295,
0.03495484218001366,
-0.03067919984459877,
-0.010674909688532352,
-0.010899052023887634,
-0.07801971584558487,
0.027829190716147423,
0.0020450642332434654,
-0.09690630435943604,
0.00039314033347181976,
-0.021428188309073448,
0.021472640335559845,
-0.018438270315527916,
-0.055232904851436615,
-0.011560973711311817,
1.0329452365707396e-32,
0.0528554730117321,
0.06937392801046371,
0.0004186721926089376,
-0.018755903467535973,
-0.009489549323916435,
-0.022100791335105896,
-0.0062543852254748344,
0.01660485379397869,
-0.0849967747926712,
-0.07661574333906174,
-0.022476496174931526,
0.06601639837026596,
-0.045142412185668945,
0.11611343175172806,
-0.04137325659394264,
-0.004047777503728867,
-0.035215526819229126,
0.024067355319857597,
0.027203872799873352,
0.02113449014723301,
0.07740430533885956,
0.05156276747584343,
0.018835296854376793,
-0.04299043491482735,
-0.09437460452318192,
0.10465493053197861,
0.08948661386966705,
-0.0270138718187809,
-0.08473896235227585,
0.04909435287117958,
-0.08603394776582718,
0.006714421324431896,
0.00802879873663187,
0.0567496232688427,
0.008192203938961029,
-0.06949687004089355,
-0.0249666478484869,
-0.06341953575611115,
-0.028994152322411537,
-0.04922349378466606,
-0.01533232256770134,
0.04951781779527664,
0.01839202269911766,
-0.06581754237413406,
-0.04034160077571869,
-0.04457642883062363,
-0.028816061094403267,
-0.03524726629257202,
-0.09998054057359695,
0.006745029706507921,
0.008791337721049786,
-0.005382521077990532,
-0.044523220509290695,
-0.012607737444341183,
0.0728379338979721,
0.0903581902384758,
-0.05379321053624153,
0.04498745873570442,
0.031493403017520905,
0.02306116558611393,
0.004474700894206762,
0.027397187426686287,
0.028606178238987923,
0.04265184700489044,
0.11189958453178406,
0.12889474630355835,
-0.029338741675019264,
-0.008735430426895618,
0.13436532020568848,
-0.003166968934237957,
-0.027063807472586632,
-0.01953055150806904,
-0.10489732772111893,
-0.06585295498371124,
0.057026758790016174,
-0.042175646871328354,
0.10188987106084824,
-0.05074959993362427,
0.039034463465213776,
0.09538257122039795,
-0.05428436025977135,
-0.03649366647005081,
0.03855405002832413,
-0.05732320249080658,
-0.0576777346432209,
-0.0499100536108017,
0.05012555047869682,
-0.08519216626882553,
0.019341465085744858,
0.004759696312248707,
0.03487981855869293,
-0.04751124233007431,
-0.0013440236216410995,
0.026506394147872925,
0.010942725464701653,
-1.0228062304529127e-32,
-0.004886831622570753,
-0.003981641493737698,
-0.06521512567996979,
0.09900351613759995,
-0.051853299140930176,
-0.03657786175608635,
0.08717522770166397,
0.05339184030890465,
-0.009251030161976814,
-0.03378366306424141,
0.006743981968611479,
-0.02398131974041462,
0.048775676637887955,
0.03568335622549057,
0.035046715289354324,
-0.02664230577647686,
0.0807303935289383,
0.05364861339330673,
0.014110373333096504,
0.10354935377836227,
0.00987941399216652,
0.011606368236243725,
-0.03458467870950699,
0.04664388671517372,
0.003935839980840683,
0.052652470767498016,
-0.04320072382688522,
-0.021046338602900505,
-0.05354452133178711,
0.04836919903755188,
-0.0025362225715070963,
-0.038878295570611954,
-0.05933627858757973,
-0.02740788459777832,
-0.06284911930561066,
-0.03299552574753761,
-0.006575197447091341,
0.056166984140872955,
-0.007755259983241558,
0.09029880911111832,
0.05688536912202835,
-0.0025389555376023054,
-0.045847710222005844,
-0.0606849230825901,
0.008004672825336456,
0.012766672298312187,
-0.09448453038930893,
-0.03138048201799393,
0.01570354588329792,
-0.04213012382388115,
-0.019586918875575066,
-0.024714579805731773,
-0.06139395758509636,
0.014058641158044338,
-0.012412364594638348,
-0.07930648326873779,
0.06515304744243622,
-0.07328514754772186,
-0.012557790614664555,
-0.043445371091365814,
-0.08296258002519608,
0.009394340217113495,
0.02111009508371353,
-0.04248476400971413,
0.05013567581772804,
-0.015270001254975796,
-0.06434357166290283,
-0.006787382066249847,
0.014736928045749664,
-0.0575726144015789,
0.07927561551332474,
-0.030338572338223457,
-0.043328024446964264,
-0.010546882636845112,
-0.052631426602602005,
0.04463791102170944,
-0.030467228963971138,
-0.01932331547141075,
-0.011784755624830723,
-0.06208418309688568,
0.01259169727563858,
-0.029458610340952873,
0.018719755113124847,
0.03998614102602005,
0.0027336967177689075,
0.02410968393087387,
-0.043275702744722366,
0.05343599617481232,
0.05087064206600189,
0.08532325178384781,
0.013531793840229511,
0.06368134170770645,
0.0359615683555603,
0.05513733625411987,
-0.016430100426077843,
-7.696451120864367e-8,
-0.08307928591966629,
0.03850375860929489,
0.017627933993935585,
0.06776905059814453,
0.03409271314740181,
-0.08089056611061096,
-0.06392499059438705,
0.00825780350714922,
-0.04001038521528244,
0.06748652458190918,
0.06288380175828934,
0.04232224076986313,
-0.027415750548243523,
-0.027052439749240875,
-0.0344696007668972,
0.025323139503598213,
-0.023563595488667488,
0.00910457968711853,
-0.018679670989513397,
0.030210521072149277,
0.05541935935616493,
-0.03316252678632736,
0.04368586465716362,
-0.044928066432476044,
0.01970062218606472,
-0.05107298120856285,
-0.03251064196228981,
-0.016720738261938095,
0.023237526416778564,
-0.06904035061597824,
-0.031333476305007935,
0.03283221647143364,
-0.05486251041293144,
-0.0732211247086525,
0.04223254695534706,
0.12206486612558365,
0.01155180111527443,
-0.06573451310396194,
-0.0028126260731369257,
0.04454084113240242,
0.030154166743159294,
0.0367739237844944,
-0.03261786699295044,
0.007781737018376589,
0.06350068002939224,
0.043818749487400055,
-0.029899736866354942,
-0.14041279256343842,
0.023753022775053978,
-0.020606152713298798,
0.005780351813882589,
0.003952609840780497,
-0.015604404732584953,
0.048620354384183884,
0.06354702264070511,
0.046381399035453796,
-0.029444124549627304,
-0.03836491331458092,
0.05179895833134651,
0.02151620201766491,
0.0841696709394455,
0.005963393487036228,
-0.025755956768989563,
-0.0162576362490654
] |
svalabs/cross-electra-ms-marco-german-uncased | 34a0bc5aee354593b64f1c2cfe173356ced6e90f | 2021-06-10T07:20:46.000Z | [
"pytorch",
"electra",
"text-classification",
"arxiv:1908.10084",
"arxiv:1611.09268",
"arxiv:2104.08663",
"arxiv:2104.12741",
"arxiv:2010.02666",
"transformers"
] | text-classification | false | svalabs | null | svalabs/cross-electra-ms-marco-german-uncased | 1,112 | 3 | transformers | # SVALabs - German Uncased Electra Cross-Encoder
In this repository, we present our German, uncased cross-encoder for Passage Retrieval.
This model is based on the German ELECTRA uncased model from the [german-nlp-group](https://huggingface.co/german-nlp-group/electra-base-german-uncased) and was fine-tuned as a cross-encoder for Passage Retrieval using the [sentence-transformers](https://github.com/UKPLab/sentence-transformers) package.
For this purpose, we translated the [MSMARCO-Passage-Ranking](https://github.com/microsoft/MSMARCO-Passage-Ranking) dataset with the [fairseq-wmt19-en-de](https://github.com/pytorch/fairseq/tree/master/examples/wmt19) translation model.
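A minimal sketch of this fine-tuning setup with `sentence-transformers` is shown below. The triples file name, the binary label scheme, and all hyperparameters are illustrative assumptions, not the exact configuration used to train this model.
```python
from torch.utils.data import DataLoader

from sentence_transformers import InputExample
from sentence_transformers.cross_encoder import CrossEncoder

# Each line of the (translated) MSMARCO triples file is assumed to hold
# "query \t positive_passage \t negative_passage".
train_samples = []
with open("msmarco-triples.de.tsv", encoding="utf-8") as f:  # hypothetical path
    for line in f:
        query, pos, neg = line.rstrip("\n").split("\t")
        train_samples.append(InputExample(texts=[query, pos], label=1.0))
        train_samples.append(InputExample(texts=[query, neg], label=0.0))

# Start from the German uncased ELECTRA base model and train it as a
# single-score (relevance) cross-encoder.
model = CrossEncoder("german-nlp-group/electra-base-german-uncased", num_labels=1)
train_dataloader = DataLoader(train_samples, shuffle=True, batch_size=16)

model.fit(
    train_dataloader=train_dataloader,
    epochs=1,
    warmup_steps=1000,
    output_path="cross-electra-ms-marco-german-uncased",
)
```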
### Model Details
| | Description or Link |
|---|---|
|**Base model** | [```german-nlp-group/electra-base-german-uncased```](https://huggingface.co/german-nlp-group/electra-base-german-uncased) |
|**Finetuning task**| Passage Retrieval / Semantic Search |
|**Source dataset**| [```MSMARCO-Passage-Ranking```](https://github.com/microsoft/MSMARCO-Passage-Ranking) |
|**Translation model**| [```fairseq-wmt19-en-de```](https://github.com/pytorch/fairseq/tree/master/examples/wmt19) |
### Performance
We evaluated our model on the [GermanDPR test set](https://deepset.ai/germanquad) and followed the benchmark framework of [BEIR](https://github.com/UKPLab/beir).
To put our results into perspective, we also evaluated BM25 on the same test data and report both results in the table below.
We extracted every positive and negative context paragraph from the test set and deduplicated them; the resulting corpus contains 2,871 passages, evaluated against 1,025 queries.
| Model | NDCG@1 | NDCG@5 | NDCG@10 | Recall@1 | Recall@5 | Recall@10 |
|:-------------------:|:------:|:------:|:-------:|:--------:|:--------:|:---------:|
| BM25 | 0.1463 | 0.3451 | 0.4097 | 0.1463 | 0.5424 | 0.7415 |
| BM25 (Top 100) + Ours | 0.6410 | 0.7885 | 0.7943 | 0.6410 | 0.8576 | 0.9024 |
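The `BM25 (Top 100) + Ours` setting first retrieves candidates with BM25 and then reranks them with the cross-encoder. The snippet below is a rough sketch of such a two-stage pipeline, using the `rank_bm25` package as a stand-in first-stage retriever and a toy corpus; the numbers above were produced with the BEIR framework, not with this code.
```python
import numpy as np
from rank_bm25 import BM25Okapi
from sentence_transformers.cross_encoder import CrossEncoder

corpus = [
    "Die Entstehung der Erde ist 4,5 milliarden jahre her.",
    "Der Gepard jagt seine Beute.",
    "Bei ALDI sind die Bananen gerade im Angebot.",
]
query = "wie alt ist unser planet?"

# Stage 1: lexical retrieval with BM25 (keep the top 100 candidates in practice).
bm25 = BM25Okapi([doc.lower().split() for doc in corpus])
bm25_scores = bm25.get_scores(query.lower().split())
candidate_ids = np.argsort(-bm25_scores)[:100]

# Stage 2: rerank the BM25 candidates with the cross-encoder.
cross_model = CrossEncoder("svalabs/cross-electra-ms-marco-german-uncased")
pairs = [(query, corpus[i]) for i in candidate_ids]
ce_scores = cross_model.predict(pairs)

for rank, idx in enumerate(np.argsort(-ce_scores)):
    print(rank, f"{ce_scores[idx]:.3f}", corpus[candidate_ids[idx]])
```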
### How to Use
With the ```sentence-transformers``` package (see [UKPLab/sentence-transformers](https://github.com/UKPLab/sentence-transformers) on GitHub for more details):
```python
from sentence_transformers.cross_encoder import CrossEncoder
cross_model = CrossEncoder("svalabs/cross-electra-ms-marco-german-uncased")
```
### Semantic Search Example
```python
import numpy as np

K = 3  # number of top-ranked documents to print per query
docs = [
"Auf Netflix gibt es endlich die neue Staffel meiner Lieblingsserie.",
"Der Gepard jagt seine Beute.",
"Wir haben in der Agentur ein neues System für Zeiterfassung.",
"Mein Arzt sagt, dass mir dabei eher ein Orthopäde helfen könnte.",
"Einen Impftermin kann mir der Arzt momentan noch nicht anbieten.",
"Auf Kreta hat meine Tochter mit Muscheln eine schöne Sandburg gebaut.",
"Das historische Zentrum (centro storico) liegt auf mehr als 100 Inseln in der Lagune von Venedig.",
"Um in Zukunft sein Vermögen zu schützen, sollte man andere Investmentstrategien in Betracht ziehen.",
"Die Ära der Dinosaurier wurde vermutlich durch den Einschlag eines gigantischen Meteoriten auf der Erde beendet.",
"Bei ALDI sind die Bananen gerade im Angebot.",
"Die Entstehung der Erde ist 4,5 milliarden jahre her.",
"Finanzwerte treiben DAX um mehr als sechs Prozent nach oben Frankfurt/Main gegeben.",
"DAX dreht ins Minus. Konjunkturdaten und Gewinnmitnahmen belasten Frankfurt/Main."
]
queries = [
"dax steigt",
"dax sinkt",
"probleme mit knieschmerzen",
"software für urlaubsstunden",
"raubtier auf der jagd",
"alter der erde",
"wie alt ist unser planet?",
"wie kapital sichern",
"supermarkt lebensmittel reduziert",
"wodurch ist der tyrannosaurus aussgestorben",
"serien streamen"
]
# score each query-document pair with the cross-encoder
from itertools import product
combs = list(product(queries, docs))
outputs = cross_model.predict(combs).reshape((len(queries), len(docs)))
# print results
for i, query in enumerate(queries):
    ranks = np.argsort(-outputs[i])
    print("Query:", query)
    for j, r in enumerate(ranks[:K]):
        print(f"[{j}: {outputs[i, r]: .3f}]", docs[r])
    print("-"*96)
```
**Console Output**:
```
Query: dax steigt
[0: 7.676] Finanzwerte treiben DAX um mehr als sechs Prozent nach oben Frankfurt/Main gegeben.
[1: 0.821] DAX dreht ins Minus. Konjunkturdaten und Gewinnmitnahmen belasten Frankfurt/Main.
[2: -9.905] Um in Zukunft sein Vermögen zu schützen, sollte man andere Investmentstrategien in Betracht ziehen.
------------------------------------------------------------------------------------------------
Query: dax sinkt
[0: 8.079] DAX dreht ins Minus. Konjunkturdaten und Gewinnmitnahmen belasten Frankfurt/Main.
[1: -0.491] Finanzwerte treiben DAX um mehr als sechs Prozent nach oben Frankfurt/Main gegeben.
[2: -9.224] Um in Zukunft sein Vermögen zu schützen, sollte man andere Investmentstrategien in Betracht ziehen.
------------------------------------------------------------------------------------------------
Query: probleme mit knieschmerzen
[0: 6.753] Mein Arzt sagt, dass mir dabei eher ein Orthopäde helfen könnte.
[1: -5.866] Einen Impftermin kann mir der Arzt momentan noch nicht anbieten.
[2: -9.461] Auf Kreta hat meine Tochter mit Muscheln eine schöne Sandburg gebaut.
------------------------------------------------------------------------------------------------
Query: software für urlaubsstunden
[0: 1.707] Wir haben in der Agentur ein neues System für Zeiterfassung.
[1: -10.649] Mein Arzt sagt, dass mir dabei eher ein Orthopäde helfen könnte.
[2: -11.280] DAX dreht ins Minus. Konjunkturdaten und Gewinnmitnahmen belasten Frankfurt/Main.
------------------------------------------------------------------------------------------------
Query: raubtier auf der jagd
[0: 4.596] Der Gepard jagt seine Beute.
[1: -6.809] Auf Netflix gibt es endlich die neue Staffel meiner Lieblingsserie.
[2: -8.392] Das historische Zentrum (centro storico) liegt auf mehr als 100 Inseln in der Lagune von Venedig.
------------------------------------------------------------------------------------------------
Query: alter der erde
[0: 7.343] Die Entstehung der Erde ist 4,5 milliarden jahre her.
[1: -7.664] Die Ära der Dinosaurier wurde vermutlich durch den Einschlag eines gigantischen Meteoriten auf der Erde beendet.
[2: -8.020] Das historische Zentrum (centro storico) liegt auf mehr als 100 Inseln in der Lagune von Venedig.
------------------------------------------------------------------------------------------------
Query: wie alt ist unser planet?
[0: 7.672] Die Entstehung der Erde ist 4,5 milliarden jahre her.
[1: -9.638] Die Ära der Dinosaurier wurde vermutlich durch den Einschlag eines gigantischen Meteoriten auf der Erde beendet.
[2: -10.251] Auf Kreta hat meine Tochter mit Muscheln eine schöne Sandburg gebaut.
------------------------------------------------------------------------------------------------
Query: wie kapital sichern
[0: 3.927] Um in Zukunft sein Vermögen zu schützen, sollte man andere Investmentstrategien in Betracht ziehen.
[1: -8.733] Finanzwerte treiben DAX um mehr als sechs Prozent nach oben Frankfurt/Main gegeben.
[2: -10.090] Mein Arzt sagt, dass mir dabei eher ein Orthopäde helfen könnte.
------------------------------------------------------------------------------------------------
Query: supermarkt lebensmittel reduziert
[0: 3.508] Bei ALDI sind die Bananen gerade im Angebot.
[1: -10.057] Das historische Zentrum (centro storico) liegt auf mehr als 100 Inseln in der Lagune von Venedig.
[2: -10.470] DAX dreht ins Minus. Konjunkturdaten und Gewinnmitnahmen belasten Frankfurt/Main.
------------------------------------------------------------------------------------------------
Query: wodurch ist der tyrannosaurus aussgestorben
[0: 0.079] Die Ära der Dinosaurier wurde vermutlich durch den Einschlag eines gigantischen Meteoriten auf der Erde beendet.
[1: -10.701] Mein Arzt sagt, dass mir dabei eher ein Orthopäde helfen könnte.
[2: -11.200] Auf Netflix gibt es endlich die neue Staffel meiner Lieblingsserie.
------------------------------------------------------------------------------------------------
Query: serien streamen
[0: 3.392] Auf Netflix gibt es endlich die neue Staffel meiner Lieblingsserie.
[1: -5.725] Der Gepard jagt seine Beute.
[2: -8.378] Auf Kreta hat meine Tochter mit Muscheln eine schöne Sandburg gebaut.
------------------------------------------------------------------------------------------------
```
### Contact
- Baran Avinc, [email protected]
- Jonas Grebe, [email protected]
- Lisa Stolz, [email protected]
- Bonian Riebe, [email protected]
### References
- N. Reimers and I. Gurevych (2019), ['Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks'](https://arxiv.org/abs/1908.10084).
- P. Bajaj et al. (2018), ['MS MARCO: A Human Generated MAchine Reading COmprehension Dataset'](https://arxiv.org/abs/1611.09268).
- N. Thakur et al. (2021), ['BEIR: A Heterogenous Benchmark for Zero-shot Evaluation of Information Retrieval Models'](https://arxiv.org/abs/2104.08663).
- T. Möller, J. Risch and M. Pietsch (2021), ['GermanQuAD and GermanDPR: Improving Non-English Question Answering and Passage Retrieval'](https://arxiv.org/abs/2104.12741).
- S. Hofstätter et al. (2021), ['Improving Efficient Neural Ranking Models with Cross-Architecture Knowledge Distillation'](https://arxiv.org/abs/2010.02666).
| [
-0.07072284072637558,
-0.0517718531191349,
0.01625501736998558,
0.02741219289600849,
0.04414548724889755,
0.0707341879606247,
-0.0815039649605751,
0.00018886897305492312,
0.002650399459525943,
-0.06708227097988129,
0.012290116399526596,
-0.06604907661676407,
0.003955815453082323,
0.07872337847948074,
-0.032479461282491684,
0.027872059494256973,
0.07050013542175293,
0.05674384534358978,
-0.07146618515253067,
-0.0445236898958683,
0.033976420760154724,
0.07057886570692062,
0.042815323919057846,
-0.03512248396873474,
0.06738653033971786,
-0.02205490879714489,
-0.02891772985458374,
0.02352292463183403,
0.026391098275780678,
-0.06222604960203171,
-0.00706905871629715,
0.005244319327175617,
-0.023152055218815804,
0.10871865600347519,
0.0014315820299088955,
0.04715300723910332,
-0.0296112522482872,
-0.0702773705124855,
0.018627364188432693,
-0.04589992016553879,
-0.040492162108421326,
0.06975696980953217,
0.00034368978231213987,
0.0374915786087513,
0.10919512808322906,
0.004768649581819773,
-0.06007323041558266,
-0.015077225863933563,
-0.059483304619789124,
0.03281806409358978,
-0.04187820106744766,
0.014814658090472221,
0.04426058009266853,
0.14236760139465332,
-0.02061920054256916,
-0.054027386009693146,
0.008228477090597153,
-0.05880497395992279,
-0.04206368327140808,
-0.0804414451122284,
-0.0784696713089943,
-0.054717324674129486,
-0.04345938563346863,
-0.017968175932765007,
-0.048729926347732544,
-0.04523662105202675,
0.02602929249405861,
0.04925127327442169,
-0.06727304309606552,
-0.008764907717704773,
-0.001731604104861617,
-0.011451827362179756,
-0.0004983021062798798,
0.05068545043468475,
-0.0010356110287830234,
0.03455613553524017,
0.0401776023209095,
-0.004496803041547537,
-0.002956555224955082,
-0.013262251392006874,
0.004526141099631786,
-0.014631246216595173,
0.04927881434559822,
0.02952618896961212,
0.04992993175983429,
-0.05651455372571945,
0.04843413829803467,
-0.02175416238605976,
0.015511860139667988,
0.017257777974009514,
0.0018880250863730907,
-0.06968522816896439,
0.03478721156716347,
-0.013230356387794018,
0.0007968545542098582,
-0.018825046718120575,
0.03228997439146042,
0.04671907052397728,
0.018620464950799942,
0.024702921509742737,
0.019732261076569557,
0.08089554309844971,
0.015590967610478401,
-0.07700389623641968,
-0.049566593021154404,
-0.038716915994882584,
0.07473085075616837,
0.060358963906764984,
0.031058767810463905,
-0.1250571608543396,
0.010906040668487549,
0.04376626014709473,
-0.04360423609614372,
-0.038486968725919724,
-0.04277369752526283,
-0.0526491180062294,
0.046781715005636215,
-0.03950726240873337,
0.06581490486860275,
-0.008887930773198605,
-0.03883694112300873,
0.04717760905623436,
0.03044007532298565,
0.017248114570975304,
-0.041985370218753815,
-0.010368709452450275,
0.057325027883052826,
1.1355069717431903e-33,
0.02898724190890789,
0.019676199182868004,
-0.028946468606591225,
-0.03708783537149429,
0.02563363127410412,
-0.0017281700856983662,
0.009070472791790962,
0.038712963461875916,
-0.09290628880262375,
-0.027926435694098473,
-0.10658326745033264,
0.0715983584523201,
-0.014425544999539852,
0.039520327001810074,
-0.04089072346687317,
0.020745093002915382,
-0.07126504927873611,
-0.010741476900875568,
-0.031695764511823654,
0.03395367041230202,
0.06871318072080612,
0.015881601721048355,
0.01680779829621315,
-0.047382283955812454,
-0.06547018140554428,
0.015535666607320309,
0.03719061613082886,
-0.11830240488052368,
-0.03589025139808655,
0.022624004632234573,
-0.056706879287958145,
-0.010942941531538963,
0.026879649609327316,
0.027200421318411827,
0.0003474689438007772,
0.008393688127398491,
-0.013518361374735832,
-0.008245564065873623,
0.025383032858371735,
-0.07102153450250626,
0.039737556129693985,
-0.023359913378953934,
0.008440514095127583,
-0.05507325753569603,
-0.026857102289795876,
-0.02791777066886425,
-0.05704958364367485,
-0.02594013512134552,
0.06791611760854721,
-0.017521601170301437,
0.04992785304784775,
0.007056122180074453,
-0.08234225958585739,
-0.04732956364750862,
0.04863175004720688,
0.02422277443110943,
0.08370306342840195,
0.06716405600309372,
0.03987685590982437,
0.047500740736722946,
-0.012081554159522057,
0.06692881882190704,
0.1008358895778656,
0.028344731777906418,
0.08002733439207077,
-0.019719749689102173,
-0.04205159842967987,
-0.023216968402266502,
0.009457605890929699,
-0.009238121099770069,
-0.08731115609407425,
-0.01002038735896349,
0.028139082714915276,
0.049861207604408264,
0.03280904144048691,
-0.019712112843990326,
-0.03781716153025627,
-0.05902193859219551,
0.0578358992934227,
-0.007560139521956444,
-0.01944747567176819,
-0.01919499970972538,
0.016353890299797058,
-0.05589132383465767,
-0.036262575536966324,
-0.005955703556537628,
-0.005754379089921713,
-0.09319794178009033,
-0.051314134150743484,
-0.01775001361966133,
0.08497298508882523,
0.043779000639915466,
-0.0717899352312088,
-0.0049451496452093124,
0.06508006155490875,
-2.452136684454971e-33,
0.07559680193662643,
0.03368353843688965,
0.04534519091248512,
0.02579212561249733,
-0.05878659337759018,
-0.07105114310979843,
-0.05146779865026474,
0.09312330931425095,
0.04470301792025566,
0.02795165590941906,
0.013559842482209206,
-0.04125078395009041,
0.04014827683568001,
-0.029124954715371132,
0.09258486330509186,
-0.00043233917676843703,
0.016182854771614075,
0.02133100852370262,
0.01359446719288826,
0.08575768023729324,
0.024831239134073257,
0.06254567205905914,
-0.14245474338531494,
0.05874686315655708,
-0.024541085585951805,
0.012674531899392605,
0.095069020986557,
-0.0029269238002598286,
0.01638481765985489,
-0.06839094310998917,
-0.0071692387573421,
-0.062434133142232895,
-0.027972616255283356,
-0.03302232548594475,
-0.10704221576452255,
0.026900334283709526,
0.0603928305208683,
-0.037042517215013504,
-0.05781333148479462,
0.08849525451660156,
0.011583570390939713,
0.08862296491861343,
-0.10222481191158295,
0.0120594697073102,
0.018092220649123192,
-0.008208629675209522,
-0.1915680170059204,
-0.009918303228914738,
0.04252151772379875,
0.004782090894877911,
-0.00024253847368527204,
0.030406814068555832,
-0.09937945008277893,
0.014061899855732918,
0.011734046041965485,
-0.07830415666103363,
-0.04137333855032921,
-0.08497502654790878,
-0.036366790533065796,
-0.019562851637601852,
0.007449718192219734,
0.04211035370826721,
0.025711216032505035,
-0.022300120443105698,
0.04988052323460579,
-0.07135190814733505,
0.013605118729174137,
-0.04863140359520912,
-0.06387851387262344,
-0.00809503998607397,
0.006236960180103779,
-0.08959603309631348,
0.022854268550872803,
-0.015048145316541195,
0.036342766135931015,
-0.0057706329971551895,
-0.00006582061178050935,
0.008970774710178375,
-0.044936757534742355,
-0.07554604858160019,
-0.04620707407593727,
-0.022237610071897507,
0.06209835410118103,
0.0868397057056427,
0.09685269743204117,
-0.012101126834750175,
0.010185564868152142,
0.025742528960108757,
0.015315751545131207,
0.020412079989910126,
-0.013839581981301308,
-0.035789038985967636,
0.024299083277583122,
0.08221308141946793,
0.016299627721309662,
-4.98766361545222e-8,
-0.10891076177358627,
0.001995588419958949,
-0.14226321876049042,
0.024221498519182205,
-0.09792158007621765,
-0.03546803444623947,
-0.020932946354150772,
0.04241121932864189,
-0.07095640897750854,
0.00994030386209488,
0.05425354838371277,
0.021007230505347252,
-0.12299513071775436,
0.004265780560672283,
-0.012286391109228134,
0.06254017353057861,
0.03068627044558525,
0.0474509634077549,
0.031112169846892357,
0.03749850392341614,
0.03991447016596794,
0.061031144112348557,
0.07297419011592865,
-0.023435238748788834,
0.006543324328958988,
0.03497137874364853,
-0.09151948243379593,
0.05713466554880142,
0.030968328937888145,
-0.07143718749284744,
0.031852684915065765,
0.03933048993349075,
0.023329460993409157,
0.02089451067149639,
-0.03440530598163605,
0.054074954241514206,
-0.02055760845541954,
-0.03564940020442009,
0.0025736556854099035,
0.06640902161598206,
0.1294678896665573,
0.0057817124761641026,
-0.13644053041934967,
0.02233799546957016,
0.08564190566539764,
0.007135874126106501,
0.0114119378849864,
-0.06995631754398346,
0.04632287099957466,
0.015310795977711678,
0.07205703109502792,
-0.13491588830947876,
-0.032764334231615067,
0.02254585176706314,
0.04004492610692978,
0.057245269417762756,
0.010460357181727886,
-0.0152070177718997,
0.07835988700389862,
-0.008452476002275944,
0.06593644618988037,
0.012981359846889973,
-0.014351428486406803,
0.03209232538938522
] |